Merge "Avoid crash for stss sync sample number 0" into nyc-mr2-dev am: 7d1bfc53ae
am: 92c8f3a08d
Change-Id: I78c661a7f55f85e6ce4292085e3ba9dd9421ff33
diff --git a/camera/aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl b/camera/aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl
index 755ec8e..8308095 100644
--- a/camera/aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl
+++ b/camera/aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl
@@ -44,4 +44,5 @@
* @param lastFrameNumber Frame number of the last frame of the streaming request.
*/
oneway void onRepeatingRequestError(in long lastFrameNumber);
+ oneway void onRequestQueueEmpty();
}
diff --git a/camera/ndk/Android.mk b/camera/ndk/Android.mk
index 40dbeef..591dfc2 100644
--- a/camera/ndk/Android.mk
+++ b/camera/ndk/Android.mk
@@ -51,8 +51,6 @@
libcutils \
libcamera_metadata
-LOCAL_CLANG := true
-
include $(BUILD_SHARED_LIBRARY)
endif
diff --git a/camera/ndk/impl/ACameraDevice.cpp b/camera/ndk/impl/ACameraDevice.cpp
index 7d78e2b..229b159 100644
--- a/camera/ndk/impl/ACameraDevice.cpp
+++ b/camera/ndk/impl/ACameraDevice.cpp
@@ -1347,6 +1347,12 @@
}
binder::Status
+CameraDevice::ServiceCallback::onRequestQueueEmpty() {
+ // onRequestQueueEmpty not yet implemented in NDK
+ return binder::Status::ok();
+}
+
+binder::Status
CameraDevice::ServiceCallback::onRepeatingRequestError(int64_t lastFrameNumber) {
binder::Status ret = binder::Status::ok();
diff --git a/camera/ndk/impl/ACameraDevice.h b/camera/ndk/impl/ACameraDevice.h
index 051462b..eb8028b 100644
--- a/camera/ndk/impl/ACameraDevice.h
+++ b/camera/ndk/impl/ACameraDevice.h
@@ -74,6 +74,7 @@
binder::Status onResultReceived(const CameraMetadata& metadata,
const CaptureResultExtras& resultExtras) override;
binder::Status onPrepared(int streamId) override;
+ binder::Status onRequestQueueEmpty() override;
binder::Status onRepeatingRequestError(int64_t lastFrameNumber) override;
private:
const wp<CameraDevice> mDevice;
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index 35555ff..1609da1 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -475,6 +475,7 @@
sp<hardware::ICameraService> cs = CameraManagerGlobal::getInstance().getCameraService();
if (cs == nullptr) {
ALOGE("%s: Cannot reach camera service!", __FUNCTION__);
+ delete device;
return ACAMERA_ERROR_CAMERA_DISCONNECTED;
}
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index 828a758..b91e0f3 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -151,6 +151,7 @@
SENT_RESULT,
UNINITIALIZED,
REPEATING_REQUEST_ERROR,
+ REQUEST_QUEUE_EMPTY,
};
protected:
@@ -225,6 +226,14 @@
return binder::Status::ok();
}
+ virtual binder::Status onRequestQueueEmpty() {
+ Mutex::Autolock l(mLock);
+ mLastStatus = REQUEST_QUEUE_EMPTY;
+ mStatusesHit.push_back(mLastStatus);
+ mStatusCondition.broadcast();
+ return binder::Status::ok();
+ }
+
// Test helper functions:
bool hadError() const {
diff --git a/cmds/screenrecord/Overlay.cpp b/cmds/screenrecord/Overlay.cpp
index 9fd192c..be993e0 100644
--- a/cmds/screenrecord/Overlay.cpp
+++ b/cmds/screenrecord/Overlay.cpp
@@ -259,6 +259,11 @@
const char* format = "%T";
struct tm tm;
+ if (mUseMonotonicTimestamps) {
+ snprintf(buf, bufLen, "%" PRId64, monotonicNsec);
+ return;
+ }
+
// localtime/strftime is not the fastest way to do this, but a trivial
// benchmark suggests that the cost is negligible.
int64_t realTime = mStartRealtimeNsecs +
diff --git a/cmds/screenrecord/Overlay.h b/cmds/screenrecord/Overlay.h
index ee3444d..1d8a569 100644
--- a/cmds/screenrecord/Overlay.h
+++ b/cmds/screenrecord/Overlay.h
@@ -37,7 +37,7 @@
*/
class Overlay : public GLConsumer::FrameAvailableListener, Thread {
public:
- Overlay() : Thread(false),
+ Overlay(bool monotonicTimestamps) : Thread(false),
mThreadResult(UNKNOWN_ERROR),
mState(UNINITIALIZED),
mFrameAvailable(false),
@@ -45,7 +45,8 @@
mStartMonotonicNsecs(0),
mStartRealtimeNsecs(0),
mLastFrameNumber(-1),
- mTotalDroppedFrames(0)
+ mTotalDroppedFrames(0),
+ mUseMonotonicTimestamps(monotonicTimestamps)
{}
// Creates a thread that performs the overlay. Pass in the surface that
@@ -151,6 +152,8 @@
nsecs_t mLastFrameNumber;
size_t mTotalDroppedFrames;
+ bool mUseMonotonicTimestamps;
+
static const char* kPropertyNames[];
};
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index 59d5661..6097f01 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -51,6 +51,7 @@
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaMuxer.h>
#include <media/ICrypto.h>
+#include <media/MediaCodecBuffer.h>
#include "screenrecord.h"
#include "Overlay.h"
@@ -68,6 +69,7 @@
// Command-line parameters.
static bool gVerbose = false; // chatty on stdout
static bool gRotate = false; // rotate 90 degrees
+static bool gMonotonicTime = false; // use system monotonic time for timestamps
static enum {
FORMAT_MP4, FORMAT_H264, FORMAT_FRAMES, FORMAT_RAW_FRAMES
} gOutputFormat = FORMAT_MP4; // data format for output
@@ -327,7 +329,7 @@
assert((rawFp == NULL && muxer != NULL) || (rawFp != NULL && muxer == NULL));
- Vector<sp<ABuffer> > buffers;
+ Vector<sp<MediaCodecBuffer> > buffers;
err = encoder->getOutputBuffers(&buffers);
if (err != NO_ERROR) {
fprintf(stderr, "Unable to get output buffers (err=%d)\n", err);
@@ -410,7 +412,10 @@
// want to queue these up and do them on a different thread.
ATRACE_NAME("write sample");
assert(trackIdx != -1);
- err = muxer->writeSampleData(buffers[bufIndex], trackIdx,
+ // TODO
+ sp<ABuffer> buffer = new ABuffer(
+ buffers[bufIndex]->data(), buffers[bufIndex]->size());
+ err = muxer->writeSampleData(buffer, trackIdx,
ptsUsec, flags);
if (err != NO_ERROR) {
fprintf(stderr,
@@ -609,7 +614,7 @@
sp<Overlay> overlay;
if (gWantFrameTime) {
// Send virtual display frames to an external texture.
- overlay = new Overlay();
+ overlay = new Overlay(gMonotonicTime);
err = overlay->start(encoderInputSurface, &bufferProducer);
if (err != NO_ERROR) {
if (encoder != NULL) encoder->release();
@@ -892,6 +897,7 @@
{ "show-frame-time", no_argument, NULL, 'f' },
{ "rotate", no_argument, NULL, 'r' },
{ "output-format", required_argument, NULL, 'o' },
+ { "monotonic-time", no_argument, NULL, 'm' },
{ NULL, 0, NULL, 0 }
};
@@ -971,6 +977,9 @@
return 2;
}
break;
+ case 'm':
+ gMonotonicTime = true;
+ break;
default:
if (ic != '?') {
fprintf(stderr, "getopt_long returned unexpected value 0x%x\n", ic);
diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk
index f8c8d3d..b28d509 100644
--- a/cmds/stagefright/Android.mk
+++ b/cmds/stagefright/Android.mk
@@ -18,7 +18,6 @@
external/jpeg \
LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-LOCAL_CLANG := true
LOCAL_MODULE_TAGS := optional
@@ -43,7 +42,6 @@
$(TOP)/frameworks/native/include/media/hardware
LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-LOCAL_CLANG := true
LOCAL_MODULE_TAGS := optional
@@ -68,7 +66,6 @@
$(TOP)/frameworks/native/include/media/hardware
LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-LOCAL_CLANG := true
LOCAL_MODULE_TAGS := optional
@@ -93,7 +90,6 @@
$(TOP)/frameworks/native/include/media/openmax
LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-LOCAL_CLANG := true
LOCAL_MODULE_TAGS := optional
@@ -117,7 +113,6 @@
$(TOP)/frameworks/native/include/media/openmax
LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-LOCAL_CLANG := true
LOCAL_MODULE_TAGS := optional
@@ -141,7 +136,6 @@
$(TOP)/frameworks/native/include/media/openmax
LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-LOCAL_CLANG := true
LOCAL_MODULE_TAGS := optional
@@ -166,7 +160,6 @@
$(TOP)/frameworks/native/include/media/openmax
LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-LOCAL_CLANG := true
LOCAL_MODULE_TAGS := optional
@@ -208,7 +201,6 @@
libstagefright_mediafilter
LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-LOCAL_CLANG := true
LOCAL_MODULE_TAGS := optional
@@ -232,7 +224,6 @@
$(TOP)/frameworks/native/include/media/openmax
LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-LOCAL_CLANG := true
LOCAL_MODULE_TAGS := optional
diff --git a/cmds/stagefright/SimplePlayer.cpp b/cmds/stagefright/SimplePlayer.cpp
index 50913cd..afb7db3 100644
--- a/cmds/stagefright/SimplePlayer.cpp
+++ b/cmds/stagefright/SimplePlayer.cpp
@@ -25,6 +25,7 @@
#include <media/AudioTrack.h>
#include <media/ICrypto.h>
#include <media/IMediaHTTPService.h>
+#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -355,7 +356,7 @@
err = state->mCodec->dequeueInputBuffer(&index, -1ll);
CHECK_EQ(err, (status_t)OK);
- const sp<ABuffer> &dstBuffer = state->mBuffers[0].itemAt(index);
+ const sp<MediaCodecBuffer> &dstBuffer = state->mBuffers[0].itemAt(index);
CHECK_LE(srcBuffer->size(), dstBuffer->capacity());
dstBuffer->setRange(0, srcBuffer->size());
@@ -482,11 +483,13 @@
state->mAvailInputBufferIndices.erase(
state->mAvailInputBufferIndices.begin());
- const sp<ABuffer> &dstBuffer =
+ const sp<MediaCodecBuffer> &dstBuffer =
state->mBuffers[0].itemAt(index);
+ sp<ABuffer> abuffer = new ABuffer(dstBuffer->base(), dstBuffer->capacity());
- err = mExtractor->readSampleData(dstBuffer);
+ err = mExtractor->readSampleData(abuffer);
CHECK_EQ(err, (status_t)OK);
+ dstBuffer->setRange(abuffer->offset(), abuffer->size());
int64_t timeUs;
CHECK_EQ(mExtractor->getSampleTime(&timeUs), (status_t)OK);
@@ -530,7 +533,7 @@
state->mCodec->releaseOutputBuffer(info->mIndex);
} else {
if (state->mAudioTrack != NULL) {
- const sp<ABuffer> &srcBuffer =
+ const sp<MediaCodecBuffer> &srcBuffer =
state->mBuffers[1].itemAt(info->mIndex);
renderAudio(state, info, srcBuffer);
@@ -597,7 +600,7 @@
}
void SimplePlayer::renderAudio(
- CodecState *state, BufferInfo *info, const sp<ABuffer> &buffer) {
+ CodecState *state, BufferInfo *info, const sp<MediaCodecBuffer> &buffer) {
CHECK(state->mAudioTrack != NULL);
if (state->mAudioTrack->stopped()) {
diff --git a/cmds/stagefright/SimplePlayer.h b/cmds/stagefright/SimplePlayer.h
index ae9dfd2..918fd24 100644
--- a/cmds/stagefright/SimplePlayer.h
+++ b/cmds/stagefright/SimplePlayer.h
@@ -25,6 +25,7 @@
struct AudioTrack;
class IGraphicBufferProducer;
struct MediaCodec;
+class MediaCodecBuffer;
struct NuMediaExtractor;
class Surface;
@@ -73,7 +74,7 @@
{
sp<MediaCodec> mCodec;
Vector<sp<ABuffer> > mCSD;
- Vector<sp<ABuffer> > mBuffers[2];
+ Vector<sp<MediaCodecBuffer> > mBuffers[2];
List<size_t> mAvailInputBufferIndices;
List<BufferInfo> mAvailOutputBufferInfos;
@@ -101,7 +102,7 @@
status_t onOutputFormatChanged(size_t trackIndex, CodecState *state);
void renderAudio(
- CodecState *state, BufferInfo *info, const sp<ABuffer> &buffer);
+ CodecState *state, BufferInfo *info, const sp<MediaCodecBuffer> &buffer);
DISALLOW_EVIL_CONSTRUCTORS(SimplePlayer);
};
diff --git a/cmds/stagefright/codec.cpp b/cmds/stagefright/codec.cpp
index dae9bbe..26135d7 100644
--- a/cmds/stagefright/codec.cpp
+++ b/cmds/stagefright/codec.cpp
@@ -26,6 +26,7 @@
#include <media/ICrypto.h>
#include <media/IMediaHTTPService.h>
#include <media/IMediaPlayerService.h>
+#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
@@ -56,8 +57,8 @@
struct CodecState {
sp<MediaCodec> mCodec;
- Vector<sp<ABuffer> > mInBuffers;
- Vector<sp<ABuffer> > mOutBuffers;
+ Vector<sp<MediaCodecBuffer> > mInBuffers;
+ Vector<sp<MediaCodecBuffer> > mOutBuffers;
bool mSignalledInputEOS;
bool mSawOutputEOS;
int64_t mNumBuffersDecoded;
@@ -174,10 +175,12 @@
if (err == OK) {
ALOGV("filling input buffer %zu", index);
- const sp<ABuffer> &buffer = state->mInBuffers.itemAt(index);
+ const sp<MediaCodecBuffer> &buffer = state->mInBuffers.itemAt(index);
+ sp<ABuffer> abuffer = new ABuffer(buffer->base(), buffer->capacity());
- err = extractor->readSampleData(buffer);
+ err = extractor->readSampleData(abuffer);
CHECK_EQ(err, (status_t)OK);
+ buffer->setRange(abuffer->offset(), abuffer->size());
int64_t timeUs;
err = extractor->getSampleTime(&timeUs);
diff --git a/cmds/stagefright/mediafilter.cpp b/cmds/stagefright/mediafilter.cpp
index d829df0..410dd69 100644
--- a/cmds/stagefright/mediafilter.cpp
+++ b/cmds/stagefright/mediafilter.cpp
@@ -26,6 +26,7 @@
#include <gui/Surface.h>
#include <media/ICrypto.h>
#include <media/IMediaHTTPService.h>
+#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -131,8 +132,8 @@
struct CodecState {
sp<MediaCodec> mCodec;
- Vector<sp<ABuffer> > mInBuffers;
- Vector<sp<ABuffer> > mOutBuffers;
+ Vector<sp<MediaCodecBuffer> > mInBuffers;
+ Vector<sp<MediaCodecBuffer> > mOutBuffers;
bool mSignalledInputEOS;
bool mSawOutputEOS;
int64_t mNumBuffersDecoded;
@@ -183,9 +184,9 @@
}
size_t outIndex = frame.index;
- const sp<ABuffer> &srcBuffer =
+ const sp<MediaCodecBuffer> &srcBuffer =
vidState->mOutBuffers.itemAt(outIndex);
- const sp<ABuffer> &destBuffer =
+ const sp<MediaCodecBuffer> &destBuffer =
filterState->mInBuffers.itemAt(filterIndex);
sp<AMessage> srcFormat, destFormat;
@@ -532,10 +533,12 @@
if (err == OK) {
ALOGV("filling input buffer %zu", index);
- const sp<ABuffer> &buffer = state->mInBuffers.itemAt(index);
+ const sp<MediaCodecBuffer> &buffer = state->mInBuffers.itemAt(index);
+ sp<ABuffer> abuffer = new ABuffer(buffer->base(), buffer->capacity());
- err = extractor->readSampleData(buffer);
+ err = extractor->readSampleData(abuffer);
CHECK(err == OK);
+ buffer->setRange(abuffer->offset(), abuffer->size());
int64_t timeUs;
err = extractor->getSampleTime(&timeUs);
diff --git a/cmds/stagefright/sf2.cpp b/cmds/stagefright/sf2.cpp
index 1a4bf08..8fe1dd4 100644
--- a/cmds/stagefright/sf2.cpp
+++ b/cmds/stagefright/sf2.cpp
@@ -230,7 +230,6 @@
mCodec->signalResume();
(new AMessage(kWhatSeek, this))->post(5000000ll);
- } else if (what == CodecBase::kWhatOutputFormatChanged) {
} else if (what == CodecBase::kWhatShutdownCompleted) {
mDecodeLooper->unregisterHandler(mCodec->id());
diff --git a/drm/libdrmframework/plugins/forward-lock/internal-format/doc/FwdLock.html b/drm/libdrmframework/plugins/forward-lock/internal-format/doc/FwdLock.html
old mode 100755
new mode 100644
diff --git a/drm/libmediadrm/Android.mk b/drm/libmediadrm/Android.mk
index 3be1d60..3f0e663 100644
--- a/drm/libmediadrm/Android.mk
+++ b/drm/libmediadrm/Android.mk
@@ -7,22 +7,25 @@
include $(CLEAR_VARS)
LOCAL_SRC_FILES:= \
- Crypto.cpp \
- Drm.cpp \
- DrmSessionManager.cpp \
- SharedLibrary.cpp
+ Crypto.cpp \
+ Drm.cpp \
+ DrmSessionManager.cpp \
+ ICrypto.cpp \
+ IDrm.cpp \
+ IDrmClient.cpp \
+ IMediaDrmService.cpp \
+ SharedLibrary.cpp
LOCAL_SHARED_LIBRARIES := \
- libbinder \
- libcutils \
- libdl \
- liblog \
- libmedia \
- libstagefright \
- libutils
+ libbinder \
+ libcutils \
+ libdl \
+ liblog \
+ libmediautils \
+ libstagefright_foundation \
+ libutils
LOCAL_CFLAGS += -Werror -Wno-error=deprecated-declarations -Wall
-LOCAL_CLANG := true
LOCAL_MODULE:= libmediadrm
diff --git a/drm/libmediadrm/Drm.cpp b/drm/libmediadrm/Drm.cpp
index 9ab08db..07e9414 100644
--- a/drm/libmediadrm/Drm.cpp
+++ b/drm/libmediadrm/Drm.cpp
@@ -334,6 +334,7 @@
return -EINVAL;
}
+ setListener(NULL);
delete mPlugin;
mPlugin = NULL;
diff --git a/media/libmedia/ICrypto.cpp b/drm/libmediadrm/ICrypto.cpp
similarity index 98%
rename from media/libmedia/ICrypto.cpp
rename to drm/libmediadrm/ICrypto.cpp
index 7b261be..8ba80c6 100644
--- a/media/libmedia/ICrypto.cpp
+++ b/drm/libmediadrm/ICrypto.cpp
@@ -302,6 +302,10 @@
int32_t offset = data.readInt32();
int32_t numSubSamples = data.readInt32();
+ if (numSubSamples < 0 || numSubSamples > 0xffff) {
+ reply->writeInt32(BAD_VALUE);
+ return OK;
+ }
CryptoPlugin::SubSample *subSamples =
new CryptoPlugin::SubSample[numSubSamples];
diff --git a/media/libmedia/IDrm.cpp b/drm/libmediadrm/IDrm.cpp
similarity index 100%
rename from media/libmedia/IDrm.cpp
rename to drm/libmediadrm/IDrm.cpp
diff --git a/media/libmedia/IDrmClient.cpp b/drm/libmediadrm/IDrmClient.cpp
similarity index 100%
rename from media/libmedia/IDrmClient.cpp
rename to drm/libmediadrm/IDrmClient.cpp
diff --git a/media/libmedia/IMediaDrmService.cpp b/drm/libmediadrm/IMediaDrmService.cpp
similarity index 97%
rename from media/libmedia/IMediaDrmService.cpp
rename to drm/libmediadrm/IMediaDrmService.cpp
index 9b6ecfd..84812dc 100644
--- a/media/libmedia/IMediaDrmService.cpp
+++ b/drm/libmediadrm/IMediaDrmService.cpp
@@ -37,7 +37,7 @@
class BpMediaDrmService: public BpInterface<IMediaDrmService>
{
public:
- BpMediaDrmService(const sp<IBinder>& impl)
+ explicit BpMediaDrmService(const sp<IBinder>& impl)
: BpInterface<IMediaDrmService>(impl)
{
}
diff --git a/include/media/AudioEffect.h b/include/media/AudioEffect.h
index 6af1962..c69c11d 100644
--- a/include/media/AudioEffect.h
+++ b/include/media/AudioEffect.h
@@ -24,8 +24,8 @@
#include <media/IAudioPolicyService.h>
#include <media/IEffect.h>
#include <media/IEffectClient.h>
-#include <hardware/audio_effect.h>
#include <media/AudioSystem.h>
+#include <system/audio_effect.h>
#include <utils/RefBase.h>
#include <utils/Errors.h>
diff --git a/include/media/AudioParameter.h b/include/media/AudioParameter.h
index 891bc4b..63c4d6c 100644
--- a/include/media/AudioParameter.h
+++ b/include/media/AudioParameter.h
@@ -49,7 +49,33 @@
static const char * const keyInputSource;
static const char * const keyScreenState;
- String8 toString();
+ // keyBtNrec: BT SCO Noise Reduction + Echo Cancellation parameters
+ // keyHwAvSync: get HW synchronization source identifier from a device
+ // keyMonoOutput: Enable mono audio playback
+ // keyStreamHwAvSync: set HW synchronization source identifier on a stream
+ static const char * const keyBtNrec;
+ static const char * const keyHwAvSync;
+ static const char * const keyMonoOutput;
+ static const char * const keyStreamHwAvSync;
+
+ // keyStreamConnect / Disconnect: value is an int in audio_devices_t
+ static const char * const keyStreamConnect;
+ static const char * const keyStreamDisconnect;
+
+ // For querying stream capabilities. All the returned values are lists.
+ // keyStreamSupportedFormats: audio_format_t
+ // keyStreamSupportedChannels: audio_channel_mask_t
+ // keyStreamSupportedSamplingRates: sampling rate values
+ static const char * const keyStreamSupportedFormats;
+ static const char * const keyStreamSupportedChannels;
+ static const char * const keyStreamSupportedSamplingRates;
+
+ static const char * const valueOn;
+ static const char * const valueOff;
+
+ static const char * const valueListSeparator;
+
+ String8 toString() const;
status_t add(const String8& key, const String8& value);
status_t addInt(const String8& key, const int value);
@@ -57,12 +83,12 @@
status_t remove(const String8& key);
- status_t get(const String8& key, String8& value);
- status_t getInt(const String8& key, int& value);
- status_t getFloat(const String8& key, float& value);
- status_t getAt(size_t index, String8& key, String8& value);
+ status_t get(const String8& key, String8& value) const;
+ status_t getInt(const String8& key, int& value) const;
+ status_t getFloat(const String8& key, float& value) const;
+ status_t getAt(size_t index, String8& key, String8& value) const;
- size_t size() { return mParameters.size(); }
+ size_t size() const { return mParameters.size(); }
private:
String8 mKeyValuePairs;
diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h
index 63076e9..f7eb397 100644
--- a/include/media/AudioRecord.h
+++ b/include/media/AudioRecord.h
@@ -180,7 +180,7 @@
audio_session_t sessionId = AUDIO_SESSION_ALLOCATE,
transfer_type transferType = TRANSFER_DEFAULT,
audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE,
- int uid = -1,
+ uid_t uid = AUDIO_UID_INVALID,
pid_t pid = -1,
const audio_attributes_t* pAttributes = NULL);
@@ -218,7 +218,7 @@
audio_session_t sessionId = AUDIO_SESSION_ALLOCATE,
transfer_type transferType = TRANSFER_DEFAULT,
audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE,
- int uid = -1,
+ uid_t uid = AUDIO_UID_INVALID,
pid_t pid = -1,
const audio_attributes_t* pAttributes = NULL);
@@ -642,7 +642,7 @@
sp<DeathNotifier> mDeathNotifier;
uint32_t mSequence; // incremented for each new IAudioRecord attempt
- int mClientUid;
+ uid_t mClientUid;
pid_t mClientPid;
audio_attributes_t mAttributes;
diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h
index 2174b2f..0533ba6 100644
--- a/include/media/AudioSystem.h
+++ b/include/media/AudioSystem.h
@@ -17,12 +17,12 @@
#ifndef ANDROID_AUDIOSYSTEM_H_
#define ANDROID_AUDIOSYSTEM_H_
-#include <hardware/audio_effect.h>
#include <media/AudioPolicy.h>
#include <media/AudioIoDescriptor.h>
#include <media/IAudioFlingerClient.h>
#include <media/IAudioPolicyServiceClient.h>
#include <system/audio.h>
+#include <system/audio_effect.h>
#include <system/audio_policy.h>
#include <utils/Errors.h>
#include <utils/Mutex.h>
@@ -330,8 +330,8 @@
static status_t startAudioSource(const struct audio_port_config *source,
const audio_attributes_t *attributes,
- audio_io_handle_t *handle);
- static status_t stopAudioSource(audio_io_handle_t handle);
+ audio_patch_handle_t *handle);
+ static status_t stopAudioSource(audio_patch_handle_t handle);
static status_t setMasterMono(bool mono);
static status_t getMasterMono(bool *mono);
diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h
index 88c4e61..7c5686a 100644
--- a/include/media/AudioTrack.h
+++ b/include/media/AudioTrack.h
@@ -233,7 +233,7 @@
audio_session_t sessionId = AUDIO_SESSION_ALLOCATE,
transfer_type transferType = TRANSFER_DEFAULT,
const audio_offload_info_t *offloadInfo = NULL,
- int uid = -1,
+ uid_t uid = AUDIO_UID_INVALID,
pid_t pid = -1,
const audio_attributes_t* pAttributes = NULL,
bool doNotReconnect = false,
@@ -263,7 +263,7 @@
audio_session_t sessionId = AUDIO_SESSION_ALLOCATE,
transfer_type transferType = TRANSFER_DEFAULT,
const audio_offload_info_t *offloadInfo = NULL,
- int uid = -1,
+ uid_t uid = AUDIO_UID_INVALID,
pid_t pid = -1,
const audio_attributes_t* pAttributes = NULL,
bool doNotReconnect = false,
@@ -309,7 +309,7 @@
audio_session_t sessionId = AUDIO_SESSION_ALLOCATE,
transfer_type transferType = TRANSFER_DEFAULT,
const audio_offload_info_t *offloadInfo = NULL,
- int uid = -1,
+ uid_t uid = AUDIO_UID_INVALID,
pid_t pid = -1,
const audio_attributes_t* pAttributes = NULL,
bool doNotReconnect = false,
@@ -758,6 +758,9 @@
* The timestamp parameter is undefined on return, if status is not NO_ERROR.
*/
status_t getTimestamp(AudioTimestamp& timestamp);
+private:
+ status_t getTimestamp_l(AudioTimestamp& timestamp);
+public:
/* Return the extended timestamp, with additional timebase info and improved drain behavior.
*
@@ -840,6 +843,24 @@
status_t pendingDuration(int32_t *msec,
ExtendedTimestamp::Location location = ExtendedTimestamp::LOCATION_SERVER);
+ /* hasStarted() is used to determine if audio is now audible at the device after
+ * a start() command. The underlying implementation checks for a nonzero timestamp position
+ * or increment as the basis for the "audible" assumption.
+ *
+ * hasStarted() returns true if the track has been started() and audio is audible
+ * and no subsequent pause() or flush() has been called. Immediately after pause() or
+ * flush() hasStarted() will return false.
+ *
+ * If stop() has been called, hasStarted() will return true if audio is still being
+ * delivered or has finished delivery (even if no audio was written) for both offloaded
+ * and normal tracks. This property removes a race condition in checking hasStarted()
+ * for very short clips, where stop() must be called to finish drain.
+ *
+ * In all cases, hasStarted() may turn false briefly after a subsequent start() is called
+ * until audio becomes audible again.
+ */
+ bool hasStarted(); // not const
+
protected:
/* copying audio tracks is not allowed */
AudioTrack(const AudioTrack& other);
@@ -1041,6 +1062,10 @@
// and could be easily widened to uint64_t
int64_t mStartUs; // the start time after flush or stop.
// only used for offloaded and direct tracks.
+ ExtendedTimestamp mStartEts; // Extended timestamp at start for normal
+ // AudioTracks.
+ AudioTimestamp mStartTs; // Timestamp at start for offloaded or direct
+ // AudioTracks.
bool mPreviousTimestampValid;// true if mPreviousTimestamp is valid
bool mTimestampStartupGlitchReported; // reduce log spam
@@ -1056,6 +1081,10 @@
// after flush.
int64_t mFramesWrittenServerOffset; // An offset to server frames due to
// restoring AudioTrack, or stop/start.
+ // This offset is also used for static tracks.
+ int64_t mFramesWrittenAtRestore; // Frames written at restore point (or frames
+ // delivered for static tracks).
+ // -1 indicates no previous restore point.
audio_output_flags_t mFlags; // same as mOrigFlags, except for bits that may
// be denied by client or server, such as
@@ -1101,7 +1130,7 @@
sp<DeathNotifier> mDeathNotifier;
uint32_t mSequence; // incremented for each new IAudioTrack attempt
- int mClientUid;
+ uid_t mClientUid;
pid_t mClientPid;
sp<AudioSystem::AudioDeviceCallback> mDeviceCallback;
diff --git a/include/media/IAudioFlinger.h b/include/media/IAudioFlinger.h
index 096f7ef..638f552 100644
--- a/include/media/IAudioFlinger.h
+++ b/include/media/IAudioFlinger.h
@@ -28,9 +28,8 @@
#include <media/IAudioRecord.h>
#include <media/IAudioFlingerClient.h>
#include <system/audio.h>
+#include <system/audio_effect.h>
#include <system/audio_policy.h>
-#include <hardware/audio_policy.h>
-#include <hardware/audio_effect.h>
#include <media/IEffect.h>
#include <media/IEffectClient.h>
#include <utils/String8.h>
diff --git a/include/media/IAudioPolicyService.h b/include/media/IAudioPolicyService.h
index ef15a0c..5637dd5 100644
--- a/include/media/IAudioPolicyService.h
+++ b/include/media/IAudioPolicyService.h
@@ -164,8 +164,8 @@
virtual status_t startAudioSource(const struct audio_port_config *source,
const audio_attributes_t *attributes,
- audio_io_handle_t *handle) = 0;
- virtual status_t stopAudioSource(audio_io_handle_t handle) = 0;
+ audio_patch_handle_t *handle) = 0;
+ virtual status_t stopAudioSource(audio_patch_handle_t handle) = 0;
virtual status_t setMasterMono(bool mono) = 0;
virtual status_t getMasterMono(bool *mono) = 0;
diff --git a/include/media/IMediaPlayer.h b/include/media/IMediaPlayer.h
index 0fd8933..edcca64 100644
--- a/include/media/IMediaPlayer.h
+++ b/include/media/IMediaPlayer.h
@@ -65,7 +65,14 @@
virtual status_t setSyncSettings(const AVSyncSettings& sync, float videoFpsHint) = 0;
virtual status_t getSyncSettings(AVSyncSettings* sync /* nonnull */,
float* videoFps /* nonnull */) = 0;
- virtual status_t seekTo(int msec) = 0;
+ // When |precise| is true, it's required that the first rendered media position after seekTo
+ // is precisely at |msec|, up to the rounding error of the granularity, e.g., the video frame
+ // interval or the length of an audio decoding buffer. In this case, seekTo might take a
+ // little longer to finish.
+ // When |precise| is false, |msec| is a hint to the media player, which will try its best to
+ // fulfill the request, but the exact position is not guaranteed. This option can make seekTo
+ // finish faster.
+ virtual status_t seekTo(int msec, bool precise = false) = 0;
virtual status_t getCurrentPosition(int* msec) = 0;
virtual status_t getDuration(int* msec) = 0;
virtual status_t reset() = 0;
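Since |precise| defaults to false, existing seekTo(msec) callers keep their current behavior and only opt-in callers pass the extra argument. A minimal caller sketch (the player handle and position are hypothetical, not part of this change):

    // Hypothetical caller: request a frame-accurate seek to 90 seconds.
    // Omitting the second argument (or passing false) keeps the old, faster behavior.
    sp<IMediaPlayer> player = getPlayer();          // assumed to be obtained elsewhere
    status_t err = player->seekTo(90000 /* msec */, true /* precise */);
    if (err != OK) {
        // handle the error as appropriate for the caller
    }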
diff --git a/include/media/IMediaRecorder.h b/include/media/IMediaRecorder.h
index 68a65f0..3e05532 100644
--- a/include/media/IMediaRecorder.h
+++ b/include/media/IMediaRecorder.h
@@ -28,8 +28,8 @@
}
class ICameraRecordingProxy;
class IMediaRecorderClient;
-class IGraphicBufferConsumer;
class IGraphicBufferProducer;
+struct PersistentSurface;
class IMediaRecorder: public IInterface
{
@@ -60,7 +60,7 @@
virtual status_t init() = 0;
virtual status_t close() = 0;
virtual status_t release() = 0;
- virtual status_t setInputSurface(const sp<IGraphicBufferConsumer>& surface) = 0;
+ virtual status_t setInputSurface(const sp<PersistentSurface>& surface) = 0;
virtual sp<IGraphicBufferProducer> querySurfaceMediaSource() = 0;
};
diff --git a/include/media/IOMX.h b/include/media/IOMX.h
index 1c39b9c..839945c 100644
--- a/include/media/IOMX.h
+++ b/include/media/IOMX.h
@@ -20,8 +20,6 @@
#include <binder/IInterface.h>
#include <gui/IGraphicBufferProducer.h>
-#include <gui/IGraphicBufferConsumer.h>
-#include <ui/GraphicBuffer.h>
#include <utils/List.h>
#include <utils/String8.h>
@@ -34,23 +32,25 @@
namespace android {
+class IGraphicBufferProducer;
+class IGraphicBufferSource;
class IMemory;
+class IOMXBufferSource;
+class IOMXNode;
class IOMXObserver;
-class IOMXRenderer;
class NativeHandle;
-class Surface;
+class OMXBuffer;
+struct omx_message;
class IOMX : public IInterface {
public:
DECLARE_META_INTERFACE(OMX);
typedef uint32_t buffer_id;
- typedef uint32_t node_id;
- // Given a node_id and the calling process' pid, returns true iff
- // the implementation of the OMX interface lives in the same
- // process.
- virtual bool livesLocally(node_id node, pid_t pid) = 0;
+ enum {
+ kFenceTimeoutMs = 1000
+ };
struct ComponentInfo {
String8 mName;
@@ -60,88 +60,57 @@
virtual status_t allocateNode(
const char *name, const sp<IOMXObserver> &observer,
- sp<IBinder> *nodeBinder,
- node_id *node) = 0;
+ sp<IOMXNode> *omxNode) = 0;
- virtual status_t freeNode(node_id node) = 0;
+ virtual status_t createInputSurface(
+ sp<IGraphicBufferProducer> *bufferProducer,
+ sp<IGraphicBufferSource> *bufferSource) = 0;
+};
+
+class IOMXNode : public IInterface {
+public:
+ DECLARE_META_INTERFACE(OMXNode);
+
+ typedef IOMX::buffer_id buffer_id;
+
+ virtual status_t freeNode() = 0;
virtual status_t sendCommand(
- node_id node, OMX_COMMANDTYPE cmd, OMX_S32 param) = 0;
+ OMX_COMMANDTYPE cmd, OMX_S32 param) = 0;
virtual status_t getParameter(
- node_id node, OMX_INDEXTYPE index,
- void *params, size_t size) = 0;
+ OMX_INDEXTYPE index, void *params, size_t size) = 0;
virtual status_t setParameter(
- node_id node, OMX_INDEXTYPE index,
- const void *params, size_t size) = 0;
+ OMX_INDEXTYPE index, const void *params, size_t size) = 0;
virtual status_t getConfig(
- node_id node, OMX_INDEXTYPE index,
- void *params, size_t size) = 0;
+ OMX_INDEXTYPE index, void *params, size_t size) = 0;
virtual status_t setConfig(
- node_id node, OMX_INDEXTYPE index,
- const void *params, size_t size) = 0;
-
- virtual status_t getState(
- node_id node, OMX_STATETYPE* state) = 0;
+ OMX_INDEXTYPE index, const void *params, size_t size) = 0;
// This will set *type to previous metadata buffer type on OMX error (not on binder error), and
// new metadata buffer type on success.
virtual status_t storeMetaDataInBuffers(
- node_id node, OMX_U32 port_index, OMX_BOOL enable, MetadataBufferType *type = NULL) = 0;
+ OMX_U32 port_index, OMX_BOOL enable, MetadataBufferType *type = NULL) = 0;
virtual status_t prepareForAdaptivePlayback(
- node_id node, OMX_U32 portIndex, OMX_BOOL enable,
+ OMX_U32 portIndex, OMX_BOOL enable,
OMX_U32 maxFrameWidth, OMX_U32 maxFrameHeight) = 0;
virtual status_t configureVideoTunnelMode(
- node_id node, OMX_U32 portIndex, OMX_BOOL tunneled,
+ OMX_U32 portIndex, OMX_BOOL tunneled,
OMX_U32 audioHwSync, native_handle_t **sidebandHandle) = 0;
virtual status_t enableNativeBuffers(
- node_id node, OMX_U32 port_index, OMX_BOOL graphic, OMX_BOOL enable) = 0;
+ OMX_U32 port_index, OMX_BOOL graphic, OMX_BOOL enable) = 0;
virtual status_t getGraphicBufferUsage(
- node_id node, OMX_U32 port_index, OMX_U32* usage) = 0;
+ OMX_U32 port_index, OMX_U32* usage) = 0;
- // Use |params| as an OMX buffer, but limit the size of the OMX buffer to |allottedSize|.
- virtual status_t useBuffer(
- node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms,
- buffer_id *buffer, OMX_U32 allottedSize) = 0;
-
- virtual status_t useGraphicBuffer(
- node_id node, OMX_U32 port_index,
- const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer) = 0;
-
- virtual status_t updateGraphicBufferInMeta(
- node_id node, OMX_U32 port_index,
- const sp<GraphicBuffer> &graphicBuffer, buffer_id buffer) = 0;
-
- virtual status_t updateNativeHandleInMeta(
- node_id node, OMX_U32 port_index,
- const sp<NativeHandle> &nativeHandle, buffer_id buffer) = 0;
-
- // This will set *type to resulting metadata buffer type on OMX error (not on binder error) as
- // well as on success.
- virtual status_t createInputSurface(
- node_id node, OMX_U32 port_index, android_dataspace dataSpace,
- sp<IGraphicBufferProducer> *bufferProducer,
- MetadataBufferType *type = NULL) = 0;
-
- virtual status_t createPersistentInputSurface(
- sp<IGraphicBufferProducer> *bufferProducer,
- sp<IGraphicBufferConsumer> *bufferConsumer) = 0;
-
- // This will set *type to resulting metadata buffer type on OMX error (not on binder error) as
- // well as on success.
virtual status_t setInputSurface(
- node_id node, OMX_U32 port_index,
- const sp<IGraphicBufferConsumer> &bufferConsumer,
- MetadataBufferType *type) = 0;
-
- virtual status_t signalEndOfInputStream(node_id node) = 0;
+ const sp<IOMXBufferSource> &bufferSource) = 0;
// Allocate an opaque buffer as a native handle. If component supports returning native
// handles, those are returned in *native_handle. Otherwise, the allocated buffer is
@@ -149,57 +118,48 @@
// same process as the callee, i.e. is the media_server, as the returned "buffer_data"
// pointer is just that, a pointer into local address space.
virtual status_t allocateSecureBuffer(
- node_id node, OMX_U32 port_index, size_t size,
- buffer_id *buffer, void **buffer_data, sp<NativeHandle> *native_handle) = 0;
+ OMX_U32 port_index, size_t size, buffer_id *buffer,
+ void **buffer_data, sp<NativeHandle> *native_handle) = 0;
- // Allocate an OMX buffer of size |allotedSize|. Use |params| as the backup buffer, which
- // may be larger.
- virtual status_t allocateBufferWithBackup(
- node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms,
- buffer_id *buffer, OMX_U32 allottedSize) = 0;
+ // Instructs the component to use the buffer passed in via |omxBuf| on the
+ // specified port. Returns in |*buffer| the buffer id that the component
+ // assigns to this buffer. |omxBuf| must be one of:
+ // 1) OMXBuffer::sPreset for meta-mode,
+ // 2) type kBufferTypeANWBuffer for non-meta-graphic buffer mode,
+ // 3) type kBufferTypeSharedMem for bytebuffer mode.
+ virtual status_t useBuffer(
+ OMX_U32 port_index, const OMXBuffer &omxBuf, buffer_id *buffer) = 0;
+ // Frees the buffer on the specified port with buffer id |buffer|.
virtual status_t freeBuffer(
- node_id node, OMX_U32 port_index, buffer_id buffer) = 0;
+ OMX_U32 port_index, buffer_id buffer) = 0;
- enum {
- kFenceTimeoutMs = 1000
- };
- // Calls OMX_FillBuffer on buffer, and passes |fenceFd| to component if it supports
- // fences. Otherwise, it waits on |fenceFd| before calling OMX_FillBuffer.
- // Takes ownership of |fenceFd| even if this call fails.
- virtual status_t fillBuffer(node_id node, buffer_id buffer, int fenceFd = -1) = 0;
+ // Calls OMX_FillBuffer on buffer. Passes |fenceFd| to component if it
+ // supports fences. Otherwise, it waits on |fenceFd| before calling
+ // OMX_FillBuffer. Takes ownership of |fenceFd| even if this call fails.
+ // If the port is in metadata mode, the buffer will be updated to point
+ // to the new buffer passed in via |omxBuf| before OMX_FillBuffer is called.
+ // Otherwise info in the |omxBuf| is not used.
+ virtual status_t fillBuffer(
+ buffer_id buffer, const OMXBuffer &omxBuf, int fenceFd = -1) = 0;
- // Calls OMX_EmptyBuffer on buffer (after updating buffer header with |range_offset|,
- // |range_length|, |flags| and |timestamp|). Passes |fenceFd| to component if it
- // supports fences. Otherwise, it waits on |fenceFd| before calling OMX_EmptyBuffer.
- // Takes ownership of |fenceFd| even if this call fails.
+ // Calls OMX_EmptyBuffer on buffer. Passes |fenceFd| to component if it
+ // supports fences. Otherwise, it waits on |fenceFd| before calling
+ // OMX_EmptyBuffer. Takes ownership of |fenceFd| even if this call fails.
+ // If the port is in metadata mode, the buffer will be updated to point
+ // to the new buffer passed in via |omxBuf| before OMX_EmptyBuffer is called.
virtual status_t emptyBuffer(
- node_id node,
- buffer_id buffer,
- OMX_U32 range_offset, OMX_U32 range_length,
+ buffer_id buffer, const OMXBuffer &omxBuf,
OMX_U32 flags, OMX_TICKS timestamp, int fenceFd = -1) = 0;
virtual status_t getExtensionIndex(
- node_id node,
const char *parameter_name,
OMX_INDEXTYPE *index) = 0;
- enum InternalOptionType {
- INTERNAL_OPTION_SUSPEND, // data is a bool
- INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY, // data is an int64_t
- INTERNAL_OPTION_MAX_TIMESTAMP_GAP, // data is int64_t
- INTERNAL_OPTION_MAX_FPS, // data is float
- INTERNAL_OPTION_START_TIME, // data is an int64_t
- INTERNAL_OPTION_TIME_LAPSE, // data is an int64_t[2]
- INTERNAL_OPTION_COLOR_ASPECTS, // data is ColorAspects
- INTERNAL_OPTION_TIME_OFFSET, // data is an int64_t
- };
- virtual status_t setInternalOption(
- node_id node,
- OMX_U32 port_index,
- InternalOptionType type,
- const void *data,
- size_t size) = 0;
+ virtual status_t dispatchMessage(const omx_message &msg) = 0;
+
+ // TODO: this is temporary, will be removed when quirks move to OMX side
+ virtual status_t setQuirks(OMX_U32 quirks) = 0;
};
struct omx_message {
@@ -210,7 +170,6 @@
FRAME_RENDERED,
} type;
- IOMX::node_id node;
int fenceFd; // used for EMPTY_BUFFER_DONE and FILL_BUFFER_DONE; client must close this
union {
@@ -219,6 +178,8 @@
OMX_EVENTTYPE event;
OMX_U32 data1;
OMX_U32 data2;
+ OMX_U32 data3;
+ OMX_U32 data4;
} event_data;
// if type == EMPTY_BUFFER_DONE
@@ -258,10 +219,17 @@
virtual status_t onTransact(
uint32_t code, const Parcel &data, Parcel *reply,
uint32_t flags = 0);
+};
+
+class BnOMXNode : public BnInterface<IOMXNode> {
+public:
+ virtual status_t onTransact(
+ uint32_t code, const Parcel &data, Parcel *reply,
+ uint32_t flags = 0);
protected:
// check if the codec is secure.
- virtual bool isSecure(IOMX::node_id node) {
+ virtual bool isSecure() const {
return false;
}
};
diff --git a/include/media/MediaCodecBuffer.h b/include/media/MediaCodecBuffer.h
new file mode 100644
index 0000000..501c00b
--- /dev/null
+++ b/include/media/MediaCodecBuffer.h
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2016, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_CODEC_BUFFER_H_
+
+#define MEDIA_CODEC_BUFFER_H_
+
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+#include <utils/StrongPointer.h>
+
+namespace android {
+
+struct ABuffer;
+struct AMessage;
+class MediaBufferBase;
+
+/**
+ * Buffers used by MediaCodec.
+ */
+class MediaCodecBuffer : public RefBase {
+public:
+ MediaCodecBuffer(const sp<AMessage> &format, const sp<ABuffer> &buffer);
+
+ /**
+ * MediaCodec will release all references to the buffer when it's done using
+ * it, so the destructor should return the buffer to the owner, such as OMX
+ * components, buffer allocators, surfaces, etc.
+ */
+ virtual ~MediaCodecBuffer() = default;
+
+ // ABuffer-like interface
+ uint8_t *base();
+ uint8_t *data();
+ size_t capacity() const;
+ size_t size() const;
+ size_t offset() const;
+ // Default implementation calls ABuffer::setRange() and returns OK.
+ virtual status_t setRange(size_t offset, size_t size);
+ // TODO: These can be removed if we finish replacing all MediaBuffer's.
+ MediaBufferBase *getMediaBufferBase();
+ void setMediaBufferBase(MediaBufferBase *mediaBuffer);
+
+ // TODO: Specify each field for meta/format.
+ sp<AMessage> meta();
+ sp<AMessage> format();
+
+ void setFormat(const sp<AMessage> &format);
+
+private:
+ MediaCodecBuffer() = delete;
+
+ const sp<AMessage> mMeta;
+ sp<AMessage> mFormat;
+ const sp<ABuffer> mBuffer;
+ MediaBufferBase *mMediaBufferBase;
+};
+
+} // namespace android
+
+#endif // MEDIA_CODEC_BUFFER_H_
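The cmds/ changes above bridge this new type back to APIs that still take ABuffer (e.g. NuMediaExtractor::readSampleData, MediaMuxer::writeSampleData) by wrapping the MediaCodecBuffer's storage in a temporary ABuffer and then mirroring the resulting range. A condensed sketch of that pattern (variable names are illustrative):

    // Let the extractor fill the codec's input buffer through an ABuffer alias,
    // then copy the valid range back onto the MediaCodecBuffer before queueing it.
    const sp<MediaCodecBuffer> &inBuffer = state->mInBuffers.itemAt(index);
    sp<ABuffer> abuffer = new ABuffer(inBuffer->base(), inBuffer->capacity());
    status_t err = extractor->readSampleData(abuffer);
    if (err == OK) {
        inBuffer->setRange(abuffer->offset(), abuffer->size());
    }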
diff --git a/include/media/MediaDefs.h b/include/media/MediaDefs.h
new file mode 100644
index 0000000..5f2a32d
--- /dev/null
+++ b/include/media/MediaDefs.h
@@ -0,0 +1,82 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_DEFS_H_
+
+#define MEDIA_DEFS_H_
+
+namespace android {
+
+extern const char *MEDIA_MIMETYPE_IMAGE_JPEG;
+
+extern const char *MEDIA_MIMETYPE_VIDEO_VP8;
+extern const char *MEDIA_MIMETYPE_VIDEO_VP9;
+extern const char *MEDIA_MIMETYPE_VIDEO_AVC;
+extern const char *MEDIA_MIMETYPE_VIDEO_HEVC;
+extern const char *MEDIA_MIMETYPE_VIDEO_MPEG4;
+extern const char *MEDIA_MIMETYPE_VIDEO_H263;
+extern const char *MEDIA_MIMETYPE_VIDEO_MPEG2;
+extern const char *MEDIA_MIMETYPE_VIDEO_RAW;
+extern const char *MEDIA_MIMETYPE_VIDEO_DOLBY_VISION;
+
+extern const char *MEDIA_MIMETYPE_AUDIO_AMR_NB;
+extern const char *MEDIA_MIMETYPE_AUDIO_AMR_WB;
+extern const char *MEDIA_MIMETYPE_AUDIO_MPEG; // layer III
+extern const char *MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I;
+extern const char *MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II;
+extern const char *MEDIA_MIMETYPE_AUDIO_MIDI;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC;
+extern const char *MEDIA_MIMETYPE_AUDIO_QCELP;
+extern const char *MEDIA_MIMETYPE_AUDIO_VORBIS;
+extern const char *MEDIA_MIMETYPE_AUDIO_OPUS;
+extern const char *MEDIA_MIMETYPE_AUDIO_G711_ALAW;
+extern const char *MEDIA_MIMETYPE_AUDIO_G711_MLAW;
+extern const char *MEDIA_MIMETYPE_AUDIO_RAW;
+extern const char *MEDIA_MIMETYPE_AUDIO_FLAC;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS;
+extern const char *MEDIA_MIMETYPE_AUDIO_MSGSM;
+extern const char *MEDIA_MIMETYPE_AUDIO_AC3;
+extern const char *MEDIA_MIMETYPE_AUDIO_EAC3;
+
+extern const char *MEDIA_MIMETYPE_CONTAINER_MPEG4;
+extern const char *MEDIA_MIMETYPE_CONTAINER_WAV;
+extern const char *MEDIA_MIMETYPE_CONTAINER_OGG;
+extern const char *MEDIA_MIMETYPE_CONTAINER_MATROSKA;
+extern const char *MEDIA_MIMETYPE_CONTAINER_MPEG2TS;
+extern const char *MEDIA_MIMETYPE_CONTAINER_AVI;
+extern const char *MEDIA_MIMETYPE_CONTAINER_MPEG2PS;
+
+extern const char *MEDIA_MIMETYPE_CONTAINER_WVM;
+
+extern const char *MEDIA_MIMETYPE_TEXT_3GPP;
+extern const char *MEDIA_MIMETYPE_TEXT_SUBRIP;
+extern const char *MEDIA_MIMETYPE_TEXT_VTT;
+extern const char *MEDIA_MIMETYPE_TEXT_CEA_608;
+extern const char *MEDIA_MIMETYPE_TEXT_CEA_708;
+extern const char *MEDIA_MIMETYPE_DATA_TIMED_ID3;
+
+// These values are exported to the Java API and need to be kept in sync with
+// frameworks/base/media/java/android/media/AudioFormat.java. Unfortunately,
+// they are not defined in frameworks/av, so they are defined here.
+enum AudioEncoding {
+ kAudioEncodingPcm16bit = 2,
+ kAudioEncodingPcm8bit = 3,
+ kAudioEncodingPcmFloat = 4,
+};
+
+} // namespace android
+
+#endif // MEDIA_DEFS_H_
diff --git a/include/media/MediaPlayerInterface.h b/include/media/MediaPlayerInterface.h
index 4977efd..b488159 100644
--- a/include/media/MediaPlayerInterface.h
+++ b/include/media/MediaPlayerInterface.h
@@ -205,7 +205,7 @@
*videoFps = -1.f;
return OK;
}
- virtual status_t seekTo(int msec) = 0;
+ virtual status_t seekTo(int msec, bool precise = false) = 0;
virtual status_t getCurrentPosition(int *msec) = 0;
virtual status_t getDuration(int *msec) = 0;
virtual status_t reset() = 0;
diff --git a/include/media/MediaRecorderBase.h b/include/media/MediaRecorderBase.h
index 5195993..59dfd18 100644
--- a/include/media/MediaRecorderBase.h
+++ b/include/media/MediaRecorderBase.h
@@ -25,9 +25,8 @@
namespace android {
class ICameraRecordingProxy;
-class Surface;
-class IGraphicBufferConsumer;
class IGraphicBufferProducer;
+struct PersistentSurface;
struct MediaRecorderBase {
MediaRecorderBase(const String16 &opPackageName)
@@ -59,7 +58,7 @@
virtual status_t reset() = 0;
virtual status_t getMaxAmplitude(int *max) = 0;
virtual status_t dump(int fd, const Vector<String16>& args) const = 0;
- virtual status_t setInputSurface(const sp<IGraphicBufferConsumer>& surface) = 0;
+ virtual status_t setInputSurface(const sp<PersistentSurface>& surface) = 0;
virtual sp<IGraphicBufferProducer> querySurfaceMediaSource() const = 0;
diff --git a/include/media/MidiDeviceInfo.h b/include/media/MidiDeviceInfo.h
new file mode 100644
index 0000000..5b4a241
--- /dev/null
+++ b/include/media/MidiDeviceInfo.h
@@ -0,0 +1,81 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_MIDI_DEVICE_INFO_H
+#define ANDROID_MEDIA_MIDI_DEVICE_INFO_H
+
+#include <binder/Parcelable.h>
+#include <binder/PersistableBundle.h>
+#include <utils/String16.h>
+#include <utils/Vector.h>
+
+namespace android {
+namespace media {
+namespace midi {
+
+class MidiDeviceInfo : public Parcelable {
+public:
+ MidiDeviceInfo() = default;
+ virtual ~MidiDeviceInfo() = default;
+ MidiDeviceInfo(const MidiDeviceInfo& midiDeviceInfo) = default;
+
+ status_t writeToParcel(Parcel* parcel) const override;
+ status_t readFromParcel(const Parcel* parcel) override;
+
+ int getType() const { return mType; }
+ int getUid() const { return mId; }
+ bool isPrivate() const { return mIsPrivate; }
+ const Vector<String16>& getInputPortNames() const { return mInputPortNames; }
+ const Vector<String16>& getOutputPortNames() const { return mOutputPortNames; }
+ String16 getProperty(const char* propertyName);
+
+ // The constants need to be kept in sync with MidiDeviceInfo.java
+ enum {
+ TYPE_USB = 1,
+ TYPE_VIRTUAL = 2,
+ TYPE_BLUETOOTH = 3,
+ };
+ static const char* const PROPERTY_NAME;
+ static const char* const PROPERTY_MANUFACTURER;
+ static const char* const PROPERTY_PRODUCT;
+ static const char* const PROPERTY_VERSION;
+ static const char* const PROPERTY_SERIAL_NUMBER;
+ static const char* const PROPERTY_ALSA_CARD;
+ static const char* const PROPERTY_ALSA_DEVICE;
+
+ friend bool operator==(const MidiDeviceInfo& lhs, const MidiDeviceInfo& rhs);
+ friend bool operator!=(const MidiDeviceInfo& lhs, const MidiDeviceInfo& rhs) {
+ return !(lhs == rhs);
+ }
+
+private:
+ status_t readStringVector(
+ const Parcel* parcel, Vector<String16> *vectorPtr, size_t defaultLength);
+ status_t writeStringVector(Parcel* parcel, const Vector<String16>& vector) const;
+
+ int32_t mType;
+ int32_t mId;
+ Vector<String16> mInputPortNames;
+ Vector<String16> mOutputPortNames;
+ os::PersistableBundle mProperties;
+ bool mIsPrivate;
+};
+
+} // namespace midi
+} // namespace media
+} // namespace android
+
+#endif // ANDROID_MEDIA_MIDI_DEVICE_INFO_H
diff --git a/include/media/OMXBuffer.h b/include/media/OMXBuffer.h
new file mode 100644
index 0000000..0322b73
--- /dev/null
+++ b/include/media/OMXBuffer.h
@@ -0,0 +1,95 @@
+/*
+ * Copyright 2016, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _OMXBUFFER_H_
+#define _OMXBUFFER_H_
+
+#include <cutils/native_handle.h>
+#include <media/IOMX.h>
+#include <system/window.h>
+#include <utils/StrongPointer.h>
+
+namespace android {
+
+class GraphicBuffer;
+class IMemory;
+class MediaCodecBuffer;
+class NativeHandle;
+class OMXNodeInstance;
+
+class OMXBuffer {
+public:
+ // sPreset is used in places where we are referring to a pre-registered
+ // buffer on a port. It has type kBufferTypePreset and mRangeLength of 0.
+ static OMXBuffer sPreset;
+
+ // Default constructor, constructs a buffer of type kBufferTypeInvalid.
+ OMXBuffer();
+
+ // Constructs a buffer of type kBufferTypePreset with mRangeLength set to
+ // |codecBuffer|'s size (or 0 if |codecBuffer| is NULL).
+ OMXBuffer(const sp<MediaCodecBuffer> &codecBuffer);
+
+ // Constructs a buffer of type kBufferTypeSharedMem.
+ OMXBuffer(const sp<IMemory> &mem, size_t allottedSize = 0);
+
+ // Constructs a buffer of type kBufferTypeANWBuffer.
+ OMXBuffer(const sp<GraphicBuffer> &gbuf);
+
+ // Constructs a buffer of type kBufferTypeNativeHandle.
+ OMXBuffer(const sp<NativeHandle> &handle);
+
+ // Parcelling/Un-parcelling.
+ status_t writeToParcel(Parcel *parcel) const;
+ status_t readFromParcel(const Parcel *parcel);
+
+ ~OMXBuffer();
+
+private:
+ friend class OMXNodeInstance;
+
+ enum BufferType {
+ kBufferTypeInvalid = 0,
+ kBufferTypePreset,
+ kBufferTypeSharedMem,
+ kBufferTypeANWBuffer,
+ kBufferTypeNativeHandle,
+ };
+
+ BufferType mBufferType;
+
+ // kBufferTypePreset
+ // If the port is operating in byte buffer mode, mRangeLength is the valid
+ // range length. Otherwise the range info should also be ignored.
+ OMX_U32 mRangeLength;
+
+ // kBufferTypeSharedMem
+ sp<IMemory> mMem;
+ OMX_U32 mAllottedSize;
+
+ // kBufferTypeANWBuffer
+ sp<GraphicBuffer> mGraphicBuffer;
+
+ // kBufferTypeNativeHandle
+ sp<NativeHandle> mNativeHandle;
+
+ OMXBuffer(const OMXBuffer &);
+ OMXBuffer &operator=(const OMXBuffer &);
+};
+
+} // namespace android
+
+#endif // _OMXBUFFER_H_
diff --git a/include/media/OMXFenceParcelable.h b/include/media/OMXFenceParcelable.h
new file mode 100644
index 0000000..c9da301
--- /dev/null
+++ b/include/media/OMXFenceParcelable.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _OMX_FENCE_PARCELABLE_
+#define _OMX_FENCE_PARCELABLE_
+
+#include <binder/Parcel.h>
+
+namespace android {
+
+struct OMXFenceParcelable : public Parcelable {
+ OMXFenceParcelable() : mFenceFd(-1) {}
+ OMXFenceParcelable(int fenceFd) : mFenceFd(fenceFd) {}
+
+ int get() const { return mFenceFd; }
+
+ status_t readFromParcel(const Parcel* parcel) override;
+ status_t writeToParcel(Parcel* parcel) const override;
+
+private:
+ // Disable copy ctor and operator=
+ OMXFenceParcelable(const OMXFenceParcelable &);
+ OMXFenceParcelable &operator=(const OMXFenceParcelable &);
+
+ int mFenceFd;
+};
+
+inline status_t OMXFenceParcelable::readFromParcel(const Parcel* parcel) {
+ int32_t haveFence;
+ status_t err = parcel->readInt32(&haveFence);
+ if (err == OK && haveFence) {
+ int fd = ::dup(parcel->readFileDescriptor());
+ if (fd < 0) {
+ return fd;
+ }
+ mFenceFd = fd;
+ }
+ return err;
+}
+
+inline status_t OMXFenceParcelable::writeToParcel(Parcel* parcel) const {
+ status_t err = parcel->writeInt32(mFenceFd >= 0);
+ if (err == OK && mFenceFd >= 0) {
+ err = parcel->writeFileDescriptor(mFenceFd, true /* takeOwnership */);
+ }
+ return err;
+}
+
+} // namespace android
+
+#endif // _OMX_FENCE_PARCELABLE_
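Because both Parcel methods are defined inline above, the fence wrapper can be exercised on its own; a rough round-trip sketch (the incoming fenceFd is assumed to come from a fence-producing API):

    // Write the fd into a Parcel, rewind, and read it back; the reader ends up
    // with a dup()'d descriptor (or -1 if no fence was sent).
    OMXFenceParcelable sent(fenceFd);
    Parcel parcel;
    sent.writeToParcel(&parcel);
    parcel.setDataPosition(0);
    OMXFenceParcelable received;
    received.readFromParcel(&parcel);
    int receivedFd = received.get();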
diff --git a/include/media/TypeConverter.h b/include/media/TypeConverter.h
new file mode 100644
index 0000000..ffe4c1f
--- /dev/null
+++ b/include/media/TypeConverter.h
@@ -0,0 +1,245 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_TYPE_CONVERTER_H_
+#define ANDROID_TYPE_CONVERTER_H_
+
+#include <string>
+#include <string.h>
+
+#include <system/audio.h>
+#include <utils/Log.h>
+#include <utils/Vector.h>
+#include <utils/SortedVector.h>
+
+#include "convert.h"
+#include "AudioParameter.h"
+
+namespace android {
+
+struct SampleRateTraits
+{
+ typedef uint32_t Type;
+ typedef SortedVector<Type> Collection;
+};
+struct DeviceTraits
+{
+ typedef audio_devices_t Type;
+ typedef Vector<Type> Collection;
+};
+struct OutputDeviceTraits : public DeviceTraits {};
+struct InputDeviceTraits : public DeviceTraits {};
+struct OutputFlagTraits
+{
+ typedef audio_output_flags_t Type;
+ typedef Vector<Type> Collection;
+};
+struct InputFlagTraits
+{
+ typedef audio_input_flags_t Type;
+ typedef Vector<Type> Collection;
+};
+struct FormatTraits
+{
+ typedef audio_format_t Type;
+ typedef Vector<Type> Collection;
+};
+struct ChannelTraits
+{
+ typedef audio_channel_mask_t Type;
+ typedef SortedVector<Type> Collection;
+};
+struct OutputChannelTraits : public ChannelTraits {};
+struct InputChannelTraits : public ChannelTraits {};
+struct ChannelIndexTraits : public ChannelTraits {};
+struct GainModeTraits
+{
+ typedef audio_gain_mode_t Type;
+ typedef Vector<Type> Collection;
+};
+struct StreamTraits
+{
+ typedef audio_stream_type_t Type;
+ typedef Vector<Type> Collection;
+};
+struct AudioModeTraits
+{
+ typedef audio_mode_t Type;
+ typedef Vector<Type> Collection;
+};
+template <typename T>
+struct DefaultTraits
+{
+ typedef T Type;
+ typedef Vector<Type> Collection;
+};
+
+template <class Traits>
+static void collectionFromString(const std::string &str, typename Traits::Collection &collection,
+ const char *del = AudioParameter::valueListSeparator)
+{
+ char *literal = strdup(str.c_str());
+ for (const char *cstr = strtok(literal, del); cstr != NULL; cstr = strtok(NULL, del)) {
+ typename Traits::Type value;
+ if (utilities::convertTo<std::string, typename Traits::Type >(cstr, value)) {
+ collection.add(value);
+ }
+ }
+ free(literal);
+}
+
+template <class Traits>
+class TypeConverter
+{
+public:
+ static bool toString(const typename Traits::Type &value, std::string &str);
+
+ static bool fromString(const std::string &str, typename Traits::Type &result);
+
+ static void collectionFromString(const std::string &str,
+ typename Traits::Collection &collection,
+ const char *del = AudioParameter::valueListSeparator);
+
+ static uint32_t maskFromString(
+ const std::string &str, const char *del = AudioParameter::valueListSeparator);
+
+ static void maskToString(
+ uint32_t mask, std::string &str, const char *del = AudioParameter::valueListSeparator);
+
+protected:
+ struct Table {
+ const char *literal;
+ typename Traits::Type value;
+ };
+
+ static const Table mTable[];
+};
+
+template <class Traits>
+inline bool TypeConverter<Traits>::toString(const typename Traits::Type &value, std::string &str)
+{
+ for (size_t i = 0; mTable[i].literal; i++) {
+ if (mTable[i].value == value) {
+ str = mTable[i].literal;
+ return true;
+ }
+ }
+ char result[64];
+ snprintf(result, sizeof(result), "Unknown enum value %d", value);
+ str = result;
+ return false;
+}
+
+template <class Traits>
+inline bool TypeConverter<Traits>::fromString(const std::string &str, typename Traits::Type &result)
+{
+ for (size_t i = 0; mTable[i].literal; i++) {
+ if (strcmp(mTable[i].literal, str.c_str()) == 0) {
+ ALOGV("stringToEnum() found %s", mTable[i].literal);
+ result = mTable[i].value;
+ return true;
+ }
+ }
+ return false;
+}
+
+template <class Traits>
+inline void TypeConverter<Traits>::collectionFromString(const std::string &str,
+ typename Traits::Collection &collection,
+ const char *del)
+{
+ char *literal = strdup(str.c_str());
+
+ for (const char *cstr = strtok(literal, del); cstr != NULL; cstr = strtok(NULL, del)) {
+ typename Traits::Type value;
+ if (fromString(cstr, value)) {
+ collection.add(value);
+ }
+ }
+ free(literal);
+}
+
+template <class Traits>
+inline uint32_t TypeConverter<Traits>::maskFromString(const std::string &str, const char *del)
+{
+ char *literal = strdup(str.c_str());
+ uint32_t value = 0;
+ for (const char *cstr = strtok(literal, del); cstr != NULL; cstr = strtok(NULL, del)) {
+ typename Traits::Type type;
+ if (fromString(cstr, type)) {
+ value |= static_cast<uint32_t>(type);
+ }
+ }
+ free(literal);
+ return value;
+}
+
+template <class Traits>
+inline void TypeConverter<Traits>::maskToString(uint32_t mask, std::string &str, const char *del)
+{
+ if (mask != 0) {
+ bool first_flag = true;
+ for (size_t i = 0; mTable[i].literal; i++) {
+ if (mTable[i].value != 0 && (mask & mTable[i].value) == mTable[i].value) {
+ if (!first_flag) str += del;
+ first_flag = false;
+ str += mTable[i].literal;
+ }
+ }
+ } else {
+ toString(static_cast<typename Traits::Type>(0), str);
+ }
+}
+
+typedef TypeConverter<OutputDeviceTraits> OutputDeviceConverter;
+typedef TypeConverter<InputDeviceTraits> InputDeviceConverter;
+typedef TypeConverter<OutputFlagTraits> OutputFlagConverter;
+typedef TypeConverter<InputFlagTraits> InputFlagConverter;
+typedef TypeConverter<FormatTraits> FormatConverter;
+typedef TypeConverter<OutputChannelTraits> OutputChannelConverter;
+typedef TypeConverter<InputChannelTraits> InputChannelConverter;
+typedef TypeConverter<ChannelIndexTraits> ChannelIndexConverter;
+typedef TypeConverter<GainModeTraits> GainModeConverter;
+typedef TypeConverter<StreamTraits> StreamTypeConverter;
+typedef TypeConverter<AudioModeTraits> AudioModeConverter;
+
+bool deviceFromString(const std::string& literalDevice, audio_devices_t& device);
+
+bool deviceToString(audio_devices_t device, std::string& literalDevice);
+
+SampleRateTraits::Collection samplingRatesFromString(
+ const std::string &samplingRates, const char *del = AudioParameter::valueListSeparator);
+
+FormatTraits::Collection formatsFromString(
+ const std::string &formats, const char *del = AudioParameter::valueListSeparator);
+
+audio_format_t formatFromString(
+ const std::string &literalFormat, audio_format_t defaultFormat = AUDIO_FORMAT_DEFAULT);
+
+audio_channel_mask_t channelMaskFromString(const std::string &literalChannels);
+
+ChannelTraits::Collection channelMasksFromString(
+ const std::string &channels, const char *del = AudioParameter::valueListSeparator);
+
+InputChannelTraits::Collection inputChannelMasksFromString(
+ const std::string &inChannels, const char *del = AudioParameter::valueListSeparator);
+
+OutputChannelTraits::Collection outputChannelMasksFromString(
+ const std::string &outChannels, const char *del = AudioParameter::valueListSeparator);
+
+}; // namespace android
+
+#endif /*ANDROID_TYPE_CONVERTER_H_*/
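Illustrative note (not part of the patch): the converter tables (mTable) are defined in a separate .cpp, so the flag literals below are only assumed to match entries in the OutputFlagTraits table; a minimal usage sketch follows.

#include <string>
#include "TypeConverter.h"  // path as added by this patch

// Sketch only: parse a flag list into a mask, format it back, and parse a rate list.
static void typeConverterExample() {
    const std::string flags = "AUDIO_OUTPUT_FLAG_FAST|AUDIO_OUTPUT_FLAG_RAW"; // example literals
    uint32_t mask = android::OutputFlagConverter::maskFromString(flags, "|");
    std::string formatted;
    android::OutputFlagConverter::maskToString(mask, formatted, "|"); // expected to reproduce 'flags'
    android::SampleRateTraits::Collection rates =
            android::samplingRatesFromString("44100|48000", "|");
    (void)rates;
}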
diff --git a/include/media/Visualizer.h b/include/media/Visualizer.h
index 7bb9e8b..f8f4f50 100644
--- a/include/media/Visualizer.h
+++ b/include/media/Visualizer.h
@@ -18,7 +18,7 @@
#define ANDROID_MEDIA_VISUALIZER_H
#include <media/AudioEffect.h>
-#include <audio_effects/effect_visualizer.h>
+#include <system/audio_effects/effect_visualizer.h>
#include <utils/Thread.h>
/**
diff --git a/include/media/audiohal/DeviceHalInterface.h b/include/media/audiohal/DeviceHalInterface.h
new file mode 100644
index 0000000..caf01be
--- /dev/null
+++ b/include/media/audiohal/DeviceHalInterface.h
@@ -0,0 +1,120 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_DEVICE_HAL_INTERFACE_H
+#define ANDROID_HARDWARE_DEVICE_HAL_INTERFACE_H
+
+#include <system/audio.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+#include <utils/String8.h>
+
+namespace android {
+
+class StreamInHalInterface;
+class StreamOutHalInterface;
+
+class DeviceHalInterface : public RefBase
+{
+ public:
+ // Sets the value of 'devices' to a bitmask of 1 or more values of audio_devices_t.
+ virtual status_t getSupportedDevices(uint32_t *devices) = 0;
+
+ // Check to see if the audio hardware interface has been initialized.
+ virtual status_t initCheck() = 0;
+
+ // Set the audio volume of a voice call. Range is between 0.0 and 1.0.
+ virtual status_t setVoiceVolume(float volume) = 0;
+
+ // Set the audio volume for all audio activities other than voice call.
+ virtual status_t setMasterVolume(float volume) = 0;
+
+ // Get the current master volume value for the HAL.
+ virtual status_t getMasterVolume(float *volume) = 0;
+
+ // Called when the audio mode changes.
+ virtual status_t setMode(audio_mode_t mode) = 0;
+
+ // Muting control.
+ virtual status_t setMicMute(bool state) = 0;
+ virtual status_t getMicMute(bool *state) = 0;
+ virtual status_t setMasterMute(bool state) = 0;
+ virtual status_t getMasterMute(bool *state) = 0;
+
+ // Set global audio parameters.
+ virtual status_t setParameters(const String8& kvPairs) = 0;
+
+ // Get global audio parameters.
+ virtual status_t getParameters(const String8& keys, String8 *values) = 0;
+
+ // Returns audio input buffer size according to parameters passed.
+ virtual status_t getInputBufferSize(const struct audio_config *config,
+ size_t *size) = 0;
+
+ // Creates and opens the audio hardware output stream. The stream is closed
+ // by releasing all references to the returned object.
+ virtual status_t openOutputStream(
+ audio_io_handle_t handle,
+ audio_devices_t devices,
+ audio_output_flags_t flags,
+ struct audio_config *config,
+ const char *address,
+ sp<StreamOutHalInterface> *outStream) = 0;
+
+ // Creates and opens the audio hardware input stream. The stream is closed
+ // by releasing all references to the returned object.
+ virtual status_t openInputStream(
+ audio_io_handle_t handle,
+ audio_devices_t devices,
+ struct audio_config *config,
+ audio_input_flags_t flags,
+ const char *address,
+ audio_source_t source,
+ sp<StreamInHalInterface> *inStream) = 0;
+
+ // Returns whether createAudioPatch and releaseAudioPatch operations are supported.
+ virtual status_t supportsAudioPatches(bool *supportsPatches) = 0;
+
+ // Creates an audio patch between several source and sink ports.
+ virtual status_t createAudioPatch(
+ unsigned int num_sources,
+ const struct audio_port_config *sources,
+ unsigned int num_sinks,
+ const struct audio_port_config *sinks,
+ audio_patch_handle_t *patch) = 0;
+
+ // Releases an audio patch.
+ virtual status_t releaseAudioPatch(audio_patch_handle_t patch) = 0;
+
+ // Fills the list of supported attributes for a given audio port.
+ virtual status_t getAudioPort(struct audio_port *port) = 0;
+
+ // Set audio port configuration.
+ virtual status_t setAudioPortConfig(const struct audio_port_config *config) = 0;
+
+ virtual status_t dump(int fd) = 0;
+
+ protected:
+ // Subclasses can not be constructed directly by clients.
+ DeviceHalInterface() {}
+
+ // The destructor automatically closes the device.
+ virtual ~DeviceHalInterface() {}
+};
+
+} // namespace android
+
+#endif // ANDROID_HARDWARE_DEVICE_HAL_INTERFACE_H
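Illustrative note (not part of the patch): a minimal sketch of opening an output stream through this interface. The I/O handle and the empty device address are placeholders; a real caller obtains both from AudioFlinger.

#include <media/audiohal/DeviceHalInterface.h>
#include <media/audiohal/StreamHalInterface.h>

// Sketch only: open a 48 kHz stereo PCM output on the speaker.
static android::status_t openSpeakerOutputExample(
        const android::sp<android::DeviceHalInterface>& dev,
        android::sp<android::StreamOutHalInterface>* outStream) {
    struct audio_config config = AUDIO_CONFIG_INITIALIZER;
    config.sample_rate = 48000;
    config.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
    config.format = AUDIO_FORMAT_PCM_16_BIT;
    return dev->openOutputStream(
            static_cast<audio_io_handle_t>(1),  // placeholder handle
            AUDIO_DEVICE_OUT_SPEAKER,
            AUDIO_OUTPUT_FLAG_PRIMARY,
            &config,
            "",                                 // device address
            outStream);
}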
diff --git a/include/media/audiohal/DevicesFactoryHalInterface.h b/include/media/audiohal/DevicesFactoryHalInterface.h
new file mode 100644
index 0000000..823a0da
--- /dev/null
+++ b/include/media/audiohal/DevicesFactoryHalInterface.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_DEVICES_FACTORY_HAL_INTERFACE_H
+#define ANDROID_HARDWARE_DEVICES_FACTORY_HAL_INTERFACE_H
+
+#include <media/audiohal/DeviceHalInterface.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+class DevicesFactoryHalInterface : public RefBase
+{
+ public:
+ virtual ~DevicesFactoryHalInterface() {}
+
+ // Opens a device with the specified name. To close the device, it is
+ // necessary to release references to the returned object.
+ virtual status_t openDevice(const char *name, sp<DeviceHalInterface> *device) = 0;
+
+ static sp<DevicesFactoryHalInterface> create();
+
+ protected:
+ // Subclasses can not be constructed directly by clients.
+ DevicesFactoryHalInterface() {}
+};
+
+} // namespace android
+
+#endif // ANDROID_HARDWARE_DEVICES_FACTORY_HAL_INTERFACE_H
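Illustrative note (not part of the patch): the factory is the entry point for obtaining a DeviceHalInterface; "primary" follows the usual audio HAL module naming and is only an example value.

#include <media/audiohal/DevicesFactoryHalInterface.h>

// Sketch only: open the primary audio HAL module; returns nullptr on any failure.
static android::sp<android::DeviceHalInterface> openPrimaryHalExample() {
    using namespace android;
    sp<DevicesFactoryHalInterface> factory = DevicesFactoryHalInterface::create();
    if (factory == nullptr) return nullptr;
    sp<DeviceHalInterface> device;
    if (factory->openDevice("primary", &device) != OK) return nullptr;
    return device;  // closed when the last reference is released
}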
diff --git a/include/media/audiohal/EffectHalInterface.h b/include/media/audiohal/EffectHalInterface.h
new file mode 100644
index 0000000..7bbd3b5
--- /dev/null
+++ b/include/media/audiohal/EffectHalInterface.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_EFFECT_HAL_INTERFACE_H
+#define ANDROID_HARDWARE_EFFECT_HAL_INTERFACE_H
+
+#include <system/audio_effect.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+class EffectHalInterface : public RefBase
+{
+ public:
+ // Effect process function. Takes input samples as specified
+ // in input buffer descriptor and output processed samples as specified
+ // in output buffer descriptor.
+ virtual status_t process(audio_buffer_t *inBuffer, audio_buffer_t *outBuffer) = 0;
+
+ // Process reverse stream function. This function is used to pass
+ // a reference stream to the effect engine.
+ virtual status_t processReverse(audio_buffer_t *inBuffer, audio_buffer_t *outBuffer) = 0;
+
+ // Send a command and receive a response to/from effect engine.
+ virtual status_t command(uint32_t cmdCode, uint32_t cmdSize, void *pCmdData,
+ uint32_t *replySize, void *pReplyData) = 0;
+
+ // Returns the effect descriptor.
+ virtual status_t getDescriptor(effect_descriptor_t *pDescriptor) = 0;
+
+ protected:
+ // Subclasses can not be constructed directly by clients.
+ EffectHalInterface() {}
+
+ // The destructor automatically releases the effect.
+ virtual ~EffectHalInterface() {}
+};
+
+} // namespace android
+
+#endif // ANDROID_HARDWARE_EFFECT_HAL_INTERFACE_H
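Illustrative note (not part of the patch): a minimal sketch of driving one processing cycle; the buffers and frame count are caller-provided and status checking is omitted.

#include <media/audiohal/EffectHalInterface.h>

// Sketch only: process interleaved 16-bit PCM through the effect once.
static void effectProcessExample(const android::sp<android::EffectHalInterface>& effect,
                                 int16_t* in, int16_t* out, size_t frameCount) {
    audio_buffer_t inBuffer;
    audio_buffer_t outBuffer;
    inBuffer.frameCount = frameCount;
    inBuffer.s16 = in;
    outBuffer.frameCount = frameCount;
    outBuffer.s16 = out;
    effect->process(&inBuffer, &outBuffer);
}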
diff --git a/include/media/audiohal/EffectsFactoryHalInterface.h b/include/media/audiohal/EffectsFactoryHalInterface.h
new file mode 100644
index 0000000..a616e86
--- /dev/null
+++ b/include/media/audiohal/EffectsFactoryHalInterface.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_EFFECTS_FACTORY_HAL_INTERFACE_H
+#define ANDROID_HARDWARE_EFFECTS_FACTORY_HAL_INTERFACE_H
+
+#include <media/audiohal/EffectHalInterface.h>
+#include <system/audio_effect.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+class EffectsFactoryHalInterface : public RefBase
+{
+ public:
+ // Returns the number of different effects in all loaded libraries.
+ virtual status_t queryNumberEffects(uint32_t *pNumEffects) = 0;
+
+ // Returns a descriptor of the next available effect.
+ virtual status_t getDescriptor(uint32_t index,
+ effect_descriptor_t *pDescriptor) = 0;
+
+ virtual status_t getDescriptor(const effect_uuid_t *pEffectUuid,
+ effect_descriptor_t *pDescriptor) = 0;
+
+ // Creates an effect engine of the specified type.
+ // To release the effect engine, it is necessary to release references
+ // to the returned effect object.
+ virtual status_t createEffect(const effect_uuid_t *pEffectUuid,
+ int32_t sessionId, int32_t ioId,
+ sp<EffectHalInterface> *effect) = 0;
+
+ virtual status_t dumpEffects(int fd) = 0;
+
+ static sp<EffectsFactoryHalInterface> create();
+
+ // Helper function to compare effect uuid to EFFECT_UUID_NULL.
+ static bool isNullUuid(const effect_uuid_t *pEffectUuid);
+
+ protected:
+ // Subclasses can not be constructed directly by clients.
+ EffectsFactoryHalInterface() {}
+
+ virtual ~EffectsFactoryHalInterface() {}
+};
+
+} // namespace android
+
+#endif // ANDROID_HARDWARE_EFFECTS_FACTORY_HAL_INTERFACE_H
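Illustrative note (not part of the patch): enumerating the loaded effect libraries and instantiating the first effect found; the session and I/O ids are placeholders.

#include <media/audiohal/EffectsFactoryHalInterface.h>

// Sketch only: create the first available effect, or return nullptr.
static android::sp<android::EffectHalInterface> createFirstEffectExample() {
    using namespace android;
    sp<EffectsFactoryHalInterface> factory = EffectsFactoryHalInterface::create();
    uint32_t numEffects = 0;
    if (factory == nullptr || factory->queryNumberEffects(&numEffects) != OK || numEffects == 0) {
        return nullptr;
    }
    effect_descriptor_t desc;
    if (factory->getDescriptor(0 /* index */, &desc) != OK) return nullptr;
    sp<EffectHalInterface> effect;
    if (factory->createEffect(&desc.uuid, 0 /* sessionId */, 0 /* ioId */, &effect) != OK) {
        return nullptr;
    }
    return effect;  // released together with the last reference
}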
diff --git a/include/media/audiohal/StreamHalInterface.h b/include/media/audiohal/StreamHalInterface.h
new file mode 100644
index 0000000..0772d8f
--- /dev/null
+++ b/include/media/audiohal/StreamHalInterface.h
@@ -0,0 +1,153 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_STREAM_HAL_INTERFACE_H
+#define ANDROID_HARDWARE_STREAM_HAL_INTERFACE_H
+
+#include <media/audiohal/EffectHalInterface.h>
+#include <system/audio.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+#include <utils/String8.h>
+
+namespace android {
+
+class StreamHalInterface : public virtual RefBase
+{
+ public:
+ // Return the sampling rate in Hz - e.g. 44100.
+ virtual status_t getSampleRate(uint32_t *rate) = 0;
+
+ // Return the size of the input/output buffer in bytes for this stream - e.g. 4800.
+ virtual status_t getBufferSize(size_t *size) = 0;
+
+ // Return the channel mask.
+ virtual status_t getChannelMask(audio_channel_mask_t *mask) = 0;
+
+ // Return the audio format - e.g. AUDIO_FORMAT_PCM_16_BIT.
+ virtual status_t getFormat(audio_format_t *format) = 0;
+
+ // Convenience method to retrieve the sample rate, channel mask, and format in one call.
+ virtual status_t getAudioProperties(
+ uint32_t *sampleRate, audio_channel_mask_t *mask, audio_format_t *format) = 0;
+
+ // Set audio stream parameters.
+ virtual status_t setParameters(const String8& kvPairs) = 0;
+
+ // Get audio stream parameters.
+ virtual status_t getParameters(const String8& keys, String8 *values) = 0;
+
+ // Return the frame size (number of bytes per audio frame) of a stream.
+ virtual status_t getFrameSize(size_t *size) = 0;
+
+ // Add or remove the effect on the stream.
+ virtual status_t addEffect(sp<EffectHalInterface> effect) = 0;
+ virtual status_t removeEffect(sp<EffectHalInterface> effect) = 0;
+
+ // Put the audio hardware input/output into standby mode.
+ virtual status_t standby() = 0;
+
+ virtual status_t dump(int fd) = 0;
+
+ protected:
+ // Subclasses can not be constructed directly by clients.
+ StreamHalInterface() {}
+
+ // The destructor automatically closes the stream.
+ virtual ~StreamHalInterface() {}
+};
+
+class StreamOutHalInterfaceCallback : public virtual RefBase {
+ public:
+ virtual void onWriteReady() {}
+ virtual void onDrainReady() {}
+ virtual void onError() {}
+
+ protected:
+ StreamOutHalInterfaceCallback() {}
+ virtual ~StreamOutHalInterfaceCallback() {}
+};
+
+class StreamOutHalInterface : public virtual StreamHalInterface {
+ public:
+ // Return the audio hardware driver estimated latency in milliseconds.
+ virtual status_t getLatency(uint32_t *latency) = 0;
+
+ // Use this method in situations where audio mixing is done in the hardware.
+ virtual status_t setVolume(float left, float right) = 0;
+
+ // Write audio buffer to driver.
+ virtual status_t write(const void *buffer, size_t bytes, size_t *written) = 0;
+
+ // Return the number of audio frames written by the audio dsp to DAC since
+ // the output has exited standby.
+ virtual status_t getRenderPosition(uint32_t *dspFrames) = 0;
+
+ // Get the local time at which the next write to the audio driver will be presented.
+ virtual status_t getNextWriteTimestamp(int64_t *timestamp) = 0;
+
+ // Set the callback for notifying completion of non-blocking write and drain.
+ // The callback must be owned by someone else. The output stream does not own it
+ // to avoid strong pointer loops.
+ virtual status_t setCallback(wp<StreamOutHalInterfaceCallback> callback) = 0;
+
+ // Returns whether pause and resume operations are supported.
+ virtual status_t supportsPauseAndResume(bool *supportsPause, bool *supportsResume) = 0;
+
+ // Notifies the audio driver to pause playback.
+ virtual status_t pause() = 0;
+
+ // Notifies the audio driver to resume playback following a pause.
+ virtual status_t resume() = 0;
+
+ // Returns whether drain operation is supported.
+ virtual status_t supportsDrain(bool *supportsDrain) = 0;
+
+ // Requests notification when data buffered by the driver/hardware has been played.
+ virtual status_t drain(bool earlyNotify) = 0;
+
+ // Notifies the audio driver to flush the queued data.
+ virtual status_t flush() = 0;
+
+ // Return a recent count of the number of audio frames presented to an external observer.
+ virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp) = 0;
+
+ protected:
+ virtual ~StreamOutHalInterface() {}
+};
+
+class StreamInHalInterface : public virtual StreamHalInterface {
+ public:
+ // Set the input gain for the audio driver.
+ virtual status_t setGain(float gain) = 0;
+
+ // Read audio buffer in from driver.
+ virtual status_t read(void *buffer, size_t bytes, size_t *read) = 0;
+
+ // Return the amount of input frames lost in the audio driver.
+ virtual status_t getInputFramesLost(uint32_t *framesLost) = 0;
+
+ // Return a recent count of the number of audio frames received and
+ // the clock time associated with that frame count.
+ virtual status_t getCapturePosition(int64_t *frames, int64_t *time) = 0;
+
+ protected:
+ virtual ~StreamInHalInterface() {}
+};
+
+} // namespace android
+
+#endif // ANDROID_HARDWARE_STREAM_HAL_INTERFACE_H
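Illustrative note (not part of the patch): write() reports how many bytes the HAL accepted, so callers that want blocking semantics loop until everything is consumed; a minimal sketch follows.

#include <media/audiohal/StreamHalInterface.h>

// Sketch only: write an entire buffer, bailing out on error or if no progress is made.
static android::status_t writeAllExample(const android::sp<android::StreamOutHalInterface>& stream,
                                         const void* data, size_t bytes) {
    const uint8_t* p = static_cast<const uint8_t*>(data);
    while (bytes > 0) {
        size_t written = 0;
        android::status_t status = stream->write(p, bytes, &written);
        if (status != android::OK) return status;
        if (written == 0) break;  // avoid spinning if the HAL accepts nothing
        p += written;
        bytes -= written;
    }
    return android::OK;
}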
diff --git a/services/audiopolicy/utilities/convert/convert.h b/include/media/convert.h
similarity index 100%
rename from services/audiopolicy/utilities/convert/convert.h
rename to include/media/convert.h
diff --git a/include/media/mediaplayer.h b/include/media/mediaplayer.h
index 389ec01..c556f0a 100644
--- a/include/media/mediaplayer.h
+++ b/include/media/mediaplayer.h
@@ -233,7 +233,7 @@
float* videoFps /* nonnull */);
status_t getVideoWidth(int *w);
status_t getVideoHeight(int *h);
- status_t seekTo(int msec);
+ status_t seekTo(int msec, bool precise = false);
status_t getCurrentPosition(int *msec);
status_t getDuration(int *msec);
status_t reset();
@@ -257,7 +257,7 @@
private:
void clear_l();
- status_t seekTo_l(int msec);
+ status_t seekTo_l(int msec, bool precise);
status_t prepareAsync_l();
status_t getDuration_l(int *msec);
status_t attachNewPlayer(const sp<IMediaPlayer>& player);
@@ -274,7 +274,9 @@
void* mCookie;
media_player_states mCurrentState;
int mCurrentPosition;
+ bool mCurrentSeekPrecise;
int mSeekPosition;
+ bool mSeekPrecise;
bool mPrepareSync;
status_t mPrepareStatus;
audio_stream_type_t mStreamType;
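Illustrative note (not part of the patch): the semantics are inferred from the parameter name only — the default keeps the previous behavior, while passing true presumably requests an exact-position rather than sync-frame seek.

#include <media/mediaplayer.h>

// Sketch only: both call forms after this change.
static void seekToExamples(const android::sp<android::MediaPlayer>& player) {
    player->seekTo(30000);        // seek near 30 s, previous behavior
    player->seekTo(30000, true);  // request a precise seek to 30 s
}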
diff --git a/include/media/nbaio/AudioStreamInSource.h b/include/media/nbaio/AudioStreamInSource.h
index a6e7992..508e0fe 100644
--- a/include/media/nbaio/AudioStreamInSource.h
+++ b/include/media/nbaio/AudioStreamInSource.h
@@ -17,16 +17,17 @@
#ifndef ANDROID_AUDIO_STREAM_IN_SOURCE_H
#define ANDROID_AUDIO_STREAM_IN_SOURCE_H
-#include <hardware/audio.h>
#include "NBAIO.h"
namespace android {
+class StreamInHalInterface;
+
// not multi-thread safe
class AudioStreamInSource : public NBAIO_Source {
public:
- AudioStreamInSource(audio_stream_in *stream);
+ AudioStreamInSource(sp<StreamInHalInterface> stream);
virtual ~AudioStreamInSource();
// NBAIO_Port interface
@@ -50,11 +51,11 @@
// NBAIO_Sink end
#if 0 // until necessary
- audio_stream_in *stream() const { return mStream; }
+ sp<StreamInHalInterface> stream() const { return mStream; }
#endif
private:
- audio_stream_in * const mStream;
+ sp<StreamInHalInterface> mStream;
size_t mStreamBufferSizeBytes; // as reported by get_buffer_size()
int64_t mFramesOverrun;
int64_t mOverruns;
diff --git a/include/media/nbaio/AudioStreamOutSink.h b/include/media/nbaio/AudioStreamOutSink.h
index e86b018..56052a6 100644
--- a/include/media/nbaio/AudioStreamOutSink.h
+++ b/include/media/nbaio/AudioStreamOutSink.h
@@ -17,16 +17,17 @@
#ifndef ANDROID_AUDIO_STREAM_OUT_SINK_H
#define ANDROID_AUDIO_STREAM_OUT_SINK_H
-#include <hardware/audio.h>
#include "NBAIO.h"
namespace android {
+class StreamOutHalInterface;
+
// not multi-thread safe
class AudioStreamOutSink : public NBAIO_Sink {
public:
- AudioStreamOutSink(audio_stream_out *stream);
+ AudioStreamOutSink(sp<StreamOutHalInterface> stream);
virtual ~AudioStreamOutSink();
// NBAIO_Port interface
@@ -52,11 +53,11 @@
// NBAIO_Sink end
#if 0 // until necessary
- audio_stream_out *stream() const { return mStream; }
+ sp<StreamOutHalInterface> stream() const { return mStream; }
#endif
private:
- audio_stream_out * const mStream;
+ sp<StreamOutHalInterface> mStream;
size_t mStreamBufferSizeBytes; // as reported by get_buffer_size()
};
diff --git a/include/media/nbaio/NBAIO.h b/include/media/nbaio/NBAIO.h
index 120de4f..3fd97ac 100644
--- a/include/media/nbaio/NBAIO.h
+++ b/include/media/nbaio/NBAIO.h
@@ -35,13 +35,16 @@
// In addition to the usual status_t
enum {
- NEGOTIATE = 0x80000010, // Must (re-)negotiate format. For negotiate() only, the offeree
- // doesn't accept offers, and proposes counter-offers
- OVERRUN = 0x80000011, // availableToRead(), read(), or readVia() detected lost input due
- // to overrun; an event is counted and the caller should re-try
- UNDERRUN = 0x80000012, // availableToWrite(), write(), or writeVia() detected a gap in
- // output due to underrun (not being called often enough, or with
- // enough data); an event is counted and the caller should re-try
+ NEGOTIATE = (UNKNOWN_ERROR + 0x100), // Must (re-)negotiate format. For negotiate() only,
+ // the offeree doesn't accept offers, and proposes
+ // counter-offers
+ OVERRUN = (UNKNOWN_ERROR + 0x101), // availableToRead(), read(), or readVia() detected
+ // lost input due to overrun; an event is counted and
+ // the caller should re-try
+ UNDERRUN = (UNKNOWN_ERROR + 0x102), // availableToWrite(), write(), or writeVia() detected
+ // a gap in output due to underrun (not being called
+ // often enough, or with enough data); an event is
+ // counted and the caller should re-try
};
// Negotiation of format is based on the data provider and data sink, or the data consumer and
@@ -266,6 +269,17 @@
// One or more frames were lost due to overrun, try again to read more recent data.
virtual ssize_t read(void *buffer, size_t count) = 0;
+ // Flush data from buffer. There is no notion of overrun as all data is dropped.
+ // Flushed frames also count towards frames read.
+ //
+ // Return value:
+ // >= 0 Number of frames successfully flushed
+ // < 0 status_t error occurred
+ // Errors:
+ // NEGOTIATE (Re-)negotiation is needed.
+ // INVALID_OPERATION Not implemented
+ virtual ssize_t flush() { return INVALID_OPERATION; }
+
// Transfer data from source using a series of callbacks. More suitable for zero-fill,
// synthesis, and non-contiguous transfers (e.g. circular buffer or readv).
// Inputs:
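Illustrative note (not part of the patch): the default flush() returns INVALID_OPERATION, so callers can treat that as "flush not supported"; a minimal sketch follows.

#include <media/nbaio/NBAIO.h>

// Sketch only: drop whatever the source has buffered, if it supports flushing.
static ssize_t dropPendingFramesExample(android::NBAIO_Source* source) {
    ssize_t flushed = source->flush();
    if (flushed == android::INVALID_OPERATION) {
        return 0;    // not implemented; nothing dropped
    }
    return flushed;  // >= 0: frames flushed (they also count as read); < 0: error
}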
diff --git a/include/media/nbaio/PipeReader.h b/include/media/nbaio/PipeReader.h
index 7c733ad..00c2b3c 100644
--- a/include/media/nbaio/PipeReader.h
+++ b/include/media/nbaio/PipeReader.h
@@ -47,6 +47,8 @@
virtual ssize_t read(void *buffer, size_t count);
+ virtual ssize_t flush();
+
// NBAIO_Source end
#if 0 // until necessary
diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h
index 7b3e71c..13ceeb6 100644
--- a/include/media/stagefright/ACodec.h
+++ b/include/media/stagefright/ACodec.h
@@ -35,6 +35,7 @@
namespace android {
struct ABuffer;
+class MediaCodecBuffer;
struct MemoryDealer;
struct DescribeColorFormat2Params;
struct DataConverter;
@@ -74,30 +75,20 @@
struct PortDescription : public CodecBase::PortDescription {
size_t countBuffers();
IOMX::buffer_id bufferIDAt(size_t index) const;
- sp<ABuffer> bufferAt(size_t index) const;
- sp<NativeHandle> handleAt(size_t index) const;
- sp<RefBase> memRefAt(size_t index) const;
+ sp<MediaCodecBuffer> bufferAt(size_t index) const;
private:
friend struct ACodec;
Vector<IOMX::buffer_id> mBufferIDs;
- Vector<sp<ABuffer> > mBuffers;
- Vector<sp<NativeHandle> > mHandles;
- Vector<sp<RefBase> > mMemRefs;
+ Vector<sp<MediaCodecBuffer> > mBuffers;
PortDescription();
- void addBuffer(
- IOMX::buffer_id id, const sp<ABuffer> &buffer,
- const sp<NativeHandle> &handle, const sp<RefBase> &memRef);
+ void addBuffer(IOMX::buffer_id id, const sp<MediaCodecBuffer> &buffer);
DISALLOW_EVIL_CONSTRUCTORS(PortDescription);
};
- static bool isFlexibleColorFormat(
- const sp<IOMX> &omx, IOMX::node_id node,
- uint32_t colorFormat, bool usingNativeBuffers, OMX_U32 *flexibleEquivalent);
-
// Returns 0 if configuration is not supported. NOTE: this is treated by
// some OMX components as auto level, and by others as invalid level.
static int /* OMX_VIDEO_AVCLEVELTYPE */ getAVCLevelFor(
@@ -190,13 +181,13 @@
Status mStatus;
unsigned mDequeuedAt;
- sp<ABuffer> mData; // the client's buffer; if not using data conversion, this is the
- // codec buffer; otherwise, it is allocated separately
- sp<RefBase> mMemRef; // and a reference to the IMemory, so it does not go away
- sp<ABuffer> mCodecData; // the codec's buffer
- sp<RefBase> mCodecRef; // and a reference to the IMemory
+ sp<MediaCodecBuffer> mData; // the client's buffer; if not using data conversion, this is
+ // the codec buffer; otherwise, it is allocated separately
+ sp<RefBase> mMemRef; // and a reference to the IMemory, so it does not go away
+ sp<MediaCodecBuffer> mCodecData; // the codec's buffer
+ sp<RefBase> mCodecRef; // and a reference to the IMemory
+
sp<GraphicBuffer> mGraphicBuffer;
- sp<NativeHandle> mNativeHandle;
int mFenceFd;
FrameRenderTracker::Info *mRenderInfo;
@@ -242,10 +233,9 @@
AString mComponentName;
uint32_t mFlags;
- uint32_t mQuirks;
sp<IOMX> mOMX;
- sp<IBinder> mNodeBinder;
- IOMX::node_id mNode;
+ sp<IOMXNode> mOMXNode;
+ int32_t mNodeGeneration;
sp<MemoryDealer> mDealer[2];
bool mUsingNativeWindow;
@@ -294,13 +284,12 @@
size_t mNumUndequeuedBuffers;
sp<DataConverter> mConverter[2];
+ sp<IGraphicBufferSource> mGraphicBufferSource;
int64_t mRepeatFrameDelayUs;
int64_t mMaxPtsGapUs;
float mMaxFps;
-
int64_t mTimePerFrameUs;
int64_t mTimePerCaptureUs;
-
bool mCreateInputBuffersSuspended;
bool mTunneled;
@@ -341,10 +330,9 @@
uint32_t portIndex, IOMX::buffer_id bufferID,
ssize_t *index = NULL);
+ status_t fillBuffer(BufferInfo *info);
+
status_t setComponentRole(bool isEncoder, const char *mime);
- static const char *getComponentRole(bool isEncoder, const char *mime);
- static status_t setComponentRole(
- const sp<IOMX> &omx, IOMX::node_id node, const char *role);
status_t configureCodec(const char *mime, const sp<AMessage> &msg);
@@ -552,11 +540,6 @@
OMX_ERRORTYPE error = OMX_ErrorUndefined,
status_t internalError = UNKNOWN_ERROR);
- static bool describeDefaultColorFormat(DescribeColorFormat2Params &describeParams);
- static bool describeColorFormat(
- const sp<IOMX> &omx, IOMX::node_id node,
- DescribeColorFormat2Params &describeParams);
-
status_t requestIDRFrame();
status_t setParameters(const sp<AMessage> &params);
diff --git a/include/media/stagefright/AudioSource.h b/include/media/stagefright/AudioSource.h
index 2ec89a4..f20c2cd 100644
--- a/include/media/stagefright/AudioSource.h
+++ b/include/media/stagefright/AudioSource.h
@@ -89,6 +89,8 @@
int64_t mPrevSampleTimeUs;
int64_t mInitialReadTimeUs;
int64_t mNumFramesReceived;
+ int64_t mNumFramesSkipped;
+ int64_t mNumFramesLost;
int64_t mNumClientOwnedBuffers;
List<MediaBuffer * > mBuffersReceived;
diff --git a/include/media/stagefright/CodecBase.h b/include/media/stagefright/CodecBase.h
index be2835d..d8c43a4 100644
--- a/include/media/stagefright/CodecBase.h
+++ b/include/media/stagefright/CodecBase.h
@@ -34,8 +34,9 @@
namespace android {
-struct ABuffer;
+class MediaCodecBuffer;
struct PersistentSurface;
+class Surface;
struct CodecBase : public AHandler, /* static */ ColorUtils {
enum {
@@ -44,7 +45,6 @@
kWhatEOS = 'eos ',
kWhatShutdownCompleted = 'scom',
kWhatFlushCompleted = 'fcom',
- kWhatOutputFormatChanged = 'outC',
kWhatError = 'erro',
kWhatComponentAllocated = 'cAll',
kWhatComponentConfigured = 'cCon',
@@ -88,9 +88,7 @@
struct PortDescription : public RefBase {
virtual size_t countBuffers() = 0;
virtual IOMX::buffer_id bufferIDAt(size_t index) const = 0;
- virtual sp<ABuffer> bufferAt(size_t index) const = 0;
- virtual sp<NativeHandle> handleAt(size_t index) const { return NULL; };
- virtual sp<RefBase> memRefAt(size_t index) const { return NULL; }
+ virtual sp<MediaCodecBuffer> bufferAt(size_t index) const = 0;
protected:
PortDescription();
diff --git a/include/media/stagefright/MPEG4Writer.h b/include/media/stagefright/MPEG4Writer.h
index 8f0eaa7..d7fe23c 100644
--- a/include/media/stagefright/MPEG4Writer.h
+++ b/include/media/stagefright/MPEG4Writer.h
@@ -36,9 +36,8 @@
MPEG4Writer(int fd);
// Limitations
- // 1. No more than 2 tracks can be added
- // 2. Only video or audio source can be added
- // 3. No more than one video and/or one audio source can be added.
+ // No more than one video and/or one audio source can be added, but
+ // multiple metadata sources can be added.
virtual status_t addSource(const sp<IMediaSource> &source);
// Returns INVALID_OPERATION if there is no source or track.
@@ -98,6 +97,8 @@
int64_t mStartTimestampUs;
int mLatitudex10000;
int mLongitudex10000;
+ bool mHasAudioTrack;
+ bool mHasVideoTrack;
bool mAreGeoTagsAvailable;
int32_t mStartTimeOffsetMs;
diff --git a/include/media/stagefright/MediaCodec.h b/include/media/stagefright/MediaCodec.h
index 497974d..89def5d 100644
--- a/include/media/stagefright/MediaCodec.h
+++ b/include/media/stagefright/MediaCodec.h
@@ -35,6 +35,7 @@
struct CodecBase;
class IBatteryStats;
struct ICrypto;
+class MediaCodecBuffer;
class IMemory;
struct MemoryDealer;
class IResourceManagerClient;
@@ -63,14 +64,15 @@
};
static const pid_t kNoPid = -1;
+ static const uid_t kNoUid = -1;
static sp<MediaCodec> CreateByType(
const sp<ALooper> &looper, const AString &mime, bool encoder, status_t *err = NULL,
- pid_t pid = kNoPid);
+ pid_t pid = kNoPid, uid_t uid = kNoUid);
static sp<MediaCodec> CreateByComponentName(
const sp<ALooper> &looper, const AString &name, status_t *err = NULL,
- pid_t pid = kNoPid);
+ pid_t pid = kNoPid, uid_t uid = kNoUid);
static sp<PersistentSurface> CreatePersistentInputSurface();
@@ -149,14 +151,14 @@
status_t getOutputFormat(sp<AMessage> *format) const;
status_t getInputFormat(sp<AMessage> *format) const;
- status_t getWidevineLegacyBuffers(Vector<sp<ABuffer> > *buffers) const;
+ status_t getWidevineLegacyBuffers(Vector<sp<MediaCodecBuffer> > *buffers) const;
- status_t getInputBuffers(Vector<sp<ABuffer> > *buffers) const;
- status_t getOutputBuffers(Vector<sp<ABuffer> > *buffers) const;
+ status_t getInputBuffers(Vector<sp<MediaCodecBuffer> > *buffers) const;
+ status_t getOutputBuffers(Vector<sp<MediaCodecBuffer> > *buffers) const;
- status_t getOutputBuffer(size_t index, sp<ABuffer> *buffer);
+ status_t getOutputBuffer(size_t index, sp<MediaCodecBuffer> *buffer);
status_t getOutputFormat(size_t index, sp<AMessage> *format);
- status_t getInputBuffer(size_t index, sp<ABuffer> *buffer);
+ status_t getInputBuffer(size_t index, sp<MediaCodecBuffer> *buffer);
status_t setSurface(const sp<Surface> &nativeWindow);
@@ -245,7 +247,7 @@
kFlagIsSecure = 64,
kFlagSawMediaServerDie = 128,
kFlagIsEncoder = 256,
- kFlagGatherCodecSpecificData = 512,
+ // 512 skipped
kFlagIsAsync = 1024,
kFlagIsComponentAllocated = 2048,
kFlagPushBlankBuffersOnShutdown = 4096,
@@ -253,13 +255,10 @@
struct BufferInfo {
uint32_t mBufferID;
- sp<ABuffer> mData;
- sp<NativeHandle> mNativeHandle;
- sp<RefBase> mMemRef;
- sp<ABuffer> mEncryptedData;
+ sp<MediaCodecBuffer> mData;
+ sp<MediaCodecBuffer> mSecureData;
sp<IMemory> mSharedEncryptedBuffer;
sp<AMessage> mNotify;
- sp<AMessage> mFormat;
bool mOwnedByClient;
};
@@ -288,6 +287,7 @@
};
State mState;
+ uid_t mUid;
bool mReleasedByResourceManager;
sp<ALooper> mLooper;
sp<ALooper> mCodecLooper;
@@ -330,6 +330,7 @@
List<size_t> mAvailPortBuffers[2];
Vector<BufferInfo> mPortBuffers[2];
+ Vector<sp<MediaCodecBuffer>> mPortBufferArrays[2];
int32_t mDequeueInputTimeoutGeneration;
sp<AReplyToken> mDequeueInputReplyID;
@@ -346,7 +347,7 @@
bool mHaveInputSurface;
bool mHavePendingInputBuffers;
- MediaCodec(const sp<ALooper> &looper, pid_t pid);
+ MediaCodec(const sp<ALooper> &looper, pid_t pid, uid_t uid);
static sp<CodecBase> GetCodecBase(const AString &name, bool nameIsType = false);
@@ -367,7 +368,7 @@
status_t getBufferAndFormat(
size_t portIndex, size_t index,
- sp<ABuffer> *buffer, sp<AMessage> *format);
+ sp<MediaCodecBuffer> *buffer, sp<AMessage> *format);
bool handleDequeueInputBuffer(const sp<AReplyToken> &replyID, bool newRequest = false);
bool handleDequeueOutputBuffer(const sp<AReplyToken> &replyID, bool newRequest = false);
@@ -389,7 +390,7 @@
status_t onSetParameters(const sp<AMessage> &params);
- status_t amendOutputFormatWithCodecSpecificData(const sp<ABuffer> &buffer);
+ status_t amendOutputFormatWithCodecSpecificData(const sp<MediaCodecBuffer> &buffer);
void updateBatteryStat();
bool isExecuting() const;
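Illustrative note (not part of the patch): the extra uid parameter lets a trusted process create a codec attributed to another app; the mime type and ids below are placeholders.

#include <media/stagefright/MediaCodec.h>

// Sketch only: create an AVC decoder on behalf of a client pid/uid.
static android::sp<android::MediaCodec> createDecoderForClientExample(
        const android::sp<android::ALooper>& looper, pid_t clientPid, uid_t clientUid) {
    android::status_t err = android::OK;
    return android::MediaCodec::CreateByType(
            looper, "video/avc", false /* encoder */, &err, clientPid, clientUid);
}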
diff --git a/include/media/stagefright/MediaCodecList.h b/include/media/stagefright/MediaCodecList.h
index 44dbde0..430bc16 100644
--- a/include/media/stagefright/MediaCodecList.h
+++ b/include/media/stagefright/MediaCodecList.h
@@ -115,7 +115,6 @@
Vector<sp<MediaCodecInfo> > mCodecInfos;
sp<MediaCodecInfo> mCurrentInfo;
- sp<IOMX> mOMX;
MediaCodecList();
~MediaCodecList();
diff --git a/include/media/stagefright/MediaCodecSource.h b/include/media/stagefright/MediaCodecSource.h
index 18b1955..f9a46a9 100644
--- a/include/media/stagefright/MediaCodecSource.h
+++ b/include/media/stagefright/MediaCodecSource.h
@@ -21,8 +21,7 @@
#include <media/stagefright/foundation/AHandlerReflector.h>
#include <media/stagefright/foundation/Mutexed.h>
#include <media/stagefright/MediaSource.h>
-
-#include <gui/IGraphicBufferConsumer.h>
+#include <media/stagefright/PersistentSurface.h>
namespace android {
@@ -44,7 +43,7 @@
const sp<ALooper> &looper,
const sp<AMessage> &format,
const sp<MediaSource> &source,
- const sp<IGraphicBufferConsumer> &consumer = NULL,
+ const sp<PersistentSurface> &persistentSurface = NULL,
uint32_t flags = 0);
bool isVideo() const { return mIsVideo; }
@@ -88,7 +87,7 @@
const sp<ALooper> &looper,
const sp<AMessage> &outputFormat,
const sp<MediaSource> &source,
- const sp<IGraphicBufferConsumer> &consumer,
+ const sp<PersistentSurface> &persistentSurface,
uint32_t flags = 0);
status_t onStart(MetaData *params);
@@ -121,7 +120,7 @@
int32_t mEncoderDataSpace;
sp<AMessage> mEncoderActivityNotify;
sp<IGraphicBufferProducer> mGraphicBufferProducer;
- sp<IGraphicBufferConsumer> mGraphicBufferConsumer;
+ sp<PersistentSurface> mPersistentSurface;
List<MediaBuffer *> mInputBufferQueue;
List<size_t> mAvailEncoderInputIndices;
List<int64_t> mDecodingTimeQueue; // decoding time (us) for video
diff --git a/include/media/stagefright/MediaDefs.h b/include/media/stagefright/MediaDefs.h
index 5f2a32d..359fb69 100644
--- a/include/media/stagefright/MediaDefs.h
+++ b/include/media/stagefright/MediaDefs.h
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2009 The Android Open Source Project
+ * Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -14,69 +14,18 @@
* limitations under the License.
*/
-#ifndef MEDIA_DEFS_H_
-#define MEDIA_DEFS_H_
+#ifndef STAGEFRIGHT_MEDIA_DEFS_H_
+#define STAGEFRIGHT_MEDIA_DEFS_H_
-namespace android {
+/*
+ * Please, DO NOT USE!
+ *
+ * This file is here only for legacy reasons. Instead, include directly
+ * the header below.
+ *
+ */
-extern const char *MEDIA_MIMETYPE_IMAGE_JPEG;
+#include <media/MediaDefs.h>
-extern const char *MEDIA_MIMETYPE_VIDEO_VP8;
-extern const char *MEDIA_MIMETYPE_VIDEO_VP9;
-extern const char *MEDIA_MIMETYPE_VIDEO_AVC;
-extern const char *MEDIA_MIMETYPE_VIDEO_HEVC;
-extern const char *MEDIA_MIMETYPE_VIDEO_MPEG4;
-extern const char *MEDIA_MIMETYPE_VIDEO_H263;
-extern const char *MEDIA_MIMETYPE_VIDEO_MPEG2;
-extern const char *MEDIA_MIMETYPE_VIDEO_RAW;
-extern const char *MEDIA_MIMETYPE_VIDEO_DOLBY_VISION;
-
-extern const char *MEDIA_MIMETYPE_AUDIO_AMR_NB;
-extern const char *MEDIA_MIMETYPE_AUDIO_AMR_WB;
-extern const char *MEDIA_MIMETYPE_AUDIO_MPEG; // layer III
-extern const char *MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I;
-extern const char *MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II;
-extern const char *MEDIA_MIMETYPE_AUDIO_MIDI;
-extern const char *MEDIA_MIMETYPE_AUDIO_AAC;
-extern const char *MEDIA_MIMETYPE_AUDIO_QCELP;
-extern const char *MEDIA_MIMETYPE_AUDIO_VORBIS;
-extern const char *MEDIA_MIMETYPE_AUDIO_OPUS;
-extern const char *MEDIA_MIMETYPE_AUDIO_G711_ALAW;
-extern const char *MEDIA_MIMETYPE_AUDIO_G711_MLAW;
-extern const char *MEDIA_MIMETYPE_AUDIO_RAW;
-extern const char *MEDIA_MIMETYPE_AUDIO_FLAC;
-extern const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS;
-extern const char *MEDIA_MIMETYPE_AUDIO_MSGSM;
-extern const char *MEDIA_MIMETYPE_AUDIO_AC3;
-extern const char *MEDIA_MIMETYPE_AUDIO_EAC3;
-
-extern const char *MEDIA_MIMETYPE_CONTAINER_MPEG4;
-extern const char *MEDIA_MIMETYPE_CONTAINER_WAV;
-extern const char *MEDIA_MIMETYPE_CONTAINER_OGG;
-extern const char *MEDIA_MIMETYPE_CONTAINER_MATROSKA;
-extern const char *MEDIA_MIMETYPE_CONTAINER_MPEG2TS;
-extern const char *MEDIA_MIMETYPE_CONTAINER_AVI;
-extern const char *MEDIA_MIMETYPE_CONTAINER_MPEG2PS;
-
-extern const char *MEDIA_MIMETYPE_CONTAINER_WVM;
-
-extern const char *MEDIA_MIMETYPE_TEXT_3GPP;
-extern const char *MEDIA_MIMETYPE_TEXT_SUBRIP;
-extern const char *MEDIA_MIMETYPE_TEXT_VTT;
-extern const char *MEDIA_MIMETYPE_TEXT_CEA_608;
-extern const char *MEDIA_MIMETYPE_TEXT_CEA_708;
-extern const char *MEDIA_MIMETYPE_DATA_TIMED_ID3;
-
-// These are values exported to JAVA API that need to be in sync with
-// frameworks/base/media/java/android/media/AudioFormat.java. Unfortunately,
-// they are not defined in frameworks/av, so defining them here.
-enum AudioEncoding {
- kAudioEncodingPcm16bit = 2,
- kAudioEncodingPcm8bit = 3,
- kAudioEncodingPcmFloat = 4,
-};
-
-} // namespace android
-
-#endif // MEDIA_DEFS_H_
+#endif // STAGEFRIGHT_MEDIA_DEFS_H_
diff --git a/include/media/stagefright/MediaFilter.h b/include/media/stagefright/MediaFilter.h
index d0a572c..0e39431 100644
--- a/include/media/stagefright/MediaFilter.h
+++ b/include/media/stagefright/MediaFilter.h
@@ -21,7 +21,6 @@
namespace android {
-struct ABuffer;
struct GraphicBufferListener;
struct MemoryDealer;
struct SimpleFilter;
@@ -51,7 +50,7 @@
struct PortDescription : public CodecBase::PortDescription {
virtual size_t countBuffers();
virtual IOMX::buffer_id bufferIDAt(size_t index) const;
- virtual sp<ABuffer> bufferAt(size_t index) const;
+ virtual sp<MediaCodecBuffer> bufferAt(size_t index) const;
protected:
PortDescription();
@@ -60,9 +59,9 @@
friend struct MediaFilter;
Vector<IOMX::buffer_id> mBufferIDs;
- Vector<sp<ABuffer> > mBuffers;
+ Vector<sp<MediaCodecBuffer> > mBuffers;
- void addBuffer(IOMX::buffer_id id, const sp<ABuffer> &buffer);
+ void addBuffer(IOMX::buffer_id id, const sp<MediaCodecBuffer> &buffer);
DISALLOW_EVIL_CONSTRUCTORS(PortDescription);
};
@@ -82,7 +81,7 @@
int32_t mOutputFlags;
Status mStatus;
- sp<ABuffer> mData;
+ sp<MediaCodecBuffer> mData;
};
enum State {
@@ -145,7 +144,6 @@
void postFillThisBuffer(BufferInfo *info);
void postDrainThisBuffer(BufferInfo *info);
void postEOS();
- void sendFormatChange();
void requestFillEmptyInput();
void processBuffers();
diff --git a/include/media/stagefright/OMXClient.h b/include/media/stagefright/OMXClient.h
index 2f14d06..6973405 100644
--- a/include/media/stagefright/OMXClient.h
+++ b/include/media/stagefright/OMXClient.h
@@ -18,10 +18,10 @@
#define OMX_CLIENT_H_
-#include <media/IOMX.h>
-
namespace android {
+class IOMX;
+
class OMXClient {
public:
OMXClient();
diff --git a/include/media/stagefright/PersistentSurface.h b/include/media/stagefright/PersistentSurface.h
index a35b9f1..d8b75a2 100644
--- a/include/media/stagefright/PersistentSurface.h
+++ b/include/media/stagefright/PersistentSurface.h
@@ -19,29 +19,46 @@
#define PERSISTENT_SURFACE_H_
#include <gui/IGraphicBufferProducer.h>
-#include <gui/IGraphicBufferConsumer.h>
+#include <android/IGraphicBufferSource.h>
#include <media/stagefright/foundation/ABase.h>
+#include <binder/Parcel.h>
namespace android {
struct PersistentSurface : public RefBase {
+ PersistentSurface() {}
+
PersistentSurface(
const sp<IGraphicBufferProducer>& bufferProducer,
- const sp<IGraphicBufferConsumer>& bufferConsumer) :
+ const sp<IGraphicBufferSource>& bufferSource) :
mBufferProducer(bufferProducer),
- mBufferConsumer(bufferConsumer) { }
+ mBufferSource(bufferSource) { }
sp<IGraphicBufferProducer> getBufferProducer() const {
return mBufferProducer;
}
- sp<IGraphicBufferConsumer> getBufferConsumer() const {
- return mBufferConsumer;
+ sp<IGraphicBufferSource> getBufferSource() const {
+ return mBufferSource;
+ }
+
+ status_t writeToParcel(Parcel *parcel) const {
+ parcel->writeStrongBinder(IInterface::asBinder(mBufferProducer));
+ parcel->writeStrongBinder(IInterface::asBinder(mBufferSource));
+ return NO_ERROR;
+ }
+
+ status_t readFromParcel(const Parcel *parcel) {
+ mBufferProducer = interface_cast<IGraphicBufferProducer>(
+ parcel->readStrongBinder());
+ mBufferSource = interface_cast<IGraphicBufferSource>(
+ parcel->readStrongBinder());
+ return NO_ERROR;
}
private:
- const sp<IGraphicBufferProducer> mBufferProducer;
- const sp<IGraphicBufferConsumer> mBufferConsumer;
+ sp<IGraphicBufferProducer> mBufferProducer;
+ sp<IGraphicBufferSource> mBufferSource;
DISALLOW_EVIL_CONSTRUCTORS(PersistentSurface);
};
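Illustrative note (not part of the patch): the new default constructor plus writeToParcel/readFromParcel let a PersistentSurface cross a binder boundary; a minimal round-trip sketch follows.

#include <binder/Parcel.h>
#include <media/stagefright/PersistentSurface.h>

// Sketch only: serialize and deserialize through a local Parcel.
static void persistentSurfaceParcelExample(const android::sp<android::PersistentSurface>& in,
                                           android::sp<android::PersistentSurface>* out) {
    android::Parcel parcel;
    in->writeToParcel(&parcel);
    parcel.setDataPosition(0);               // rewind before reading back
    *out = new android::PersistentSurface(); // uses the new default constructor
    (*out)->readFromParcel(&parcel);
}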
diff --git a/include/media/stagefright/SimpleDecodingSource.h b/include/media/stagefright/SimpleDecodingSource.h
index e6aee6a..534097b 100644
--- a/include/media/stagefright/SimpleDecodingSource.h
+++ b/include/media/stagefright/SimpleDecodingSource.h
@@ -71,13 +71,12 @@
// Construct this using a codec, source and looper.
SimpleDecodingSource(
const sp<MediaCodec> &codec, const sp<IMediaSource> &source, const sp<ALooper> &looper,
- bool usingSurface, bool isVorbis, const sp<AMessage> &format);
+ bool usingSurface, const sp<AMessage> &format);
sp<MediaCodec> mCodec;
sp<IMediaSource> mSource;
sp<ALooper> mLooper;
bool mUsingSurface;
- bool mIsVorbis;
enum State {
INIT,
STARTED,
diff --git a/include/media/stagefright/SkipCutBuffer.h b/include/media/stagefright/SkipCutBuffer.h
index 61f9949..0fb5690 100644
--- a/include/media/stagefright/SkipCutBuffer.h
+++ b/include/media/stagefright/SkipCutBuffer.h
@@ -18,6 +18,7 @@
#define SKIP_CUT_BUFFER_H_
+#include <media/MediaCodecBuffer.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/foundation/ABuffer.h>
@@ -39,6 +40,7 @@
// After this, the caller should continue processing the buffer as usual.
void submit(MediaBuffer *buffer);
void submit(const sp<ABuffer>& buffer); // same as above, but with an ABuffer
+ void submit(const sp<MediaCodecBuffer>& buffer); // same as above, but with a MediaCodecBuffer
void clear();
size_t size(); // how many bytes are currently stored in the buffer
@@ -48,6 +50,8 @@
private:
void write(const char *src, size_t num);
size_t read(char *dst, size_t num);
+ template <typename T>
+ void submitInternal(const sp<T>& buffer);
int32_t mSkip;
int32_t mFrontPadding;
int32_t mBackPadding;
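Illustrative note (not part of the patch): the new overload lets callers hand a MediaCodecBuffer to SkipCutBuffer directly instead of wrapping it in an ABuffer first.

#include <media/MediaCodecBuffer.h>
#include <media/stagefright/SkipCutBuffer.h>

// Sketch only: trim the configured padding in place, then keep using the buffer.
static void skipCutExample(android::SkipCutBuffer* scb,
                           const android::sp<android::MediaCodecBuffer>& buffer) {
    scb->submit(buffer);
    // continue processing 'buffer' as usual afterwards
}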
diff --git a/include/media/stagefright/foundation/ColorUtils.h b/include/media/stagefright/foundation/ColorUtils.h
index 2368b82..b889a02 100644
--- a/include/media/stagefright/foundation/ColorUtils.h
+++ b/include/media/stagefright/foundation/ColorUtils.h
@@ -138,6 +138,12 @@
int32_t primaries, int32_t transfer, int32_t coeffs, bool fullRange,
ColorAspects &aspects);
+ // unpack a uint32_t to a full ColorAspects struct
+ static ColorAspects unpackToColorAspects(uint32_t packed);
+
+ // pack a full ColorAspects struct into a uint32_t
+ static uint32_t packToU32(const ColorAspects &aspects);
+
// updates Unspecified color aspects to their defaults based on the video size
static void setDefaultCodecColorAspectsIfNeeded(
ColorAspects &aspects, int32_t width, int32_t height);
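Illustrative note (not part of the patch): packToU32() and unpackToColorAspects() are assumed to be inverses, so a full ColorAspects struct can travel in a single uint32_t field.

#include <media/stagefright/foundation/ColorUtils.h>

// Sketch only: pack, then unpack, a ColorAspects value.
static void colorAspectsPackExample(const android::ColorAspects& aspects) {
    uint32_t packed = android::ColorUtils::packToU32(aspects);
    android::ColorAspects restored = android::ColorUtils::unpackToColorAspects(packed);
    (void)restored;  // expected to equal 'aspects'
}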
diff --git a/media/audioserver/audioserver.rc b/media/audioserver/audioserver.rc
index 80f78b6..4b0f6a2 100644
--- a/media/audioserver/audioserver.rc
+++ b/media/audioserver/audioserver.rc
@@ -5,3 +5,4 @@
group audio camera drmrpc inet media mediadrm net_bt net_bt_admin net_bw_acct
ioprio rt 4
writepid /dev/cpuset/foreground/tasks /dev/stune/foreground/tasks
+ onrestart restart audio-hal-2-0
\ No newline at end of file
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index ff5903d..778540c 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -85,7 +85,7 @@
audio_session_t sessionId,
transfer_type transferType,
audio_input_flags_t flags,
- int uid,
+ uid_t uid,
pid_t pid,
const audio_attributes_t* pAttributes)
: mActive(false),
@@ -143,7 +143,7 @@
audio_session_t sessionId,
transfer_type transferType,
audio_input_flags_t flags,
- int uid,
+ uid_t uid,
pid_t pid,
const audio_attributes_t* pAttributes)
{
@@ -236,7 +236,7 @@
int callingpid = IPCThreadState::self()->getCallingPid();
int mypid = getpid();
- if (uid == -1 || (callingpid != mypid)) {
+ if (uid == AUDIO_UID_INVALID || (callingpid != mypid)) {
mClientUid = IPCThreadState::self()->getCallingUid();
} else {
mClientUid = uid;
@@ -1274,6 +1274,9 @@
return true;
}
}
+ if (exitPending()) {
+ return false;
+ }
nsecs_t ns = mReceiver.processAudioBuffer();
switch (ns) {
case 0:
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 33974e0..d45b12f 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -1175,14 +1175,14 @@
status_t AudioSystem::startAudioSource(const struct audio_port_config *source,
const audio_attributes_t *attributes,
- audio_io_handle_t *handle)
+ audio_patch_handle_t *handle)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
return aps->startAudioSource(source, attributes, handle);
}
-status_t AudioSystem::stopAudioSource(audio_io_handle_t handle)
+status_t AudioSystem::stopAudioSource(audio_patch_handle_t handle)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index c96f16a..3c7e8b7 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -50,6 +50,8 @@
return x > y ? x : y;
}
+static const int32_t NANOS_PER_SECOND = 1000000000;
+
static inline nsecs_t framesToNanoseconds(ssize_t frames, uint32_t sampleRate, float speed)
{
return ((double)frames * 1000000000) / ((double)sampleRate * speed);
@@ -60,6 +62,11 @@
return tv.tv_sec * 1000000ll + tv.tv_nsec / 1000;
}
+static inline nsecs_t convertTimespecToNs(const struct timespec &tv)
+{
+ return tv.tv_sec * (long long)NANOS_PER_SECOND + tv.tv_nsec;
+}
+
// current monotonic time in microseconds.
static int64_t getNowUs()
{
@@ -197,7 +204,7 @@
audio_session_t sessionId,
transfer_type transferType,
const audio_offload_info_t *offloadInfo,
- int uid,
+ uid_t uid,
pid_t pid,
const audio_attributes_t* pAttributes,
bool doNotReconnect,
@@ -228,7 +235,7 @@
audio_session_t sessionId,
transfer_type transferType,
const audio_offload_info_t *offloadInfo,
- int uid,
+ uid_t uid,
pid_t pid,
const audio_attributes_t* pAttributes,
bool doNotReconnect,
@@ -289,7 +296,7 @@
audio_session_t sessionId,
transfer_type transferType,
const audio_offload_info_t *offloadInfo,
- int uid,
+ uid_t uid,
pid_t pid,
const audio_attributes_t* pAttributes,
bool doNotReconnect,
@@ -483,7 +490,7 @@
}
int callingpid = IPCThreadState::self()->getCallingPid();
int mypid = getpid();
- if (uid == -1 || (callingpid != mypid)) {
+ if (uid == AUDIO_UID_INVALID || (callingpid != mypid)) {
mClientUid = IPCThreadState::self()->getCallingUid();
} else {
mClientUid = uid;
@@ -536,9 +543,11 @@
mTimestampStartupGlitchReported = false;
mRetrogradeMotionReported = false;
mPreviousLocation = ExtendedTimestamp::LOCATION_INVALID;
+ mStartTs.mPosition = 0;
mUnderrunCountOffset = 0;
mFramesWritten = 0;
mFramesWrittenServerOffset = 0;
+ mFramesWrittenAtRestore = -1; // -1 is a unique initializer.
return NO_ERROR;
}
@@ -562,6 +571,17 @@
mState = STATE_ACTIVE;
}
(void) updateAndGetPosition_l();
+
+ // save start timestamp
+ if (isOffloadedOrDirect_l()) {
+ if (getTimestamp_l(mStartTs) != OK) {
+ mStartTs.mPosition = 0;
+ }
+ } else {
+ if (getTimestamp_l(&mStartEts) != OK) {
+ mStartEts.clear();
+ }
+ }
if (previousState == STATE_STOPPED || previousState == STATE_FLUSHED) {
// reset current position as seen by client to 0
mPosition = 0;
@@ -570,19 +590,17 @@
mRetrogradeMotionReported = false;
mPreviousLocation = ExtendedTimestamp::LOCATION_INVALID;
- // read last server side position change via timestamp.
- ExtendedTimestamp ets;
- if (mProxy->getTimestamp(&ets) == OK &&
- ets.mTimeNs[ExtendedTimestamp::LOCATION_SERVER] > 0) {
+ if (!isOffloadedOrDirect_l()
+ && mStartEts.mTimeNs[ExtendedTimestamp::LOCATION_SERVER] > 0) {
// Server side has consumed something, but is it finished consuming?
// It is possible since flush and stop are asynchronous that the server
// is still active at this point.
ALOGV("start: server read:%lld cumulative flushed:%lld client written:%lld",
(long long)(mFramesWrittenServerOffset
- + ets.mPosition[ExtendedTimestamp::LOCATION_SERVER]),
- (long long)ets.mFlushed,
+ + mStartEts.mPosition[ExtendedTimestamp::LOCATION_SERVER]),
+ (long long)mStartEts.mFlushed,
(long long)mFramesWritten);
- mFramesWrittenServerOffset = -ets.mPosition[ExtendedTimestamp::LOCATION_SERVER];
+ mFramesWrittenServerOffset = -mStartEts.mPosition[ExtendedTimestamp::LOCATION_SERVER];
}
mFramesWritten = 0;
mProxy->clearTimestamp(); // need new server push for valid timestamp
@@ -599,7 +617,7 @@
mRefreshRemaining = true;
}
mNewPosition = mPosition + mUpdatePeriod;
- int32_t flags = android_atomic_and(~CBLK_DISABLED, &mCblk->mFlags);
+ int32_t flags = android_atomic_and(~(CBLK_STREAM_END_DONE | CBLK_DISABLED), &mCblk->mFlags);
status_t status = NO_ERROR;
if (!(flags & CBLK_INVALID)) {
@@ -2181,10 +2199,12 @@
mUnderrunCountOffset = getUnderrunCount_l();
// save the old static buffer position
+ uint32_t staticPosition = 0;
size_t bufferPosition = 0;
int loopCount = 0;
if (mStaticProxy != 0) {
mStaticProxy->getBufferPositionAndLoopCount(&bufferPosition, &loopCount);
+ staticPosition = mStaticProxy->getPosition().unsignedValue();
}
mFlags = mOrigFlags;
@@ -2216,8 +2236,11 @@
}
if (mState == STATE_ACTIVE) {
result = mAudioTrack->start();
- mFramesWrittenServerOffset = mFramesWritten; // server resets to zero so we offset
}
+ // server resets to zero so we offset
+ mFramesWrittenServerOffset =
+ mStaticProxy.get() != nullptr ? staticPosition : mFramesWritten;
+ mFramesWrittenAtRestore = mFramesWrittenServerOffset;
}
if (result != NO_ERROR) {
ALOGW("restoreTrack_l() failed status %d", result);
@@ -2315,7 +2338,11 @@
status_t AudioTrack::getTimestamp(AudioTimestamp& timestamp)
{
AutoMutex lock(mLock);
+ return getTimestamp_l(timestamp);
+}
+status_t AudioTrack::getTimestamp_l(AudioTimestamp& timestamp)
+{
bool previousTimestampValid = mPreviousTimestampValid;
// Set false here to cover all the error return cases.
mPreviousTimestampValid = false;
@@ -2393,6 +2420,26 @@
ALOGV_IF(mPreviousLocation == ExtendedTimestamp::LOCATION_SERVER,
"getTimestamp() location moved from server to kernel");
}
+
+ // We update the timestamp time even when paused.
+ if (mState == STATE_PAUSED /* not needed: STATE_PAUSED_STOPPING */) {
+ const int64_t now = systemTime();
+ const int64_t at = convertTimespecToNs(timestamp.mTime);
+ const int64_t lag =
+ (ets.mTimeNs[ExtendedTimestamp::LOCATION_SERVER_LASTKERNELOK] < 0 ||
+ ets.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL_LASTKERNELOK] < 0)
+ ? int64_t(mAfLatency * 1000000LL)
+ : (ets.mPosition[ExtendedTimestamp::LOCATION_SERVER_LASTKERNELOK]
+ - ets.mPosition[ExtendedTimestamp::LOCATION_KERNEL_LASTKERNELOK])
+ * NANOS_PER_SECOND / mSampleRate;
+ const int64_t limit = now - lag; // no earlier than this limit
+ if (at < limit) {
+ ALOGV("timestamp pause lag:%lld adjusting from %lld to %lld",
+ (long long)lag, (long long)at, (long long)limit);
+ timestamp.mTime.tv_sec = limit / NANOS_PER_SECOND;
+ timestamp.mTime.tv_nsec = limit % NANOS_PER_SECOND; // compiler opt.
+ }
+ }
mPreviousLocation = location;
} else {
// right after AudioTrack is started, one may not find a timestamp
@@ -2400,7 +2447,17 @@
}
}
if (status == INVALID_OPERATION) {
- status = WOULD_BLOCK;
+ // INVALID_OPERATION occurs when no timestamp has been issued by the server;
+ // other failures are signaled by a negative time.
+ // If we come out of FLUSHED or STOPPED where the position is known
+ // to be zero we convert this to WOULD_BLOCK (with the implicit meaning of
+ // "zero" for NuPlayer). We don't convert for track restoration as position
+ // does not reset.
+ ALOGV("timestamp server offset:%lld restore frames:%lld",
+ (long long)mFramesWrittenServerOffset, (long long)mFramesWrittenAtRestore);
+ if (mFramesWrittenServerOffset != mFramesWrittenAtRestore) {
+ status = WOULD_BLOCK;
+ }
}
}
if (status != NO_ERROR) {
@@ -2412,6 +2469,7 @@
// use cached paused position in case another offloaded track is running.
timestamp.mPosition = mPausedPosition;
clock_gettime(CLOCK_MONOTONIC, &timestamp.mTime);
+ // TODO: adjust for delay
return NO_ERROR;
}
@@ -2498,21 +2556,18 @@
// This is sometimes caused by erratic reports of the available space in the ALSA drivers.
if (status == NO_ERROR) {
if (previousTimestampValid) {
-#define TIME_TO_NANOS(time) ((int64_t)(time).tv_sec * 1000000000 + (time).tv_nsec)
- const int64_t previousTimeNanos = TIME_TO_NANOS(mPreviousTimestamp.mTime);
- const int64_t currentTimeNanos = TIME_TO_NANOS(timestamp.mTime);
-#undef TIME_TO_NANOS
+ const int64_t previousTimeNanos = convertTimespecToNs(mPreviousTimestamp.mTime);
+ const int64_t currentTimeNanos = convertTimespecToNs(timestamp.mTime);
if (currentTimeNanos < previousTimeNanos) {
- ALOGW("retrograde timestamp time");
- // FIXME Consider blocking this from propagating upwards.
+ ALOGW("retrograde timestamp time corrected, %lld < %lld",
+ (long long)currentTimeNanos, (long long)previousTimeNanos);
+ timestamp.mTime = mPreviousTimestamp.mTime;
}
// Looking at signed delta will work even when the timestamps
// are wrapping around.
int32_t deltaPosition = (Modulo<uint32_t>(timestamp.mPosition)
- mPreviousTimestamp.mPosition).signedValue();
- // position can bobble slightly as an artifact; this hides the bobble
- static const int32_t MINIMUM_POSITION_DELTA = 8;
if (deltaPosition < 0) {
// Only report once per position instead of spamming the log.
if (!mRetrogradeMotionReported) {
@@ -2525,9 +2580,21 @@
} else {
mRetrogradeMotionReported = false;
}
- if (deltaPosition < MINIMUM_POSITION_DELTA) {
- timestamp = mPreviousTimestamp; // Use last valid timestamp.
+ if (deltaPosition < 0) {
+ timestamp.mPosition = mPreviousTimestamp.mPosition;
+ deltaPosition = 0;
}
+#if 0
+ // Uncomment this to verify audio timestamp rate.
+ const int64_t deltaTime =
+ convertTimespecToNs(timestamp.mTime) - previousTimeNanos;
+ if (deltaTime != 0) {
+ const int64_t computedSampleRate =
+ deltaPosition * (long long)NANOS_PER_SECOND / deltaTime;
+ ALOGD("computedSampleRate:%u sampleRate:%u",
+ (unsigned)computedSampleRate, mSampleRate);
+ }
+#endif
}
mPreviousTimestamp = timestamp;
mPreviousTimestampValid = true;
@@ -2697,6 +2764,75 @@
return NO_ERROR;
}
+bool AudioTrack::hasStarted()
+{
+ AutoMutex lock(mLock);
+ switch (mState) {
+ case STATE_STOPPED:
+ if (isOffloadedOrDirect_l()) {
+ // check if we have started in the past to return true.
+ return mStartUs > 0;
+ }
+ // A normal audio track may still be draining, so
+ // check if stream has ended. This covers fasttrack position
+ // instability and start/stop without any data written.
+ if (mProxy->getStreamEndDone()) {
+ return true;
+ }
+ // fall through
+ case STATE_ACTIVE:
+ case STATE_STOPPING:
+ break;
+ case STATE_PAUSED:
+ case STATE_PAUSED_STOPPING:
+ case STATE_FLUSHED:
+ return false; // we're not active
+ default:
+ LOG_ALWAYS_FATAL("Invalid mState in hasStarted(): %d", mState);
+ break;
+ }
+
+ // wait indicates whether we need to wait for a timestamp.
+ // This is conservatively figured - if we encounter an unexpected error
+ // then we will not wait.
+ bool wait = false;
+ if (isOffloadedOrDirect_l()) {
+ AudioTimestamp ts;
+ status_t status = getTimestamp_l(ts);
+ if (status == WOULD_BLOCK) {
+ wait = true;
+ } else if (status == OK) {
+ wait = (ts.mPosition == 0 || ts.mPosition == mStartTs.mPosition);
+ }
+ ALOGV("hasStarted wait:%d ts:%u start position:%lld",
+ (int)wait,
+ ts.mPosition,
+ (long long)mStartTs.mPosition);
+ } else {
+ int location = ExtendedTimestamp::LOCATION_SERVER; // for ALOG
+ ExtendedTimestamp ets;
+ status_t status = getTimestamp_l(&ets);
+ if (status == WOULD_BLOCK) { // no SERVER or KERNEL frame info in ets
+ wait = true;
+ } else if (status == OK) {
+ for (location = ExtendedTimestamp::LOCATION_KERNEL;
+ location >= ExtendedTimestamp::LOCATION_SERVER; --location) {
+ if (ets.mTimeNs[location] < 0 || mStartEts.mTimeNs[location] < 0) {
+ continue;
+ }
+ wait = ets.mPosition[location] == 0
+ || ets.mPosition[location] == mStartEts.mPosition[location];
+ break;
+ }
+ }
+ ALOGV("hasStarted wait:%d ets:%lld start position:%lld",
+ (int)wait,
+ (long long)ets.mPosition[location],
+ (long long)mStartEts.mPosition[location]);
+ }
+ return !wait;
+}
+
// =========================================================================
void AudioTrack::DeathNotifier::binderDied(const wp<IBinder>& who __unused)
diff --git a/media/libaudioclient/IAudioPolicyService.cpp b/media/libaudioclient/IAudioPolicyService.cpp
index 946da8a..2fb2da6 100644
--- a/media/libaudioclient/IAudioPolicyService.cpp
+++ b/media/libaudioclient/IAudioPolicyService.cpp
@@ -738,7 +738,7 @@
virtual status_t startAudioSource(const struct audio_port_config *source,
const audio_attributes_t *attributes,
- audio_io_handle_t *handle)
+ audio_patch_handle_t *handle)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
@@ -755,11 +755,11 @@
if (status != NO_ERROR) {
return status;
}
- *handle = (audio_io_handle_t)reply.readInt32();
+ *handle = (audio_patch_handle_t)reply.readInt32();
return status;
}
- virtual status_t stopAudioSource(audio_io_handle_t handle)
+ virtual status_t stopAudioSource(audio_patch_handle_t handle)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
@@ -1332,7 +1332,7 @@
data.read(&source, sizeof(struct audio_port_config));
audio_attributes_t attributes;
data.read(&attributes, sizeof(audio_attributes_t));
- audio_io_handle_t handle = {};
+ audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
status_t status = startAudioSource(&source, &attributes, &handle);
reply->writeInt32(status);
reply->writeInt32(handle);
@@ -1341,7 +1341,7 @@
case STOP_AUDIO_SOURCE: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
- audio_io_handle_t handle = (audio_io_handle_t)data.readInt32();
+ audio_patch_handle_t handle = (audio_patch_handle_t) data.readInt32();
status_t status = stopAudioSource(handle);
reply->writeInt32(status);
return NO_ERROR;
diff --git a/media/libaudiohal/Android.bp b/media/libaudiohal/Android.bp
new file mode 100644
index 0000000..e943eed
--- /dev/null
+++ b/media/libaudiohal/Android.bp
@@ -0,0 +1,24 @@
+cc_library_shared {
+ name: "libaudiohal",
+
+ srcs: [
+ "DeviceHalLocal.cpp",
+ "DevicesFactoryHalLocal.cpp",
+ "EffectHalLocal.cpp",
+ "EffectsFactoryHalLocal.cpp",
+ "StreamHalLocal.cpp",
+ ],
+
+ shared_libs: [
+ "libcutils",
+ "libhardware",
+ "liblog",
+ "libeffects",
+ "libutils",
+ ],
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+}
diff --git a/media/libaudiohal/DeviceHalLocal.cpp b/media/libaudiohal/DeviceHalLocal.cpp
new file mode 100644
index 0000000..78adfef
--- /dev/null
+++ b/media/libaudiohal/DeviceHalLocal.cpp
@@ -0,0 +1,187 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "DeviceHalLocal"
+//#define LOG_NDEBUG 0
+
+#include <utils/Log.h>
+
+#include "DeviceHalLocal.h"
+#include "StreamHalLocal.h"
+
+namespace android {
+
+DeviceHalLocal::DeviceHalLocal(audio_hw_device_t *dev)
+ : mDev(dev) {
+}
+
+DeviceHalLocal::~DeviceHalLocal() {
+ int status = audio_hw_device_close(mDev);
+ ALOGW_IF(status, "Error closing audio hw device %p: %s", mDev, strerror(-status));
+ mDev = 0;
+}
+
+status_t DeviceHalLocal::getSupportedDevices(uint32_t *devices) {
+ if (mDev->get_supported_devices == NULL) return INVALID_OPERATION;
+ *devices = mDev->get_supported_devices(mDev);
+ return OK;
+}
+
+status_t DeviceHalLocal::initCheck() {
+ return mDev->init_check(mDev);
+}
+
+status_t DeviceHalLocal::setVoiceVolume(float volume) {
+ return mDev->set_voice_volume(mDev, volume);
+}
+
+status_t DeviceHalLocal::setMasterVolume(float volume) {
+ if (mDev->set_master_volume == NULL) return INVALID_OPERATION;
+ return mDev->set_master_volume(mDev, volume);
+}
+
+status_t DeviceHalLocal::getMasterVolume(float *volume) {
+ if (mDev->get_master_volume == NULL) return INVALID_OPERATION;
+ return mDev->get_master_volume(mDev, volume);
+}
+
+status_t DeviceHalLocal::setMode(audio_mode_t mode) {
+ return mDev->set_mode(mDev, mode);
+}
+
+status_t DeviceHalLocal::setMicMute(bool state) {
+ return mDev->set_mic_mute(mDev, state);
+}
+
+status_t DeviceHalLocal::getMicMute(bool *state) {
+ return mDev->get_mic_mute(mDev, state);
+}
+
+status_t DeviceHalLocal::setMasterMute(bool state) {
+ if (mDev->set_master_mute == NULL) return INVALID_OPERATION;
+ return mDev->set_master_mute(mDev, state);
+}
+
+status_t DeviceHalLocal::getMasterMute(bool *state) {
+ if (mDev->get_master_mute == NULL) return INVALID_OPERATION;
+ return mDev->get_master_mute(mDev, state);
+}
+
+status_t DeviceHalLocal::setParameters(const String8& kvPairs) {
+ return mDev->set_parameters(mDev, kvPairs.string());
+}
+
+status_t DeviceHalLocal::getParameters(const String8& keys, String8 *values) {
+ char *halValues = mDev->get_parameters(mDev, keys.string());
+ if (halValues != NULL) {
+ values->setTo(halValues);
+ free(halValues);
+ } else {
+ values->clear();
+ }
+ return OK;
+}
+
+status_t DeviceHalLocal::getInputBufferSize(
+ const struct audio_config *config, size_t *size) {
+ *size = mDev->get_input_buffer_size(mDev, config);
+ return OK;
+}
+
+status_t DeviceHalLocal::openOutputStream(
+ audio_io_handle_t handle,
+ audio_devices_t devices,
+ audio_output_flags_t flags,
+ struct audio_config *config,
+ const char *address,
+ sp<StreamOutHalInterface> *outStream) {
+ audio_stream_out_t *halStream;
+ int openResult = mDev->open_output_stream(
+ mDev, handle, devices, flags, config, &halStream, address);
+ if (openResult == OK) {
+ *outStream = new StreamOutHalLocal(halStream, this);
+ }
+ return openResult;
+}
+
+status_t DeviceHalLocal::openInputStream(
+ audio_io_handle_t handle,
+ audio_devices_t devices,
+ struct audio_config *config,
+ audio_input_flags_t flags,
+ const char *address,
+ audio_source_t source,
+ sp<StreamInHalInterface> *inStream) {
+ audio_stream_in_t *halStream;
+ int openResult = mDev->open_input_stream(
+ mDev, handle, devices, config, &halStream, flags, address, source);
+ if (openResult == OK) {
+ *inStream = new StreamInHalLocal(halStream, this);
+ }
+ return openResult;
+}
+
+status_t DeviceHalLocal::supportsAudioPatches(bool *supportsPatches) {
+ *supportsPatches = version() >= AUDIO_DEVICE_API_VERSION_3_0;
+ return OK;
+}
+
+status_t DeviceHalLocal::createAudioPatch(
+ unsigned int num_sources,
+ const struct audio_port_config *sources,
+ unsigned int num_sinks,
+ const struct audio_port_config *sinks,
+ audio_patch_handle_t *patch) {
+ if (version() >= AUDIO_DEVICE_API_VERSION_3_0) {
+ return mDev->create_audio_patch(
+ mDev, num_sources, sources, num_sinks, sinks, patch);
+ } else {
+ return INVALID_OPERATION;
+ }
+}
+
+status_t DeviceHalLocal::releaseAudioPatch(audio_patch_handle_t patch) {
+ if (version() >= AUDIO_DEVICE_API_VERSION_3_0) {
+ return mDev->release_audio_patch(mDev, patch);
+ } else {
+ return INVALID_OPERATION;
+ }
+}
+
+status_t DeviceHalLocal::getAudioPort(struct audio_port *port) {
+ return mDev->get_audio_port(mDev, port);
+}
+
+status_t DeviceHalLocal::setAudioPortConfig(const struct audio_port_config *config) {
+ if (version() >= AUDIO_DEVICE_API_VERSION_3_0)
+ return mDev->set_audio_port_config(mDev, config);
+ else
+ return INVALID_OPERATION;
+}
+
+status_t DeviceHalLocal::dump(int fd) {
+ return mDev->dump(mDev, fd);
+}
+
+void DeviceHalLocal::closeOutputStream(struct audio_stream_out *stream_out) {
+ mDev->close_output_stream(mDev, stream_out);
+}
+
+void DeviceHalLocal::closeInputStream(struct audio_stream_in *stream_in) {
+ mDev->close_input_stream(mDev, stream_in);
+}
+
+} // namespace android
diff --git a/media/libaudiohal/DeviceHalLocal.h b/media/libaudiohal/DeviceHalLocal.h
new file mode 100644
index 0000000..865f296
--- /dev/null
+++ b/media/libaudiohal/DeviceHalLocal.h
@@ -0,0 +1,124 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_DEVICE_HAL_LOCAL_H
+#define ANDROID_HARDWARE_DEVICE_HAL_LOCAL_H
+
+#include <hardware/audio.h>
+#include <media/audiohal/DeviceHalInterface.h>
+
+namespace android {
+
+class DeviceHalLocal : public DeviceHalInterface
+{
+ public:
+ // Sets the value of 'devices' to a bitmask of 1 or more values of audio_devices_t.
+ virtual status_t getSupportedDevices(uint32_t *devices);
+
+ // Check to see if the audio hardware interface has been initialized.
+ virtual status_t initCheck();
+
+ // Set the audio volume of a voice call. Range is between 0.0 and 1.0.
+ virtual status_t setVoiceVolume(float volume);
+
+ // Set the audio volume for all audio activities other than voice call.
+ virtual status_t setMasterVolume(float volume);
+
+ // Get the current master volume value for the HAL.
+ virtual status_t getMasterVolume(float *volume);
+
+ // Called when the audio mode changes.
+ virtual status_t setMode(audio_mode_t mode);
+
+ // Muting control.
+ virtual status_t setMicMute(bool state);
+ virtual status_t getMicMute(bool *state);
+ virtual status_t setMasterMute(bool state);
+ virtual status_t getMasterMute(bool *state);
+
+ // Set global audio parameters.
+ virtual status_t setParameters(const String8& kvPairs);
+
+ // Get global audio parameters.
+ virtual status_t getParameters(const String8& keys, String8 *values);
+
+ // Returns audio input buffer size according to parameters passed.
+ virtual status_t getInputBufferSize(const struct audio_config *config,
+ size_t *size);
+
+ // Creates and opens the audio hardware output stream. The stream is closed
+ // by releasing all references to the returned object.
+ virtual status_t openOutputStream(
+ audio_io_handle_t handle,
+ audio_devices_t devices,
+ audio_output_flags_t flags,
+ struct audio_config *config,
+ const char *address,
+ sp<StreamOutHalInterface> *outStream);
+
+ // Creates and opens the audio hardware input stream. The stream is closed
+ // by releasing all references to the returned object.
+ virtual status_t openInputStream(
+ audio_io_handle_t handle,
+ audio_devices_t devices,
+ struct audio_config *config,
+ audio_input_flags_t flags,
+ const char *address,
+ audio_source_t source,
+ sp<StreamInHalInterface> *inStream);
+
+ // Returns whether createAudioPatch and releaseAudioPatch operations are supported.
+ virtual status_t supportsAudioPatches(bool *supportsPatches);
+
+ // Creates an audio patch between several source and sink ports.
+ virtual status_t createAudioPatch(
+ unsigned int num_sources,
+ const struct audio_port_config *sources,
+ unsigned int num_sinks,
+ const struct audio_port_config *sinks,
+ audio_patch_handle_t *patch);
+
+ // Releases an audio patch.
+ virtual status_t releaseAudioPatch(audio_patch_handle_t patch);
+
+ // Fills the list of supported attributes for a given audio port.
+ virtual status_t getAudioPort(struct audio_port *port);
+
+ // Set audio port configuration.
+ virtual status_t setAudioPortConfig(const struct audio_port_config *config);
+
+ virtual status_t dump(int fd);
+
+ void closeOutputStream(struct audio_stream_out *stream_out);
+ void closeInputStream(struct audio_stream_in *stream_in);
+
+ private:
+ audio_hw_device_t *mDev;
+
+ friend class DevicesFactoryHalLocal;
+
+ // Can not be constructed directly by clients.
+ explicit DeviceHalLocal(audio_hw_device_t *dev);
+
+ // The destructor automatically closes the device.
+ virtual ~DeviceHalLocal();
+
+ uint32_t version() const { return mDev->common.version; }
+};
+
+} // namespace android
+
+#endif // ANDROID_HARDWARE_DEVICE_HAL_LOCAL_H
diff --git a/media/libaudiohal/DevicesFactoryHalLocal.cpp b/media/libaudiohal/DevicesFactoryHalLocal.cpp
new file mode 100644
index 0000000..cd9a9e7
--- /dev/null
+++ b/media/libaudiohal/DevicesFactoryHalLocal.cpp
@@ -0,0 +1,74 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "DevicesFactoryHalLocal"
+//#define LOG_NDEBUG 0
+
+#include <string.h>
+
+#include <hardware/audio.h>
+#include <utils/Log.h>
+
+#include "DeviceHalLocal.h"
+#include "DevicesFactoryHalLocal.h"
+
+namespace android {
+
+// static
+sp<DevicesFactoryHalInterface> DevicesFactoryHalInterface::create() {
+ return new DevicesFactoryHalLocal();
+}
+
+static status_t load_audio_interface(const char *if_name, audio_hw_device_t **dev)
+{
+ const hw_module_t *mod;
+ int rc;
+
+ rc = hw_get_module_by_class(AUDIO_HARDWARE_MODULE_ID, if_name, &mod);
+ if (rc) {
+ ALOGE("%s couldn't load audio hw module %s.%s (%s)", __func__,
+ AUDIO_HARDWARE_MODULE_ID, if_name, strerror(-rc));
+ goto out;
+ }
+ rc = audio_hw_device_open(mod, dev);
+ if (rc) {
+ ALOGE("%s couldn't open audio hw device in %s.%s (%s)", __func__,
+ AUDIO_HARDWARE_MODULE_ID, if_name, strerror(-rc));
+ goto out;
+ }
+ if ((*dev)->common.version < AUDIO_DEVICE_API_VERSION_MIN) {
+ ALOGE("%s wrong audio hw device version %04x", __func__, (*dev)->common.version);
+ rc = BAD_VALUE;
+ audio_hw_device_close(*dev);
+ goto out;
+ }
+ return OK;
+
+out:
+ *dev = NULL;
+ return rc;
+}
+
+status_t DevicesFactoryHalLocal::openDevice(const char *name, sp<DeviceHalInterface> *device) {
+ audio_hw_device_t *dev;
+ status_t rc = load_audio_interface(name, &dev);
+ if (rc == OK) {
+ *device = new DeviceHalLocal(dev);
+ }
+ return rc;
+}
+
+} // namespace android
diff --git a/media/libaudiohal/DevicesFactoryHalLocal.h b/media/libaudiohal/DevicesFactoryHalLocal.h
new file mode 100644
index 0000000..690cd34
--- /dev/null
+++ b/media/libaudiohal/DevicesFactoryHalLocal.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_DEVICES_FACTORY_HAL_LOCAL_H
+#define ANDROID_HARDWARE_DEVICES_FACTORY_HAL_LOCAL_H
+
+#include <media/audiohal/DevicesFactoryHalInterface.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+
+#include "DeviceHalLocal.h"
+
+namespace android {
+
+class DevicesFactoryHalLocal : public DevicesFactoryHalInterface
+{
+ public:
+ virtual ~DevicesFactoryHalLocal() {}
+
+ // Opens a device with the specified name. To close the device, it is
+ // necessary to release references to the returned object.
+ virtual status_t openDevice(const char *name, sp<DeviceHalInterface> *device);
+
+ private:
+ friend class DevicesFactoryHalInterface;
+
+ // Can not be constructed directly by clients.
+ DevicesFactoryHalLocal() {}
+};
+
+} // namespace android
+
+#endif // ANDROID_HARDWARE_DEVICES_FACTORY_HAL_LOCAL_H
diff --git a/media/libaudiohal/EffectHalLocal.cpp b/media/libaudiohal/EffectHalLocal.cpp
new file mode 100644
index 0000000..56a365c
--- /dev/null
+++ b/media/libaudiohal/EffectHalLocal.cpp
@@ -0,0 +1,54 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "EffectHalLocal"
+//#define LOG_NDEBUG 0
+
+#include <media/EffectsFactoryApi.h>
+#include <utils/Log.h>
+
+#include "EffectHalLocal.h"
+
+namespace android {
+
+EffectHalLocal::EffectHalLocal(effect_handle_t handle)
+ : mHandle(handle) {
+}
+
+EffectHalLocal::~EffectHalLocal() {
+ int status = EffectRelease(mHandle);
+ ALOGW_IF(status, "Error releasing effect %p: %s", mHandle, strerror(-status));
+ mHandle = 0;
+}
+
+status_t EffectHalLocal::process(audio_buffer_t *inBuffer, audio_buffer_t *outBuffer) {
+ return (*mHandle)->process(mHandle, inBuffer, outBuffer);
+}
+
+status_t EffectHalLocal::processReverse(audio_buffer_t *inBuffer, audio_buffer_t *outBuffer) {
+ return (*mHandle)->process_reverse(mHandle, inBuffer, outBuffer);
+}
+
+status_t EffectHalLocal::command(uint32_t cmdCode, uint32_t cmdSize, void *pCmdData,
+ uint32_t *replySize, void *pReplyData) {
+ return (*mHandle)->command(mHandle, cmdCode, cmdSize, pCmdData, replySize, pReplyData);
+}
+
+status_t EffectHalLocal::getDescriptor(effect_descriptor_t *pDescriptor) {
+ return (*mHandle)->get_descriptor(mHandle, pDescriptor);
+}
+
+} // namespace android
diff --git a/media/libaudiohal/EffectHalLocal.h b/media/libaudiohal/EffectHalLocal.h
new file mode 100644
index 0000000..77f774f
--- /dev/null
+++ b/media/libaudiohal/EffectHalLocal.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_EFFECT_HAL_LOCAL_H
+#define ANDROID_HARDWARE_EFFECT_HAL_LOCAL_H
+
+#include <hardware/audio_effect.h>
+#include <media/audiohal/EffectHalInterface.h>
+
+namespace android {
+
+class EffectHalLocal : public EffectHalInterface
+{
+ public:
+ // Effect process function. Takes input samples as specified
+ // in input buffer descriptor and output processed samples as specified
+ // in output buffer descriptor.
+ virtual status_t process(audio_buffer_t *inBuffer, audio_buffer_t *outBuffer);
+
+ // Process reverse stream function. This function is used to pass
+ // a reference stream to the effect engine.
+ virtual status_t processReverse(audio_buffer_t *inBuffer, audio_buffer_t *outBuffer);
+
+ // Send a command and receive a response to/from effect engine.
+ virtual status_t command(uint32_t cmdCode, uint32_t cmdSize, void *pCmdData,
+ uint32_t *replySize, void *pReplyData);
+
+ // Returns the effect descriptor.
+ virtual status_t getDescriptor(effect_descriptor_t *pDescriptor);
+
+ effect_handle_t handle() const { return mHandle; }
+
+ private:
+ effect_handle_t mHandle;
+
+ friend class EffectsFactoryHalLocal;
+
+ // Can not be constructed directly by clients.
+ explicit EffectHalLocal(effect_handle_t handle);
+
+ // The destructor automatically releases the effect.
+ virtual ~EffectHalLocal();
+};
+
+} // namespace android
+
+#endif // ANDROID_HARDWARE_EFFECT_HAL_LOCAL_H
diff --git a/media/libaudiohal/EffectsFactoryHalLocal.cpp b/media/libaudiohal/EffectsFactoryHalLocal.cpp
new file mode 100644
index 0000000..bbdef5d
--- /dev/null
+++ b/media/libaudiohal/EffectsFactoryHalLocal.cpp
@@ -0,0 +1,63 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/EffectsFactoryApi.h>
+
+#include "EffectHalLocal.h"
+#include "EffectsFactoryHalLocal.h"
+
+namespace android {
+
+// static
+sp<EffectsFactoryHalInterface> EffectsFactoryHalInterface::create() {
+ return new EffectsFactoryHalLocal();
+}
+
+// static
+bool EffectsFactoryHalInterface::isNullUuid(const effect_uuid_t *pEffectUuid) {
+ return EffectIsNullUuid(pEffectUuid);
+}
+
+status_t EffectsFactoryHalLocal::queryNumberEffects(uint32_t *pNumEffects) {
+ return EffectQueryNumberEffects(pNumEffects);
+}
+
+status_t EffectsFactoryHalLocal::getDescriptor(
+ uint32_t index, effect_descriptor_t *pDescriptor) {
+ return EffectQueryEffect(index, pDescriptor);
+}
+
+status_t EffectsFactoryHalLocal::getDescriptor(
+ const effect_uuid_t *pEffectUuid, effect_descriptor_t *pDescriptor) {
+ return EffectGetDescriptor(pEffectUuid, pDescriptor);
+}
+
+status_t EffectsFactoryHalLocal::createEffect(
+ const effect_uuid_t *pEffectUuid, int32_t sessionId, int32_t ioId,
+ sp<EffectHalInterface> *effect) {
+ effect_handle_t handle;
+ int result = EffectCreate(pEffectUuid, sessionId, ioId, &handle);
+ if (result == 0) {
+ *effect = new EffectHalLocal(handle);
+ }
+ return result;
+}
+
+status_t EffectsFactoryHalLocal::dumpEffects(int fd) {
+ return EffectDumpEffects(fd);
+}
+
+} // namespace android
diff --git a/media/libaudiohal/EffectsFactoryHalLocal.h b/media/libaudiohal/EffectsFactoryHalLocal.h
new file mode 100644
index 0000000..d5b81be
--- /dev/null
+++ b/media/libaudiohal/EffectsFactoryHalLocal.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_EFFECTS_FACTORY_HAL_LOCAL_H
+#define ANDROID_HARDWARE_EFFECTS_FACTORY_HAL_LOCAL_H
+
+#include <media/audiohal/EffectsFactoryHalInterface.h>
+
+namespace android {
+
+class EffectsFactoryHalLocal : public EffectsFactoryHalInterface
+{
+ public:
+ // Returns the number of different effects in all loaded libraries.
+ virtual status_t queryNumberEffects(uint32_t *pNumEffects);
+
+ // Returns a descriptor of the next available effect.
+ virtual status_t getDescriptor(uint32_t index,
+ effect_descriptor_t *pDescriptor);
+
+ virtual status_t getDescriptor(const effect_uuid_t *pEffectUuid,
+ effect_descriptor_t *pDescriptor);
+
+ // Creates an effect engine of the specified type.
+ // To release the effect engine, it is necessary to release references
+ // to the returned effect object.
+ virtual status_t createEffect(const effect_uuid_t *pEffectUuid,
+ int32_t sessionId, int32_t ioId,
+ sp<EffectHalInterface> *effect);
+
+ virtual status_t dumpEffects(int fd);
+
+ private:
+ friend class EffectsFactoryHalInterface;
+
+ // Can not be constructed directly by clients.
+ EffectsFactoryHalLocal() {}
+
+ virtual ~EffectsFactoryHalLocal() {}
+};
+
+} // namespace android
+
+#endif // ANDROID_HARDWARE_EFFECTS_FACTORY_HAL_LOCAL_H
diff --git a/media/libaudiohal/StreamHalLocal.cpp b/media/libaudiohal/StreamHalLocal.cpp
new file mode 100644
index 0000000..12a1222
--- /dev/null
+++ b/media/libaudiohal/StreamHalLocal.cpp
@@ -0,0 +1,258 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "StreamHalLocal"
+//#define LOG_NDEBUG 0
+
+#include <hardware/audio.h>
+#include <utils/Log.h>
+
+#include "DeviceHalLocal.h"
+#include "EffectHalLocal.h"
+#include "StreamHalLocal.h"
+
+namespace android {
+
+StreamHalLocal::StreamHalLocal(audio_stream_t *stream, sp<DeviceHalLocal> device)
+ : mDevice(device), mStream(stream) {
+}
+
+StreamHalLocal::~StreamHalLocal() {
+ mStream = 0;
+ mDevice.clear();
+}
+
+status_t StreamHalLocal::getSampleRate(uint32_t *rate) {
+ *rate = mStream->get_sample_rate(mStream);
+ return OK;
+}
+
+status_t StreamHalLocal::getBufferSize(size_t *size) {
+ *size = mStream->get_buffer_size(mStream);
+ return OK;
+}
+
+status_t StreamHalLocal::getChannelMask(audio_channel_mask_t *mask) {
+ *mask = mStream->get_channels(mStream);
+ return OK;
+}
+
+status_t StreamHalLocal::getFormat(audio_format_t *format) {
+ *format = mStream->get_format(mStream);
+ return OK;
+}
+
+status_t StreamHalLocal::getAudioProperties(
+ uint32_t *sampleRate, audio_channel_mask_t *mask, audio_format_t *format) {
+ *sampleRate = mStream->get_sample_rate(mStream);
+ *mask = mStream->get_channels(mStream);
+ *format = mStream->get_format(mStream);
+ return OK;
+}
+
+status_t StreamHalLocal::setParameters(const String8& kvPairs) {
+ return mStream->set_parameters(mStream, kvPairs.string());
+}
+
+status_t StreamHalLocal::getParameters(const String8& keys, String8 *values) {
+ char *halValues = mStream->get_parameters(mStream, keys.string());
+ if (halValues != NULL) {
+ values->setTo(halValues);
+ free(halValues);
+ } else {
+ values->clear();
+ }
+ return OK;
+}
+
+status_t StreamHalLocal::addEffect(sp<EffectHalInterface> effect) {
+ return mStream->add_audio_effect(mStream,
+ static_cast<EffectHalLocal*>(effect.get())->handle());
+}
+
+status_t StreamHalLocal::removeEffect(sp<EffectHalInterface> effect) {
+ return mStream->remove_audio_effect(mStream,
+ static_cast<EffectHalLocal*>(effect.get())->handle());
+}
+
+status_t StreamHalLocal::standby() {
+ return mStream->standby(mStream);
+}
+
+status_t StreamHalLocal::dump(int fd) {
+ return mStream->dump(mStream, fd);
+}
+
+StreamOutHalLocal::StreamOutHalLocal(audio_stream_out_t *stream, sp<DeviceHalLocal> device)
+ : StreamHalLocal(&stream->common, device), mStream(stream) {
+}
+
+StreamOutHalLocal::~StreamOutHalLocal() {
+ mCallback.clear();
+ mDevice->closeOutputStream(mStream);
+ mStream = 0;
+}
+
+status_t StreamOutHalLocal::getFrameSize(size_t *size) {
+ *size = audio_stream_out_frame_size(mStream);
+ return OK;
+}
+
+status_t StreamOutHalLocal::getLatency(uint32_t *latency) {
+ *latency = mStream->get_latency(mStream);
+ return OK;
+}
+
+status_t StreamOutHalLocal::setVolume(float left, float right) {
+ if (mStream->set_volume == NULL) return INVALID_OPERATION;
+ return mStream->set_volume(mStream, left, right);
+}
+
+status_t StreamOutHalLocal::write(const void *buffer, size_t bytes, size_t *written) {
+ ssize_t writeResult = mStream->write(mStream, buffer, bytes);
+ if (writeResult > 0) {
+ *written = writeResult;
+ return OK;
+ } else {
+ *written = 0;
+ return writeResult;
+ }
+}
+
+status_t StreamOutHalLocal::getRenderPosition(uint32_t *dspFrames) {
+ return mStream->get_render_position(mStream, dspFrames);
+}
+
+status_t StreamOutHalLocal::getNextWriteTimestamp(int64_t *timestamp) {
+ if (mStream->get_next_write_timestamp == NULL) return INVALID_OPERATION;
+ return mStream->get_next_write_timestamp(mStream, timestamp);
+}
+
+status_t StreamOutHalLocal::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
+ if (mStream->set_callback == NULL) return INVALID_OPERATION;
+ status_t result = mStream->set_callback(mStream, StreamOutHalLocal::asyncCallback, this);
+ if (result == OK) {
+ mCallback = callback;
+ }
+ return result;
+}
+
+// static
+int StreamOutHalLocal::asyncCallback(stream_callback_event_t event, void*, void *cookie) {
+ // We act as if we gave a wp<StreamOutHalLocal> to HAL. This way we should handle
+ // correctly the case when the callback is invoked while StreamOutHalLocal's destructor is
+ // already running, because the destructor is invoked after the refcount has been atomically
+ // decremented.
+ wp<StreamOutHalLocal> weakSelf(reinterpret_cast<StreamOutHalLocal*>(cookie));
+ sp<StreamOutHalLocal> self = weakSelf.promote();
+ if (self == 0) return 0;
+ sp<StreamOutHalInterfaceCallback> callback = self->mCallback.promote();
+ if (callback == 0) return 0;
+ ALOGV("asyncCallback() event %d", event);
+ switch (event) {
+ case STREAM_CBK_EVENT_WRITE_READY:
+ callback->onWriteReady();
+ break;
+ case STREAM_CBK_EVENT_DRAIN_READY:
+ callback->onDrainReady();
+ break;
+ case STREAM_CBK_EVENT_ERROR:
+ callback->onError();
+ break;
+ default:
+ ALOGW("asyncCallback() unknown event %d", event);
+ break;
+ }
+ return 0;
+}
+
+status_t StreamOutHalLocal::supportsPauseAndResume(bool *supportsPause, bool *supportsResume) {
+ *supportsPause = mStream->pause != NULL;
+ *supportsResume = mStream->resume != NULL;
+ return OK;
+}
+
+status_t StreamOutHalLocal::pause() {
+ if (mStream->pause == NULL) return INVALID_OPERATION;
+ return mStream->pause(mStream);
+}
+
+status_t StreamOutHalLocal::resume() {
+ if (mStream->resume == NULL) return INVALID_OPERATION;
+ return mStream->resume(mStream);
+}
+
+status_t StreamOutHalLocal::supportsDrain(bool *supportsDrain) {
+ *supportsDrain = mStream->drain != NULL;
+ return OK;
+}
+
+status_t StreamOutHalLocal::drain(bool earlyNotify) {
+ if (mStream->drain == NULL) return INVALID_OPERATION;
+ return mStream->drain(mStream, earlyNotify ? AUDIO_DRAIN_EARLY_NOTIFY : AUDIO_DRAIN_ALL);
+}
+
+status_t StreamOutHalLocal::flush() {
+ if (mStream->flush == NULL) return INVALID_OPERATION;
+ return mStream->flush(mStream);
+}
+
+status_t StreamOutHalLocal::getPresentationPosition(uint64_t *frames, struct timespec *timestamp) {
+ if (mStream->get_presentation_position == NULL) return INVALID_OPERATION;
+ return mStream->get_presentation_position(mStream, frames, timestamp);
+}
+
+
+StreamInHalLocal::StreamInHalLocal(audio_stream_in_t *stream, sp<DeviceHalLocal> device)
+ : StreamHalLocal(&stream->common, device), mStream(stream) {
+}
+
+StreamInHalLocal::~StreamInHalLocal() {
+ mDevice->closeInputStream(mStream);
+ mStream = 0;
+}
+
+status_t StreamInHalLocal::getFrameSize(size_t *size) {
+ *size = audio_stream_in_frame_size(mStream);
+ return OK;
+}
+
+status_t StreamInHalLocal::setGain(float gain) {
+ return mStream->set_gain(mStream, gain);
+}
+
+status_t StreamInHalLocal::read(void *buffer, size_t bytes, size_t *read) {
+ ssize_t readResult = mStream->read(mStream, buffer, bytes);
+ if (readResult > 0) {
+ *read = readResult;
+ return OK;
+ } else {
+ *read = 0;
+ return readResult;
+ }
+}
+
+status_t StreamInHalLocal::getInputFramesLost(uint32_t *framesLost) {
+ *framesLost = mStream->get_input_frames_lost(mStream);
+ return OK;
+}
+
+status_t StreamInHalLocal::getCapturePosition(int64_t *frames, int64_t *time) {
+ if (mStream->get_capture_position == NULL) return INVALID_OPERATION;
+ return mStream->get_capture_position(mStream, frames, time);
+}
+
+} // namespace android
diff --git a/media/libaudiohal/StreamHalLocal.h b/media/libaudiohal/StreamHalLocal.h
new file mode 100644
index 0000000..1df18cc
--- /dev/null
+++ b/media/libaudiohal/StreamHalLocal.h
@@ -0,0 +1,163 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_STREAM_HAL_LOCAL_H
+#define ANDROID_HARDWARE_STREAM_HAL_LOCAL_H
+
+#include <media/audiohal/StreamHalInterface.h>
+
+namespace android {
+
+class DeviceHalLocal;
+
+class StreamHalLocal : public virtual StreamHalInterface
+{
+ public:
+ // Return the sampling rate in Hz - eg. 44100.
+ virtual status_t getSampleRate(uint32_t *rate);
+
+ // Return size of input/output buffer in bytes for this stream - eg. 4800.
+ virtual status_t getBufferSize(size_t *size);
+
+ // Return the channel mask.
+ virtual status_t getChannelMask(audio_channel_mask_t *mask);
+
+ // Return the audio format - e.g. AUDIO_FORMAT_PCM_16_BIT.
+ virtual status_t getFormat(audio_format_t *format);
+
+ // Convenience method.
+ virtual status_t getAudioProperties(
+ uint32_t *sampleRate, audio_channel_mask_t *mask, audio_format_t *format);
+
+ // Set audio stream parameters.
+ virtual status_t setParameters(const String8& kvPairs);
+
+ // Get audio stream parameters.
+ virtual status_t getParameters(const String8& keys, String8 *values);
+
+ // Add or remove the effect on the stream.
+ virtual status_t addEffect(sp<EffectHalInterface> effect);
+ virtual status_t removeEffect(sp<EffectHalInterface> effect);
+
+ // Put the audio hardware input/output into standby mode.
+ virtual status_t standby();
+
+ virtual status_t dump(int fd);
+
+ protected:
+ // Subclasses can not be constructed directly by clients.
+ StreamHalLocal(audio_stream_t *stream, sp<DeviceHalLocal> device);
+
+ // The destructor automatically closes the stream.
+ virtual ~StreamHalLocal();
+
+ sp<DeviceHalLocal> mDevice;
+
+ private:
+ audio_stream_t *mStream;
+};
+
+class StreamOutHalLocal : public StreamOutHalInterface, public StreamHalLocal {
+ public:
+ // Return the frame size (number of bytes per audio frame) of a stream.
+ virtual status_t getFrameSize(size_t *size);
+
+ // Return the audio hardware driver estimated latency in milliseconds.
+ virtual status_t getLatency(uint32_t *latency);
+
+ // Use this method in situations where audio mixing is done in the hardware.
+ virtual status_t setVolume(float left, float right);
+
+ // Write audio buffer to driver.
+ virtual status_t write(const void *buffer, size_t bytes, size_t *written);
+
+ // Return the number of audio frames written by the audio dsp to DAC since
+ // the output has exited standby.
+ virtual status_t getRenderPosition(uint32_t *dspFrames);
+
+ // Get the local time at which the next write to the audio driver will be presented.
+ virtual status_t getNextWriteTimestamp(int64_t *timestamp);
+
+ // Set the callback for notifying completion of non-blocking write and drain.
+ virtual status_t setCallback(wp<StreamOutHalInterfaceCallback> callback);
+
+ // Returns whether pause and resume operations are supported.
+ virtual status_t supportsPauseAndResume(bool *supportsPause, bool *supportsResume);
+
+ // Notifies to the audio driver to pause playback.
+ virtual status_t pause();
+
+ // Notifies to the audio driver to resume playback following a pause.
+ virtual status_t resume();
+
+ // Returns whether drain operation is supported.
+ virtual status_t supportsDrain(bool *supportsDrain);
+
+ // Requests notification when data buffered by the driver/hardware has been played.
+ virtual status_t drain(bool earlyNotify);
+
+ // Notifies to the audio driver to flush the queued data.
+ virtual status_t flush();
+
+ // Return a recent count of the number of audio frames presented to an external observer.
+ virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp);
+
+ private:
+ audio_stream_out_t *mStream;
+ wp<StreamOutHalInterfaceCallback> mCallback;
+
+ friend class DeviceHalLocal;
+
+ // Can not be constructed directly by clients.
+ StreamOutHalLocal(audio_stream_out_t *stream, sp<DeviceHalLocal> device);
+
+ virtual ~StreamOutHalLocal();
+
+ static int asyncCallback(stream_callback_event_t event, void *param, void *cookie);
+};
+
+class StreamInHalLocal : public StreamInHalInterface, public StreamHalLocal {
+ public:
+ // Return the frame size (number of bytes per audio frame) of a stream.
+ virtual status_t getFrameSize(size_t *size);
+
+ // Set the input gain for the audio driver.
+ virtual status_t setGain(float gain);
+
+ // Read audio buffer in from driver.
+ virtual status_t read(void *buffer, size_t bytes, size_t *read);
+
+ // Return the amount of input frames lost in the audio driver.
+ virtual status_t getInputFramesLost(uint32_t *framesLost);
+
+ // Return a recent count of the number of audio frames received and
+ // the clock time associated with that frame count.
+ virtual status_t getCapturePosition(int64_t *frames, int64_t *time);
+
+ private:
+ audio_stream_in_t *mStream;
+
+ friend class DeviceHalLocal;
+
+ // Can not be constructed directly by clients.
+ StreamInHalLocal(audio_stream_in_t *stream, sp<DeviceHalLocal> device);
+
+ virtual ~StreamInHalLocal();
+};
+
+} // namespace android
+
+#endif // ANDROID_HARDWARE_STREAM_HAL_LOCAL_H
diff --git a/media/libeffects/downmix/Android.mk b/media/libeffects/downmix/Android.mk
index e0ca8af..78601d5 100644
--- a/media/libeffects/downmix/Android.mk
+++ b/media/libeffects/downmix/Android.mk
@@ -20,5 +20,6 @@
$(call include-path-for, audio-utils)
LOCAL_CFLAGS += -fvisibility=hidden
+LOCAL_CFLAGS += -Wall -Werror
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libeffects/downmix/EffectDownmix.c b/media/libeffects/downmix/EffectDownmix.c
index 9823c55..6cbe78d 100644
--- a/media/libeffects/downmix/EffectDownmix.c
+++ b/media/libeffects/downmix/EffectDownmix.c
@@ -389,7 +389,6 @@
downmix_module_t *pDwmModule = (downmix_module_t *) self;
downmix_object_t *pDownmixer;
- int retsize;
if (pDwmModule == NULL || pDwmModule->context.state == DOWNMIX_STATE_UNINITIALIZED) {
return -EINVAL;
diff --git a/media/libeffects/factory/EffectsFactory.c b/media/libeffects/factory/EffectsFactory.c
index db7865a..606608a 100644
--- a/media/libeffects/factory/EffectsFactory.c
+++ b/media/libeffects/factory/EffectsFactory.c
@@ -25,7 +25,7 @@
#include <cutils/misc.h>
#include <cutils/config_utils.h>
#include <cutils/properties.h>
-#include <audio_effects/audio_effects_conf.h>
+#include <system/audio_effects/audio_effects_conf.h>
static list_elem_t *gEffectList; // list of effect_entry_t: all currently created effects
static list_elem_t *gLibraryList; // list of lib_entry_t: all currently loaded libraries
diff --git a/media/libeffects/loudness/Android.mk b/media/libeffects/loudness/Android.mk
index 55d0611..3db4a79 100644
--- a/media/libeffects/loudness/Android.mk
+++ b/media/libeffects/loudness/Android.mk
@@ -8,6 +8,7 @@
dsp/core/dynamic_range_compression.cpp
LOCAL_CFLAGS+= -O2 -fvisibility=hidden
+LOCAL_CFLAGS += -Wall -Werror
LOCAL_SHARED_LIBRARIES := \
libcutils \
diff --git a/media/libeffects/loudness/EffectLoudnessEnhancer.cpp b/media/libeffects/loudness/EffectLoudnessEnhancer.cpp
index a5a1a3f..11c6133 100644
--- a/media/libeffects/loudness/EffectLoudnessEnhancer.cpp
+++ b/media/libeffects/loudness/EffectLoudnessEnhancer.cpp
@@ -194,7 +194,6 @@
effect_handle_t *pHandle) {
ALOGV("LELib_Create()");
int ret;
- int i;
if (pHandle == NULL || uuid == NULL) {
return -EINVAL;
@@ -311,7 +310,6 @@
void *pCmdData, uint32_t *replySize, void *pReplyData) {
LoudnessEnhancerContext * pContext = (LoudnessEnhancerContext *)self;
- int retsize;
if (pContext == NULL || pContext->mState == LOUDNESS_ENHANCER_STATE_UNINITIALIZED) {
return -EINVAL;
diff --git a/media/libeffects/lvm/lib/Android.mk b/media/libeffects/lvm/lib/Android.mk
index bb56c75..afc87bb 100644
--- a/media/libeffects/lvm/lib/Android.mk
+++ b/media/libeffects/lvm/lib/Android.mk
@@ -120,6 +120,7 @@
$(LOCAL_PATH)/StereoWidening/lib
LOCAL_CFLAGS += -fvisibility=hidden
+LOCAL_CFLAGS += -Wall -Werror
include $(BUILD_STATIC_LIBRARY)
@@ -177,4 +178,5 @@
$(LOCAL_PATH)/Common/src
LOCAL_CFLAGS += -fvisibility=hidden
+LOCAL_CFLAGS += -Wall -Werror
include $(BUILD_STATIC_LIBRARY)
diff --git a/media/libeffects/lvm/wrapper/Android.mk b/media/libeffects/lvm/wrapper/Android.mk
index 4e38e3d..9051587 100644
--- a/media/libeffects/lvm/wrapper/Android.mk
+++ b/media/libeffects/lvm/wrapper/Android.mk
@@ -10,6 +10,7 @@
Bundle/EffectBundle.cpp
LOCAL_CFLAGS += -fvisibility=hidden
+LOCAL_CFLAGS += -Wall -Werror
LOCAL_MODULE:= libbundlewrapper
@@ -40,6 +41,7 @@
Reverb/EffectReverb.cpp
LOCAL_CFLAGS += -fvisibility=hidden
+LOCAL_CFLAGS += -Wall -Werror
LOCAL_MODULE:= libreverbwrapper
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
index b79bf2a..2dfdfde 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
@@ -501,8 +501,6 @@
//----------------------------------------------------------------------------
int LvmBundle_init(EffectContext *pContext){
- int status;
-
ALOGV("\tLvmBundle_init start");
pContext->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
@@ -716,7 +714,6 @@
int frameCount,
EffectContext *pContext){
- LVM_ControlParams_t ActiveParams; /* Current control Parameters */
LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
LVM_INT16 *pOutTmp;
@@ -1040,7 +1037,6 @@
void LvmEffect_free(EffectContext *pContext){
LVM_ReturnStatus_en LvmStatus=LVM_SUCCESS; /* Function call status */
- LVM_ControlParams_t params; /* Control Parameters */
LVM_MemTab_t MemTab;
/* Free the algorithm memory */
@@ -2007,8 +2003,6 @@
int status = 0;
int32_t *pParamTemp = (int32_t *)pParam;
int32_t param = *pParamTemp++;
- int32_t param2;
- char *name;
//ALOGV("\tBassBoost_getParameter start");
@@ -2125,7 +2119,6 @@
int status = 0;
int32_t *pParamTemp = (int32_t *)pParam;
int32_t param = *pParamTemp++;
- char *name;
//ALOGV("\tVirtualizer_getParameter start");
@@ -2282,7 +2275,6 @@
uint32_t *pValueSize,
void *pValue){
int status = 0;
- int bMute = 0;
int32_t *pParamTemp = (int32_t *)pParam;
int32_t param = *pParamTemp++;
int32_t param2;
@@ -2552,10 +2544,8 @@
uint32_t *pValueSize,
void *pValue){
int status = 0;
- int bMute = 0;
int32_t *pParamTemp = (int32_t *)pParam;
int32_t param = *pParamTemp++;;
- char *name;
//ALOGV("\tVolume_getParameter start");
@@ -2885,11 +2875,8 @@
audio_buffer_t *inBuffer,
audio_buffer_t *outBuffer){
EffectContext * pContext = (EffectContext *) self;
- LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
int status = 0;
int processStatus = 0;
- LVM_INT16 *in = (LVM_INT16 *)inBuffer->raw;
- LVM_INT16 *out = (LVM_INT16 *)outBuffer->raw;
//ALOGV("\tEffect_process Start : Enabled = %d Called = %d (%8d %8d %8d)",
//pContext->pBundledContext->NumberEffectsEnabled,pContext->pBundledContext->NumberEffectsCalled,
@@ -3018,7 +3005,6 @@
uint32_t *replySize,
void *pReplyData){
EffectContext * pContext = (EffectContext *) self;
- int retsize;
//ALOGV("\t\nEffect_command start");
@@ -3402,7 +3388,6 @@
int16_t leftdB, rightdB;
int16_t maxdB, pandB;
int32_t vol_ret[2] = {1<<24,1<<24}; // Apply no volume
- int status = 0;
LVM_ControlParams_t ActiveParams; /* Current control Parameters */
LVM_ReturnStatus_en LvmStatus=LVM_SUCCESS; /* Function call status */
diff --git a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
index f7dcdda..f8b1a8e 100644
--- a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
@@ -326,6 +326,7 @@
} \
}
+#if 0
//----------------------------------------------------------------------------
// MonoTo2I_32()
//----------------------------------------------------------------------------
@@ -384,6 +385,7 @@
return;
}
+#endif
static inline int16_t clamp16(int32_t sample)
{
@@ -559,7 +561,6 @@
void Reverb_free(ReverbContext *pContext){
LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */
- LVREV_ControlParams_st params; /* Control Parameters */
LVREV_MemoryTable_st MemTab;
/* Free the algorithm memory */
@@ -708,8 +709,6 @@
//----------------------------------------------------------------------------
int Reverb_init(ReverbContext *pContext){
- int status;
-
ALOGV("\tReverb_init start");
CHECK_ARG(pContext != NULL);
@@ -1542,7 +1541,6 @@
int status = 0;
int32_t *pParamTemp = (int32_t *)pParam;
int32_t param = *pParamTemp++;
- char *name;
t_reverb_settings *pProperties;
//ALOGV("\tReverb_getParameter start");
@@ -1898,7 +1896,6 @@
uint32_t *replySize,
void *pReplyData){
android::ReverbContext * pContext = (android::ReverbContext *) self;
- int retsize;
LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */
diff --git a/media/libeffects/preprocessing/Android.mk b/media/libeffects/preprocessing/Android.mk
index bd67aa1..60030ac 100644
--- a/media/libeffects/preprocessing/Android.mk
+++ b/media/libeffects/preprocessing/Android.mk
@@ -28,5 +28,6 @@
-DWEBRTC_POSIX
LOCAL_CFLAGS += -fvisibility=hidden
+LOCAL_CFLAGS += -Wall -Werror
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libeffects/preprocessing/PreProcessing.cpp b/media/libeffects/preprocessing/PreProcessing.cpp
index f48bac1..ccfd29c 100644
--- a/media/libeffects/preprocessing/PreProcessing.cpp
+++ b/media/libeffects/preprocessing/PreProcessing.cpp
@@ -933,7 +933,6 @@
int Session_SetConfig(preproc_session_t *session, effect_config_t *config)
{
- uint32_t sr;
uint32_t inCnl = audio_channel_count_from_in_mask(config->inputCfg.channels);
uint32_t outCnl = audio_channel_count_from_in_mask(config->outputCfg.channels);
@@ -1153,7 +1152,6 @@
preproc_session_t *PreProc_GetSession(int32_t procId, int32_t sessionId, int32_t ioId)
{
size_t i;
- int free = -1;
for (i = 0; i < PREPROC_NUM_SESSIONS; i++) {
if (sSessions[i].io == ioId) {
if (sSessions[i].createdMsk & (1 << procId)) {
@@ -1210,7 +1208,6 @@
audio_buffer_t *outBuffer)
{
preproc_effect_t * effect = (preproc_effect_t *)self;
- int status = 0;
if (effect == NULL){
ALOGV("PreProcessingFx_Process() ERROR effect == NULL");
@@ -1402,8 +1399,6 @@
void *pReplyData)
{
preproc_effect_t * effect = (preproc_effect_t *) self;
- int retsize;
- int status;
if (effect == NULL){
return -EINVAL;
@@ -1777,7 +1772,6 @@
audio_buffer_t *outBuffer __unused)
{
preproc_effect_t * effect = (preproc_effect_t *)self;
- int status = 0;
if (effect == NULL){
ALOGW("PreProcessingFx_ProcessReverse() ERROR effect == NULL");
@@ -1926,7 +1920,6 @@
int PreProcessingLib_Release(effect_handle_t interface)
{
- int status;
ALOGV("EffectRelease start %p", interface);
if (PreProc_Init() != 0) {
return sInitStatus;
diff --git a/media/libeffects/proxy/Android.mk b/media/libeffects/proxy/Android.mk
index 2ba452e..0acf1c0 100644
--- a/media/libeffects/proxy/Android.mk
+++ b/media/libeffects/proxy/Android.mk
@@ -23,6 +23,7 @@
EffectProxy.cpp
LOCAL_CFLAGS+= -fvisibility=hidden
+LOCAL_CFLAGS += -Wall -Werror
LOCAL_SHARED_LIBRARIES := liblog libcutils libutils libdl libeffects
diff --git a/media/libeffects/proxy/EffectProxy.cpp b/media/libeffects/proxy/EffectProxy.cpp
index 7f777e7..14ded6a 100644
--- a/media/libeffects/proxy/EffectProxy.cpp
+++ b/media/libeffects/proxy/EffectProxy.cpp
@@ -44,12 +44,6 @@
};
-static const effect_descriptor_t *const gDescriptors[] =
-{
- &gProxyDescriptor,
-};
-
-
int EffectProxyCreate(const effect_uuid_t *uuid,
int32_t sessionId,
int32_t ioId,
@@ -242,6 +236,11 @@
// pCmdData points to a memory holding effect_offload_param_t structure
if (cmdCode == EFFECT_CMD_OFFLOAD) {
ALOGV("Effect_command() cmdCode = EFFECT_CMD_OFFLOAD");
+ if (replySize == NULL || *replySize < sizeof(int)) {
+ ALOGV("effectsOffload: Effect_command: CMD_OFFLOAD has no reply");
+ android_errorWriteLog(0x534e4554, "32448121");
+ return FAILED_TRANSACTION;
+ }
if (cmdSize == 0 || pCmdData == NULL) {
ALOGV("effectsOffload: Effect_command: CMD_OFFLOAD has no data");
*(int*)pReplyData = FAILED_TRANSACTION;
diff --git a/media/libeffects/visualizer/Android.mk b/media/libeffects/visualizer/Android.mk
index c92c543..ddcc565 100644
--- a/media/libeffects/visualizer/Android.mk
+++ b/media/libeffects/visualizer/Android.mk
@@ -7,6 +7,7 @@
EffectVisualizer.cpp
LOCAL_CFLAGS+= -O2 -fvisibility=hidden
+LOCAL_CFLAGS += -Wall -Werror
LOCAL_SHARED_LIBRARIES := \
libcutils \
diff --git a/media/libeffects/visualizer/EffectVisualizer.cpp b/media/libeffects/visualizer/EffectVisualizer.cpp
index b7d27d6..1d9801f 100644
--- a/media/libeffects/visualizer/EffectVisualizer.cpp
+++ b/media/libeffects/visualizer/EffectVisualizer.cpp
@@ -235,7 +235,6 @@
int32_t /*ioId*/,
effect_handle_t *pHandle) {
int ret;
- int i;
if (pHandle == NULL || uuid == NULL) {
return -EINVAL;
@@ -417,7 +416,6 @@
void *pCmdData, uint32_t *replySize, void *pReplyData) {
VisualizerContext * pContext = (VisualizerContext *)self;
- int retsize;
if (pContext == NULL || pContext->mState == VISUALIZER_STATE_UNINITIALIZED) {
return -EINVAL;
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 7fde4b2..6e28ba9 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -1,6 +1,6 @@
cc_library_static {
name: "libmedia_helper",
- srcs: ["AudioParameter.cpp"],
+ srcs: ["AudioParameter.cpp", "TypeConverter.cpp"],
cflags: [
"-Werror",
"-Wno-error=deprecated-declarations",
diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk
index 3ba7ec1..2e4cf7d 100644
--- a/media/libmedia/Android.mk
+++ b/media/libmedia/Android.mk
@@ -2,16 +2,19 @@
include $(CLEAR_VARS)
+LOCAL_AIDL_INCLUDES := \
+ frameworks/av/media/libmedia/aidl
+
LOCAL_SRC_FILES:= \
- ICrypto.cpp \
+ aidl/android/IGraphicBufferSource.aidl \
+ aidl/android/IOMXBufferSource.aidl
+
+LOCAL_SRC_FILES += \
IDataSource.cpp \
- IDrm.cpp \
- IDrmClient.cpp \
IHDCP.cpp \
mediaplayer.cpp \
IMediaCodecList.cpp \
IMediaCodecService.cpp \
- IMediaDrmService.cpp \
IMediaHTTPConnection.cpp \
IMediaHTTPService.cpp \
IMediaExtractor.cpp \
@@ -27,12 +30,15 @@
IResourceManagerClient.cpp \
IResourceManagerService.cpp \
IStreamSource.cpp \
+ MediaCodecBuffer.cpp \
MediaCodecInfo.cpp \
+ MediaDefs.cpp \
MediaUtils.cpp \
Metadata.cpp \
mediarecorder.cpp \
IMediaMetadataRetriever.cpp \
mediametadataretriever.cpp \
+ MidiDeviceInfo.cpp \
MidiIoWrapper.cpp \
JetPlayer.cpp \
IOMX.cpp \
@@ -43,6 +49,7 @@
MediaProfiles.cpp \
MediaResource.cpp \
MediaResourcePolicy.cpp \
+ OMXBuffer.cpp \
Visualizer.cpp \
StringArray.cpp \
@@ -66,11 +73,14 @@
$(TOP)/frameworks/native/include/media/openmax \
$(TOP)/frameworks/av/include/media/ \
$(TOP)/frameworks/av/media/libstagefright \
- $(call include-path-for, audio-effects) \
+ $(TOP)/frameworks/av/media/libmedia/aidl \
$(call include-path-for, audio-utils)
+LOCAL_EXPORT_C_INCLUDE_DIRS := \
+ frameworks/av/include/media \
+ frameworks/av/media/libmedia/aidl
+
LOCAL_CFLAGS += -Werror -Wno-error=deprecated-declarations -Wall
-LOCAL_CLANG := true
LOCAL_SANITIZE := unsigned-integer-overflow signed-integer-overflow
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libmedia/AudioParameter.cpp b/media/libmedia/AudioParameter.cpp
index 8c8cf45..7871ede 100644
--- a/media/libmedia/AudioParameter.cpp
+++ b/media/libmedia/AudioParameter.cpp
@@ -19,8 +19,8 @@
#include <utils/Log.h>
-#include <hardware/audio.h>
#include <media/AudioParameter.h>
+#include <system/audio.h>
namespace android {
@@ -32,6 +32,19 @@
const char * const AudioParameter::keyFrameCount = AUDIO_PARAMETER_STREAM_FRAME_COUNT;
const char * const AudioParameter::keyInputSource = AUDIO_PARAMETER_STREAM_INPUT_SOURCE;
const char * const AudioParameter::keyScreenState = AUDIO_PARAMETER_KEY_SCREEN_STATE;
+const char * const AudioParameter::keyBtNrec = AUDIO_PARAMETER_KEY_BT_NREC;
+const char * const AudioParameter::keyHwAvSync = AUDIO_PARAMETER_HW_AV_SYNC;
+const char * const AudioParameter::keyMonoOutput = AUDIO_PARAMETER_MONO_OUTPUT;
+const char * const AudioParameter::keyStreamHwAvSync = AUDIO_PARAMETER_STREAM_HW_AV_SYNC;
+const char * const AudioParameter::keyStreamConnect = AUDIO_PARAMETER_DEVICE_CONNECT;
+const char * const AudioParameter::keyStreamDisconnect = AUDIO_PARAMETER_DEVICE_DISCONNECT;
+const char * const AudioParameter::keyStreamSupportedFormats = AUDIO_PARAMETER_STREAM_SUP_FORMATS;
+const char * const AudioParameter::keyStreamSupportedChannels = AUDIO_PARAMETER_STREAM_SUP_CHANNELS;
+const char * const AudioParameter::keyStreamSupportedSamplingRates =
+ AUDIO_PARAMETER_STREAM_SUP_SAMPLING_RATES;
+const char * const AudioParameter::valueOn = AUDIO_PARAMETER_VALUE_ON;
+const char * const AudioParameter::valueOff = AUDIO_PARAMETER_VALUE_OFF;
+const char * const AudioParameter::valueListSeparator = AUDIO_PARAMETER_VALUE_LIST_SEPARATOR;
AudioParameter::AudioParameter(const String8& keyValuePairs)
{
@@ -70,7 +83,7 @@
mParameters.clear();
}
-String8 AudioParameter::toString()
+String8 AudioParameter::toString() const
{
String8 str = String8("");
@@ -127,7 +140,7 @@
}
}
-status_t AudioParameter::get(const String8& key, String8& value)
+status_t AudioParameter::get(const String8& key, String8& value) const
{
if (mParameters.indexOfKey(key) >= 0) {
value = mParameters.valueFor(key);
@@ -137,7 +150,7 @@
}
}
-status_t AudioParameter::getInt(const String8& key, int& value)
+status_t AudioParameter::getInt(const String8& key, int& value) const
{
String8 str8;
status_t result = get(key, str8);
@@ -153,7 +166,7 @@
return result;
}
-status_t AudioParameter::getFloat(const String8& key, float& value)
+status_t AudioParameter::getFloat(const String8& key, float& value) const
{
String8 str8;
status_t result = get(key, str8);
@@ -169,7 +182,7 @@
return result;
}
-status_t AudioParameter::getAt(size_t index, String8& key, String8& value)
+status_t AudioParameter::getAt(size_t index, String8& key, String8& value) const
{
if (mParameters.size() > index) {
key = mParameters.keyAt(index);
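The AudioParameter hunk above const-qualifies the getters and defines key constants taken from system/audio.h. As a rough illustrative sketch (separate from the change itself, with the -1 fallback chosen only for the example), the const overloads let a caller query a parameter bundle it received by const reference:

#include <media/AudioParameter.h>
#include <utils/String8.h>

using namespace android;

// Sketch: read a value back out through the now-const getInt().
static int getScreenState(const AudioParameter &param) {
    int screenState = -1;   // left untouched (-1) if the key is absent
    (void)param.getInt(String8(AudioParameter::keyScreenState), screenState);
    return screenState;
}

// Construction side, mirroring how key/value strings are parsed:
//   AudioParameter param(String8("screen_state=1"));
//   getScreenState(param);   // -> 1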
diff --git a/media/libmedia/IMediaCodecService.cpp b/media/libmedia/IMediaCodecService.cpp
index dcf2b27..2d62419 100644
--- a/media/libmedia/IMediaCodecService.cpp
+++ b/media/libmedia/IMediaCodecService.cpp
@@ -33,7 +33,7 @@
class BpMediaCodecService : public BpInterface<IMediaCodecService>
{
public:
- BpMediaCodecService(const sp<IBinder>& impl)
+ explicit BpMediaCodecService(const sp<IBinder>& impl)
: BpInterface<IMediaCodecService>(impl)
{
}
diff --git a/media/libmedia/IMediaExtractor.cpp b/media/libmedia/IMediaExtractor.cpp
index 72d1d7c..94c96f6 100644
--- a/media/libmedia/IMediaExtractor.cpp
+++ b/media/libmedia/IMediaExtractor.cpp
@@ -43,7 +43,7 @@
class BpMediaExtractor : public BpInterface<IMediaExtractor> {
public:
- BpMediaExtractor(const sp<IBinder>& impl)
+ explicit BpMediaExtractor(const sp<IBinder>& impl)
: BpInterface<IMediaExtractor>(impl)
{
}
diff --git a/media/libmedia/IMediaExtractorService.cpp b/media/libmedia/IMediaExtractorService.cpp
index d170c22..8b00d85 100644
--- a/media/libmedia/IMediaExtractorService.cpp
+++ b/media/libmedia/IMediaExtractorService.cpp
@@ -33,7 +33,7 @@
class BpMediaExtractorService : public BpInterface<IMediaExtractorService>
{
public:
- BpMediaExtractorService(const sp<IBinder>& impl)
+ explicit BpMediaExtractorService(const sp<IBinder>& impl)
: BpInterface<IMediaExtractorService>(impl)
{
}
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index f8345e4..41b6988 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -246,11 +246,12 @@
return reply.readInt32();
}
- status_t seekTo(int msec)
+ status_t seekTo(int msec, bool precise)
{
Parcel data, reply;
data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
data.writeInt32(msec);
+ data.writeBool(precise);
remote()->transact(SEEK_TO, data, &reply);
return reply.readInt32();
}
@@ -573,7 +574,9 @@
} break;
case SEEK_TO: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
- reply->writeInt32(seekTo(data.readInt32()));
+ int msec = data.readInt32();
+ bool precise = data.readBool();
+ reply->writeInt32(seekTo(msec, precise));
return NO_ERROR;
} break;
case GET_CURRENT_POSITION: {
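For reference, a caller-side sketch (separate from the change itself; the 5000 ms position is only an example) of the widened seekTo(): the second argument is the new precise flag, written into the Parcel right after the int32 position to match the BnMediaPlayer side above.

#include <media/IMediaPlayer.h>

using namespace android;

// Sketch: request an exact-position seek through the extended binder call.
static status_t seekPrecisely(const sp<IMediaPlayer> &player, int positionMs) {
    return player->seekTo(positionMs, true /* precise */);
}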
diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp
index a6860e2..5599830 100644
--- a/media/libmedia/IMediaRecorder.cpp
+++ b/media/libmedia/IMediaRecorder.cpp
@@ -29,6 +29,7 @@
#include <media/IMediaRecorder.h>
#include <gui/Surface.h>
#include <gui/IGraphicBufferProducer.h>
+#include <media/stagefright/PersistentSurface.h>
namespace android {
@@ -79,12 +80,12 @@
return reply.readInt32();
}
- status_t setInputSurface(const sp<IGraphicBufferConsumer>& surface)
+ status_t setInputSurface(const sp<PersistentSurface>& surface)
{
ALOGV("setInputSurface(%p)", surface.get());
Parcel data, reply;
data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
- data.writeStrongBinder(IInterface::asBinder(surface));
+ surface->writeToParcel(&data);
remote()->transact(SET_INPUT_SURFACE, data, &reply);
return reply.readInt32();
}
@@ -490,8 +491,8 @@
case SET_INPUT_SURFACE: {
ALOGV("SET_INPUT_SURFACE");
CHECK_INTERFACE(IMediaRecorder, data, reply);
- sp<IGraphicBufferConsumer> surface = interface_cast<IGraphicBufferConsumer>(
- data.readStrongBinder());
+ sp<PersistentSurface> surface = new PersistentSurface();
+ surface->readFromParcel(&data);
reply->writeInt32(setInputSurface(surface));
return NO_ERROR;
} break;
diff --git a/media/libmedia/IMediaSource.cpp b/media/libmedia/IMediaSource.cpp
index 595bad9..fdbc869 100644
--- a/media/libmedia/IMediaSource.cpp
+++ b/media/libmedia/IMediaSource.cpp
@@ -67,7 +67,7 @@
class BpMediaSource : public BpInterface<IMediaSource> {
public:
- BpMediaSource(const sp<IBinder>& impl)
+ explicit BpMediaSource(const sp<IBinder>& impl)
: BpInterface<IMediaSource>(impl), mBuffersSinceStop(0)
{
}
diff --git a/media/libmedia/IOMX.cpp b/media/libmedia/IOMX.cpp
index ddfb6f1..1a6d6b8 100644
--- a/media/libmedia/IOMX.cpp
+++ b/media/libmedia/IOMX.cpp
@@ -26,12 +26,15 @@
#include <media/stagefright/foundation/ADebug.h>
#include <media/openmax/OMX_IndexExt.h>
#include <utils/NativeHandle.h>
+#include <media/OMXBuffer.h>
+
+#include <android/IGraphicBufferSource.h>
+#include <android/IOMXBufferSource.h>
namespace android {
enum {
CONNECT = IBinder::FIRST_CALL_TRANSACTION,
- LIVES_LOCALLY,
LIST_NODES,
ALLOCATE_NODE,
FREE_NODE,
@@ -40,28 +43,22 @@
SET_PARAMETER,
GET_CONFIG,
SET_CONFIG,
- GET_STATE,
ENABLE_NATIVE_BUFFERS,
USE_BUFFER,
- USE_GRAPHIC_BUFFER,
CREATE_INPUT_SURFACE,
- CREATE_PERSISTENT_INPUT_SURFACE,
SET_INPUT_SURFACE,
- SIGNAL_END_OF_INPUT_STREAM,
STORE_META_DATA_IN_BUFFERS,
PREPARE_FOR_ADAPTIVE_PLAYBACK,
ALLOC_SECURE_BUFFER,
- ALLOC_BUFFER_WITH_BACKUP,
FREE_BUFFER,
FILL_BUFFER,
EMPTY_BUFFER,
GET_EXTENSION_INDEX,
OBSERVER_ON_MSG,
GET_GRAPHIC_BUFFER_USAGE,
- SET_INTERNAL_OPTION,
- UPDATE_GRAPHIC_BUFFER_IN_META,
CONFIGURE_VIDEO_TUNNEL_MODE,
- UPDATE_NATIVE_HANDLE_IN_META,
+ DISPATCH_MESSAGE,
+ SET_QUIRKS,
};
class BpOMX : public BpInterface<IOMX> {
@@ -70,16 +67,6 @@
: BpInterface<IOMX>(impl) {
}
- virtual bool livesLocally(node_id node, pid_t pid) {
- Parcel data, reply;
- data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeInt32((int32_t)node);
- data.writeInt32(pid);
- remote()->transact(LIVES_LOCALLY, data, &reply);
-
- return reply.readInt32() != 0;
- }
-
virtual status_t listNodes(List<ComponentInfo> *list) {
list->clear();
@@ -104,8 +91,7 @@
virtual status_t allocateNode(
const char *name, const sp<IOMXObserver> &observer,
- sp<IBinder> *nodeBinder,
- node_id *node) {
+ sp<IOMXNode> *omxNode) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
data.writeCString(name);
@@ -114,31 +100,58 @@
status_t err = reply.readInt32();
if (err == OK) {
- *node = (node_id)reply.readInt32();
- if (nodeBinder != NULL) {
- *nodeBinder = remote();
- }
+ *omxNode = IOMXNode::asInterface(reply.readStrongBinder());
} else {
- *node = 0;
+ omxNode->clear();
}
return err;
}
- virtual status_t freeNode(node_id node) {
+ virtual status_t createInputSurface(
+ sp<IGraphicBufferProducer> *bufferProducer,
+ sp<IGraphicBufferSource> *bufferSource) {
Parcel data, reply;
+ status_t err;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeInt32((int32_t)node);
+ err = remote()->transact(CREATE_INPUT_SURFACE, data, &reply);
+ if (err != OK) {
+ ALOGW("binder transaction failed: %d", err);
+ return err;
+ }
+
+ err = reply.readInt32();
+ if (err != OK) {
+ return err;
+ }
+
+ *bufferProducer = IGraphicBufferProducer::asInterface(
+ reply.readStrongBinder());
+ *bufferSource = IGraphicBufferSource::asInterface(
+ reply.readStrongBinder());
+
+ return err;
+ }
+};
+
+class BpOMXNode : public BpInterface<IOMXNode> {
+public:
+ explicit BpOMXNode(const sp<IBinder> &impl)
+ : BpInterface<IOMXNode>(impl) {
+ }
+
+ virtual status_t freeNode() {
+ Parcel data, reply;
+ data.writeInterfaceToken(IOMXNode::getInterfaceDescriptor());
remote()->transact(FREE_NODE, data, &reply);
return reply.readInt32();
}
virtual status_t sendCommand(
- node_id node, OMX_COMMANDTYPE cmd, OMX_S32 param) {
+ OMX_COMMANDTYPE cmd, OMX_S32 param) {
Parcel data, reply;
- data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeInt32((int32_t)node);
+ data.writeInterfaceToken(IOMXNode::getInterfaceDescriptor());
data.writeInt32(cmd);
data.writeInt32(param);
remote()->transact(SEND_COMMAND, data, &reply);
@@ -147,11 +160,10 @@
}
virtual status_t getParameter(
- node_id node, OMX_INDEXTYPE index,
+ OMX_INDEXTYPE index,
void *params, size_t size) {
Parcel data, reply;
- data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeInt32((int32_t)node);
+ data.writeInterfaceToken(IOMXNode::getInterfaceDescriptor());
data.writeInt32(index);
data.writeInt64(size);
data.write(params, size);
@@ -168,11 +180,10 @@
}
virtual status_t setParameter(
- node_id node, OMX_INDEXTYPE index,
+ OMX_INDEXTYPE index,
const void *params, size_t size) {
Parcel data, reply;
- data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeInt32((int32_t)node);
+ data.writeInterfaceToken(IOMXNode::getInterfaceDescriptor());
data.writeInt32(index);
data.writeInt64(size);
data.write(params, size);
@@ -182,11 +193,10 @@
}
virtual status_t getConfig(
- node_id node, OMX_INDEXTYPE index,
+ OMX_INDEXTYPE index,
void *params, size_t size) {
Parcel data, reply;
- data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeInt32((int32_t)node);
+ data.writeInterfaceToken(IOMXNode::getInterfaceDescriptor());
data.writeInt32(index);
data.writeInt64(size);
data.write(params, size);
@@ -203,11 +213,10 @@
}
virtual status_t setConfig(
- node_id node, OMX_INDEXTYPE index,
+ OMX_INDEXTYPE index,
const void *params, size_t size) {
Parcel data, reply;
- data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeInt32((int32_t)node);
+ data.writeInterfaceToken(IOMXNode::getInterfaceDescriptor());
data.writeInt32(index);
data.writeInt64(size);
data.write(params, size);
@@ -216,22 +225,10 @@
return reply.readInt32();
}
- virtual status_t getState(
- node_id node, OMX_STATETYPE* state) {
- Parcel data, reply;
- data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeInt32((int32_t)node);
- remote()->transact(GET_STATE, data, &reply);
-
- *state = static_cast<OMX_STATETYPE>(reply.readInt32());
- return reply.readInt32();
- }
-
virtual status_t enableNativeBuffers(
- node_id node, OMX_U32 port_index, OMX_BOOL graphic, OMX_BOOL enable) {
+ OMX_U32 port_index, OMX_BOOL graphic, OMX_BOOL enable) {
Parcel data, reply;
- data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeInt32((int32_t)node);
+ data.writeInterfaceToken(IOMXNode::getInterfaceDescriptor());
data.writeInt32(port_index);
data.writeInt32((uint32_t)graphic);
data.writeInt32((uint32_t)enable);
@@ -242,10 +239,9 @@
}
virtual status_t getGraphicBufferUsage(
- node_id node, OMX_U32 port_index, OMX_U32* usage) {
+ OMX_U32 port_index, OMX_U32* usage) {
Parcel data, reply;
- data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeInt32((int32_t)node);
+ data.writeInterfaceToken(IOMXNode::getInterfaceDescriptor());
data.writeInt32(port_index);
remote()->transact(GET_GRAPHIC_BUFFER_USAGE, data, &reply);
@@ -255,17 +251,19 @@
}
virtual status_t useBuffer(
- node_id node, OMX_U32 port_index, const sp<IMemory> &params,
- buffer_id *buffer, OMX_U32 allottedSize) {
+ OMX_U32 port_index, const OMXBuffer &omxBuf, buffer_id *buffer) {
Parcel data, reply;
- data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeInt32((int32_t)node);
+ data.writeInterfaceToken(IOMXNode::getInterfaceDescriptor());
data.writeInt32(port_index);
- data.writeStrongBinder(IInterface::asBinder(params));
- data.writeInt32(allottedSize);
+
+ status_t err = omxBuf.writeToParcel(&data);
+ if (err != OK) {
+ return err;
+ }
+
remote()->transact(USE_BUFFER, data, &reply);
- status_t err = reply.readInt32();
+ err = reply.readInt32();
if (err != OK) {
*buffer = 0;
@@ -277,164 +275,29 @@
return err;
}
-
- virtual status_t useGraphicBuffer(
- node_id node, OMX_U32 port_index,
- const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer) {
- Parcel data, reply;
- data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeInt32((int32_t)node);
- data.writeInt32(port_index);
- data.write(*graphicBuffer);
- remote()->transact(USE_GRAPHIC_BUFFER, data, &reply);
-
- status_t err = reply.readInt32();
- if (err != OK) {
- *buffer = 0;
-
- return err;
- }
-
- *buffer = (buffer_id)reply.readInt32();
-
- return err;
- }
-
- virtual status_t updateGraphicBufferInMeta(
- node_id node, OMX_U32 port_index,
- const sp<GraphicBuffer> &graphicBuffer, buffer_id buffer) {
- Parcel data, reply;
- data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeInt32((int32_t)node);
- data.writeInt32(port_index);
- data.write(*graphicBuffer);
- data.writeInt32((int32_t)buffer);
- remote()->transact(UPDATE_GRAPHIC_BUFFER_IN_META, data, &reply);
-
- status_t err = reply.readInt32();
- return err;
- }
-
- virtual status_t updateNativeHandleInMeta(
- node_id node, OMX_U32 port_index,
- const sp<NativeHandle> &nativeHandle, buffer_id buffer) {
- Parcel data, reply;
- data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeInt32((int32_t)node);
- data.writeInt32(port_index);
- data.writeInt32(nativeHandle != NULL);
- if (nativeHandle != NULL) {
- data.writeNativeHandle(nativeHandle->handle());
- }
- data.writeInt32((int32_t)buffer);
- remote()->transact(UPDATE_NATIVE_HANDLE_IN_META, data, &reply);
-
- status_t err = reply.readInt32();
- return err;
- }
-
- virtual status_t createInputSurface(
- node_id node, OMX_U32 port_index, android_dataspace dataSpace,
- sp<IGraphicBufferProducer> *bufferProducer, MetadataBufferType *type) {
- Parcel data, reply;
- status_t err;
- data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeInt32((int32_t)node);
- data.writeInt32(port_index);
- data.writeInt32(dataSpace);
- err = remote()->transact(CREATE_INPUT_SURFACE, data, &reply);
- if (err != OK) {
- ALOGW("binder transaction failed: %d", err);
- return err;
- }
-
- // read type even if createInputSurface failed
- int negotiatedType = reply.readInt32();
- if (type != NULL) {
- *type = (MetadataBufferType)negotiatedType;
- }
-
- err = reply.readInt32();
- if (err != OK) {
- return err;
- }
-
- *bufferProducer = IGraphicBufferProducer::asInterface(
- reply.readStrongBinder());
-
- return err;
- }
-
- virtual status_t createPersistentInputSurface(
- sp<IGraphicBufferProducer> *bufferProducer,
- sp<IGraphicBufferConsumer> *bufferConsumer) {
- Parcel data, reply;
- status_t err;
- data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- err = remote()->transact(CREATE_PERSISTENT_INPUT_SURFACE, data, &reply);
- if (err != OK) {
- ALOGW("binder transaction failed: %d", err);
- return err;
- }
-
- err = reply.readInt32();
- if (err != OK) {
- return err;
- }
-
- *bufferProducer = IGraphicBufferProducer::asInterface(
- reply.readStrongBinder());
- *bufferConsumer = IGraphicBufferConsumer::asInterface(
- reply.readStrongBinder());
-
- return err;
- }
-
virtual status_t setInputSurface(
- node_id node, OMX_U32 port_index,
- const sp<IGraphicBufferConsumer> &bufferConsumer, MetadataBufferType *type) {
+ const sp<IOMXBufferSource> &bufferSource) {
Parcel data, reply;
- data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- status_t err;
- data.writeInt32((int32_t)node);
- data.writeInt32(port_index);
- data.writeStrongBinder(IInterface::asBinder(bufferConsumer));
+ data.writeInterfaceToken(IOMXNode::getInterfaceDescriptor());
- err = remote()->transact(SET_INPUT_SURFACE, data, &reply);
+ data.writeStrongBinder(IInterface::asBinder(bufferSource));
+
+ status_t err = remote()->transact(SET_INPUT_SURFACE, data, &reply);
if (err != OK) {
ALOGW("binder transaction failed: %d", err);
return err;
}
- // read type even if setInputSurface failed
- int negotiatedType = reply.readInt32();
- if (type != NULL) {
- *type = (MetadataBufferType)negotiatedType;
- }
+ err = reply.readInt32();
- return reply.readInt32();
- }
-
- virtual status_t signalEndOfInputStream(node_id node) {
- Parcel data, reply;
- status_t err;
- data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeInt32((int32_t)node);
- err = remote()->transact(SIGNAL_END_OF_INPUT_STREAM, data, &reply);
- if (err != OK) {
- ALOGW("binder transaction failed: %d", err);
- return err;
- }
-
- return reply.readInt32();
+ return err;
}
virtual status_t storeMetaDataInBuffers(
- node_id node, OMX_U32 port_index, OMX_BOOL enable, MetadataBufferType *type) {
+ OMX_U32 port_index, OMX_BOOL enable, MetadataBufferType *type) {
Parcel data, reply;
- data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeInt32((int32_t)node);
+ data.writeInterfaceToken(IOMXNode::getInterfaceDescriptor());
data.writeInt32(port_index);
data.writeInt32((int32_t)enable);
data.writeInt32(type == NULL ? kMetadataBufferTypeANWBuffer : *type);
@@ -451,11 +314,10 @@
}
virtual status_t prepareForAdaptivePlayback(
- node_id node, OMX_U32 port_index, OMX_BOOL enable,
+ OMX_U32 port_index, OMX_BOOL enable,
OMX_U32 max_width, OMX_U32 max_height) {
Parcel data, reply;
- data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeInt32((int32_t)node);
+ data.writeInterfaceToken(IOMXNode::getInterfaceDescriptor());
data.writeInt32(port_index);
data.writeInt32((int32_t)enable);
data.writeInt32(max_width);
@@ -467,11 +329,10 @@
}
virtual status_t configureVideoTunnelMode(
- node_id node, OMX_U32 portIndex, OMX_BOOL tunneled,
+ OMX_U32 portIndex, OMX_BOOL tunneled,
OMX_U32 audioHwSync, native_handle_t **sidebandHandle ) {
Parcel data, reply;
- data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeInt32((int32_t)node);
+ data.writeInterfaceToken(IOMXNode::getInterfaceDescriptor());
data.writeInt32(portIndex);
data.writeInt32((int32_t)tunneled);
data.writeInt32(audioHwSync);
@@ -486,11 +347,10 @@
virtual status_t allocateSecureBuffer(
- node_id node, OMX_U32 port_index, size_t size,
+ OMX_U32 port_index, size_t size,
buffer_id *buffer, void **buffer_data, sp<NativeHandle> *native_handle) {
Parcel data, reply;
- data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeInt32((int32_t)node);
+ data.writeInterfaceToken(IOMXNode::getInterfaceDescriptor());
data.writeInt32(port_index);
data.writeInt64(size);
remote()->transact(ALLOC_SECURE_BUFFER, data, &reply);
@@ -514,34 +374,10 @@
return err;
}
- virtual status_t allocateBufferWithBackup(
- node_id node, OMX_U32 port_index, const sp<IMemory> &params,
- buffer_id *buffer, OMX_U32 allottedSize) {
- Parcel data, reply;
- data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeInt32((int32_t)node);
- data.writeInt32(port_index);
- data.writeStrongBinder(IInterface::asBinder(params));
- data.writeInt32(allottedSize);
- remote()->transact(ALLOC_BUFFER_WITH_BACKUP, data, &reply);
-
- status_t err = reply.readInt32();
- if (err != OK) {
- *buffer = 0;
-
- return err;
- }
-
- *buffer = (buffer_id)reply.readInt32();
-
- return err;
- }
-
virtual status_t freeBuffer(
- node_id node, OMX_U32 port_index, buffer_id buffer) {
+ OMX_U32 port_index, buffer_id buffer) {
Parcel data, reply;
- data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeInt32((int32_t)node);
+ data.writeInterfaceToken(IOMXNode::getInterfaceDescriptor());
data.writeInt32(port_index);
data.writeInt32((int32_t)buffer);
remote()->transact(FREE_BUFFER, data, &reply);
@@ -549,11 +385,15 @@
return reply.readInt32();
}
- virtual status_t fillBuffer(node_id node, buffer_id buffer, int fenceFd) {
+ virtual status_t fillBuffer(
+ buffer_id buffer, const OMXBuffer &omxBuf, int fenceFd) {
Parcel data, reply;
- data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeInt32((int32_t)node);
+ data.writeInterfaceToken(IOMXNode::getInterfaceDescriptor());
data.writeInt32((int32_t)buffer);
+ status_t err = omxBuf.writeToParcel(&data);
+ if (err != OK) {
+ return err;
+ }
data.writeInt32(fenceFd >= 0);
if (fenceFd >= 0) {
data.writeFileDescriptor(fenceFd, true /* takeOwnership */);
@@ -564,16 +404,15 @@
}
virtual status_t emptyBuffer(
- node_id node,
- buffer_id buffer,
- OMX_U32 range_offset, OMX_U32 range_length,
+ buffer_id buffer, const OMXBuffer &omxBuf,
OMX_U32 flags, OMX_TICKS timestamp, int fenceFd) {
Parcel data, reply;
- data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeInt32((int32_t)node);
+ data.writeInterfaceToken(IOMXNode::getInterfaceDescriptor());
data.writeInt32((int32_t)buffer);
- data.writeInt32(range_offset);
- data.writeInt32(range_length);
+ status_t err = omxBuf.writeToParcel(&data);
+ if (err != OK) {
+ return err;
+ }
data.writeInt32(flags);
data.writeInt64(timestamp);
data.writeInt32(fenceFd >= 0);
@@ -586,12 +425,10 @@
}
virtual status_t getExtensionIndex(
- node_id node,
const char *parameter_name,
OMX_INDEXTYPE *index) {
Parcel data, reply;
- data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeInt32((int32_t)node);
+ data.writeInterfaceToken(IOMXNode::getInterfaceDescriptor());
data.writeCString(parameter_name);
remote()->transact(GET_EXTENSION_INDEX, data, &reply);
@@ -606,26 +443,34 @@
return err;
}
- virtual status_t setInternalOption(
- node_id node,
- OMX_U32 port_index,
- InternalOptionType type,
- const void *optionData,
- size_t size) {
+ virtual status_t dispatchMessage(const omx_message &msg) {
Parcel data, reply;
- data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeInt32((int32_t)node);
- data.writeInt32(port_index);
- data.writeInt64(size);
- data.write(optionData, size);
- data.writeInt32(type);
- remote()->transact(SET_INTERNAL_OPTION, data, &reply);
+ data.writeInterfaceToken(IOMXNode::getInterfaceDescriptor());
+ data.writeInt32(msg.fenceFd >= 0);
+ if (msg.fenceFd >= 0) {
+ data.writeFileDescriptor(msg.fenceFd, true /* takeOwnership */);
+ }
+ data.writeInt32(msg.type);
+ data.write(&msg.u, sizeof(msg.u));
+
+ remote()->transact(DISPATCH_MESSAGE, data, &reply);
+
+ return reply.readInt32();
+ }
+
+ virtual status_t setQuirks(OMX_U32 quirks) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IOMXNode::getInterfaceDescriptor());
+ data.writeInt32(quirks);
+
+ remote()->transact(SET_QUIRKS, data, &reply);
return reply.readInt32();
}
};
IMPLEMENT_META_INTERFACE(OMX, "android.hardware.IOMX");
+IMPLEMENT_META_INTERFACE(OMXNode, "android.hardware.IOMXNode");
////////////////////////////////////////////////////////////////////////////////
@@ -638,16 +483,6 @@
status_t BnOMX::onTransact(
uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) {
switch (code) {
- case LIVES_LOCALLY:
- {
- CHECK_OMX_INTERFACE(IOMX, data, reply);
- node_id node = (node_id)data.readInt32();
- pid_t pid = (pid_t)data.readInt32();
- reply->writeInt32(livesLocally(node, pid));
-
- return OK;
- }
-
case LIST_NODES:
{
CHECK_OMX_INTERFACE(IOMX, data, reply);
@@ -686,40 +521,62 @@
return NO_ERROR;
}
- node_id node;
+ sp<IOMXNode> omxNode;
- status_t err = allocateNode(name, observer,
- NULL /* nodeBinder */, &node);
+ status_t err = allocateNode(name, observer, &omxNode);
+
reply->writeInt32(err);
if (err == OK) {
- reply->writeInt32((int32_t)node);
+ reply->writeStrongBinder(IInterface::asBinder(omxNode));
}
return NO_ERROR;
}
- case FREE_NODE:
+ case CREATE_INPUT_SURFACE:
{
CHECK_OMX_INTERFACE(IOMX, data, reply);
- node_id node = (node_id)data.readInt32();
+ sp<IGraphicBufferProducer> bufferProducer;
+ sp<IGraphicBufferSource> bufferSource;
+ status_t err = createInputSurface(&bufferProducer, &bufferSource);
- reply->writeInt32(freeNode(node));
+ reply->writeInt32(err);
+
+ if (err == OK) {
+ reply->writeStrongBinder(IInterface::asBinder(bufferProducer));
+ reply->writeStrongBinder(IInterface::asBinder(bufferSource));
+ }
+
+ return NO_ERROR;
+ }
+
+ default:
+ return BBinder::onTransact(code, data, reply, flags);
+ }
+}
+
+status_t BnOMXNode::onTransact(
+ uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) {
+ switch (code) {
+ case FREE_NODE:
+ {
+ CHECK_OMX_INTERFACE(IOMXNode, data, reply);
+
+ reply->writeInt32(freeNode());
return NO_ERROR;
}
case SEND_COMMAND:
{
- CHECK_OMX_INTERFACE(IOMX, data, reply);
-
- node_id node = (node_id)data.readInt32();
+ CHECK_OMX_INTERFACE(IOMXNode, data, reply);
OMX_COMMANDTYPE cmd =
static_cast<OMX_COMMANDTYPE>(data.readInt32());
OMX_S32 param = data.readInt32();
- reply->writeInt32(sendCommand(node, cmd, param));
+ reply->writeInt32(sendCommand(cmd, param));
return NO_ERROR;
}
@@ -728,11 +585,9 @@
case SET_PARAMETER:
case GET_CONFIG:
case SET_CONFIG:
- case SET_INTERNAL_OPTION:
{
- CHECK_OMX_INTERFACE(IOMX, data, reply);
+ CHECK_OMX_INTERFACE(IOMXNode, data, reply);
- node_id node = (node_id)data.readInt32();
OMX_INDEXTYPE index = static_cast<OMX_INDEXTYPE>(data.readInt32());
size_t size = data.readInt64();
@@ -742,8 +597,7 @@
size_t pageSize = 0;
size_t allocSize = 0;
bool isUsageBits = (index == (OMX_INDEXTYPE) OMX_IndexParamConsumerUsageBits);
- if ((isUsageBits && size < 4) ||
- (!isUsageBits && code != SET_INTERNAL_OPTION && size < 8)) {
+ if ((isUsageBits && size < 4) || (!isUsageBits && size < 8)) {
// we expect the structure to contain at least the size and
// version, 8 bytes total
ALOGE("b/27207275 (%zu) (%d/%d)", size, int(index), int(code));
@@ -765,8 +619,7 @@
} else {
err = NOT_ENOUGH_DATA;
OMX_U32 declaredSize = *(OMX_U32*)params;
- if (code != SET_INTERNAL_OPTION &&
- index != (OMX_INDEXTYPE) OMX_IndexParamConsumerUsageBits &&
+ if (index != (OMX_INDEXTYPE) OMX_IndexParamConsumerUsageBits &&
declaredSize > size) {
// the buffer says it's bigger than it actually is
ALOGE("b/27207275 (%u/%zu)", declaredSize, size);
@@ -781,26 +634,17 @@
} else {
switch (code) {
case GET_PARAMETER:
- err = getParameter(node, index, params, size);
+ err = getParameter(index, params, size);
break;
case SET_PARAMETER:
- err = setParameter(node, index, params, size);
+ err = setParameter(index, params, size);
break;
case GET_CONFIG:
- err = getConfig(node, index, params, size);
+ err = getConfig(index, params, size);
break;
case SET_CONFIG:
- err = setConfig(node, index, params, size);
+ err = setConfig(index, params, size);
break;
- case SET_INTERNAL_OPTION:
- {
- InternalOptionType type =
- (InternalOptionType)data.readInt32();
-
- err = setInternalOption(node, index, type, params, size);
- break;
- }
-
default:
TRESPASS();
}
@@ -826,30 +670,15 @@
return NO_ERROR;
}
- case GET_STATE:
- {
- CHECK_OMX_INTERFACE(IOMX, data, reply);
-
- node_id node = (node_id)data.readInt32();
- OMX_STATETYPE state = OMX_StateInvalid;
-
- status_t err = getState(node, &state);
- reply->writeInt32(state);
- reply->writeInt32(err);
-
- return NO_ERROR;
- }
-
case ENABLE_NATIVE_BUFFERS:
{
- CHECK_OMX_INTERFACE(IOMX, data, reply);
+ CHECK_OMX_INTERFACE(IOMXNode, data, reply);
- node_id node = (node_id)data.readInt32();
OMX_U32 port_index = data.readInt32();
OMX_BOOL graphic = (OMX_BOOL)data.readInt32();
OMX_BOOL enable = (OMX_BOOL)data.readInt32();
- status_t err = enableNativeBuffers(node, port_index, graphic, enable);
+ status_t err = enableNativeBuffers(port_index, graphic, enable);
reply->writeInt32(err);
return NO_ERROR;
@@ -857,13 +686,12 @@
case GET_GRAPHIC_BUFFER_USAGE:
{
- CHECK_OMX_INTERFACE(IOMX, data, reply);
+ CHECK_OMX_INTERFACE(IOMXNode, data, reply);
- node_id node = (node_id)data.readInt32();
OMX_U32 port_index = data.readInt32();
OMX_U32 usage = 0;
- status_t err = getGraphicBufferUsage(node, port_index, &usage);
+ status_t err = getGraphicBufferUsage(port_index, &usage);
reply->writeInt32(err);
reply->writeInt32(usage);
@@ -872,22 +700,18 @@
case USE_BUFFER:
{
- CHECK_OMX_INTERFACE(IOMX, data, reply);
+ CHECK_OMX_INTERFACE(IOMXNode, data, reply);
- node_id node = (node_id)data.readInt32();
OMX_U32 port_index = data.readInt32();
- sp<IMemory> params =
- interface_cast<IMemory>(data.readStrongBinder());
- OMX_U32 allottedSize = data.readInt32();
- if (params == NULL) {
- ALOGE("b/26392700");
- reply->writeInt32(INVALID_OPERATION);
- return NO_ERROR;
+ OMXBuffer omxBuf;
+ status_t err = omxBuf.readFromParcel(&data);
+ if (err != OK) {
+ return err;
}
buffer_id buffer;
- status_t err = useBuffer(node, port_index, params, &buffer, allottedSize);
+ err = useBuffer(port_index, omxBuf, &buffer);
reply->writeInt32(err);
if (err == OK) {
@@ -897,143 +721,14 @@
return NO_ERROR;
}
- case USE_GRAPHIC_BUFFER:
- {
- CHECK_OMX_INTERFACE(IOMX, data, reply);
-
- node_id node = (node_id)data.readInt32();
- OMX_U32 port_index = data.readInt32();
- sp<GraphicBuffer> graphicBuffer = new GraphicBuffer();
- data.read(*graphicBuffer);
-
- buffer_id buffer;
- status_t err = useGraphicBuffer(
- node, port_index, graphicBuffer, &buffer);
- reply->writeInt32(err);
-
- if (err == OK) {
- reply->writeInt32((int32_t)buffer);
- }
-
- return NO_ERROR;
- }
-
- case UPDATE_GRAPHIC_BUFFER_IN_META:
- {
- CHECK_OMX_INTERFACE(IOMX, data, reply);
-
- node_id node = (node_id)data.readInt32();
- OMX_U32 port_index = data.readInt32();
- sp<GraphicBuffer> graphicBuffer = new GraphicBuffer();
- data.read(*graphicBuffer);
- buffer_id buffer = (buffer_id)data.readInt32();
-
- status_t err = updateGraphicBufferInMeta(
- node, port_index, graphicBuffer, buffer);
- reply->writeInt32(err);
-
- return NO_ERROR;
- }
-
- case UPDATE_NATIVE_HANDLE_IN_META:
- {
- CHECK_OMX_INTERFACE(IOMX, data, reply);
-
- node_id node = (node_id)data.readInt32();
- OMX_U32 port_index = data.readInt32();
- native_handle *handle = NULL;
- if (data.readInt32()) {
- handle = data.readNativeHandle();
- }
- buffer_id buffer = (buffer_id)data.readInt32();
-
- status_t err = updateNativeHandleInMeta(
- node, port_index, NativeHandle::create(handle, true /* ownshandle */), buffer);
- reply->writeInt32(err);
-
- return NO_ERROR;
- }
-
- case CREATE_INPUT_SURFACE:
- {
- CHECK_OMX_INTERFACE(IOMX, data, reply);
-
- node_id node = (node_id)data.readInt32();
- OMX_U32 port_index = data.readInt32();
- android_dataspace dataSpace = (android_dataspace)data.readInt32();
-
- sp<IGraphicBufferProducer> bufferProducer;
- MetadataBufferType type = kMetadataBufferTypeInvalid;
- status_t err = createInputSurface(node, port_index, dataSpace, &bufferProducer, &type);
-
- if ((err != OK) && (type == kMetadataBufferTypeInvalid)) {
- android_errorWriteLog(0x534e4554, "26324358");
- }
-
- reply->writeInt32(type);
- reply->writeInt32(err);
-
- if (err == OK) {
- reply->writeStrongBinder(IInterface::asBinder(bufferProducer));
- }
-
- return NO_ERROR;
- }
-
- case CREATE_PERSISTENT_INPUT_SURFACE:
- {
- CHECK_OMX_INTERFACE(IOMX, data, reply);
-
- sp<IGraphicBufferProducer> bufferProducer;
- sp<IGraphicBufferConsumer> bufferConsumer;
- status_t err = createPersistentInputSurface(
- &bufferProducer, &bufferConsumer);
-
- reply->writeInt32(err);
-
- if (err == OK) {
- reply->writeStrongBinder(IInterface::asBinder(bufferProducer));
- reply->writeStrongBinder(IInterface::asBinder(bufferConsumer));
- }
-
- return NO_ERROR;
- }
-
case SET_INPUT_SURFACE:
{
- CHECK_OMX_INTERFACE(IOMX, data, reply);
+ CHECK_OMX_INTERFACE(IOMXNode, data, reply);
- node_id node = (node_id)data.readInt32();
- OMX_U32 port_index = data.readInt32();
+ sp<IOMXBufferSource> bufferSource =
+ interface_cast<IOMXBufferSource>(data.readStrongBinder());
- sp<IGraphicBufferConsumer> bufferConsumer =
- interface_cast<IGraphicBufferConsumer>(data.readStrongBinder());
-
- MetadataBufferType type = kMetadataBufferTypeInvalid;
-
- status_t err = INVALID_OPERATION;
- if (bufferConsumer == NULL) {
- ALOGE("b/26392700");
- } else {
- err = setInputSurface(node, port_index, bufferConsumer, &type);
-
- if ((err != OK) && (type == kMetadataBufferTypeInvalid)) {
- android_errorWriteLog(0x534e4554, "26324358");
- }
- }
-
- reply->writeInt32(type);
- reply->writeInt32(err);
- return NO_ERROR;
- }
-
- case SIGNAL_END_OF_INPUT_STREAM:
- {
- CHECK_OMX_INTERFACE(IOMX, data, reply);
-
- node_id node = (node_id)data.readInt32();
-
- status_t err = signalEndOfInputStream(node);
+ status_t err = setInputSurface(bufferSource);
reply->writeInt32(err);
return NO_ERROR;
@@ -1041,14 +736,13 @@
case STORE_META_DATA_IN_BUFFERS:
{
- CHECK_OMX_INTERFACE(IOMX, data, reply);
+ CHECK_OMX_INTERFACE(IOMXNode, data, reply);
- node_id node = (node_id)data.readInt32();
OMX_U32 port_index = data.readInt32();
OMX_BOOL enable = (OMX_BOOL)data.readInt32();
MetadataBufferType type = (MetadataBufferType)data.readInt32();
- status_t err = storeMetaDataInBuffers(node, port_index, enable, &type);
+ status_t err = storeMetaDataInBuffers(port_index, enable, &type);
reply->writeInt32(type);
reply->writeInt32(err);
@@ -1058,16 +752,15 @@
case PREPARE_FOR_ADAPTIVE_PLAYBACK:
{
- CHECK_OMX_INTERFACE(IOMX, data, reply);
+ CHECK_OMX_INTERFACE(IOMXNode, data, reply);
- node_id node = (node_id)data.readInt32();
OMX_U32 port_index = data.readInt32();
OMX_BOOL enable = (OMX_BOOL)data.readInt32();
OMX_U32 max_width = data.readInt32();
OMX_U32 max_height = data.readInt32();
status_t err = prepareForAdaptivePlayback(
- node, port_index, enable, max_width, max_height);
+ port_index, enable, max_width, max_height);
reply->writeInt32(err);
return NO_ERROR;
@@ -1075,16 +768,15 @@
case CONFIGURE_VIDEO_TUNNEL_MODE:
{
- CHECK_OMX_INTERFACE(IOMX, data, reply);
+ CHECK_OMX_INTERFACE(IOMXNode, data, reply);
- node_id node = (node_id)data.readInt32();
OMX_U32 port_index = data.readInt32();
OMX_BOOL tunneled = (OMX_BOOL)data.readInt32();
OMX_U32 audio_hw_sync = data.readInt32();
native_handle_t *sideband_handle = NULL;
status_t err = configureVideoTunnelMode(
- node, port_index, tunneled, audio_hw_sync, &sideband_handle);
+ port_index, tunneled, audio_hw_sync, &sideband_handle);
reply->writeInt32(err);
if(err == OK){
reply->writeNativeHandle(sideband_handle);
@@ -1095,11 +787,10 @@
case ALLOC_SECURE_BUFFER:
{
- CHECK_OMX_INTERFACE(IOMX, data, reply);
+ CHECK_OMX_INTERFACE(IOMXNode, data, reply);
- node_id node = (node_id)data.readInt32();
OMX_U32 port_index = data.readInt32();
- if (!isSecure(node) || port_index != 0 /* kPortIndexInput */) {
+ if (!isSecure() || port_index != 0 /* kPortIndexInput */) {
ALOGE("b/24310423");
reply->writeInt32(INVALID_OPERATION);
return NO_ERROR;
@@ -1111,7 +802,7 @@
void *buffer_data = NULL;
sp<NativeHandle> native_handle;
status_t err = allocateSecureBuffer(
- node, port_index, size, &buffer, &buffer_data, &native_handle);
+ port_index, size, &buffer, &buffer_data, &native_handle);
reply->writeInt32(err);
if (err == OK) {
@@ -1125,83 +816,61 @@
return NO_ERROR;
}
- case ALLOC_BUFFER_WITH_BACKUP:
- {
- CHECK_OMX_INTERFACE(IOMX, data, reply);
-
- node_id node = (node_id)data.readInt32();
- OMX_U32 port_index = data.readInt32();
- sp<IMemory> params =
- interface_cast<IMemory>(data.readStrongBinder());
- OMX_U32 allottedSize = data.readInt32();
-
- if (params == NULL) {
- ALOGE("b/26392700");
- reply->writeInt32(INVALID_OPERATION);
- return NO_ERROR;
- }
-
- buffer_id buffer;
- status_t err = allocateBufferWithBackup(
- node, port_index, params, &buffer, allottedSize);
-
- reply->writeInt32(err);
-
- if (err == OK) {
- reply->writeInt32((int32_t)buffer);
- }
-
- return NO_ERROR;
- }
-
case FREE_BUFFER:
{
- CHECK_OMX_INTERFACE(IOMX, data, reply);
+ CHECK_OMX_INTERFACE(IOMXNode, data, reply);
- node_id node = (node_id)data.readInt32();
OMX_U32 port_index = data.readInt32();
buffer_id buffer = (buffer_id)data.readInt32();
- reply->writeInt32(freeBuffer(node, port_index, buffer));
+ reply->writeInt32(freeBuffer(port_index, buffer));
return NO_ERROR;
}
case FILL_BUFFER:
{
- CHECK_OMX_INTERFACE(IOMX, data, reply);
+ CHECK_OMX_INTERFACE(IOMXNode, data, reply);
- node_id node = (node_id)data.readInt32();
buffer_id buffer = (buffer_id)data.readInt32();
+
+ OMXBuffer omxBuf;
+ status_t err = omxBuf.readFromParcel(&data);
+ if (err != OK) {
+ return err;
+ }
+
bool haveFence = data.readInt32();
int fenceFd = haveFence ? ::dup(data.readFileDescriptor()) : -1;
- reply->writeInt32(fillBuffer(node, buffer, fenceFd));
+
+ reply->writeInt32(fillBuffer(buffer, omxBuf, fenceFd));
return NO_ERROR;
}
case EMPTY_BUFFER:
{
- CHECK_OMX_INTERFACE(IOMX, data, reply);
+ CHECK_OMX_INTERFACE(IOMXNode, data, reply);
- node_id node = (node_id)data.readInt32();
buffer_id buffer = (buffer_id)data.readInt32();
- OMX_U32 range_offset = data.readInt32();
- OMX_U32 range_length = data.readInt32();
+ OMXBuffer omxBuf;
+ status_t err = omxBuf.readFromParcel(&data);
+ if (err != OK) {
+ return err;
+ }
OMX_U32 flags = data.readInt32();
OMX_TICKS timestamp = data.readInt64();
bool haveFence = data.readInt32();
int fenceFd = haveFence ? ::dup(data.readFileDescriptor()) : -1;
reply->writeInt32(emptyBuffer(
- node, buffer, range_offset, range_length, flags, timestamp, fenceFd));
+ buffer, omxBuf, flags, timestamp, fenceFd));
return NO_ERROR;
}
case GET_EXTENSION_INDEX:
{
- CHECK_OMX_INTERFACE(IOMX, data, reply);
+ CHECK_OMX_INTERFACE(IOMXNode, data, reply);
- node_id node = (node_id)data.readInt32();
const char *parameter_name = data.readCString();
if (parameter_name == NULL) {
@@ -1211,7 +880,7 @@
}
OMX_INDEXTYPE index;
- status_t err = getExtensionIndex(node, parameter_name, &index);
+ status_t err = getExtensionIndex(parameter_name, &index);
reply->writeInt32(err);
@@ -1222,6 +891,34 @@
return OK;
}
+ case DISPATCH_MESSAGE:
+ {
+ CHECK_OMX_INTERFACE(IOMXNode, data, reply);
+ omx_message msg;
+ int haveFence = data.readInt32();
+ msg.fenceFd = haveFence ? ::dup(data.readFileDescriptor()) : -1;
+ msg.type = (typeof(msg.type))data.readInt32();
+ status_t err = data.read(&msg.u, sizeof(msg.u));
+
+ if (err == OK) {
+ err = dispatchMessage(msg);
+ }
+ reply->writeInt32(err);
+
+ return NO_ERROR;
+ }
+
+ case SET_QUIRKS:
+ {
+ CHECK_OMX_INTERFACE(IOMXNode, data, reply);
+
+ OMX_U32 quirks = data.readInt32();
+
+ reply->writeInt32(setQuirks(quirks));
+
+ return NO_ERROR;
+ }
+
default:
return BBinder::onTransact(code, data, reply, flags);
}
@@ -1238,14 +935,12 @@
virtual void onMessages(const std::list<omx_message> &messages) {
Parcel data, reply;
std::list<omx_message>::const_iterator it = messages.cbegin();
- bool first = true;
+ if (messages.empty()) {
+ return;
+ }
+ data.writeInterfaceToken(IOMXObserver::getInterfaceDescriptor());
while (it != messages.cend()) {
const omx_message &msg = *it++;
- if (first) {
- data.writeInterfaceToken(IOMXObserver::getInterfaceDescriptor());
- data.writeInt32(msg.node);
- first = false;
- }
data.writeInt32(msg.fenceFd >= 0);
if (msg.fenceFd >= 0) {
data.writeFileDescriptor(msg.fenceFd, true /* takeOwnership */);
@@ -1254,10 +949,8 @@
data.write(&msg.u, sizeof(msg.u));
ALOGV("onMessage writing message %d, size %zu", msg.type, sizeof(msg));
}
- if (!first) {
- data.writeInt32(-1); // mark end
- remote()->transact(OBSERVER_ON_MSG, data, &reply, IBinder::FLAG_ONEWAY);
- }
+ data.writeInt32(-1); // mark end
+ remote()->transact(OBSERVER_ON_MSG, data, &reply, IBinder::FLAG_ONEWAY);
}
};
@@ -1269,7 +962,6 @@
case OBSERVER_ON_MSG:
{
CHECK_OMX_INTERFACE(IOMXObserver, data, reply);
- IOMX::node_id node = data.readInt32();
std::list<omx_message> messages;
status_t err = FAILED_TRANSACTION; // must receive at least one message
do {
@@ -1278,7 +970,6 @@
break;
}
omx_message msg;
- msg.node = node;
msg.fenceFd = haveFence ? ::dup(data.readFileDescriptor()) : -1;
msg.type = (typeof(msg.type))data.readInt32();
err = data.read(&msg.u, sizeof(msg.u));
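The IOMX.cpp rewrite above replaces the old node_id-based calls with a per-node IOMXNode binder. A hedged sketch of the resulting calling convention (separate from the change itself; the component name and observer are placeholders): a client now receives an IOMXNode from allocateNode() and issues node-scoped calls on it directly.

#include <media/IOMX.h>

using namespace android;

// Sketch: allocate a node and drive it through the new per-node interface.
static status_t runNode(const sp<IOMX> &omx, const sp<IOMXObserver> &observer) {
    sp<IOMXNode> node;
    status_t err = omx->allocateNode("OMX.placeholder.decoder", observer, &node);
    if (err != OK) {
        return err;
    }
    // Node-scoped call: no node_id parameter any more.
    err = node->sendCommand(OMX_CommandStateSet, OMX_StateIdle);
    node->freeNode();
    return err;
}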
diff --git a/media/libmedia/MediaCodecBuffer.cpp b/media/libmedia/MediaCodecBuffer.cpp
new file mode 100644
index 0000000..59d6164
--- /dev/null
+++ b/media/libmedia/MediaCodecBuffer.cpp
@@ -0,0 +1,88 @@
+/*
+ * Copyright 2016, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaCodecBuffer"
+#include <utils/Log.h>
+
+#include <media/MediaCodecBuffer.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/MediaBufferBase.h>
+
+namespace android {
+
+MediaCodecBuffer::MediaCodecBuffer(const sp<AMessage> &format, const sp<ABuffer> &buffer)
+ : mMeta(new AMessage),
+ mFormat(format),
+ mBuffer(buffer),
+ mMediaBufferBase(nullptr) {
+}
+
+// ABuffer-like interface
+uint8_t *MediaCodecBuffer::base() {
+ return mBuffer->base();
+}
+
+uint8_t *MediaCodecBuffer::data() {
+ return mBuffer->data();
+}
+
+size_t MediaCodecBuffer::capacity() const {
+ return mBuffer->capacity();
+}
+
+size_t MediaCodecBuffer::size() const {
+ return mBuffer->size();
+}
+
+size_t MediaCodecBuffer::offset() const {
+ return mBuffer->offset();
+}
+
+status_t MediaCodecBuffer::setRange(size_t offset, size_t size) {
+ mBuffer->setRange(offset, size);
+ return OK;
+}
+
+MediaBufferBase *MediaCodecBuffer::getMediaBufferBase() {
+ if (mMediaBufferBase != NULL) {
+ mMediaBufferBase->add_ref();
+ }
+ return mMediaBufferBase;
+}
+
+void MediaCodecBuffer::setMediaBufferBase(MediaBufferBase *mediaBuffer) {
+ if (mMediaBufferBase != NULL) {
+ mMediaBufferBase->release();
+ }
+ mMediaBufferBase = mediaBuffer;
+}
+
+sp<AMessage> MediaCodecBuffer::meta() {
+ return mMeta;
+}
+
+sp<AMessage> MediaCodecBuffer::format() {
+ return mFormat;
+}
+
+void MediaCodecBuffer::setFormat(const sp<AMessage> &format) {
+ mMeta->clear();
+ mFormat = format;
+}
+
+} // namespace android
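A minimal usage sketch for the new MediaCodecBuffer wrapper (separate from the change itself; the buffer size and the "timeUs" meta key below are illustrative): it pairs an AMessage-based format with a backing ABuffer and mirrors the ABuffer range accessors.

#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/AMessage.h>

using namespace android;

// Sketch: wrap a raw ABuffer and attach per-buffer metadata.
static sp<MediaCodecBuffer> makeCodecBuffer() {
    sp<AMessage> format = new AMessage;          // e.g. the output format
    sp<ABuffer> backing = new ABuffer(4096);     // 4 KiB backing store
    sp<MediaCodecBuffer> buffer = new MediaCodecBuffer(format, backing);
    buffer->setRange(0, 0);                      // empty until filled
    buffer->meta()->setInt64("timeUs", 0);       // per-buffer metadata
    return buffer;
}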
diff --git a/media/libmedia/MediaCodecInfo.cpp b/media/libmedia/MediaCodecInfo.cpp
index 1b3b3eb..62a7bdf 100644
--- a/media/libmedia/MediaCodecInfo.cpp
+++ b/media/libmedia/MediaCodecInfo.cpp
@@ -121,9 +121,11 @@
}
bool MediaCodecInfo::hasQuirk(const char *name) const {
- for (size_t ix = 0; ix < mQuirks.size(); ix++) {
- if (mQuirks.itemAt(ix).equalsIgnoreCase(name)) {
- return true;
+ if (name) {
+ for (size_t ix = 0; ix < mQuirks.size(); ix++) {
+ if (mQuirks.itemAt(ix).equalsIgnoreCase(name)) {
+ return true;
+ }
}
}
return false;
@@ -190,9 +192,11 @@
}
ssize_t MediaCodecInfo::getCapabilityIndex(const char *mime) const {
- for (size_t ix = 0; ix < mCaps.size(); ix++) {
- if (mCaps.keyAt(ix).equalsIgnoreCase(mime)) {
- return ix;
+ if (mime) {
+ for (size_t ix = 0; ix < mCaps.size(); ix++) {
+ if (mCaps.keyAt(ix).equalsIgnoreCase(mime)) {
+ return ix;
+ }
}
}
return -1;
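The MediaCodecInfo change above only guards hasQuirk() and getCapabilityIndex() against a NULL name/mime. A caller-side sketch (separate from the change itself; the quirk string is a placeholder, not a value introduced by this patch):

#include <media/MediaCodecInfo.h>

using namespace android;

// Sketch: the call is now safe even when the quirk name is NULL —
// it simply returns false instead of dereferencing the pointer.
static bool hasPlaceholderQuirk(const sp<MediaCodecInfo> &info) {
    return info != NULL && info->hasQuirk("placeholder-quirk");
}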
diff --git a/media/libstagefright/MediaDefs.cpp b/media/libmedia/MediaDefs.cpp
similarity index 98%
rename from media/libstagefright/MediaDefs.cpp
rename to media/libmedia/MediaDefs.cpp
index 845462b..a2110c9 100644
--- a/media/libstagefright/MediaDefs.cpp
+++ b/media/libmedia/MediaDefs.cpp
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-#include <media/stagefright/MediaDefs.h>
+#include <media/MediaDefs.h>
namespace android {
diff --git a/media/libmedia/MidiDeviceInfo.cpp b/media/libmedia/MidiDeviceInfo.cpp
new file mode 100644
index 0000000..02efc5f
--- /dev/null
+++ b/media/libmedia/MidiDeviceInfo.cpp
@@ -0,0 +1,138 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MidiDeviceInfo"
+
+#include "MidiDeviceInfo.h"
+
+#include <binder/Parcel.h>
+#include <log/log.h>
+#include <utils/Errors.h>
+#include <utils/String16.h>
+
+namespace android {
+namespace media {
+namespace midi {
+
+// The constant values need to be kept in sync with MidiDeviceInfo.java.
+// static
+const char* const MidiDeviceInfo::PROPERTY_NAME = "name";
+const char* const MidiDeviceInfo::PROPERTY_MANUFACTURER = "manufacturer";
+const char* const MidiDeviceInfo::PROPERTY_PRODUCT = "product";
+const char* const MidiDeviceInfo::PROPERTY_VERSION = "version";
+const char* const MidiDeviceInfo::PROPERTY_SERIAL_NUMBER = "serial_number";
+const char* const MidiDeviceInfo::PROPERTY_ALSA_CARD = "alsa_card";
+const char* const MidiDeviceInfo::PROPERTY_ALSA_DEVICE = "alsa_device";
+
+String16 MidiDeviceInfo::getProperty(const char* propertyName) {
+ String16 value;
+ if (mProperties.getString(String16(propertyName), &value)) {
+ return value;
+ } else {
+ return String16();
+ }
+}
+
+#define RETURN_IF_FAILED(calledOnce) \
+ { \
+ status_t returnStatus = calledOnce; \
+ if (returnStatus) { \
+ ALOGE("Failed at %s:%d (%s)", __FILE__, __LINE__, __func__); \
+ return returnStatus; \
+ } \
+ }
+
+status_t MidiDeviceInfo::writeToParcel(Parcel* parcel) const {
+ // Needs to be kept in sync with code in MidiDeviceInfo.java
+ RETURN_IF_FAILED(parcel->writeInt32(mType));
+ RETURN_IF_FAILED(parcel->writeInt32(mId));
+ RETURN_IF_FAILED(parcel->writeInt32((int32_t)mInputPortNames.size()));
+ RETURN_IF_FAILED(parcel->writeInt32((int32_t)mOutputPortNames.size()));
+ RETURN_IF_FAILED(writeStringVector(parcel, mInputPortNames));
+ RETURN_IF_FAILED(writeStringVector(parcel, mOutputPortNames));
+ RETURN_IF_FAILED(parcel->writeInt32(mIsPrivate ? 1 : 0));
+ RETURN_IF_FAILED(mProperties.writeToParcel(parcel));
+ // This corresponds to "extra" properties written by Java code
+ RETURN_IF_FAILED(mProperties.writeToParcel(parcel));
+ return OK;
+}
+
+status_t MidiDeviceInfo::readFromParcel(const Parcel* parcel) {
+ // Needs to be kept in sync with code in MidiDeviceInfo.java
+ RETURN_IF_FAILED(parcel->readInt32(&mType));
+ RETURN_IF_FAILED(parcel->readInt32(&mId));
+ int32_t inputPortCount;
+ RETURN_IF_FAILED(parcel->readInt32(&inputPortCount));
+ int32_t outputPortCount;
+ RETURN_IF_FAILED(parcel->readInt32(&outputPortCount));
+ RETURN_IF_FAILED(readStringVector(parcel, &mInputPortNames, inputPortCount));
+ RETURN_IF_FAILED(readStringVector(parcel, &mOutputPortNames, outputPortCount));
+ int32_t isPrivate;
+ RETURN_IF_FAILED(parcel->readInt32(&isPrivate));
+ mIsPrivate = isPrivate == 1;
+ RETURN_IF_FAILED(mProperties.readFromParcel(parcel));
+ // Ignore "extra" properties as they may contain Java Parcelables
+ return OK;
+}
+
+status_t MidiDeviceInfo::readStringVector(
+ const Parcel* parcel, Vector<String16> *vectorPtr, size_t defaultLength) {
+ std::unique_ptr<std::vector<std::unique_ptr<String16>>> v;
+ status_t result = parcel->readString16Vector(&v);
+ if (result != OK) return result;
+ vectorPtr->clear();
+ if (v.get() != nullptr) {
+ for (const auto& iter : *v) {
+ if (iter.get() != nullptr) {
+ vectorPtr->push_back(*iter);
+ } else {
+ vectorPtr->push_back(String16());
+ }
+ }
+ } else {
+ vectorPtr->resize(defaultLength);
+ }
+ return OK;
+}
+
+status_t MidiDeviceInfo::writeStringVector(Parcel* parcel, const Vector<String16>& vector) const {
+ std::vector<String16> v;
+ for (size_t i = 0; i < vector.size(); ++i) {
+ v.push_back(vector[i]);
+ }
+ return parcel->writeString16Vector(v);
+}
+
+// Vector does not define operator==
+static inline bool areVectorsEqual(const Vector<String16>& lhs, const Vector<String16>& rhs) {
+ if (lhs.size() != rhs.size()) return false;
+ for (size_t i = 0; i < lhs.size(); ++i) {
+ if (lhs[i] != rhs[i]) return false;
+ }
+ return true;
+}
+
+bool operator==(const MidiDeviceInfo& lhs, const MidiDeviceInfo& rhs) {
+ return (lhs.mType == rhs.mType && lhs.mId == rhs.mId &&
+ areVectorsEqual(lhs.mInputPortNames, rhs.mInputPortNames) &&
+ areVectorsEqual(lhs.mOutputPortNames, rhs.mOutputPortNames) &&
+ lhs.mProperties == rhs.mProperties &&
+ lhs.mIsPrivate == rhs.mIsPrivate);
+}
+
+} // namespace midi
+} // namespace media
+} // namespace android
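The new MidiDeviceInfo Parcelable mirrors the Java-side wire format. A rough round-trip sketch (separate from the change itself; it assumes the class is default-constructible, which the readFromParcel() API implies but the .cpp above does not show):

#include <binder/Parcel.h>
#include "MidiDeviceInfo.h"

using android::Parcel;
using android::media::midi::MidiDeviceInfo;

// Sketch: serialize a device description and read it back.
static bool roundTrip(const MidiDeviceInfo &in) {
    Parcel parcel;
    if (in.writeToParcel(&parcel) != android::OK) return false;
    parcel.setDataPosition(0);                 // rewind before reading
    MidiDeviceInfo out;                        // assumes a default constructor
    if (out.readFromParcel(&parcel) != android::OK) return false;
    return in == out;                          // operator== defined above
}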
diff --git a/media/libmedia/OMXBuffer.cpp b/media/libmedia/OMXBuffer.cpp
new file mode 100644
index 0000000..0931872
--- /dev/null
+++ b/media/libmedia/OMXBuffer.cpp
@@ -0,0 +1,148 @@
+/*
+ * Copyright 2016, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "OMXBuffer"
+
+#include <media/MediaCodecBuffer.h>
+#include <media/OMXBuffer.h>
+#include <binder/IMemory.h>
+#include <binder/Parcel.h>
+#include <ui/GraphicBuffer.h>
+#include <utils/NativeHandle.h>
+
+namespace android {
+
+//static
+OMXBuffer OMXBuffer::sPreset(static_cast<sp<MediaCodecBuffer> >(NULL));
+
+OMXBuffer::OMXBuffer()
+ : mBufferType(kBufferTypeInvalid) {
+}
+
+OMXBuffer::OMXBuffer(const sp<MediaCodecBuffer>& codecBuffer)
+ : mBufferType(kBufferTypePreset),
+ mRangeLength(codecBuffer != NULL ? codecBuffer->size() : 0) {
+}
+
+OMXBuffer::OMXBuffer(const sp<IMemory> &mem, size_t allottedSize)
+ : mBufferType(kBufferTypeSharedMem),
+ mMem(mem),
+ mAllottedSize(allottedSize ? : mem->size()) {
+}
+
+OMXBuffer::OMXBuffer(const sp<GraphicBuffer> &gbuf)
+ : mBufferType(kBufferTypeANWBuffer),
+ mGraphicBuffer(gbuf) {
+}
+
+OMXBuffer::OMXBuffer(const sp<NativeHandle> &handle)
+ : mBufferType(kBufferTypeNativeHandle),
+ mNativeHandle(handle) {
+}
+
+OMXBuffer::~OMXBuffer() {
+}
+
+status_t OMXBuffer::writeToParcel(Parcel *parcel) const {
+ parcel->writeInt32(mBufferType);
+
+ switch(mBufferType) {
+ case kBufferTypePreset:
+ {
+ return parcel->writeUint32(mRangeLength);
+ }
+
+ case kBufferTypeSharedMem:
+ {
+ status_t err = parcel->writeStrongBinder(IInterface::asBinder(mMem));
+ if (err != NO_ERROR) {
+ return err;
+ }
+ return parcel->writeUint32(mAllottedSize);
+ }
+
+ case kBufferTypeANWBuffer:
+ {
+ return parcel->write(*mGraphicBuffer);
+ }
+
+ case kBufferTypeNativeHandle:
+ {
+ return parcel->writeNativeHandle(mNativeHandle->handle());
+ }
+
+ default:
+ return BAD_VALUE;
+ }
+ return BAD_VALUE;
+}
+
+status_t OMXBuffer::readFromParcel(const Parcel *parcel) {
+ BufferType bufferType = (BufferType) parcel->readInt32();
+
+ switch(bufferType) {
+ case kBufferTypePreset:
+ {
+ mRangeLength = parcel->readUint32();
+ break;
+ }
+
+ case kBufferTypeSharedMem:
+ {
+ sp<IMemory> params = interface_cast<IMemory>(parcel->readStrongBinder());
+
+ mMem = params;
+ mAllottedSize = parcel->readUint32();
+ break;
+ }
+
+ case kBufferTypeANWBuffer:
+ {
+ sp<GraphicBuffer> buffer = new GraphicBuffer();
+
+ status_t err = parcel->read(*buffer);
+
+ if (err != OK) {
+ return err;
+ }
+
+ mGraphicBuffer = buffer;
+ break;
+ }
+
+ case kBufferTypeNativeHandle:
+ {
+ sp<NativeHandle> handle = NativeHandle::create(
+ parcel->readNativeHandle(), true /* ownsHandle */);
+
+ mNativeHandle = handle;
+ break;
+ }
+
+ default:
+ return BAD_VALUE;
+ }
+
+ mBufferType = bufferType;
+ return OK;
+}
+
+} // namespace android
+
+
+
+
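To make the new OMXBuffer parceling concrete, a hedged round-trip sketch (separate from the change itself) using the kBufferTypePreset flavour, which only carries a range length:

#include <binder/Parcel.h>
#include <media/MediaCodecBuffer.h>
#include <media/OMXBuffer.h>

using namespace android;

// Sketch: write a preset-type OMXBuffer into a Parcel and read it back,
// mirroring what BpOMXNode::emptyBuffer() and BnOMXNode do over binder.
static status_t roundTripPreset() {
    sp<MediaCodecBuffer> codecBuffer;          // NULL -> range length of 0
    OMXBuffer src(codecBuffer);                // kBufferTypePreset

    Parcel parcel;
    status_t err = src.writeToParcel(&parcel);
    if (err != OK) return err;

    parcel.setDataPosition(0);                 // rewind before reading
    OMXBuffer dst;                             // kBufferTypeInvalid until read
    return dst.readFromParcel(&parcel);
}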
diff --git a/media/libmedia/TypeConverter.cpp b/media/libmedia/TypeConverter.cpp
new file mode 100644
index 0000000..54d1fc1
--- /dev/null
+++ b/media/libmedia/TypeConverter.cpp
@@ -0,0 +1,322 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/TypeConverter.h>
+
+namespace android {
+
+#define MAKE_STRING_FROM_ENUM(string) { #string, string }
+#define TERMINATOR { .literal = nullptr }
+
+template <>
+const OutputDeviceConverter::Table OutputDeviceConverter::mTable[] = {
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_NONE),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_EARPIECE),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_SPEAKER),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_SPEAKER_SAFE),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_WIRED_HEADSET),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_WIRED_HEADPHONE),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_SCO),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_ALL_SCO),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_ALL_A2DP),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_AUX_DIGITAL),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_HDMI),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_USB_ACCESSORY),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_USB_DEVICE),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_ALL_USB),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_REMOTE_SUBMIX),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_TELEPHONY_TX),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_LINE),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_HDMI_ARC),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_SPDIF),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_FM),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_AUX_LINE),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_IP),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BUS),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_DEFAULT),
+ // STUB must be after DEFAULT, so the latter is picked up by toString first.
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_STUB),
+ TERMINATOR
+};
+
+template <>
+const InputDeviceConverter::Table InputDeviceConverter::mTable[] = {
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_NONE),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_COMMUNICATION),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_AMBIENT),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BUILTIN_MIC),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_ALL_SCO),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_WIRED_HEADSET),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_AUX_DIGITAL),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_HDMI),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_TELEPHONY_RX),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_VOICE_CALL),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BACK_MIC),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_REMOTE_SUBMIX),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_USB_ACCESSORY),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_USB_DEVICE),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_ALL_USB),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_FM_TUNER),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_TV_TUNER),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_LINE),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_SPDIF),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BLUETOOTH_A2DP),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_LOOPBACK),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_IP),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BUS),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_DEFAULT),
+ // STUB must come after DEFAULT so that toString() picks up DEFAULT first.
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_STUB),
+ TERMINATOR
+};
+
+
+template <>
+const OutputFlagConverter::Table OutputFlagConverter::mTable[] = {
+ MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_NONE),
+ MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_DIRECT),
+ MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_PRIMARY),
+ MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_FAST),
+ MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_DEEP_BUFFER),
+ MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD),
+ MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_NON_BLOCKING),
+ MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_HW_AV_SYNC),
+ MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_TTS),
+ MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_RAW),
+ MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_SYNC),
+ MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO),
+ TERMINATOR
+};
+
+
+template <>
+const InputFlagConverter::Table InputFlagConverter::mTable[] = {
+ MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_NONE),
+ MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_FAST),
+ MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_HW_HOTWORD),
+ MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_RAW),
+ MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_SYNC),
+ TERMINATOR
+};
+
+
+template <>
+const FormatConverter::Table FormatConverter::mTable[] = {
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_16_BIT),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_8_BIT),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_32_BIT),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_8_24_BIT),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_FLOAT),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_24_BIT_PACKED),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_MP3),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AMR_NB),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AMR_WB),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_MAIN),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LC),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_SSR),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LTP),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_HE_V1),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_SCALABLE),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ERLC),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LD),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_HE_V2),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ELD),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_VORBIS),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_HE_AAC_V1),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_HE_AAC_V2),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_OPUS),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AC3),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_E_AC3),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_DTS),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_DTS_HD),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_IEC61937),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_DOLBY_TRUEHD),
+ TERMINATOR
+};
+
+
+template <>
+const OutputChannelConverter::Table OutputChannelConverter::mTable[] = {
+ MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_MONO),
+ MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_STEREO),
+ MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_QUAD),
+ MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_5POINT1),
+ MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_7POINT1),
+ TERMINATOR
+};
+
+
+template <>
+const InputChannelConverter::Table InputChannelConverter::mTable[] = {
+ MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_MONO),
+ MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_STEREO),
+ MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_FRONT_BACK),
+ TERMINATOR
+};
+
+template <>
+const ChannelIndexConverter::Table ChannelIndexConverter::mTable[] = {
+ {"AUDIO_CHANNEL_INDEX_MASK_1", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_1)},
+ {"AUDIO_CHANNEL_INDEX_MASK_2", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_2)},
+ {"AUDIO_CHANNEL_INDEX_MASK_3", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_3)},
+ {"AUDIO_CHANNEL_INDEX_MASK_4", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_4)},
+ {"AUDIO_CHANNEL_INDEX_MASK_5", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_5)},
+ {"AUDIO_CHANNEL_INDEX_MASK_6", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_6)},
+ {"AUDIO_CHANNEL_INDEX_MASK_7", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_7)},
+ {"AUDIO_CHANNEL_INDEX_MASK_8", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_8)},
+ TERMINATOR
+};
+
+
+template <>
+const GainModeConverter::Table GainModeConverter::mTable[] = {
+ MAKE_STRING_FROM_ENUM(AUDIO_GAIN_MODE_JOINT),
+ MAKE_STRING_FROM_ENUM(AUDIO_GAIN_MODE_CHANNELS),
+ MAKE_STRING_FROM_ENUM(AUDIO_GAIN_MODE_RAMP),
+ TERMINATOR
+};
+
+
+template <>
+const StreamTypeConverter::Table StreamTypeConverter::mTable[] = {
+ MAKE_STRING_FROM_ENUM(AUDIO_STREAM_VOICE_CALL),
+ MAKE_STRING_FROM_ENUM(AUDIO_STREAM_SYSTEM),
+ MAKE_STRING_FROM_ENUM(AUDIO_STREAM_RING),
+ MAKE_STRING_FROM_ENUM(AUDIO_STREAM_MUSIC),
+ MAKE_STRING_FROM_ENUM(AUDIO_STREAM_ALARM),
+ MAKE_STRING_FROM_ENUM(AUDIO_STREAM_NOTIFICATION),
+ MAKE_STRING_FROM_ENUM(AUDIO_STREAM_BLUETOOTH_SCO),
+ MAKE_STRING_FROM_ENUM(AUDIO_STREAM_ENFORCED_AUDIBLE),
+ MAKE_STRING_FROM_ENUM(AUDIO_STREAM_DTMF),
+ MAKE_STRING_FROM_ENUM(AUDIO_STREAM_TTS),
+ MAKE_STRING_FROM_ENUM(AUDIO_STREAM_ACCESSIBILITY),
+ MAKE_STRING_FROM_ENUM(AUDIO_STREAM_REROUTING),
+ MAKE_STRING_FROM_ENUM(AUDIO_STREAM_PATCH),
+ TERMINATOR
+};
+
+template<>
+const AudioModeConverter::Table AudioModeConverter::mTable[] = {
+ MAKE_STRING_FROM_ENUM(AUDIO_MODE_INVALID),
+ MAKE_STRING_FROM_ENUM(AUDIO_MODE_CURRENT),
+ MAKE_STRING_FROM_ENUM(AUDIO_MODE_NORMAL),
+ MAKE_STRING_FROM_ENUM(AUDIO_MODE_RINGTONE),
+ MAKE_STRING_FROM_ENUM(AUDIO_MODE_IN_CALL),
+ MAKE_STRING_FROM_ENUM(AUDIO_MODE_IN_COMMUNICATION),
+ TERMINATOR
+};
+
+template class TypeConverter<OutputDeviceTraits>;
+template class TypeConverter<InputDeviceTraits>;
+template class TypeConverter<OutputFlagTraits>;
+template class TypeConverter<InputFlagTraits>;
+template class TypeConverter<FormatTraits>;
+template class TypeConverter<OutputChannelTraits>;
+template class TypeConverter<InputChannelTraits>;
+template class TypeConverter<ChannelIndexTraits>;
+template class TypeConverter<GainModeTraits>;
+template class TypeConverter<StreamTraits>;
+template class TypeConverter<AudioModeTraits>;
+
+bool deviceFromString(const std::string& literalDevice, audio_devices_t& device) {
+ return InputDeviceConverter::fromString(literalDevice, device) ||
+ OutputDeviceConverter::fromString(literalDevice, device);
+}
+
+bool deviceToString(audio_devices_t device, std::string& literalDevice) {
+ if (device & AUDIO_DEVICE_BIT_IN) {
+ return InputDeviceConverter::toString(device, literalDevice);
+ } else {
+ return OutputDeviceConverter::toString(device, literalDevice);
+ }
+}
+
+SampleRateTraits::Collection samplingRatesFromString(
+ const std::string &samplingRates, const char *del)
+{
+ SampleRateTraits::Collection samplingRateCollection;
+ collectionFromString<SampleRateTraits>(samplingRates, samplingRateCollection, del);
+ return samplingRateCollection;
+}
+
+FormatTraits::Collection formatsFromString(
+ const std::string &formats, const char *del)
+{
+ FormatTraits::Collection formatCollection;
+ FormatConverter::collectionFromString(formats, formatCollection, del);
+ return formatCollection;
+}
+
+audio_format_t formatFromString(const std::string &literalFormat, audio_format_t defaultFormat)
+{
+ audio_format_t format;
+ if (literalFormat.empty()) {
+ return defaultFormat;
+ }
+ if (!FormatConverter::fromString(literalFormat, format)) {
+ // Fall back to the default rather than returning an uninitialized value.
+ return defaultFormat;
+ }
+ return format;
+}
+
+audio_channel_mask_t channelMaskFromString(const std::string &literalChannels)
+{
+ audio_channel_mask_t channels;
+ if (!OutputChannelConverter::fromString(literalChannels, channels) &&
+ !InputChannelConverter::fromString(literalChannels, channels)) {
+ return AUDIO_CHANNEL_INVALID;
+ }
+ return channels;
+}
+
+ChannelTraits::Collection channelMasksFromString(
+ const std::string &channels, const char *del)
+{
+ ChannelTraits::Collection channelMaskCollection;
+ OutputChannelConverter::collectionFromString(channels, channelMaskCollection, del);
+ InputChannelConverter::collectionFromString(channels, channelMaskCollection, del);
+ ChannelIndexConverter::collectionFromString(channels, channelMaskCollection, del);
+ return channelMaskCollection;
+}
+
+InputChannelTraits::Collection inputChannelMasksFromString(
+ const std::string &inChannels, const char *del)
+{
+ InputChannelTraits::Collection inputChannelMaskCollection;
+ InputChannelConverter::collectionFromString(inChannels, inputChannelMaskCollection, del);
+ ChannelIndexConverter::collectionFromString(inChannels, inputChannelMaskCollection, del);
+ return inputChannelMaskCollection;
+}
+
+OutputChannelTraits::Collection outputChannelMasksFromString(
+ const std::string &outChannels, const char *del)
+{
+ OutputChannelTraits::Collection outputChannelMaskCollection;
+ OutputChannelConverter::collectionFromString(outChannels, outputChannelMaskCollection, del);
+ ChannelIndexConverter::collectionFromString(outChannels, outputChannelMaskCollection, del);
+ return outputChannelMaskCollection;
+}
+
+}; // namespace android
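
A minimal usage sketch for the helpers defined in this new file; the program below is illustrative only and assumes nothing beyond <media/TypeConverter.h> and libmedia:

#include <media/TypeConverter.h>

#include <string>

using namespace android;

int main() {
    // Enum -> string: deviceToString() routes to the input or output table
    // based on AUDIO_DEVICE_BIT_IN, exactly as implemented above.
    std::string name;
    deviceToString(AUDIO_DEVICE_OUT_SPEAKER, name);      // name == "AUDIO_DEVICE_OUT_SPEAKER"

    // String -> enum: deviceFromString() consults both tables.
    audio_devices_t device;
    deviceFromString("AUDIO_DEVICE_IN_BUILTIN_MIC", device);

    // Parsing a delimiter-separated list, e.g. a value from an audio policy config.
    FormatTraits::Collection formats =
            formatsFromString("AUDIO_FORMAT_PCM_16_BIT|AUDIO_FORMAT_AAC_LC", "|");
    (void)formats;
    return 0;
}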
diff --git a/media/libmedia/aidl/android/IGraphicBufferSource.aidl b/media/libmedia/aidl/android/IGraphicBufferSource.aidl
new file mode 100644
index 0000000..a8dd309
--- /dev/null
+++ b/media/libmedia/aidl/android/IGraphicBufferSource.aidl
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android;
+
+import android.IOMXNode;
+
+/**
+ * Binder interface for controlling a graphic buffer source.
+ *
+ * @hide
+ */
+interface IGraphicBufferSource {
+ void configure(IOMXNode omxNode, int dataSpace);
+ void setSuspend(boolean suspend);
+ void setRepeatPreviousFrameDelayUs(long repeatAfterUs);
+ void setMaxFps(float maxFps);
+ void setTimeLapseConfig(long timePerFrameUs, long timePerCaptureUs);
+ void setStartTimeUs(long startTimeUs);
+ void setColorAspects(int aspects);
+ void setTimeOffsetUs(long timeOffsetsUs);
+ void signalEndOfInputStream();
+}
\ No newline at end of file
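
A rough sketch of how a recording client might drive this interface once it has obtained the binder; the helper, constants, and header paths below are assumptions based on the aidl-cpp generation conventions, not part of this change:

// Hypothetical helper, for illustration only.
#include <android/IGraphicBufferSource.h>   // assumed generated header location
#include <android/IOMXNode.h>               // assumed generated header location
#include <utils/Errors.h>
#include <utils/StrongPointer.h>
#include <utils/Timers.h>

using namespace android;

status_t startEncoderInput(const sp<IGraphicBufferSource>& source,
                           const sp<IOMXNode>& omxNode, int32_t dataSpace) {
    source->configure(omxNode, dataSpace);        // bind the source to the encoder's OMX node
    source->setStartTimeUs(systemTime() / 1000);  // drop frames captured before this point
    source->setMaxFps(30.0f);                     // optionally throttle the capture rate
    return OK;
}

// When the producer is finished:
//     source->signalEndOfInputStream();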
diff --git a/media/libmedia/aidl/android/IOMXBufferSource.aidl b/media/libmedia/aidl/android/IOMXBufferSource.aidl
new file mode 100644
index 0000000..a5bf448
--- /dev/null
+++ b/media/libmedia/aidl/android/IOMXBufferSource.aidl
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android;
+
+import android.OMXFenceParcelable;
+
+/**
+ * Binder interface for a buffer source to be used together with an OMX encoder
+ *
+ * @hide
+ */
+interface IOMXBufferSource {
+ /**
+ * This is called when OMX transitions to OMX_StateExecuting, which means
+ * we can start handing it buffers. If we already have buffers of data
+ * sitting in the BufferQueue, this will send them to the codec.
+ */
+ void onOmxExecuting();
+
+ /**
+ * This is called when OMX transitions to OMX_StateIdle, indicating that
+ * the codec is expected to return all buffers to the client so they can
+ * be freed. Do NOT submit any more buffers to the component.
+ */
+ void onOmxIdle();
+
+ /**
+ * This is called when OMX transitions to OMX_StateLoaded, indicating that
+ * we are shutting down.
+ */
+ void onOmxLoaded();
+
+ /**
+ * A "codec buffer", i.e. a buffer that can be used to pass data into
+ * the encoder, has been allocated.
+ */
+ void onInputBufferAdded(int bufferID);
+
+ /**
+ * Called from OnEmptyBufferDone. If we have a BQ buffer available,
+ * fill it with a new frame of data; otherwise, just mark it as available.
+ *
+ * fenceParcel contains the fence's fd that the callee should wait on before
+ * using the buffer (or pass on to the user of the buffer, if the user supports
+ * fences). Callee takes ownership of the fence fd even if it fails.
+ */
+ void onInputBufferEmptied(int bufferID, in OMXFenceParcelable fenceParcel);
+}
\ No newline at end of file
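
A minimal skeleton of a native implementation of these callbacks, assuming the aidl-cpp backend generates a BnOMXBufferSource base class for this interface (class name and header path below are assumptions):

// Hypothetical skeleton, for illustration only.
#include <android/BnOMXBufferSource.h>   // assumed generated header location
#include <media/OMXFenceParcelable.h>

namespace android {

struct MyBufferSource : public BnOMXBufferSource {
    // OMX_StateExecuting: start submitting any queued frames to the codec.
    binder::Status onOmxExecuting() override { return binder::Status::ok(); }

    // OMX_StateIdle: stop submitting; buffers are coming back to be freed.
    binder::Status onOmxIdle() override { return binder::Status::ok(); }

    // OMX_StateLoaded: the component is shutting down, tear everything down.
    binder::Status onOmxLoaded() override { return binder::Status::ok(); }

    // A new codec input buffer is available for filling.
    binder::Status onInputBufferAdded(int32_t bufferID) override {
        (void)bufferID;
        return binder::Status::ok();
    }

    // A previously submitted buffer was consumed; wait on the fence before reusing it.
    binder::Status onInputBufferEmptied(
            int32_t bufferID, const OMXFenceParcelable& fenceParcel) override {
        (void)bufferID; (void)fenceParcel;
        return binder::Status::ok();
    }
};

}  // namespace android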
diff --git a/media/libmedia/aidl/android/IOMXNode.aidl b/media/libmedia/aidl/android/IOMXNode.aidl
new file mode 100644
index 0000000..ec87fd2
--- /dev/null
+++ b/media/libmedia/aidl/android/IOMXNode.aidl
@@ -0,0 +1,22 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android;
+
+/** @hide */
+interface IOMXNode {
+ // Stub for manual implementation
+}
diff --git a/media/libmedia/aidl/android/IOMXNode.h b/media/libmedia/aidl/android/IOMXNode.h
new file mode 100644
index 0000000..7b17614
--- /dev/null
+++ b/media/libmedia/aidl/android/IOMXNode.h
@@ -0,0 +1,17 @@
+/*
+ * Copyright (c) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/IOMX.h>
diff --git a/media/libmedia/aidl/android/OMXFenceParcelable.aidl b/media/libmedia/aidl/android/OMXFenceParcelable.aidl
new file mode 100644
index 0000000..6d517e8
--- /dev/null
+++ b/media/libmedia/aidl/android/OMXFenceParcelable.aidl
@@ -0,0 +1,20 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android;
+
+/** @hide */
+parcelable OMXFenceParcelable cpp_header "media/OMXFenceParcelable.h";
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index fbe749c..846a24c 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -55,7 +55,9 @@
mStreamType = AUDIO_STREAM_MUSIC;
mAudioAttributesParcel = NULL;
mCurrentPosition = -1;
+ mCurrentSeekPrecise = false;
mSeekPosition = -1;
+ mSeekPrecise = false;
mCurrentState = MEDIA_PLAYER_IDLE;
mPrepareSync = false;
mPrepareStatus = NO_ERROR;
@@ -100,7 +102,9 @@
void MediaPlayer::clear_l()
{
mCurrentPosition = -1;
+ mCurrentSeekPrecise = false;
mSeekPosition = -1;
+ mSeekPrecise = false;
mVideoWidth = mVideoHeight = 0;
mRetransmitEndpointValid = false;
}
@@ -508,9 +512,9 @@
return getDuration_l(msec);
}
-status_t MediaPlayer::seekTo_l(int msec)
+status_t MediaPlayer::seekTo_l(int msec, bool precise)
{
- ALOGV("seekTo %d", msec);
+ ALOGV("seekTo (%d, %d)", msec, precise);
if ((mPlayer != 0) && ( mCurrentState & ( MEDIA_PLAYER_STARTED | MEDIA_PLAYER_PREPARED |
MEDIA_PLAYER_PAUSED | MEDIA_PLAYER_PLAYBACK_COMPLETE) ) ) {
if ( msec < 0 ) {
@@ -537,12 +541,14 @@
// cache duration
mCurrentPosition = msec;
+ mCurrentSeekPrecise = precise;
if (mSeekPosition < 0) {
mSeekPosition = msec;
- return mPlayer->seekTo(msec);
+ mSeekPrecise = precise;
+ return mPlayer->seekTo(msec, precise);
}
else {
- ALOGV("Seek in progress - queue up seekTo[%d]", msec);
+ ALOGV("Seek in progress - queue up seekTo[%d, %d]", msec, precise);
return NO_ERROR;
}
}
@@ -551,11 +557,11 @@
return INVALID_OPERATION;
}
-status_t MediaPlayer::seekTo(int msec)
+status_t MediaPlayer::seekTo(int msec, bool precise)
{
mLockThreadId = getThreadId();
Mutex::Autolock _l(mLock);
- status_t result = seekTo_l(msec);
+ status_t result = seekTo_l(msec, precise);
mLockThreadId = 0;
return result;
@@ -869,14 +875,16 @@
break;
case MEDIA_SEEK_COMPLETE:
ALOGV("Received seek complete");
- if (mSeekPosition != mCurrentPosition) {
- ALOGV("Executing queued seekTo(%d)", mSeekPosition);
+ if (mSeekPosition != mCurrentPosition || (!mSeekPrecise && mCurrentSeekPrecise)) {
+ ALOGV("Executing queued seekTo(%d, %d)", mCurrentPosition, mCurrentSeekPrecise);
mSeekPosition = -1;
- seekTo_l(mCurrentPosition);
+ mSeekPrecise = false;
+ seekTo_l(mCurrentPosition, mCurrentSeekPrecise);
}
else {
ALOGV("All seeks complete - return to regularly scheduled program");
mCurrentPosition = mSeekPosition = -1;
+ mCurrentSeekPrecise = mSeekPrecise = false;
}
break;
case MEDIA_BUFFERING_UPDATE:
diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp
index 59c077a..6eb208c 100644
--- a/media/libmedia/mediarecorder.cpp
+++ b/media/libmedia/mediarecorder.cpp
@@ -361,7 +361,7 @@
return INVALID_OPERATION;
}
- return mMediaRecorder->setInputSurface(surface->getBufferConsumer());
+ return mMediaRecorder->setInputSurface(surface);
}
status_t MediaRecorder::setVideoFrameRate(int frames_per_second)
diff --git a/media/libmediaplayerservice/Android.mk b/media/libmediaplayerservice/Android.mk
index 93064c3..1786e6b 100644
--- a/media/libmediaplayerservice/Android.mk
+++ b/media/libmediaplayerservice/Android.mk
@@ -55,7 +55,6 @@
$(TOP)/external/tremolo/Tremolo \
LOCAL_CFLAGS += -Werror -Wno-error=deprecated-declarations -Wall
-LOCAL_CLANG := true
LOCAL_MODULE:= libmediaplayerservice
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 95c91d1..3ff9d98 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -684,10 +684,18 @@
sp<IServiceManager> sm = defaultServiceManager();
sp<IBinder> binder = sm->getService(String16("media.extractor"));
+ if (binder == NULL) {
+ ALOGE("extractor service not available");
+ return NULL;
+ }
mExtractorDeathListener = new ServiceDeathNotifier(binder, p, MEDIAEXTRACTOR_PROCESS_DEATH);
binder->linkToDeath(mExtractorDeathListener);
binder = sm->getService(String16("media.codec"));
+ if (binder == NULL) {
+ ALOGE("codec service not available");
+ return NULL;
+ }
mCodecDeathListener = new ServiceDeathNotifier(binder, p, MEDIACODEC_PROCESS_DEATH);
binder->linkToDeath(mCodecDeathListener);
@@ -1113,12 +1121,12 @@
return OK;
}
-status_t MediaPlayerService::Client::seekTo(int msec)
+status_t MediaPlayerService::Client::seekTo(int msec, bool precise)
{
ALOGV("[%d] seekTo(%d)", mConnId, msec);
sp<MediaPlayerBase> p = getPlayer();
if (p == 0) return UNKNOWN_ERROR;
- return p->seekTo(msec);
+ return p->seekTo(msec, precise);
}
status_t MediaPlayerService::Client::reset()
@@ -1513,57 +1521,45 @@
}
uint32_t numFramesPlayed;
- int64_t numFramesPlayedAt;
+ int64_t numFramesPlayedAtUs;
AudioTimestamp ts;
- static const int64_t kStaleTimestamp100ms = 100000;
status_t res = mTrack->getTimestamp(ts);
if (res == OK) { // case 1: mixing audio tracks and offloaded tracks.
numFramesPlayed = ts.mPosition;
- numFramesPlayedAt = ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000;
- const int64_t timestampAge = nowUs - numFramesPlayedAt;
- if (timestampAge > kStaleTimestamp100ms) {
- // This is an audio FIXME.
- // getTimestamp returns a timestamp which may come from audio mixing threads.
- // After pausing, the MixerThread may go idle, thus the mTime estimate may
- // become stale. Assuming that the MixerThread runs 20ms, with FastMixer at 5ms,
- // the max latency should be about 25ms with an average around 12ms (to be verified).
- // For safety we use 100ms.
- ALOGV("getTimestamp: returned stale timestamp nowUs(%lld) numFramesPlayedAt(%lld)",
- (long long)nowUs, (long long)numFramesPlayedAt);
- numFramesPlayedAt = nowUs - kStaleTimestamp100ms;
- }
- //ALOGD("getTimestamp: OK %d %lld", numFramesPlayed, (long long)numFramesPlayedAt);
+ numFramesPlayedAtUs = ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000;
+ //ALOGD("getTimestamp: OK %d %lld", numFramesPlayed, (long long)numFramesPlayedAtUs);
} else if (res == WOULD_BLOCK) { // case 2: transitory state on start of a new track
numFramesPlayed = 0;
- numFramesPlayedAt = nowUs;
+ numFramesPlayedAtUs = nowUs;
//ALOGD("getTimestamp: WOULD_BLOCK %d %lld",
- // numFramesPlayed, (long long)numFramesPlayedAt);
+ // numFramesPlayed, (long long)numFramesPlayedAtUs);
} else { // case 3: transitory at new track or audio fast tracks.
res = mTrack->getPosition(&numFramesPlayed);
CHECK_EQ(res, (status_t)OK);
- numFramesPlayedAt = nowUs;
- numFramesPlayedAt += 1000LL * mTrack->latency() / 2; /* XXX */
- //ALOGD("getPosition: %u %lld", numFramesPlayed, (long long)numFramesPlayedAt);
+ numFramesPlayedAtUs = nowUs;
+ numFramesPlayedAtUs += 1000LL * mTrack->latency() / 2; /* XXX */
+ //ALOGD("getPosition: %u %lld", numFramesPlayed, (long long)numFramesPlayedAtUs);
}
// CHECK_EQ(numFramesPlayed & (1 << 31), 0); // can't be negative until 12.4 hrs, test
// TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
int64_t durationUs = (int64_t)((int32_t)numFramesPlayed * 1000000LL / mSampleRateHz)
- + nowUs - numFramesPlayedAt;
+ + nowUs - numFramesPlayedAtUs;
if (durationUs < 0) {
// Occurs when numFramesPlayed position is very small and the following:
// (1) In case 1, the time nowUs is computed before getTimestamp() is called and
- // numFramesPlayedAt is greater than nowUs by time more than numFramesPlayed.
+ // numFramesPlayedAtUs is greater than nowUs by time more than numFramesPlayed.
// (2) In case 3, using getPosition and adding mAudioSink->latency() to
- // numFramesPlayedAt, by a time amount greater than numFramesPlayed.
+ // numFramesPlayedAtUs, by a time amount greater than numFramesPlayed.
//
// Both of these are transitory conditions.
ALOGV("getPlayedOutDurationUs: negative duration %lld set to zero", (long long)durationUs);
durationUs = 0;
}
ALOGV("getPlayedOutDurationUs(%lld) nowUs(%lld) frames(%u) framesAt(%lld)",
- (long long)durationUs, (long long)nowUs, numFramesPlayed, (long long)numFramesPlayedAt);
+ (long long)durationUs, (long long)nowUs,
+ numFramesPlayed, (long long)numFramesPlayedAtUs);
return durationUs;
}
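
A rough illustration of the extrapolation performed above, with invented numbers; the standalone function is a sketch of the same arithmetic, not the actual implementation:

// Played frames are converted to microseconds at the sink sample rate, then
// extrapolated from the time the frame count was observed to "now".
int64_t playedOutDurationUs(uint32_t numFramesPlayed, uint32_t sampleRateHz,
                            int64_t nowUs, int64_t numFramesPlayedAtUs) {
    int64_t durationUs = (int64_t)numFramesPlayed * 1000000LL / sampleRateHz
            + nowUs - numFramesPlayedAtUs;
    return durationUs < 0 ? 0 : durationUs;  // clamp transient negative results, as above
}
// Example: 48000 frames reported at t = 1,000,000 us on a 48 kHz sink, now = 1,020,000 us
//   => 1,000,000 + (1,020,000 - 1,000,000) = 1,020,000 us played out.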
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 601b046..ef82b48 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -298,7 +298,7 @@
virtual status_t setSyncSettings(const AVSyncSettings& rate, float videoFpsHint);
virtual status_t getSyncSettings(AVSyncSettings* rate /* nonnull */,
float* videoFps /* nonnull */);
- virtual status_t seekTo(int msec);
+ virtual status_t seekTo(int msec, bool precise = false);
virtual status_t getCurrentPosition(int* msec);
virtual status_t getDuration(int* msec);
virtual status_t reset();
diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp
index d011d70..94ceae4 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.cpp
+++ b/media/libmediaplayerservice/MediaRecorderClient.cpp
@@ -52,7 +52,7 @@
return ok;
}
-status_t MediaRecorderClient::setInputSurface(const sp<IGraphicBufferConsumer>& surface)
+status_t MediaRecorderClient::setInputSurface(const sp<PersistentSurface>& surface)
{
ALOGV("setInputSurface");
Mutex::Autolock lock(mLock);
diff --git a/media/libmediaplayerservice/MediaRecorderClient.h b/media/libmediaplayerservice/MediaRecorderClient.h
index eceb653..12656cf 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.h
+++ b/media/libmediaplayerservice/MediaRecorderClient.h
@@ -25,7 +25,6 @@
class MediaRecorderBase;
class MediaPlayerService;
class ICameraRecordingProxy;
-class IGraphicBufferProducer;
class MediaRecorderClient : public BnMediaRecorder
{
@@ -73,7 +72,7 @@
virtual status_t close();
virtual status_t release();
virtual status_t dump(int fd, const Vector<String16>& args);
- virtual status_t setInputSurface(const sp<IGraphicBufferConsumer>& surface);
+ virtual status_t setInputSurface(const sp<PersistentSurface>& surface);
virtual sp<IGraphicBufferProducer> querySurfaceMediaSource();
private:
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index cdb0a7b..279bc86 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -45,6 +45,7 @@
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/MediaCodecSource.h>
+#include <media/stagefright/PersistentSurface.h>
#include <media/MediaProfiles.h>
#include <camera/CameraParameters.h>
@@ -248,7 +249,7 @@
}
status_t StagefrightRecorder::setInputSurface(
- const sp<IGraphicBufferConsumer>& surface) {
+ const sp<PersistentSurface>& surface) {
mPersistentSurface = surface;
return OK;
@@ -884,7 +885,10 @@
case OUTPUT_FORMAT_RTP_AVP:
case OUTPUT_FORMAT_MPEG2TS:
{
- status = mWriter->start();
+ sp<MetaData> meta = new MetaData;
+ int64_t startTimeUs = systemTime() / 1000;
+ meta->setInt64(kKeyTime, startTimeUs);
+ status = mWriter->start(meta.get());
break;
}
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index 4dbd039..4c2e65c 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -38,9 +38,6 @@
class MetaData;
struct AudioSource;
class MediaProfiles;
-class IGraphicBufferConsumer;
-class IGraphicBufferProducer;
-class SurfaceMediaSource;
struct ALooper;
struct StagefrightRecorder : public MediaRecorderBase {
@@ -57,7 +54,7 @@
virtual status_t setVideoFrameRate(int frames_per_second);
virtual status_t setCamera(const sp<hardware::ICamera>& camera, const sp<ICameraRecordingProxy>& proxy);
virtual status_t setPreviewSurface(const sp<IGraphicBufferProducer>& surface);
- virtual status_t setInputSurface(const sp<IGraphicBufferConsumer>& surface);
+ virtual status_t setInputSurface(const sp<PersistentSurface>& surface);
virtual status_t setOutputFile(int fd, int64_t offset, int64_t length);
virtual status_t setParameters(const String8 &params);
virtual status_t setListener(const sp<IMediaRecorderClient> &listener);
@@ -78,7 +75,7 @@
sp<hardware::ICamera> mCamera;
sp<ICameraRecordingProxy> mCameraProxy;
sp<IGraphicBufferProducer> mPreviewSurface;
- sp<IGraphicBufferConsumer> mPersistentSurface;
+ sp<PersistentSurface> mPersistentSurface;
sp<IMediaRecorderClient> mListener;
String16 mClientName;
uid_t mClientUid;
diff --git a/media/libmediaplayerservice/TestPlayerStub.h b/media/libmediaplayerservice/TestPlayerStub.h
index 55bf2c8..c0d6a59 100644
--- a/media/libmediaplayerservice/TestPlayerStub.h
+++ b/media/libmediaplayerservice/TestPlayerStub.h
@@ -87,7 +87,7 @@
virtual status_t stop() {return mPlayer->stop();}
virtual status_t pause() {return mPlayer->pause();}
virtual bool isPlaying() {return mPlayer->isPlaying();}
- virtual status_t seekTo(int msec) {return mPlayer->seekTo(msec);}
+ virtual status_t seekTo(int msec, bool precise = false) {return mPlayer->seekTo(msec, precise);}
virtual status_t getCurrentPosition(int *p) {
return mPlayer->getCurrentPosition(p);
}
diff --git a/media/libmediaplayerservice/nuplayer/Android.mk b/media/libmediaplayerservice/nuplayer/Android.mk
index 3ea2159..a0e633c 100644
--- a/media/libmediaplayerservice/nuplayer/Android.mk
+++ b/media/libmediaplayerservice/nuplayer/Android.mk
@@ -32,8 +32,6 @@
LOCAL_CFLAGS += -DENABLE_STAGEFRIGHT_EXPERIMENTS
endif
-LOCAL_CLANG := true
-
LOCAL_SHARED_LIBRARIES := libmedia
LOCAL_MODULE:= libstagefright_nuplayer
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index 6b88404..cdbc2f8 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -665,8 +665,8 @@
} else {
timeUs = mVideoLastDequeueTimeUs;
}
- readBuffer(trackType, timeUs, &actualTimeUs, formatChange);
- readBuffer(counterpartType, -1, NULL, formatChange);
+ readBuffer(trackType, timeUs, false /* precise */, &actualTimeUs, formatChange);
+ readBuffer(counterpartType, -1, false /* precise */, NULL, !formatChange);
ALOGV("timeUs %lld actualTimeUs %lld", (long long)timeUs, (long long)actualTimeUs);
break;
@@ -759,7 +759,7 @@
CHECK(msg->findInt64("timeUs", &timeUs));
int64_t subTimeUs;
- readBuffer(type, timeUs, &subTimeUs);
+ readBuffer(type, timeUs, false /* precise */, &subTimeUs);
int64_t delayUs = subTimeUs - timeUs;
if (msg->what() == kWhatFetchSubtitleData) {
@@ -790,7 +790,7 @@
}
int64_t nextSubTimeUs;
- readBuffer(type, -1, &nextSubTimeUs);
+ readBuffer(type, -1, false /* precise */, &nextSubTimeUs);
sp<ABuffer> buffer;
status_t dequeueStatus = packets->dequeueAccessUnit(&buffer);
@@ -1186,9 +1186,10 @@
return INVALID_OPERATION;
}
-status_t NuPlayer::GenericSource::seekTo(int64_t seekTimeUs) {
+status_t NuPlayer::GenericSource::seekTo(int64_t seekTimeUs, bool precise) {
sp<AMessage> msg = new AMessage(kWhatSeek, this);
msg->setInt64("seekTimeUs", seekTimeUs);
+ msg->setInt32("precise", precise);
sp<AMessage> response;
status_t err = msg->postAndAwaitResponse(&response);
@@ -1201,10 +1202,12 @@
void NuPlayer::GenericSource::onSeek(const sp<AMessage>& msg) {
int64_t seekTimeUs;
+ int32_t precise;
CHECK(msg->findInt64("seekTimeUs", &seekTimeUs));
+ CHECK(msg->findInt32("precise", &precise));
sp<AMessage> response = new AMessage;
- status_t err = doSeek(seekTimeUs);
+ status_t err = doSeek(seekTimeUs, precise);
response->setInt32("err", err);
sp<AReplyToken> replyID;
@@ -1212,7 +1215,7 @@
response->postReply(replyID);
}
-status_t NuPlayer::GenericSource::doSeek(int64_t seekTimeUs) {
+status_t NuPlayer::GenericSource::doSeek(int64_t seekTimeUs, bool precise) {
mBufferingMonitor->updateDequeuedBufferTime(-1ll);
// If the Widevine source is stopped, do not attempt to read any
@@ -1222,10 +1225,12 @@
}
if (mVideoTrack.mSource != NULL) {
int64_t actualTimeUs;
- readBuffer(MEDIA_TRACK_TYPE_VIDEO, seekTimeUs, &actualTimeUs);
+ readBuffer(MEDIA_TRACK_TYPE_VIDEO, seekTimeUs, precise, &actualTimeUs);
- seekTimeUs = actualTimeUs;
- mVideoLastDequeueTimeUs = seekTimeUs;
+ if (!precise) {
+ seekTimeUs = actualTimeUs;
+ }
+ mVideoLastDequeueTimeUs = actualTimeUs;
}
if (mAudioTrack.mSource != NULL) {
@@ -1249,9 +1254,7 @@
sp<ABuffer> NuPlayer::GenericSource::mediaBufferToABuffer(
MediaBuffer* mb,
- media_track_type trackType,
- int64_t /* seekTimeUs */,
- int64_t *actualTimeUs) {
+ media_track_type trackType) {
bool audio = trackType == MEDIA_TRACK_TYPE_AUDIO;
size_t outLength = mb->range_length();
@@ -1288,16 +1291,6 @@
CHECK(mb->meta_data()->findInt64(kKeyTime, &timeUs));
meta->setInt64("timeUs", timeUs);
-#if 0
- // Temporarily disable pre-roll till we have a full solution to handle
- // both single seek and continous seek gracefully.
- if (seekTimeUs > timeUs) {
- sp<AMessage> extra = new AMessage;
- extra->setInt64("resume-at-mediaTimeUs", seekTimeUs);
- meta->setMessage("extra", extra);
- }
-#endif
-
if (trackType == MEDIA_TRACK_TYPE_VIDEO) {
int32_t layerId;
if (mb->meta_data()->findInt32(kKeyTemporalLayerId, &layerId)) {
@@ -1337,10 +1330,6 @@
meta->setBuffer("mpegUserData", mpegUserData);
}
- if (actualTimeUs) {
- *actualTimeUs = timeUs;
- }
-
mb->release();
mb = NULL;
@@ -1372,7 +1361,8 @@
}
void NuPlayer::GenericSource::readBuffer(
- media_track_type trackType, int64_t seekTimeUs, int64_t *actualTimeUs, bool formatChange) {
+ media_track_type trackType, int64_t seekTimeUs, bool precise,
+ int64_t *actualTimeUs, bool formatChange) {
// Do not read data if Widevine source is stopped
if (mStopRead) {
return;
@@ -1465,9 +1455,19 @@
queueDiscontinuityIfNeeded(seeking, formatChange, trackType, track);
- sp<ABuffer> buffer = mediaBufferToABuffer(
- mbuf, trackType, seekTimeUs,
- numBuffers == 0 ? actualTimeUs : NULL);
+ sp<ABuffer> buffer = mediaBufferToABuffer(mbuf, trackType);
+ if (numBuffers == 0 && actualTimeUs != nullptr) {
+ *actualTimeUs = timeUs;
+ }
+ if (seeking && buffer != nullptr) {
+ sp<AMessage> meta = buffer->meta();
+ if (meta != nullptr && precise && seekTimeUs > timeUs) {
+ sp<AMessage> extra = new AMessage;
+ extra->setInt64("resume-at-mediaTimeUs", seekTimeUs);
+ meta->setMessage("extra", extra);
+ }
+ }
+
track->mPackets->queueAccessUnit(buffer);
formatChange = false;
seeking = false;
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.h b/media/libmediaplayerservice/nuplayer/GenericSource.h
index 0957778..368d11c 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.h
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.h
@@ -71,7 +71,7 @@
virtual sp<AMessage> getTrackInfo(size_t trackIndex) const;
virtual ssize_t getSelectedTrack(media_track_type type) const;
virtual status_t selectTrack(size_t trackIndex, bool select, int64_t timeUs);
- virtual status_t seekTo(int64_t seekTimeUs);
+ virtual status_t seekTo(int64_t seekTimeUs, bool precise = false) override;
virtual status_t setBuffers(bool audio, Vector<MediaBuffer *> &buffers);
@@ -258,7 +258,7 @@
status_t doSelectTrack(size_t trackIndex, bool select, int64_t timeUs);
void onSeek(const sp<AMessage>& msg);
- status_t doSeek(int64_t seekTimeUs);
+ status_t doSeek(int64_t seekTimeUs, bool precise);
void onPrepareAsync();
@@ -276,15 +276,19 @@
sp<ABuffer> mediaBufferToABuffer(
MediaBuffer *mbuf,
- media_track_type trackType,
- int64_t seekTimeUs,
- int64_t *actualTimeUs = NULL);
+ media_track_type trackType);
void postReadBuffer(media_track_type trackType);
void onReadBuffer(const sp<AMessage>& msg);
+ // |precise| is a modifier of |seekTimeUs|.
+ // When |precise| is true, the buffer read includes metadata telling the decoder to skip
+ // rendering any buffer whose timestamp is earlier than |seekTimeUs|.
+ // When |precise| is false, that metadata is omitted so the seek can complete faster.
void readBuffer(
media_track_type trackType,
- int64_t seekTimeUs = -1ll, int64_t *actualTimeUs = NULL, bool formatChange = false);
+ int64_t seekTimeUs = -1ll, bool precise = false,
+ int64_t *actualTimeUs = NULL, bool formatChange = false);
void queueDiscontinuityIfNeeded(
bool seeking, bool formatChange, media_track_type trackType, Track *track);
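
Put differently, the new flag gives callers of the Source interface two seek flavors; the helper below is a hypothetical illustration (assuming NuPlayerSource.h is included), not part of this change:

// "source" stands for any NuPlayer::Source implementation.
void seekFastOrPrecise(const sp<NuPlayer::Source>& source, int64_t seekTimeUs, bool exact) {
    if (exact) {
        // Keep the exact target; frames before it are decoded but flagged to be
        // dropped at render time via "resume-at-mediaTimeUs".
        source->seekTo(seekTimeUs, true /* precise */);
    } else {
        // Default: snap to the nearest sync sample, which resumes playback fastest.
        source->seekTo(seekTimeUs);
    }
}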
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
index 5027e01..1a6a233 100644
--- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
@@ -214,8 +214,8 @@
return (err == OK || err == BAD_VALUE) ? (status_t)OK : err;
}
-status_t NuPlayer::HTTPLiveSource::seekTo(int64_t seekTimeUs) {
- return mLiveSession->seekTo(seekTimeUs);
+status_t NuPlayer::HTTPLiveSource::seekTo(int64_t seekTimeUs, bool precise) {
+ return mLiveSession->seekTo(seekTimeUs, precise);
}
void NuPlayer::HTTPLiveSource::pollForRawData(
@@ -317,8 +317,9 @@
notifyVideoSizeChanged();
}
- uint32_t flags = FLAG_CAN_PAUSE;
+ uint32_t flags = 0;
if (mLiveSession->isSeekable()) {
+ flags |= FLAG_CAN_PAUSE;
flags |= FLAG_CAN_SEEK;
flags |= FLAG_CAN_SEEK_BACKWARD;
flags |= FLAG_CAN_SEEK_FORWARD;
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
index 574937d..16c3c37 100644
--- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
+++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
@@ -47,7 +47,7 @@
virtual sp<AMessage> getTrackInfo(size_t trackIndex) const;
virtual ssize_t getSelectedTrack(media_track_type /* type */) const;
virtual status_t selectTrack(size_t trackIndex, bool select, int64_t timeUs);
- virtual status_t seekTo(int64_t seekTimeUs);
+ virtual status_t seekTo(int64_t seekTimeUs, bool precise = false) override;
protected:
virtual ~HTTPLiveSource();
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 4e16fba..90b7b7f 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -39,6 +39,7 @@
#include <media/AudioResamplerPublic.h>
#include <media/AVSyncSettings.h>
+#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/foundation/ABuffer.h>
@@ -69,16 +70,18 @@
};
struct NuPlayer::SeekAction : public Action {
- explicit SeekAction(int64_t seekTimeUs)
- : mSeekTimeUs(seekTimeUs) {
+ explicit SeekAction(int64_t seekTimeUs, bool precise)
+ : mSeekTimeUs(seekTimeUs),
+ mPrecise(precise) {
}
virtual void execute(NuPlayer *player) {
- player->performSeek(mSeekTimeUs);
+ player->performSeek(mSeekTimeUs, mPrecise);
}
private:
int64_t mSeekTimeUs;
+ bool mPrecise;
DISALLOW_EVIL_CONSTRUCTORS(SeekAction);
};
@@ -419,9 +422,10 @@
(new AMessage(kWhatReset, this))->post();
}
-void NuPlayer::seekToAsync(int64_t seekTimeUs, bool needNotify) {
+void NuPlayer::seekToAsync(int64_t seekTimeUs, bool precise, bool needNotify) {
sp<AMessage> msg = new AMessage(kWhatSeek, this);
msg->setInt64("seekTimeUs", seekTimeUs);
+ msg->setInt32("precise", precise);
msg->setInt32("needNotify", needNotify);
msg->post();
}
@@ -680,7 +684,7 @@
int64_t currentPositionUs = 0;
if (getCurrentPosition(&currentPositionUs) == OK) {
mDeferredActions.push_back(
- new SeekAction(currentPositionUs));
+ new SeekAction(currentPositionUs, false /* precise */));
}
}
@@ -1196,12 +1200,14 @@
case kWhatSeek:
{
int64_t seekTimeUs;
+ int32_t precise;
int32_t needNotify;
CHECK(msg->findInt64("seekTimeUs", &seekTimeUs));
+ CHECK(msg->findInt32("precise", &precise));
CHECK(msg->findInt32("needNotify", &needNotify));
- ALOGV("kWhatSeek seekTimeUs=%lld us, needNotify=%d",
- (long long)seekTimeUs, needNotify);
+ ALOGV("kWhatSeek seekTimeUs=%lld us, precise=%d, needNotify=%d",
+ (long long)seekTimeUs, precise, needNotify);
if (!mStarted) {
// Seek before the player is started. In order to preview video,
@@ -1209,7 +1215,7 @@
// only once if needed. After the player is started, any seek
// operation will go through normal path.
// Audio-only cases are handled separately.
- onStart(seekTimeUs);
+ onStart(seekTimeUs, precise);
if (mStarted) {
onPause();
mPausedByClient = true;
@@ -1225,7 +1231,7 @@
FLUSH_CMD_FLUSH /* video */));
mDeferredActions.push_back(
- new SeekAction(seekTimeUs));
+ new SeekAction(seekTimeUs, precise));
// After a flush without shutdown, decoder is paused.
// Don't resume it until source seek is done, otherwise it could
@@ -1314,13 +1320,13 @@
return OK;
}
-void NuPlayer::onStart(int64_t startPositionUs) {
+void NuPlayer::onStart(int64_t startPositionUs, bool precise) {
if (!mSourceStarted) {
mSourceStarted = true;
mSource->start();
}
if (startPositionUs > 0) {
- performSeek(startPositionUs);
+ performSeek(startPositionUs, precise);
if (mSource->getFormat(false /* audio */) == NULL) {
return;
}
@@ -1536,7 +1542,7 @@
mRenderer->flush(false /* audio */, false /* notifyComplete */);
}
- performSeek(currentPositionUs);
+ performSeek(currentPositionUs, false /* precise */);
if (forceNonOffload) {
mRenderer->signalDisableOffloadAudio();
@@ -1640,7 +1646,7 @@
} else {
mSource->setOffloadAudio(false /* offload */);
- *decoder = new Decoder(notify, mSource, mPID, mRenderer);
+ *decoder = new Decoder(notify, mSource, mPID, mUID, mRenderer);
}
} else {
sp<AMessage> notify = new AMessage(kWhatVideoNotify, this);
@@ -1648,7 +1654,7 @@
notify->setInt32("generation", mVideoDecoderGeneration);
*decoder = new Decoder(
- notify, mSource, mPID, mRenderer, mSurface, mCCDecoder);
+ notify, mSource, mPID, mUID, mRenderer, mSurface, mCCDecoder);
// enable FRC if high-quality AV sync is requested, even if not
// directly queuing to display, as this will even improve textureview
@@ -1666,12 +1672,12 @@
// allocate buffers to decrypt widevine source buffers
if (!audio && (mSourceFlags & Source::FLAG_SECURE)) {
- Vector<sp<ABuffer> > inputBufs;
+ Vector<sp<MediaCodecBuffer> > inputBufs;
CHECK_EQ((*decoder)->getInputBuffers(&inputBufs), (status_t)OK);
Vector<MediaBuffer *> mediaBufs;
for (size_t i = 0; i < inputBufs.size(); i++) {
- const sp<ABuffer> &buffer = inputBufs[i];
+ const sp<MediaCodecBuffer> &buffer = inputBufs[i];
MediaBuffer *mbuf = new MediaBuffer(buffer->data(), buffer->size());
mediaBufs.push(mbuf);
}
@@ -1755,6 +1761,20 @@
displayWidth = (displayWidth * sarWidth) / sarHeight;
ALOGV("display dimensions %d x %d", displayWidth, displayHeight);
+ } else {
+ int32_t width, height;
+ if (inputFormat->findInt32("display-width", &width)
+ && inputFormat->findInt32("display-height", &height)
+ && width > 0 && height > 0
+ && displayWidth > 0 && displayHeight > 0) {
+ if (displayHeight * (int64_t)width / height > (int64_t)displayWidth) {
+ displayHeight = (int32_t)(displayWidth * (int64_t)height / width);
+ } else {
+ displayWidth = (int32_t)(displayHeight * (int64_t)width / height);
+ }
+ ALOGV("Video display width and height are overridden to %d x %d",
+ displayWidth, displayHeight);
+ }
}
int32_t rotationDegrees;
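
A worked example of the override just added, with invented numbers:

// Suppose the decoder reports displayWidth = 1920, displayHeight = 1080 while the
// container carries display-width = 4, display-height = 3 (a 4:3 display aspect).
// displayHeight * width / height = 1080 * 4 / 3 = 1440, which is not greater than 1920,
// so the else branch runs: displayWidth becomes 1440 and displayHeight stays 1080,
// preserving the 4:3 aspect ratio within the decoded bounds.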
@@ -1979,10 +1999,9 @@
}
}
-void NuPlayer::performSeek(int64_t seekTimeUs) {
- ALOGV("performSeek seekTimeUs=%lld us (%.2f secs)",
- (long long)seekTimeUs,
- seekTimeUs / 1E6);
+void NuPlayer::performSeek(int64_t seekTimeUs, bool precise) {
+ ALOGV("performSeek seekTimeUs=%lld us (%.2f secs), precise=%d",
+ (long long)seekTimeUs, seekTimeUs / 1E6, precise);
if (mSource == NULL) {
// This happens when reset occurs right before the loop mode
@@ -1993,7 +2012,7 @@
return;
}
mPreviousSeekTimeUs = seekTimeUs;
- mSource->seekTo(seekTimeUs);
+ mSource->seekTo(seekTimeUs, precise);
++mTimedTextGeneration;
// everything's flushed, continue playback.
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index a002f6f..3ae2ada 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -70,7 +70,7 @@
// Will notify the driver through "notifySeekComplete" once finished
// and needNotify is true.
- void seekToAsync(int64_t seekTimeUs, bool needNotify = false);
+ void seekToAsync(int64_t seekTimeUs, bool precise = false, bool needNotify = false);
status_t setVideoScalingMode(int32_t mode);
status_t getTrackInfo(Parcel* reply) const;
@@ -245,7 +245,7 @@
void handleFlushComplete(bool audio, bool isDecoder);
void finishFlushIfPossible();
- void onStart(int64_t startPositionUs = -1);
+ void onStart(int64_t startPositionUs = -1, bool precise = false);
void onResume();
void onPause();
@@ -263,7 +263,7 @@
void processDeferredActions();
- void performSeek(int64_t seekTimeUs);
+ void performSeek(int64_t seekTimeUs, bool precise);
void performDecoderFlush(FlushCommand audio, FlushCommand video);
void performReset();
void performScanSources();
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.cpp
index 978d360..73b07bb 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.cpp
@@ -72,37 +72,37 @@
if (cc->mData1 >= 0x20 && cc->mData1 <= 0x7f) {
// 2 basic chars
- sprintf(tmp, "[%d]Basic: %c %c", cc->mType, cc->mData1, cc->mData2);
+ snprintf(tmp, sizeof(tmp), "[%d]Basic: %c %c", cc->mType, cc->mData1, cc->mData2);
} else if ((cc->mData1 == 0x11 || cc->mData1 == 0x19)
&& cc->mData2 >= 0x30 && cc->mData2 <= 0x3f) {
// 1 special char
- sprintf(tmp, "[%d]Special: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ snprintf(tmp, sizeof(tmp), "[%d]Special: %02x %02x", cc->mType, cc->mData1, cc->mData2);
} else if ((cc->mData1 == 0x12 || cc->mData1 == 0x1A)
&& cc->mData2 >= 0x20 && cc->mData2 <= 0x3f){
// 1 Spanish/French char
- sprintf(tmp, "[%d]Spanish: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ snprintf(tmp, sizeof(tmp), "[%d]Spanish: %02x %02x", cc->mType, cc->mData1, cc->mData2);
} else if ((cc->mData1 == 0x13 || cc->mData1 == 0x1B)
&& cc->mData2 >= 0x20 && cc->mData2 <= 0x3f){
// 1 Portuguese/German/Danish char
- sprintf(tmp, "[%d]German: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ snprintf(tmp, sizeof(tmp), "[%d]German: %02x %02x", cc->mType, cc->mData1, cc->mData2);
} else if ((cc->mData1 == 0x11 || cc->mData1 == 0x19)
&& cc->mData2 >= 0x20 && cc->mData2 <= 0x2f){
// Mid-Row Codes (Table 69)
- sprintf(tmp, "[%d]Mid-row: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ snprintf(tmp, sizeof(tmp), "[%d]Mid-row: %02x %02x", cc->mType, cc->mData1, cc->mData2);
} else if (((cc->mData1 == 0x14 || cc->mData1 == 0x1c)
&& cc->mData2 >= 0x20 && cc->mData2 <= 0x2f)
||
((cc->mData1 == 0x17 || cc->mData1 == 0x1f)
&& cc->mData2 >= 0x21 && cc->mData2 <= 0x23)){
// Misc Control Codes (Table 70)
- sprintf(tmp, "[%d]Ctrl: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ snprintf(tmp, sizeof(tmp), "[%d]Ctrl: %02x %02x", cc->mType, cc->mData1, cc->mData2);
} else if ((cc->mData1 & 0x70) == 0x10
&& (cc->mData2 & 0x40) == 0x40
&& ((cc->mData1 & 0x07) || !(cc->mData2 & 0x20)) ) {
// Preamble Address Codes (Table 71)
- sprintf(tmp, "[%d]PAC: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ snprintf(tmp, sizeof(tmp), "[%d]PAC: %02x %02x", cc->mType, cc->mData1, cc->mData2);
} else {
- sprintf(tmp, "[%d]Invalid: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ snprintf(tmp, sizeof(tmp), "[%d]Invalid: %02x %02x", cc->mType, cc->mData1, cc->mData2);
}
if (out.size() > 0) {
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index 594128c..d2452af 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -28,6 +28,7 @@
#include <cutils/properties.h>
#include <media/ICrypto.h>
+#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -57,6 +58,7 @@
const sp<AMessage> &notify,
const sp<Source> &source,
pid_t pid,
+ uid_t uid,
const sp<Renderer> &renderer,
const sp<Surface> &surface,
const sp<CCDecoder> &ccDecoder)
@@ -66,6 +68,7 @@
mRenderer(renderer),
mCCDecoder(ccDecoder),
mPid(pid),
+ mUid(uid),
mSkipRenderingUntilMediaTimeUs(-1ll),
mNumFramesTotal(0ll),
mNumInputFramesDropped(0ll),
@@ -265,7 +268,7 @@
ALOGV("[%s] onConfigure (surface=%p)", mComponentName.c_str(), mSurface.get());
mCodec = MediaCodec::CreateByType(
- mCodecLooper, mime.c_str(), false /* encoder */, NULL /* err */, mPid);
+ mCodecLooper, mime.c_str(), false /* encoder */, NULL /* err */, mPid, mUid);
int32_t secure = 0;
if (format->findInt32("secure", &secure) && secure != 0) {
if (mCodec != NULL) {
@@ -274,7 +277,7 @@
mCodec->release();
ALOGI("[%s] creating", mComponentName.c_str());
mCodec = MediaCodec::CreateByComponentName(
- mCodecLooper, mComponentName.c_str(), NULL /* err */, mPid);
+ mCodecLooper, mComponentName.c_str(), NULL /* err */, mPid, mUid);
}
}
if (mCodec == NULL) {
@@ -417,7 +420,7 @@
}
void NuPlayer::Decoder::onGetInputBuffers(
- Vector<sp<ABuffer> > *dstBuffers) {
+ Vector<sp<MediaCodecBuffer> > *dstBuffers) {
CHECK_EQ((status_t)OK, mCodec->getWidevineLegacyBuffers(dstBuffers));
}
@@ -561,7 +564,7 @@
return false;
}
- sp<ABuffer> buffer;
+ sp<MediaCodecBuffer> buffer;
mCodec->getInputBuffer(index, &buffer);
if (buffer == NULL) {
@@ -628,7 +631,7 @@
int64_t timeUs,
int32_t flags) {
// CHECK_LT(bufferIx, mOutputBuffers.size());
- sp<ABuffer> buffer;
+ sp<MediaCodecBuffer> buffer;
mCodec->getOutputBuffer(index, &buffer);
if (index >= mOutputBuffers.size()) {
@@ -708,6 +711,10 @@
flags = AUDIO_OUTPUT_FLAG_NONE;
}
+ // TODO: This is a temporary fix to flush audio buffers in the renderer. The real
+ // fix should be to wait until all buffers have been rendered normally, then open a
+ // new AudioSink.
+ mRenderer->flush(true /* audio */, false /* notifyComplete */);
status_t err = mRenderer->openAudioSink(
format, false /* offloadOnly */, hasVideo, flags, NULL /* isOffloaed */);
if (err != OK) {
@@ -865,10 +872,11 @@
size_t bufferIx;
CHECK(msg->findSize("buffer-ix", &bufferIx));
CHECK_LT(bufferIx, mInputBuffers.size());
- sp<ABuffer> codecBuffer = mInputBuffers[bufferIx];
+ sp<MediaCodecBuffer> codecBuffer = mInputBuffers[bufferIx];
sp<ABuffer> buffer;
bool hasBuffer = msg->findBuffer("buffer", &buffer);
+ bool needsCopy = true;
// handle widevine classic source - that fills an arbitrary input buffer
MediaBuffer *mediaBuffer = NULL;
@@ -878,7 +886,7 @@
// likely filled another buffer than we requested: adjust buffer index
size_t ix;
for (ix = 0; ix < mInputBuffers.size(); ix++) {
- const sp<ABuffer> &buf = mInputBuffers[ix];
+ const sp<MediaCodecBuffer> &buf = mInputBuffers[ix];
if (buf->data() == mediaBuffer->data()) {
// all input buffers are dequeued on start, hence the check
if (!mInputBufferIsDequeued[ix]) {
@@ -891,11 +899,12 @@
// TRICKY: need buffer for the metadata, so instead, set
// codecBuffer to the same (though incorrect) buffer to
// avoid a memcpy into the codecBuffer
- codecBuffer = buffer;
+ codecBuffer = new MediaCodecBuffer(codecBuffer->format(), buffer);
codecBuffer->setRange(
mediaBuffer->range_offset(),
mediaBuffer->range_length());
bufferIx = ix;
+ needsCopy = false;
break;
}
}
@@ -955,7 +964,7 @@
}
// copy into codec buffer
- if (buffer != codecBuffer) {
+ if (needsCopy) {
if (buffer->size() > codecBuffer->capacity()) {
handleError(ERROR_BUFFER_TOO_SMALL);
mDequeuedInputBuffers.push_back(bufferIx);
@@ -998,7 +1007,7 @@
if (!mIsAudio) {
int64_t timeUs;
- sp<ABuffer> buffer = mOutputBuffers[bufferIx];
+ sp<MediaCodecBuffer> buffer = mOutputBuffers[bufferIx];
buffer->meta()->findInt64("timeUs", &timeUs);
if (mCCDecoder != NULL && mCCDecoder->isSelected()) {
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
index 0c619ed..a576ae5 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
@@ -23,10 +23,13 @@
namespace android {
+class MediaCodecBuffer;
+
struct NuPlayer::Decoder : public DecoderBase {
Decoder(const sp<AMessage> &notify,
const sp<Source> &source,
pid_t pid,
+ uid_t uid,
const sp<Renderer> &renderer = NULL,
const sp<Surface> &surface = NULL,
const sp<CCDecoder> &ccDecoder = NULL);
@@ -44,7 +47,7 @@
virtual void onConfigure(const sp<AMessage> &format);
virtual void onSetParameters(const sp<AMessage> &params);
virtual void onSetRenderer(const sp<Renderer> &renderer);
- virtual void onGetInputBuffers(Vector<sp<ABuffer> > *dstBuffers);
+ virtual void onGetInputBuffers(Vector<sp<MediaCodecBuffer> > *dstBuffers);
virtual void onResume(bool notifyComplete);
virtual void onFlush();
virtual void onShutdown(bool notifyComplete);
@@ -74,8 +77,8 @@
List<sp<AMessage> > mPendingInputMessages;
- Vector<sp<ABuffer> > mInputBuffers;
- Vector<sp<ABuffer> > mOutputBuffers;
+ Vector<sp<MediaCodecBuffer> > mInputBuffers;
+ Vector<sp<MediaCodecBuffer> > mOutputBuffers;
Vector<sp<ABuffer> > mCSDsForCurrentFormat;
Vector<sp<ABuffer> > mCSDsToSubmit;
Vector<bool> mInputBufferIsDequeued;
@@ -83,6 +86,7 @@
Vector<size_t> mDequeuedInputBuffers;
const pid_t mPid;
+ const uid_t mUid;
int64_t mSkipRenderingUntilMediaTimeUs;
int64_t mNumFramesTotal;
int64_t mNumInputFramesDropped;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.cpp
index 04bb61c..9c007ae 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.cpp
@@ -23,6 +23,7 @@
#include "NuPlayerRenderer.h"
+#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -91,7 +92,7 @@
PostAndAwaitResponse(msg, &response);
}
-status_t NuPlayer::DecoderBase::getInputBuffers(Vector<sp<ABuffer> > *buffers) const {
+status_t NuPlayer::DecoderBase::getInputBuffers(Vector<sp<MediaCodecBuffer> > *buffers) const {
sp<AMessage> msg = new AMessage(kWhatGetInputBuffers, this);
msg->setPointer("buffers", buffers);
@@ -170,7 +171,7 @@
sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
- Vector<sp<ABuffer> > *dstBuffers;
+ Vector<sp<MediaCodecBuffer> > *dstBuffers;
CHECK(msg->findPointer("buffers", (void **)&dstBuffers));
onGetInputBuffers(dstBuffers);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h
index 9966144..6f4ead6 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h
@@ -27,6 +27,7 @@
struct ABuffer;
struct MediaCodec;
class MediaBuffer;
+class MediaCodecBuffer;
class Surface;
struct NuPlayer::DecoderBase : public AHandler {
@@ -42,7 +43,7 @@
void setRenderer(const sp<Renderer> &renderer);
virtual status_t setVideoSurface(const sp<Surface> &) { return INVALID_OPERATION; }
- status_t getInputBuffers(Vector<sp<ABuffer> > *dstBuffers) const;
+ status_t getInputBuffers(Vector<sp<MediaCodecBuffer> > *dstBuffers) const;
void signalFlush();
void signalResume(bool notifyComplete);
void initiateShutdown();
@@ -70,7 +71,7 @@
virtual void onConfigure(const sp<AMessage> &format) = 0;
virtual void onSetParameters(const sp<AMessage> &params) = 0;
virtual void onSetRenderer(const sp<Renderer> &renderer) = 0;
- virtual void onGetInputBuffers(Vector<sp<ABuffer> > *dstBuffers) = 0;
+ virtual void onGetInputBuffers(Vector<sp<MediaCodecBuffer> > *dstBuffers) = 0;
virtual void onResume(bool notifyComplete) = 0;
virtual void onFlush() = 0;
virtual void onShutdown(bool notifyComplete) = 0;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp
index f224635..e4767ff 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp
@@ -25,6 +25,7 @@
#include "NuPlayerSource.h"
#include <media/ICrypto.h>
+#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -93,7 +94,7 @@
}
void NuPlayer::DecoderPassThrough::onGetInputBuffers(
- Vector<sp<ABuffer> > * /* dstBuffers */) {
+ Vector<sp<MediaCodecBuffer> > * /* dstBuffers */) {
ALOGE("onGetInputBuffers() called unexpectedly");
}
@@ -319,10 +320,9 @@
int32_t bufferSize = buffer->size();
mCachedBytes += bufferSize;
+ int64_t timeUs = 0;
+ CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
if (mSkipRenderingUntilMediaTimeUs >= 0) {
- int64_t timeUs = 0;
- CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
-
if (timeUs < mSkipRenderingUntilMediaTimeUs) {
ALOGV("[%s] dropping buffer at time %lld as requested.",
mComponentName.c_str(), (long long)timeUs);
@@ -343,7 +343,10 @@
reply->setInt32("generation", mBufferGeneration);
reply->setInt32("size", bufferSize);
- mRenderer->queueBuffer(true /* audio */, buffer, reply);
+ sp<MediaCodecBuffer> mcBuffer = new MediaCodecBuffer(nullptr, buffer);
+ mcBuffer->meta()->setInt64("timeUs", timeUs);
+
+ mRenderer->queueBuffer(true /* audio */, mcBuffer, reply);
++mPendingBuffersToDrain;
ALOGV("onInputBufferFilled: #ToDrain = %zu, cachedBytes = %zu",
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h
index 5850efa..9af25ff 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h
@@ -38,7 +38,7 @@
virtual void onConfigure(const sp<AMessage> &format);
virtual void onSetParameters(const sp<AMessage> &params);
virtual void onSetRenderer(const sp<Renderer> &renderer);
- virtual void onGetInputBuffers(Vector<sp<ABuffer> > *dstBuffers);
+ virtual void onGetInputBuffers(Vector<sp<MediaCodecBuffer> > *dstBuffers);
virtual void onResume(bool notifyComplete);
virtual void onFlush();
virtual void onShutdown(bool notifyComplete);
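The pass-through decoder now hands the renderer a MediaCodecBuffer that wraps the original access unit, copying the "timeUs" entry into the wrapper's meta. A minimal sketch of that wrapping pattern, assuming the frameworks/av headers are on the include path; the helper name wrapAccessUnit is illustrative and not part of the patch:

#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>

using namespace android;

// Hypothetical helper mirroring what onInputBufferFilled() does above.
static sp<MediaCodecBuffer> wrapAccessUnit(const sp<ABuffer> &accessUnit) {
    int64_t timeUs = 0;
    CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
    // Pass-through audio has no codec output format, so the format argument is null here.
    sp<MediaCodecBuffer> wrapped = new MediaCodecBuffer(nullptr /* format */, accessUnit);
    wrapped->meta()->setInt64("timeUs", timeUs);  // the renderer reads the timestamp from meta()
    return wrapped;
}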
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index 0f4dce9..7f287e3 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -208,7 +208,7 @@
mAtEOS = false;
mState = STATE_STOPPED_AND_PREPARING;
mIsAsyncPrepare = false;
- mPlayer->seekToAsync(0, true /* needNotify */);
+ mPlayer->seekToAsync(0, false /* precise */, true /* needNotify */);
while (mState == STATE_STOPPED_AND_PREPARING) {
mCondition.wait(mLock);
}
@@ -233,7 +233,7 @@
mAtEOS = false;
mState = STATE_STOPPED_AND_PREPARING;
mIsAsyncPrepare = true;
- mPlayer->seekToAsync(0, true /* needNotify */);
+ mPlayer->seekToAsync(0, false /* precise */, true /* needNotify */);
return OK;
default:
return INVALID_OPERATION;
@@ -382,8 +382,8 @@
return mPlayer->getSyncSettings(sync, videoFps);
}
-status_t NuPlayerDriver::seekTo(int msec) {
- ALOGD("seekTo(%p) %d ms at state %d", this, msec, mState);
+status_t NuPlayerDriver::seekTo(int msec, bool precise) {
+ ALOGD("seekTo(%p) (%d ms, %d) at state %d", this, msec, precise, mState);
Mutex::Autolock autoLock(mLock);
int64_t seekTimeUs = msec * 1000ll;
@@ -398,7 +398,7 @@
mSeekInProgress = true;
// seeks can take a while, so we essentially paused
notifyListener_l(MEDIA_PAUSED);
- mPlayer->seekToAsync(seekTimeUs, true /* needNotify */);
+ mPlayer->seekToAsync(seekTimeUs, precise, true /* needNotify */);
break;
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
index 58008f0..034b3f9 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
@@ -53,7 +53,7 @@
virtual status_t getPlaybackSettings(AudioPlaybackRate *rate);
virtual status_t setSyncSettings(const AVSyncSettings &sync, float videoFpsHint);
virtual status_t getSyncSettings(AVSyncSettings *sync, float *videoFps);
- virtual status_t seekTo(int msec);
+ virtual status_t seekTo(int msec, bool precise = false);
virtual status_t getCurrentPosition(int *msec);
virtual status_t getDuration(int *msec);
virtual status_t reset();
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index b742762..b78bdfb 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -21,7 +21,6 @@
#include "NuPlayerRenderer.h"
#include <algorithm>
#include <cutils/properties.h>
-#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AUtils.h>
@@ -31,6 +30,7 @@
#include <media/stagefright/MetaData.h>
#include <media/stagefright/Utils.h>
#include <media/stagefright/VideoFrameScheduler.h>
+#include <media/MediaCodecBuffer.h>
#include <inttypes.h>
@@ -156,12 +156,12 @@
void NuPlayer::Renderer::queueBuffer(
bool audio,
- const sp<ABuffer> &buffer,
+ const sp<MediaCodecBuffer> &buffer,
const sp<AMessage> &notifyConsumed) {
sp<AMessage> msg = new AMessage(kWhatQueueBuffer, this);
msg->setInt32("queueGeneration", getQueueGeneration(audio));
msg->setInt32("audio", static_cast<int32_t>(audio));
- msg->setBuffer("buffer", buffer);
+ msg->setObject("buffer", buffer);
msg->setMessage("notifyConsumed", notifyConsumed);
msg->post();
}
@@ -1368,8 +1368,9 @@
}
}
- sp<ABuffer> buffer;
- CHECK(msg->findBuffer("buffer", &buffer));
+ sp<RefBase> obj;
+ CHECK(msg->findObject("buffer", &obj));
+ sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
sp<AMessage> notifyConsumed;
CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));
@@ -1395,8 +1396,8 @@
return;
}
- sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
- sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;
+ sp<MediaCodecBuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
+ sp<MediaCodecBuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;
if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
// EOS signalled on either queue.
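Because AMessage::setBuffer()/findBuffer() only carry ABuffer, the renderer stores the queued MediaCodecBuffer in the message's object slot and downcasts it when the message arrives. A minimal sketch of that round-trip, again assuming the frameworks/av headers are available; the helper names are hypothetical:

#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <utils/RefBase.h>

using namespace android;

static void stashBuffer(const sp<AMessage> &msg, const sp<MediaCodecBuffer> &buffer) {
    msg->setObject("buffer", buffer);                    // any RefBase-derived object can ride along
}

static sp<MediaCodecBuffer> fetchBuffer(const sp<AMessage> &msg) {
    sp<RefBase> obj;
    CHECK(msg->findObject("buffer", &obj));              // retrieved as a RefBase...
    return static_cast<MediaCodecBuffer *>(obj.get());   // ...then downcast, as onQueueBuffer() does
}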
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
index fe7f8fa..0bd3aa1 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
@@ -25,9 +25,9 @@
namespace android {
-struct ABuffer;
class AWakeLock;
struct MediaClock;
+class MediaCodecBuffer;
struct VideoFrameScheduler;
struct NuPlayer::Renderer : public AHandler {
@@ -46,7 +46,7 @@
void queueBuffer(
bool audio,
- const sp<ABuffer> &buffer,
+ const sp<MediaCodecBuffer> &buffer,
const sp<AMessage> &notifyConsumed);
void queueEOS(bool audio, status_t finalResult);
@@ -125,7 +125,7 @@
};
struct QueueEntry {
- sp<ABuffer> mBuffer;
+ sp<MediaCodecBuffer> mBuffer;
sp<AMessage> mNotifyConsumed;
size_t mOffset;
status_t mFinalResult;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
index 3a96138..fe4fc63 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
@@ -104,7 +104,8 @@
return INVALID_OPERATION;
}
- virtual status_t seekTo(int64_t /* seekTimeUs */) {
+ // Refer to comment of seekTo in IMediaPlayer.h for meaning of |precise|.
+ virtual status_t seekTo(int64_t /* seekTimeUs */, bool /* precise */ = false) {
return INVALID_OPERATION;
}
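The added |precise| parameter defaults to false, so existing one-argument call sites keep compiling while new callers can request an exact-position seek. A hypothetical caller-side sketch, assuming it builds alongside the nuplayer sources and |source| is a valid sp<NuPlayer::Source>:

// Illustrative only; assumes NuPlayerSource.h is reachable from the include path.
#include "NuPlayerSource.h"

using namespace android;

static status_t requestSeek(const sp<NuPlayer::Source> &source, int64_t seekTimeUs, bool exact) {
    if (!exact) {
        return source->seekTo(seekTimeUs);                  // one-argument form; precise defaults to false
    }
    return source->seekTo(seekTimeUs, true /* precise */);  // request an exact-position seek
}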
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
index c4e5df7..f430f03 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
@@ -258,7 +258,7 @@
}
status_t NuPlayer::RTSPSource::getDuration(int64_t *durationUs) {
- *durationUs = 0ll;
+ *durationUs = -1ll;
int64_t audioDurationUs;
if (mAudioTrack != NULL
@@ -279,10 +279,11 @@
return OK;
}
-status_t NuPlayer::RTSPSource::seekTo(int64_t seekTimeUs) {
+status_t NuPlayer::RTSPSource::seekTo(int64_t seekTimeUs, bool precise) {
sp<AMessage> msg = new AMessage(kWhatPerformSeek, this);
msg->setInt32("generation", ++mSeekGeneration);
msg->setInt64("timeUs", seekTimeUs);
+ msg->setInt32("precise", precise);
sp<AMessage> response;
status_t err = msg->postAndAwaitResponse(&response);
@@ -465,8 +466,11 @@
}
int64_t seekTimeUs;
+ int32_t precise;
CHECK(msg->findInt64("timeUs", &seekTimeUs));
+ CHECK(msg->findInt32("precise", &precise));
+ // TODO: add "precise" to performSeek.
performSeek(seekTimeUs);
return;
} else if (msg->what() == kWhatPollBuffering) {
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.h b/media/libmediaplayerservice/nuplayer/RTSPSource.h
index c7834ef..b2962ed6 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.h
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.h
@@ -49,7 +49,7 @@
virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit);
virtual status_t getDuration(int64_t *durationUs);
- virtual status_t seekTo(int64_t seekTimeUs);
+ virtual status_t seekTo(int64_t seekTimeUs, bool precise = false) override;
void onMessageReceived(const sp<AMessage> &msg);
diff --git a/media/libmediaplayerservice/tests/Android.mk b/media/libmediaplayerservice/tests/Android.mk
index ea75a97..c0b3265 100644
--- a/media/libmediaplayerservice/tests/Android.mk
+++ b/media/libmediaplayerservice/tests/Android.mk
@@ -20,7 +20,6 @@
frameworks/av/media/libmediaplayerservice \
LOCAL_CFLAGS += -Werror -Wall
-LOCAL_CLANG := true
LOCAL_32_BIT_ONLY := true
diff --git a/media/libnbaio/AudioStreamInSource.cpp b/media/libnbaio/AudioStreamInSource.cpp
index 2dc3050..1054b68 100644
--- a/media/libnbaio/AudioStreamInSource.cpp
+++ b/media/libnbaio/AudioStreamInSource.cpp
@@ -19,33 +19,38 @@
#include <cutils/compiler.h>
#include <utils/Log.h>
+#include <media/audiohal/StreamHalInterface.h>
#include <media/nbaio/AudioStreamInSource.h>
namespace android {
-AudioStreamInSource::AudioStreamInSource(audio_stream_in *stream) :
+AudioStreamInSource::AudioStreamInSource(sp<StreamInHalInterface> stream) :
NBAIO_Source(),
mStream(stream),
mStreamBufferSizeBytes(0),
mFramesOverrun(0),
mOverruns(0)
{
- ALOG_ASSERT(stream != NULL);
+ ALOG_ASSERT(stream != 0);
}
AudioStreamInSource::~AudioStreamInSource()
{
+ mStream.clear();
}
ssize_t AudioStreamInSource::negotiate(const NBAIO_Format offers[], size_t numOffers,
NBAIO_Format counterOffers[], size_t& numCounterOffers)
{
if (!Format_isValid(mFormat)) {
- mStreamBufferSizeBytes = mStream->common.get_buffer_size(&mStream->common);
- audio_format_t streamFormat = mStream->common.get_format(&mStream->common);
- uint32_t sampleRate = mStream->common.get_sample_rate(&mStream->common);
- audio_channel_mask_t channelMask =
- (audio_channel_mask_t) mStream->common.get_channels(&mStream->common);
+ status_t result;
+ result = mStream->getBufferSize(&mStreamBufferSizeBytes);
+ if (result != OK) return result;
+ audio_format_t streamFormat;
+ uint32_t sampleRate;
+ audio_channel_mask_t channelMask;
+ result = mStream->getAudioProperties(&sampleRate, &channelMask, &streamFormat);
+ if (result != OK) return result;
mFormat = Format_from_SR_C(sampleRate,
audio_channel_count_from_in_mask(channelMask), streamFormat);
mFrameSize = Format_frameSize(mFormat);
@@ -55,11 +60,14 @@
int64_t AudioStreamInSource::framesOverrun()
{
- uint32_t framesOverrun = mStream->get_input_frames_lost(mStream);
- if (framesOverrun > 0) {
+ uint32_t framesOverrun;
+ status_t result = mStream->getInputFramesLost(&framesOverrun);
+ if (result == OK && framesOverrun > 0) {
mFramesOverrun += framesOverrun;
// FIXME only increment for contiguous ranges
++mOverruns;
+ } else if (result != OK) {
+ ALOGE("Error when retrieving lost frames count from HAL: %d", result);
}
return mFramesOverrun;
}
@@ -69,12 +77,14 @@
if (CC_UNLIKELY(!Format_isValid(mFormat))) {
return NEGOTIATE;
}
- ssize_t bytesRead = mStream->read(mStream, buffer, count * mFrameSize);
- if (bytesRead > 0) {
+ size_t bytesRead;
+ status_t result = mStream->read(buffer, count * mFrameSize, &bytesRead);
+ if (result == OK && bytesRead > 0) {
size_t framesRead = bytesRead / mFrameSize;
mFramesRead += framesRead;
return framesRead;
} else {
+ ALOGE_IF(result != OK, "Error while reading data from HAL: %d", result);
return bytesRead;
}
}
diff --git a/media/libnbaio/AudioStreamOutSink.cpp b/media/libnbaio/AudioStreamOutSink.cpp
index ee44678..cbff87d 100644
--- a/media/libnbaio/AudioStreamOutSink.cpp
+++ b/media/libnbaio/AudioStreamOutSink.cpp
@@ -18,31 +18,36 @@
//#define LOG_NDEBUG 0
#include <utils/Log.h>
+#include <media/audiohal/StreamHalInterface.h>
#include <media/nbaio/AudioStreamOutSink.h>
namespace android {
-AudioStreamOutSink::AudioStreamOutSink(audio_stream_out *stream) :
+AudioStreamOutSink::AudioStreamOutSink(sp<StreamOutHalInterface> stream) :
NBAIO_Sink(),
mStream(stream),
mStreamBufferSizeBytes(0)
{
- ALOG_ASSERT(stream != NULL);
+ ALOG_ASSERT(stream != 0);
}
AudioStreamOutSink::~AudioStreamOutSink()
{
+ mStream.clear();
}
ssize_t AudioStreamOutSink::negotiate(const NBAIO_Format offers[], size_t numOffers,
NBAIO_Format counterOffers[], size_t& numCounterOffers)
{
if (!Format_isValid(mFormat)) {
- mStreamBufferSizeBytes = mStream->common.get_buffer_size(&mStream->common);
- audio_format_t streamFormat = mStream->common.get_format(&mStream->common);
- uint32_t sampleRate = mStream->common.get_sample_rate(&mStream->common);
- audio_channel_mask_t channelMask =
- (audio_channel_mask_t) mStream->common.get_channels(&mStream->common);
+ status_t result;
+ result = mStream->getBufferSize(&mStreamBufferSizeBytes);
+ if (result != OK) return result;
+ audio_format_t streamFormat;
+ uint32_t sampleRate;
+ audio_channel_mask_t channelMask;
+ result = mStream->getAudioProperties(&sampleRate, &channelMask, &streamFormat);
+ if (result != OK) return result;
mFormat = Format_from_SR_C(sampleRate,
audio_channel_count_from_out_mask(channelMask), streamFormat);
mFrameSize = Format_frameSize(mFormat);
@@ -56,25 +61,24 @@
return NEGOTIATE;
}
ALOG_ASSERT(Format_isValid(mFormat));
- ssize_t ret = mStream->write(mStream, buffer, count * mFrameSize);
- if (ret > 0) {
- ret /= mFrameSize;
- mFramesWritten += ret;
+ size_t written;
+ status_t ret = mStream->write(buffer, count * mFrameSize, &written);
+ if (ret == OK && written > 0) {
+ written /= mFrameSize;
+ mFramesWritten += written;
+ return written;
} else {
// FIXME verify HAL implementations are returning the correct error codes e.g. WOULD_BLOCK
+ ALOGE_IF(ret != OK, "Error while writing data to HAL: %d", ret);
+ return ret;
}
- return ret;
}
status_t AudioStreamOutSink::getTimestamp(ExtendedTimestamp &timestamp)
{
- if (mStream->get_presentation_position == NULL) {
- return INVALID_OPERATION;
- }
-
uint64_t position64;
struct timespec time;
- if (mStream->get_presentation_position(mStream, &position64, &time) != OK) {
+ if (mStream->getPresentationPosition(&position64, &time) != OK) {
return INVALID_OPERATION;
}
timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL] = position64;
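The sink now reaches the audio HAL through StreamOutHalInterface, whose methods return a status_t and report byte counts through out-parameters, so the NBAIO layer converts bytes to frames itself. A minimal caller-side sketch under those assumptions; the helper and LOG_TAG are illustrative, not part of the patch:

#define LOG_TAG "StreamOutWriteSketch"
#include <media/audiohal/StreamHalInterface.h>
#include <utils/Errors.h>
#include <utils/Log.h>

using namespace android;

// Writes |frameCount| frames and returns frames written, or a negative status_t on error.
static ssize_t writeFrames(const sp<StreamOutHalInterface> &stream,
                           const void *buffer, size_t frameCount, size_t frameSize) {
    size_t written = 0;
    status_t result = stream->write(buffer, frameCount * frameSize, &written);
    if (result != OK) {
        ALOGE("Error while writing data to HAL: %d", result);
        return result;                  // status_t errors are negative, like the old ssize_t API
    }
    return written / frameSize;         // the HAL reports bytes; NBAIO sinks count frames
}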
diff --git a/media/libnbaio/PipeReader.cpp b/media/libnbaio/PipeReader.cpp
index a879647..fdea68e 100644
--- a/media/libnbaio/PipeReader.cpp
+++ b/media/libnbaio/PipeReader.cpp
@@ -45,6 +45,7 @@
ALOG_ASSERT(readers > 0);
}
+__attribute__((no_sanitize("integer")))
ssize_t PipeReader::availableToRead()
{
if (CC_UNLIKELY(!mNegotiated)) {
@@ -54,16 +55,16 @@
// read() is not multi-thread safe w.r.t. itself, so no mutex or atomic op needed to read mFront
size_t avail = rear - mFront;
if (CC_UNLIKELY(avail > mPipe.mMaxFrames)) {
- // Discard 1/16 of the most recent data in pipe to avoid another overrun immediately
- int32_t oldFront = mFront;
- mFront = rear - mPipe.mMaxFrames + (mPipe.mMaxFrames >> 4);
- mFramesOverrun += (size_t) (mFront - oldFront);
+ // Discard all data in pipe to avoid another overrun immediately
+ mFront = rear;
+ mFramesOverrun += avail;
++mOverruns;
return OVERRUN;
}
return avail;
}
+__attribute__((no_sanitize("integer")))
ssize_t PipeReader::read(void *buffer, size_t count)
{
ssize_t avail = availableToRead();
@@ -97,4 +98,19 @@
return red;
}
+__attribute__((no_sanitize("integer")))
+ssize_t PipeReader::flush()
+{
+ if (CC_UNLIKELY(!mNegotiated)) {
+ return NEGOTIATE;
+ }
+ const int32_t rear = android_atomic_acquire_load(&mPipe.mRear);
+ const size_t flushed = rear - mFront;
+ // We don't check if flushed > mPipe.mMaxFrames (an overrun occurred) as the
+ // distinction is unimportant; all data is dropped.
+ mFront = rear;
+ mFramesRead += flushed; // we consider flushed frames as read.
+ return flushed;
+}
+
} // namespace android
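On overrun the reader previously kept the newest 15/16 of the pipe; it now discards everything up to the writer's position, and the new flush() reuses the same rear - mFront arithmetic. A self-contained sketch of the two bookkeeping policies, using plain integers in place of the pipe's atomics; all names are illustrative:

#include <cstdint>
#include <cstdio>

int main() {
    const size_t maxFrames = 1024;          // stands in for mPipe.mMaxFrames
    int32_t front = 100;                    // reader position (mFront)
    int32_t rear = front + 3000;            // writer has lapped the reader
    size_t avail = (size_t)(rear - front);  // wrap-tolerant difference, as in availableToRead()

    if (avail > maxFrames) {                // overrun detected
        int32_t oldPolicyFront = rear - (int32_t)maxFrames + (int32_t)(maxFrames >> 4); // keep newest 15/16
        int32_t newPolicyFront = rear;                                                  // drop everything
        printf("old policy discards %d frames, new policy discards %zu frames\n",
               (int)(oldPolicyFront - front), avail);
        (void)newPolicyFront;
    }
    return 0;
}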
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index e7057ce..2f97fac 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -44,6 +44,7 @@
#include <media/stagefright/PersistentSurface.h>
#include <media/stagefright/SurfaceUtils.h>
#include <media/hardware/HardwareAPI.h>
+#include <media/OMXBuffer.h>
#include <OMX_AudioExt.h>
#include <OMX_VideoExt.h>
@@ -53,10 +54,14 @@
#include "include/avc_utils.h"
#include "include/DataConverter.h"
+#include "include/SecureBuffer.h"
+#include "include/SharedMemoryBuffer.h"
#include "omx/OMXUtils.h"
namespace android {
+using binder::Status;
+
enum {
kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles
};
@@ -90,6 +95,21 @@
}
}
+static inline status_t statusFromBinderStatus(const Status &status) {
+ if (status.isOk()) {
+ return OK;
+ }
+ status_t err;
+ if ((err = status.serviceSpecificErrorCode()) != OK) {
+ return err;
+ }
+ if ((err = status.transactionError()) != OK) {
+ return err;
+ }
+ // Other exception
+ return UNKNOWN_ERROR;
+}
+
// checks and converts status_t to a non-side-effect status_t
static inline status_t makeNoSideEffectStatus(status_t err) {
switch (err) {
@@ -136,15 +156,10 @@
}
sp<AMessage> notify = mNotify->dup();
- bool first = true;
sp<MessageList> msgList = new MessageList();
for (std::list<omx_message>::const_iterator it = messages.cbegin();
it != messages.cend(); ++it) {
const omx_message &omx_msg = *it;
- if (first) {
- notify->setInt32("node", omx_msg.node);
- first = false;
- }
sp<AMessage> msg = new AMessage;
msg->setInt32("type", omx_msg.type);
@@ -495,8 +510,7 @@
ACodec::ACodec()
: mSampleRate(0),
- mQuirks(0),
- mNode(0),
+ mNodeGeneration(0),
mUsingNativeWindow(false),
mNativeWindowUsageBits(0),
mLastNativeWindowDataSpace(HAL_DATASPACE_UNKNOWN),
@@ -783,7 +797,7 @@
status_t err;
if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
- if (storingMetadataInDecodedBuffers()) {
+ if (storingMetadataInDecodedBuffers() && !mLegacyAdaptiveExperiment) {
err = allocateOutputMetadataBuffers();
} else {
err = allocateOutputBuffersFromNativeWindow();
@@ -793,8 +807,8 @@
InitOMXParams(&def);
def.nPortIndex = portIndex;
- err = mOMX->getParameter(
- mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ err = mOMXNode->getParameter(
+ OMX_IndexParamPortDefinition, &def, sizeof(def));
if (err == OK) {
MetadataBufferType type =
@@ -853,6 +867,8 @@
size_t totalSize = def.nBufferCountActual * (alignedSize + alignedConvSize);
mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec");
+ const sp<AMessage> &format =
+ portIndex == kPortIndexInput ? mInputFormat : mOutputFormat;
for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) {
sp<IMemory> mem = mDealer[portIndex]->allocate(bufSize);
if (mem == NULL || mem->pointer() == NULL) {
@@ -863,45 +879,27 @@
info.mStatus = BufferInfo::OWNED_BY_US;
info.mFenceFd = -1;
info.mRenderInfo = NULL;
- info.mNativeHandle = NULL;
-
- uint32_t requiresAllocateBufferBit =
- (portIndex == kPortIndexInput)
- ? kRequiresAllocateBufferOnInputPorts
- : kRequiresAllocateBufferOnOutputPorts;
if (portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) {
mem.clear();
void *ptr = NULL;
sp<NativeHandle> native_handle;
- err = mOMX->allocateSecureBuffer(
- mNode, portIndex, bufSize, &info.mBufferID,
+ err = mOMXNode->allocateSecureBuffer(
+ portIndex, bufSize, &info.mBufferID,
&ptr, &native_handle);
- // TRICKY: this representation is unorthodox, but ACodec requires
- // an ABuffer with a proper size to validate range offsets and lengths.
- // Since mData is never referenced for secure input, it is used to store
- // either the pointer to the secure buffer, or the opaque handle as on
- // some devices ptr is actually an opaque handle, not a pointer.
-
- // TRICKY2: use native handle as the base of the ABuffer if received one,
- // because Widevine source only receives these base addresses.
- const native_handle_t *native_handle_ptr =
- native_handle == NULL ? NULL : native_handle->handle();
- info.mData = new ABuffer(
- ptr != NULL ? ptr : (void *)native_handle_ptr, bufSize);
- info.mNativeHandle = native_handle;
+ info.mData = (native_handle == NULL)
+ ? new SecureBuffer(format, ptr, bufSize)
+ : new SecureBuffer(format, native_handle, bufSize);
info.mCodecData = info.mData;
- } else if (mQuirks & requiresAllocateBufferBit) {
- err = mOMX->allocateBufferWithBackup(
- mNode, portIndex, mem, &info.mBufferID, allottedSize);
} else {
- err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID, allottedSize);
+ err = mOMXNode->useBuffer(portIndex,
+ OMXBuffer(mem, allottedSize), &info.mBufferID);
}
if (mem != NULL) {
- info.mCodecData = new ABuffer(mem->pointer(), bufSize);
+ info.mCodecData = new SharedMemoryBuffer(format, mem);
info.mCodecRef = mem;
if (type == kMetadataBufferTypeANWBuffer) {
@@ -916,7 +914,7 @@
if (mem == NULL|| mem->pointer() == NULL) {
return NO_MEMORY;
}
- info.mData = new ABuffer(mem->pointer(), conversionBufferSize);
+ info.mData = new SharedMemoryBuffer(format, mem);
info.mMemRef = mem;
} else {
info.mData = info.mCodecData;
@@ -942,7 +940,7 @@
for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
const BufferInfo &info = mBuffers[portIndex][i];
- desc->addBuffer(info.mBufferID, info.mData, info.mNativeHandle, info.mMemRef);
+ desc->addBuffer(info.mBufferID, info.mData);
}
notify->setObject("portDesc", desc);
@@ -958,15 +956,15 @@
InitOMXParams(&def);
def.nPortIndex = kPortIndexOutput;
- status_t err = mOMX->getParameter(
- mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ status_t err = mOMXNode->getParameter(
+ OMX_IndexParamPortDefinition, &def, sizeof(def));
if (err != OK) {
return err;
}
OMX_U32 usage = 0;
- err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage);
+ err = mOMXNode->getGraphicBufferUsage(kPortIndexOutput, &usage);
if (err != 0) {
ALOGW("querying usage flags from OMX IL component failed: %d", err);
// XXX: Currently this error is logged, but not fatal.
@@ -1003,8 +1001,8 @@
InitOMXParams(&def);
def.nPortIndex = kPortIndexOutput;
- status_t err = mOMX->getParameter(
- mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ status_t err = mOMXNode->getParameter(
+ OMX_IndexParamPortDefinition, &def, sizeof(def));
if (err == OK) {
err = setupNativeWindowSizeFormatAndUsage(
@@ -1021,8 +1019,8 @@
if (mTunneled) {
ALOGV("Tunneled Playback: skipping native window buffer allocation.");
def.nBufferCountActual = 0;
- err = mOMX->setParameter(
- mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ err = mOMXNode->setParameter(
+ OMX_IndexParamPortDefinition, &def, sizeof(def));
*minUndequeuedBuffers = 0;
*bufferCount = 0;
@@ -1057,8 +1055,8 @@
OMX_U32 newBufferCount =
def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers;
def.nBufferCountActual = newBufferCount;
- err = mOMX->setParameter(
- mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ err = mOMXNode->setParameter(
+ OMX_IndexParamPortDefinition, &def, sizeof(def));
if (err == OK) {
*minUndequeuedBuffers += extraBuffers;
@@ -1088,6 +1086,11 @@
}
status_t ACodec::allocateOutputBuffersFromNativeWindow() {
+ // This method only handles the non-metadata mode, or legacy metadata mode
+ // (where the headers for each buffer id will be fixed). Non-legacy metadata
+ // mode shouldn't go through this path.
+ CHECK(!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment);
+
OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
status_t err = configureOutputBuffersFromNativeWindow(
&bufferCount, &bufferSize, &minUndequeuedBuffers, true /* preregister */);
@@ -1095,10 +1098,8 @@
return err;
mNumUndequeuedBuffers = minUndequeuedBuffers;
- if (!storingMetadataInDecodedBuffers()) {
- static_cast<Surface*>(mNativeWindow.get())
- ->getIGraphicBufferProducer()->allowAllocation(true);
- }
+ static_cast<Surface*>(mNativeWindow.get())
+ ->getIGraphicBufferProducer()->allowAllocation(true);
ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
"output port",
@@ -1120,14 +1121,20 @@
info.mFenceFd = fenceFd;
info.mIsReadFence = false;
info.mRenderInfo = NULL;
- info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */);
- info.mCodecData = info.mData;
info.mGraphicBuffer = graphicBuffer;
+
+ // TODO: We shouldn't need to create MediaCodecBuffer. In metadata mode
+ // OMX doesn't use the shared memory buffer, but some code still
+ // access info.mData. Create an ABuffer as a placeholder.
+ if (storingMetadataInDecodedBuffers()) {
+ info.mData = new MediaCodecBuffer(mOutputFormat, new ABuffer(bufferSize));
+ info.mCodecData = info.mData;
+ }
+
mBuffers[kPortIndexOutput].push(info);
IOMX::buffer_id bufferId;
- err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
- &bufferId);
+ err = mOMXNode->useBuffer(kPortIndexOutput, graphicBuffer, &bufferId);
if (err != 0) {
ALOGE("registering GraphicBuffer %u with OMX IL component failed: "
"%d", i, err);
@@ -1144,9 +1151,9 @@
OMX_U32 cancelStart;
OMX_U32 cancelEnd;
- if (err != 0) {
+ if (err != 0 || storingMetadataInDecodedBuffers()) {
// If an error occurred while dequeuing we need to cancel any buffers
- // that were dequeued.
+ // that were dequeued. Also cancel all if we're in legacy metadata mode.
cancelStart = 0;
cancelEnd = mBuffers[kPortIndexOutput].size();
} else {
@@ -1165,19 +1172,23 @@
}
}
- if (!storingMetadataInDecodedBuffers()) {
- static_cast<Surface*>(mNativeWindow.get())
- ->getIGraphicBufferProducer()->allowAllocation(false);
+ static_cast<Surface*>(mNativeWindow.get())
+ ->getIGraphicBufferProducer()->allowAllocation(false);
+
+ if (storingMetadataInDecodedBuffers()) {
+ mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers;
}
return err;
}
status_t ACodec::allocateOutputMetadataBuffers() {
+ CHECK(storingMetadataInDecodedBuffers() && !mLegacyAdaptiveExperiment);
+
OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
status_t err = configureOutputBuffersFromNativeWindow(
&bufferCount, &bufferSize, &minUndequeuedBuffers,
- mLegacyAdaptiveExperiment /* preregister */);
+ false /* preregister */);
if (err != 0)
return err;
mNumUndequeuedBuffers = minUndequeuedBuffers;
@@ -1190,7 +1201,6 @@
size_t totalSize = bufferCount * align(bufSize, MemoryDealer::getAllocationAlignment());
mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec");
- // Dequeue buffers and send them to OMX
for (OMX_U32 i = 0; i < bufferCount; i++) {
BufferInfo info;
info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
@@ -1206,63 +1216,18 @@
if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
}
- info.mData = new ABuffer(mem->pointer(), mem->size());
+ info.mData = new SharedMemoryBuffer(mOutputFormat, mem);
info.mMemRef = mem;
info.mCodecData = info.mData;
info.mCodecRef = mem;
- // we use useBuffer for metadata regardless of quirks
- err = mOMX->useBuffer(
- mNode, kPortIndexOutput, mem, &info.mBufferID, mem->size());
+ err = mOMXNode->useBuffer(kPortIndexOutput, mem, &info.mBufferID);
mBuffers[kPortIndexOutput].push(info);
ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)",
mComponentName.c_str(), info.mBufferID, mem->pointer());
}
- if (mLegacyAdaptiveExperiment) {
- // preallocate and preregister buffers
- static_cast<Surface *>(mNativeWindow.get())
- ->getIGraphicBufferProducer()->allowAllocation(true);
-
- ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
- "output port",
- mComponentName.c_str(), bufferCount, bufferSize);
-
- // Dequeue buffers then cancel them all
- for (OMX_U32 i = 0; i < bufferCount; i++) {
- BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
-
- ANativeWindowBuffer *buf;
- int fenceFd;
- err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
- if (err != 0) {
- ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
- break;
- }
-
- sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
- mOMX->updateGraphicBufferInMeta(
- mNode, kPortIndexOutput, graphicBuffer, info->mBufferID);
- info->mStatus = BufferInfo::OWNED_BY_US;
- info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy");
- info->mGraphicBuffer = graphicBuffer;
- }
-
- for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) {
- BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
- if (info->mStatus == BufferInfo::OWNED_BY_US) {
- status_t error = cancelBufferToNativeWindow(info);
- if (err == OK) {
- err = error;
- }
- }
- }
-
- static_cast<Surface*>(mNativeWindow.get())
- ->getIGraphicBufferProducer()->allowAllocation(false);
- }
-
mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers;
return err;
}
@@ -1282,13 +1247,7 @@
--mMetadataBuffersToSubmit;
info->checkWriteFence("submitOutputMetadataBuffer");
- status_t err = mOMX->fillBuffer(mNode, info->mBufferID, info->mFenceFd);
- info->mFenceFd = -1;
- if (err == OK) {
- info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
- }
-
- return err;
+ return fillBuffer(info);
}
status_t ACodec::waitForFence(int fd, const char *dbg ) {
@@ -1473,6 +1432,12 @@
// while loop above does not complete
CHECK(storingMetadataInDecodedBuffers());
+ if (storingMetadataInDecodedBuffers() && mLegacyAdaptiveExperiment) {
+ // If we're here while running legacy experiment, we dequeued some
+ // unrecognized buffers, and the experiment can't continue.
+ ALOGE("Legacy experiment failed, drop back to metadata mode");
+ mLegacyAdaptiveExperiment = false;
+ }
// discard buffer in LRU info and replace with new buffer
oldest->mGraphicBuffer = new GraphicBuffer(buf, false);
oldest->mStatus = BufferInfo::OWNED_BY_US;
@@ -1480,26 +1445,22 @@
mRenderTracker.untrackFrame(oldest->mRenderInfo);
oldest->mRenderInfo = NULL;
- mOMX->updateGraphicBufferInMeta(
- mNode, kPortIndexOutput, oldest->mGraphicBuffer,
- oldest->mBufferID);
-
if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) {
VideoGrallocMetadata *grallocMeta =
- reinterpret_cast<VideoGrallocMetadata *>(oldest->mData->base());
+ reinterpret_cast<VideoGrallocMetadata *>(oldest->mCodecData->base());
ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
(unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
mDequeueCounter - oldest->mDequeuedAt,
(void *)(uintptr_t)grallocMeta->pHandle,
- oldest->mGraphicBuffer->handle, oldest->mData->base());
+ oldest->mGraphicBuffer->handle, oldest->mCodecData->base());
} else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
VideoNativeMetadata *nativeMeta =
- reinterpret_cast<VideoNativeMetadata *>(oldest->mData->base());
+ reinterpret_cast<VideoNativeMetadata *>(oldest->mCodecData->base());
ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
(unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
mDequeueCounter - oldest->mDequeuedAt,
(void *)(uintptr_t)nativeMeta->pBuffer,
- oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->base());
+ oldest->mGraphicBuffer->getNativeBuffer(), oldest->mCodecData->base());
}
updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest);
@@ -1549,9 +1510,9 @@
// there should not be any fences in the metadata
MetadataBufferType type =
portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
- if (type == kMetadataBufferTypeANWBuffer && info->mData != NULL
- && info->mData->size() >= sizeof(VideoNativeMetadata)) {
- int fenceFd = ((VideoNativeMetadata *)info->mData->data())->nFenceFd;
+ if (type == kMetadataBufferTypeANWBuffer && info->mCodecData != NULL
+ && info->mCodecData->size() >= sizeof(VideoNativeMetadata)) {
+ int fenceFd = ((VideoNativeMetadata *)info->mCodecData->base())->nFenceFd;
if (fenceFd >= 0) {
ALOGW("unreleased fence (%d) in %s metadata buffer %zu",
fenceFd, portIndex == kPortIndexInput ? "input" : "output", i);
@@ -1566,7 +1527,7 @@
// fall through
case BufferInfo::OWNED_BY_NATIVE_WINDOW:
- err = mOMX->freeBuffer(mNode, portIndex, info->mBufferID);
+ err = mOMXNode->freeBuffer(portIndex, info->mBufferID);
break;
default:
@@ -1584,7 +1545,7 @@
info->mRenderInfo = NULL;
}
- // remove buffer even if mOMX->freeBuffer fails
+ // remove buffer even if mOMXNode->freeBuffer fails
mBuffers[portIndex].removeAt(i);
return err;
}
@@ -1606,13 +1567,30 @@
return NULL;
}
+status_t ACodec::fillBuffer(BufferInfo *info) {
+ status_t err;
+ if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) {
+ err = mOMXNode->fillBuffer(
+ info->mBufferID, OMXBuffer::sPreset, info->mFenceFd);
+ } else {
+ err = mOMXNode->fillBuffer(
+ info->mBufferID, info->mGraphicBuffer, info->mFenceFd);
+ }
+
+ info->mFenceFd = -1;
+ if (err == OK) {
+ info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
+ }
+ return err;
+}
+
status_t ACodec::setComponentRole(
bool isEncoder, const char *mime) {
- const char *role = getComponentRole(isEncoder, mime);
+ const char *role = GetComponentRole(isEncoder, mime);
if (role == NULL) {
return BAD_VALUE;
}
- status_t err = setComponentRole(mOMX, mNode, role);
+ status_t err = SetComponentRole(mOMXNode, role);
if (err != OK) {
ALOGW("[%s] Failed to set standard component role '%s'.",
mComponentName.c_str(), role);
@@ -1620,98 +1598,6 @@
return err;
}
-//static
-const char *ACodec::getComponentRole(
- bool isEncoder, const char *mime) {
- struct MimeToRole {
- const char *mime;
- const char *decoderRole;
- const char *encoderRole;
- };
-
- static const MimeToRole kMimeToRole[] = {
- { MEDIA_MIMETYPE_AUDIO_MPEG,
- "audio_decoder.mp3", "audio_encoder.mp3" },
- { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
- "audio_decoder.mp1", "audio_encoder.mp1" },
- { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
- "audio_decoder.mp2", "audio_encoder.mp2" },
- { MEDIA_MIMETYPE_AUDIO_AMR_NB,
- "audio_decoder.amrnb", "audio_encoder.amrnb" },
- { MEDIA_MIMETYPE_AUDIO_AMR_WB,
- "audio_decoder.amrwb", "audio_encoder.amrwb" },
- { MEDIA_MIMETYPE_AUDIO_AAC,
- "audio_decoder.aac", "audio_encoder.aac" },
- { MEDIA_MIMETYPE_AUDIO_VORBIS,
- "audio_decoder.vorbis", "audio_encoder.vorbis" },
- { MEDIA_MIMETYPE_AUDIO_OPUS,
- "audio_decoder.opus", "audio_encoder.opus" },
- { MEDIA_MIMETYPE_AUDIO_G711_MLAW,
- "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" },
- { MEDIA_MIMETYPE_AUDIO_G711_ALAW,
- "audio_decoder.g711alaw", "audio_encoder.g711alaw" },
- { MEDIA_MIMETYPE_VIDEO_AVC,
- "video_decoder.avc", "video_encoder.avc" },
- { MEDIA_MIMETYPE_VIDEO_HEVC,
- "video_decoder.hevc", "video_encoder.hevc" },
- { MEDIA_MIMETYPE_VIDEO_MPEG4,
- "video_decoder.mpeg4", "video_encoder.mpeg4" },
- { MEDIA_MIMETYPE_VIDEO_H263,
- "video_decoder.h263", "video_encoder.h263" },
- { MEDIA_MIMETYPE_VIDEO_VP8,
- "video_decoder.vp8", "video_encoder.vp8" },
- { MEDIA_MIMETYPE_VIDEO_VP9,
- "video_decoder.vp9", "video_encoder.vp9" },
- { MEDIA_MIMETYPE_AUDIO_RAW,
- "audio_decoder.raw", "audio_encoder.raw" },
- { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
- "video_decoder.dolby-vision", "video_encoder.dolby-vision" },
- { MEDIA_MIMETYPE_AUDIO_FLAC,
- "audio_decoder.flac", "audio_encoder.flac" },
- { MEDIA_MIMETYPE_AUDIO_MSGSM,
- "audio_decoder.gsm", "audio_encoder.gsm" },
- { MEDIA_MIMETYPE_VIDEO_MPEG2,
- "video_decoder.mpeg2", "video_encoder.mpeg2" },
- { MEDIA_MIMETYPE_AUDIO_AC3,
- "audio_decoder.ac3", "audio_encoder.ac3" },
- { MEDIA_MIMETYPE_AUDIO_EAC3,
- "audio_decoder.eac3", "audio_encoder.eac3" },
- };
-
- static const size_t kNumMimeToRole =
- sizeof(kMimeToRole) / sizeof(kMimeToRole[0]);
-
- size_t i;
- for (i = 0; i < kNumMimeToRole; ++i) {
- if (!strcasecmp(mime, kMimeToRole[i].mime)) {
- break;
- }
- }
-
- if (i == kNumMimeToRole) {
- return NULL;
- }
-
- return isEncoder ? kMimeToRole[i].encoderRole
- : kMimeToRole[i].decoderRole;
-}
-
-//static
-status_t ACodec::setComponentRole(
- const sp<IOMX> &omx, IOMX::node_id node, const char *role) {
- OMX_PARAM_COMPONENTROLETYPE roleParams;
- InitOMXParams(&roleParams);
-
- strncpy((char *)roleParams.cRole,
- role, OMX_MAX_STRINGNAME_SIZE - 1);
-
- roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';
-
- return omx->setParameter(
- node, OMX_IndexParamStandardComponentRole,
- &roleParams, sizeof(roleParams));
-}
-
status_t ACodec::configureCodec(
const char *mime, const sp<AMessage> &msg) {
int32_t encoder;
@@ -1754,8 +1640,8 @@
&& msg->findInt32("android._input-metadata-buffer-type", &storeMeta)
&& storeMeta != kMetadataBufferTypeInvalid) {
mInputMetadataType = (MetadataBufferType)storeMeta;
- err = mOMX->storeMetaDataInBuffers(
- mNode, kPortIndexInput, OMX_TRUE, &mInputMetadataType);
+ err = mOMXNode->storeMetaDataInBuffers(
+ kPortIndexInput, OMX_TRUE, &mInputMetadataType);
if (err != OK) {
ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d",
mComponentName.c_str(), err);
@@ -1768,8 +1654,8 @@
}
uint32_t usageBits;
- if (mOMX->getParameter(
- mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
+ if (mOMXNode->getParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
&usageBits, sizeof(usageBits)) == OK) {
inputFormat->setInt32(
"using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN));
@@ -1781,18 +1667,15 @@
&& msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS)
&& prependSPSPPS != 0) {
OMX_INDEXTYPE index;
- err = mOMX->getExtensionIndex(
- mNode,
- "OMX.google.android.index.prependSPSPPSToIDRFrames",
- &index);
+ err = mOMXNode->getExtensionIndex(
+ "OMX.google.android.index.prependSPSPPSToIDRFrames", &index);
if (err == OK) {
PrependSPSPPSToIDRFramesParams params;
InitOMXParams(&params);
params.bEnable = OMX_TRUE;
- err = mOMX->setParameter(
- mNode, index, &params, sizeof(params));
+ err = mOMXNode->setParameter(index, &params, sizeof(params));
}
if (err != OK) {
@@ -1814,7 +1697,7 @@
&& storeMeta != 0);
mOutputMetadataType = kMetadataBufferTypeNativeHandleSource;
- err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable, &mOutputMetadataType);
+ err = mOMXNode->storeMetaDataInBuffers(kPortIndexOutput, enable, &mOutputMetadataType);
if (err != OK) {
ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d",
mComponentName.c_str(), err);
@@ -1826,8 +1709,12 @@
mRepeatFrameDelayUs = -1ll;
}
+ // only allow 32-bit value, since we pass it as U32 to OMX.
if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) {
mMaxPtsGapUs = -1ll;
+ } else if (mMaxPtsGapUs > INT32_MAX || mMaxPtsGapUs < 0) {
+ ALOGW("Unsupported value for max pts gap %lld", (long long) mMaxPtsGapUs);
+ mMaxPtsGapUs = -1ll;
}
if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) {
@@ -1866,8 +1753,8 @@
if (mFlags & kFlagIsSecure) {
// use native_handles for secure input buffers
- err = mOMX->enableNativeBuffers(
- mNode, kPortIndexInput, OMX_FALSE /* graphic */, OMX_TRUE);
+ err = mOMXNode->enableNativeBuffers(
+ kPortIndexInput, OMX_FALSE /* graphic */, OMX_TRUE);
ALOGI_IF(err != OK, "falling back to non-native_handles");
err = OK; // ignore error for now
}
@@ -1883,8 +1770,8 @@
OMX_CONFIG_BOOLEANTYPE config;
InitOMXParams(&config);
config.bEnabled = (OMX_BOOL)enabled;
- status_t temp = mOMX->setConfig(
- mNode, (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion,
+ status_t temp = mOMXNode->setConfig(
+ (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion,
&config, sizeof(config));
if (temp == OK) {
outputFormat->setInt32("auto-frc", enabled);
@@ -1915,8 +1802,8 @@
if (msg->findInt32("max-width", &maxWidth) &&
msg->findInt32("max-height", &maxHeight)) {
- err = mOMX->prepareForAdaptivePlayback(
- mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight);
+ err = mOMXNode->prepareForAdaptivePlayback(
+ kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight);
if (err != OK) {
ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d",
mComponentName.c_str(), err);
@@ -1943,8 +1830,8 @@
// Always try to enable dynamic output buffers on native surface
mOutputMetadataType = kMetadataBufferTypeANWBuffer;
- err = mOMX->storeMetaDataInBuffers(
- mNode, kPortIndexOutput, OMX_TRUE, &mOutputMetadataType);
+ err = mOMXNode->storeMetaDataInBuffers(
+ kPortIndexOutput, OMX_TRUE, &mOutputMetadataType);
if (err != OK) {
ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d",
mComponentName.c_str(), err);
@@ -1978,9 +1865,8 @@
ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)",
mComponentName.c_str(), maxWidth, maxHeight);
- err = mOMX->prepareForAdaptivePlayback(
- mNode, kPortIndexOutput, OMX_TRUE, maxWidth,
- maxHeight);
+ err = mOMXNode->prepareForAdaptivePlayback(
+ kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight);
ALOGW_IF(err != OK,
"[%s] prepareForAdaptivePlayback failed w/ err %d",
mComponentName.c_str(), err);
@@ -2068,8 +1954,8 @@
}
ALOGD("[%s] Requested output format %#x and got %#x.",
mComponentName.c_str(), requestedColorFormat, colorFormat);
- if (!isFlexibleColorFormat(
- mOMX, mNode, colorFormat, haveNativeWindow, &flexibleEquivalent)
+ if (!IsFlexibleColorFormat(
+ mOMXNode, colorFormat, haveNativeWindow, &flexibleEquivalent)
|| flexibleEquivalent != (OMX_U32)requestedColorFormat) {
// device did not handle flex-YUV request for native window, fall back
// to SW renderer
@@ -2079,16 +1965,16 @@
haveNativeWindow = false;
usingSwRenderer = true;
if (storingMetadataInDecodedBuffers()) {
- err = mOMX->storeMetaDataInBuffers(
- mNode, kPortIndexOutput, OMX_FALSE, &mOutputMetadataType);
+ err = mOMXNode->storeMetaDataInBuffers(
+ kPortIndexOutput, OMX_FALSE, &mOutputMetadataType);
mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case
// TODO: implement adaptive-playback support for bytebuffer mode.
// This is done by SW codecs, but most HW codecs don't support it.
inputFormat->setInt32("adaptive-playback", false);
}
if (err == OK) {
- err = mOMX->enableNativeBuffers(
- mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE);
+ err = mOMXNode->enableNativeBuffers(
+ kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE);
}
if (mFlags & kFlagIsGrallocUsageProtected) {
// fallback is not supported for protected playback
@@ -2290,7 +2176,6 @@
// NOTE: both mBaseOutputFormat and mOutputFormat are outputFormat to signal first frame.
mBaseOutputFormat = outputFormat;
- // trigger a kWhatOutputFormatChanged msg on first buffer
mLastOutputFormat.clear();
err = getPortFormat(kPortIndexInput, inputFormat);
@@ -2330,8 +2215,8 @@
OMX_PARAM_U32TYPE config;
InitOMXParams(&config);
config.nU32 = (OMX_U32)priority;
- status_t temp = mOMX->setConfig(
- mNode, (OMX_INDEXTYPE)OMX_IndexConfigPriority,
+ status_t temp = mOMXNode->setConfig(
+ (OMX_INDEXTYPE)OMX_IndexConfigPriority,
&config, sizeof(config));
if (temp != OK) {
ALOGI("codec does not support config priority (err %d)", temp);
@@ -2358,8 +2243,8 @@
OMX_PARAM_U32TYPE config;
InitOMXParams(&config);
config.nU32 = rate;
- status_t err = mOMX->setConfig(
- mNode, (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate,
+ status_t err = mOMXNode->setConfig(
+ (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate,
&config, sizeof(config));
if (err != OK) {
ALOGI("codec does not support config operating rate (err %d)", err);
@@ -2371,8 +2256,8 @@
OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
InitOMXParams(&params);
params.nPortIndex = kPortIndexOutput;
- status_t err = mOMX->getConfig(
- mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
+ status_t err = mOMXNode->getConfig(
+ (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
if (err == OK) {
*intraRefreshPeriod = params.nRefreshPeriod;
return OK;
@@ -2383,8 +2268,8 @@
InitOMXParams(&refreshParams);
refreshParams.nPortIndex = kPortIndexOutput;
refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;
- err = mOMX->getParameter(
- mNode, OMX_IndexParamVideoIntraRefresh, &refreshParams, sizeof(refreshParams));
+ err = mOMXNode->getParameter(
+ OMX_IndexParamVideoIntraRefresh, &refreshParams, sizeof(refreshParams));
if (err != OK || refreshParams.nCirMBs == 0) {
*intraRefreshPeriod = 0;
return OK;
@@ -2396,8 +2281,8 @@
InitOMXParams(&def);
OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
def.nPortIndex = kPortIndexOutput;
- err = mOMX->getParameter(
- mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ err = mOMXNode->getParameter(
+ OMX_IndexParamPortDefinition, &def, sizeof(def));
if (err != OK) {
*intraRefreshPeriod = 0;
return err;
@@ -2415,8 +2300,8 @@
InitOMXParams(&params);
params.nPortIndex = kPortIndexOutput;
params.nRefreshPeriod = intraRefreshPeriod;
- status_t err = mOMX->setConfig(
- mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
+ status_t err = mOMXNode->setConfig(
+ (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
if (err == OK) {
return OK;
}
@@ -2443,8 +2328,8 @@
InitOMXParams(&def);
OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
def.nPortIndex = kPortIndexOutput;
- err = mOMX->getParameter(
- mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ err = mOMXNode->getParameter(
+ OMX_IndexParamPortDefinition, &def, sizeof(def));
if (err != OK) {
return err;
}
@@ -2454,8 +2339,9 @@
refreshParams.nCirMBs = divUp((divUp(width, 16u) * divUp(height, 16u)), intraRefreshPeriod);
}
- err = mOMX->setParameter(mNode, OMX_IndexParamVideoIntraRefresh,
- &refreshParams, sizeof(refreshParams));
+ err = mOMXNode->setParameter(
+ OMX_IndexParamVideoIntraRefresh,
+ &refreshParams, sizeof(refreshParams));
if (err != OK) {
return err;
}
@@ -2498,9 +2384,9 @@
InitOMXParams(&layerParams);
layerParams.nPortIndex = kPortIndexOutput;
- status_t err = mOMX->getParameter(
- mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
- &layerParams, sizeof(layerParams));
+ status_t err = mOMXNode->getParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
+ &layerParams, sizeof(layerParams));
if (err != OK) {
return err;
@@ -2520,8 +2406,8 @@
layerConfig.nBLayerCountActual = numBLayers;
layerConfig.bBitrateRatiosSpecified = OMX_FALSE;
- err = mOMX->setConfig(
- mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidVideoTemporalLayering,
+ err = mOMXNode->setConfig(
+ (OMX_INDEXTYPE)OMX_IndexConfigAndroidVideoTemporalLayering,
&layerConfig, sizeof(layerConfig));
} else {
layerParams.ePattern = pattern;
@@ -2529,8 +2415,8 @@
layerParams.nBLayerCountActual = numBLayers;
layerParams.bBitrateRatiosSpecified = OMX_FALSE;
- err = mOMX->setParameter(
- mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
+ err = mOMXNode->setParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
&layerParams, sizeof(layerParams));
}
@@ -2547,8 +2433,8 @@
return err;
}
- err = mOMX->getParameter(
- mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
+ err = mOMXNode->getParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
&layerParams, sizeof(layerParams));
if (err == OK) {
@@ -2571,8 +2457,8 @@
InitOMXParams(&def);
def.nPortIndex = portIndex;
- status_t err = mOMX->getParameter(
- mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ status_t err = mOMXNode->getParameter(
+ OMX_IndexParamPortDefinition, &def, sizeof(def));
if (err != OK) {
return err;
@@ -2584,15 +2470,15 @@
def.nBufferSize = size;
- err = mOMX->setParameter(
- mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ err = mOMXNode->setParameter(
+ OMX_IndexParamPortDefinition, &def, sizeof(def));
if (err != OK) {
return err;
}
- err = mOMX->getParameter(
- mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ err = mOMXNode->getParameter(
+ OMX_IndexParamPortDefinition, &def, sizeof(def));
if (err != OK) {
return err;
@@ -2614,9 +2500,8 @@
format.nPortIndex = portIndex;
for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
format.nIndex = index;
- status_t err = mOMX->getParameter(
- mNode, OMX_IndexParamAudioPortFormat,
- &format, sizeof(format));
+ status_t err = mOMXNode->getParameter(
+ OMX_IndexParamAudioPortFormat, &format, sizeof(format));
if (err != OK) {
return err;
@@ -2634,8 +2519,8 @@
}
}
- return mOMX->setParameter(
- mNode, OMX_IndexParamAudioPortFormat, &format, sizeof(format));
+ return mOMXNode->setParameter(
+ OMX_IndexParamAudioPortFormat, &format, sizeof(format));
}
status_t ACodec::setupAACCodec(
@@ -2667,8 +2552,8 @@
InitOMXParams(&def);
def.nPortIndex = kPortIndexOutput;
- err = mOMX->getParameter(
- mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ err = mOMXNode->getParameter(
+ OMX_IndexParamPortDefinition, &def, sizeof(def));
if (err != OK) {
return err;
@@ -2677,8 +2562,8 @@
def.format.audio.bFlagErrorConcealment = OMX_TRUE;
def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;
- err = mOMX->setParameter(
- mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ err = mOMXNode->setParameter(
+ OMX_IndexParamPortDefinition, &def, sizeof(def));
if (err != OK) {
return err;
@@ -2688,8 +2573,8 @@
InitOMXParams(&profile);
profile.nPortIndex = kPortIndexOutput;
- err = mOMX->getParameter(
- mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
+ err = mOMXNode->getParameter(
+ OMX_IndexParamAudioAac, &profile, sizeof(profile));
if (err != OK) {
return err;
@@ -2736,8 +2621,8 @@
}
- err = mOMX->setParameter(
- mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
+ err = mOMXNode->setParameter(
+ OMX_IndexParamAudioAac, &profile, sizeof(profile));
if (err != OK) {
return err;
@@ -2750,8 +2635,8 @@
InitOMXParams(&profile);
profile.nPortIndex = kPortIndexInput;
- err = mOMX->getParameter(
- mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
+ err = mOMXNode->getParameter(
+ OMX_IndexParamAudioAac, &profile, sizeof(profile));
if (err != OK) {
return err;
@@ -2775,10 +2660,12 @@
presentation.nEncodedTargetLevel = drc.encodedTargetLevel;
presentation.nPCMLimiterEnable = pcmLimiterEnable;
- status_t res = mOMX->setParameter(mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
+ status_t res = mOMXNode->setParameter(
+ OMX_IndexParamAudioAac, &profile, sizeof(profile));
if (res == OK) {
// optional parameters, will not cause configuration failure
- mOMX->setParameter(mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation,
+ mOMXNode->setParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation,
&presentation, sizeof(presentation));
} else {
ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res);
@@ -2805,11 +2692,8 @@
InitOMXParams(&def);
def.nPortIndex = kPortIndexInput;
- err = mOMX->getParameter(
- mNode,
- (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
- &def,
- sizeof(def));
+ err = mOMXNode->getParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, &def, sizeof(def));
if (err != OK) {
return err;
@@ -2818,11 +2702,8 @@
def.nChannels = numChannels;
def.nSampleRate = sampleRate;
- return mOMX->setParameter(
- mNode,
- (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
- &def,
- sizeof(def));
+ return mOMXNode->setParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, &def, sizeof(def));
}
status_t ACodec::setupEAC3Codec(
@@ -2843,11 +2724,8 @@
InitOMXParams(&def);
def.nPortIndex = kPortIndexInput;
- err = mOMX->getParameter(
- mNode,
- (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
- &def,
- sizeof(def));
+ err = mOMXNode->getParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, &def, sizeof(def));
if (err != OK) {
return err;
@@ -2856,11 +2734,8 @@
def.nChannels = numChannels;
def.nSampleRate = sampleRate;
- return mOMX->setParameter(
- mNode,
- (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
- &def,
- sizeof(def));
+ return mOMXNode->setParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, &def, sizeof(def));
}
static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate(
@@ -2913,8 +2788,8 @@
InitOMXParams(&def);
def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput;
- status_t err =
- mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));
+ status_t err = mOMXNode->getParameter(
+ OMX_IndexParamAudioAmr, &def, sizeof(def));
if (err != OK) {
return err;
@@ -2923,8 +2798,8 @@
def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF;
def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate);
- err = mOMX->setParameter(
- mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));
+ err = mOMXNode->setParameter(
+ OMX_IndexParamAudioAmr, &def, sizeof(def));
if (err != OK) {
return err;
@@ -2954,13 +2829,13 @@
def.nPortIndex = kPortIndexOutput;
// configure compression level
- status_t err = mOMX->getParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
+ status_t err = mOMXNode->getParameter(OMX_IndexParamAudioFlac, &def, sizeof(def));
if (err != OK) {
ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err);
return err;
}
def.nCompressionLevel = compressionLevel;
- err = mOMX->setParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
+ err = mOMXNode->setParameter(OMX_IndexParamAudioFlac, &def, sizeof(def));
if (err != OK) {
ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err);
return err;
@@ -2979,8 +2854,8 @@
InitOMXParams(&def);
def.nPortIndex = portIndex;
- status_t err = mOMX->getParameter(
- mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ status_t err = mOMXNode->getParameter(
+ OMX_IndexParamPortDefinition, &def, sizeof(def));
if (err != OK) {
return err;
@@ -2988,8 +2863,8 @@
def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
- err = mOMX->setParameter(
- mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ err = mOMXNode->setParameter(
+ OMX_IndexParamPortDefinition, &def, sizeof(def));
if (err != OK) {
return err;
@@ -2999,8 +2874,8 @@
InitOMXParams(&pcmParams);
pcmParams.nPortIndex = portIndex;
- err = mOMX->getParameter(
- mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
+ err = mOMXNode->getParameter(
+ OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
if (err != OK) {
return err;
@@ -3031,15 +2906,15 @@
return OMX_ErrorNone;
}
- err = mOMX->setParameter(
- mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
+ err = mOMXNode->setParameter(
+ OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
// if we could not set up raw format to non-16-bit, try with 16-bit
// NOTE: we will also verify this via readback, in case codec ignores these fields
if (err != OK && encoding != kAudioEncodingPcm16bit) {
pcmParams.eNumData = OMX_NumericalDataSigned;
pcmParams.nBitPerSample = 16;
- err = mOMX->setParameter(
- mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
+ err = mOMXNode->setParameter(
+ OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
}
return err;
}
@@ -3048,8 +2923,8 @@
int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) {
native_handle_t* sidebandHandle;
- status_t err = mOMX->configureVideoTunnelMode(
- mNode, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle);
+ status_t err = mOMXNode->configureVideoTunnelMode(
+ kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle);
if (err != OK) {
ALOGE("configureVideoTunnelMode failed! (err %d).", err);
return err;
@@ -3078,8 +2953,8 @@
for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
format.nIndex = index;
- status_t err = mOMX->getParameter(
- mNode, OMX_IndexParamVideoPortFormat,
+ status_t err = mOMXNode->getParameter(
+ OMX_IndexParamVideoPortFormat,
&format, sizeof(format));
if (err != OK) {
@@ -3089,8 +2964,8 @@
// substitute back flexible color format to codec supported format
OMX_U32 flexibleEquivalent;
if (compressionFormat == OMX_VIDEO_CodingUnused
- && isFlexibleColorFormat(
- mOMX, mNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent)
+ && IsFlexibleColorFormat(
+ mOMXNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent)
&& colorFormat == flexibleEquivalent) {
ALOGI("[%s] using color format %#x in place of %#x",
mComponentName.c_str(), format.eColorFormat, colorFormat);
@@ -3133,9 +3008,8 @@
return UNKNOWN_ERROR;
}
- status_t err = mOMX->setParameter(
- mNode, OMX_IndexParamVideoPortFormat,
- &format, sizeof(format));
+ status_t err = mOMXNode->setParameter(
+ OMX_IndexParamVideoPortFormat, &format, sizeof(format));
return err;
}
@@ -3165,9 +3039,8 @@
for (OMX_U32 index = 0; ; ++index) {
format.nIndex = index;
- status_t err = mOMX->getParameter(
- mNode, OMX_IndexParamVideoPortFormat,
- &format, sizeof(format));
+ status_t err = mOMXNode->getParameter(
+ OMX_IndexParamVideoPortFormat, &format, sizeof(format));
if (err != OK) {
// no more formats, pick legacy format if found
if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) {
@@ -3193,16 +3066,15 @@
// find best legacy non-standard format
OMX_U32 flexibleEquivalent;
if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused
- && isFlexibleColorFormat(
- mOMX, mNode, format.eColorFormat, false /* usingNativeBuffers */,
+ && IsFlexibleColorFormat(
+ mOMXNode, format.eColorFormat, false /* usingNativeBuffers */,
&flexibleEquivalent)
&& flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) {
memcpy(&legacyFormat, &format, sizeof(format));
}
}
- return mOMX->setParameter(
- mNode, OMX_IndexParamVideoPortFormat,
- &format, sizeof(format));
+ return mOMXNode->setParameter(
+ OMX_IndexParamVideoPortFormat, &format, sizeof(format));
}
static const struct VideoCodingMapEntry {
@@ -3258,14 +3130,14 @@
status_t err;
ALOGD("Setting [%s] %s port buffer number: %d", mComponentName.c_str(),
portIndex == kPortIndexInput ? "input" : "output", bufferNum);
- err = mOMX->getParameter(
- mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ err = mOMXNode->getParameter(
+ OMX_IndexParamPortDefinition, &def, sizeof(def));
if (err != OK) {
return err;
}
def.nBufferCountActual = bufferNum;
- err = mOMX->setParameter(
- mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ err = mOMXNode->setParameter(
+ OMX_IndexParamPortDefinition, &def, sizeof(def));
if (err != OK) {
// Component could reject this request.
ALOGW("Fail to set [%s] %s port buffer number: %d", mComponentName.c_str(),
@@ -3296,11 +3168,9 @@
params.nPortIndex = kPortIndexInput;
// Check if VP9 decoder advertises supported profiles.
params.nProfileIndex = 0;
- status_t err = mOMX->getParameter(
- mNode,
+ status_t err = mOMXNode->getParameter(
OMX_IndexParamVideoProfileLevelQuerySupported,
- &params,
- sizeof(params));
+ &params, sizeof(params));
mIsLegacyVP9Decoder = err != OK;
}
@@ -3389,8 +3259,8 @@
}
status_t ACodec::initDescribeColorAspectsIndex() {
- status_t err = mOMX->getExtensionIndex(
- mNode, "OMX.google.android.index.describeColorAspects", &mDescribeColorAspectsIndex);
+ status_t err = mOMXNode->getExtensionIndex(
+ "OMX.google.android.index.describeColorAspects", &mDescribeColorAspectsIndex);
if (err != OK) {
mDescribeColorAspectsIndex = (OMX_INDEXTYPE)0;
}
@@ -3400,7 +3270,7 @@
status_t ACodec::setCodecColorAspects(DescribeColorAspectsParams ¶ms, bool verify) {
status_t err = ERROR_UNSUPPORTED;
if (mDescribeColorAspectsIndex) {
- err = mOMX->setConfig(mNode, mDescribeColorAspectsIndex, ¶ms, sizeof(params));
+ err = mOMXNode->setConfig(mDescribeColorAspectsIndex, ¶ms, sizeof(params));
}
ALOGV("[%s] setting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
mComponentName.c_str(),
@@ -3445,7 +3315,7 @@
status_t ACodec::getCodecColorAspects(DescribeColorAspectsParams ¶ms) {
status_t err = ERROR_UNSUPPORTED;
if (mDescribeColorAspectsIndex) {
- err = mOMX->getConfig(mNode, mDescribeColorAspectsIndex, ¶ms, sizeof(params));
+ err = mOMXNode->getConfig(mDescribeColorAspectsIndex, ¶ms, sizeof(params));
}
ALOGV("[%s] got color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
mComponentName.c_str(),
@@ -3667,8 +3537,8 @@
}
status_t ACodec::initDescribeHDRStaticInfoIndex() {
- status_t err = mOMX->getExtensionIndex(
- mNode, "OMX.google.android.index.describeHDRStaticInfo", &mDescribeHDRStaticInfoIndex);
+ status_t err = mOMXNode->getExtensionIndex(
+ "OMX.google.android.index.describeHDRStaticInfo", &mDescribeHDRStaticInfoIndex);
if (err != OK) {
mDescribeHDRStaticInfoIndex = (OMX_INDEXTYPE)0;
}
@@ -3678,7 +3548,7 @@
status_t ACodec::setHDRStaticInfo(const DescribeHDRStaticInfoParams ¶ms) {
status_t err = ERROR_UNSUPPORTED;
if (mDescribeHDRStaticInfoIndex) {
- err = mOMX->setConfig(mNode, mDescribeHDRStaticInfoIndex, ¶ms, sizeof(params));
+ err = mOMXNode->setConfig(mDescribeHDRStaticInfoIndex, ¶ms, sizeof(params));
}
const HDRStaticInfo *info = ¶ms.sInfo;
@@ -3699,7 +3569,7 @@
status_t ACodec::getHDRStaticInfo(DescribeHDRStaticInfoParams ¶ms) {
status_t err = ERROR_UNSUPPORTED;
if (mDescribeHDRStaticInfoIndex) {
- err = mOMX->getConfig(mNode, mDescribeHDRStaticInfoIndex, ¶ms, sizeof(params));
+ err = mOMXNode->getConfig(mDescribeHDRStaticInfoIndex, ¶ms, sizeof(params));
}
ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex,
@@ -3738,8 +3608,8 @@
def.nPortIndex = kPortIndexInput;
- err = mOMX->getParameter(
- mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ err = mOMXNode->getParameter(
+ OMX_IndexParamPortDefinition, &def, sizeof(def));
if (err != OK) {
return err;
@@ -3789,8 +3659,8 @@
video_def->eColorFormat = colorFormat;
}
- err = mOMX->setParameter(
- mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ err = mOMXNode->setParameter(
+ OMX_IndexParamPortDefinition, &def, sizeof(def));
if (err != OK) {
ALOGE("[%s] failed to set input port definition parameters.",
@@ -3820,8 +3690,8 @@
def.nPortIndex = kPortIndexOutput;
- err = mOMX->getParameter(
- mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ err = mOMXNode->getParameter(
+ OMX_IndexParamPortDefinition, &def, sizeof(def));
if (err != OK) {
return err;
@@ -3834,8 +3704,8 @@
video_def->eCompressionFormat = compressionFormat;
video_def->eColorFormat = OMX_COLOR_FormatUnused;
- err = mOMX->setParameter(
- mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ err = mOMXNode->setParameter(
+ OMX_IndexParamPortDefinition, &def, sizeof(def));
if (err != OK) {
ALOGE("[%s] failed to set output port definition parameters.",
@@ -3964,9 +3834,8 @@
params.nAirRef = ref;
}
- status_t err = mOMX->setParameter(
- mNode, OMX_IndexParamVideoIntraRefresh,
- &params, sizeof(params));
+ status_t err = mOMXNode->setParameter(
+ OMX_IndexParamVideoIntraRefresh, &params, sizeof(params));
return err;
}
@@ -4029,8 +3898,8 @@
InitOMXParams(&mpeg4type);
mpeg4type.nPortIndex = kPortIndexOutput;
- status_t err = mOMX->getParameter(
- mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));
+ status_t err = mOMXNode->getParameter(
+ OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));
if (err != OK) {
return err;
@@ -4072,8 +3941,8 @@
mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level);
}
- err = mOMX->setParameter(
- mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));
+ err = mOMXNode->setParameter(
+ OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));
if (err != OK) {
return err;
@@ -4111,8 +3980,8 @@
InitOMXParams(&h263type);
h263type.nPortIndex = kPortIndexOutput;
- status_t err = mOMX->getParameter(
- mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));
+ status_t err = mOMXNode->getParameter(
+ OMX_IndexParamVideoH263, &h263type, sizeof(h263type));
if (err != OK) {
return err;
@@ -4149,8 +4018,8 @@
h263type.nPictureHeaderRepetition = 0;
h263type.nGOBHeaderInterval = 0;
- err = mOMX->setParameter(
- mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));
+ err = mOMXNode->setParameter(
+ OMX_IndexParamVideoH263, &h263type, sizeof(h263type));
if (err != OK) {
return err;
@@ -4251,8 +4120,8 @@
InitOMXParams(&h264type);
h264type.nPortIndex = kPortIndexOutput;
- err = mOMX->getParameter(
- mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
+ err = mOMXNode->getParameter(
+ OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
if (err != OK) {
return err;
@@ -4341,8 +4210,8 @@
h264type.bMBAFF = OMX_FALSE;
h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable;
- err = mOMX->setParameter(
- mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
+ err = mOMXNode->setParameter(
+ OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
if (err != OK) {
return err;
@@ -4358,8 +4227,8 @@
OMX_VIDEO_PARAM_ANDROID_TEMPORALLAYERINGTYPE layering;
InitOMXParams(&layering);
layering.nPortIndex = kPortIndexOutput;
- if (mOMX->getParameter(
- mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
+ if (mOMXNode->getParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
&layering, sizeof(layering)) == OK
&& layering.eSupportedPatterns
&& layering.nBLayerCountMax == 0) {
@@ -4367,8 +4236,8 @@
h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames);
h264type.nAllowedPictureTypes &= ~OMX_VIDEO_PictureTypeB;
ALOGI("disabling B-frames");
- err = mOMX->setParameter(
- mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
+ err = mOMXNode->setParameter(
+ OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
if (err != OK) {
return err;
@@ -4403,8 +4272,8 @@
hevcType.nPortIndex = kPortIndexOutput;
status_t err = OK;
- err = mOMX->getParameter(
- mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType));
+ err = mOMXNode->getParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType));
if (err != OK) {
return err;
}
@@ -4427,8 +4296,8 @@
// TODO: finer control?
hevcType.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate) + 1;
- err = mOMX->setParameter(
- mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType));
+ err = mOMXNode->setParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType));
if (err != OK) {
return err;
}
@@ -4496,8 +4365,8 @@
OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
InitOMXParams(&vp8type);
vp8type.nPortIndex = kPortIndexOutput;
- status_t err = mOMX->getParameter(
- mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
+ status_t err = mOMXNode->getParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
&vp8type, sizeof(vp8type));
if (err == OK) {
@@ -4517,8 +4386,8 @@
vp8type.nMaxQuantizer = 63;
}
- err = mOMX->setParameter(
- mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
+ err = mOMXNode->setParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
&vp8type, sizeof(vp8type));
if (err != OK) {
ALOGW("Extended VP8 parameters set failed: %d", err);
@@ -4542,11 +4411,9 @@
for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
params.nProfileIndex = index;
- status_t err = mOMX->getParameter(
- mNode,
+ status_t err = mOMXNode->getParameter(
OMX_IndexParamVideoProfileLevelQuerySupported,
- &params,
- sizeof(params));
+ &params, sizeof(params));
if (err != OK) {
return err;
@@ -4574,9 +4441,8 @@
InitOMXParams(&bitrateType);
bitrateType.nPortIndex = kPortIndexOutput;
- status_t err = mOMX->getParameter(
- mNode, OMX_IndexParamVideoBitrate,
- &bitrateType, sizeof(bitrateType));
+ status_t err = mOMXNode->getParameter(
+ OMX_IndexParamVideoBitrate, &bitrateType, sizeof(bitrateType));
if (err != OK) {
return err;
@@ -4585,9 +4451,8 @@
bitrateType.eControlRate = bitrateMode;
bitrateType.nTargetBitrate = bitrate;
- return mOMX->setParameter(
- mNode, OMX_IndexParamVideoBitrate,
- &bitrateType, sizeof(bitrateType));
+ return mOMXNode->setParameter(
+ OMX_IndexParamVideoBitrate, &bitrateType, sizeof(bitrateType));
}
status_t ACodec::setupErrorCorrectionParameters() {
@@ -4595,8 +4460,8 @@
InitOMXParams(&errorCorrectionType);
errorCorrectionType.nPortIndex = kPortIndexOutput;
- status_t err = mOMX->getParameter(
- mNode, OMX_IndexParamVideoErrorCorrection,
+ status_t err = mOMXNode->getParameter(
+ OMX_IndexParamVideoErrorCorrection,
&errorCorrectionType, sizeof(errorCorrectionType));
if (err != OK) {
@@ -4609,8 +4474,8 @@
errorCorrectionType.bEnableDataPartitioning = OMX_FALSE;
errorCorrectionType.bEnableRVLC = OMX_FALSE;
- return mOMX->setParameter(
- mNode, OMX_IndexParamVideoErrorCorrection,
+ return mOMXNode->setParameter(
+ OMX_IndexParamVideoErrorCorrection,
&errorCorrectionType, sizeof(errorCorrectionType));
}
@@ -4624,8 +4489,8 @@
OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
- status_t err = mOMX->getParameter(
- mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ status_t err = mOMXNode->getParameter(
+ OMX_IndexParamPortDefinition, &def, sizeof(def));
if (err != OK) {
return err;
}
@@ -4654,18 +4519,18 @@
}
}
- err = mOMX->setParameter(
- mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ err = mOMXNode->setParameter(
+ OMX_IndexParamPortDefinition, &def, sizeof(def));
return err;
}
status_t ACodec::initNativeWindow() {
if (mNativeWindow != NULL) {
- return mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_TRUE);
+ return mOMXNode->enableNativeBuffers(kPortIndexOutput, OMX_TRUE /* graphic */, OMX_TRUE);
}
- mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE);
+ mOMXNode->enableNativeBuffers(kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE);
return OK;
}
@@ -4747,195 +4612,13 @@
}
}
-// static
-bool ACodec::describeDefaultColorFormat(DescribeColorFormat2Params &params) {
- MediaImage2 &image = params.sMediaImage;
- memset(&image, 0, sizeof(image));
-
- image.mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
- image.mNumPlanes = 0;
-
- const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat;
- image.mWidth = params.nFrameWidth;
- image.mHeight = params.nFrameHeight;
-
- // only supporting YUV420
- if (fmt != OMX_COLOR_FormatYUV420Planar &&
- fmt != OMX_COLOR_FormatYUV420PackedPlanar &&
- fmt != OMX_COLOR_FormatYUV420SemiPlanar &&
- fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar &&
- fmt != (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YV12) {
- ALOGW("do not know color format 0x%x = %d", fmt, fmt);
- return false;
- }
-
- // TEMPORARY FIX for some vendors that advertise sliceHeight as 0
- if (params.nStride != 0 && params.nSliceHeight == 0) {
- ALOGW("using sliceHeight=%u instead of what codec advertised (=0)",
- params.nFrameHeight);
- params.nSliceHeight = params.nFrameHeight;
- }
-
- // we need stride and slice-height to be non-zero and sensible. These values were chosen to
- // prevent integer overflows further down the line, and do not indicate support for
- // 32kx32k video.
- if (params.nStride == 0 || params.nSliceHeight == 0
- || params.nStride > 32768 || params.nSliceHeight > 32768) {
- ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u",
- fmt, fmt, params.nStride, params.nSliceHeight);
- return false;
- }
-
- // set-up YUV format
- image.mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV;
- image.mNumPlanes = 3;
- image.mBitDepth = 8;
- image.mBitDepthAllocated = 8;
- image.mPlane[image.Y].mOffset = 0;
- image.mPlane[image.Y].mColInc = 1;
- image.mPlane[image.Y].mRowInc = params.nStride;
- image.mPlane[image.Y].mHorizSubsampling = 1;
- image.mPlane[image.Y].mVertSubsampling = 1;
-
- switch ((int)fmt) {
- case HAL_PIXEL_FORMAT_YV12:
- if (params.bUsingNativeBuffers) {
- size_t ystride = align(params.nStride, 16);
- size_t cstride = align(params.nStride / 2, 16);
- image.mPlane[image.Y].mRowInc = ystride;
-
- image.mPlane[image.V].mOffset = ystride * params.nSliceHeight;
- image.mPlane[image.V].mColInc = 1;
- image.mPlane[image.V].mRowInc = cstride;
- image.mPlane[image.V].mHorizSubsampling = 2;
- image.mPlane[image.V].mVertSubsampling = 2;
-
- image.mPlane[image.U].mOffset = image.mPlane[image.V].mOffset
- + (cstride * params.nSliceHeight / 2);
- image.mPlane[image.U].mColInc = 1;
- image.mPlane[image.U].mRowInc = cstride;
- image.mPlane[image.U].mHorizSubsampling = 2;
- image.mPlane[image.U].mVertSubsampling = 2;
- break;
- } else {
- // fall through as YV12 is used for YUV420Planar by some codecs
- }
-
- case OMX_COLOR_FormatYUV420Planar:
- case OMX_COLOR_FormatYUV420PackedPlanar:
- image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
- image.mPlane[image.U].mColInc = 1;
- image.mPlane[image.U].mRowInc = params.nStride / 2;
- image.mPlane[image.U].mHorizSubsampling = 2;
- image.mPlane[image.U].mVertSubsampling = 2;
-
- image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset
- + (params.nStride * params.nSliceHeight / 4);
- image.mPlane[image.V].mColInc = 1;
- image.mPlane[image.V].mRowInc = params.nStride / 2;
- image.mPlane[image.V].mHorizSubsampling = 2;
- image.mPlane[image.V].mVertSubsampling = 2;
- break;
-
- case OMX_COLOR_FormatYUV420SemiPlanar:
- // FIXME: NV21 for sw-encoder, NV12 for decoder and hw-encoder
- case OMX_COLOR_FormatYUV420PackedSemiPlanar:
- // NV12
- image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
- image.mPlane[image.U].mColInc = 2;
- image.mPlane[image.U].mRowInc = params.nStride;
- image.mPlane[image.U].mHorizSubsampling = 2;
- image.mPlane[image.U].mVertSubsampling = 2;
-
- image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset + 1;
- image.mPlane[image.V].mColInc = 2;
- image.mPlane[image.V].mRowInc = params.nStride;
- image.mPlane[image.V].mHorizSubsampling = 2;
- image.mPlane[image.V].mVertSubsampling = 2;
- break;
-
- default:
- TRESPASS();
- }
- return true;
-}
-
-// static
-bool ACodec::describeColorFormat(
- const sp<IOMX> &omx, IOMX::node_id node,
- DescribeColorFormat2Params &describeParams)
-{
- OMX_INDEXTYPE describeColorFormatIndex;
- if (omx->getExtensionIndex(
- node, "OMX.google.android.index.describeColorFormat",
- &describeColorFormatIndex) == OK) {
- DescribeColorFormatParams describeParamsV1(describeParams);
- if (omx->getParameter(
- node, describeColorFormatIndex,
- &describeParamsV1, sizeof(describeParamsV1)) == OK) {
- describeParams.initFromV1(describeParamsV1);
- return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
- }
- } else if (omx->getExtensionIndex(
- node, "OMX.google.android.index.describeColorFormat2", &describeColorFormatIndex) == OK
- && omx->getParameter(
- node, describeColorFormatIndex, &describeParams, sizeof(describeParams)) == OK) {
- return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
- }
-
- return describeDefaultColorFormat(describeParams);
-}
-
-// static
-bool ACodec::isFlexibleColorFormat(
- const sp<IOMX> &omx, IOMX::node_id node,
- uint32_t colorFormat, bool usingNativeBuffers, OMX_U32 *flexibleEquivalent) {
- DescribeColorFormat2Params describeParams;
- InitOMXParams(&describeParams);
- describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat;
- // reasonable dummy values
- describeParams.nFrameWidth = 128;
- describeParams.nFrameHeight = 128;
- describeParams.nStride = 128;
- describeParams.nSliceHeight = 128;
- describeParams.bUsingNativeBuffers = (OMX_BOOL)usingNativeBuffers;
-
- CHECK(flexibleEquivalent != NULL);
-
- if (!describeColorFormat(omx, node, describeParams)) {
- return false;
- }
-
- const MediaImage2 &img = describeParams.sMediaImage;
- if (img.mType == MediaImage2::MEDIA_IMAGE_TYPE_YUV) {
- if (img.mNumPlanes != 3
- || img.mPlane[img.Y].mHorizSubsampling != 1
- || img.mPlane[img.Y].mVertSubsampling != 1) {
- return false;
- }
-
- // YUV 420
- if (img.mPlane[img.U].mHorizSubsampling == 2
- && img.mPlane[img.U].mVertSubsampling == 2
- && img.mPlane[img.V].mHorizSubsampling == 2
- && img.mPlane[img.V].mVertSubsampling == 2) {
- // possible flexible YUV420 format
- if (img.mBitDepth <= 8) {
- *flexibleEquivalent = OMX_COLOR_FormatYUV420Flexible;
- return true;
- }
- }
- }
- return false;
-}
-
status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> &notify) {
const char *niceIndex = portIndex == kPortIndexInput ? "input" : "output";
OMX_PARAM_PORTDEFINITIONTYPE def;
InitOMXParams(&def);
def.nPortIndex = portIndex;
- status_t err = mOMX->getParameter(mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ status_t err = mOMXNode->getParameter(OMX_IndexParamPortDefinition, &def, sizeof(def));
if (err != OK) {
return err;
}
@@ -4969,7 +4652,7 @@
describeParams.nSliceHeight = videoDef->nSliceHeight;
describeParams.bUsingNativeBuffers = OMX_FALSE;
- if (describeColorFormat(mOMX, mNode, describeParams)) {
+ if (DescribeColorFormat(mOMXNode, describeParams)) {
notify->setBuffer(
"image-data",
ABuffer::CreateAsCopy(
@@ -4994,8 +4677,7 @@
InitOMXParams(&rect);
rect.nPortIndex = portIndex;
- if (mOMX->getConfig(
- mNode,
+ if (mOMXNode->getConfig(
(portIndex == kPortIndexOutput ?
OMX_IndexConfigCommonOutputCrop :
OMX_IndexConfigCommonInputCrop),
@@ -5051,8 +4733,7 @@
OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
InitOMXParams(&vp8type);
vp8type.nPortIndex = kPortIndexOutput;
- status_t err = mOMX->getParameter(
- mNode,
+ status_t err = mOMXNode->getParameter(
(OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
&vp8type,
sizeof(vp8type));
@@ -5121,8 +4802,8 @@
InitOMXParams(&params);
params.nPortIndex = portIndex;
- err = mOMX->getParameter(
- mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
+ err = mOMXNode->getParameter(
+ OMX_IndexParamAudioPcm, &params, sizeof(params));
if (err != OK) {
return err;
}
@@ -5169,8 +4850,8 @@
InitOMXParams(&params);
params.nPortIndex = portIndex;
- err = mOMX->getParameter(
- mNode, OMX_IndexParamAudioAac, &params, sizeof(params));
+ err = mOMXNode->getParameter(
+ OMX_IndexParamAudioAac, &params, sizeof(params));
if (err != OK) {
return err;
}
@@ -5187,8 +4868,8 @@
InitOMXParams(&params);
params.nPortIndex = portIndex;
- err = mOMX->getParameter(
- mNode, OMX_IndexParamAudioAmr, &params, sizeof(params));
+ err = mOMXNode->getParameter(
+ OMX_IndexParamAudioAmr, &params, sizeof(params));
if (err != OK) {
return err;
}
@@ -5210,8 +4891,8 @@
InitOMXParams(&params);
params.nPortIndex = portIndex;
- err = mOMX->getParameter(
- mNode, OMX_IndexParamAudioFlac, &params, sizeof(params));
+ err = mOMXNode->getParameter(
+ OMX_IndexParamAudioFlac, &params, sizeof(params));
if (err != OK) {
return err;
}
@@ -5228,8 +4909,8 @@
InitOMXParams(&params);
params.nPortIndex = portIndex;
- err = mOMX->getParameter(
- mNode, OMX_IndexParamAudioMp3, &params, sizeof(params));
+ err = mOMXNode->getParameter(
+ OMX_IndexParamAudioMp3, &params, sizeof(params));
if (err != OK) {
return err;
}
@@ -5246,8 +4927,8 @@
InitOMXParams(&params);
params.nPortIndex = portIndex;
- err = mOMX->getParameter(
- mNode, OMX_IndexParamAudioVorbis, &params, sizeof(params));
+ err = mOMXNode->getParameter(
+ OMX_IndexParamAudioVorbis, &params, sizeof(params));
if (err != OK) {
return err;
}
@@ -5264,8 +4945,8 @@
InitOMXParams(&params);
params.nPortIndex = portIndex;
- err = mOMX->getParameter(
- mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
+ err = mOMXNode->getParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
&params, sizeof(params));
if (err != OK) {
return err;
@@ -5283,8 +4964,8 @@
InitOMXParams(&params);
params.nPortIndex = portIndex;
- err = mOMX->getParameter(
- mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
+ err = mOMXNode->getParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
&params, sizeof(params));
if (err != OK) {
return err;
@@ -5302,8 +4983,8 @@
InitOMXParams(&params);
params.nPortIndex = portIndex;
- err = mOMX->getParameter(
- mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus,
+ err = mOMXNode->getParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus,
&params, sizeof(params));
if (err != OK) {
return err;
@@ -5321,8 +5002,8 @@
InitOMXParams(&params);
params.nPortIndex = portIndex;
- err = mOMX->getParameter(
- mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, &params, sizeof(params));
+ err = mOMXNode->getParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, &params, sizeof(params));
if (err != OK) {
return err;
}
@@ -5348,8 +5029,8 @@
InitOMXParams(&params);
params.nPortIndex = portIndex;
- err = mOMX->getParameter(
- mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
+ err = mOMXNode->getParameter(
+ OMX_IndexParamAudioPcm, &params, sizeof(params));
if (err != OK) {
return err;
}
@@ -5492,11 +5173,6 @@
mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay, mEncoderPadding, channelCount);
}
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatOutputFormatChanged);
- notify->setMessage("format", mOutputFormat);
- notify->post();
-
// mLastOutputFormat is not used when tunneled; doing this just to stay consistent
mLastOutputFormat = mOutputFormat;
}
@@ -5522,11 +5198,6 @@
notify->post();
}
-////////////////////////////////////////////////////////////////////////////////
-
-ACodec::PortDescription::PortDescription() {
-}
-
status_t ACodec::requestIDRFrame() {
if (!mIsEncoder) {
return ERROR_UNSUPPORTED;
@@ -5538,20 +5209,21 @@
params.nPortIndex = kPortIndexOutput;
params.IntraRefreshVOP = OMX_TRUE;
- return mOMX->setConfig(
- mNode,
+ return mOMXNode->setConfig(
OMX_IndexConfigVideoIntraVOPRefresh,
&params,
sizeof(params));
}
+////////////////////////////////////////////////////////////////////////////////
+
+ACodec::PortDescription::PortDescription() {
+}
+
void ACodec::PortDescription::addBuffer(
- IOMX::buffer_id id, const sp<ABuffer> &buffer,
- const sp<NativeHandle> &handle, const sp<RefBase> &memRef) {
+ IOMX::buffer_id id, const sp<MediaCodecBuffer> &buffer) {
mBufferIDs.push_back(id);
mBuffers.push_back(buffer);
- mHandles.push_back(handle);
- mMemRefs.push_back(memRef);
}
size_t ACodec::PortDescription::countBuffers() {
@@ -5562,18 +5234,10 @@
return mBufferIDs.itemAt(index);
}
-sp<ABuffer> ACodec::PortDescription::bufferAt(size_t index) const {
+sp<MediaCodecBuffer> ACodec::PortDescription::bufferAt(size_t index) const {
return mBuffers.itemAt(index);
}
-sp<NativeHandle> ACodec::PortDescription::handleAt(size_t index) const {
- return mHandles.itemAt(index);
-}
-
-sp<RefBase> ACodec::PortDescription::memRefAt(size_t index) const {
- return mMemRefs.itemAt(index);
-}
-
////////////////////////////////////////////////////////////////////////////////
ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState)
@@ -5646,6 +5310,7 @@
{
// This will result in kFlagSawMediaServerDie handling in MediaCodec.
ALOGE("OMX/mediaserver died, signalling error!");
+ mCodec->mGraphicBufferSource.clear();
mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT);
break;
}
@@ -5654,7 +5319,7 @@
{
ALOGI("[%s] forcing the release of codec",
mCodec->mComponentName.c_str());
- status_t err = mCodec->mOMX->freeNode(mCodec->mNode);
+ status_t err = mCodec->mOMXNode->freeNode();
ALOGE_IF("[%s] failed to release codec instance: err=%d",
mCodec->mComponentName.c_str(), err);
sp<AMessage> notify = mCodec->mNotify->dup();
@@ -5673,16 +5338,17 @@
bool ACodec::BaseState::checkOMXMessage(const sp<AMessage> &msg) {
// there is a possibility that this is an outstanding message for a
// codec that we have already destroyed
- if (mCodec->mNode == 0) {
+ if (mCodec->mOMXNode == NULL) {
ALOGI("ignoring message as already freed component: %s",
msg->debugString().c_str());
return false;
}
- IOMX::node_id nodeID;
- CHECK(msg->findInt32("node", (int32_t*)&nodeID));
- if (nodeID != mCodec->mNode) {
- ALOGE("Unexpected message for nodeID: %u, should have been %u", nodeID, mCodec->mNode);
+ int32_t generation;
+ CHECK(msg->findInt32("generation", (int32_t*)&generation));
+ if (generation != mCodec->mNodeGeneration) {
+ ALOGW("Unexpected message for component: %s, gen %u, cur %u",
+ msg->debugString().c_str(), generation, mCodec->mNodeGeneration);
return false;
}
return true;
@@ -5800,11 +5466,7 @@
bool ACodec::BaseState::onOMXEvent(
OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
if (event == OMX_EventDataSpaceChanged) {
- ColorAspects aspects;
- aspects.mRange = (ColorAspects::Range)((data2 >> 24) & 0xFF);
- aspects.mPrimaries = (ColorAspects::Primaries)((data2 >> 16) & 0xFF);
- aspects.mMatrixCoeffs = (ColorAspects::MatrixCoeffs)((data2 >> 8) & 0xFF);
- aspects.mTransfer = (ColorAspects::Transfer)(data2 & 0xFF);
+ ColorAspects aspects = ColorUtils::unpackToColorAspects(data2);
mCodec->onDataSpaceChanged((android_dataspace)data1, aspects);
return true;
@@ -5890,8 +5552,9 @@
notify->setInt32("what", CodecBase::kWhatFillThisBuffer);
notify->setInt32("buffer-id", info->mBufferID);
- info->mData->meta()->clear();
- notify->setBuffer("buffer", info->mData);
+ info->mData->setFormat(mCodec->mInputFormat);
+ notify->setObject("buffer", info->mData);
+ info->mData.clear();
sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec);
reply->setInt32("buffer-id", info->mBufferID);
@@ -5906,12 +5569,13 @@
void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) {
IOMX::buffer_id bufferID;
CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
- sp<ABuffer> buffer;
+ sp<MediaCodecBuffer> buffer;
int32_t err = OK;
bool eos = false;
PortMode mode = getPortMode(kPortIndexInput);
- if (!msg->findBuffer("buffer", &buffer)) {
+ sp<RefBase> obj;
+ if (!msg->findObject("buffer", &obj)) {
/* these are unfilled buffers returned by client */
CHECK(msg->findInt32("err", &err));
@@ -5923,8 +5587,8 @@
mCodec->mComponentName.c_str(), err);
eos = true;
}
-
- buffer.clear();
+ } else {
+ buffer = static_cast<MediaCodecBuffer *>(obj.get());
}
int32_t tmp;
@@ -5943,6 +5607,7 @@
}
info->mStatus = BufferInfo::OWNED_BY_US;
+ info->mData = buffer;
switch (mode) {
case KEEP_BUFFERS:
@@ -5987,11 +5652,12 @@
flags |= OMX_BUFFERFLAG_EOS;
}
- if (buffer != info->mCodecData) {
+ size_t size = buffer->size();
+ if (buffer->base() != info->mCodecData->base()) {
ALOGV("[%s] Needs to copy input data for buffer %u. (%p != %p)",
mCodec->mComponentName.c_str(),
bufferID,
- buffer.get(), info->mCodecData.get());
+ buffer->base(), info->mCodecData->base());
sp<DataConverter> converter = mCodec->mConverter[kPortIndexInput];
if (converter == NULL || isCSD) {
@@ -6002,6 +5668,9 @@
mCodec->signalError(OMX_ErrorUndefined, err);
return;
}
+ size = info->mCodecData->size();
+ } else {
+ info->mCodecData->setRange(0, size);
}
if (flags & OMX_BUFFERFLAG_CODECCONFIG) {
@@ -6044,25 +5713,29 @@
status_t err2 = OK;
switch (metaType) {
case kMetadataBufferTypeInvalid:
+ {
+ err2 = mCodec->mOMXNode->emptyBuffer(
+ bufferID, info->mCodecData, flags, timeUs, info->mFenceFd);
+ }
break;
#ifndef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
case kMetadataBufferTypeNativeHandleSource:
if (info->mCodecData->size() >= sizeof(VideoNativeHandleMetadata)) {
VideoNativeHandleMetadata *vnhmd =
(VideoNativeHandleMetadata*)info->mCodecData->base();
- err2 = mCodec->mOMX->updateNativeHandleInMeta(
- mCodec->mNode, kPortIndexInput,
- NativeHandle::create(vnhmd->pHandle, false /* ownsHandle */),
- bufferID);
+ sp<NativeHandle> handle = NativeHandle::create(
+ vnhmd->pHandle, false /* ownsHandle */);
+ err2 = mCodec->mOMXNode->emptyBuffer(
+ bufferID, handle, flags, timeUs, info->mFenceFd);
}
break;
case kMetadataBufferTypeANWBuffer:
if (info->mCodecData->size() >= sizeof(VideoNativeMetadata)) {
VideoNativeMetadata *vnmd = (VideoNativeMetadata*)info->mCodecData->base();
- err2 = mCodec->mOMX->updateGraphicBufferInMeta(
- mCodec->mNode, kPortIndexInput,
- new GraphicBuffer(vnmd->pBuffer, false /* keepOwnership */),
- bufferID);
+ sp<GraphicBuffer> graphicBuffer = new GraphicBuffer(
+ vnmd->pBuffer, false /* keepOwnership */);
+ err2 = mCodec->mOMXNode->emptyBuffer(
+ bufferID, graphicBuffer, flags, timeUs, info->mFenceFd);
}
break;
#endif
@@ -6074,22 +5747,14 @@
break;
}
- if (err2 == OK) {
- err2 = mCodec->mOMX->emptyBuffer(
- mCodec->mNode,
- bufferID,
- 0,
- info->mCodecData->size(),
- flags,
- timeUs,
- info->mFenceFd);
- }
info->mFenceFd = -1;
if (err2 != OK) {
mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
return;
}
info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
+ // Hold the reference while component is using the buffer.
+ info->mData = buffer;
if (!eos && err == OK) {
getMoreInputDataIfPossible();
@@ -6113,14 +5778,8 @@
mCodec->mComponentName.c_str(), bufferID);
info->checkReadFence("onInputBufferFilled");
- status_t err2 = mCodec->mOMX->emptyBuffer(
- mCodec->mNode,
- bufferID,
- 0,
- 0,
- OMX_BUFFERFLAG_EOS,
- 0,
- info->mFenceFd);
+ status_t err2 = mCodec->mOMXNode->emptyBuffer(
+ bufferID, OMXBuffer::sPreset, OMX_BUFFERFLAG_EOS, 0, info->mFenceFd);
info->mFenceFd = -1;
if (err2 != OK) {
mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
@@ -6244,19 +5903,17 @@
ALOGV("[%s] calling fillBuffer %u",
mCodec->mComponentName.c_str(), info->mBufferID);
- err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
- info->mFenceFd = -1;
+ err = mCodec->fillBuffer(info);
if (err != OK) {
mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
return true;
}
-
- info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
break;
}
sp<AMessage> reply =
new AMessage(kWhatOutputBufferDrained, mCodec);
+ sp<MediaCodecBuffer> buffer = info->mData;
if (mCodec->mOutputFormat != mCodec->mLastOutputFormat && rangeLength > 0) {
// pretend that output format has changed on the first frame (we used to do this)
@@ -6270,12 +5927,13 @@
// data space) so that we can set it if and once the buffer is rendered.
mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
}
+ buffer->setFormat(mCodec->mOutputFormat);
if (mCodec->usingMetadataOnEncoderOutput()) {
native_handle_t *handle = NULL;
VideoNativeHandleMetadata &nativeMeta =
- *(VideoNativeHandleMetadata *)info->mData->data();
- if (info->mData->size() >= sizeof(nativeMeta)
+ *(VideoNativeHandleMetadata *)buffer->data();
+ if (buffer->size() >= sizeof(nativeMeta)
&& nativeMeta.eType == kMetadataBufferTypeNativeHandleSource) {
#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
// handle is only valid on 32-bit/mediaserver process
@@ -6284,16 +5942,16 @@
handle = (native_handle_t *)nativeMeta.pHandle;
#endif
}
- info->mData->meta()->setPointer("handle", handle);
- info->mData->meta()->setInt32("rangeOffset", rangeOffset);
- info->mData->meta()->setInt32("rangeLength", rangeLength);
- } else if (info->mData == info->mCodecData) {
- info->mData->setRange(rangeOffset, rangeLength);
+ buffer->meta()->setPointer("handle", handle);
+ buffer->meta()->setInt32("rangeOffset", rangeOffset);
+ buffer->meta()->setInt32("rangeLength", rangeLength);
+ } else if (buffer->base() == info->mCodecData->base()) {
+ buffer->setRange(rangeOffset, rangeLength);
} else {
info->mCodecData->setRange(rangeOffset, rangeLength);
// in this case we know that mConverter is not null
status_t err = mCodec->mConverter[kPortIndexOutput]->convert(
- info->mCodecData, info->mData);
+ info->mCodecData, buffer);
if (err != OK) {
mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
return true;
@@ -6308,14 +5966,15 @@
#endif
if (mCodec->mSkipCutBuffer != NULL) {
- mCodec->mSkipCutBuffer->submit(info->mData);
+ mCodec->mSkipCutBuffer->submit(buffer);
}
- info->mData->meta()->setInt64("timeUs", timeUs);
+ buffer->meta()->setInt64("timeUs", timeUs);
sp<AMessage> notify = mCodec->mNotify->dup();
notify->setInt32("what", CodecBase::kWhatDrainThisBuffer);
notify->setInt32("buffer-id", info->mBufferID);
- notify->setBuffer("buffer", info->mData);
+ notify->setObject("buffer", buffer);
+ info->mData.clear();
notify->setInt32("flags", flags);
reply->setInt32("buffer-id", info->mBufferID);
@@ -6358,6 +6017,11 @@
void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
IOMX::buffer_id bufferID;
CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
+ sp<RefBase> obj;
+ sp<MediaCodecBuffer> buffer = nullptr;
+ if (msg->findObject("buffer", &obj)) {
+ buffer = static_cast<MediaCodecBuffer *>(obj.get());
+ }
ssize_t index;
BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
BufferInfo::Status status = BufferInfo::getSafeStatus(info);
@@ -6367,6 +6031,7 @@
mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
return;
}
+ info->mData = buffer;
android_native_rect_t crop;
if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom)
@@ -6388,13 +6053,13 @@
int32_t render;
if (mCodec->mNativeWindow != NULL
&& msg->findInt32("render", &render) && render != 0
- && info->mData != NULL && info->mData->size() != 0) {
+ && buffer != NULL && buffer->size() != 0) {
ATRACE_NAME("render");
// The client wants this buffer to be rendered.
// save buffers sent to the surface so we can get render time when they return
int64_t mediaTimeUs = -1;
- info->mData->meta()->findInt64("timeUs", &mediaTimeUs);
+ buffer->meta()->findInt64("timeUs", &mediaTimeUs);
if (mediaTimeUs >= 0) {
mCodec->mRenderTracker.onFrameQueued(
mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd)));
@@ -6403,7 +6068,7 @@
int64_t timestampNs = 0;
if (!msg->findInt64("timestampNs", ×tampNs)) {
// use media timestamp if client did not request a specific render timestamp
- if (info->mData->meta()->findInt64("timeUs", &timestampNs)) {
+ if (buffer->meta()->findInt64("timeUs", &timestampNs)) {
ALOGV("using buffer PTS of %lld", (long long)timestampNs);
timestampNs *= 1000;
}
@@ -6428,7 +6093,7 @@
}
} else {
if (mCodec->mNativeWindow != NULL &&
- (info->mData == NULL || info->mData->size() != 0)) {
+ (buffer == NULL || buffer->size() != 0)) {
// move read fence into write fence to avoid clobbering
info->mIsReadFence = false;
ATRACE_NAME("frame-drop");
@@ -6466,12 +6131,8 @@
ALOGV("[%s] calling fillBuffer %u",
mCodec->mComponentName.c_str(), info->mBufferID);
info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS");
- status_t err = mCodec->mOMX->fillBuffer(
- mCodec->mNode, info->mBufferID, info->mFenceFd);
- info->mFenceFd = -1;
- if (err == OK) {
- info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
- } else {
+ status_t err = mCodec->fillBuffer(info);
+ if (err != OK) {
mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
}
}
@@ -6504,16 +6165,18 @@
ALOGV("Now uninitialized");
if (mDeathNotifier != NULL) {
- mCodec->mNodeBinder->unlinkToDeath(mDeathNotifier);
+ if (mCodec->mOMXNode != NULL) {
+ sp<IBinder> binder = IInterface::asBinder(mCodec->mOMXNode);
+ binder->unlinkToDeath(mDeathNotifier);
+ }
mDeathNotifier.clear();
}
mCodec->mUsingNativeWindow = false;
mCodec->mNativeWindow.clear();
mCodec->mNativeWindowUsageBits = 0;
- mCodec->mNode = 0;
mCodec->mOMX.clear();
- mCodec->mQuirks = 0;
+ mCodec->mOMXNode.clear();
mCodec->mFlags = 0;
mCodec->mInputMetadataType = kMetadataBufferTypeInvalid;
mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid;
@@ -6592,7 +6255,7 @@
bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
ALOGV("onAllocateComponent");
- CHECK(mCodec->mNode == 0);
+ CHECK(mCodec->mOMXNode == NULL);
OMXClient client;
if (client.connect() != OK) {
@@ -6631,7 +6294,7 @@
}
sp<CodecObserver> observer = new CodecObserver;
- IOMX::node_id node = 0;
+ sp<IOMXNode> omxNode;
status_t err = NAME_NOT_FOUND;
for (size_t matchIndex = 0; matchIndex < matchingCodecs.size();
@@ -6642,7 +6305,7 @@
pid_t tid = gettid();
int prevPriority = androidGetThreadPriority(tid);
androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND);
- err = omx->allocateNode(componentName.c_str(), observer, &mCodec->mNodeBinder, &node);
+ err = omx->allocateNode(componentName.c_str(), observer, &omxNode);
androidSetThreadPriority(tid, prevPriority);
if (err == OK) {
@@ -6651,10 +6314,10 @@
ALOGW("Allocating component '%s' failed, try next one.", componentName.c_str());
}
- node = 0;
+ omxNode = NULL;
}
- if (node == 0) {
+ if (omxNode == NULL) {
if (!mime.empty()) {
ALOGE("Unable to instantiate a %scoder for type '%s' with err %#x.",
encoder ? "en" : "de", mime.c_str(), err);
@@ -6667,14 +6330,14 @@
}
mDeathNotifier = new DeathNotifier(notify);
- if (mCodec->mNodeBinder == NULL ||
- mCodec->mNodeBinder->linkToDeath(mDeathNotifier) != OK) {
+ if (IInterface::asBinder(omxNode)->linkToDeath(mDeathNotifier) != OK) {
// This was a local binder, if it dies so do we, we won't care
// about any notifications in the afterlife.
mDeathNotifier.clear();
}
notify = new AMessage(kWhatOMXMessageList, mCodec);
+ notify->setInt32("generation", ++mCodec->mNodeGeneration);
observer->setNotificationMessage(notify);
mCodec->mComponentName = componentName;
@@ -6687,9 +6350,9 @@
mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
}
- mCodec->mQuirks = quirks;
+ omxNode->setQuirks(quirks);
mCodec->mOMX = omx;
- mCodec->mNode = node;
+ mCodec->mOMXNode = omxNode;
{
sp<AMessage> notify = mCodec->mNotify->dup();
@@ -6723,6 +6386,7 @@
mCodec->mInputFormat.clear();
mCodec->mOutputFormat.clear();
mCodec->mBaseOutputFormat.clear();
+ mCodec->mGraphicBufferSource.clear();
if (mCodec->mShutdownInProgress) {
bool keepComponentAllocated = mCodec->mKeepComponentAllocated;
@@ -6739,7 +6403,7 @@
void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) {
if (!keepComponentAllocated) {
- (void)mCodec->mOMX->freeNode(mCodec->mNode);
+ (void)mCodec->mOMXNode->freeNode();
mCodec->changeState(mCodec->mUninitializedState);
}
@@ -6818,7 +6482,7 @@
const sp<AMessage> &msg) {
ALOGV("onConfigureComponent");
- CHECK(mCodec->mNode != 0);
+ CHECK(mCodec->mOMXNode != NULL);
status_t err = OK;
AString mime;
@@ -6847,97 +6511,94 @@
}
status_t ACodec::LoadedState::setupInputSurface() {
- status_t err = OK;
+ if (mCodec->mGraphicBufferSource == NULL) {
+ return BAD_VALUE;
+ }
+
+ android_dataspace dataSpace;
+ status_t err =
+ mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
+ if (err != OK) {
+ ALOGE("Failed to get default data space");
+ return err;
+ }
+
+ err = statusFromBinderStatus(
+ mCodec->mGraphicBufferSource->configure(mCodec->mOMXNode, dataSpace));
+ if (err != OK) {
+ ALOGE("[%s] Unable to configure for node (err %d)",
+ mCodec->mComponentName.c_str(), err);
+ return err;
+ }
if (mCodec->mRepeatFrameDelayUs > 0ll) {
- err = mCodec->mOMX->setInternalOption(
- mCodec->mNode,
- kPortIndexInput,
- IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY,
- &mCodec->mRepeatFrameDelayUs,
- sizeof(mCodec->mRepeatFrameDelayUs));
+ err = statusFromBinderStatus(
+ mCodec->mGraphicBufferSource->setRepeatPreviousFrameDelayUs(
+ mCodec->mRepeatFrameDelayUs));
if (err != OK) {
ALOGE("[%s] Unable to configure option to repeat previous "
"frames (err %d)",
- mCodec->mComponentName.c_str(),
- err);
+ mCodec->mComponentName.c_str(), err);
return err;
}
}
if (mCodec->mMaxPtsGapUs > 0ll) {
- err = mCodec->mOMX->setInternalOption(
- mCodec->mNode,
- kPortIndexInput,
- IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP,
- &mCodec->mMaxPtsGapUs,
- sizeof(mCodec->mMaxPtsGapUs));
+ OMX_PARAM_U32TYPE maxPtsGapParams;
+ InitOMXParams(&maxPtsGapParams);
+ maxPtsGapParams.nPortIndex = kPortIndexInput;
+ maxPtsGapParams.nU32 = (uint32_t) mCodec->mMaxPtsGapUs;
+
+ err = mCodec->mOMXNode->setParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamMaxFrameDurationForBitrateControl,
+ &maxPtsGapParams, sizeof(maxPtsGapParams));
if (err != OK) {
ALOGE("[%s] Unable to configure max timestamp gap (err %d)",
- mCodec->mComponentName.c_str(),
- err);
+ mCodec->mComponentName.c_str(), err);
return err;
}
}
if (mCodec->mMaxFps > 0) {
- err = mCodec->mOMX->setInternalOption(
- mCodec->mNode,
- kPortIndexInput,
- IOMX::INTERNAL_OPTION_MAX_FPS,
- &mCodec->mMaxFps,
- sizeof(mCodec->mMaxFps));
+ err = statusFromBinderStatus(
+ mCodec->mGraphicBufferSource->setMaxFps(mCodec->mMaxFps));
if (err != OK) {
ALOGE("[%s] Unable to configure max fps (err %d)",
- mCodec->mComponentName.c_str(),
- err);
+ mCodec->mComponentName.c_str(), err);
return err;
}
}
if (mCodec->mTimePerCaptureUs > 0ll
&& mCodec->mTimePerFrameUs > 0ll) {
- int64_t timeLapse[2];
- timeLapse[0] = mCodec->mTimePerFrameUs;
- timeLapse[1] = mCodec->mTimePerCaptureUs;
- err = mCodec->mOMX->setInternalOption(
- mCodec->mNode,
- kPortIndexInput,
- IOMX::INTERNAL_OPTION_TIME_LAPSE,
- &timeLapse[0],
- sizeof(timeLapse));
+ err = statusFromBinderStatus(
+ mCodec->mGraphicBufferSource->setTimeLapseConfig(
+ mCodec->mTimePerFrameUs, mCodec->mTimePerCaptureUs));
if (err != OK) {
ALOGE("[%s] Unable to configure time lapse (err %d)",
- mCodec->mComponentName.c_str(),
- err);
+ mCodec->mComponentName.c_str(), err);
return err;
}
}
if (mCodec->mCreateInputBuffersSuspended) {
- bool suspend = true;
- err = mCodec->mOMX->setInternalOption(
- mCodec->mNode,
- kPortIndexInput,
- IOMX::INTERNAL_OPTION_SUSPEND,
- &suspend,
- sizeof(suspend));
+ err = statusFromBinderStatus(
+ mCodec->mGraphicBufferSource->setSuspend(true));
if (err != OK) {
ALOGE("[%s] Unable to configure option to suspend (err %d)",
- mCodec->mComponentName.c_str(),
- err);
+ mCodec->mComponentName.c_str(), err);
return err;
}
}
uint32_t usageBits;
- if (mCodec->mOMX->getParameter(
- mCodec->mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
+ if (mCodec->mOMXNode->getParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
&usageBits, sizeof(usageBits)) == OK) {
mCodec->mInputFormat->setInt32(
"using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN));
@@ -6945,9 +6606,14 @@
sp<ABuffer> colorAspectsBuffer;
if (mCodec->mInputFormat->findBuffer("android._color-aspects", &colorAspectsBuffer)) {
- err = mCodec->mOMX->setInternalOption(
- mCodec->mNode, kPortIndexInput, IOMX::INTERNAL_OPTION_COLOR_ASPECTS,
- colorAspectsBuffer->base(), colorAspectsBuffer->capacity());
+ if (colorAspectsBuffer->size() != sizeof(ColorAspects)) {
+ return INVALID_OPERATION;
+ }
+
+ err = statusFromBinderStatus(
+ mCodec->mGraphicBufferSource->setColorAspects(ColorUtils::packToU32(
+ *(ColorAspects *)colorAspectsBuffer->base())));
+
if (err != OK) {
ALOGE("[%s] Unable to configure color aspects (err %d)",
mCodec->mComponentName.c_str(), err);
@@ -6964,29 +6630,20 @@
sp<AMessage> notify = mCodec->mNotify->dup();
notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated);
- android_dataspace dataSpace;
- status_t err =
- mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
- notify->setMessage("input-format", mCodec->mInputFormat);
- notify->setMessage("output-format", mCodec->mOutputFormat);
-
sp<IGraphicBufferProducer> bufferProducer;
- if (err == OK) {
- mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
- err = mCodec->mOMX->createInputSurface(
- mCodec->mNode, kPortIndexInput, dataSpace, &bufferProducer,
- &mCodec->mInputMetadataType);
- // framework uses ANW buffers internally instead of gralloc handles
- if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) {
- mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
- }
- }
+ status_t err = mCodec->mOMX->createInputSurface(
+ &bufferProducer, &mCodec->mGraphicBufferSource);
if (err == OK) {
err = setupInputSurface();
}
if (err == OK) {
+ mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
+
+ notify->setMessage("input-format", mCodec->mInputFormat);
+ notify->setMessage("output-format", mCodec->mOutputFormat);
+
notify->setObject("input-surface",
new BufferProducerWrapper(bufferProducer));
} else {
@@ -7010,30 +6667,16 @@
sp<RefBase> obj;
CHECK(msg->findObject("input-surface", &obj));
sp<PersistentSurface> surface = static_cast<PersistentSurface *>(obj.get());
+ mCodec->mGraphicBufferSource = surface->getBufferSource();
- android_dataspace dataSpace;
- status_t err =
- mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
- notify->setMessage("input-format", mCodec->mInputFormat);
- notify->setMessage("output-format", mCodec->mOutputFormat);
+ status_t err = setupInputSurface();
if (err == OK) {
mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
- err = mCodec->mOMX->setInputSurface(
- mCodec->mNode, kPortIndexInput, surface->getBufferConsumer(),
- &mCodec->mInputMetadataType);
- // framework uses ANW buffers internally instead of gralloc handles
- if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) {
- mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
- }
- }
- if (err == OK) {
- surface->getBufferConsumer()->setDefaultBufferDataSpace(dataSpace);
- err = setupInputSurface();
- }
-
- if (err != OK) {
+ notify->setMessage("input-format", mCodec->mInputFormat);
+ notify->setMessage("output-format", mCodec->mOutputFormat);
+ } else {
// Can't use mCodec->signalError() here -- MediaCodec won't forward
// the error through because it's in the "configured" state. We
// send a kWhatInputSurfaceAccepted with an error value instead.
@@ -7047,7 +6690,7 @@
void ACodec::LoadedState::onStart() {
ALOGV("onStart");
- status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
+ status_t err = mCodec->mOMXNode->sendCommand(OMX_CommandStateSet, OMX_StateIdle);
if (err != OK) {
mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
} else {
@@ -7072,8 +6715,8 @@
mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
- mCodec->mOMX->sendCommand(
- mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded);
+ mCodec->mOMXNode->sendCommand(
+ OMX_CommandStateSet, OMX_StateLoaded);
if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) {
mCodec->freeBuffersOnPort(kPortIndexInput);
}
@@ -7145,8 +6788,8 @@
}
if (err == OK) {
- err = mCodec->mOMX->sendCommand(
- mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting);
+ err = mCodec->mOMXNode->sendCommand(
+ OMX_CommandStateSet, OMX_StateExecuting);
}
if (err != OK) {
@@ -7289,14 +6932,11 @@
ALOGV("[%s] calling fillBuffer %u", mCodec->mComponentName.c_str(), info->mBufferID);
info->checkWriteFence("submitRegularOutputBuffers");
- status_t err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
- info->mFenceFd = -1;
+ status_t err = mCodec->fillBuffer(info);
if (err != OK) {
failed = true;
break;
}
-
- info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
}
if (failed) {
@@ -7357,8 +6997,8 @@
mActive = false;
- status_t err = mCodec->mOMX->sendCommand(
- mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
+ status_t err = mCodec->mOMXNode->sendCommand(
+ OMX_CommandStateSet, OMX_StateIdle);
if (err != OK) {
if (keepComponentAllocated) {
mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
@@ -7384,7 +7024,7 @@
mActive = false;
- status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandFlush, OMX_ALL);
+ status_t err = mCodec->mOMXNode->sendCommand(OMX_CommandFlush, OMX_ALL);
if (err != OK) {
mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
} else {
@@ -7467,8 +7107,7 @@
configParams.nPortIndex = kPortIndexOutput;
configParams.nEncodeBitrate = videoBitrate;
- status_t err = mOMX->setConfig(
- mNode,
+ status_t err = mOMXNode->setConfig(
OMX_IndexConfigVideoBitrate,
&configParams,
sizeof(configParams));
@@ -7483,12 +7122,14 @@
int64_t timeOffsetUs;
if (params->findInt64("time-offset-us", &timeOffsetUs)) {
- status_t err = mOMX->setInternalOption(
- mNode,
- kPortIndexInput,
- IOMX::INTERNAL_OPTION_TIME_OFFSET,
- &timeOffsetUs,
- sizeof(timeOffsetUs));
+ if (mGraphicBufferSource == NULL) {
+ ALOGE("[%s] Invalid to set input buffer time offset without surface",
+ mComponentName.c_str());
+ return INVALID_OPERATION;
+ }
+
+ status_t err = statusFromBinderStatus(
+ mGraphicBufferSource->setTimeOffsetUs(timeOffsetUs));
if (err != OK) {
ALOGE("[%s] Unable to set input buffer time offset (err %d)",
@@ -7500,13 +7141,14 @@
int64_t skipFramesBeforeUs;
if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) {
- status_t err =
- mOMX->setInternalOption(
- mNode,
- kPortIndexInput,
- IOMX::INTERNAL_OPTION_START_TIME,
- &skipFramesBeforeUs,
- sizeof(skipFramesBeforeUs));
+ if (mGraphicBufferSource == NULL) {
+ ALOGE("[%s] Invalid to set start time without surface",
+ mComponentName.c_str());
+ return INVALID_OPERATION;
+ }
+
+ status_t err = statusFromBinderStatus(
+ mGraphicBufferSource->setStartTimeUs(skipFramesBeforeUs));
if (err != OK) {
ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err);
@@ -7516,15 +7158,14 @@
int32_t dropInputFrames;
if (params->findInt32("drop-input-frames", &dropInputFrames)) {
- bool suspend = dropInputFrames != 0;
+ if (mGraphicBufferSource == NULL) {
+ ALOGE("[%s] Invalid to set suspend without surface",
+ mComponentName.c_str());
+ return INVALID_OPERATION;
+ }
- status_t err =
- mOMX->setInternalOption(
- mNode,
- kPortIndexInput,
- IOMX::INTERNAL_OPTION_SUSPEND,
- &suspend,
- sizeof(suspend));
+ status_t err = statusFromBinderStatus(
+ mGraphicBufferSource->setSuspend(dropInputFrames != 0));
if (err != OK) {
ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err);
@@ -7574,7 +7215,10 @@
sp<AMessage> notify = mNotify->dup();
notify->setInt32("what", CodecBase::kWhatSignaledInputEOS);
- status_t err = mOMX->signalEndOfInputStream(mNode);
+ status_t err = INVALID_OPERATION;
+ if (mGraphicBufferSource != NULL) {
+ err = statusFromBinderStatus(mGraphicBufferSource->signalEndOfInputStream());
+ }
if (err != OK) {
notify->setInt32("err", err);
}
@@ -7597,8 +7241,7 @@
if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
mCodec->mMetadataBuffersToSubmit = 0;
- CHECK_EQ(mCodec->mOMX->sendCommand(
- mCodec->mNode,
+ CHECK_EQ(mCodec->mOMXNode->sendCommand(
OMX_CommandPortDisable, kPortIndexOutput),
(status_t)OK);
@@ -7703,8 +7346,8 @@
}
if (err == OK) {
- err = mCodec->mOMX->sendCommand(
- mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput);
+ err = mCodec->mOMXNode->sendCommand(
+ OMX_CommandPortEnable, kPortIndexOutput);
}
if (err == OK) {
@@ -7829,8 +7472,8 @@
void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() {
if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) {
- status_t err = mCodec->mOMX->sendCommand(
- mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded);
+ status_t err = mCodec->mOMXNode->sendCommand(
+ OMX_CommandStateSet, OMX_StateLoaded);
if (err == OK) {
err = mCodec->freeBuffersOnPort(kPortIndexInput);
status_t err2 = mCodec->freeBuffersOnPort(kPortIndexOutput);
@@ -8016,7 +7659,7 @@
{
sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec);
msg->setInt32("type", omx_message::EVENT);
- msg->setInt32("node", mCodec->mNode);
+ msg->setInt32("generation", mCodec->mNodeGeneration);
msg->setInt32("event", event);
msg->setInt32("data1", data1);
msg->setInt32("data2", data2);
@@ -8079,7 +7722,7 @@
const AString &name, const AString &mime, bool isEncoder,
sp<MediaCodecInfo::Capabilities> *caps) {
(*caps).clear();
- const char *role = getComponentRole(isEncoder, mime.c_str());
+ const char *role = GetComponentRole(isEncoder, mime.c_str());
if (role == NULL) {
return BAD_VALUE;
}
@@ -8092,17 +7735,17 @@
sp<IOMX> omx = client.interface();
sp<CodecObserver> observer = new CodecObserver;
- IOMX::node_id node = 0;
+ sp<IOMXNode> omxNode;
- err = omx->allocateNode(name.c_str(), observer, NULL, &node);
+ err = omx->allocateNode(name.c_str(), observer, &omxNode);
if (err != OK) {
client.disconnect();
return err;
}
- err = setComponentRole(omx, node, role);
+ err = SetComponentRole(omxNode, role);
if (err != OK) {
- omx->freeNode(node);
+ omxNode->freeNode();
client.disconnect();
return err;
}
@@ -8117,8 +7760,8 @@
for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
param.nProfileIndex = index;
- status_t err = omx->getParameter(
- node, OMX_IndexParamVideoProfileLevelQuerySupported,
+ status_t err = omxNode->getParameter(
+ OMX_IndexParamVideoProfileLevelQuerySupported,
&param, sizeof(param));
if (err != OK) {
break;
@@ -8141,16 +7784,16 @@
Vector<uint32_t> supportedColors; // shadow copy to check for duplicates
for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
portFormat.nIndex = index;
- status_t err = omx->getParameter(
- node, OMX_IndexParamVideoPortFormat,
+ status_t err = omxNode->getParameter(
+ OMX_IndexParamVideoPortFormat,
&portFormat, sizeof(portFormat));
if (err != OK) {
break;
}
OMX_U32 flexibleEquivalent;
- if (isFlexibleColorFormat(
- omx, node, portFormat.eColorFormat, false /* usingNativeWindow */,
+ if (IsFlexibleColorFormat(
+ omxNode, portFormat.eColorFormat, false /* usingNativeWindow */,
&flexibleEquivalent)) {
bool marked = false;
for (size_t i = 0; i < supportedColors.size(); ++i) {
@@ -8180,8 +7823,8 @@
param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;
for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
param.nProfileIndex = index;
- status_t err = omx->getParameter(
- node, (OMX_INDEXTYPE)OMX_IndexParamAudioProfileQuerySupported,
+ status_t err = omxNode->getParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamAudioProfileQuerySupported,
&param, sizeof(param));
if (err != OK) {
break;
@@ -8205,15 +7848,15 @@
if (isVideo && !isEncoder) {
native_handle_t *sidebandHandle = NULL;
- if (omx->configureVideoTunnelMode(
- node, kPortIndexOutput, OMX_TRUE, 0, &sidebandHandle) == OK) {
+ if (omxNode->configureVideoTunnelMode(
+ kPortIndexOutput, OMX_TRUE, 0, &sidebandHandle) == OK) {
// tunneled playback includes adaptive playback
builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback
| MediaCodecInfo::Capabilities::kFlagSupportsTunneledPlayback);
- } else if (omx->storeMetaDataInBuffers(
- node, kPortIndexOutput, OMX_TRUE) == OK ||
- omx->prepareForAdaptivePlayback(
- node, kPortIndexOutput, OMX_TRUE,
+ } else if (omxNode->storeMetaDataInBuffers(
+ kPortIndexOutput, OMX_TRUE) == OK ||
+ omxNode->prepareForAdaptivePlayback(
+ kPortIndexOutput, OMX_TRUE,
1280 /* width */, 720 /* height */) == OK) {
builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback);
}
@@ -8224,15 +7867,15 @@
InitOMXParams(&params);
params.nPortIndex = kPortIndexOutput;
// TODO: should we verify if fallback is supported?
- if (omx->getConfig(
- node, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh,
+ if (omxNode->getConfig(
+ (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh,
&params, sizeof(params)) == OK) {
builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsIntraRefresh);
}
}
*caps = builder;
- omx->freeNode(node);
+ omxNode->freeNode();
client.disconnect();
return OK;
}
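
The ACodec.cpp hunks above all apply one mechanical change: calls that went through the global IOMX interface with an explicit node_id now go through a per-component IOMXNode, so the node argument disappears while the index, payload, and size arguments stay the same. Below is a minimal sketch of the resulting call shape, assuming an already allocated sp<IOMXNode>; the QueryPcmMode helper, the simplified InitParams stand-in, and the header paths are illustrative and not part of the patch.

// Sketch only (not part of the patch): post-change IOMXNode parameter query.
#include <string.h>
#include <media/IOMX.h>      // sp<IOMXNode>, status_t (assumed include path)
#include <OMX_Audio.h>       // OMX_AUDIO_PARAM_PCMMODETYPE, OMX_IndexParamAudioPcm

using android::sp;
using android::status_t;
using android::IOMXNode;

// Simplified stand-in for the file-local InitOMXParams<T>() template in ACodec.cpp.
template <typename T>
static void InitParams(T *params) {
    memset(params, 0, sizeof(T));
    params->nSize = sizeof(T);
    params->nVersion.s.nVersionMajor = 1;
}

// Hypothetical helper showing the call shape used throughout the hunks above.
static status_t QueryPcmMode(const sp<IOMXNode> &omxNode, OMX_U32 portIndex,
                             OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams) {
    InitParams(pcmParams);
    pcmParams->nPortIndex = portIndex;
    // Before: mOMX->getParameter(mNode, OMX_IndexParamAudioPcm, pcmParams, sizeof(*pcmParams));
    // After: the node is implicit in the IOMXNode object, so no node_id argument.
    return omxNode->getParameter(OMX_IndexParamAudioPcm, pcmParams, sizeof(*pcmParams));
}
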
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index 604ad7c..5e921e3 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -10,6 +10,7 @@
AMRWriter.cpp \
AudioPlayer.cpp \
AudioSource.cpp \
+ BufferImpl.cpp \
CallbackDataSource.cpp \
CameraSource.cpp \
CameraSourceTimeLapse.cpp \
@@ -35,7 +36,6 @@
MediaCodecList.cpp \
MediaCodecListOverrides.cpp \
MediaCodecSource.cpp \
- MediaDefs.cpp \
MediaExtractor.cpp \
MediaSync.cpp \
MidiExtractor.cpp \
@@ -46,7 +46,6 @@
NuMediaExtractor.cpp \
OMXClient.cpp \
OggExtractor.cpp \
- ProcessInfo.cpp \
SampleIterator.cpp \
SampleTable.cpp \
SimpleDecodingSource.cpp \
@@ -126,7 +125,6 @@
LOCAL_CFLAGS += -DENABLE_STAGEFRIGHT_EXPERIMENTS
endif
-LOCAL_CLANG := true
LOCAL_SANITIZE := unsigned-integer-overflow signed-integer-overflow
LOCAL_MODULE:= libstagefright
diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp
index efdee77..4ccd2d0 100644
--- a/media/libstagefright/AudioSource.cpp
+++ b/media/libstagefright/AudioSource.cpp
@@ -62,6 +62,8 @@
mPrevSampleTimeUs(0),
mInitialReadTimeUs(0),
mNumFramesReceived(0),
+ mNumFramesSkipped(0),
+ mNumFramesLost(0),
mNumClientOwnedBuffers(0) {
ALOGV("sampleRate: %u, outSampleRate: %u, channelCount: %u",
sampleRate, outSampleRate, channelCount);
@@ -295,11 +297,27 @@
}
status_t AudioSource::dataCallback(const AudioRecord::Buffer& audioBuffer) {
- int64_t timeUs = systemTime() / 1000ll;
- // Estimate the real sampling time of the 1st sample in this buffer
- // from AudioRecord's latency. (Apply this adjustment first so that
- // the start time logic is not affected.)
- timeUs -= mRecord->latency() * 1000LL;
+ int64_t timeUs, position, timeNs;
+ ExtendedTimestamp ts;
+ ExtendedTimestamp::Location location;
+ const int32_t usPerSec = 1000000;
+
+ if (mRecord->getTimestamp(&ts) == OK &&
+ ts.getBestTimestamp(&position, &timeNs, ExtendedTimestamp::TIMEBASE_MONOTONIC,
+ &location) == OK) {
+ // Use audio timestamp.
+ timeUs = timeNs / 1000 -
+ (position - mNumFramesSkipped -
+ mNumFramesReceived + mNumFramesLost) * usPerSec / mSampleRate;
+ } else {
+ // This should not happen in the normal case.
+ ALOGW("Failed to get audio timestamp, falling back to system clock");
+ timeUs = systemTime() / 1000ll;
+ // Estimate the real sampling time of the 1st sample in this buffer
+ // from AudioRecord's latency. (Apply this adjustment first so that
+ // the start time logic is not affected.)
+ timeUs -= mRecord->latency() * 1000LL;
+ }
ALOGV("dataCallbackTimestamp: %" PRId64 " us", timeUs);
Mutex::Autolock autoLock(mLock);
@@ -308,10 +326,15 @@
return OK;
}
+ const size_t bufferSize = audioBuffer.size;
+
// Drop retrieved and previously lost audio data.
if (mNumFramesReceived == 0 && timeUs < mStartTimeUs) {
(void) mRecord->getInputFramesLost();
- ALOGV("Drop audio data at %" PRId64 "/%" PRId64 " us", timeUs, mStartTimeUs);
+ int64_t receivedFrames = bufferSize / mRecord->frameSize();
+ ALOGV("Drop audio data (%" PRId64 " frames) at %" PRId64 "/%" PRId64 " us",
+ receivedFrames, timeUs, mStartTimeUs);
+ mNumFramesSkipped += receivedFrames;
return OK;
}
@@ -320,11 +343,7 @@
// Initial delay
if (mStartTimeUs > 0) {
mStartTimeUs = timeUs - mStartTimeUs;
- } else {
- // Assume latency is constant.
- mStartTimeUs += mRecord->latency() * 1000;
}
-
mPrevSampleTimeUs = mStartTimeUs;
}
@@ -354,6 +373,7 @@
MediaBuffer *lostAudioBuffer = new MediaBuffer(bufferSize);
memset(lostAudioBuffer->data(), 0, bufferSize);
lostAudioBuffer->set_range(0, bufferSize);
+ mNumFramesLost += bufferSize / mRecord->frameSize();
queueInputBuffer_l(lostAudioBuffer, timeUs);
}
@@ -362,7 +382,6 @@
return OK;
}
- const size_t bufferSize = audioBuffer.size;
MediaBuffer *buffer = new MediaBuffer(bufferSize);
memcpy((uint8_t *) buffer->data(),
audioBuffer.i16, audioBuffer.size);
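The dataCallback() change above anchors buffer timestamps to an audio-HAL timestamp (a frame position paired with a monotonic time) instead of systemTime() minus latency. Stripped of the AudioRecord/ExtendedTimestamp plumbing, the arithmetic reduces to the following sketch (names are illustrative, not from this patch):

#include <cstdint>

// Given a HAL timestamp meaning "frame number tsFrames was captured at
// monotonic time tsTimeNs", compute the capture time (us) of the first
// frame of a buffer that starts at frame number bufferStartFrame.
static int64_t firstSampleTimeUs(int64_t tsFrames, int64_t tsTimeNs,
                                 int64_t bufferStartFrame, uint32_t sampleRate) {
    const int64_t usPerSec = 1000000;
    // Frames captured between the buffer start and the timestamped frame.
    int64_t framesAhead = tsFrames - bufferStartFrame;
    return tsTimeNs / 1000 - framesAhead * usPerSec / sampleRate;
}

In the change, bufferStartFrame corresponds to mNumFramesReceived + mNumFramesSkipped - mNumFramesLost, which is why those two new counters are maintained.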
diff --git a/media/libstagefright/BufferImpl.cpp b/media/libstagefright/BufferImpl.cpp
new file mode 100644
index 0000000..37a40ec
--- /dev/null
+++ b/media/libstagefright/BufferImpl.cpp
@@ -0,0 +1,58 @@
+/*
+ * Copyright 2016, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "BufferImpl"
+#include <utils/Log.h>
+
+#include <binder/IMemory.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/ICrypto.h>
+#include <utils/NativeHandle.h>
+
+#include "include/SecureBuffer.h"
+#include "include/SharedMemoryBuffer.h"
+
+namespace android {
+
+SharedMemoryBuffer::SharedMemoryBuffer(const sp<AMessage> &format, const sp<IMemory> &mem)
+ : MediaCodecBuffer(format, new ABuffer(mem->pointer(), mem->size())),
+ mMemory(mem) {
+}
+
+SecureBuffer::SecureBuffer(const sp<AMessage> &format, const void *ptr, size_t size)
+ : MediaCodecBuffer(format, new ABuffer(nullptr, size)),
+ mPointer(ptr) {
+}
+
+SecureBuffer::SecureBuffer(
+ const sp<AMessage> &format, const sp<NativeHandle> &handle, size_t size)
+ : MediaCodecBuffer(format, new ABuffer(nullptr, size)),
+ mPointer(nullptr),
+ mHandle(handle) {
+}
+
+void *SecureBuffer::getDestinationPointer() {
+ return (void *)(mHandle == nullptr ? mPointer : mHandle->handle());
+}
+
+ICrypto::DestinationType SecureBuffer::getDestinationType() {
+ return mHandle == nullptr ? ICrypto::kDestinationTypeOpaqueHandle
+ : ICrypto::kDestinationTypeNativeHandle;
+}
+
+} // namespace android
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 408ad7a..990d4b7 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -111,6 +111,11 @@
}
static int32_t getColorFormat(const char* colorFormat) {
+ if (!colorFormat) {
+ ALOGE("Invalid color format");
+ return -1;
+ }
+
if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
return OMX_COLOR_FormatYUV420Planar;
}
diff --git a/media/libstagefright/DataConverter.cpp b/media/libstagefright/DataConverter.cpp
index aea47f3..52be054 100644
--- a/media/libstagefright/DataConverter.cpp
+++ b/media/libstagefright/DataConverter.cpp
@@ -21,13 +21,13 @@
#include <audio_utils/primitives.h>
-#include <media/stagefright/foundation/ABuffer.h>
+#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AUtils.h>
namespace android {
-status_t DataConverter::convert(const sp<ABuffer> &source, sp<ABuffer> &target) {
+status_t DataConverter::convert(const sp<MediaCodecBuffer> &source, sp<MediaCodecBuffer> &target) {
CHECK(source->base() != target->base());
size_t size = targetSize(source->size());
status_t err = OK;
@@ -43,7 +43,8 @@
return err;
}
-status_t DataConverter::safeConvert(const sp<ABuffer> &source, sp<ABuffer> &target) {
+status_t DataConverter::safeConvert(
+ const sp<MediaCodecBuffer> &source, sp<MediaCodecBuffer> &target) {
memcpy(target->base(), source->data(), source->size());
return OK;
}
@@ -101,7 +102,7 @@
return NULL;
}
-status_t AudioConverter::safeConvert(const sp<ABuffer> &src, sp<ABuffer> &tgt) {
+status_t AudioConverter::safeConvert(const sp<MediaCodecBuffer> &src, sp<MediaCodecBuffer> &tgt) {
if (mTo == kAudioEncodingPcm8bit && mFrom == kAudioEncodingPcm16bit) {
memcpy_to_u8_from_i16((uint8_t*)tgt->base(), (const int16_t*)src->data(), src->size() / 2);
} else if (mTo == kAudioEncodingPcm8bit && mFrom == kAudioEncodingPcmFloat) {
diff --git a/media/libstagefright/MPEG2TSWriter.cpp b/media/libstagefright/MPEG2TSWriter.cpp
index 931b280..b83b0a0 100644
--- a/media/libstagefright/MPEG2TSWriter.cpp
+++ b/media/libstagefright/MPEG2TSWriter.cpp
@@ -37,7 +37,7 @@
struct MPEG2TSWriter::SourceInfo : public AHandler {
explicit SourceInfo(const sp<IMediaSource> &source);
- void start(const sp<AMessage> &notify);
+ void start(const sp<AMessage> &notify, const sp<MetaData> &params);
void stop();
unsigned streamType() const;
@@ -75,7 +75,7 @@
sp<ABuffer> mAACCodecSpecificData;
- sp<ABuffer> mAACBuffer;
+ sp<ABuffer> mBuffer;
sp<ABuffer> mLastAccessUnit;
bool mEOSReceived;
@@ -85,10 +85,8 @@
void extractCodecSpecificData();
- bool appendAACFrames(MediaBuffer *buffer);
- bool flushAACFrames();
-
- void postAVCFrame(MediaBuffer *buffer);
+ void appendAACFrames(MediaBuffer *buffer);
+ void appendAVCFrame(MediaBuffer *buffer);
DISALLOW_EVIL_CONSTRUCTORS(SourceInfo);
};
@@ -129,13 +127,14 @@
return mContinuityCounter;
}
-void MPEG2TSWriter::SourceInfo::start(const sp<AMessage> &notify) {
+void MPEG2TSWriter::SourceInfo::start(const sp<AMessage> &notify, const sp<MetaData> &params) {
mLooper->registerHandler(this);
mLooper->start();
-
mNotify = notify;
- (new AMessage(kWhatStart, this))->post();
+ sp<AMessage> msg = new AMessage(kWhatStart, this);
+ msg->setObject("meta", params);
+ msg->post();
}
void MPEG2TSWriter::SourceInfo::stop() {
@@ -250,56 +249,51 @@
notify->post();
}
-void MPEG2TSWriter::SourceInfo::postAVCFrame(MediaBuffer *buffer) {
+void MPEG2TSWriter::SourceInfo::appendAVCFrame(MediaBuffer *buffer) {
sp<AMessage> notify = mNotify->dup();
notify->setInt32("what", kNotifyBuffer);
- sp<ABuffer> copy =
- new ABuffer(buffer->range_length());
- memcpy(copy->data(),
+ if (mBuffer == NULL || buffer->range_length() > mBuffer->capacity()) {
+ mBuffer = new ABuffer(buffer->range_length());
+ }
+ mBuffer->setRange(0, 0);
+
+ memcpy(mBuffer->data(),
(const uint8_t *)buffer->data()
+ buffer->range_offset(),
buffer->range_length());
int64_t timeUs;
CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
- copy->meta()->setInt64("timeUs", timeUs);
+ mBuffer->meta()->setInt64("timeUs", timeUs);
int32_t isSync;
if (buffer->meta_data()->findInt32(kKeyIsSyncFrame, &isSync)
&& isSync != 0) {
- copy->meta()->setInt32("isSync", true);
+ mBuffer->meta()->setInt32("isSync", true);
}
- notify->setBuffer("buffer", copy);
+ mBuffer->setRange(0, buffer->range_length());
+
+ notify->setBuffer("buffer", mBuffer);
notify->post();
}
-bool MPEG2TSWriter::SourceInfo::appendAACFrames(MediaBuffer *buffer) {
- bool accessUnitPosted = false;
+void MPEG2TSWriter::SourceInfo::appendAACFrames(MediaBuffer *buffer) {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kNotifyBuffer);
- if (mAACBuffer != NULL
- && mAACBuffer->size() + 7 + buffer->range_length()
- > mAACBuffer->capacity()) {
- accessUnitPosted = flushAACFrames();
+ if (mBuffer == NULL || 7 + buffer->range_length() > mBuffer->capacity()) {
+ mBuffer = new ABuffer(7 + buffer->range_length());
}
- if (mAACBuffer == NULL) {
- size_t alloc = 4096;
- if (buffer->range_length() + 7 > alloc) {
- alloc = 7 + buffer->range_length();
- }
+ int64_t timeUs;
+ CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
- mAACBuffer = new ABuffer(alloc);
+ mBuffer->meta()->setInt64("timeUs", timeUs);
+ mBuffer->meta()->setInt32("isSync", true);
- int64_t timeUs;
- CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
-
- mAACBuffer->meta()->setInt64("timeUs", timeUs);
- mAACBuffer->meta()->setInt32("isSync", true);
-
- mAACBuffer->setRange(0, 0);
- }
+ mBuffer->setRange(0, 0);
const uint8_t *codec_specific_data = mAACCodecSpecificData->data();
@@ -312,7 +306,7 @@
unsigned channel_configuration =
(codec_specific_data[1] >> 3) & 0x0f;
- uint8_t *ptr = mAACBuffer->data() + mAACBuffer->size();
+ uint8_t *ptr = mBuffer->data() + mBuffer->size();
const uint32_t aac_frame_length = buffer->range_length() + 7;
@@ -340,24 +334,10 @@
ptr += buffer->range_length();
- mAACBuffer->setRange(0, ptr - mAACBuffer->data());
+ mBuffer->setRange(0, ptr - mBuffer->data());
- return accessUnitPosted;
-}
-
-bool MPEG2TSWriter::SourceInfo::flushAACFrames() {
- if (mAACBuffer == NULL) {
- return false;
- }
-
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kNotifyBuffer);
- notify->setBuffer("buffer", mAACBuffer);
+ notify->setBuffer("buffer", mBuffer);
notify->post();
-
- mAACBuffer.clear();
-
- return true;
}
void MPEG2TSWriter::SourceInfo::readMore() {
@@ -368,7 +348,10 @@
switch (msg->what()) {
case kWhatStart:
{
- status_t err = mSource->start();
+ sp<RefBase> obj;
+ CHECK(msg->findObject("meta", &obj));
+ MetaData *params = static_cast<MetaData *>(obj.get());
+ status_t err = mSource->start(params);
if (err != OK) {
sp<AMessage> notify = mNotify->dup();
notify->setInt32("what", kNotifyStartFailed);
@@ -376,6 +359,7 @@
break;
}
+ // Extract CSD from config format.
extractCodecSpecificData();
readMore();
@@ -388,10 +372,6 @@
status_t err = mSource->read(&buffer);
if (err != OK && err != INFO_FORMAT_CHANGED) {
- if (mStreamType == 0x0f) {
- flushAACFrames();
- }
-
sp<AMessage> notify = mNotify->dup();
notify->setInt32("what", kNotifyReachedEOS);
notify->setInt32("status", err);
@@ -401,23 +381,20 @@
if (err == OK) {
if (mStreamType == 0x0f && mAACCodecSpecificData == NULL) {
- // The first buffer contains codec specific data.
-
+ // The first audio buffer must contain CSD if not received yet.
CHECK_GE(buffer->range_length(), 2u);
-
mAACCodecSpecificData = new ABuffer(buffer->range_length());
memcpy(mAACCodecSpecificData->data(),
(const uint8_t *)buffer->data()
+ buffer->range_offset(),
buffer->range_length());
+ readMore();
} else if (buffer->range_length() > 0) {
if (mStreamType == 0x0f) {
- if (!appendAACFrames(buffer)) {
- msg->post();
- }
+ appendAACFrames(buffer);
} else {
- postAVCFrame(buffer);
+ appendAVCFrame(buffer);
}
} else {
readMore();
@@ -452,7 +429,6 @@
int64_t timeUs;
CHECK(mLastAccessUnit->meta()->findInt64("timeUs", &timeUs));
-
return timeUs;
}
@@ -542,7 +518,7 @@
return OK;
}
-status_t MPEG2TSWriter::start(MetaData * /* param */) {
+status_t MPEG2TSWriter::start(MetaData *param ) {
CHECK(!mStarted);
mStarted = true;
@@ -556,7 +532,7 @@
notify->setInt32("source-index", i);
- mSources.editItemAt(i)->start(notify);
+ mSources.editItemAt(i)->start(notify, param);
}
return OK;
@@ -594,13 +570,13 @@
{
int32_t sourceIndex;
CHECK(msg->findInt32("source-index", &sourceIndex));
+ sp<SourceInfo> source = mSources.editItemAt(sourceIndex);
int32_t what;
CHECK(msg->findInt32("what", &what));
if (what == SourceInfo::kNotifyReachedEOS
|| what == SourceInfo::kNotifyStartFailed) {
- sp<SourceInfo> source = mSources.editItemAt(sourceIndex);
source->setEOSReceived();
sp<ABuffer> buffer = source->lastAccessUnit();
@@ -615,6 +591,7 @@
} else if (what == SourceInfo::kNotifyBuffer) {
sp<ABuffer> buffer;
CHECK(msg->findBuffer("buffer", &buffer));
+ CHECK(source->lastAccessUnit() == NULL);
int32_t oob;
if (msg->findInt32("oob", &oob) && oob) {
@@ -635,15 +612,10 @@
// Rinse, repeat.
// If we don't have data on any track we don't write
// anything just yet.
-
- sp<SourceInfo> source = mSources.editItemAt(sourceIndex);
-
- CHECK(source->lastAccessUnit() == NULL);
source->setLastAccessUnit(buffer);
ALOGV("lastAccessUnitTimeUs[%d] = %.2f secs",
- sourceIndex, source->lastAccessUnitTimeUs() / 1E6);
-
+ sourceIndex, source->lastAccessUnitTimeUs() / 1E6);
int64_t minTimeUs = -1;
size_t minIndex = 0;
@@ -665,15 +637,14 @@
}
if (minTimeUs < 0) {
- ALOGV("not a all tracks have valid data.");
+ ALOGV("not all tracks have valid data.");
break;
}
ALOGV("writing access unit at time %.2f secs (index %zu)",
- minTimeUs / 1E6, minIndex);
+ minTimeUs / 1E6, minIndex);
source = mSources.editItemAt(minIndex);
-
buffer = source->lastAccessUnit();
source->setLastAccessUnit(NULL);
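Although the AAC batching was removed, appendAACFrames() above still prepends a 7-byte ADTS header, built from the codec-specific data (profile, sampling-frequency index, channel configuration), to each frame before handing it to the TS muxer. For reference, a minimal self-contained sketch of the standard ADTS header layout (not the MPEG2TSWriter code itself):

#include <cstddef>
#include <cstdint>

// Write a 7-byte ADTS header (MPEG-4, no CRC) for an AAC frame.
// profile: audio object type - 1 (2 bits), sfIndex: sampling frequency index (4 bits),
// chanCfg: channel configuration (3 bits), frameLen: payload size + 7 header bytes (13 bits).
static void writeAdtsHeader(uint8_t header[7], unsigned profile, unsigned sfIndex,
                            unsigned chanCfg, size_t frameLen) {
    header[0] = 0xFF;                                    // syncword, high 8 bits
    header[1] = 0xF1;                                    // syncword, MPEG-4, layer 0, no CRC
    header[2] = static_cast<uint8_t>((profile << 6) | (sfIndex << 2) | (chanCfg >> 2));
    header[3] = static_cast<uint8_t>(((chanCfg & 3) << 6) | ((frameLen >> 11) & 0x03));
    header[4] = static_cast<uint8_t>((frameLen >> 3) & 0xFF);
    header[5] = static_cast<uint8_t>(((frameLen & 7) << 5) | 0x1F);  // + top bits of buffer fullness (0x7FF)
    header[6] = 0xFC;                                    // rest of buffer fullness, one raw data block
}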
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index 6765282..ee603a4 100644
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -815,6 +815,10 @@
ALOGE("b/23540914");
return ERROR_MALFORMED;
}
+ if (depth > 100) {
+ ALOGE("b/27456299");
+ return ERROR_MALFORMED;
+ }
uint32_t hdr[2];
if (mDataSource->readAt(*offset, hdr, 8) < 8) {
return ERROR_IO;
@@ -2311,6 +2315,12 @@
return UNKNOWN_ERROR; // stop parsing after sidx
}
+ case FOURCC('a', 'c', '-', '3'):
+ {
+ *offset += chunk_size;
+ return parseAC3SampleEntry(data_offset);
+ }
+
case FOURCC('f', 't', 'y', 'p'):
{
if (chunk_data_size < 8 || depth != 0) {
@@ -2359,6 +2369,99 @@
return OK;
}
+status_t MPEG4Extractor::parseAC3SampleEntry(off64_t offset) {
+ // skip 16 bytes:
+ // + 6-byte reserved,
+ // + 2-byte data reference index,
+ // + 8-byte reserved
+ offset += 16;
+ uint16_t channelCount;
+ if (!mDataSource->getUInt16(offset, &channelCount)) {
+ return ERROR_MALFORMED;
+ }
+ // skip 8 bytes:
+ // + 2-byte channelCount,
+ // + 2-byte sample size,
+ // + 4-byte reserved
+ offset += 8;
+ uint16_t sampleRate;
+ if (!mDataSource->getUInt16(offset, &sampleRate)) {
+ ALOGE("MPEG4Extractor: error while reading ac-3 block: cannot read sample rate");
+ return ERROR_MALFORMED;
+ }
+
+ // skip 4 bytes:
+ // + 2-byte sampleRate,
+ // + 2-byte reserved
+ offset += 4;
+ return parseAC3SpecificBox(offset, sampleRate);
+}
+
+status_t MPEG4Extractor::parseAC3SpecificBox(
+ off64_t offset, uint16_t sampleRate) {
+ uint32_t size;
+ // + 4-byte size
+ // + 4-byte type
+ // + 3-byte payload
+ const uint32_t kAC3SpecificBoxSize = 11;
+ if (!mDataSource->getUInt32(offset, &size) || size < kAC3SpecificBoxSize) {
+ ALOGE("MPEG4Extractor: error while reading ac-3 block: cannot read specific box size");
+ return ERROR_MALFORMED;
+ }
+
+ offset += 4;
+ uint32_t type;
+ if (!mDataSource->getUInt32(offset, &type) || type != FOURCC('d', 'a', 'c', '3')) {
+ ALOGE("MPEG4Extractor: error while reading ac-3 specific block: header not dac3");
+ return ERROR_MALFORMED;
+ }
+
+ offset += 4;
+ const uint32_t kAC3SpecificBoxPayloadSize = 3;
+ uint8_t chunk[kAC3SpecificBoxPayloadSize];
+ if (mDataSource->readAt(offset, chunk, sizeof(chunk)) != sizeof(chunk)) {
+ ALOGE("MPEG4Extractor: error while reading ac-3 specific block: bitstream fields");
+ return ERROR_MALFORMED;
+ }
+
+ ABitReader br(chunk, sizeof(chunk));
+ static const unsigned channelCountTable[] = {2, 1, 2, 3, 3, 4, 4, 5};
+ static const unsigned sampleRateTable[] = {48000, 44100, 32000};
+
+ unsigned fscod = br.getBits(2);
+ if (fscod == 3) {
+ ALOGE("Incorrect fscod (3) in AC3 header");
+ return ERROR_MALFORMED;
+ }
+ unsigned boxSampleRate = sampleRateTable[fscod];
+ if (boxSampleRate != sampleRate) {
+ ALOGE("sample rate mismatch: boxSampleRate = %d, sampleRate = %d",
+ boxSampleRate, sampleRate);
+ return ERROR_MALFORMED;
+ }
+
+ unsigned bsid = br.getBits(5);
+ if (bsid > 8) {
+ ALOGW("Incorrect bsid in AC3 header. Possibly E-AC-3?");
+ return ERROR_MALFORMED;
+ }
+
+ // skip
+ unsigned bsmod __unused = br.getBits(3);
+
+ unsigned acmod = br.getBits(3);
+ unsigned lfeon = br.getBits(1);
+ unsigned channelCount = channelCountTable[acmod] + lfeon;
+
+ if (mLastTrack == NULL) {
+ return ERROR_MALFORMED;
+ }
+ mLastTrack->meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AC3);
+ mLastTrack->meta->setInt32(kKeyChannelCount, channelCount);
+ mLastTrack->meta->setInt32(kKeySampleRate, sampleRate);
+ return OK;
+}
+
status_t MPEG4Extractor::parseSegmentIndex(off64_t offset, size_t size) {
ALOGV("MPEG4Extractor::parseSegmentIndex");
@@ -2903,7 +3006,7 @@
int32_t type = U32_AT(&buffer[0]);
if ((type == FOURCC('n', 'c', 'l', 'x') && size >= 11)
- || (type == FOURCC('n', 'c', 'l', 'c' && size >= 10))) {
+ || (type == FOURCC('n', 'c', 'l', 'c') && size >= 10)) {
int32_t primaries = U16_AT(&buffer[4]);
int32_t transfer = U16_AT(&buffer[6]);
int32_t coeffs = U16_AT(&buffer[8]);
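The parseAC3SpecificBox() helper added above reads fscod, bsid, bsmod, acmod and lfeon out of the 3-byte 'dac3' payload via ABitReader. The same bit layout can be decoded with plain shifts; a standalone sketch for reference (illustrative names, not patch code):

#include <cstdint>

struct Dac3Fields {
    unsigned fscod;   // sample rate code (3 is reserved)
    unsigned bsid;    // bitstream id; > 8 suggests E-AC-3
    unsigned bsmod;   // bitstream mode (unused by the patch)
    unsigned acmod;   // audio coding mode -> base channel count
    unsigned lfeon;   // low-frequency-effects channel present
};

// Decode the first 14 bits of an AC3SpecificBox ('dac3') payload:
// fscod(2) bsid(5) bsmod(3) acmod(3) lfeon(1), MSB first.
static Dac3Fields parseDac3(const uint8_t payload[3]) {
    uint32_t bits = (payload[0] << 16) | (payload[1] << 8) | payload[2];
    Dac3Fields f;
    f.fscod = (bits >> 22) & 0x3;
    f.bsid  = (bits >> 17) & 0x1f;
    f.bsmod = (bits >> 14) & 0x7;
    f.acmod = (bits >> 11) & 0x7;
    f.lfeon = (bits >> 10) & 0x1;
    return f;
}

The channel count then follows the same lookup as in the patch: channelCountTable[acmod] + lfeon.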
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index c9bcfc3..9978b76 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -69,6 +69,7 @@
static const uint8_t kNalUnitTypeSeqParamSet = 0x07;
static const uint8_t kNalUnitTypePicParamSet = 0x08;
static const int64_t kInitialDelayTimeUs = 700000LL;
+static const int64_t kMaxMetadataSize = 0x4000000LL; // 64MB max per-frame metadata size
static const char kMetaKey_Version[] = "com.android.version";
#ifdef SHOW_MODEL_BUILD
@@ -116,6 +117,7 @@
int32_t getTrackId() const { return mTrackId; }
status_t dump(int fd, const Vector<String16>& args) const;
static const char *getFourCCForMime(const char *mime);
+ const char *getTrackType() const;
private:
enum {
@@ -271,6 +273,7 @@
bool mIsAvc;
bool mIsHevc;
bool mIsAudio;
+ bool mIsVideo;
bool mIsMPEG4;
bool mIsMalformed;
int32_t mTrackId;
@@ -393,6 +396,7 @@
void writeMdhdBox(uint32_t now);
void writeSmhdBox();
void writeVmhdBox();
+ void writeNmhdBox();
void writeHdlrBox();
void writeTkhdBox(uint32_t now);
void writeColrBox();
@@ -400,6 +404,7 @@
void writeMp4vEsdsBox();
void writeAudioFourCCBox();
void writeVideoFourCCBox();
+ void writeMetadataFourCCBox();
void writeStblBox(bool use32BitOffset);
Track(const Track &);
@@ -430,6 +435,8 @@
mStartTimestampUs(-1ll),
mLatitudex10000(0),
mLongitudex10000(0),
+ mHasAudioTrack(false),
+ mHasVideoTrack(false),
mAreGeoTagsAvailable(false),
mStartTimeOffsetMs(-1),
mMetaKeys(new AMessage()) {
@@ -477,7 +484,7 @@
const size_t SIZE = 256;
char buffer[SIZE];
String8 result;
- snprintf(buffer, SIZE, " %s track\n", mIsAudio? "Audio": "Video");
+ snprintf(buffer, SIZE, " %s track\n", getTrackType());
result.append(buffer);
snprintf(buffer, SIZE, " reached EOS: %s\n",
mReachedEOS? "true": "false");
@@ -513,8 +520,10 @@
} else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime)) {
return "hvc1";
}
+ } else if (!strncasecmp(mime, "application/", 12)) {
+ return "mett";
} else {
- ALOGE("Track (%s) other than video or audio is not supported", mime);
+ ALOGE("Track (%s) other than video/audio/metadata is not supported", mime);
}
return NULL;
}
@@ -526,37 +535,33 @@
return UNKNOWN_ERROR;
}
- // At most 2 tracks can be supported.
- if (mTracks.size() >= 2) {
- ALOGE("Too many tracks (%zu) to add", mTracks.size());
- return ERROR_UNSUPPORTED;
- }
-
CHECK(source.get() != NULL);
const char *mime;
source->getFormat()->findCString(kKeyMIMEType, &mime);
- bool isAudio = !strncasecmp(mime, "audio/", 6);
+
+ if (!strncasecmp(mime, "audio/", 6)) {
+ if (mHasAudioTrack) {
+ ALOGE("At most one audio track can be added");
+ return ERROR_UNSUPPORTED;
+ }
+ mHasAudioTrack = true;
+ }
+
+ if (!strncasecmp(mime, "video/", 6)) {
+ if (mHasVideoTrack) {
+ ALOGE("At most one video track can be added");
+ return ERROR_UNSUPPORTED;
+ }
+ mHasVideoTrack = true;
+ }
+
if (Track::getFourCCForMime(mime) == NULL) {
ALOGE("Unsupported mime '%s'", mime);
return ERROR_UNSUPPORTED;
}
- // At this point, we know the track to be added is either
- // video or audio. Thus, we only need to check whether it
- // is an audio track or not (if it is not, then it must be
- // a video track).
-
- // No more than one video or one audio track is supported.
- for (List<Track*>::iterator it = mTracks.begin();
- it != mTracks.end(); ++it) {
- if ((*it)->isAudio() == isAudio) {
- ALOGE("%s track already exists", isAudio? "Audio": "Video");
- return ERROR_UNSUPPORTED;
- }
- }
-
- // This is the first track of either audio or video.
+ // This is a metadata track or the first track of either audio or video
// Go ahead to add the track.
Track *track = new Track(this, source, 1 + mTracks.size());
mTracks.push_back(track);
@@ -1561,11 +1566,12 @@
mIsAvc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC);
mIsHevc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);
mIsAudio = !strncasecmp(mime, "audio/", 6);
+ mIsVideo = !strncasecmp(mime, "video/", 6);
mIsMPEG4 = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4) ||
!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC);
// store temporal layer count
- if (!mIsAudio) {
+ if (mIsVideo) {
int32_t count;
if (mMeta->findInt32(kKeyTemporalLayerCount, &count) && count > 1) {
mOwner->setTemporalLayerCount(count);
@@ -1621,7 +1627,7 @@
void MPEG4Writer::Track::addOneCttsTableEntry(
size_t sampleCount, int32_t duration) {
- if (mIsAudio) {
+ if (!mIsVideo) {
return;
}
mCttsTableEntries->add(htonl(sampleCount));
@@ -1753,7 +1759,7 @@
void MPEG4Writer::writeChunkToFile(Chunk* chunk) {
ALOGV("writeChunkToFile: %" PRId64 " from %s track",
- chunk->mTimeStampUs, chunk->mTrack->isAudio()? "audio": "video");
+ chunk->mTimeStampUs, chunk->mTrack->getTrackType());
int32_t isFirstSample = true;
while (!chunk->mSamples.empty()) {
@@ -1906,7 +1912,7 @@
mStartTimeRealUs = startTimeUs;
int32_t rotationDegrees;
- if (!mIsAudio && params && params->findInt32(kKeyRotation, &rotationDegrees)) {
+ if (mIsVideo && params && params->findInt32(kKeyRotation, &rotationDegrees)) {
mRotation = rotationDegrees;
}
@@ -1964,7 +1970,7 @@
}
status_t MPEG4Writer::Track::stop() {
- ALOGD("%s track stopping", mIsAudio? "Audio": "Video");
+ ALOGD("%s track stopping", getTrackType());
if (!mStarted) {
ALOGE("Stop() called but track is not started");
return ERROR_END_OF_STREAM;
@@ -1975,15 +1981,15 @@
}
mDone = true;
- ALOGD("%s track source stopping", mIsAudio? "Audio": "Video");
+ ALOGD("%s track source stopping", getTrackType());
mSource->stop();
- ALOGD("%s track source stopped", mIsAudio? "Audio": "Video");
+ ALOGD("%s track source stopped", getTrackType());
void *dummy;
pthread_join(mThread, &dummy);
status_t err = static_cast<status_t>(reinterpret_cast<uintptr_t>(dummy));
- ALOGD("%s track stopped", mIsAudio? "Audio": "Video");
+ ALOGD("%s track stopped", getTrackType());
return err;
}
@@ -2381,8 +2387,10 @@
if (mIsAudio) {
prctl(PR_SET_NAME, (unsigned long)"AudioTrackEncoding", 0, 0, 0);
- } else {
+ } else if (mIsVideo) {
prctl(PR_SET_NAME, (unsigned long)"VideoTrackEncoding", 0, 0, 0);
+ } else {
+ prctl(PR_SET_NAME, (unsigned long)"MetadataTrackEncoding", 0, 0, 0);
}
if (mOwner->isRealTimeRecording()) {
@@ -2393,7 +2401,7 @@
status_t err = OK;
MediaBuffer *buffer;
- const char *trackName = mIsAudio ? "Audio" : "Video";
+ const char *trackName = getTrackType();
while (!mDone && (err = mSource->read(&buffer)) == OK) {
if (buffer->range_length() == 0) {
buffer->release();
@@ -2450,6 +2458,16 @@
continue;
}
+ // Per-frame metadata sample's size must be smaller than max allowed.
+ if (!mIsVideo && !mIsAudio && buffer->range_length() >= kMaxMetadataSize) {
+ ALOGW("Buffer size is %zu. Maximum metadata buffer size is %lld for %s track",
+ buffer->range_length(), (long long)kMaxMetadataSize, trackName);
+ buffer->release();
+ mSource->stop();
+ mIsMalformed = true;
+ break;
+ }
+
++nActualFrames;
// Make a deep copy of the MediaBuffer and Metadata and release
@@ -2536,7 +2554,7 @@
break;
}
- if (!mIsAudio) {
+ if (mIsVideo) {
/*
* Composition time: timestampUs
* Decoding time: decodingTimeUs
@@ -2661,7 +2679,6 @@
timestampUs += deltaUs;
}
}
-
mStszTableEntries->add(htonl(sampleSize));
if (mStszTableEntries->count() > 2) {
@@ -2808,7 +2825,7 @@
return true;
}
- if (!mIsAudio && mStssTableEntries->count() == 0) { // no sync frames for video
+ if (mIsVideo && mStssTableEntries->count() == 0) { // no sync frames for video
ALOGE("There are no sync frames for video track");
return true;
}
@@ -2831,7 +2848,7 @@
mOwner->notify(MEDIA_RECORDER_TRACK_EVENT_INFO,
trackNum | MEDIA_RECORDER_TRACK_INFO_TYPE,
- mIsAudio? 0: 1);
+ mIsAudio ? 0: 1);
mOwner->notify(MEDIA_RECORDER_TRACK_EVENT_INFO,
trackNum | MEDIA_RECORDER_TRACK_INFO_DURATION_MS,
@@ -2971,11 +2988,11 @@
return OK;
}
+const char *MPEG4Writer::Track::getTrackType() const {
+ return mIsAudio ? "Audio" : (mIsVideo ? "Video" : "Metadata");
+}
+
void MPEG4Writer::Track::writeTrackHeader(bool use32BitOffset) {
-
- ALOGV("%s track time scale: %d",
- mIsAudio? "Audio": "Video", mTimeScale);
-
uint32_t now = getMpeg4Time();
mOwner->beginBox("trak");
writeTkhdBox(now);
@@ -2985,8 +3002,10 @@
mOwner->beginBox("minf");
if (mIsAudio) {
writeSmhdBox();
- } else {
+ } else if (mIsVideo) {
writeVmhdBox();
+ } else {
+ writeNmhdBox();
}
writeDinfBox();
writeStblBox(use32BitOffset);
@@ -3002,13 +3021,15 @@
mOwner->writeInt32(1); // entry count
if (mIsAudio) {
writeAudioFourCCBox();
- } else {
+ } else if (mIsVideo) {
writeVideoFourCCBox();
+ } else {
+ writeMetadataFourCCBox();
}
mOwner->endBox(); // stsd
writeSttsBox();
- writeCttsBox();
- if (!mIsAudio) {
+ if (mIsVideo) {
+ writeCttsBox();
writeStssBox();
}
writeStszBox();
@@ -3017,6 +3038,20 @@
mOwner->endBox(); // stbl
}
+void MPEG4Writer::Track::writeMetadataFourCCBox() {
+ const char *mime;
+ bool success = mMeta->findCString(kKeyMIMEType, &mime);
+ CHECK(success);
+ const char *fourcc = getFourCCForMime(mime);
+ if (fourcc == NULL) {
+ ALOGE("Unknown mime type '%s'.", mime);
+ TRESPASS();
+ }
+ mOwner->beginBox(fourcc); // TextMetaDataSampleEntry
+ mOwner->writeCString(mime); // metadata mime_format
+ mOwner->endBox(); // mett
+}
+
void MPEG4Writer::Track::writeVideoFourCCBox() {
const char *mime;
bool success = mMeta->findCString(kKeyMIMEType, &mime);
@@ -3024,7 +3059,7 @@
const char *fourcc = getFourCCForMime(mime);
if (fourcc == NULL) {
ALOGE("Unknown mime type '%s'.", mime);
- CHECK(!"should not be here, unknown mime type.");
+ TRESPASS();
}
mOwner->beginBox(fourcc); // video format
@@ -3097,7 +3132,7 @@
const char *fourcc = getFourCCForMime(mime);
if (fourcc == NULL) {
ALOGE("Unknown mime type '%s'.", mime);
- CHECK(!"should not be here, unknown mime type.");
+ TRESPASS();
}
mOwner->beginBox(fourcc); // audio format
@@ -3240,7 +3275,7 @@
mOwner->writeCompositionMatrix(mRotation); // matrix
- if (mIsAudio) {
+ if (!mIsVideo) {
mOwner->writeInt32(0);
mOwner->writeInt32(0);
} else {
@@ -3273,16 +3308,22 @@
mOwner->endBox();
}
+void MPEG4Writer::Track::writeNmhdBox() {
+ mOwner->beginBox("nmhd");
+ mOwner->writeInt32(0); // version=0, flags=0
+ mOwner->endBox();
+}
+
void MPEG4Writer::Track::writeHdlrBox() {
mOwner->beginBox("hdlr");
mOwner->writeInt32(0); // version=0, flags=0
mOwner->writeInt32(0); // component type: should be mhlr
- mOwner->writeFourcc(mIsAudio ? "soun" : "vide"); // component subtype
+ mOwner->writeFourcc(mIsAudio ? "soun" : (mIsVideo ? "vide" : "meta")); // component subtype
mOwner->writeInt32(0); // reserved
mOwner->writeInt32(0); // reserved
mOwner->writeInt32(0); // reserved
// Removing "r" for the name string just makes the string 4 byte aligned
- mOwner->writeCString(mIsAudio ? "SoundHandle": "VideoHandle"); // name
+ mOwner->writeCString(mIsAudio ? "SoundHandle": (mIsVideo ? "VideoHandle" : "MetadHandle"));
mOwner->endBox();
}
@@ -3300,7 +3341,12 @@
// Each character is packed as the difference between its ASCII value and 0x60.
// For "English", these are 00101, 01110, 00111.
// XXX: Where is the padding bit located: 0x15C7?
- mOwner->writeInt16(0); // language code
+ const char *lang = NULL;
+ int16_t langCode = 0;
+ if (mMeta->findCString(kKeyMediaLanguage, &lang) && lang && strnlen(lang, 3) > 2) {
+ langCode = ((lang[0] & 0x1f) << 10) | ((lang[1] & 0x1f) << 5) | (lang[2] & 0x1f);
+ }
+ mOwner->writeInt16(langCode); // language code
mOwner->writeInt16(0); // predefined
mOwner->endBox();
}
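The mdhd language field written above packs a three-letter ISO 639-2/T code into 15 bits, each letter stored as its ASCII value minus 0x60 in a 5-bit group. A quick standalone check of that packing (same expression as the change, hypothetical wrapper name):

#include <cstdint>

// Pack a 3-letter ISO 639-2/T code (e.g. "eng") into the 15-bit mdhd language field.
static int16_t packMdhdLanguage(const char lang[3]) {
    return static_cast<int16_t>(((lang[0] & 0x1f) << 10) |
                                ((lang[1] & 0x1f) << 5)  |
                                 (lang[2] & 0x1f));
}
// "eng": 'e'-0x60 = 5, 'n'-0x60 = 14, 'g'-0x60 = 7 -> 00101 01110 00111 = 0x15C7,
// matching the 0x15C7 value mentioned in the existing comment.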
@@ -3404,10 +3450,6 @@
}
void MPEG4Writer::Track::writeCttsBox() {
- if (mIsAudio) { // ctts is not for audio
- return;
- }
-
// There is no B frame at all
if (mMinCttsOffsetTimeUs == mMaxCttsOffsetTimeUs) {
return;
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index b088775..80860db 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -19,6 +19,8 @@
#include <inttypes.h>
#include "include/avc_utils.h"
+#include "include/SecureBuffer.h"
+#include "include/SharedMemoryBuffer.h"
#include "include/SoftwareRenderer.h"
#include <binder/IMemory.h>
@@ -30,6 +32,7 @@
#include <media/ICrypto.h>
#include <media/IOMX.h>
#include <media/IResourceManagerService.h>
+#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -170,8 +173,9 @@
// static
sp<MediaCodec> MediaCodec::CreateByType(
- const sp<ALooper> &looper, const AString &mime, bool encoder, status_t *err, pid_t pid) {
- sp<MediaCodec> codec = new MediaCodec(looper, pid);
+ const sp<ALooper> &looper, const AString &mime, bool encoder, status_t *err, pid_t pid,
+ uid_t uid) {
+ sp<MediaCodec> codec = new MediaCodec(looper, pid, uid);
const status_t ret = codec->init(mime, true /* nameIsType */, encoder);
if (err != NULL) {
@@ -182,8 +186,8 @@
// static
sp<MediaCodec> MediaCodec::CreateByComponentName(
- const sp<ALooper> &looper, const AString &name, status_t *err, pid_t pid) {
- sp<MediaCodec> codec = new MediaCodec(looper, pid);
+ const sp<ALooper> &looper, const AString &name, status_t *err, pid_t pid, uid_t uid) {
+ sp<MediaCodec> codec = new MediaCodec(looper, pid, uid);
const status_t ret = codec->init(name, false /* nameIsType */, false /* encoder */);
if (err != NULL) {
@@ -211,53 +215,27 @@
// static
sp<PersistentSurface> MediaCodec::CreatePersistentInputSurface() {
OMXClient client;
- CHECK_EQ(client.connect(), (status_t)OK);
+ if (client.connect() != OK) {
+ ALOGE("Failed to connect to OMX to create persistent input surface.");
+ return NULL;
+ }
+
sp<IOMX> omx = client.interface();
- const sp<IMediaCodecList> mediaCodecList = MediaCodecList::getInstance();
- if (mediaCodecList == NULL) {
- ALOGE("Failed to obtain MediaCodecList!");
- return NULL; // if called from Java should raise IOException
- }
-
- AString tmp;
- sp<AMessage> globalSettings = mediaCodecList->getGlobalSettings();
- if (globalSettings == NULL || !globalSettings->findString(
- kMaxEncoderInputBuffers, &tmp)) {
- ALOGE("Failed to get encoder input buffer count!");
- return NULL;
- }
-
- int32_t bufferCount = strtol(tmp.c_str(), NULL, 10);
- if (bufferCount <= 0
- || bufferCount > BufferQueue::MAX_MAX_ACQUIRED_BUFFERS) {
- ALOGE("Encoder input buffer count is invalid!");
- return NULL;
- }
-
sp<IGraphicBufferProducer> bufferProducer;
- sp<IGraphicBufferConsumer> bufferConsumer;
+ sp<IGraphicBufferSource> bufferSource;
- status_t err = omx->createPersistentInputSurface(
- &bufferProducer, &bufferConsumer);
+ status_t err = omx->createInputSurface(&bufferProducer, &bufferSource);
if (err != OK) {
ALOGE("Failed to create persistent input surface.");
return NULL;
}
- err = bufferConsumer->setMaxAcquiredBufferCount(bufferCount);
-
- if (err != NO_ERROR) {
- ALOGE("Unable to set BQ max acquired buffer count to %u: %d",
- bufferCount, err);
- return NULL;
- }
-
- return new PersistentSurface(bufferProducer, bufferConsumer);
+ return new PersistentSurface(bufferProducer, bufferSource);
}
-MediaCodec::MediaCodec(const sp<ALooper> &looper, pid_t pid)
+MediaCodec::MediaCodec(const sp<ALooper> &looper, pid_t pid, uid_t uid)
: mState(UNINITIALIZED),
mReleasedByResourceManager(false),
mLooper(looper),
@@ -279,6 +257,11 @@
mDequeueOutputReplyID(0),
mHaveInputSurface(false),
mHavePendingInputBuffers(false) {
+ if (uid == kNoUid) {
+ mUid = IPCThreadState::self()->getCallingUid();
+ } else {
+ mUid = uid;
+ }
}
MediaCodec::~MediaCodec() {
@@ -860,7 +843,7 @@
return OK;
}
-status_t MediaCodec::getWidevineLegacyBuffers(Vector<sp<ABuffer> > *buffers) const {
+status_t MediaCodec::getWidevineLegacyBuffers(Vector<sp<MediaCodecBuffer> > *buffers) const {
sp<AMessage> msg = new AMessage(kWhatGetBuffers, this);
msg->setInt32("portIndex", kPortIndexInput);
msg->setPointer("buffers", buffers);
@@ -870,7 +853,7 @@
return PostAndAwaitResponse(msg, &response);
}
-status_t MediaCodec::getInputBuffers(Vector<sp<ABuffer> > *buffers) const {
+status_t MediaCodec::getInputBuffers(Vector<sp<MediaCodecBuffer> > *buffers) const {
sp<AMessage> msg = new AMessage(kWhatGetBuffers, this);
msg->setInt32("portIndex", kPortIndexInput);
msg->setPointer("buffers", buffers);
@@ -879,7 +862,7 @@
return PostAndAwaitResponse(msg, &response);
}
-status_t MediaCodec::getOutputBuffers(Vector<sp<ABuffer> > *buffers) const {
+status_t MediaCodec::getOutputBuffers(Vector<sp<MediaCodecBuffer> > *buffers) const {
sp<AMessage> msg = new AMessage(kWhatGetBuffers, this);
msg->setInt32("portIndex", kPortIndexOutput);
msg->setPointer("buffers", buffers);
@@ -888,17 +871,17 @@
return PostAndAwaitResponse(msg, &response);
}
-status_t MediaCodec::getOutputBuffer(size_t index, sp<ABuffer> *buffer) {
+status_t MediaCodec::getOutputBuffer(size_t index, sp<MediaCodecBuffer> *buffer) {
sp<AMessage> format;
return getBufferAndFormat(kPortIndexOutput, index, buffer, &format);
}
status_t MediaCodec::getOutputFormat(size_t index, sp<AMessage> *format) {
- sp<ABuffer> buffer;
+ sp<MediaCodecBuffer> buffer;
return getBufferAndFormat(kPortIndexOutput, index, &buffer, format);
}
-status_t MediaCodec::getInputBuffer(size_t index, sp<ABuffer> *buffer) {
+status_t MediaCodec::getInputBuffer(size_t index, sp<MediaCodecBuffer> *buffer) {
sp<AMessage> format;
return getBufferAndFormat(kPortIndexInput, index, buffer, &format);
}
@@ -909,7 +892,7 @@
status_t MediaCodec::getBufferAndFormat(
size_t portIndex, size_t index,
- sp<ABuffer> *buffer, sp<AMessage> *format) {
+ sp<MediaCodecBuffer> *buffer, sp<AMessage> *format) {
// use mutex instead of a context switch
if (mReleasedByResourceManager) {
ALOGE("getBufferAndFormat - resource already released");
@@ -917,7 +900,7 @@
}
if (buffer == NULL) {
- ALOGE("getBufferAndFormat - null ABuffer");
+ ALOGE("getBufferAndFormat - null MediaCodecBuffer");
return INVALID_OPERATION;
}
@@ -952,12 +935,8 @@
return INVALID_OPERATION;
}
- // by the time buffers array is initialized, crypto is set
- *buffer = (portIndex == kPortIndexInput && mCrypto != NULL) ?
- info.mEncryptedData :
- info.mData;
-
- *format = info.mFormat;
+ *buffer = info.mData;
+ *format = info.mData->format();
return OK;
}
@@ -1046,7 +1025,7 @@
return false;
}
- const sp<ABuffer> &buffer =
+ const sp<MediaCodecBuffer> &buffer =
mPortBuffers[kPortIndexOutput].itemAt(index).mData;
response->setSize("index", index);
@@ -1123,14 +1102,16 @@
break;
}
- case STOPPING:
case RELEASING:
{
// Ignore the error, assuming we'll still get
- // the shutdown complete notification.
-
+ // the shutdown complete notification. If we
+ // don't, we'll timeout and force release.
sendErrorResponse = false;
-
+ }
+ // fall-thru
+ case STOPPING:
+ {
if (mFlags & kFlagSawMediaServerDie) {
// MediaServer died, there definitely won't
// be a shutdown complete notification after
@@ -1144,6 +1125,7 @@
mComponentName.clear();
}
(new AMessage)->postReply(mReplyID);
+ sendErrorResponse = false;
}
break;
}
@@ -1310,6 +1292,8 @@
status_t err = NO_ERROR;
sp<AMessage> response = new AMessage();
if (!msg->findInt32("err", &err)) {
+ CHECK(msg->findMessage("input-format", &mInputFormat));
+ CHECK(msg->findMessage("output-format", &mOutputFormat));
mHaveInputSurface = true;
} else {
response->setInt32("err", err);
@@ -1344,6 +1328,7 @@
|| portIndex == kPortIndexOutput);
mPortBuffers[portIndex].clear();
+ mPortBufferArrays[portIndex].clear();
Vector<BufferInfo> *buffers = &mPortBuffers[portIndex];
@@ -1370,18 +1355,14 @@
BufferInfo info;
info.mBufferID = portDesc->bufferIDAt(i);
info.mOwnedByClient = false;
- info.mData = portDesc->bufferAt(i);
- info.mNativeHandle = portDesc->handleAt(i);
- info.mMemRef = portDesc->memRefAt(i);
-
+ sp<MediaCodecBuffer> buffer = portDesc->bufferAt(i);
if (portIndex == kPortIndexInput && mCrypto != NULL) {
- sp<IMemory> mem = mDealer->allocate(info.mData->capacity());
- info.mEncryptedData =
- new ABuffer(mem->pointer(), info.mData->capacity());
- info.mSharedEncryptedBuffer = mem;
+ info.mSharedEncryptedBuffer = mDealer->allocate(buffer->capacity());
+ buffer = new SharedMemoryBuffer(
+ mInputFormat, info.mSharedEncryptedBuffer);
}
-
buffers->push_back(info);
+ mPortBufferArrays[portIndex].push_back(buffer);
}
if (portIndex == kPortIndexOutput) {
@@ -1405,62 +1386,6 @@
break;
}
- case CodecBase::kWhatOutputFormatChanged:
- {
- CHECK(msg->findMessage("format", &mOutputFormat));
-
- ALOGV("[%s] output format changed to: %s",
- mComponentName.c_str(), mOutputFormat->debugString(4).c_str());
-
- if (mSoftRenderer == NULL &&
- mSurface != NULL &&
- (mFlags & kFlagUsesSoftwareRenderer)) {
- AString mime;
- CHECK(mOutputFormat->findString("mime", &mime));
-
- // TODO: propagate color aspects to software renderer to allow better
- // color conversion to RGB. For now, just mark dataspace for YUV
- // rendering.
- int32_t dataSpace;
- if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
- ALOGD("[%s] setting dataspace on output surface to #%x",
- mComponentName.c_str(), dataSpace);
- int err = native_window_set_buffers_data_space(
- mSurface.get(), (android_dataspace)dataSpace);
- ALOGW_IF(err != 0, "failed to set dataspace on surface (%d)", err);
- }
-
- if (mime.startsWithIgnoreCase("video/")) {
- mSoftRenderer = new SoftwareRenderer(mSurface, mRotationDegrees);
- }
- }
-
- if (mFlags & kFlagIsEncoder) {
- // Before we announce the format change we should
- // collect codec specific data and amend the output
- // format as necessary.
- mFlags |= kFlagGatherCodecSpecificData;
- } else if (mFlags & kFlagIsAsync) {
- onOutputFormatChanged();
- } else {
- mFlags |= kFlagOutputFormatChanged;
- postActivityNotificationIfPossible();
- }
-
- // Notify mCrypto of video resolution changes
- if (mCrypto != NULL) {
- int32_t left, top, right, bottom, width, height;
- if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
- mCrypto->notifyResolution(right - left + 1, bottom - top + 1);
- } else if (mOutputFormat->findInt32("width", &width)
- && mOutputFormat->findInt32("height", &height)) {
- mCrypto->notifyResolution(width, height);
- }
- }
-
- break;
- }
-
case CodecBase::kWhatOutputFramesRendered:
{
// ignore these in all states except running, and check that we have a
@@ -1484,6 +1409,9 @@
break;
}
+ // TODO: hold reference of buffer from downstream when
+ // mPortBuffers is removed.
+
if (!mCSD.empty()) {
ssize_t index = dequeuePortBuffer(kPortIndexInput);
CHECK_GE(index, 0);
@@ -1539,34 +1467,74 @@
break;
}
- sp<ABuffer> buffer;
- CHECK(msg->findBuffer("buffer", &buffer));
+ sp<RefBase> obj;
+ CHECK(msg->findObject("buffer", &obj));
+ sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
+ // TODO: hold buffer's reference when we remove mPortBuffers
int32_t omxFlags;
CHECK(msg->findInt32("flags", &omxFlags));
buffer->meta()->setInt32("omxFlags", omxFlags);
+ if (mOutputFormat != buffer->format()) {
+ mOutputFormat = buffer->format();
+ ALOGV("[%s] output format changed to: %s",
+ mComponentName.c_str(), mOutputFormat->debugString(4).c_str());
- if (mFlags & kFlagGatherCodecSpecificData) {
- // This is the very first output buffer after a
- // format change was signalled, it'll either contain
- // the one piece of codec specific data we can expect
- // or there won't be codec specific data.
- if (omxFlags & OMX_BUFFERFLAG_CODECCONFIG) {
- status_t err =
- amendOutputFormatWithCodecSpecificData(buffer);
+ if (mSoftRenderer == NULL &&
+ mSurface != NULL &&
+ (mFlags & kFlagUsesSoftwareRenderer)) {
+ AString mime;
+ CHECK(mOutputFormat->findString("mime", &mime));
- if (err != OK) {
- ALOGE("Codec spit out malformed codec "
- "specific data!");
+ // TODO: propagate color aspects to software renderer to allow better
+ // color conversion to RGB. For now, just mark dataspace for YUV
+ // rendering.
+ int32_t dataSpace;
+ if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
+ ALOGD("[%s] setting dataspace on output surface to #%x",
+ mComponentName.c_str(), dataSpace);
+ int err = native_window_set_buffers_data_space(
+ mSurface.get(), (android_dataspace)dataSpace);
+ ALOGW_IF(err != 0, "failed to set dataspace on surface (%d)", err);
+ }
+
+ if (mime.startsWithIgnoreCase("video/")) {
+ mSoftRenderer = new SoftwareRenderer(mSurface, mRotationDegrees);
}
}
- mFlags &= ~kFlagGatherCodecSpecificData;
+ if (mFlags & kFlagIsEncoder) {
+ // Before we announce the format change we should
+ // collect codec specific data and amend the output
+ // format as necessary.
+ if (omxFlags & OMX_BUFFERFLAG_CODECCONFIG) {
+ status_t err =
+ amendOutputFormatWithCodecSpecificData(buffer);
+
+ if (err != OK) {
+ ALOGE("Codec spit out malformed codec "
+ "specific data!");
+ }
+ }
+ }
+
if (mFlags & kFlagIsAsync) {
onOutputFormatChanged();
} else {
mFlags |= kFlagOutputFormatChanged;
+ postActivityNotificationIfPossible();
+ }
+
+ // Notify mCrypto of video resolution changes
+ if (mCrypto != NULL) {
+ int32_t left, top, right, bottom, width, height;
+ if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
+ mCrypto->notifyResolution(right - left + 1, bottom - top + 1);
+ } else if (mOutputFormat->findInt32("width", &width)
+ && mOutputFormat->findInt32("height", &height)) {
+ mCrypto->notifyResolution(width, height);
+ }
}
}
@@ -1594,6 +1562,10 @@
case CodecBase::kWhatShutdownCompleted:
{
+ if (mState == UNINITIALIZED) {
+ // Ignore shutdown complete if we're already released.
+ break;
+ }
if (mState == STOPPING) {
setState(INITIALIZED);
} else {
@@ -1915,7 +1887,9 @@
}
}
- if (!((mFlags & kFlagIsComponentAllocated) && targetState == UNINITIALIZED) // See 1
+ bool isReleasingAllocatedComponent =
+ (mFlags & kFlagIsComponentAllocated) && targetState == UNINITIALIZED;
+ if (!isReleasingAllocatedComponent // See 1
&& mState != INITIALIZED
&& mState != CONFIGURED && !isExecuting()) {
// 1) Permit release to shut down the component if allocated.
@@ -1939,6 +1913,14 @@
break;
}
+ // If we're flushing, or we're stopping but received a release
+ // request, post the reply for the pending call first, and consider
+ // it done. The reply token will be replaced after this, and we'll
+ // no longer be able to reply.
+ if (mState == FLUSHING || mState == STOPPING) {
+ (new AMessage)->postReply(mReplyID);
+ }
+
if (mFlags & kFlagSawMediaServerDie) {
// It's dead, Jim. Don't expect initiateShutdown to yield
// any useful results now...
@@ -1950,6 +1932,15 @@
break;
}
+ // If we already have an error, component may not be able to
+ // complete the shutdown properly. If we're stopping, post the
+ // reply now with an error to unblock the client, client can
+ // release after the failure (instead of ANR).
+ if (msg->what() == kWhatStop && (mFlags & kFlagStickyError)) {
+ PostReplyWithError(replyID, getStickyError());
+ break;
+ }
+
mReplyID = replyID;
setState(msg->what() == kWhatStop ? STOPPING : RELEASING);
@@ -1961,6 +1952,7 @@
if (mSoftRenderer != NULL && (mFlags & kFlagPushBlankBuffersOnShutdown)) {
pushBlankBuffersToNativeWindow(mSurface.get());
}
+
break;
}
@@ -2123,7 +2115,7 @@
sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
- if (!isExecuting()) {
+ if (!isExecuting() || !mHaveInputSurface) {
PostReplyWithError(replyID, INVALID_OPERATION);
break;
} else if (mFlags & kFlagStickyError) {
@@ -2156,7 +2148,7 @@
int32_t portIndex;
CHECK(msg->findInt32("portIndex", &portIndex));
- Vector<sp<ABuffer> > *dstBuffers;
+ Vector<sp<MediaCodecBuffer> > *dstBuffers;
CHECK(msg->findPointer("buffers", (void **)&dstBuffers));
dstBuffers->clear();
@@ -2164,14 +2156,10 @@
// createInputSurface(), or persistent set by setInputSurface()),
// give the client an empty input buffers array.
if (portIndex != kPortIndexInput || !mHaveInputSurface) {
- const Vector<BufferInfo> &srcBuffers = mPortBuffers[portIndex];
+ const Vector<sp<MediaCodecBuffer>> &srcBuffers = mPortBufferArrays[portIndex];
for (size_t i = 0; i < srcBuffers.size(); ++i) {
- const BufferInfo &info = srcBuffers.itemAt(i);
-
- dstBuffers->push_back(
- (portIndex == kPortIndexInput && mCrypto != NULL)
- ? info.mEncryptedData : info.mData);
+ dstBuffers->push_back(srcBuffers[i]);
}
}
@@ -2306,8 +2294,7 @@
sp<ABuffer> csd = *mCSD.begin();
mCSD.erase(mCSD.begin());
- const sp<ABuffer> &codecInputData =
- (mCrypto != NULL) ? info->mEncryptedData : info->mData;
+ const sp<MediaCodecBuffer> &codecInputData = info->mData;
if (csd->size() > codecInputData->capacity()) {
return -EINVAL;
@@ -2342,7 +2329,6 @@
mFlags &= ~kFlagOutputBuffersChanged;
mFlags &= ~kFlagStickyError;
mFlags &= ~kFlagIsEncoder;
- mFlags &= ~kFlagGatherCodecSpecificData;
mFlags &= ~kFlagIsAsync;
mStickyError = OK;
@@ -2384,12 +2370,15 @@
if (info->mNotify != NULL) {
sp<AMessage> msg = info->mNotify;
info->mNotify = NULL;
+ msg->setObject("buffer", (portIndex == kPortIndexInput && mCrypto != NULL)
+ ? info->mSecureData : info->mData);
if (isReclaim && info->mOwnedByClient) {
ALOGD("port %d buffer %zu still owned by client when codec is reclaimed",
portIndex, i);
} else {
- info->mMemRef = NULL;
info->mOwnedByClient = false;
+ info->mData.clear();
+ info->mSecureData.clear();
}
if (portIndex == kPortIndexInput) {
@@ -2409,6 +2398,9 @@
uint32_t bufferID;
CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
+ sp<RefBase> obj;
+ CHECK(msg->findObject("buffer", &obj));
+ sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
Vector<BufferInfo> *buffers = &mPortBuffers[portIndex];
@@ -2419,8 +2411,12 @@
CHECK(info->mNotify == NULL);
CHECK(msg->findMessage("reply", &info->mNotify));
- info->mFormat =
- (portIndex == kPortIndexInput) ? mInputFormat : mOutputFormat;
+ if (portIndex == kPortIndexInput && mCrypto != NULL) {
+ info->mSecureData = buffer;
+ info->mData = mPortBufferArrays[portIndex][i];
+ } else {
+ info->mData = buffer;
+ }
mAvailPortBuffers[portIndex].push_back(i);
return i;
@@ -2506,32 +2502,22 @@
sp<AMessage> reply = info->mNotify;
info->mData->setRange(offset, size);
- info->mData->meta()->setInt64("timeUs", timeUs);
- if (flags & BUFFER_FLAG_EOS) {
- info->mData->meta()->setInt32("eos", true);
- }
-
- if (flags & BUFFER_FLAG_CODECCONFIG) {
- info->mData->meta()->setInt32("csd", true);
- }
-
+ sp<MediaCodecBuffer> buffer = info->mData;
if (mCrypto != NULL) {
- if (size > info->mEncryptedData->capacity()) {
- return -ERANGE;
- }
-
AString *errorDetailMsg;
CHECK(msg->findPointer("errorDetailMsg", (void **)&errorDetailMsg));
- void *dst_pointer = info->mData->base();
+ void *dst_pointer = nullptr;
ICrypto::DestinationType dst_type = ICrypto::kDestinationTypeOpaqueHandle;
- if (info->mNativeHandle != NULL) {
- dst_pointer = (void *)info->mNativeHandle->handle();
- dst_type = ICrypto::kDestinationTypeNativeHandle;
- } else if ((mFlags & kFlagIsSecure) == 0) {
+ if ((mFlags & kFlagIsSecure) == 0) {
+ dst_pointer = info->mSecureData->base();
dst_type = ICrypto::kDestinationTypeVmPointer;
+ } else {
+ sp<SecureBuffer> secureData = static_cast<SecureBuffer *>(info->mSecureData.get());
+ dst_pointer = secureData->getDestinationPointer();
+ dst_type = secureData->getDestinationType();
}
ssize_t result = mCrypto->decrypt(
@@ -2551,7 +2537,17 @@
return result;
}
- info->mData->setRange(0, result);
+ info->mSecureData->setRange(0, result);
+ buffer = info->mSecureData;
+ }
+ buffer->meta()->setInt64("timeUs", timeUs);
+
+ if (flags & BUFFER_FLAG_EOS) {
+ buffer->meta()->setInt32("eos", true);
+ }
+
+ if (flags & BUFFER_FLAG_CODECCONFIG) {
+ buffer->meta()->setInt32("csd", true);
}
// synchronization boundary for getBufferAndFormat
@@ -2559,7 +2555,9 @@
Mutex::Autolock al(mBufferLock);
info->mOwnedByClient = false;
}
- reply->setBuffer("buffer", info->mData);
+ info->mData.clear();
+ info->mSecureData.clear();
+ reply->setObject("buffer", buffer);
reply->post();
info->mNotify = NULL;
@@ -2630,7 +2628,7 @@
if (mSoftRenderer != NULL) {
std::list<FrameRenderTracker::Info> doneFrames = mSoftRenderer->render(
info->mData->data(), info->mData->size(),
- mediaTimeUs, renderTimeNs, NULL, info->mFormat);
+ mediaTimeUs, renderTimeNs, NULL, info->mData->format());
// if we are running, notify rendered frames
if (!doneFrames.empty() && mState == STARTED && mOnFrameRenderedNotification != NULL) {
@@ -2644,8 +2642,10 @@
}
}
+ info->mNotify->setObject("buffer", info->mData);
+ info->mData.clear();
info->mNotify->post();
- info->mNotify = NULL;
+ info->mNotify.clear();
return OK;
}
@@ -2669,13 +2669,13 @@
info->mOwnedByClient = true;
// set image-data
- if (info->mFormat != NULL) {
+ if (info->mData->format() != NULL) {
sp<ABuffer> imageData;
- if (info->mFormat->findBuffer("image-data", &imageData)) {
+ if (info->mData->format()->findBuffer("image-data", &imageData)) {
info->mData->meta()->setBuffer("image-data", imageData);
}
int32_t left, top, right, bottom;
- if (info->mFormat->findRect("crop", &left, &top, &right, &bottom)) {
+ if (info->mData->format()->findRect("crop", &left, &top, &right, &bottom)) {
info->mData->meta()->setRect("crop-rect", left, top, right, bottom);
}
}
@@ -2765,7 +2765,7 @@
void MediaCodec::onOutputBufferAvailable() {
int32_t index;
while ((index = dequeuePortBuffer(kPortIndexOutput)) >= 0) {
- const sp<ABuffer> &buffer =
+ const sp<MediaCodecBuffer> &buffer =
mPortBuffers[kPortIndexOutput].itemAt(index).mData;
sp<AMessage> msg = mCallback->dup();
msg->setInt32("callbackID", CB_OUTPUT_AVAILABLE);
@@ -2866,7 +2866,7 @@
}
status_t MediaCodec::amendOutputFormatWithCodecSpecificData(
- const sp<ABuffer> &buffer) {
+ const sp<MediaCodecBuffer> &buffer) {
AString mime;
CHECK(mOutputFormat->findString("mime", &mime));
@@ -2900,7 +2900,10 @@
} else {
// For everything else we just stash the codec specific data into
// the output format as a single piece of csd under "csd-0".
- mOutputFormat->setBuffer("csd-0", buffer);
+ sp<ABuffer> csd = new ABuffer(buffer->size());
+ memcpy(csd->data(), buffer->data(), buffer->size());
+ csd->setRange(0, buffer->size());
+ mOutputFormat->setBuffer("csd-0", csd);
}
return OK;
@@ -2912,10 +2915,10 @@
}
if (mState == CONFIGURED && !mBatteryStatNotified) {
- BatteryNotifier::getInstance().noteStartVideo();
+ BatteryNotifier::getInstance().noteStartVideo(mUid);
mBatteryStatNotified = true;
} else if (mState == UNINITIALIZED && mBatteryStatNotified) {
- BatteryNotifier::getInstance().noteStopVideo();
+ BatteryNotifier::getInstance().noteStopVideo(mUid);
mBatteryStatNotified = false;
}
}
diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp
index 23d49f0..d3b34b7 100644
--- a/media/libstagefright/MediaCodecList.cpp
+++ b/media/libstagefright/MediaCodecList.cpp
@@ -44,8 +44,6 @@
namespace android {
-const char *kMaxEncoderInputBuffers = "max-video-encoder-input-buffers";
-
static Mutex sInitMutex;
static bool parseBoolean(const char *s) {
@@ -196,9 +194,7 @@
if (mInitCheck != OK) {
return; // this may fail if IMediaPlayerService is not available.
}
- mOMX = client.interface();
parseXMLFile(codecs_xml);
- mOMX.clear();
if (mInitCheck != OK) {
if (ignore_errors) {
diff --git a/media/libstagefright/MediaCodecListOverrides.cpp b/media/libstagefright/MediaCodecListOverrides.cpp
index 33795f3..095fc6a 100644
--- a/media/libstagefright/MediaCodecListOverrides.cpp
+++ b/media/libstagefright/MediaCodecListOverrides.cpp
@@ -127,61 +127,6 @@
return format;
}
-static size_t doProfileEncoderInputBuffers(
- const AString &name, const AString &mime, const sp<MediaCodecInfo::Capabilities> &caps) {
- ALOGV("doProfileEncoderInputBuffers: name %s, mime %s", name.c_str(), mime.c_str());
-
- sp<AMessage> format = getMeasureFormat(true /* isEncoder */, mime, caps);
- if (format == NULL) {
- return 0;
- }
-
- format->setInt32("color-format", OMX_COLOR_FormatAndroidOpaque);
- ALOGV("doProfileEncoderInputBuffers: format %s", format->debugString().c_str());
-
- status_t err = OK;
- sp<ALooper> looper = new ALooper;
- looper->setName("MediaCodec_looper");
- looper->start(
- false /* runOnCallingThread */, false /* canCallJava */, ANDROID_PRIORITY_AUDIO);
-
- sp<MediaCodec> codec = MediaCodec::CreateByComponentName(looper, name.c_str(), &err);
- if (err != OK) {
- ALOGE("Failed to create codec: %s", name.c_str());
- return 0;
- }
-
- err = codec->configure(format, NULL, NULL, MediaCodec::CONFIGURE_FLAG_ENCODE);
- if (err != OK) {
- ALOGE("Failed to configure codec: %s with mime: %s", name.c_str(), mime.c_str());
- codec->release();
- return 0;
- }
-
- sp<IGraphicBufferProducer> bufferProducer;
- err = codec->createInputSurface(&bufferProducer);
- if (err != OK) {
- ALOGE("Failed to create surface: %s with mime: %s", name.c_str(), mime.c_str());
- codec->release();
- return 0;
- }
-
- int minUndequeued = 0;
- err = bufferProducer->query(
- NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeued);
- if (err != OK) {
- ALOGE("Failed to query NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS");
- minUndequeued = 0;
- }
-
- err = codec->release();
- if (err != OK) {
- ALOGW("Failed to release codec: %s with mime: %s", name.c_str(), mime.c_str());
- }
-
- return minUndequeued;
-}
-
static size_t doProfileCodecs(
bool isEncoder, const AString &name, const AString &mime, const sp<MediaCodecInfo::Capabilities> &caps) {
sp<AMessage> format = getMeasureFormat(isEncoder, mime, caps);
@@ -276,7 +221,6 @@
bool forceToMeasure) {
KeyedVector<AString, sp<MediaCodecInfo::Capabilities>> codecsNeedMeasure;
AString supportMultipleSecureCodecs = "true";
- size_t maxEncoderInputBuffers = 0;
for (size_t i = 0; i < infos.size(); ++i) {
const sp<MediaCodecInfo> info = infos[i];
AString name = info->getCodecName();
@@ -319,21 +263,9 @@
supportMultipleSecureCodecs = "false";
}
}
- if (info->isEncoder() && mimes[i].startsWith("video/")) {
- size_t encoderInputBuffers =
- doProfileEncoderInputBuffers(name, mimes[i], caps);
- if (encoderInputBuffers > maxEncoderInputBuffers) {
- maxEncoderInputBuffers = encoderInputBuffers;
- }
- }
}
}
}
- if (maxEncoderInputBuffers > 0) {
- char tmp[32];
- sprintf(tmp, "%zu", maxEncoderInputBuffers);
- global_results->add(kMaxEncoderInputBuffers, tmp);
- }
global_results->add(kPolicySupportsMultipleSecureCodecs, supportMultipleSecureCodecs);
}
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index 35c07ca..de4d06f 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -20,10 +20,10 @@
#include <inttypes.h>
-#include <gui/IGraphicBufferConsumer.h>
#include <gui/IGraphicBufferProducer.h>
#include <gui/Surface.h>
#include <media/ICrypto.h>
+#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
@@ -35,7 +35,6 @@
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
-#include <media/stagefright/PersistentSurface.h>
#include <media/stagefright/Utils.h>
namespace android {
@@ -325,10 +324,10 @@
const sp<ALooper> &looper,
const sp<AMessage> &format,
const sp<MediaSource> &source,
- const sp<IGraphicBufferConsumer> &consumer,
+ const sp<PersistentSurface> &persistentSurface,
uint32_t flags) {
- sp<MediaCodecSource> mediaSource =
- new MediaCodecSource(looper, format, source, consumer, flags);
+ sp<MediaCodecSource> mediaSource = new MediaCodecSource(
+ looper, format, source, persistentSurface, flags);
if (mediaSource->init() == OK) {
return mediaSource;
@@ -404,7 +403,7 @@
const sp<ALooper> &looper,
const sp<AMessage> &outputFormat,
const sp<MediaSource> &source,
- const sp<IGraphicBufferConsumer> &consumer,
+ const sp<PersistentSurface> &persistentSurface,
uint32_t flags)
: mLooper(looper),
mOutputFormat(outputFormat),
@@ -417,7 +416,7 @@
mSetEncoderFormat(false),
mEncoderFormat(0),
mEncoderDataSpace(0),
- mGraphicBufferConsumer(consumer),
+ mPersistentSurface(persistentSurface),
mInputBufferTimeOffsetUs(0),
mFirstSampleSystemTimeUs(-1ll),
mPausePending(false),
@@ -514,12 +513,11 @@
if (mFlags & FLAG_USE_SURFACE_INPUT) {
CHECK(mIsVideo);
- if (mGraphicBufferConsumer != NULL) {
+ if (mPersistentSurface != NULL) {
// When using persistent surface, we are only interested in the
// consumer, but have to use PersistentSurface as a wrapper to
// pass consumer over messages (similar to BufferProducerWrapper)
- err = mEncoder->setInputSurface(
- new PersistentSurface(NULL, mGraphicBufferConsumer));
+ err = mEncoder->setInputSurface(mPersistentSurface);
} else {
err = mEncoder->createInputSurface(&mGraphicBufferProducer);
}
@@ -689,7 +687,7 @@
#endif // DEBUG_DRIFT_TIME
}
- sp<ABuffer> inbuf;
+ sp<MediaCodecBuffer> inbuf;
status_t err = mEncoder->getInputBuffer(bufferIndex, &inbuf);
if (err != OK || inbuf == NULL) {
mbuf->release();
@@ -851,7 +849,7 @@
break;
}
- sp<ABuffer> outbuf;
+ sp<MediaCodecBuffer> outbuf;
status_t err = mEncoder->getOutputBuffer(index, &outbuf);
if (err != OK || outbuf == NULL) {
signalEOS();
@@ -906,6 +904,7 @@
}
mbuf->meta_data()->setInt64(kKeyTime, timeUs);
} else {
+ mbuf->meta_data()->setInt64(kKeyTime, 0ll);
mbuf->meta_data()->setInt32(kKeyIsCodecConfig, true);
}
if (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME) {
@@ -1017,7 +1016,7 @@
CHECK(msg->findInt64("time-offset-us", &mInputBufferTimeOffsetUs));
// Propagate the timestamp offset to GraphicBufferSource.
- if (mIsVideo) {
+ if (mFlags & FLAG_USE_SURFACE_INPUT) {
sp<AMessage> params = new AMessage;
params->setInt64("time-offset-us", mInputBufferTimeOffsetUs);
err = mEncoder->setParameters(params);
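A minimal caller-side sketch of the new MediaCodecSource::Create() signature (not part of this patch; the looper/outputFormat variables, the NULL source, and the use of MediaCodec::CreatePersistentInputSurface() and FLAG_USE_SURFACE_INPUT are assumptions for illustration):

    // Caller obtains a PersistentSurface and passes it to the source directly.
    sp<PersistentSurface> persistentSurface = MediaCodec::CreatePersistentInputSurface();
    sp<MediaCodecSource> encoderSource = MediaCodecSource::Create(
            looper, outputFormat, /* source */ NULL, persistentSurface,
            MediaCodecSource::FLAG_USE_SURFACE_INPUT);
    // Internally the source now hands the PersistentSurface straight to
    // MediaCodec::setInputSurface() instead of re-wrapping an
    // IGraphicBufferConsumer on every start, as the hunk above shows.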
diff --git a/media/libstagefright/MediaExtractor.cpp b/media/libstagefright/MediaExtractor.cpp
index 92ce88c..f2fdbc9 100644
--- a/media/libstagefright/MediaExtractor.cpp
+++ b/media/libstagefright/MediaExtractor.cpp
@@ -94,7 +94,7 @@
sp<IMemory> mMemory;
sp<DataSource> mSource;
String8 mName;
- RemoteDataSource(const sp<DataSource> &source);
+ explicit RemoteDataSource(const sp<DataSource> &source);
DISALLOW_EVIL_CONSTRUCTORS(RemoteDataSource);
};
diff --git a/media/libstagefright/MediaSync.cpp b/media/libstagefright/MediaSync.cpp
index 6f2d868..0cf6fbf 100644
--- a/media/libstagefright/MediaSync.cpp
+++ b/media/libstagefright/MediaSync.cpp
@@ -478,59 +478,43 @@
CHECK(mAudioTrack != NULL);
uint32_t numFramesPlayed;
- int64_t numFramesPlayedAt;
+ int64_t numFramesPlayedAtUs;
AudioTimestamp ts;
- static const int64_t kStaleTimestamp100ms = 100000;
status_t res = mAudioTrack->getTimestamp(ts);
if (res == OK) {
// case 1: mixing audio tracks.
numFramesPlayed = ts.mPosition;
- numFramesPlayedAt =
- ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000;
- const int64_t timestampAge = nowUs - numFramesPlayedAt;
- if (timestampAge > kStaleTimestamp100ms) {
- // This is an audio FIXME.
- // getTimestamp returns a timestamp which may come from audio
- // mixing threads. After pausing, the MixerThread may go idle,
- // thus the mTime estimate may become stale. Assuming that the
- // MixerThread runs 20ms, with FastMixer at 5ms, the max latency
- // should be about 25ms with an average around 12ms (to be
- // verified). For safety we use 100ms.
- ALOGV("getTimestamp: returned stale timestamp nowUs(%lld) "
- "numFramesPlayedAt(%lld)",
- (long long)nowUs, (long long)numFramesPlayedAt);
- numFramesPlayedAt = nowUs - kStaleTimestamp100ms;
- }
+ numFramesPlayedAtUs = ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000;
//ALOGD("getTimestamp: OK %d %lld",
- // numFramesPlayed, (long long)numFramesPlayedAt);
+ // numFramesPlayed, (long long)numFramesPlayedAtUs);
} else if (res == WOULD_BLOCK) {
// case 2: transitory state on start of a new track
numFramesPlayed = 0;
- numFramesPlayedAt = nowUs;
+ numFramesPlayedAtUs = nowUs;
//ALOGD("getTimestamp: WOULD_BLOCK %d %lld",
- // numFramesPlayed, (long long)numFramesPlayedAt);
+ // numFramesPlayed, (long long)numFramesPlayedAtUs);
} else {
// case 3: transitory at new track or audio fast tracks.
res = mAudioTrack->getPosition(&numFramesPlayed);
CHECK_EQ(res, (status_t)OK);
- numFramesPlayedAt = nowUs;
- numFramesPlayedAt += 1000LL * mAudioTrack->latency() / 2; /* XXX */
- //ALOGD("getPosition: %d %lld", numFramesPlayed, (long long)numFramesPlayedAt);
+ numFramesPlayedAtUs = nowUs;
+ numFramesPlayedAtUs += 1000LL * mAudioTrack->latency() / 2; /* XXX */
+ //ALOGD("getPosition: %d %lld", numFramesPlayed, (long long)numFramesPlayedAtUs);
}
//can't be negative until 12.4 hrs, test.
//CHECK_EQ(numFramesPlayed & (1 << 31), 0);
int64_t durationUs =
getDurationIfPlayedAtNativeSampleRate_l(numFramesPlayed)
- + nowUs - numFramesPlayedAt;
+ + nowUs - numFramesPlayedAtUs;
if (durationUs < 0) {
// Occurs when numFramesPlayed position is very small and the following:
// (1) In case 1, the time nowUs is computed before getTimestamp() is
- // called and numFramesPlayedAt is greater than nowUs by time more
+ // called and numFramesPlayedAtUs is greater than nowUs by time more
// than numFramesPlayed.
// (2) In case 3, using getPosition and adding mAudioTrack->latency()
- // to numFramesPlayedAt, by a time amount greater than
+ // to numFramesPlayedAtUs, by a time amount greater than
// numFramesPlayed.
//
// Both of these are transitory conditions.
@@ -541,7 +525,7 @@
ALOGV("getPlayedOutAudioDurationMedia_l(%lld) nowUs(%lld) frames(%u) "
"framesAt(%lld)",
(long long)durationUs, (long long)nowUs, numFramesPlayed,
- (long long)numFramesPlayedAt);
+ (long long)numFramesPlayedAtUs);
return durationUs;
}
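With the 100 ms staleness clamp removed, the played-out duration reduces to durationUs = getDurationIfPlayedAtNativeSampleRate_l(numFramesPlayed) + (nowUs - numFramesPlayedAtUs). As an illustrative example with assumed values: for a 48 kHz track, numFramesPlayed = 48000 corresponds to 1,000,000 us; if the AudioTrack timestamp was captured 5,000 us before nowUs, the function returns 1,005,000 us. Transitory negative results are still handled by the durationUs < 0 branch shown above.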
diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp
index 276d731..e3270ed 100644
--- a/media/libstagefright/NuMediaExtractor.cpp
+++ b/media/libstagefright/NuMediaExtractor.cpp
@@ -51,7 +51,8 @@
for (size_t i = 0; i < mSelectedTracks.size(); ++i) {
TrackInfo *info = &mSelectedTracks.editItemAt(i);
- CHECK_EQ((status_t)OK, info->mSource->stop());
+ status_t err = info->mSource->stop();
+ ALOGE_IF(err != OK, "error %d stopping track %zu", err, i);
}
mSelectedTracks.clear();
@@ -66,7 +67,7 @@
const KeyedVector<String8, String8> *headers) {
Mutex::Autolock autoLock(mLock);
- if (mImpl != NULL) {
+ if (mImpl != NULL || path == NULL) {
return -EINVAL;
}
diff --git a/media/libstagefright/OMXClient.cpp b/media/libstagefright/OMXClient.cpp
index e40dbcf..a29aff0 100644
--- a/media/libstagefright/OMXClient.cpp
+++ b/media/libstagefright/OMXClient.cpp
@@ -24,506 +24,18 @@
#include <utils/Log.h>
#include <binder/IServiceManager.h>
-#include <media/IMediaPlayerService.h>
#include <media/IMediaCodecService.h>
-#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/OMXClient.h>
-#include <cutils/properties.h>
-#include <utils/KeyedVector.h>
#include "include/OMX.h"
namespace android {
-static bool sCodecProcessEnabled = true;
-
-struct MuxOMX : public IOMX {
- MuxOMX(const sp<IOMX> &mediaServerOMX, const sp<IOMX> &mediaCodecOMX);
- virtual ~MuxOMX();
-
- // Nobody should be calling this. In case someone does anyway, just
- // return the media server IOMX.
- // TODO: return NULL
- virtual IBinder *onAsBinder() {
- ALOGE("MuxOMX::onAsBinder should not be called");
- return IInterface::asBinder(mMediaServerOMX).get();
- }
-
- virtual bool livesLocally(node_id node, pid_t pid);
-
- virtual status_t listNodes(List<ComponentInfo> *list);
-
- virtual status_t allocateNode(
- const char *name, const sp<IOMXObserver> &observer,
- sp<IBinder> *nodeBinder,
- node_id *node);
-
- virtual status_t freeNode(node_id node);
-
- virtual status_t sendCommand(
- node_id node, OMX_COMMANDTYPE cmd, OMX_S32 param);
-
- virtual status_t getParameter(
- node_id node, OMX_INDEXTYPE index,
- void *params, size_t size);
-
- virtual status_t setParameter(
- node_id node, OMX_INDEXTYPE index,
- const void *params, size_t size);
-
- virtual status_t getConfig(
- node_id node, OMX_INDEXTYPE index,
- void *params, size_t size);
-
- virtual status_t setConfig(
- node_id node, OMX_INDEXTYPE index,
- const void *params, size_t size);
-
- virtual status_t getState(
- node_id node, OMX_STATETYPE* state);
-
- virtual status_t storeMetaDataInBuffers(
- node_id node, OMX_U32 port_index, OMX_BOOL enable, MetadataBufferType *type);
-
- virtual status_t prepareForAdaptivePlayback(
- node_id node, OMX_U32 port_index, OMX_BOOL enable,
- OMX_U32 maxFrameWidth, OMX_U32 maxFrameHeight);
-
- virtual status_t configureVideoTunnelMode(
- node_id node, OMX_U32 portIndex, OMX_BOOL tunneled,
- OMX_U32 audioHwSync, native_handle_t **sidebandHandle);
-
- virtual status_t enableNativeBuffers(
- node_id node, OMX_U32 port_index, OMX_BOOL graphic, OMX_BOOL enable);
-
- virtual status_t getGraphicBufferUsage(
- node_id node, OMX_U32 port_index, OMX_U32* usage);
-
- virtual status_t useBuffer(
- node_id node, OMX_U32 port_index, const sp<IMemory> &params,
- buffer_id *buffer, OMX_U32 allottedSize);
-
- virtual status_t useGraphicBuffer(
- node_id node, OMX_U32 port_index,
- const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer);
-
- virtual status_t updateGraphicBufferInMeta(
- node_id node, OMX_U32 port_index,
- const sp<GraphicBuffer> &graphicBuffer, buffer_id buffer);
-
- virtual status_t updateNativeHandleInMeta(
- node_id node, OMX_U32 port_index,
- const sp<NativeHandle> &nativeHandle, buffer_id buffer);
-
- virtual status_t createInputSurface(
- node_id node, OMX_U32 port_index, android_dataspace dataSpace,
- sp<IGraphicBufferProducer> *bufferProducer, MetadataBufferType *type);
-
- virtual status_t createPersistentInputSurface(
- sp<IGraphicBufferProducer> *bufferProducer,
- sp<IGraphicBufferConsumer> *bufferConsumer);
-
- virtual status_t setInputSurface(
- node_id node, OMX_U32 port_index,
- const sp<IGraphicBufferConsumer> &bufferConsumer, MetadataBufferType *type);
-
- virtual status_t signalEndOfInputStream(node_id node);
-
- virtual status_t allocateSecureBuffer(
- node_id node, OMX_U32 port_index, size_t size,
- buffer_id *buffer, void **buffer_data, sp<NativeHandle> *native_handle);
-
- virtual status_t allocateBufferWithBackup(
- node_id node, OMX_U32 port_index, const sp<IMemory> &params,
- buffer_id *buffer, OMX_U32 allottedSize);
-
- virtual status_t freeBuffer(
- node_id node, OMX_U32 port_index, buffer_id buffer);
-
- virtual status_t fillBuffer(node_id node, buffer_id buffer, int fenceFd);
-
- virtual status_t emptyBuffer(
- node_id node,
- buffer_id buffer,
- OMX_U32 range_offset, OMX_U32 range_length,
- OMX_U32 flags, OMX_TICKS timestamp, int fenceFd);
-
- virtual status_t getExtensionIndex(
- node_id node,
- const char *parameter_name,
- OMX_INDEXTYPE *index);
-
- virtual status_t setInternalOption(
- node_id node,
- OMX_U32 port_index,
- InternalOptionType type,
- const void *data,
- size_t size);
-
-private:
- mutable Mutex mLock;
-
- sp<IOMX> mMediaServerOMX;
- sp<IOMX> mMediaCodecOMX;
- sp<IOMX> mLocalOMX;
-
- typedef enum {
- LOCAL,
- MEDIAPROCESS,
- CODECPROCESS
- } node_location;
-
- KeyedVector<node_id, node_location> mNodeLocation;
-
- bool isLocalNode(node_id node) const;
- bool isLocalNode_l(node_id node) const;
- const sp<IOMX> &getOMX(node_id node) const;
- const sp<IOMX> &getOMX_l(node_id node) const;
-
- static node_location getPreferredCodecLocation(const char *name);
-
- DISALLOW_EVIL_CONSTRUCTORS(MuxOMX);
-};
-
-MuxOMX::MuxOMX(const sp<IOMX> &mediaServerOMX, const sp<IOMX> &mediaCodecOMX)
- : mMediaServerOMX(mediaServerOMX),
- mMediaCodecOMX(mediaCodecOMX) {
- ALOGI("MuxOMX ctor");
-}
-
-MuxOMX::~MuxOMX() {
-}
-
-bool MuxOMX::isLocalNode(node_id node) const {
- Mutex::Autolock autoLock(mLock);
-
- return isLocalNode_l(node);
-}
-
-bool MuxOMX::isLocalNode_l(node_id node) const {
- return mNodeLocation.valueFor(node) == LOCAL;
-}
-
-// static
-MuxOMX::node_location MuxOMX::getPreferredCodecLocation(const char *name) {
- if (sCodecProcessEnabled) {
- // all codecs go to codec process unless excluded using system property, in which case
- // all non-secure decoders, OMX.google.* codecs and encoders can go in the codec process
- // (non-OMX.google.* encoders can be excluded using system property.)
- if ((strcasestr(name, "decoder")
- && strcasestr(name, ".secure") != name + strlen(name) - 7)
- || (strcasestr(name, "encoder")
- && !property_get_bool("media.stagefright.legacyencoder", false))
- || !property_get_bool("media.stagefright.less-secure", false)
- || !strncasecmp(name, "OMX.google.", 11)) {
- return CODECPROCESS;
- }
- // everything else runs in the media server
- return MEDIAPROCESS;
- } else {
-#ifdef __LP64__
- // 64 bit processes always run OMX remote on MediaServer
- return MEDIAPROCESS;
-#else
- // 32 bit processes run only OMX.google.* components locally
- if (!strncasecmp(name, "OMX.google.", 11)) {
- return LOCAL;
- }
- return MEDIAPROCESS;
-#endif
- }
-}
-
-const sp<IOMX> &MuxOMX::getOMX(node_id node) const {
- Mutex::Autolock autoLock(mLock);
- return getOMX_l(node);
-}
-
-const sp<IOMX> &MuxOMX::getOMX_l(node_id node) const {
- node_location loc = mNodeLocation.valueFor(node);
- if (loc == LOCAL) {
- return mLocalOMX;
- } else if (loc == MEDIAPROCESS) {
- return mMediaServerOMX;
- } else if (loc == CODECPROCESS) {
- return mMediaCodecOMX;
- }
- ALOGE("Couldn't determine node location for node %d: %d, using local", node, loc);
- return mLocalOMX;
-}
-
-bool MuxOMX::livesLocally(node_id node, pid_t pid) {
- return getOMX(node)->livesLocally(node, pid);
-}
-
-status_t MuxOMX::listNodes(List<ComponentInfo> *list) {
- Mutex::Autolock autoLock(mLock);
-
- if (mLocalOMX == NULL) {
- mLocalOMX = new OMX;
- }
-
- return mLocalOMX->listNodes(list);
-}
-
-status_t MuxOMX::allocateNode(
- const char *name, const sp<IOMXObserver> &observer,
- sp<IBinder> *nodeBinder,
- node_id *node) {
- Mutex::Autolock autoLock(mLock);
-
- sp<IOMX> omx;
-
- node_location loc = getPreferredCodecLocation(name);
- if (loc == CODECPROCESS) {
- omx = mMediaCodecOMX;
- } else if (loc == MEDIAPROCESS) {
- omx = mMediaServerOMX;
- } else {
- if (mLocalOMX == NULL) {
- mLocalOMX = new OMX;
- }
- omx = mLocalOMX;
- }
-
- status_t err = omx->allocateNode(name, observer, nodeBinder, node);
- ALOGV("allocated node_id %x on %s OMX", *node, omx == mMediaCodecOMX ? "codecprocess" :
- omx == mMediaServerOMX ? "mediaserver" : "local");
-
-
- if (err != OK) {
- return err;
- }
-
- mNodeLocation.add(*node, loc);
-
- return OK;
-}
-
-status_t MuxOMX::freeNode(node_id node) {
- Mutex::Autolock autoLock(mLock);
-
- // exit if we have already freed the node
- if (mNodeLocation.indexOfKey(node) < 0) {
- ALOGD("MuxOMX::freeNode: node %d seems to be released already --- ignoring.", node);
- return OK;
- }
-
- status_t err = getOMX_l(node)->freeNode(node);
-
- if (err != OK) {
- return err;
- }
-
- mNodeLocation.removeItem(node);
-
- return OK;
-}
-
-status_t MuxOMX::sendCommand(
- node_id node, OMX_COMMANDTYPE cmd, OMX_S32 param) {
- return getOMX(node)->sendCommand(node, cmd, param);
-}
-
-status_t MuxOMX::getParameter(
- node_id node, OMX_INDEXTYPE index,
- void *params, size_t size) {
- return getOMX(node)->getParameter(node, index, params, size);
-}
-
-status_t MuxOMX::setParameter(
- node_id node, OMX_INDEXTYPE index,
- const void *params, size_t size) {
- return getOMX(node)->setParameter(node, index, params, size);
-}
-
-status_t MuxOMX::getConfig(
- node_id node, OMX_INDEXTYPE index,
- void *params, size_t size) {
- return getOMX(node)->getConfig(node, index, params, size);
-}
-
-status_t MuxOMX::setConfig(
- node_id node, OMX_INDEXTYPE index,
- const void *params, size_t size) {
- return getOMX(node)->setConfig(node, index, params, size);
-}
-
-status_t MuxOMX::getState(
- node_id node, OMX_STATETYPE* state) {
- return getOMX(node)->getState(node, state);
-}
-
-status_t MuxOMX::storeMetaDataInBuffers(
- node_id node, OMX_U32 port_index, OMX_BOOL enable, MetadataBufferType *type) {
- return getOMX(node)->storeMetaDataInBuffers(node, port_index, enable, type);
-}
-
-status_t MuxOMX::prepareForAdaptivePlayback(
- node_id node, OMX_U32 port_index, OMX_BOOL enable,
- OMX_U32 maxFrameWidth, OMX_U32 maxFrameHeight) {
- return getOMX(node)->prepareForAdaptivePlayback(
- node, port_index, enable, maxFrameWidth, maxFrameHeight);
-}
-
-status_t MuxOMX::configureVideoTunnelMode(
- node_id node, OMX_U32 portIndex, OMX_BOOL enable,
- OMX_U32 audioHwSync, native_handle_t **sidebandHandle) {
- return getOMX(node)->configureVideoTunnelMode(
- node, portIndex, enable, audioHwSync, sidebandHandle);
-}
-
-status_t MuxOMX::enableNativeBuffers(
- node_id node, OMX_U32 port_index, OMX_BOOL graphic, OMX_BOOL enable) {
- return getOMX(node)->enableNativeBuffers(node, port_index, graphic, enable);
-}
-
-status_t MuxOMX::getGraphicBufferUsage(
- node_id node, OMX_U32 port_index, OMX_U32* usage) {
- return getOMX(node)->getGraphicBufferUsage(node, port_index, usage);
-}
-
-status_t MuxOMX::useBuffer(
- node_id node, OMX_U32 port_index, const sp<IMemory> &params,
- buffer_id *buffer, OMX_U32 allottedSize) {
- return getOMX(node)->useBuffer(node, port_index, params, buffer, allottedSize);
-}
-
-status_t MuxOMX::useGraphicBuffer(
- node_id node, OMX_U32 port_index,
- const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer) {
- return getOMX(node)->useGraphicBuffer(
- node, port_index, graphicBuffer, buffer);
-}
-
-status_t MuxOMX::updateGraphicBufferInMeta(
- node_id node, OMX_U32 port_index,
- const sp<GraphicBuffer> &graphicBuffer, buffer_id buffer) {
- return getOMX(node)->updateGraphicBufferInMeta(
- node, port_index, graphicBuffer, buffer);
-}
-
-status_t MuxOMX::updateNativeHandleInMeta(
- node_id node, OMX_U32 port_index,
- const sp<NativeHandle> &nativeHandle, buffer_id buffer) {
- return getOMX(node)->updateNativeHandleInMeta(
- node, port_index, nativeHandle, buffer);
-}
-
-status_t MuxOMX::createInputSurface(
- node_id node, OMX_U32 port_index, android_dataspace dataSpace,
- sp<IGraphicBufferProducer> *bufferProducer, MetadataBufferType *type) {
- status_t err = getOMX(node)->createInputSurface(
- node, port_index, dataSpace, bufferProducer, type);
- return err;
-}
-
-status_t MuxOMX::createPersistentInputSurface(
- sp<IGraphicBufferProducer> *bufferProducer,
- sp<IGraphicBufferConsumer> *bufferConsumer) {
- sp<IOMX> omx;
- {
- Mutex::Autolock autoLock(mLock);
- if (property_get_bool("media.stagefright.legacyencoder", false)) {
- omx = mMediaServerOMX;
- } else {
- omx = mMediaCodecOMX;
- }
- }
- return omx->createPersistentInputSurface(
- bufferProducer, bufferConsumer);
-}
-
-status_t MuxOMX::setInputSurface(
- node_id node, OMX_U32 port_index,
- const sp<IGraphicBufferConsumer> &bufferConsumer, MetadataBufferType *type) {
- return getOMX(node)->setInputSurface(node, port_index, bufferConsumer, type);
-}
-
-status_t MuxOMX::signalEndOfInputStream(node_id node) {
- return getOMX(node)->signalEndOfInputStream(node);
-}
-
-status_t MuxOMX::allocateSecureBuffer(
- node_id node, OMX_U32 port_index, size_t size,
- buffer_id *buffer, void **buffer_data, sp<NativeHandle> *native_handle) {
- return getOMX(node)->allocateSecureBuffer(
- node, port_index, size, buffer, buffer_data, native_handle);
-}
-
-status_t MuxOMX::allocateBufferWithBackup(
- node_id node, OMX_U32 port_index, const sp<IMemory> &params,
- buffer_id *buffer, OMX_U32 allottedSize) {
- return getOMX(node)->allocateBufferWithBackup(
- node, port_index, params, buffer, allottedSize);
-}
-
-status_t MuxOMX::freeBuffer(
- node_id node, OMX_U32 port_index, buffer_id buffer) {
- return getOMX(node)->freeBuffer(node, port_index, buffer);
-}
-
-status_t MuxOMX::fillBuffer(node_id node, buffer_id buffer, int fenceFd) {
- return getOMX(node)->fillBuffer(node, buffer, fenceFd);
-}
-
-status_t MuxOMX::emptyBuffer(
- node_id node,
- buffer_id buffer,
- OMX_U32 range_offset, OMX_U32 range_length,
- OMX_U32 flags, OMX_TICKS timestamp, int fenceFd) {
- return getOMX(node)->emptyBuffer(
- node, buffer, range_offset, range_length, flags, timestamp, fenceFd);
-}
-
-status_t MuxOMX::getExtensionIndex(
- node_id node,
- const char *parameter_name,
- OMX_INDEXTYPE *index) {
- return getOMX(node)->getExtensionIndex(node, parameter_name, index);
-}
-
-status_t MuxOMX::setInternalOption(
- node_id node,
- OMX_U32 port_index,
- InternalOptionType type,
- const void *data,
- size_t size) {
- return getOMX(node)->setInternalOption(node, port_index, type, data, size);
-}
-
OMXClient::OMXClient() {
- char value[PROPERTY_VALUE_MAX];
- if (property_get("media.stagefright.codecremote", value, NULL)
- && (!strcmp("0", value) || !strcasecmp("false", value))) {
- sCodecProcessEnabled = false;
- }
}
status_t OMXClient::connect() {
sp<IServiceManager> sm = defaultServiceManager();
- sp<IBinder> playerbinder = sm->getService(String16("media.player"));
- sp<IMediaPlayerService> mediaservice = interface_cast<IMediaPlayerService>(playerbinder);
-
- if (mediaservice.get() == NULL) {
- ALOGE("Cannot obtain IMediaPlayerService");
- return NO_INIT;
- }
-
- sp<IOMX> mediaServerOMX = mediaservice->getOMX();
- if (mediaServerOMX.get() == NULL) {
- ALOGE("Cannot obtain mediaserver IOMX");
- return NO_INIT;
- }
-
- // If we don't want to use the codec process, and the media server OMX
- // is local, use it directly instead of going through MuxOMX
- if (!sCodecProcessEnabled &&
- mediaServerOMX->livesLocally(0 /* node */, getpid())) {
- mOMX = mediaServerOMX;
- return OK;
- }
-
sp<IBinder> codecbinder = sm->getService(String16("media.codec"));
sp<IMediaCodecService> codecservice = interface_cast<IMediaCodecService>(codecbinder);
@@ -532,22 +44,17 @@
return NO_INIT;
}
- sp<IOMX> mediaCodecOMX = codecservice->getOMX();
- if (mediaCodecOMX.get() == NULL) {
+ mOMX = codecservice->getOMX();
+ if (mOMX.get() == NULL) {
ALOGE("Cannot obtain mediacodec IOMX");
return NO_INIT;
}
- mOMX = new MuxOMX(mediaServerOMX, mediaCodecOMX);
-
return OK;
}
void OMXClient::disconnect() {
- if (mOMX.get() != NULL) {
- mOMX.clear();
- mOMX = NULL;
- }
+ mOMX.clear();
}
} // namespace android
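Condensed view of OMXClient::connect() after this change (assembled from the hunks above, not a verbatim copy; the error log in the first branch is elided here):

    status_t OMXClient::connect() {
        sp<IServiceManager> sm = defaultServiceManager();
        sp<IBinder> codecbinder = sm->getService(String16("media.codec"));
        sp<IMediaCodecService> codecservice =
                interface_cast<IMediaCodecService>(codecbinder);
        if (codecservice.get() == NULL) {
            // error path unchanged by this patch
            return NO_INIT;
        }
        // Always use the IOMX hosted by the media.codec process; the
        // media.player fallback and the MuxOMX dispatcher are gone.
        mOMX = codecservice->getOMX();
        if (mOMX.get() == NULL) {
            ALOGE("Cannot obtain mediacodec IOMX");
            return NO_INIT;
        }
        return OK;
    }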
diff --git a/media/libstagefright/SimpleDecodingSource.cpp b/media/libstagefright/SimpleDecodingSource.cpp
index 2503a32..4c4d93e 100644
--- a/media/libstagefright/SimpleDecodingSource.cpp
+++ b/media/libstagefright/SimpleDecodingSource.cpp
@@ -17,8 +17,7 @@
#include <gui/Surface.h>
#include <media/ICrypto.h>
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/MediaDefs.h>
+#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AUtils.h>
@@ -75,10 +74,7 @@
err = codec->getOutputFormat(&format);
}
if (err == OK) {
- return new SimpleDecodingSource(codec, source, looper,
- surface != NULL,
- strcmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS) == 0,
- format);
+ return new SimpleDecodingSource(codec, source, looper, surface != NULL, format);
}
ALOGD("Failed to configure codec '%s'", componentName.c_str());
@@ -94,12 +90,11 @@
SimpleDecodingSource::SimpleDecodingSource(
const sp<MediaCodec> &codec, const sp<IMediaSource> &source, const sp<ALooper> &looper,
- bool usingSurface, bool isVorbis, const sp<AMessage> &format)
+ bool usingSurface, const sp<AMessage> &format)
: mCodec(codec),
mSource(source),
mLooper(looper),
mUsingSurface(usingSurface),
- mIsVorbis(isVorbis),
mProtectedState(format) {
mCodec->getName(&mComponentName);
}
@@ -230,7 +225,7 @@
break;
}
- sp<ABuffer> in_buffer;
+ sp<MediaCodecBuffer> in_buffer;
if (res == OK) {
res = mCodec->getInputBuffer(in_ix, &in_buffer);
}
@@ -285,25 +280,16 @@
if (in_buf != NULL) {
int64_t timestampUs = 0;
CHECK(in_buf->meta_data()->findInt64(kKeyTime, &timestampUs));
- if (in_buf->range_length() + (mIsVorbis ? 4 : 0) > in_buffer->capacity()) {
+ if (in_buf->range_length() > in_buffer->capacity()) {
ALOGW("'%s' received %zu input bytes for buffer of size %zu",
mComponentName.c_str(),
- in_buf->range_length() + (mIsVorbis ? 4 : 0), in_buffer->capacity());
+ in_buf->range_length(), in_buffer->capacity());
}
- size_t cpLen = min(in_buf->range_length(), in_buffer->capacity());
memcpy(in_buffer->base(), (uint8_t *)in_buf->data() + in_buf->range_offset(),
- cpLen );
-
- if (mIsVorbis) {
- int32_t numPageSamples;
- if (!in_buf->meta_data()->findInt32(kKeyValidSamples, &numPageSamples)) {
- numPageSamples = -1;
- }
- memcpy(in_buffer->base() + cpLen, &numPageSamples, sizeof(numPageSamples));
- }
+ min(in_buf->range_length(), in_buffer->capacity()));
res = mCodec->queueInputBuffer(
- in_ix, 0 /* offset */, in_buf->range_length() + (mIsVorbis ? 4 : 0),
+ in_ix, 0 /* offset */, in_buf->range_length(),
timestampUs, 0 /* flags */);
if (res != OK) {
ALOGI("[%s] failed to queue input buffer #%zu", mComponentName.c_str(), in_ix);
@@ -344,7 +330,7 @@
return res;
}
- sp<ABuffer> out_buffer;
+ sp<MediaCodecBuffer> out_buffer;
res = mCodec->getOutputBuffer(out_ix, &out_buffer);
if (res != OK) {
ALOGW("[%s] could not get output buffer #%zu",
diff --git a/media/libstagefright/SkipCutBuffer.cpp b/media/libstagefright/SkipCutBuffer.cpp
index d30be88..ee9016d 100644
--- a/media/libstagefright/SkipCutBuffer.cpp
+++ b/media/libstagefright/SkipCutBuffer.cpp
@@ -88,7 +88,8 @@
buffer->set_range(0, copied);
}
-void SkipCutBuffer::submit(const sp<ABuffer>& buffer) {
+template <typename T>
+void SkipCutBuffer::submitInternal(const sp<T>& buffer) {
if (mCutBuffer == NULL) {
// passthrough mode
return;
@@ -120,6 +121,14 @@
buffer->setRange(0, copied);
}
+void SkipCutBuffer::submit(const sp<ABuffer>& buffer) {
+ submitInternal(buffer);
+}
+
+void SkipCutBuffer::submit(const sp<MediaCodecBuffer>& buffer) {
+ submitInternal(buffer);
+}
+
void SkipCutBuffer::clear() {
mWriteHead = mReadHead = 0;
mFrontPadding = mSkip;
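The submit() change keeps the public, non-templated overloads while a single private template carries the shared logic; the same pattern is applied to IsIDR() in avc_utils.cpp below. A self-contained sketch of the idiom with hypothetical types (std::shared_ptr stands in for sp<>):

    #include <cstddef>
    #include <memory>

    struct BufA { void setRange(size_t, size_t) {} };   // stand-in for ABuffer
    struct BufB { void setRange(size_t, size_t) {} };   // stand-in for MediaCodecBuffer

    class Submitter {
    public:
        // Public overloads keep existing call sites and the exported API unchanged.
        void submit(const std::shared_ptr<BufA> &buf) { submitInternal(buf); }
        void submit(const std::shared_ptr<BufB> &buf) { submitInternal(buf); }
    private:
        // One template holds the shared trimming logic for both buffer types.
        template <typename T>
        void submitInternal(const std::shared_ptr<T> &buf) { buf->setRange(0, 0); }
    };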
diff --git a/media/libstagefright/StagefrightMetadataRetriever.cpp b/media/libstagefright/StagefrightMetadataRetriever.cpp
index be5067d..d8fec5c 100644
--- a/media/libstagefright/StagefrightMetadataRetriever.cpp
+++ b/media/libstagefright/StagefrightMetadataRetriever.cpp
@@ -27,8 +27,8 @@
#include <media/ICrypto.h>
#include <media/IMediaHTTPService.h>
+#include <media/MediaCodecBuffer.h>
-#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/ColorConverter.h>
@@ -223,7 +223,7 @@
return NULL;
}
- Vector<sp<ABuffer> > inputBuffers;
+ Vector<sp<MediaCodecBuffer> > inputBuffers;
err = decoder->getInputBuffers(&inputBuffers);
if (err != OK) {
ALOGW("failed to get input buffers: %d (%s)", err, asString(err));
@@ -232,7 +232,7 @@
return NULL;
}
- Vector<sp<ABuffer> > outputBuffers;
+ Vector<sp<MediaCodecBuffer> > outputBuffers;
err = decoder->getOutputBuffers(&outputBuffers);
if (err != OK) {
ALOGW("failed to get output buffers: %d (%s)", err, asString(err));
@@ -264,7 +264,7 @@
size_t inputIndex = -1;
int64_t ptsUs = 0ll;
uint32_t flags = 0;
- sp<ABuffer> codecBuffer = NULL;
+ sp<MediaCodecBuffer> codecBuffer = NULL;
while (haveMoreInputs) {
err = decoder->dequeueInputBuffer(&inputIndex, kBufferTimeOutUs);
@@ -376,7 +376,7 @@
}
ALOGV("successfully decoded video frame.");
- sp<ABuffer> videoFrameBuffer = outputBuffers.itemAt(index);
+ sp<MediaCodecBuffer> videoFrameBuffer = outputBuffers.itemAt(index);
if (thumbNailTime >= 0) {
if (timeUs != thumbNailTime) {
@@ -418,6 +418,22 @@
&& trackMeta->findInt32(kKeySARHeight, &sarHeight)
&& sarHeight != 0) {
frame->mDisplayWidth = (frame->mDisplayWidth * sarWidth) / sarHeight;
+ } else {
+ int32_t width, height;
+ if (trackMeta->findInt32(kKeyDisplayWidth, &width)
+ && trackMeta->findInt32(kKeyDisplayHeight, &height)
+ && frame->mDisplayWidth > 0 && frame->mDisplayHeight > 0
+ && width > 0 && height > 0) {
+ if (frame->mDisplayHeight * (int64_t)width / height > (int64_t)frame->mDisplayWidth) {
+ frame->mDisplayHeight =
+ (int32_t)(height * (int64_t)frame->mDisplayWidth / width);
+ } else {
+ frame->mDisplayWidth =
+ (int32_t)(frame->mDisplayHeight * (int64_t)width / height);
+ }
+ ALOGV("thumbNail width and height are overridden to %d x %d",
+ frame->mDisplayWidth, frame->mDisplayHeight);
+ }
}
int32_t srcFormat;
@@ -754,9 +770,9 @@
if (numTracks == 1) {
const char *fileMIME;
- CHECK(meta->findCString(kKeyMIMEType, &fileMIME));
- if (!strcasecmp(fileMIME, "video/x-matroska")) {
+ if (meta->findCString(kKeyMIMEType, &fileMIME) &&
+ !strcasecmp(fileMIME, "video/x-matroska")) {
sp<MetaData> trackMeta = mExtractor->getTrackMetaData(0);
const char *trackMIME;
CHECK(trackMeta->findCString(kKeyMIMEType, &trackMIME));
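A worked example of the new display-size override (values assumed for illustration): if the decoded frame reports mDisplayWidth x mDisplayHeight = 720 x 480 and the track metadata carries kKeyDisplayWidth/kKeyDisplayHeight = 16 x 9, then 480 * 16 / 9 = 853 > 720, so the first branch fires and mDisplayHeight becomes 9 * 720 / 16 = 405; the thumbnail is reported as 720 x 405, matching the track's 16:9 aspect while staying within the decoded frame.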
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 36be7a0..c593eb5 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -41,9 +41,9 @@
#include <media/stagefright/MediaDefs.h>
#include <media/AudioSystem.h>
#include <media/MediaPlayerInterface.h>
-#include <hardware/audio.h>
#include <media/stagefright/Utils.h>
#include <media/AudioParameter.h>
+#include <system/audio.h>
namespace android {
@@ -637,6 +637,11 @@
msg->setInt32("track-id", trackID);
}
+ const char *lang;
+ if (meta->findCString(kKeyMediaLanguage, &lang)) {
+ msg->setString("language", lang);
+ }
+
if (!strncasecmp("video/", mime, 6)) {
int32_t width, height;
if (!meta->findInt32(kKeyWidth, &width)
@@ -647,6 +652,13 @@
msg->setInt32("width", width);
msg->setInt32("height", height);
+ int32_t displayWidth, displayHeight;
+ if (meta->findInt32(kKeyDisplayWidth, &displayWidth)
+ && meta->findInt32(kKeyDisplayHeight, &displayHeight)) {
+ msg->setInt32("display-width", displayWidth);
+ msg->setInt32("display-height", displayHeight);
+ }
+
int32_t sarWidth, sarHeight;
if (meta->findInt32(kKeySARWidth, &sarWidth)
&& meta->findInt32(kKeySARHeight, &sarHeight)) {
@@ -1273,6 +1285,11 @@
meta->setInt32(kKeyMaxBitRate, maxBitrate);
}
+ AString lang;
+ if (msg->findString("language", &lang)) {
+ meta->setCString(kKeyMediaLanguage, lang.c_str());
+ }
+
if (mime.startsWith("video/")) {
int32_t width;
int32_t height;
@@ -1290,6 +1307,13 @@
meta->setInt32(kKeySARHeight, sarHeight);
}
+ int32_t displayWidth, displayHeight;
+ if (msg->findInt32("display-width", &displayWidth)
+ && msg->findInt32("display-height", &displayHeight)) {
+ meta->setInt32(kKeyDisplayWidth, displayWidth);
+ meta->setInt32(kKeyDisplayHeight, displayHeight);
+ }
+
int32_t colorFormat;
if (msg->findInt32("color-format", &colorFormat)) {
meta->setInt32(kKeyColorFormat, colorFormat);
@@ -1505,6 +1529,7 @@
{ MEDIA_MIMETYPE_AUDIO_AAC, AUDIO_FORMAT_AAC },
{ MEDIA_MIMETYPE_AUDIO_VORBIS, AUDIO_FORMAT_VORBIS },
{ MEDIA_MIMETYPE_AUDIO_OPUS, AUDIO_FORMAT_OPUS},
+ { MEDIA_MIMETYPE_AUDIO_AC3, AUDIO_FORMAT_AC3},
{ 0, AUDIO_FORMAT_INVALID }
};
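Hedged illustration (not from the patch; the values and variable names are made up) of what the new keys buy: display dimensions and language now survive a MetaData -> AMessage -> MetaData round trip.

    sp<MetaData> meta = new MetaData;
    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
    meta->setInt32(kKeyWidth, 1920);
    meta->setInt32(kKeyHeight, 1080);
    meta->setInt32(kKeyDisplayWidth, 1440);
    meta->setInt32(kKeyDisplayHeight, 1080);
    meta->setCString(kKeyMediaLanguage, "eng");

    sp<AMessage> msg;
    convertMetaDataToMessage(meta, &msg);    // now also sets "display-width",
                                             // "display-height" and "language"
    sp<MetaData> restored = new MetaData;
    convertMessageToMetaData(msg, restored); // restores kKeyDisplayWidth/Height
                                             // and kKeyMediaLanguage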
diff --git a/media/libstagefright/avc_utils.cpp b/media/libstagefright/avc_utils.cpp
index 0396dc6..9f15cf7 100644
--- a/media/libstagefright/avc_utils.cpp
+++ b/media/libstagefright/avc_utils.cpp
@@ -447,7 +447,8 @@
return meta;
}
-bool IsIDR(const sp<ABuffer> &buffer) {
+template <typename T>
+bool IsIDRInternal(const sp<T> &buffer) {
const uint8_t *data = buffer->data();
size_t size = buffer->size();
@@ -469,6 +470,14 @@
return foundIDR;
}
+bool IsIDR(const sp<ABuffer> &buffer) {
+ return IsIDRInternal(buffer);
+}
+
+bool IsIDR(const sp<MediaCodecBuffer> &buffer) {
+ return IsIDRInternal(buffer);
+}
+
bool IsAVCReferenceFrame(const sp<ABuffer> &accessUnit) {
const uint8_t *data = accessUnit->data();
size_t size = accessUnit->size();
diff --git a/media/libstagefright/codec2/Android.mk b/media/libstagefright/codec2/Android.mk
new file mode 100644
index 0000000..a463205
--- /dev/null
+++ b/media/libstagefright/codec2/Android.mk
@@ -0,0 +1,20 @@
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ C2.cpp \
+
+LOCAL_C_INCLUDES += \
+ $(TOP)/frameworks/av/media/libstagefright/codec2/include \
+ $(TOP)/frameworks/native/include/media/hardware \
+
+LOCAL_MODULE:= libstagefright_codec2
+LOCAL_CFLAGS += -Werror -Wall
+LOCAL_CLANG := true
+LOCAL_SANITIZE := unsigned-integer-overflow signed-integer-overflow
+
+include $(BUILD_SHARED_LIBRARY)
+
+################################################################################
+
+include $(call all-makefiles-under,$(LOCAL_PATH))
diff --git a/media/libstagefright/codec2/C2.cpp b/media/libstagefright/codec2/C2.cpp
new file mode 100644
index 0000000..a51b073
--- /dev/null
+++ b/media/libstagefright/codec2/C2.cpp
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <C2.h>
+#include <C2Buffer.h>
+#include <C2Component.h>
+#include <C2Config.h>
+#include <C2Param.h>
+#include <C2ParamDef.h>
+#include <C2Work.h>
+
+namespace android {
+
+/**
+ * There is nothing here yet. This library is built to see what symbols and methods get
+ * defined as part of the API include files.
+ *
+ * Going forward, the Codec2 library will contain utility methods that are useful for
+ * Codec2 clients.
+ */
+
+} // namespace android
+
diff --git a/media/libstagefright/codec2/include/C2.h b/media/libstagefright/codec2/include/C2.h
new file mode 100644
index 0000000..7d00a03
--- /dev/null
+++ b/media/libstagefright/codec2/include/C2.h
@@ -0,0 +1,287 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef C2_H_
+#define C2_H_
+
+#include <string>
+#include <vector>
+#include <list>
+
+#ifdef __ANDROID__
+
+#include <utils/Errors.h> // for status_t
+#include <utils/Timers.h> // for nsecs_t
+
+namespace android {
+
+#else
+
+#include <errno.h>
+typedef int64_t nsecs_t;
+
+enum {
+ GRALLOC_USAGE_SW_READ_OFTEN,
+ GRALLOC_USAGE_RENDERSCRIPT,
+ GRALLOC_USAGE_HW_TEXTURE,
+ GRALLOC_USAGE_HW_COMPOSER,
+ GRALLOC_USAGE_HW_VIDEO_ENCODER,
+ GRALLOC_USAGE_PROTECTED,
+ GRALLOC_USAGE_SW_WRITE_OFTEN,
+ GRALLOC_USAGE_HW_RENDER,
+};
+
+#endif
+
+/** \mainpage Codec2
+ *
+ * Codec2 is a frame-based data processing API used by android.
+ *
+ * The framework accesses components via the \ref API.
+ */
+
+/** \ingroup API
+ *
+ * The Codec2 API defines the operation of data processing components and their interaction with
+ * the rest of the system.
+ *
+ * Coding Conventions
+ *
+ * Mitigating Binary Compatibility.
+ *
+ * While full binary compatibility is not a goal of the API (due to our use of STL), we try to
+ * mitigate binary breaks by adhering to the following conventions:
+ *
+ * - at most one vtable with placeholder virtual methods
+ * - all optional/placeholder virtual methods returning a status_t, with C2_NOT_IMPLEMENTED not
+ * requiring any update to input/output arguments.
+ * - limiting symbol export of inline methods
+ * - use of pimpl (or shared-pimpl)
+ *
+ * Naming
+ *
+ * - all classes and types prefix with C2
+ * - classes for internal use prefix with _C2
+ * - enum values in global namespace prefix with C2_ all caps
+ * - enum values inside classes have no C2_ prefix as class already has it
+ * - supporting two kinds of enum naming: all-caps and kCamelCase
+ * \todo revisit kCamelCase for param-type
+ *
+ * Aspects
+ *
+ * Aspects define certain common behavior across a group of objects.
+ * - classes whose name matches _C2.*Aspect
+ * - only protected constructors
+ * - no destructor and copiable
+ * - all methods are inline or static (this is the opposite of the interface paradigm where all
+ *   methods are virtual, which would not work due to the at most one vtable rule.)
+ * - only private variables (this prevents subclasses interfering with the aspects.)
+ */
+
+/// \defgroup types Common Types
+/// @{
+
+/**
+ * C2String: basic string implementation
+ */
+typedef std::string C2String;
+typedef const char *C2StringLiteral;
+
+/**
+ * C2Error: status codes used.
+ */
+typedef int32_t C2Error;
+enum {
+#ifndef __ANDROID__
+ OK = 0,
+ BAD_VALUE = -EINVAL,
+ BAD_INDEX = -EOVERFLOW,
+ UNKNOWN_TRANSACTION = -EBADMSG,
+ ALREADY_EXISTS = -EEXIST,
+ NAME_NOT_FOUND = -ENOENT,
+ INVALID_OPERATION = -ENOSYS,
+ NO_MEMORY = -ENOMEM,
+ PERMISSION_DENIED = -EPERM,
+ TIMED_OUT = -ETIMEDOUT,
+ UNKNOWN_ERROR = -EINVAL,
+#endif
+
+ C2_OK = OK, ///< operation completed successfully
+
+ // bad input
+ C2_BAD_VALUE = BAD_VALUE, ///< argument has invalid value (user error)
+ C2_BAD_INDEX = BAD_INDEX, ///< argument uses invalid index (user error)
+ C2_UNSUPPORTED = UNKNOWN_TRANSACTION, ///< argument/index is valid but not supported \todo is this really BAD_INDEX/VALUE?
+
+ // bad sequencing of events
+ C2_DUPLICATE = ALREADY_EXISTS, ///< object already exists
+ C2_NOT_FOUND = NAME_NOT_FOUND, ///< object not found
+ C2_BAD_STATE = INVALID_OPERATION, ///< operation is not permitted in the current state
+
+ // bad environment
+ C2_NO_MEMORY = NO_MEMORY, ///< not enough memory to complete operation
+ C2_NO_PERMISSION = PERMISSION_DENIED, ///< missing permission to complete operation
+ C2_TIMED_OUT = TIMED_OUT, ///< operation did not complete within timeout
+
+ // bad versioning
+ C2_NOT_IMPLEMENTED = UNKNOWN_TRANSACTION, ///< operation is not implemented (optional only) \todo for now reuse error code
+
+ // unknown fatal
+ C2_CORRUPTED = UNKNOWN_ERROR, ///< some unexpected error prevented the operation
+};
+
+/// @}
+
+/// \defgroup utils Utilities
+/// @{
+
+#define C2_DO_NOT_COPY(type, args...) \
+ type args& operator=(const type args&) = delete; \
+ type(const type args&) = delete; \
+
+#define C2_PURE __attribute__((pure))
+#define C2_CONST __attribute__((const))
+#define C2_HIDE __attribute__((visibility("hidden")))
+#define C2_INTERNAL __attribute__((internal_linkage))
+
+#define DEFINE_OTHER_COMPARISON_OPERATORS(type) \
+ inline bool operator!=(const type &other) { return !(*this == other); } \
+ inline bool operator<=(const type &other) { return (*this == other) || (*this < other); } \
+ inline bool operator>=(const type &other) { return !(*this < other); } \
+ inline bool operator>(const type &other) { return !(*this < other) && !(*this == other); }
+
+#define DEFINE_FIELD_BASED_COMPARISON_OPERATORS(type, field) \
+ inline bool operator<(const type &other) const { return field < other.field; } \
+ inline bool operator==(const type &other) const { return field == other.field; } \
+ DEFINE_OTHER_COMPARISON_OPERATORS(type)
+
+/// \cond INTERNAL
+
+/// \defgroup utils_internal
+/// @{
+
+template<typename... T> struct c2_types;
+
+/** specialization for a single type */
+template<typename T>
+struct c2_types<T> {
+ typedef typename std::decay<T>::type wide_type;
+ typedef wide_type narrow_type;
+ typedef wide_type mintype;
+};
+
+/** specialization for two types */
+template<typename T, typename U>
+struct c2_types<T, U> {
+ static_assert(std::is_floating_point<T>::value == std::is_floating_point<U>::value,
+ "mixing floating point and non-floating point types is disallowed");
+ static_assert(std::is_signed<T>::value == std::is_signed<U>::value,
+ "mixing signed and unsigned types is disallowed");
+
+ typedef typename std::decay<
+ decltype(true ? std::declval<T>() : std::declval<U>())>::type wide_type;
+ typedef typename std::decay<
+ typename std::conditional<sizeof(T) < sizeof(U), T, U>::type>::type narrow_type;
+ typedef typename std::conditional<
+ std::is_signed<T>::value, wide_type, narrow_type>::type mintype;
+};
+
+/// @}
+
+/// \endcond
+
+/**
+ * Type support utility class. Only supports similar classes, such as:
+ * - all floating point
+ * - all unsigned/all signed
+ * - all pointer
+ */
+template<typename T, typename U, typename... V>
+struct c2_types<T, U, V...> {
+ /** Common type that accommodates all template parameter types. */
+ typedef typename c2_types<typename c2_types<T, U>::wide_type, V...>::wide_type wide_type;
+ /** Narrowest type of the template parameter types. */
+ typedef typename c2_types<typename c2_types<T, U>::narrow_type, V...>::narrow_type narrow_type;
+ /** Type that accommodates the minimum value for any input for the template parameter types. */
+ typedef typename c2_types<typename c2_types<T, U>::mintype, V...>::mintype mintype;
+};
+
+/**
+ * \ingroup utils_internal
+ * specialization for two values */
+template<typename T, typename U>
+inline constexpr typename c2_types<T, U>::wide_type c2_max(const T a, const U b) {
+ typedef typename c2_types<T, U>::wide_type wide_type;
+ return ({ wide_type a_(a), b_(b); a_ > b_ ? a_ : b_; });
+}
+
+/**
+ * Finds the maximum value of a list of "similarly typed" values.
+ *
+ * This is an extension to std::max where the types do not have to be identical, and the smallest
+ * resulting type is used that accommodates the argument types.
+ *
+ * \note Value types must be similar, e.g. all floating point, all pointers, all signed, or all
+ * unsigned.
+ *
+ * @return the largest of the input arguments.
+ */
+template<typename T, typename U, typename... V>
+constexpr typename c2_types<T, U, V...>::wide_type c2_max(const T a, const U b, const V ... c) {
+ typedef typename c2_types<T, U, V...>::wide_type wide_type;
+ return ({ wide_type a_(a), b_(c2_max(b, c...)); a_ > b_ ? a_ : b_; });
+}
+
+/**
+ * \ingroup utils_internal
+ * specialization for two values */
+template<typename T, typename U>
+inline constexpr typename c2_types<T, U>::mintype c2_min(const T a, const U b) {
+ typedef typename c2_types<T, U>::wide_type wide_type;
+ return ({
+ wide_type a_(a), b_(b);
+ static_cast<typename c2_types<T, U>::mintype>(a_ < b_ ? a_ : b_);
+ });
+}
+
+/**
+ * Finds the minimum value of a list of "similarly typed" values.
+ *
+ * This is an extension to std::min where the types do not have to be identical, and the smallest
+ * resulting type is used that accommodates the argument types.
+ *
+ * \note Value types must be similar, e.g. all floating point, all pointers, all signed, or all
+ * unsigned.
+ *
+ * @return the smallest of the input arguments.
+ */
+template<typename T, typename U, typename... V>
+constexpr typename c2_types<T, U, V...>::mintype c2_min(const T a, const U b, const V ... c) {
+ typedef typename c2_types<U, V...>::mintype rest_type;
+ typedef typename c2_types<T, rest_type>::wide_type wide_type;
+ return ({
+ wide_type a_(a), b_(c2_min(b, c...));
+ static_cast<typename c2_types<T, rest_type>::mintype>(a_ < b_ ? a_ : b_);
+ });
+}
+
+/// @}
+
+#ifdef __ANDROID__
+} // namespace android
+#endif
+
+#endif // C2_H_
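A small usage sketch for the c2_min/c2_max helpers defined above (a fragment, assuming C2.h is included; the values are illustrative, and mixing signed and unsigned arguments is rejected at compile time by the static_asserts in c2_types):

    uint8_t  a = 200;
    uint32_t b = 100000;
    auto hi = c2_max(a, b);         // wide_type is uint32_t; hi == 100000
    auto lo = c2_min(a, b);         // mintype is uint8_t for unsigned args; lo == 200
    auto m3 = c2_min(7u, 3u, 5u);   // variadic form; m3 == 3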
diff --git a/media/libstagefright/codec2/include/C2Buffer.h b/media/libstagefright/codec2/include/C2Buffer.h
new file mode 100644
index 0000000..9f6b487
--- /dev/null
+++ b/media/libstagefright/codec2/include/C2Buffer.h
@@ -0,0 +1,1777 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef C2BUFFER_H_
+#define C2BUFFER_H_
+
+#include <C2.h>
+#include <C2Param.h> // for C2Info
+
+#include <list>
+#include <memory>
+
+typedef int C2Fence;
+
+#ifdef __ANDROID__
+
+// #include <system/window.h>
+#include <cutils/native_handle.h>
+#include <hardware/gralloc.h> // TODO: remove
+
+typedef native_handle_t C2Handle;
+
+#else
+
+typedef void* C2Handle;
+
+#endif
+
+namespace android {
+
+/// \defgroup buffer Buffers
+/// @{
+
+/// \defgroup buffer_sync Synchronization
+/// @{
+
+/**
+ * Synchronization is accomplished using event and fence objects.
+ *
+ * These are cross-process extensions of promise/future infrastructure.
+ * Events are analogous to std::promise<void>, whereas fences are to std::shared_future<void>.
+ *
+ * Fences and events are shareable/copyable.
+ *
+ * Fences are used in two scenarios, and all copied instances refer to the same event.
+ * \todo do events need to be copyable or should they be unique?
+ *
+ * acquire sync fence object: signaled when it is safe for the component or client to access
+ * (the contents of) an object.
+ *
+ * release sync fence object: \todo
+ *
+ * Fences can be backed by hardware. Hardware fences are guaranteed to signal NO MATTER WHAT within
+ * a short (platform specific) amount of time; this guarantee is usually less than 15 msecs.
+ */
+
+/**
+ * Fence object used by components and the framework.
+ *
+ * Implements the waiting for an event, analogous to a 'future'.
+ *
+ * To be implemented by vendors if using HW fences.
+ */
+class C2Fence {
+public:
+ /**
+ * Waits for a fence to be signaled with a timeout.
+ *
+ * \todo a mechanism to cancel a wait - for now the only way to do this is to abandon the
+ * event, but fences are shared so canceling a wait will cancel all waits.
+ *
+ * \param timeoutNs the maximum time to wait in nsecs
+ *
+ * \retval C2_OK the fence has been signaled
+ * \retval C2_TIMED_OUT the fence has not been signaled within the timeout
+ * \retval C2_BAD_STATE the fence has been abandoned without being signaled (it will never
+ * be signaled)
+ * \retval C2_NO_PERMISSION no permission to wait for the fence (unexpected - system)
+ * \retval C2_CORRUPTED some unknown error prevented waiting for the fence (unexpected)
+ */
+ C2Error wait(nsecs_t timeoutNs);
+
+ /**
+ * Used to check if this fence is valid (if there is a chance for it to be signaled.)
+ * A fence becomes invalid if the controlling event is destroyed without it signaling the fence.
+ *
+ * \return whether this fence is valid
+ */
+ bool valid() const;
+
+ /**
+ * Used to check if this fence has been signaled (is ready).
+ *
+ * \return whether this fence has been signaled
+ */
+ bool ready() const;
+
+ /**
+ * Returns a file descriptor that can be used to wait for this fence in a select system call.
+ * \note The returned file descriptor, if valid, must be closed by the caller.
+ *
+ * This can be used in e.g. poll() system calls. This file becomes readable (POLLIN) when the
+ * fence is signaled, and bad (POLLERR) if the fence is abandoned.
+ *
+ * \return a file descriptor representing this fence (with ownership), or -1 if the fence
+ * has already been signaled (\todo or abandoned).
+ *
+ * \todo this must be compatible with fences used by gralloc
+ */
+ int fd() const;
+
+ /**
+ * Returns whether this fence is a hardware-backed fence.
+ * \return whether this is a hardware fence
+ */
+ bool isHW() const;
+
+private:
+ class Impl;
+ std::shared_ptr<Impl> mImpl;
+};
+
+/**
+ * Event object used by components and the framework.
+ *
+ * Implements the signaling of an event, analogous to a 'promise'.
+ *
+ * Hardware backed events do not go through this object, and must be exposed directly as fences
+ * by vendors.
+ */
+class C2Event {
+public:
+ /**
+ * Returns a fence for this event.
+ */
+ C2Fence fence() const;
+
+ /**
+ * Signals (all) associated fence(s).
+ * This has no effect if the event was already signaled or abandoned.
+ *
+ * \retval C2_OK the fence(s) were successfully signaled
+ * \retval C2_BAD_STATE the fence(s) have already been abandoned or merged (caller error)
+ * \retval C2_ALREADY_EXISTS the fence(s) have already been signaled (caller error)
+ * \retval C2_NO_PERMISSION no permission to signal the fence (unexpected - system)
+ * \retval C2_CORRUPTED some unknown error prevented signaling the fence(s) (unexpected)
+ */
+ C2Error fire();
+
+ /**
+ * Trigger this event from the merging of the supplied fences. This means that it will be
+ * abandoned if any of these fences have been abandoned, and it will be fired if all of these
+ * fences have been signaled.
+ *
+ * \retval C2_OK the merging was successfully done
+ * \retval C2_NO_MEMORY not enough memory to perform the merging
+ * \retval C2_ALREADY_EXISTS the fence(s) have already been merged (caller error)
+ * \retval C2_BAD_STATE the fence(s) have already been signaled or abandoned (caller error)
+ * \retval C2_NO_PERMISSION no permission to merge the fence (unexpected - system)
+ * \retval C2_CORRUPTED some unknown error prevented merging the fence(s) (unexpected)
+ */
+ C2Error merge(std::vector<C2Fence> fences);
+
+ /**
+ * Abandons the event and any associated fence(s).
+ * \note Call this to explicitly abandon an event before it is destructed to avoid a warning.
+ *
+ * This has no effect if the event was already signaled or abandoned.
+ *
+ * \retval C2_OK the fence(s) were successfully signaled
+ * \retval C2_BAD_STATE the fence(s) have already been signaled or merged (caller error)
+ * \retval C2_ALREADY_EXISTS the fence(s) have already been abandoned (caller error)
+ * \retval C2_NO_PERMISSION no permission to abandon the fence (unexpected - system)
+ * \retval C2_CORRUPTED some unknown error prevented signaling the fence(s) (unexpected)
+ */
+ C2Error abandon();
+
+private:
+ class Impl;
+ std::shared_ptr<Impl> mImpl;
+};
+
+/// \addtogroup buf_internal Internal
+/// @{
+
+/**
+ * Interface for objects that encapsulate an updatable error value.
+ */
+struct _C2InnateError {
+ inline C2Error error() const { return mError; }
+
+protected:
+ _C2InnateError(C2Error error) : mError(error) { }
+
+ C2Error mError; // this error is updatable by the object
+};
+
+/// @}
+
+/**
+ * This is a utility template for objects protected by an acquire fence, so that errors during
+ * acquiring the object are propagated to the object itself.
+ */
+template<typename T>
+class C2Acquirable : public C2Fence {
+public:
+ /**
+ * Acquires the object protected by an acquire fence. Any errors during the mapping will be
+ * passed to the object.
+ *
+ * \return acquired object potentially invalidated if waiting for the fence failed.
+ */
+ T get();
+
+protected:
+ C2Acquirable(C2Error error, C2Fence fence, T t) : C2Fence(fence), mInitialError(error), mT(t) { }
+
+private:
+ C2Error mInitialError;
+ T mT; // TODO: move instead of copy
+};
+
+/// @}
+
+/// \defgroup linear Linear Data Blocks
+/// @{
+
+/**************************************************************************************************
+ LINEAR ASPECTS, BLOCKS AND VIEWS
+**************************************************************************************************/
+
+/**
+ * Common aspect for all objects that have a linear capacity.
+ */
+class _C2LinearCapacityAspect {
+/// \name Linear capacity interface
+/// @{
+public:
+ inline uint32_t capacity() const { return mCapacity; }
+
+protected:
+
+#if UINTPTR_MAX == 0xffffffff
+ static_assert(sizeof(size_t) == sizeof(uint32_t), "size_t is too big");
+#else
+ static_assert(sizeof(size_t) > sizeof(uint32_t), "size_t is too small");
+ // explicitly disable construction from size_t
+ inline explicit _C2LinearCapacityAspect(size_t capacity) = delete;
+#endif
+
+ inline explicit _C2LinearCapacityAspect(uint32_t capacity)
+ : mCapacity(capacity) { }
+
+ inline explicit _C2LinearCapacityAspect(const _C2LinearCapacityAspect *parent)
+ : mCapacity(parent == nullptr ? 0 : parent->capacity()) { }
+
+private:
+ const uint32_t mCapacity;
+/// @}
+};
+
+/**
+ * Aspect for objects that have a linear range.
+ *
+ * This class is copiable.
+ */
+class _C2LinearRangeAspect : public _C2LinearCapacityAspect {
+/// \name Linear range interface
+/// @{
+public:
+ inline uint32_t offset() const { return mOffset; }
+ inline uint32_t size() const { return mSize; }
+
+protected:
+ inline explicit _C2LinearRangeAspect(const _C2LinearCapacityAspect *parent)
+ : _C2LinearCapacityAspect(parent),
+ mOffset(0),
+ mSize(capacity()) { }
+
+ inline _C2LinearRangeAspect(const _C2LinearCapacityAspect *parent, size_t offset, size_t size)
+ : _C2LinearCapacityAspect(parent),
+ mOffset(c2_min(offset, capacity())),
+ mSize(c2_min(size, capacity() - mOffset)) { }
+
+ // subsection of the two [offset, offset + size] ranges
+ inline _C2LinearRangeAspect(const _C2LinearRangeAspect *parent, size_t offset, size_t size)
+ : _C2LinearCapacityAspect(parent == nullptr ? 0 : parent->capacity()),
+ mOffset(c2_min(c2_max(offset, parent == nullptr ? 0 : parent->offset()), capacity())),
+ mSize(c2_min(c2_min(size, parent == nullptr ? 0 : parent->size()), capacity() - mOffset)) { }
+
+private:
+ friend class _C2EditableLinearRange;
+ // invariants 0 <= mOffset <= mOffset + mSize <= capacity()
+ uint32_t mOffset;
+ uint32_t mSize;
+/// @}
+};
+
+/**
+ * Aspect for objects that have an editable linear range.
+ *
+ * This class is copiable.
+ */
+class _C2EditableLinearRange : public _C2LinearRangeAspect {
+protected:
+ inline explicit _C2EditableLinearRange(const _C2LinearCapacityAspect *parent)
+ : _C2LinearRangeAspect(parent) { }
+
+ inline _C2EditableLinearRange(const _C2LinearCapacityAspect *parent, size_t offset, size_t size)
+ : _C2LinearRangeAspect(parent, offset, size) { }
+
+ // subsection of the two [offset, offset + size] ranges
+ inline _C2EditableLinearRange(const _C2LinearRangeAspect *parent, size_t offset, size_t size)
+ : _C2LinearRangeAspect(parent, offset, size) { }
+
+/// \name Editable linear range interface
+/// @{
+
+ /**
+ * Sets the offset to |offset|, while trying to keep the end of the buffer unchanged (e.g.
+ * size will grow if offset is decreased, and may shrink if offset is increased.) Returns
+ * true if successful, which is equivalent to 0 <= |offset| <= capacity().
+ *
+ * Note: setting offset and size will yield different result depending on the order of the
+ * operations. Always set offset first to ensure proper size.
+ */
+ inline bool setOffset(uint32_t offset) {
+ if (offset > capacity()) {
+ return false;
+ }
+
+ if (offset > mOffset + mSize) {
+ mSize = 0;
+ } else {
+ mSize = mOffset + mSize - offset;
+ }
+ mOffset = offset;
+ return true;
+ }
+ /**
+ * Sets the size to |size|. Returns true if successful, which is equivalent to
+ * 0 <= |size| <= capacity() - offset().
+ *
+ * Note: setting offset and size will yield different result depending on the order of the
+ * operations. Always set offset first to ensure proper size.
+ */
+ inline bool setSize(uint32_t size) {
+ if (size > capacity() - mOffset) {
+ return false;
+ } else {
+ mSize = size;
+ return true;
+ }
+ }
+ /**
+ * Sets the offset to |offset| with best effort. Same as setOffset() except that offset will
+ * be clamped to the buffer capacity.
+ *
+ * Note: setting offset and size (even using best effort) will yield different results depending
+ * on the order of the operations. Always set offset first to ensure proper size.
+ */
+ inline void setOffset_be(uint32_t offset) {
+ if (offset > capacity()) {
+ offset = capacity();
+ }
+ if (offset > mOffset + mSize) {
+ mSize = 0;
+ } else {
+ mSize = mOffset + mSize - offset;
+ }
+ mOffset = offset;
+ }
+ /**
+ * Sets the size to |size| with best effort. Same as setSize() except that the selected region
+ * will be clamped to the buffer capacity (e.g. size is clamped to [0, capacity() - offset()]).
+ *
+ * Note: setting offset and size (even using best effort) will yield different results depending
+ * on the order of the operations. Always set offset first to ensure proper size.
+ */
+ inline void setSize_be(uint32_t size) {
+ mSize = std::min(size, capacity() - mOffset);
+ }
+/// @}
+};
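+
+// Illustrative sketch (not part of the API): the ordering note above in practice. These methods
+// are protected, so the calls below would be made from a deriving view class; assume a range
+// with capacity 100, offset 0 and size 100.
+//
+//   setOffset(40);   // keeps the end fixed: offset = 40, size shrinks to 60
+//   setSize(20);     // ok, since 20 <= capacity() - offset(); offset = 40, size = 20
+//
+// Reversing the calls selects a different region: setSize(20) first yields [0, 20), and the
+// subsequent setOffset(40) then clamps the size to 0 because 40 > offset() + size().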
+
+// ================================================================================================
+// BLOCKS
+// ================================================================================================
+
+/**
+ * Blocks are sections of allocations. They can be either 1D or 2D.
+ */
+
+class C2LinearAllocation;
+
+class C2Block1D : public _C2LinearRangeAspect {
+public:
+ const C2Handle *handle() const;
+
+protected:
+ C2Block1D(std::shared_ptr<C2LinearAllocation> alloc);
+ C2Block1D(std::shared_ptr<C2LinearAllocation> alloc, size_t offset, size_t size);
+
+private:
+ class Impl;
+ std::shared_ptr<Impl> mImpl;
+};
+
+/**
+ * Read view provides read-only access for a linear memory segment.
+ *
+ * This class is copiable.
+ */
+class C2ReadView : public _C2LinearCapacityAspect {
+public:
+ /**
+ * \return pointer to the start of the block or nullptr on error.
+ */
+ const uint8_t *data();
+
+ /**
+ * Returns a portion of this view.
+ *
+ * \param offset the start offset of the portion. \note This is clamped to the capacity of this
+ * view.
+ * \param size the size of the portion. \note This is clamped to the remaining data from offset.
+ *
+ * \return a read view containing a portion of this view
+ */
+ C2ReadView subView(size_t offset, size_t size) const;
+
+ /**
+ * \return error during the creation/mapping of this view.
+ */
+ C2Error error();
+
+private:
+ class Impl;
+ std::shared_ptr<Impl> mImpl;
+};
+
+/**
+ * Write view provides read/write access for a linear memory segment.
+ *
+ * This class is copiable. \todo movable only?
+ */
+class C2WriteView : public _C2EditableLinearRange {
+public:
+ /**
+ * Start of the block.
+ *
+ * \return pointer to the start of the block or nullptr on error.
+ */
+ uint8_t *base();
+
+ /**
+ * \return pointer to the block at the current offset or nullptr on error.
+ */
+ uint8_t *data();
+
+ /**
+ * \return error during the creation/mapping of this view.
+ */
+ C2Error error();
+
+private:
+ class Impl;
+ /// \todo should this be unique_ptr to make this movable only - to avoid inconsistent regions
+ /// between copies.
+ std::shared_ptr<Impl> mImpl;
+};
+
+/**
+ * A constant (read-only) linear block (portion of an allocation) with an acquire fence.
+ * Blocks are unmapped when created, and can be mapped into a read view on demand.
+ *
+ * This class is copiable and contains a reference to the allocation that it is based on.
+ */
+class C2ConstLinearBlock : public C2Block1D {
+public:
+ /**
+ * Maps this block into memory and returns a read view for it.
+ *
+ * \return a read view for this block.
+ */
+ C2Acquirable<C2ReadView> map() const;
+
+ /**
+ * Returns a portion of this block.
+ *
+ * \param offset the start offset of the portion. \note This is clamped to the capacity of this
+ * block.
+ * \param size the size of the portion. \note This is clamped to the remaining data from offset.
+ *
+ * \return a constant linear block containing a portion of this block
+ */
+ C2ConstLinearBlock subBlock(size_t offset, size_t size) const;
+
+ /**
+ * Returns the acquire fence for this block.
+ *
+ * \return a fence that must be waited on before reading the block.
+ */
+ C2Fence fence() const { return mFence; }
+
+private:
+ C2Fence mFence;
+};
+
+/**
+ * Linear block is a writeable 1D block. Once written, it can be shared in whole or in parts with
+ * consumers/readers as read-only const linear block(s).
+ */
+class C2LinearBlock : public C2Block1D {
+public:
+ /**
+ * Maps this block into memory and returns a write view for it.
+ *
+ * \return a write view for this block.
+ */
+ C2Acquirable<C2WriteView> map();
+
+ /**
+ * Creates a read-only const linear block for a portion of this block; optionally protected
+ * by an acquire fence. There are two ways to use this:
+ *
+ * 1) share a ready block after writing data into the block. In this case no fence shall be
+ * supplied, and the block shall not be modified after calling this method.
+ * 2) share block metadata before actually (finishing) writing the data into the block. In
+ * this case a fence must be supplied that will be triggered when the data is written.
+ * The block shall be modified only until firing the event for the fence.
+ */
+ C2ConstLinearBlock share(size_t offset, size_t size, C2Fence fence);
+};
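+
+// Illustrative sketch (not part of the API): the "write, then share" flow of mode (1) above.
+// |block| is a hypothetical std::shared_ptr<C2LinearBlock>; obtaining the C2WriteView from the
+// returned C2Acquirable (and waiting on its fence) is elided, and a default-constructed C2Fence
+// is assumed to mean "no fence".
+//
+//   C2Acquirable<C2WriteView> acquirableView = block->map();
+//   // ... retrieve the C2WriteView and write |size| bytes starting at its data() pointer ...
+//   C2ConstLinearBlock readable = block->share(0 /* offset */, size, C2Fence());
+//   // per mode (1), |block| must not be modified after share() when no fence is supplied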
+
+/// @}
+
+/**************************************************************************************************
+ CIRCULAR BLOCKS AND VIEWS
+**************************************************************************************************/
+
+/// \defgroup circular Circular buffer support
+/// @{
+
+/**
+ * Circular blocks can be used to share data between a writer and a reader (and/or other consumers)
+ * in a memory-efficient way by reusing a section of memory. Circular blocks are a bit more complex
+ * than single reader/single writer schemes to facilitate block-based consuming of data.
+ *
+ * They can operate in two modes:
+ *
+ * 1) one writer that creates blocks to be consumed (this model can be used by components)
+ *
+ * 2) one writer that writes continuously, and one reader that can create blocks to be consumed
+ * by further recipients (this model is used by the framework, and cannot be used by components.)
+ *
+ * Circular blocks have four segments with running pointers:
+ * - reserved: data reserved and available for the writer
+ * - committed: data committed by the writer and available to the reader (if present)
+ * - used: data used by consumers (if present)
+ * - available: unused data available to be reserved
+ */
+class C2CircularBlock : public C2Block1D {
+ // TODO: add methods
+
+private:
+ size_t mReserved __unused; // end of reserved section
+ size_t mCommitted __unused; // end of committed section
+ size_t mUsed __unused; // end of used section
+ size_t mFree __unused; // end of free section
+};
+
+class _C2CircularBlockSegment : public _C2LinearCapacityAspect {
+public:
+ /**
+ * Returns the available size for this segment.
+ *
+ * \return currently available size for this segment
+ */
+ size_t available() const;
+
+ /**
+ * Reserve some space for this segment from its current start.
+ *
+ * \param size desired space in bytes
+ * \param fence a pointer to an acquire fence. If non-null, the reservation is asynchronous and
+ * a fence will be stored here that will be signaled when the reservation is
+ * complete. If null, the reservation is synchronous.
+ *
+ * \retval C2_OK the space was successfully reserved
+ * \retval C2_NO_MEMORY the space requested cannot be reserved
+ * \retval C2_TIMED_OUT the reservation timed out \todo when?
+ * \retval C2_CORRUPTED some unknown error prevented reserving space. (unexpected)
+ */
+ C2Error reserve(size_t size, C2Fence *fence /* nullable */);
+
+ /**
+ * Abandons a portion of this segment. This moves the beginning of this segment.
+ *
+ * \note This method is only allowed if this segment is producing blocks.
+ *
+ * \param size number of bytes to abandon
+ *
+ * \retval C2_OK the data was successfully abandoned
+ * \retval C2_TIMED_OUT the operation timed out (unexpected)
+ * \retval C2_CORRUPTED some unknown error prevented abandoning the data (unexpected)
+ */
+ C2Error abandon(size_t size);
+
+ /**
+ * Share a portion as block(s) with consumers (these are moved to the used section).
+ *
+ * \note This method is only allowed if this segment is producing blocks.
+ * \note Share does not move the beginning of the segment. (\todo add abandon/offset?)
+ *
+ * \param size number of bytes to share
+ * \param fence fence to be used for the section
+ * \param blocks list where the blocks of the section are appended to
+ *
+ * \retval C2_OK the portion was successfully shared
+ * \retval C2_NO_MEMORY not enough memory to share the portion
+ * \retval C2_TIMED_OUT the operation timed out (unexpected)
+ * \retval C2_CORRUPTED some unknown error prevented sharing the data (unexpected)
+ */
+ C2Error share(size_t size, C2Fence fence, std::list<C2ConstLinearBlock> &blocks);
+
+ /**
+ * Returns the beginning offset of this segment from the start of this circular block.
+ *
+ * @return beginning offset
+ */
+ size_t begin();
+
+ /**
+ * Returns the end offset of this segment from the start of this circular block.
+ *
+ * @return end offset
+ */
+ size_t end();
+};
+
+/**
+ * A circular write-view is a dynamic mapped view for a segment of a circular block. Care must be
+ * taken when using this view so that only the section owned by the segment is modified.
+ */
+class C2CircularWriteView : public _C2LinearCapacityAspect {
+public:
+ /**
+ * Start of the circular block.
+ * \note the segment does not own this pointer.
+ *
+ * \return pointer to the start of the circular block or nullptr on error.
+ */
+ uint8_t *base();
+
+ /**
+ * \return error during the creation/mapping of this view.
+ */
+ C2Error error();
+};
+
+/**
+ * The writer of a circular buffer.
+ *
+ * Can commit data to a reader (not supported for components) OR share data blocks directly with a
+ * consumer.
+ *
+ * If a component supports outputting data into circular buffers, it must allocate a circular
+ * block and use a circular writer.
+ */
+class C2CircularWriter : public _C2CircularBlockSegment {
+public:
+ /**
+ * Commits a portion of this segment to the next segment. This moves the beginning of the
+ * segment.
+ *
+ * \param size number of bytes to commit to the next segment
+ * \param fence fence used for the commit (the fence must signal before the data is committed)
+ */
+ C2Error commit(size_t size, C2Fence fence);
+
+ /**
+ * Maps this block into memory and returns a write view for it.
+ *
+ * \return a write view for this block.
+ */
+ C2Acquirable<C2CircularWriteView> map();
+};
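+
+// Illustrative sketch (not part of the API): one writer-side pass over the segments described
+// above. |writer| is a hypothetical C2CircularWriter; error handling, mapping of the returned
+// C2CircularWriteView and fence handling are elided (a default-constructed C2Fence is assumed
+// to mean "no fence").
+//
+//   if (writer.reserve(size, nullptr /* synchronous */) == C2_OK) {
+//       C2Acquirable<C2CircularWriteView> view = writer.map();
+//       // ... write |size| bytes into the reserved section ...
+//       std::list<C2ConstLinearBlock> blocks;
+//       writer.share(size, C2Fence(), blocks);   // or writer.commit(size, C2Fence())
+//   }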
+
+/// @}
+
+/// \defgroup graphic Graphic Data Blocks
+/// @{
+
+/**
+ * Interface for objects that have a width and height (planar capacity).
+ */
+class _C2PlanarCapacityAspect {
+/// \name Planar capacity interface
+/// @{
+public:
+ inline uint32_t width() const { return mWidth; }
+ inline uint32_t height() const { return mHeight; }
+
+protected:
+ inline _C2PlanarCapacityAspect(uint32_t width, uint32_t height)
+ : mWidth(width), mHeight(height) { }
+
+ inline _C2PlanarCapacityAspect(const _C2PlanarCapacityAspect *parent)
+ : mWidth(parent == nullptr ? 0 : parent->width()),
+ mHeight(parent == nullptr ? 0 : parent->height()) { }
+
+private:
+ const uint32_t mWidth;
+ const uint32_t mHeight;
+/// @}
+};
+
+/**
+ * C2Rect: rectangle type with non-negative coordinates.
+ *
+ * \note This struct has public fields without getters/setters. All methods are inline.
+ */
+struct C2Rect {
+// public:
+ uint32_t mLeft;
+ uint32_t mTop;
+ uint32_t mWidth;
+ uint32_t mHeight;
+
+ inline C2Rect(uint32_t width, uint32_t height)
+ : C2Rect(width, height, 0, 0) { }
+
+ inline C2Rect(uint32_t width, uint32_t height, uint32_t left, uint32_t top)
+ : mLeft(left), mTop(top), mWidth(width), mHeight(height) { }
+
+ // utility methods
+
+ inline bool isEmpty() const {
+ return mWidth == 0 || mHeight == 0;
+ }
+
+ inline bool isValid() const {
+ // mLeft <= ~mWidth is equivalent to mLeft + mWidth <= UINT32_MAX, i.e. no uint32_t overflow
+ return mLeft <= ~mWidth && mTop <= ~mHeight;
+ }
+
+ inline operator bool() const {
+ return isValid() && !isEmpty();
+ }
+
+ inline bool operator!() const {
+ return !bool(*this);
+ }
+
+ inline bool contains(const C2Rect &other) const {
+ if (!isValid() || !other.isValid()) {
+ return false;
+ } else if (other.isEmpty()) {
+ return true;
+ } else {
+ return mLeft <= other.mLeft && mTop <= other.mTop
+ && mLeft + mWidth >= other.mLeft + other.mWidth
+ && mTop + mHeight >= other.mTop + other.mHeight;
+ }
+ }
+
+ inline bool operator==(const C2Rect &other) const {
+ if (!isValid()) {
+ return !other.isValid();
+ } else if (isEmpty()) {
+ return other.isEmpty();
+ } else {
+ return mLeft == other.mLeft && mTop == other.mTop
+ && mWidth == other.mWidth && mHeight == other.mHeight;
+ }
+ }
+
+ inline bool operator!=(const C2Rect &other) const {
+ return !operator==(other);
+ }
+
+ inline bool operator>=(const C2Rect &other) const {
+ return contains(other);
+ }
+
+ inline bool operator>(const C2Rect &other) const {
+ return contains(other) && !operator==(other);
+ }
+
+ inline bool operator<=(const C2Rect &other) const {
+ return other.contains(*this);
+ }
+
+ inline bool operator<(const C2Rect &other) const {
+ return other.contains(*this) && !operator==(other);
+ }
+};
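+
+// Illustrative sketch (not part of the API): the comparison operators above form a partial order
+// by containment, so two overlapping rectangles may compare neither <= nor >=.
+//
+//   C2Rect outer(100, 100);          // 100x100 at (0, 0)
+//   C2Rect inner(10, 10, 20, 30);    // 10x10 at (left 20, top 30)
+//   bool a = outer.contains(inner);  // true: 20 + 10 <= 100 and 30 + 10 <= 100
+//   bool b = (inner <= outer);       // true, same containment check
+//   bool c = (inner == outer);       // false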
+
+/**
+ * C2PlaneInfo: information on the layout of flexible planes.
+ *
+ * Public fields without getters/setters.
+ */
+struct C2PlaneInfo {
+// public:
+ enum Channel : uint32_t {
+ Y,
+ R,
+ G,
+ B,
+ A,
+ Cr,
+ Cb,
+ } mChannel;
+
+ int32_t mColInc; // column increment in bytes. may be negative
+ int32_t mRowInc; // row increment in bytes. may be negative
+ uint32_t mHorizSubsampling; // subsampling compared to width
+ uint32_t mVertSubsampling; // subsampling compared to height
+
+ uint32_t mBitDepth;
+ uint32_t mAllocatedDepth;
+
+ inline ssize_t minOffset(uint32_t width, uint32_t height) {
+ ssize_t offs = 0;
+ if (width > 0 && mColInc < 0) {
+ offs += mColInc * (ssize_t)(width - 1);
+ }
+ if (height > 0 && mRowInc < 0) {
+ offs += mRowInc * (ssize_t)(height - 1);
+ }
+ return offs;
+ }
+
+ inline ssize_t maxOffset(uint32_t width, uint32_t height, uint32_t allocatedDepth) {
+ ssize_t offs = (allocatedDepth + 7) >> 3;
+ if (width > 0 && mColInc > 0) {
+ offs += mColInc * (ssize_t)(width - 1);
+ }
+ if (height > 0 && mRowInc > 0) {
+ offs += mRowInc * (ssize_t)(height - 1);
+ }
+ return offs;
+ }
+};
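+
+// Illustrative sketch (not part of the API): offsets for a hypothetical 8-bit luma plane stored
+// bottom-up (negative row increment), 64 pixels wide and 32 rows tall.
+//
+//   C2PlaneInfo plane = { C2PlaneInfo::Y, 1 /* col inc */, -64 /* row inc */,
+//                         1, 1 /* subsampling */, 8 /* bit depth */, 8 /* allocated depth */ };
+//   ssize_t lo = plane.minOffset(64, 32);     // -64 * 31 = -1984: rows extend to lower addresses
+//   ssize_t hi = plane.maxOffset(64, 32, 8);  // ceil(8 / 8) + 1 * 63 = 64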
+
+struct C2PlaneLayout {
+public:
+ enum Type : uint32_t {
+ MEDIA_IMAGE_TYPE_UNKNOWN = 0,
+ MEDIA_IMAGE_TYPE_YUV = 0x100,
+ MEDIA_IMAGE_TYPE_YUVA,
+ MEDIA_IMAGE_TYPE_RGB,
+ MEDIA_IMAGE_TYPE_RGBA,
+ };
+
+ Type mType;
+ uint32_t mNumPlanes; // number of planes
+
+ enum PlaneIndex : uint32_t {
+ Y = 0,
+ U = 1,
+ V = 2,
+ R = 0,
+ G = 1,
+ B = 2,
+ A = 3,
+ MAX_NUM_PLANES = 4,
+ };
+
+ C2PlaneInfo mPlanes[MAX_NUM_PLANES];
+};
+
+/**
+ * Aspect for objects that have a planar section (crop rectangle).
+ *
+ * This class is copiable.
+ */
+class _C2PlanarSection : public _C2PlanarCapacityAspect {
+/// \name Planar section interface
+/// @{
+public:
+ // crop can be an empty rect, does not have to line up with subsampling
+ // NOTE: we do not support floating-point crop
+ inline const C2Rect crop() { return mCrop; }
+
+ /**
+ * Sets crop to crop intersected with [(0,0) .. (width, height)]
+ */
+ inline void setCrop_be(const C2Rect &crop);
+
+ /**
+ * If crop is within the dimensions of this object, it sets crop to it.
+ *
+ * \return true iff crop is within the dimensions of this object
+ */
+ inline bool setCrop(const C2Rect &crop);
+
+private:
+ C2Rect mCrop;
+/// @}
+};
+
+class C2Block2D : public _C2PlanarSection {
+public:
+ const C2Handle *handle() const;
+
+private:
+ class Impl;
+ std::shared_ptr<Impl> mImpl;
+};
+
+/**
+ * Graphic view provides read or read-write access for a graphic block.
+ *
+ * This class is copiable.
+ *
+ * \note Due to the subsampling of graphic buffers, a read view must still contain a crop rectangle
+ * to ensure subsampling is followed. This results in nearly identical interface between read and
+ * write views, so C2GraphicView can encompass both of them.
+ */
+class C2GraphicView : public _C2PlanarSection {
+public:
+ /**
+ * \return pointer to the start of the block or nullptr on error.
+ */
+ const uint8_t *data() const;
+
+ /**
+ * \return pointer to the start of the block or nullptr on error.
+ */
+ uint8_t *data();
+
+ /**
+ * Returns a section of this view.
+ *
+ * \param rect the dimension of the section. \note This is clamped to the crop of this view.
+ *
+ * \return a read view containing the requested section of this view
+ */
+ const C2GraphicView subView(const C2Rect &rect) const;
+ C2GraphicView subView(const C2Rect &rect);
+
+ /**
+ * \return error during the creation/mapping of this view.
+ */
+ C2Error error() const;
+
+private:
+ class Impl;
+ std::shared_ptr<Impl> mImpl;
+};
+
+/**
+ * A constant (read-only) graphic block (portion of an allocation) with an acquire fence.
+ * Blocks are unmapped when created, and can be mapped into a read view on demand.
+ *
+ * This class is copiable and contains a reference to the allocation that it is based on.
+ */
+class C2ConstGraphicBlock : public C2Block2D {
+public:
+ /**
+ * Maps this block into memory and returns a read view for it.
+ *
+ * \return a read view for this block.
+ */
+ C2Acquirable<const C2GraphicView> map() const;
+
+ /**
+ * Returns a section of this block.
+ *
+ * \param rect the coordinates of the section. \note This is clamped to the crop rectangle of
+ * this block.
+ *
+ * \return a constant graphic block containing a portion of this block
+ */
+ C2ConstGraphicBlock subBlock(const C2Rect &rect) const;
+
+ /**
+ * Returns the acquire fence for this block.
+ *
+ * \return a fence that must be waited on before reading the block.
+ */
+ C2Fence fence() const { return mFence; }
+
+private:
+ C2Fence mFence;
+};
+
+/**
+ * Graphic block is a writeable 2D block. Once written, it can be shared in whole or in part with
+ * consumers/readers as read-only const graphic block.
+ */
+class C2GraphicBlock : public C2Block2D {
+public:
+ /**
+ * Maps this block into memory and returns a write view for it.
+ *
+ * \return a write view for this block.
+ */
+ C2Acquirable<C2GraphicView> map();
+
+ /**
+ * Creates a read-only const graphic block for a portion of this block; optionally protected
+ * by an acquire fence. There are two ways to use this:
+ *
+ * 1) share a ready block after writing data into the block. In this case no fence shall be
+ * supplied, and the block shall not be modified after calling this method.
+ * 2) share block metadata before actually (finishing) writing the data into the block. In
+ * this case a fence must be supplied that will be triggered when the data is written.
+ * The block shall be modified only until firing the event for the fence.
+ */
+ C2ConstGraphicBlock share(const C2Rect &crop, C2Fence fence);
+};
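+
+// Illustrative sketch (not part of the API): the graphic counterpart of the linear write/share
+// flow. |block| is a hypothetical std::shared_ptr<C2GraphicBlock> and |width|/|height| its
+// dimensions; obtaining the C2GraphicView from the C2Acquirable is elided, and a
+// default-constructed C2Fence is assumed to mean "no fence".
+//
+//   C2Acquirable<C2GraphicView> acquirableView = block->map();
+//   // ... fill the planes through the view ...
+//   C2ConstGraphicBlock readable = block->share(C2Rect(width, height), C2Fence());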
+
+/// @}
+
+/// \defgroup buffer_obj Buffer objects
+/// @{
+
+// ================================================================================================
+// BUFFERS
+// ================================================================================================
+
+/// \todo: Do we still need this?
+///
+// There are 2 kinds of buffers: linear or graphic. Linear buffers can contain a single block, or
+// a list of blocks (LINEAR_CHUNKS). Support for list of blocks is optional, and can allow consuming
+// data from circular buffers or scattered data sources without extra memcpy. Currently, list of
+// graphic blocks is not supported.
+
+class C2LinearBuffer; // read-write buffer
+class C2GraphicBuffer; // read-write buffer
+class C2LinearChunksBuffer;
+
+/**
+ * C2BufferData: the main, non-meta data of a buffer. A buffer can contain either linear blocks
+ * or graphic blocks, and can contain either a single block or multiple blocks. This is determined
+ * by its type.
+ */
+class C2BufferData {
+public:
+ /**
+ * The type of buffer data.
+ */
+ enum Type : uint32_t {
+ LINEAR, ///< the buffer contains a single linear block
+ LINEAR_CHUNKS, ///< the buffer contains one or more linear blocks
+ GRAPHIC, ///< the buffer contains a single graphic block
+ GRAPHIC_CHUNKS, ///< the buffer contains one or more graphic blocks
+ };
+
+ /**
+ * Gets the type of this buffer (data).
+ * \return the type of this buffer data.
+ */
+ Type type() const;
+
+ /**
+ * Gets the linear blocks of this buffer.
+ * \return a constant list of const linear blocks of this buffer.
+ * \retval empty list if this buffer does not contain linear block(s).
+ */
+ const std::list<C2ConstLinearBlock> linearBlocks() const;
+
+ /**
+ * Gets the graphic blocks of this buffer.
+ * \return a constant list of const graphic blocks of this buffer.
+ * \retval empty list if this buffer does not contain graphic block(s).
+ */
+ const std::list<C2ConstGraphicBlock> graphicBlocks() const;
+
+private:
+ class Impl;
+ std::shared_ptr<Impl> mImpl;
+
+protected:
+ // no public constructor
+ // C2BufferData(const std::shared_ptr<const Impl> &impl) : mImpl(impl) {}
+};
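+
+// Illustrative sketch (not part of the API): a consumer would typically dispatch on type() before
+// touching the block lists, as linearBlocks() is empty for graphic buffers and vice versa.
+// |data| is a hypothetical C2BufferData.
+//
+//   if (data.type() == C2BufferData::LINEAR || data.type() == C2BufferData::LINEAR_CHUNKS) {
+//       for (const C2ConstLinearBlock &block : data.linearBlocks()) {
+//           // ... map each block into a C2ReadView and consume it ...
+//       }
+//   }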
+
+/**
+ * C2Buffer: buffer base class. These are always used as shared_ptrs. Though the underlying buffer
+ * objects (native buffers, ion buffers, or dmabufs) are reference-counted by the system,
+ * C2Buffers hold only a single reference.
+ *
+ * These objects cannot be used on the stack.
+ */
+class C2Buffer {
+public:
+ /**
+ * Gets the buffer's data.
+ *
+ * \return the buffer's data.
+ */
+ const C2BufferData data() const;
+
+ /**
+ * These will still work if used in the pre-destroy notification (OnDestroyNotify).
+ */
+#if 0
+ inline std::shared_ptr<C2LinearBuffer> asLinearBuffer() const {
+ return mType == LINEAR ? std::shared_ptr::reinterpret_cast<C2LinearBuffer>(this) : nullptr;
+ }
+
+ inline std::shared_ptr<C2GraphicBuffer> asGraphicBuffer() const {
+ return mType == GRAPHIC ? std::shared_ptr::reinterpret_cast<C2GraphicBuffer>(this) : nullptr;
+ }
+
+ inline std::shared_ptr<C2CircularBuffer> asCircularBuffer() const {
+ return mType == CIRCULAR ? std::shared_ptr::reinterpret_cast<C2CircularBuffer>(this) : nullptr;
+ }
+#endif
+
+ ///@name Pre-destroy notification handling
+ ///@{
+
+ /**
+ * Register for notification just prior to the destruction of this object.
+ */
+ typedef void (*OnDestroyNotify) (const C2Buffer *buf, void *arg);
+
+ /**
+ * Registers for a pre-destroy notification. This is called just prior to the destruction of
+ * this buffer (when this buffer is no longer valid.)
+ *
+ * \param onDestroyNotify the notification callback
+ * \param arg an arbitrary parameter passed to the callback
+ *
+ * \retval C2_OK the registration was successful.
+ * \retval C2_DUPLICATE a notification was already registered for this callback and argument
+ * \retval C2_NO_MEMORY not enough memory to register for this callback
+ * \retval C2_CORRUPTED an unknown error prevented the registration (unexpected)
+ */
+ C2Error registerOnDestroyNotify(OnDestroyNotify *onDestroyNotify, void *arg = nullptr);
+
+ /**
+ * Unregisters a previously registered pre-destroy notification.
+ *
+ * \param onDestroyNotify the notification callback
+ * \param arg an arbitrary parameter passed to the callback
+ *
+ * \retval C2_OK the unregistration was successful.
+ * \retval C2_NOT_FOUND the notification was not found
+ * \retval C2_CORRUPTED an unknown error prevented the registration (unexpected)
+ */
+ C2Error unregisterOnDestroyNotify(OnDestroyNotify *onDestroyNotify, void *arg = nullptr);
+
+ ///@}
+
+ virtual ~C2Buffer() = default;
+
+ ///@name Buffer-specific arbitrary metadata handling
+ ///@{
+
+ /**
+ * Gets the list of metadata associated with this buffer.
+ *
+ * \return a constant list of info objects associated with this buffer.
+ */
+ const std::list<std::shared_ptr<const C2Info>> infos() const;
+
+ /**
+ * Attaches (or updates) an (existing) metadata for this buffer.
+ * If the metadata is stream specific, the stream information will be reset.
+ *
+ * \param info Metadata to update
+ *
+ * \retval C2_OK the metadata was successfully attached/updated.
+ * \retval C2_NO_MEMORY not enough memory to attach the metadata (this return value is not
+ * used if the same kind of metadata is already attached to the buffer).
+ */
+ C2Error setInfo(const std::shared_ptr<C2Info> &info);
+
+ /**
+ * Checks if there is a certain type of metadata attached to this buffer.
+ *
+ * \param index the parameter type of the metadata
+ *
+ * \return true iff there is a metadata with the parameter type attached to this buffer.
+ */
+ bool hasInfo(C2Param::Type index) const;
+ std::shared_ptr<C2Info> removeInfo(C2Param::Type index) const;
+ ///@}
+
+protected:
+ // no public constructor
+ inline C2Buffer() = default;
+
+private:
+// Type _mType;
+};
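+
+// Illustrative sketch (not part of the API): registering a pre-destroy notification as described
+// above. |onBufferGone| is a hypothetical callback matching the OnDestroyNotify signature,
+// |tracker| an arbitrary context pointer, and |buffer| a std::shared_ptr<C2Buffer>; note that
+// the method takes a pointer to the callback.
+//
+//   C2Buffer::OnDestroyNotify cb = onBufferGone;
+//   if (buffer->registerOnDestroyNotify(&cb, tracker) == C2_OK) {
+//       // ... and later, when no longer interested (with |cb| still alive):
+//       buffer->unregisterOnDestroyNotify(&cb, tracker);
+//   }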
+
+/**
+ * An extension of C2Info objects that can contain arbitrary buffer data.
+ *
+ * \note This object is not describable and contains opaque data.
+ */
+class C2InfoBuffer {
+public:
+ /**
+ * Gets the index of this info object.
+ *
+ * \return the parameter index.
+ */
+ const C2Param::Index index() const;
+
+ /**
+ * Gets the buffer's data.
+ *
+ * \return the buffer's data.
+ */
+ const C2BufferData data() const;
+};
+
+/// @}
+
+/**************************************************************************************************
+ ALLOCATIONS
+**************************************************************************************************/
+
+/// \defgroup allocator Allocation and memory placement
+/// @{
+
+/**
+ * Buffer/memory usage bits. These are used by the allocators to select optimal memory type/pool and
+ * buffer layout.
+ *
+ * \note This struct has public fields without getters/setters. All methods are inline.
+ */
+struct C2MemoryUsage {
+// public:
+ // TODO: match these to gralloc1.h
+ enum Consumer : uint64_t {
+ kSoftwareRead = GRALLOC_USAGE_SW_READ_OFTEN,
+ kRenderScriptRead = GRALLOC_USAGE_RENDERSCRIPT,
+ kTextureRead = GRALLOC_USAGE_HW_TEXTURE,
+ kHardwareComposer = GRALLOC_USAGE_HW_COMPOSER,
+ kHardwareEncoder = GRALLOC_USAGE_HW_VIDEO_ENCODER,
+ kProtectedRead = GRALLOC_USAGE_PROTECTED,
+ };
+
+ enum Producer : uint64_t {
+ kSoftwareWrite = GRALLOC_USAGE_SW_WRITE_OFTEN,
+ kRenderScriptWrite = GRALLOC_USAGE_RENDERSCRIPT,
+ kTextureWrite = GRALLOC_USAGE_HW_RENDER,
+ kCompositionTarget = GRALLOC_USAGE_HW_COMPOSER | GRALLOC_USAGE_HW_RENDER,
+ kHardwareDecoder = GRALLOC_USAGE_HW_VIDEO_ENCODER,
+ kProtectedWrite = GRALLOC_USAGE_PROTECTED,
+ };
+
+ uint64_t mConsumer; // e.g. input
+ uint64_t mProducer; // e.g. output
+};
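+
+// Illustrative sketch (not part of the API): the usage members hold gralloc usage bits, so a
+// buffer written by the CPU and read both by the CPU and a hardware video encoder could be
+// described as follows.
+//
+//   C2MemoryUsage usage;
+//   usage.mProducer = C2MemoryUsage::kSoftwareWrite;
+//   usage.mConsumer = C2MemoryUsage::kSoftwareRead | C2MemoryUsage::kHardwareEncoder;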
+
+/**
+ * \ingroup linear allocator
+ * 1D allocation interface.
+ */
+class C2LinearAllocation : public _C2LinearCapacityAspect {
+public:
+ /**
+ * Maps a portion of an allocation starting from |offset| with |size| into local process memory.
+ * Stores the starting address into |addr|, or NULL if the operation was unsuccessful.
+ * |fenceFd| is a file descriptor referring to an acquire sync fence object, or -1 if it is
+ * already safe to access the buffer contents.
+ *
+ * \param offset starting position of the portion to be mapped (this does not have to
+ * be page aligned)
+ * \param size size of the portion to be mapped (this does not have to be page
+ * aligned)
+ * \param usage the desired usage. \todo this must be kSoftwareRead and/or
+ * kSoftwareWrite.
+ * \param fenceFd a pointer to a file descriptor if an async mapping is requested. If
+ * not-null, an acquire fence FD will be stored here on success, or -1
+ * on failure. If null, the mapping will be synchronous.
+ * \param addr a pointer to where the starting address of the mapped portion will be
+ * stored. On failure, nullptr will be stored here.
+ *
+ * \todo Only one portion can be mapped at the same time - this is true for gralloc, but there
+ * is no need for this for 1D buffers.
+ * \todo Do we need to support sync operation as we could just wait for the fence?
+ *
+ * \retval C2_OK the operation was successful
+ * \retval C2_NO_PERMISSION no permission to map the portion
+ * \retval C2_TIMED_OUT the operation timed out
+ * \retval C2_NO_MEMORY not enough memory to complete the operation
+ * \retval C2_BAD_VALUE the parameters (offset/size) are invalid or outside the allocation, or
+ * the usage flags are invalid (caller error)
+ * \retval C2_CORRUPTED some unknown error prevented the operation from completing (unexpected)
+ */
+ virtual C2Error map(
+ size_t offset, size_t size, C2MemoryUsage usage, int *fenceFd /* nullable */,
+ void **addr /* nonnull */) = 0;
+
+ /**
+ * Unmaps a portion of an allocation at |addr| with |size|. These must be parameters previously
+ * passed to |map|; otherwise, this operation is a no-op.
+ *
+ * \param addr starting address of the mapped region
+ * \param size size of the mapped region
+ * \param fenceFd a pointer to a file descriptor if an async unmapping is requested. If
+ * not-null, a release fence FD will be stored here on success, or -1
+ * on failure. This fence signals when the original allocation contains
+ * any changes that happened to the mapped region. If null, the unmapping
+ * will be synchronous.
+ *
+ * \retval C2_OK the operation was successful
+ * \retval C2_TIMED_OUT the operation timed out
+ * \retval C2_BAD_VALUE the parameters (addr/size) do not correspond to previously mapped
+ * regions (caller error)
+ * \retval C2_CORRUPTED some unknown error prevented the operation from completing (unexpected)
+ * \retval C2_NO_PERMISSION no permission to unmap the portion (unexpected - system)
+ */
+ virtual C2Error unmap(void *addr, size_t size, int *fenceFd /* nullable */) = 0;
+
+ /**
+ * Returns true if this is a valid allocation.
+ *
+ * \todo remove?
+ */
+ virtual bool isValid() const = 0;
+
+ /**
+ * Returns a pointer to the allocation handle.
+ */
+ virtual const C2Handle *handle() const = 0;
+
+ /**
+ * Returns true if this is the same allocation as |other|.
+ */
+ virtual bool equals(const std::shared_ptr<C2LinearAllocation> &other) const = 0;
+
+protected:
+ // \todo should we limit allocation directly?
+ C2LinearAllocation(size_t capacity) : _C2LinearCapacityAspect(c2_min(capacity, UINT32_MAX)) {}
+ virtual ~C2LinearAllocation() = default;
+};
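+
+// Illustrative sketch (not part of the API): a synchronous CPU mapping of a 1D allocation via
+// map()/unmap() above. |alloc| is a hypothetical std::shared_ptr<C2LinearAllocation> and |usage|
+// a C2MemoryUsage requesting software read/write access.
+//
+//   void *addr = nullptr;
+//   if (alloc->map(0 /* offset */, alloc->capacity(), usage, nullptr /* sync */, &addr) == C2_OK) {
+//       // ... read or write up to capacity() bytes at |addr| ...
+//       alloc->unmap(addr, alloc->capacity(), nullptr /* sync */);
+//   }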
+
+/**
+ * \ingroup graphic allocator
+ * 2D allocation interface.
+ */
+class C2GraphicAllocation : public _C2PlanarCapacityAspect {
+public:
+ /**
+ * Maps a rectangular section (as defined by |rect|) of a 2D allocation into local process
+ * memory for flexible access. On success, it fills out |layout| with the plane specifications
+ * and fills the |addr| array with pointers to the first byte of the top-left pixel of each
+ * plane used. Otherwise, it leaves |layout| and |addr| untouched. |fenceFd| is a file
+ * descriptor referring to an acquire sync fence object, or -1 if it is already safe to access
+ * the buffer contents.
+ *
+ * \note Only one portion of the graphic allocation can be mapped at the same time. (This is
+ * from gralloc1 limitation.)
+ *
+ * \param rect section to be mapped (this does not have to be aligned)
+ * \param usage the desired usage. \todo this must be kSoftwareRead and/or
+ * kSoftwareWrite.
+ * \param fenceFd a pointer to a file descriptor if an async mapping is requested. If
+ * not-null, an acquire fence FD will be stored here on success, or -1
+ * on failure. If null, the mapping will be synchronous.
+ * \param layout a pointer to where the mapped planes' descriptors will be
+ * stored. On failure, nullptr will be stored here.
+ *
+ * \todo Do we need to support sync operation as we could just wait for the fence?
+ *
+ * \retval C2_OK the operation was successful
+ * \retval C2_NO_PERMISSION no permission to map the section
+ * \retval C2_ALREADY_EXISTS there is already a mapped region (caller error)
+ * \retval C2_TIMED_OUT the operation timed out
+ * \retval C2_NO_MEMORY not enough memory to complete the operation
+ * \retval C2_BAD_VALUE the parameters (rect) are invalid or outside the allocation, or the
+ * usage flags are invalid (caller error)
+ * \retval C2_CORRUPTED some unknown error prevented the operation from completing (unexpected)
+ *
+ */
+ virtual C2Error map(
+ C2Rect rect, C2MemoryUsage usage, int *fenceFd,
+ // TODO: return <addr, size> buffers with plane sizes
+ C2PlaneLayout *layout /* nonnull */, uint8_t **addr /* nonnull */) = 0;
+
+ /**
+ * Unmaps the last mapped rectangular section.
+ *
+ * \param fenceFd a pointer to a file descriptor if an async unmapping is requested. If
+ * not-null, a release fence FD will be stored here on success, or -1
+ * on failure. This fence signals when the original allocation contains
+ * any changes that happened to the mapped section. If null, the unmapping
+ * will be synchronous.
+ *
+ * \retval C2_OK the operation was successful
+ * \retval C2_TIMED_OUT the operation timed out
+ * \retval C2_NOT_FOUND there is no mapped region (caller error)
+ * \retval C2_CORRUPTED some unknown error prevented the operation from completing (unexpected)
+ * \retval C2_NO_PERMISSION no permission to unmap the section (unexpected - system)
+ */
+ virtual C2Error unmap(C2Fence *fenceFd /* nullable */) = 0;
+
+ /**
+ * Returns true if this is a valid allocation.
+ *
+ * \todo remove?
+ */
+ virtual bool isValid() const = 0;
+
+ /**
+ * Returns a pointer to the allocation handle.
+ */
+ virtual const C2Handle *handle() const = 0;
+
+ /**
+ * Returns true if this is the same allocation as |other|.
+ */
+ virtual bool equals(const std::shared_ptr<const C2GraphicAllocation> &other) = 0;
+
+protected:
+ virtual ~C2GraphicAllocation();
+};
+
+/**
+ * Allocators are used by the framework to allocate memory (allocations) for buffers. They can
+ * support either 1D or 2D allocations.
+ *
+ * \note In theory they could support both, but in practice, we will use only one or the other.
+ *
+ * Never constructed on stack.
+ *
+ * Allocators are provided by vendors.
+ */
+class C2Allocator {
+public:
+ /**
+ * Allocates a 1D allocation of given |capacity| and |usage|. If successful, the allocation is
+ * stored in |allocation|. Otherwise, |allocation| is set to 'nullptr'.
+ *
+ * \param capacity the size of requested allocation (the allocation could be slightly
+ * larger, e.g. to account for any system-required alignment)
+ * \param usage the memory usage info for the requested allocation. \note that the
+ * returned allocation may be later used/mapped with different usage.
+ * The allocator should lay out the buffer to be optimized for this usage,
+ * but must support any usage. One exception: protected buffers can
+ * only be used in a protected scenario.
+ * \param allocation pointer to where the allocation shall be stored on success. nullptr
+ * will be stored here on failure
+ *
+ * \retval C2_OK the allocation was successful
+ * \retval C2_NO_MEMORY not enough memory to complete the allocation
+ * \retval C2_TIMED_OUT the allocation timed out
+ * \retval C2_NO_PERMISSION no permission to complete the allocation
+ * \retval C2_BAD_VALUE capacity or usage are not supported (invalid) (caller error)
+ * \retval C2_UNSUPPORTED this allocator does not support 1D allocations
+ * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during allocation (unexpected)
+ */
+ virtual C2Error allocateLinearBuffer(
+ uint32_t capacity __unused, C2MemoryUsage usage __unused,
+ std::shared_ptr<C2LinearAllocation> *allocation /* nonnull */) {
+ *allocation = nullptr;
+ return C2_UNSUPPORTED;
+ }
+
+ /**
+ * (Re)creates a 1D allocation from a native |handle|. If successful, the allocation is stored
+ * in |allocation|. Otherwise, |allocation| is set to 'nullptr'.
+ *
+ * \param handle the handle for the existing allocation
+ * \param allocation pointer to where the allocation shall be stored on success. nullptr
+ * will be stored here on failure
+ *
+ * \retval C2_OK the allocation was recreated successfully
+ * \retval C2_NO_MEMORY not enough memory to recreate the allocation
+ * \retval C2_TIMED_OUT the recreation timed out (unexpected)
+ * \retval C2_NO_PERMISSION no permission to recreate the allocation
+ * \retval C2_BAD_VALUE invalid handle (caller error)
+ * \retval C2_UNSUPPORTED this allocator does not support 1D allocations
+ * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during allocation (unexpected)
+ */
+ virtual C2Error recreateLinearBuffer(
+ const C2Handle *handle __unused,
+ std::shared_ptr<C2LinearAllocation> *allocation /* nonnull */) {
+ *allocation = nullptr;
+ return C2_UNSUPPORTED;
+ }
+
+ /**
+ * Allocates a 2D allocation of given |width|, |height|, |format| and |usage|. If successful,
+ * the allocation is stored in |allocation|. Otherwise, |allocation| is set to 'nullptr'.
+ *
+ * \param width the width of requested allocation (the allocation could be slightly
+ * larger, e.g. to account for any system-required alignment)
+ * \param height the height of requested allocation (the allocation could be slightly
+ * larger, e.g. to account for any system-required alignment)
+ * \param format the pixel format of requested allocation. This could be a vendor
+ * specific format.
+ * \param usage the memory usage info for the requested allocation. \note that the
+ * returned allocation may be later used/mapped with different usage.
+ * The allocator should lay out the buffer to be optimized for this usage,
+ * but must support any usage. One exception: protected buffers can
+ * only be used in a protected scenario.
+ * \param allocation pointer to where the allocation shall be stored on success. nullptr
+ * will be stored here on failure
+ *
+ * \retval C2_OK the allocation was successful
+ * \retval C2_NO_MEMORY not enough memory to complete the allocation
+ * \retval C2_TIMED_OUT the allocation timed out
+ * \retval C2_NO_PERMISSION no permission to complete the allocation
+ * \retval C2_BAD_VALUE width, height, format or usage are not supported (invalid) (caller error)
+ * \retval C2_UNSUPPORTED this allocator does not support 2D allocations
+ * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during allocation (unexpected)
+ */
+ virtual C2Error allocateGraphicBuffer(
+ uint32_t width __unused, uint32_t height __unused, uint32_t format __unused,
+ C2MemoryUsage usage __unused,
+ std::shared_ptr<C2GraphicAllocation> *allocation /* nonnull */) {
+ *allocation = nullptr;
+ return C2_UNSUPPORTED;
+ }
+
+ /**
+ * (Re)creates a 2D allocation from a native handle. If successful, the allocation is stored
+ * in |allocation|. Otherwise, |allocation| is set to 'nullptr'.
+ *
+ * \param handle the handle for the existing allocation
+ * \param allocation pointer to where the allocation shall be stored on success. nullptr
+ * will be stored here on failure
+ *
+ * \retval C2_OK the allocation was recreated successfully
+ * \retval C2_NO_MEMORY not enough memory to recreate the allocation
+ * \retval C2_TIMED_OUT the recreation timed out (unexpected)
+ * \retval C2_NO_PERMISSION no permission to recreate the allocation
+ * \retval C2_BAD_VALUE invalid handle (caller error)
+ * \retval C2_UNSUPPORTED this allocator does not support 2D allocations
+ * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during recreation (unexpected)
+ */
+ virtual C2Error recreateGraphicBuffer(
+ const C2Handle *handle __unused,
+ std::shared_ptr<C2GraphicAllocation> *allocation /* nonnull */) {
+ *allocation = nullptr;
+ return C2_UNSUPPORTED;
+ }
+
+protected:
+ C2Allocator() = default;
+
+ virtual ~C2Allocator() = default;
+};
+
+/**
+ * Block allocators are used by components to allocate memory for output buffers. They can
+ * support either linear (1D), circular (1D) or graphic (2D) allocations.
+ *
+ * Never constructed on stack.
+ *
+ * Block allocators are provided by the framework.
+ */
+class C2BlockAllocator {
+public:
+ /**
+ * Allocates a linear writeable block of given |capacity| and |usage|. If successful, the
+ * block is stored in |block|. Otherwise, |block| is set to 'nullptr'.
+ *
+ * \param capacity the size of requested block.
+ * \param usage the memory usage info for the requested allocation. \note that the
+ * returned allocation may be later used/mapped with different usage.
+ * The allocator shall lay out the buffer to be optimized for this usage,
+ * but must support any usage. One exception: protected buffers can
+ * only be used in a protected scenario.
+ * \param block pointer to where the allocated block shall be stored on success. nullptr
+ * will be stored here on failure
+ *
+ * \retval C2_OK the allocation was successful
+ * \retval C2_NO_MEMORY not enough memory to complete the allocation
+ * \retval C2_TIMED_OUT the allocation timed out
+ * \retval C2_NO_PERMISSION no permission to complete the allocation
+ * \retval C2_BAD_VALUE capacity or usage are not supported (invalid) (caller error)
+ * \retval C2_UNSUPPORTED this allocator does not support linear allocations
+ * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during allocation (unexpected)
+ */
+ virtual C2Error allocateLinearBlock(
+ uint32_t capacity __unused, C2MemoryUsage usage __unused,
+ std::shared_ptr<C2LinearBlock> *block /* nonnull */) {
+ *block = nullptr;
+ return C2_UNSUPPORTED;
+ }
+
+ /**
+ * Allocates a circular writeable block of given |capacity| and |usage|. If successful, the
+ * block is stored in |block|. Otherwise, |block| is set to 'nullptr'.
+ *
+ * \param capacity the size of requested circular block. (the allocation could be slightly
+ * larger, e.g. to account for any system-required alignment)
+ * \param usage the memory usage info for the requested allocation. \note that the
+ * returned allocation may be later used/mapped with different usage.
+ * The allocator shall lay out the buffer to be optimized for this usage,
+ * but must support any usage. One exception: protected buffers can
+ * only be used in a protected scenario.
+ * \param block pointer to where the allocated block shall be stored on success. nullptr
+ * will be stored here on failure
+ *
+ * \retval C2_OK the allocation was successful
+ * \retval C2_NO_MEMORY not enough memory to complete the allocation
+ * \retval C2_TIMED_OUT the allocation timed out
+ * \retval C2_NO_PERMISSION no permission to complete the allocation
+ * \retval C2_BAD_VALUE capacity or usage are not supported (invalid) (caller error)
+ * \retval C2_UNSUPPORTED this allocator does not support circular allocations
+ * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during allocation (unexpected)
+ */
+ virtual C2Error allocateCircularBlock(
+ uint32_t capacity __unused, C2MemoryUsage usage __unused,
+ std::shared_ptr<C2CircularBlock> *block /* nonnull */) {
+ *block = nullptr;
+ return C2_UNSUPPORTED;
+ }
+
+ /**
+ * Allocates a 2D graphic block of given |width|, |height|, |format| and |usage|. If successful,
+ * the allocation is stored in |block|. Otherwise, |block| is set to 'nullptr'.
+ *
+ * \param width the width of requested allocation (the allocation could be slightly
+ * larger, e.g. to account for any system-required alignment)
+ * \param height the height of requested allocation (the allocation could be slightly
+ * larger, e.g. to account for any system-required alignment)
+ * \param format the pixel format of requested allocation. This could be a vendor
+ * specific format.
+ * \param usage the memory usage info for the requested allocation. \note that the
+ * returned allocation may be later used/mapped with different usage.
+ * The allocator should lay out the buffer to be optimized for this usage,
+ * but must support any usage. One exception: protected buffers can
+ * only be used in a protected scenario.
+ * \param block pointer to where the allocation shall be stored on success. nullptr
+ * will be stored here on failure
+ *
+ * \retval C2_OK the allocation was successful
+ * \retval C2_NO_MEMORY not enough memory to complete the allocation
+ * \retval C2_TIMED_OUT the allocation timed out
+ * \retval C2_NO_PERMISSION no permission to complete the allocation
+ * \retval C2_BAD_VALUE width, height, format or usage are not supported (invalid) (caller error)
+ * \retval C2_UNSUPPORTED this allocator does not support 2D allocations
+ * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during allocation (unexpected)
+ */
+ virtual C2Error allocateGraphicBlock(
+ uint32_t width __unused, uint32_t height __unused, uint32_t format __unused,
+ C2MemoryUsage usage __unused,
+ std::shared_ptr<C2GraphicBlock> *block /* nonnull */) {
+ *block = nullptr;
+ return C2_UNSUPPORTED;
+ }
+
+protected:
+ C2BlockAllocator() = default;
+
+ virtual ~C2BlockAllocator() = default;
+};
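+
+// Illustrative sketch (not part of the API): a component asking the framework-provided block
+// allocator for an output block. |blockAllocator| is a hypothetical
+// std::shared_ptr<C2BlockAllocator> and |usage| a C2MemoryUsage describing the intended access.
+//
+//   std::shared_ptr<C2LinearBlock> block;
+//   if (blockAllocator->allocateLinearBlock(65536 /* capacity */, usage, &block) == C2_OK) {
+//       // ... map the block, write the output, then share() it with consumers ...
+//   }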
+
+/// @}
+
+/// \cond INTERNAL
+
+/// \todo These are no longer used
+
+/// \addtogroup linear
+/// @{
+
+/** \deprecated */
+class C2LinearBuffer
+ : public C2Buffer, public _C2LinearRangeAspect,
+ public std::enable_shared_from_this<C2LinearBuffer> {
+public:
+ /** \todo what is this? */
+ const C2Handle *handle() const;
+
+protected:
+ inline C2LinearBuffer(const C2ConstLinearBlock &block);
+
+private:
+ class Impl;
+ Impl *mImpl;
+};
+
+class C2ReadCursor;
+
+class C2WriteCursor {
+public:
+ uint32_t remaining() const; // remaining data to be read
+ void commit(); // commits the current position. discard data before current position
+ void reset() const; // resets position to the last committed position
+ // slices off at most |size| bytes, and moves cursor ahead by the number of bytes
+ // sliced off.
+ C2ReadCursor slice(uint32_t size) const;
+ // slices off at most |size| bytes, and moves cursor ahead by the number of bytes
+ // sliced off.
+ C2WriteCursor reserve(uint32_t size);
+ // bool read(T&);
+ // bool write(T&);
+ C2Fence waitForSpace(uint32_t size);
+};
+
+/// @}
+
+/// \addtogroup graphic
+/// @{
+
+struct C2ColorSpace {
+//public:
+ enum Standard {
+ BT601,
+ BT709,
+ BT2020,
+ // TODO
+ };
+
+ enum Range {
+ LIMITED,
+ FULL,
+ // TODO
+ };
+
+ enum TransferFunction {
+ BT709Transfer,
+ BT2020Transfer,
+ HybridLogGamma2,
+ HybridLogGamma4,
+ // TODO
+ };
+};
+
+/** \deprecated */
+class C2GraphicBuffer : public C2Buffer {
+public:
+ // constant attributes
+ inline uint32_t width() const { return mWidth; }
+ inline uint32_t height() const { return mHeight; }
+ inline uint32_t format() const { return mFormat; }
+ inline const C2MemoryUsage usage() const { return mUsage; }
+
+ // modifiable attributes
+
+
+ virtual const C2ColorSpace colorSpace() const = 0;
+ // best effort
+ virtual void setColorSpace_be(const C2ColorSpace &colorSpace) = 0;
+ virtual bool setColorSpace(const C2ColorSpace &colorSpace) = 0;
+
+ const C2Handle *handle() const;
+
+protected:
+ uint32_t mWidth;
+ uint32_t mHeight;
+ uint32_t mFormat;
+ C2MemoryUsage mUsage;
+
+ class Impl;
+ Impl *mImpl;
+};
+
+/// @}
+
+/// \endcond
+
+/// @}
+
+} // namespace android
+
+#endif // C2BUFFER_H_
diff --git a/media/libstagefright/codec2/include/C2Component.h b/media/libstagefright/codec2/include/C2Component.h
new file mode 100644
index 0000000..1ee9302
--- /dev/null
+++ b/media/libstagefright/codec2/include/C2Component.h
@@ -0,0 +1,685 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef C2COMPONENT_H_
+
+#define C2COMPONENT_H_
+
+#include <stdbool.h>
+#include <stdint.h>
+
+#include <list>
+#include <memory>
+#include <vector>
+#include <functional>
+
+#include <C2Param.h>
+#include <C2Work.h>
+
+namespace android {
+
+/// \defgroup components Components
+/// @{
+
+class C2Component;
+
+class C2ComponentListener {
+public:
+ virtual void onWorkDone(std::weak_ptr<C2Component> component,
+ std::vector<std::unique_ptr<C2Work>> workItems) = 0;
+
+ virtual void onTripped(std::weak_ptr<C2Component> component,
+ std::vector<std::shared_ptr<C2SettingResult>> settingResult) = 0;
+
+ virtual void onError(std::weak_ptr<C2Component> component,
+ uint32_t errorCode) = 0;
+
+ // virtual void onTunnelReleased(<from>, <to>) = 0;
+
+ // virtual void onComponentReleased(<id>) = 0;
+
+protected:
+ virtual ~C2ComponentListener();
+};
+
+/**
+ * Component interface object. This object contains all of the configuration of a potential or
+ * actual component. It can be created and used independently of an actual C2Component instance to
+ * query support and parameters for various component settings and configurations for a potential
+ * component. Actual components also expose this interface.
+ */
+
+class C2ComponentInterface {
+public:
+ // ALWAYS AVAILABLE METHODS
+ // =============================================================================================
+
+ /**
+ * Returns the name of this component or component interface object.
+ * This is a unique name for this component or component interface 'class'; however, multiple
+ * instances of this component SHALL have the same name.
+ *
+ * This method MUST be supported in any state. This call does not change the state nor the
+ * internal states of the component.
+ *
+ * This method MUST be "non-blocking" and return within 1ms.
+ *
+ * \return the name of this component or component interface object.
+ * \retval an empty string if there was not enough memory to allocate the actual name.
+ */
+ virtual C2String getName() const = 0;
+
+ /**
+ * Returns a unique ID for this component or interface object.
+ * This ID is used as work targets, unique work IDs, and when configuring tunneling.
+ *
+ * This method MUST be supported in any state. This call does not change the state nor the
+ * internal states of the component.
+ *
+ * This method MUST be "non-blocking" and return within 1ms.
+ *
+ * \return a unique node ID for this component or component interface instance.
+ */
+ virtual node_id getId() const = 0;
+
+ /**
+ * Queries a set of parameters from the component or interface object.
+ * Querying is performed at best effort: the component SHALL query all supported parameters and
+ * skip unsupported ones, or heap allocated parameters that could not be allocated. Any errors
+ * are communicated in the return value. Additionally, preallocated (e.g. stack) parameters that
+ * could not be queried are invalidated. Parameters to be allocated on the heap are omitted from
+ * the result.
+ *
+ * \note Parameter values do not depend on the order of query.
+ *
+ * \todo This method cannot be used to query info-buffers. Is that a problem?
+ *
+ * This method MUST be supported in any state. This call does not change the state nor the
+ * internal states of the component.
+ *
+ * This method MUST be "non-blocking" and return within 1ms.
+ *
+ * \param[in,out] stackParams a list of params queried. These are initialized specific to each
+ * setting; e.g. size and index are set and the rest of the members are
+ * cleared.
+ * \note Flexible settings that are of incorrect size will be invalidated.
+ * \param[in] heapParamIndices a vector of param indices for params to be queried and returned on the
+ * heap. These parameters will be returned in heapParams. Unsupported param
+ * indices will be ignored.
+ * \param[out] heapParams a list of params to which the supported heap parameters will be
+ * appended in the order they appear in heapParamIndices.
+ *
+ * \retval C2_OK all parameters could be queried
+ * \retval C2_BAD_INDEX all supported parameters could be queried, but some parameters were not
+ * supported
+ * \retval C2_NO_MEMORY could not allocate memory for a supported parameter
+ * \retval C2_CORRUPTED some unknown error prevented the querying of the parameters
+ * (unexpected)
+ */
+ virtual status_t query_nb(
+ const std::vector<C2Param* const> &stackParams,
+ const std::vector<C2Param::Index> &heapParamIndices,
+ std::vector<std::unique_ptr<C2Param>>* const heapParams) const = 0;
+
+ /**
+ * Sets a set of parameters for the component or interface object.
+ * Tuning is performed at best effort: the component SHALL update all supported configuration at
+ * best effort (unless configured otherwise) and skip unsupported ones. Any errors are
+ * communicated in the return value and in |failures|.
+ *
+ * \note Parameter tuning DOES depend on the order of the tuning parameters. E.g. some parameter
+ * update may allow some subsequent parameter update.
+ *
+ * This method MUST be supported in any state.
+ *
+ * This method MUST be "non-blocking" and return within 1ms.
+ *
+ * \param[in,out] params a list of parameter updates. These will be updated to the actual
+ * parameter values after the updates (this is because tuning is performed
+ * at best effort).
+ * \todo params that could not be updated are not marked here, so are
+ * confusing - are they "existing" values or intended to be configured
+ * values?
+ * \param[out] failures a list of parameter failures
+ *
+ * \retval C2_OK all parameters could be updated successfully
+ * \retval C2_BAD_INDEX all supported parameters could be updated successfully, but some
+ * parameters were not supported
+ * \retval C2_BAD_VALUE some supported parameters could not be updated successfully because
+ * they contained unsupported values. These are returned in |failures|.
+ * \retval C2_NO_MEMORY some supported parameters could not be updated successfully because
+ * they contained unsupported values, but could not allocate a failure
+ * object for them.
+ * \retval C2_CORRUPTED some unknown error prevented the update of the parameters
+ * (unexpected)
+ */
+ virtual status_t config_nb(
+ const std::vector<C2Param* const> &params,
+ std::vector<std::unique_ptr<C2SettingResult>>* const failures) = 0;
+
+ /**
+ * Atomically sets a set of parameters for the component or interface object.
+ *
+ * \note This method is used mainly for reserving resources for a component.
+ *
+ * The component SHALL update all supported configuration at
+ * best effort (TBD) (unless configured otherwise) and skip unsupported ones. Any errors are
+ * communicated in the return value and in |failures|.
+ *
+ * \note Parameter tuning DOES depend on the order of the tuning parameters. E.g. some parameter
+ * update may allow some subsequent parameter update.
+ *
+ * This method MUST be supported in any state.
+ *
+ * This method may be momentarily blocking, but MUST return within 5ms.
+ *
+ * \param params[in,out] a list of parameter updates. These will be updated to the actual
+ * parameter values after the updates (this is because tuning is performed
+ * at best effort).
+ * \todo params that could not be updated are not marked here, so are
+ * confusing - are they "existing" values or intended to be configured
+ * values?
+ * \param failures[out] a list of parameter failures
+ *
+ * \retval C2_OK all parameters could be updated successfully
+ * \retval C2_BAD_INDEX all supported parameters could be updated successfully, but some
+ * parameters were not supported
+ * \retval C2_BAD_VALUE some supported parameters could not be updated successfully because
+ * they contained unsupported values. These are returned in |failures|.
+ * \retval C2_NO_MEMORY some supported parameters could not be updated successfully because
+ * they contained unsupported values, but could not allocate a failure
+ * object for them.
+ * \retval C2_CORRUPTED some unknown error prevented the update of the parameters
+ * (unexpected)
+ */
+ virtual status_t commit_sm(
+ const std::vector<C2Param* const> &params,
+ std::vector<std::unique_ptr<C2SettingResult>>* const failures) = 0;
+
+ // TUNNELING
+ // =============================================================================================
+
+ /**
+ * Creates a tunnel from this component to the target component.
+ *
+ * If the component is successfully created, subsequent work items queued may include a
+ * tunneled path between these components.
+ *
+ * This method MUST be supported in any state.
+ *
+ * This method may be momentarily blocking, but MUST return within 5ms.
+ *
+ * \retval C2_OK the tunnel was successfully created
+ * \retval C2_BAD_INDEX the target component does not exist
+ * \retval C2_ALREADY_EXISTS the tunnel already exists
+ * \retval C2_UNSUPPORTED the tunnel is not supported
+ *
+ * \retval C2_TIMED_OUT could not create the tunnel within the time limit (unexpected)
+ * \retval C2_CORRUPTED some unknown error prevented the creation of the tunnel (unexpected)
+ */
+ virtual status_t createTunnel_sm(node_id targetComponent) = 0;
+
+ /**
+ * Releases a tunnel from this component to the target component.
+ *
+ * The release of a tunnel is delayed while there are pending work items for the tunnel.
+ * After releasing a tunnel, subsequent work items queued MUST NOT include a tunneled
+ * path between these components.
+ *
+ * This method MUST be supported in any state.
+ *
+ * This method may be momentarily blocking, but MUST return within 5ms.
+ *
+ * \retval C2_OK the tunnel was marked for release successfully
+ * \retval C2_BAD_INDEX the target component does not exist
+ * \retval C2_NOT_FOUND the tunnel does not exist
+ *
+ * \retval C2_TIMED_OUT could not mark the tunnel for release within the time limit (unexpected)
+ * \retval C2_CORRUPTED some unknown error prevented the release of the tunnel (unexpected)
+ */
+ virtual status_t releaseTunnel_sm(node_id targetComponent) = 0;
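+
+ // A usage sketch for tunneling (illustrative only; |source| and |sinkId| are assumptions
+ // for this example):
+ //
+ //   status_t err = source->createTunnel_sm(sinkId);  // C2_ALREADY_EXIST if repeated
+ //   // ... queue work that includes a tunneled path between the components ...
+ //   err = source->releaseTunnel_sm(sinkId);          // delayed while tunneled work is pending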
+
+
+ // REFLECTION MECHANISM (USED FOR EXTENSION)
+ // =============================================================================================
+
+ /**
+ * Returns the parameter reflector.
+ *
+ * This is used to describe parameter fields.
+ *
+ * \return a shared parameter reflector object.
+ */
+ virtual std::shared_ptr<C2ParamReflector> getParamReflector() const = 0;
+
+ /**
+ * Returns the set of supported parameters.
+ *
+ * \param[out] params a vector of supported parameters will be appended to this vector.
+ *
+ * \retval C2_OK the operation completed successfully.
+ * \retval C2_NO_MEMORY not enough memory to complete this method.
+ */
+ virtual status_t getSupportedParams(
+ std::vector<std::shared_ptr<C2ParamDescriptor>> * const params) const = 0;
+
+ /**
+ *
+ * \todo should this take a list considering that setting some fields may further limit other
+ * fields in the same list?
+ */
+ virtual status_t getSupportedValues(
+ const std::vector<const C2ParamField> fields,
+ std::vector<C2FieldSupportedValues>* const values) const = 0;
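+
+ // A usage sketch for getSupportedValues() (illustrative only; the parameter type and its
+ // mValue field are assumptions based on C2ParamDef.h, which is not shown here):
+ //
+ //   C2StreamFormatConfig format;                      // hypothetical default construction
+ //   std::vector<const C2ParamField> fields{
+ //       C2ParamField(&format, &C2StreamFormatConfig::mValue) };
+ //   std::vector<C2FieldSupportedValues> values;
+ //   status_t err = intf->getSupportedValues(fields, &values);
+ //   // |values| then lists the supported range or set of values for each queried field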
+
+ virtual ~C2ComponentInterface() = default;
+};
+
+class C2Component {
+public:
+ // METHODS AVAILABLE WHEN RUNNING
+ // =============================================================================================
+
+ /**
+ * Queues up work for the component.
+ *
+ * This method MUST be supported in running (including tripped) states.
+ *
+ * This method MUST be "non-blocking" and return within 1ms
+ *
+ * It is acceptable for this method to return OK and return an error value using the
+ * onWorkDone() callback.
+ *
+ * \retval C2_OK the work was successfully queued
+ * \retval C2_BAD_INDEX some component(s) in the work do(es) not exist
+ * \retval C2_UNSUPPORTED the components are not tunneled
+ *
+ * \retval C2_NO_MEMORY not enough memory to queue the work
+ * \retval C2_CORRUPTED some unknown error prevented queuing the work (unexpected)
+ */
+ virtual status_t queue_nb(std::list<std::unique_ptr<C2Work>>* const items) = 0;
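+
+ // A usage sketch for queue_nb() (illustrative only; |component| and the way |work| is built
+ // are assumptions; C2Work is defined elsewhere):
+ //
+ //   std::list<std::unique_ptr<C2Work>> items;
+ //   items.push_back(std::move(work));             // |work| prepared by the client
+ //   status_t err = component->queue_nb(&items);   // returns within 1ms
+ //   // completed or abandoned work is returned asynchronously via onWorkDone()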
+
+ /**
+ * Announces work to be queued later for the component. This reserves a slot in the queue
+ * to ensure correct work ordering even if the work is queued later.
+ *
+ * This method MUST be supported in running (including tripped) states.
+ *
+ * This method MUST be "non-blocking" and return within 1 ms
+ *
+ * \retval C2_OK the work announcement has been successfully recorded
+ * \retval C2_BAD_INDEX some component(s) in the work outline do(es) not exist
+ * \retval C2_UNSUPPORTED the components are not tunneled
+ *
+ * \retval C2_NO_MEMORY not enough memory to record the work announcement
+ * \retval C2_CORRUPTED some unknown error prevented recording the announcement (unexpected)
+ *
+ * \todo Can this be rolled into queue_nb?
+ */
+ virtual status_t announce_nb(const std::vector<C2WorkOutline> &items) = 0;
+
+ /**
+ * Discards and abandons any pending work for the component, and optionally any component
+ * downstream.
+ *
+ * \todo define this: we could flush all work before last item queued for component across all
+ * components linked to this; flush only work items that are queued to this
+ * component
+ * \todo return work # of last flushed item; or all flushed (but not returned items)
+ * \todo we could make flush take a work item and flush all work before/after that item to allow
+ * TBD (slicing/seek?)
+ * \todo we could simply take a list of numbers and flush those... this is bad for decoders
+ * also, what would happen to fine-grained references?
+ *
+ * This method MUST be supported in running (including tripped) states.
+ *
+ * This method may be momentarily blocking, but MUST return within 5ms.
+ *
+ * Work that could be immediately abandoned/discarded SHALL be returned in |flushedWork|; this
+ * can be done in an arbitrary order.
+ *
+ * Work that could not be abandoned or discarded immediately SHALL be marked to be
+ * discarded at the earliest opportunity, and SHALL be returned via the onWorkDone() callback.
+ *
+ * \param flushThrough flush work from this component and all components connected downstream
+ * from it via tunneling.
+ *
+ * \retval C2_OK the flush was initiated successfully
+ * \retval C2_TIMED_OUT the flush could not be completed within the time limit (unexpected)
+ * \retval C2_CORRUPTED some unknown error prevented the flush from completing (unexpected)
+ */
+ virtual status_t flush_sm(bool flushThrough, std::list<std::unique_ptr<C2Work>>* const flushedWork) = 0;
+
+ /**
+ * Drains the component, and optionally downstream components
+ *
+ * \todo define this; we could place EOS to all upstream components, just this component, or
+ * all upstream and downstream components.
+ * \todo should EOS carry over to downstream components?
+ *
+ * Marks the last work item as "end-of-stream", so the component is notified not to wait for
+ * further work before it processes work already queued. This method is called to set the
+ * end-of-stream flag after work has been queued. The client can continue to queue further work
+ * immediately after this method returns.
+ *
+ * This method MUST be supported in running (including tripped) states.
+ *
+ * This method MUST be "non-blocking" and return within 1ms.
+ *
+ * Work that is completed SHALL be returned via the onWorkDone() callback.
+ *
+ * \param drainThrough marks the last work item with a persistent "end-of-stream" marker that
+ * will drain downstream components.
+ *
+ * \todo this may confuse work-ordering downstream; could be a mode enum
+ *
+ * \retval C2_OK the drain request has been successfully recorded
+ * \retval C2_TIMED_OUT the drain could not be initiated within the time limit (unexpected)
+ * \retval C2_CORRUPTED some unknown error prevented initiating the drain (unexpected)
+ */
+ virtual status_t drain_nb(bool drainThrough) = 0;
+
+ // STATE CHANGE METHODS
+ // =============================================================================================
+
+ /**
+ * Starts the component.
+ *
+ * This method MUST be supported in stopped state.
+ *
+ * \todo This method MUST return within 500ms. It seems this should be able to return quickly, as
+ * there are no immediate guarantees, though there are guarantees for responsiveness immediately
+ * after start() returns.
+ *
+ * \todo Could we just start a ComponentInterface to get a Component?
+ *
+ * \retval C2_OK the component has started successfully
+ * \retval C2_NO_MEMORY not enough memory to start the component
+ * \retval C2_TIMED_OUT the component could not be started within the time limit (unexpected)
+ * \retval C2_CORRUPTED some unknown error prevented starting the component (unexpected)
+ */
+ virtual status_t start() = 0;
+
+ /**
+ * Stops the component.
+ *
+ * This method MUST be supported in running (including tripped) state.
+ *
+ * This method MUST return within 500ms.
+ *
+ * Upon this call, all pending work SHALL be abandoned.
+ *
+ * \todo should this return completed work, since client will just free it? Perhaps just to
+ * verify accounting.
+ *
+ * This does not alter any settings and tunings that may have resulted in a tripped state.
+ * (Is this material given the definition? Perhaps in case we want to start again.)
+ */
+ virtual status_t stop() = 0;
+
+ /**
+ * Resets the component.
+ *
+ * This method MUST be supported in running (including tripped) state.
+ *
+ * This method MUST be supported during any other call (\todo or just blocking ones?)
+ *
+ * This method MUST return within 500ms.
+ *
+ * After this call returns, all work is/must be abandoned and all references should be released.
+ *
+ * \todo should this return completed work, since client will just free it? Also, if it unblocks
+ * a stop, where should completed work be returned?
+ *
+ * This brings settings back to their default - "guaranteeing" no tripped space.
+ *
+ * \todo reclaim support - it seems that since ownership is passed, this will allow reclaiming stuff.
+ */
+ virtual void reset() = 0;
+
+ /**
+ * Releases the component.
+ *
+ * This method MUST be supported in any state. (\todo Or shall we force reset() first to bring
+ * to a known state?)
+ *
+ * This method MUST return within 500ms.
+ *
+ * \todo should this return completed work, since client will just free it? Also, if it unblocks
+ * a stop, where should completed work be returned?
+ *
+ * TODO: does it matter if this call has a short time limit? Yes, as upon return all references
+ * shall be abandoned.
+ */
+ virtual void release() = 0;
+
+ /**
+ * Returns the interface for this component.
+ *
+ * \return the component interface
+ */
+ virtual std::shared_ptr<C2ComponentInterface> intf() = 0;
+
+protected:
+ virtual ~C2Component() = default;
+};
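+
+// A hedged end-to-end sketch of driving a C2Component (illustrative only; |comp| and the
+// work items are assumptions for this example):
+//
+//   comp->start();             // stopped -> running
+//   comp->queue_nb(&items);    // queue work while running
+//   comp->drain_nb(false);     // mark end-of-stream after the last queued item
+//   // ... wait for remaining work via onWorkDone() ...
+//   comp->stop();              // abandons any still-pending work
+//   comp->release();           // all references must be abandoned after this returns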
+
+class C2FrameInfoParser {
+public:
+ /**
+ * \return the content type supported by this info parser.
+ *
+ * \todo this may be redundant
+ */
+ virtual C2StringLiteral getType() const = 0;
+
+ /**
+ * \return a vector of supported parameter indices parsed by this info parser.
+ *
+ * \todo sticky vs. non-sticky params? this may be communicated by param-reflector.
+ */
+ virtual const std::vector<C2Param::Index> getParsedParams() const = 0;
+
+ /**
+ * Resets this info parser. This brings this parser to its initial state after creation.
+ *
+ * This method SHALL return within 5ms.
+ *
+ * \retval C2_OK the info parser was reset
+ * \retval C2_TIMED_OUT could not reset the parser within the time limit (unexpected)
+ * \retval C2_CORRUPTED some unknown error prevented the resetting of the parser (unexpected)
+ */
+ virtual status_t reset() { return C2_OK; }
+
+ virtual status_t parseFrame(C2BufferPack &frame);
+
+ virtual ~C2FrameInfoParser() = default;
+};
+
+struct C2ComponentInfo {
+ // TBD
+
+};
+
+class C2AllocatorStore {
+public:
+ // TBD
+
+ enum Type {
+ LINEAR, ///< basic linear allocator type
+ GRALLOC, ///< basic gralloc allocator type
+ };
+
+ /**
+ * Creates an allocator.
+ *
+ * \param type the type of allocator to create
+ * \param allocator shared pointer where the created allocator is stored. Cleared on failure
+ * and updated on success.
+ *
+ * \retval C2_OK the allocator was created successfully
+ * \retval C2_TIMED_OUT could not create the allocator within the time limit (unexpected)
+ * \retval C2_CORRUPTED some unknown error prevented the creation of the allocator (unexpected)
+ *
+ * \retval C2_NOT_FOUND no such allocator
+ * \retval C2_NO_MEMORY not enough memory to create the allocator
+ */
+ virtual status_t createAllocator(Type type, std::shared_ptr<C2Allocator>* const allocator) = 0;
+
+ virtual ~C2AllocatorStore() = default;
+};
+
+class C2ComponentStore {
+public:
+ /**
+ * Creates a component.
+ *
+ * This method SHALL return within 100ms.
+ *
+ * \param name name of the component to create
+ * \param component shared pointer where the created component is stored. Cleared on
+ * failure and updated on success.
+ *
+ * \retval C2_OK the component was created successfully
+ * \retval C2_TIMED_OUT could not create the component within the time limit (unexpected)
+ * \retval C2_CORRUPTED some unknown error prevented the creation of the component (unexpected)
+ *
+ * \retval C2_NOT_FOUND no such component
+ * \retval C2_NO_MEMORY not enough memory to create the component
+ */
+ virtual status_t createComponent(C2String name, std::shared_ptr<C2Component>* const component);
+
+ /**
+ * Creates a component interface.
+ *
+ * This method SHALL return within 100ms.
+ *
+ * \param name name of the component interface to create
+ * \param interface shared pointer where the created interface is stored
+ *
+ * \retval C2_OK the component interface was created successfully
+ * \retval C2_TIMED_OUT could not create the component interface within the time limit
+ * (unexpected)
+ * \retval C2_CORRUPTED some unknown error prevented the creation of the component interface
+ * (unexpected)
+ *
+ * \retval C2_NOT_FOUND no such component interface
+ * \retval C2_NO_MEMORY not enough memory to create the component interface
+ *
+ * \todo Do we need an interface, or could this just be a component that is never started?
+ */
+ virtual status_t createInterface(C2String name, std::shared_ptr<C2ComponentInterface>* const interface);
+
+ /**
+ * Returns the list of components supported by this component store.
+ *
+ * This method SHALL return within 1ms.
+ *
+ * \return a vector of component information.
+ */
+ virtual std::vector<std::unique_ptr<const C2ComponentInfo>> getComponents();
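+
+ // A usage sketch for the component store (illustrative only; |store| and the component
+ // name are assumptions for this example):
+ //
+ //   std::shared_ptr<C2Component> comp;
+ //   if (store->createComponent("sample.decoder", &comp) == C2_OK) {
+ //       std::shared_ptr<C2ComponentInterface> intf = comp->intf();
+ //       // query/configure via |intf|, then comp->start() to begin processing
+ //   }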
+
+ // -------------------------------------- UTILITY METHODS --------------------------------------
+
+ // on-demand buffer layout conversion (swizzling)
+ virtual status_t copyBuffer(std::shared_ptr<C2GraphicBuffer> src, std::shared_ptr<C2GraphicBuffer> dst);
+
+ // status_t selectPreferredColor(formats<A>, formats<B>);
+
+ // GLOBAL SETTINGS
+ // system-wide stride & slice-height (???)
+
+ /**
+ * Queries a set of system-wide parameters.
+ * Querying is performed at best effort: the store SHALL query all supported parameters and
+ * skip unsupported ones, or heap allocated parameters that could not be allocated. Any errors
+ * are communicated in the return value. Additionally, preallocated (e.g. stack) parameters that
+ * could not be queried are invalidated. Parameters to be allocated on the heap that could not
+ * be queried are omitted from the result.
+ *
+ * \note Parameter values do not depend on the order of query.
+ *
+ * This method MUST be "non-blocking" and return within 1ms.
+ *
+ * \param stackParams a list of params queried. These are initialized specific to each
+ * setting; e.g. size and index are set and rest of the members are
+ * cleared.
+ * NOTE: Flexible settings that are of incorrect size will be invalidated.
+ * \param heapParamIndices a vector of param indices for params to be queried and returned on the
+ * heap. These parameters will be returned in heapParams. Unsupported param
+ * indices will be ignored.
+ * \param heapParams a list of params to which the supported heap parameters will be
+ * appended in the order they appear in heapParamIndices.
+ *
+ * \retval C2_OK all parameters could be queried
+ * \retval C2_BAD_INDEX all supported parameters could be queried, but some parameters were not
+ * supported
+ * \retval C2_NO_MEMORY could not allocate memory for a supported parameter
+ * \retval C2_CORRUPTED some unknown error prevented the querying of the parameters
+ * (unexpected)
+ */
+ virtual status_t query_nb(
+ const std::vector<C2Param* const> &stackParams,
+ const std::vector<C2Param::Index> &heapParamIndices,
+ std::vector<std::unique_ptr<C2Param>>* const heapParams) = 0;
+
+ /**
+ * Sets a set of system-wide parameters.
+ *
+ * \note There are no settable system-wide parameters defined thus far, but may be added in the
+ * future.
+ *
+ * Tuning is performed at best effort: the store SHALL update all supported configuration at
+ * best effort (unless configured otherwise) and skip unsupported ones. Any errors are
+ * communicated in the return value and in |failures|.
+ *
+ * \note Parameter tuning DOES depend on the order of the tuning parameters. E.g. some parameter
+ * update may allow some subsequent parameter update.
+ *
+ * This method MUST be "non-blocking" and return within 1ms.
+ *
+ * \param params a list of parameter updates. These will be updated to the actual
+ * parameter values after the updates (this is because tuning is performed
+ * at best effort).
+ * \todo params that could not be updated are not marked here, so are
+ * confusing - are they "existing" values or intended to be configured
+ * values?
+ * \param failures a list of parameter failures
+ *
+ * \retval C2_OK all parameters could be updated successfully
+ * \retval C2_BAD_INDEX all supported parameters could be updated successfully, but some
+ * parameters were not supported
+ * \retval C2_BAD_VALUE some supported parameters could not be updated successfully because
+ * they contained unsupported values. These are returned in |failures|.
+ * \retval C2_NO_MEMORY some supported parameters could not be updated successfully because
+ * they contained unsupported values, but a failure object could not be
+ * allocated for them.
+ * \retval C2_CORRUPTED some unknown error prevented the update of the parameters
+ * (unexpected)
+ */
+ virtual status_t config_nb(
+ const std::vector<C2Param* const> &params,
+ std::list<std::unique_ptr<C2SettingResult>>* const failures) = 0;
+
+ virtual ~C2ComponentStore() = default;
+};
+
+// ================================================================================================
+
+/// @}
+
+} // namespace android
+
+#endif // C2COMPONENT_H_
diff --git a/media/libstagefright/codec2/include/C2Config.h b/media/libstagefright/codec2/include/C2Config.h
new file mode 100644
index 0000000..30e9193
--- /dev/null
+++ b/media/libstagefright/codec2/include/C2Config.h
@@ -0,0 +1,251 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef C2CONFIG_H_
+#define C2CONFIG_H_
+
+#include <C2ParamDef.h>
+
+namespace android {
+
+/// \defgroup config Component configuration
+/// @{
+
+#ifndef DEFINE_C2_ENUM_VALUE_AUTO_HELPER
+#define DEFINE_C2_ENUM_VALUE_AUTO_HELPER(name, type, prefix, ...)
+#define DEFINE_C2_ENUM_VALUE_CUSTOM_HELPER(name, type, names, ...)
+#endif
+
+#define C2ENUM(name, type, ...) \
+enum name : type { __VA_ARGS__ }; \
+DEFINE_C2_ENUM_VALUE_AUTO_HELPER(name, type, NULL, __VA_ARGS__)
+
+#define C2ENUM_CUSTOM_PREFIX(name, type, prefix, ...) \
+enum name : type { __VA_ARGS__ }; \
+DEFINE_C2_ENUM_VALUE_AUTO_HELPER(name, type, prefix, __VA_ARGS__)
+
+#define C2ENUM_CUSTOM_NAMES(name, type, names, ...) \
+enum name : type { __VA_ARGS__ }; \
+DEFINE_C2_ENUM_VALUE_CUSTOM_HELPER(name, type, names, __VA_ARGS__)
+
+enum C2ParamIndexKind : uint32_t {
+ /// domain
+ kParamIndexDomain,
+
+ /// configuration descriptors
+ kParamIndexSupportedParams,
+ kParamIndexRequiredParams,
+ kParamIndexReadOnlyParams,
+ kParamIndexRequestedInfos,
+
+ /// latency
+ kParamIndexLatency,
+
+ // generic time behavior
+ kParamIndexTemporal,
+
+ /// port configuration
+ kParamIndexMime,
+ kParamIndexStreamCount,
+ kParamIndexFormat,
+
+ // video info
+
+ kParamIndexStructStart = 0x1,
+ kParamIndexVideoSize,
+ kParamIndexMaxVideoSizeHint,
+
+ kParamIndexParamStart = 0x800,
+};
+
+C2ENUM(C2DomainKind, int32_t,
+ C2DomainVideo,
+ C2DomainAudio,
+ C2DomainOther = C2DomainAudio + 1
+);
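+
+// For reference, the C2ENUM invocation above expands roughly to the following (the helper
+// macro is a no-op when DEFINE_C2_ENUM_VALUE_AUTO_HELPER is not defined elsewhere):
+//
+//   enum C2DomainKind : int32_t {
+//       C2DomainVideo,
+//       C2DomainAudio,
+//       C2DomainOther = C2DomainAudio + 1
+//   };
+//   DEFINE_C2_ENUM_VALUE_AUTO_HELPER(C2DomainKind, int32_t, NULL,
+//       C2DomainVideo, C2DomainAudio, C2DomainOther = C2DomainAudio + 1)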
+
+// read-only
+
+typedef C2GlobalParam<C2Info, C2SimpleValueStruct<C2DomainKind>, kParamIndexDomain> C2ComponentDomainInfo;
+// typedef C2GlobalParam<C2Info, C2Uint32Value, kParamIndexDomain> C2ComponentDomainInfo;
+//DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleValueStruct<C2DomainKind>, { C2FIELD(mValue, "value") });
+
+// read-only
+typedef C2GlobalParam<C2Info, C2Uint32Array, kParamIndexSupportedParams> C2SupportedParamsInfo;
+
+/// \todo do we define it as a param?
+// read-only
+typedef C2GlobalParam<C2Info, C2Uint32Array, kParamIndexRequiredParams> C2RequiredParamsInfo;
+
+// read-only
+typedef C2GlobalParam<C2Info, C2Uint32Array, kParamIndexReadOnlyParams> C2ReadOnlyParamsInfo;
+
+// read-only
+typedef C2GlobalParam<C2Info, C2Uint32Array, kParamIndexRequestedInfos> C2RequestedInfosInfo;
+
+// read-only
+//typedef C2GlobalParam<C2Info, C2Uint32Value, kParamIndexRequestedInfos> C2RequestedInfosInfo;
+
+/// latency
+
+typedef C2PortParam<C2Info, C2Uint32Value, kParamIndexLatency> C2PortLatencyInfo;
+
+typedef C2GlobalParam<C2Info, C2Uint32Value, kParamIndexLatency> C2ComponentLatencyInfo;
+
+/// \todo
+typedef C2GlobalParam<C2Info, C2Uint32Value, kParamIndexTemporal> C2ComponentTemporalInfo;
+
+/// port configuration
+
+typedef C2PortParam<C2Tuning, C2StringValue, kParamIndexMime> C2PortMimeConfig;
+
+typedef C2PortParam<C2Tuning, C2Uint32Value, kParamIndexStreamCount> C2PortStreamCountConfig;
+
+typedef C2StreamParam<C2Tuning, C2StringValue, kParamIndexMime> C2StreamMimeConfig;
+
+C2ENUM(C2FormatKind, uint32_t,
+ C2FormatCompressed,
+ C2FormatAudio = 1,
+ C2FormatVideo = 4,
+)
+
+typedef C2StreamParam<C2Tuning, C2Uint32Value, kParamIndexFormat> C2StreamFormatConfig;
+
+/*
+ Component description fields:
+
+// format (video/compressed/audio/other-do we need other?) per stream
+
+// likely some of these are exposed as separate settings:
+
+struct C2BaseTuning {
+ // latency characteristics
+ uint32_t latency;
+ bool temporal; // seems this only makes sense if latency is 1..., so this could be captured as latency = 0
+ uint32_t delay;
+
+ uint32_t numInputStreams; // RW? - or suggestion only: RO
+ uint32_t numOutputStreams; // RW
+ //
+ // refs characteristics (per stream?)
+ uint32_t maxInputRefs; // RO
+ uint32_t maxOutputRefs; // RO
+ uint32_t maxInputMemory; // RO - max time refs are held for
+ uint32_t maxOutputMemory; // RO
+
+ // per stream
+ bool compressed;
+ // format... video/compressed/audio/other?
+ // actual "audio/video" format type
+ uint32_t width/height? is this needed, or just queue...
+ // mime...
+};
+*/
+
+
+
+
+
+
+// overall component
+// => C: domain: audio or video
+// => C: kind: decoder, encoder or filter
+// => "mime" class
+
+// => C: temporal (bool) => does this depend on ordering?
+// => I: latency
+// => I: history max duration...
+// => I: history max frames kept...
+// => I: reordering depth
+// => I: frc (bool) (perhaps ratio?)
+// => I: current frc
+
+// - pause
+// => last frame 'number' processed
+// => current frame 'number' processed
+// => invalid settings =>[]
+
+// video decoder configuration: // audio
+// - encoding // -encoding
+// - hint: max width/height // -hint: sample rate, channels
+// - hint: profile/level // -hint: tools used
+// - hint: framerate (bitrate?) // -hint: bitrate
+// - default: color space (from container)
+// - hint: color format // -hint: pcm-encoding
+// - hint: # of views (e.g. MVC) // -hint?: channel groups
+// - default: HDR static info (from container) // -hint?: channel mappings
+// - hint: rotation (e.g. for allocator)
+
+// => # of streams required and their formats? (setting?)
+// => # of streams produced and their formats? (tuning)
+
+// => output
+// - # of views // -channel groups && channel mappings
+// - width/height/crop/color format/color space/HDR static info (from buffers)
+// (as required by the allocator & framework)
+// - SEI (or equivalent) <= [port]
+// - CC
+// - reference info
+
+// video encoder configurations
+// - encoding // - encoding
+// - hint: width/height // - hint: sample rate, channels
+// - hint: frame rate
+// - hint: max width/height (? does this differ from width/height?)
+// - # of input (e.g. MVC) // - hint: # groups and mappings
+// - # of output (e.g. SVC) => bitrates/width/height/framerates? per stream
+// - hint: profile/level // - hint: profile/level
+// - HDR static info + (info: HDR)
+// - color space
+// - hint: color format? // - hint: pcm encoding
+// - SEI
+// - CC
+// - reference directive
+// - hint: bitrate (or quality) // - hint: bitrate/quality
+// - optional: codec-specific parameters // - optional: csd
+
+// => output // => output
+// - layers per stream? // E-AC3?... DTS?...Dolby-Vision?
+// - reference info
+
+
+// RM:
+// - need SPS for full knowledge => component should return max. (component can use less)
+// - critical parameters? (interlaced? profile? level?)
+
+struct C2VideoSizeStruct {
+ int32_t mWidth; ///< video width
+ int32_t mHeight; ///< video height
+
+ DEFINE_AND_DESCRIBE_C2STRUCT(VideoSize)
+ C2FIELD(mWidth, "width")
+ C2FIELD(mHeight, "height")
+};
+
+// video size for video decoder [OUT]
+typedef C2StreamParam<C2Info, C2VideoSizeStruct> C2VideoSizeStreamInfo;
+
+// max video size for video decoder [IN]
+typedef C2PortParam<C2Setting, C2VideoSizeStruct, kParamIndexMaxVideoSizeHint> C2MaxVideoSizeHintPortSetting;
+
+// video encoder size [IN]
+typedef C2StreamParam<C2Tuning, C2VideoSizeStruct> C2VideoSizeStreamTuning;
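+
+// A usage sketch for the video size params (illustrative only; the ::output subtype and the
+// (stream, width, height) constructor are assumptions based on C2ParamDef.h, not shown here):
+//
+//   C2VideoSizeStreamTuning::output encodeSize(0u, 1280, 720);
+//   // configured on a video encoder via C2ComponentInterface::config_nb()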
+
+/// @}
+
+} // namespace android
+
+#endif
diff --git a/media/libstagefright/codec2/include/C2Param.h b/media/libstagefright/codec2/include/C2Param.h
new file mode 100644
index 0000000..fd43061
--- /dev/null
+++ b/media/libstagefright/codec2/include/C2Param.h
@@ -0,0 +1,1171 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef C2PARAM_H_
+#define C2PARAM_H_
+
+#include <C2.h>
+
+#include <stdbool.h>
+#include <stdint.h>
+
+#include <algorithm>
+#include <list>
+#include <string>
+#include <type_traits>
+
+#define C2_PACK __attribute__((packed))
+
+namespace android {
+
+/// \addtogroup Parameters
+/// @{
+
+/// \defgroup internal Internal helpers.
+
+/*!
+ * \file
+ * PARAMETERS: SETTINGs, TUNINGs, and INFOs
+ * ===
+ *
+ * These represent miscellaneous control and metadata information and are likely copied into
+ * kernel space. Therefore, these are C-like structures designed to carry just a small amount of
+ * information. We are using C++ to be able to add constructors, as well as non-virtual and class
+ * methods.
+ *
+ * ==Specification details:
+ *
+ * Restrictions:
+ * - must be POD struct, e.g. no vtable (no virtual destructor)
+ * - must have the same size in 64-bit and 32-bit mode (no size_t)
+ * - as such, no pointer members
+ *
+ * Behavior:
+ * - Params can be global (not related to input or output), related to input or output,
+ * or related to an input/output stream.
+ * - All params are queried/set using a unique param index, which incorporates a potential stream
+ * index and/or port.
+ * - Querying (supported) params MUST never fail.
+ * - All params MUST have default values.
+ * - If some fields have "unsupported" or "invalid" values during setting, this SHOULD be
+ * communicated to the app.
+ * a) Ideally, this should be avoided. When setting parameters, in general, component should do
+ * "best effort" to apply all settings. It should change "invalid/unsupported" values to the
+ * nearest supported values.
+ * - This is communicated to the client by changing the source values in tune()/
+ * configure().
+ * b) If falling back to a supported value is absolutely impossible, the component SHALL return
+ * an error for the specific setting, but should continue to apply other settings.
+ * TODO: this currently may lead to unintended results.
+ *
+ * **NOTE:** unlike OMX, params are not versioned. Instead, a new struct with new base index
+ * SHALL be added as new versions are required.
+ *
+ * The proper subtype (Setting, Info or Param) is incorporated into the class type. Define structs
+ * to define multiple subtyped versions of related parameters.
+ *
+ * ==Implementation details:
+ *
+ * - Use macros to define parameters
+ * - All parameters must have a default constructor
+ * - This is only used for instantiating the class in source (e.g. will not be used
+ * when building a parameter by the framework from key/value pairs.)
+ */
+
+/// \ingroup internal
+struct _C2ParamManipulator;
+
+/**
+ * Parameter base class.
+ */
+struct C2Param {
+ // param index encompasses the following:
+ //
+ // - type (setting, tuning, info, struct)
+ // - vendor extension flag
+ // - flexible parameter flag
+ // - direction (global, input, output)
+ // - stream flag
+ // - stream ID (usually 0)
+ //
+ // layout:
+ //
+ // +------+-----+---+------+--------+----+------+--------------+
+ // | kind | dir | - |stream|streamID|flex|vendor| base index |
+ // +------+-----+---+------+--------+----+------+--------------+
+ // bit: 31..30 29..28 25 24 .. 17 16 15 14 .. 0
+ //
+public:
+ /**
+ * C2Param kinds, usable as bitmaps.
+ */
+ enum Kind : uint32_t {
+ NONE = 0,
+ STRUCT = (1 << 0),
+ INFO = (1 << 1),
+ SETTING = (1 << 2),
+ TUNING = (1 << 3) | SETTING, // tunings are settings
+ };
+
+ /**
+ * base index (including the vendor extension bit) is a global index for
+ * C2 parameter structs. (e.g. the same indices cannot be reused for different
+ * structs for different components).
+ */
+ struct BaseIndex {
+ protected:
+ enum : uint32_t {
+ kTypeMask = 0xC0000000,
+ kTypeStruct = 0x00000000,
+ kTypeTuning = 0x40000000,
+ kTypeSetting = 0x80000000,
+ kTypeInfo = 0xC0000000,
+
+ kDirMask = 0x30000000,
+ kDirGlobal = 0x20000000,
+ kDirUndefined = 0x30000000, // MUST have all bits set
+ kDirInput = 0x00000000,
+ kDirOutput = 0x10000000,
+
+ kStreamFlag = 0x02000000,
+ kStreamIdMask = 0x01FE0000,
+ kStreamIdShift = 17,
+ kStreamIdMax = kStreamIdMask >> kStreamIdShift,
+ kStreamMask = kStreamFlag | kStreamIdMask,
+
+ kFlexibleFlag = 0x00010000,
+ kVendorFlag = 0x00008000,
+ kParamMask = 0x0000FFFF,
+ kBaseMask = kParamMask | kFlexibleFlag,
+ };
+
+ public:
+ enum : uint32_t {
+ kVendorStart = kVendorFlag, ///< vendor structs SHALL start after this
+ _kFlexibleFlag = kFlexibleFlag, // TODO: this is only needed for testing
+ };
+
+ /// constructor/conversion from uint32_t
+ inline BaseIndex(uint32_t index) : mIndex(index) { }
+
+ // no conversion from uint64_t
+ inline BaseIndex(uint64_t index) = delete;
+
+ /// returns true iff this is a vendor extension parameter
+ inline bool isVendor() const { return mIndex & kVendorFlag; }
+
+ /// returns true iff this is a flexible parameter (with variable size)
+ inline bool isFlexible() const { return mIndex & kFlexibleFlag; }
+
+ /// returns the base type: the index for the underlying struct
+ inline unsigned int baseIndex() const { return mIndex & kBaseMask; }
+
+ /// returns the param index for the underlying struct
+ inline unsigned int paramIndex() const { return mIndex & kParamMask; }
+
+ DEFINE_FIELD_BASED_COMPARISON_OPERATORS(BaseIndex, mIndex)
+
+ protected:
+ uint32_t mIndex;
+ };
+
+ /**
+ * type encompasses the parameter kind (tuning, setting, info), whether the
+ * parameter is global, input or output, and whether it is for a stream.
+ */
+ struct Type : public BaseIndex {
+ /// returns true iff this is a global parameter (not for input nor output)
+ inline bool isGlobal() const { return (mIndex & kDirMask) == kDirGlobal; }
+ /// returns true iff this is an input or input stream parameter
+ inline bool forInput() const { return (mIndex & kDirMask) == kDirInput; }
+ /// returns true iff this is an output or output stream parameter
+ inline bool forOutput() const { return (mIndex & kDirMask) == kDirOutput; }
+
+ /// returns true iff this is a stream parameter
+ inline bool forStream() const { return mIndex & kStreamFlag; }
+ /// returns true iff this is a port (input or output) parameter
+ inline bool forPort() const { return !forStream() && !isGlobal(); }
+
+ /// returns the parameter type: the parameter index without the stream ID
+ inline uint32_t type() const { return mIndex & (~kStreamIdMask); }
+
+ /// return the kind of this param
+ inline Kind kind() const {
+ switch (mIndex & kTypeMask) {
+ case kTypeStruct: return STRUCT;
+ case kTypeInfo: return INFO;
+ case kTypeSetting: return SETTING;
+ case kTypeTuning: return TUNING;
+ default: return NONE; // should not happen
+ }
+ }
+
+ /// constructor/conversion from uint32_t
+ inline Type(uint32_t index) : BaseIndex(index) { }
+
+ // no conversion from uint64_t
+ inline Type(uint64_t index) = delete;
+
+ private:
+ friend struct C2Param; // for setPort()
+ friend struct C2Tuning; // for kTypeTuning
+ friend struct C2Setting; // for kTypeSetting
+ friend struct C2Info; // for kTypeInfo
+ // for kDirGlobal
+ template<typename T, typename S, int I, class F> friend struct C2GlobalParam;
+ template<typename T, typename S, int I, class F> friend struct C2PortParam; // for kDir*
+ template<typename T, typename S, int I, class F> friend struct C2StreamParam; // for kDir*
+ friend struct _C2ParamInspector; // for testing
+
+ /**
+ * Sets the port/stream direction.
+ * @return true on success, false if could not set direction (e.g. it is global param).
+ */
+ inline bool setPort(bool output) {
+ if (isGlobal()) {
+ return false;
+ } else {
+ mIndex = (mIndex & ~kDirMask) | (output ? kDirOutput : kDirInput);
+ return true;
+ }
+ }
+ };
+
+ /**
+ * index encompasses all remaining information: basically the stream ID.
+ */
+ struct Index : public Type {
+ /// returns the index as uint32_t
+ inline operator uint32_t() const { return mIndex; }
+
+ /// constructor/conversion from uint32_t
+ inline Index(uint32_t index) : Type(index) { }
+
+ // no conversion from uint64_t
+ inline Index(uint64_t index) = delete;
+
+ /// returns the stream ID or ~0 if not a stream
+ inline unsigned stream() const {
+ return forStream() ? rawStream() : ~0U;
+ }
+
+ private:
+ friend struct C2Param; // for setStream, makeStreamId, isValid
+ friend struct _C2ParamInspector; // for testing
+
+ /**
+ * @return true if the type is valid, e.g. direction is not undefined AND
+ * stream is 0 if not a stream param.
+ */
+ inline bool isValid() const {
+ // there is no Type::isValid (even though some of this check could be
+ // performed on types) as this is only used on index...
+ return (forStream() ? rawStream() < kStreamIdMax : rawStream() == 0)
+ && (mIndex & kDirMask) != kDirUndefined;
+ }
+
+ /// returns the raw stream ID field
+ inline unsigned rawStream() const {
+ return (mIndex & kStreamIdMask) >> kStreamIdShift;
+ }
+
+ /// returns the streamId bitfield for a given |stream|. If stream is invalid,
+ /// returns an invalid bitfield.
+ inline static uint32_t makeStreamId(unsigned stream) {
+ // saturate stream ID (max value is invalid)
+ if (stream > kStreamIdMax) {
+ stream = kStreamIdMax;
+ }
+ return (stream << kStreamIdShift) & kStreamIdMask;
+ }
+
+ /**
+ * Sets the stream index.
+ * \return true on success, false if could not set index (e.g. not a stream param).
+ */
+ inline bool setStream(unsigned stream) {
+ if (forStream()) {
+ mIndex = (mIndex & ~kStreamIdMask) | makeStreamId(stream);
+ return this->stream() < kStreamIdMax;
+ }
+ return false;
+ }
+ };
+
+public:
+ // public getters for Index methods
+
+ /// returns true iff this is a vendor extension parameter
+ inline bool isVendor() const { return _mIndex.isVendor(); }
+ /// returns true iff this is a flexible parameter
+ inline bool isFlexible() const { return _mIndex.isFlexible(); }
+ /// returns true iff this is a global parameter (not for input nor output)
+ inline bool isGlobal() const { return _mIndex.isGlobal(); }
+ /// returns true iff this is an input or input stream parameter
+ inline bool forInput() const { return _mIndex.forInput(); }
+ /// returns true iff this is an output or output stream parameter
+ inline bool forOutput() const { return _mIndex.forOutput(); }
+
+ /// returns true iff this is a stream parameter
+ inline bool forStream() const { return _mIndex.forStream(); }
+ /// returns true iff this is a port (input or output) parameter
+ inline bool forPort() const { return _mIndex.forPort(); }
+
+ /// returns the stream ID or ~0 if not a stream
+ inline unsigned stream() const { return _mIndex.stream(); }
+
+ /// returns the parameter type: the parameter index without the stream ID
+ inline uint32_t type() const { return _mIndex.type(); }
+
+ /// returns the kind of this parameter
+ inline Kind kind() const { return _mIndex.kind(); }
+
+ /// returns the size of the parameter or 0 if the parameter is invalid
+ inline size_t size() const { return _mSize; }
+
+ /// returns true iff the parameter is valid
+ inline operator bool() const { return _mIndex.isValid() && _mSize > 0; }
+
+ /// returns true iff the parameter is invalid
+ inline bool operator!() const { return !operator bool(); }
+
+ // equality is done by memcmp (use equals() to prevent any overread)
+ inline bool operator==(const C2Param &o) const {
+ return equals(o) && memcmp(this, &o, _mSize) == 0;
+ }
+ inline bool operator!=(const C2Param &o) const { return !operator==(o); }
+
+ /// safe(r) type cast from pointer and size
+ inline static C2Param* From(void *addr, size_t len) {
+ // _mSize must fit into size
+ if (len < sizeof(_mSize) + offsetof(C2Param, _mSize)) {
+ return nullptr;
+ }
+ // _mSize must match length
+ C2Param *param = (C2Param*)addr;
+ if (param->_mSize != len) {
+ return nullptr;
+ }
+ return param;
+ }
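+
+ // A usage sketch for From() (illustrative only; |buffer| and |bufferSize| are assumptions):
+ //
+ //   C2Param *param = C2Param::From(buffer, bufferSize);
+ //   if (param != nullptr && param->forInput()) { /* safe to use as a C2Param */ }
+ //
+ // From() returns nullptr when |len| cannot hold the size field or when the embedded size
+ // does not match |len|.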
+
+#if 0
+ template<typename P, class=decltype(C2Param(P()))>
+ P *As() { return P::From(this); }
+ template<typename P>
+ const P *As() const { return const_cast<const P*>(P::From(const_cast<C2Param*>(this))); }
+#endif
+
+protected:
+ /// sets the stream field. Returns true iff successful.
+ inline bool setStream(unsigned stream) {
+ return _mIndex.setStream(stream);
+ }
+
+ /// sets the port (direction). Returns true iff successful.
+ inline bool setPort(bool output) {
+ return _mIndex.setPort(output);
+ }
+
+public:
+ /// invalidate this parameter. There is no recovery from this call; e.g. parameter
+ /// cannot be 'corrected' to be valid.
+ inline void invalidate() { _mSize = 0; }
+
+ // if other is the same kind of (valid) param as this, copy it into this and return true.
+ // otherwise, do not copy anything, and return false.
+ inline bool updateFrom(const C2Param &other) {
+ if (other._mSize == _mSize && other._mIndex == _mIndex && _mSize > 0) {
+ memcpy(this, &other, _mSize);
+ return true;
+ }
+ return false;
+ }
+
+protected:
+ // returns |o| if it is a null ptr, or if can suitably be a param of given |type| (e.g. has
+ // same type (ignoring stream ID), and size). Otherwise, returns null. If |checkDir| is false,
+ // allow undefined or different direction (e.g. as constructed from C2PortParam() vs.
+ // C2PortParam::input), but still require equivalent type (stream, port or global); otherwise,
+ // return null.
+ inline static const C2Param* ifSuitable(
+ const C2Param* o, size_t size, Type type, size_t flexSize = 0, bool checkDir = true) {
+ if (o == nullptr || o->_mSize < size || (flexSize && ((o->_mSize - size) % flexSize))) {
+ return nullptr;
+ } else if (checkDir) {
+ return o->_mIndex.type() == type.mIndex ? o : nullptr;
+ } else if (o->_mIndex.isGlobal()) {
+ return nullptr;
+ } else {
+ return ((o->_mIndex.type() ^ type.mIndex) & ~Type::kDirMask) ? nullptr : o;
+ }
+ }
+
+ /// base constructor
+ inline C2Param(uint32_t paramSize, Index paramIndex)
+ : _mSize(paramSize),
+ _mIndex(paramIndex) {
+ if (paramSize > sizeof(C2Param)) {
+ memset(this + 1, 0, paramSize - sizeof(C2Param));
+ }
+ }
+
+ /// base constructor with stream set
+ inline C2Param(uint32_t paramSize, Index paramIndex, unsigned stream)
+ : _mSize(paramSize),
+ _mIndex(paramIndex | Index::makeStreamId(stream)) {
+ if (paramSize > sizeof(C2Param)) {
+ memset(this + 1, 0, paramSize - sizeof(C2Param));
+ }
+ if (!forStream()) {
+ invalidate();
+ }
+ }
+
+private:
+ friend struct _C2ParamInspector; // for testing
+
+ /// returns the base type: the index for the underlying struct (for testing
+ /// as this can be gotten by the baseIndex enum)
+ inline uint32_t _baseIndex() const { return _mIndex.baseIndex(); }
+
+ /// returns true iff |o| has the same size and index as this. This performs the
+ /// basic check for equality.
+ inline bool equals(const C2Param &o) const {
+ return _mSize == o._mSize && _mIndex == o._mIndex;
+ }
+
+ uint32_t _mSize;
+ Index _mIndex;
+};
+
+/// \ingroup internal
+/// allow C2Params access to private methods, e.g. constructors
+#define C2PARAM_MAKE_FRIENDS \
+ template<typename U, typename S, int I, class F> friend struct C2GlobalParam; \
+ template<typename U, typename S, int I, class F> friend struct C2PortParam; \
+ template<typename U, typename S, int I, class F> friend struct C2StreamParam; \
+
+/**
+ * Setting base structure for component method signatures. Wrap constructors.
+ */
+struct C2Setting : public C2Param {
+protected:
+ template<typename ...Args>
+ inline C2Setting(const Args(&... args)) : C2Param(args...) { }
+public: // TODO
+ enum : uint32_t { indexFlags = Type::kTypeSetting };
+};
+
+/**
+ * Tuning base structure for component method signatures. Wrap constructors.
+ */
+struct C2Tuning : public C2Setting {
+protected:
+ template<typename ...Args>
+ inline C2Tuning(const Args(&... args)) : C2Setting(args...) { }
+public: // TODO
+ enum : uint32_t { indexFlags = Type::kTypeTuning };
+};
+
+/**
+ * Info base structure for component method signatures. Wrap constructors.
+ */
+struct C2Info : public C2Param {
+protected:
+ template<typename ...Args>
+ inline C2Info(const Args(&... args)) : C2Param(args...) { }
+public: // TODO
+ enum : uint32_t { indexFlags = Type::kTypeInfo };
+};
+
+/**
+ * Structure uniquely specifying a field in an arbitrary structure.
+ *
+ * \note This structure is used differently in C2FieldDescriptor to
+ * identify array fields, such that _mSize is the size of each element. This is
+ * because the field descriptor contains the array-length, and we want to keep
+ * a relevant element size for variable length arrays.
+ */
+struct _C2FieldId {
+//public:
+ /**
+ * Constructor used for C2FieldDescriptor that removes the array extent.
+ *
+ * \param[in] offset pointer to the field in an object at address 0.
+ */
+ template<typename T, class B=typename std::remove_extent<T>::type>
+ inline _C2FieldId(T* offset)
+ : // offset is from "0" so will fit on 32-bits
+ _mOffset((uint32_t)(uintptr_t)(offset)),
+ _mSize(sizeof(B)) { }
+
+ /**
+ * Direct constructor from offset and size.
+ *
+ * \param[in] offset offset of the field.
+ * \param[in] size size of the field.
+ */
+ inline _C2FieldId(size_t offset, size_t size)
+ : _mOffset(offset), _mSize(size) {}
+
+ /**
+ * Constructor used to identify a field in an object.
+ *
+ * \param U[type] pointer to the object that contains this field. This is needed in case the
+ * field is in an (inherited) base class, in which case T will be that base class.
+ * \param pm[in] member pointer to the field
+ */
+ template<typename R, typename T, typename U, typename B=typename std::remove_extent<R>::type>
+ inline _C2FieldId(U *, R T::* pm)
+ : _mOffset((uint32_t)(uintptr_t)(&(((U*)256)->*pm)) - 256u),
+ _mSize(sizeof(B)) { }
+
+ /**
+ * Constructor used to identify a field in an object.
+ *
+ * \param U[type] pointer to the object that contains this field
+ * \param pm[in] member pointer to the field
+ */
+ template<typename R, typename T, typename B=typename std::remove_extent<R>::type>
+ inline _C2FieldId(R T::* pm)
+ : _mOffset((uint32_t)(uintptr_t)(&(((T*)0)->*pm))),
+ _mSize(sizeof(B)) { }
+
+ inline bool operator==(const _C2FieldId &other) const {
+ return _mOffset == other._mOffset && _mSize == other._mSize;
+ }
+
+ inline bool operator<(const _C2FieldId &other) const {
+ return _mOffset < other._mOffset ||
+ // NOTE: order parent structure before sub field
+ (_mOffset == other._mOffset && _mSize > other._mSize);
+ }
+
+ DEFINE_OTHER_COMPARISON_OPERATORS(_C2FieldId)
+
+#if 0
+ inline uint32_t offset() const { return _mOffset; }
+ inline uint32_t size() const { return _mSize; }
+#endif
+
+#if defined(FRIEND_TEST)
+ friend void PrintTo(const _C2FieldId &d, ::std::ostream*);
+#endif
+
+private:
+ uint32_t _mOffset; // offset of field
+ uint32_t _mSize; // size of field
+};
+
+/**
+ * Structure uniquely specifying a field in a configuration
+ */
+struct C2ParamField {
+//public:
+ // TODO: fix what this is for T[] (for now size becomes T[1])
+ template<typename S, typename T>
+ inline C2ParamField(S* param, T* offset)
+ : _mIndex(param->index()),
+ _mFieldId(offset) {}
+
+ template<typename R, typename T, typename U>
+ inline C2ParamField(U *p, R T::* pm) : _mIndex(p->type()), _mFieldId(p, pm) { }
+
+ inline bool operator==(const C2ParamField &other) const {
+ return _mIndex == other._mIndex && _mFieldId == other._mFieldId;
+ }
+
+ inline bool operator<(const C2ParamField &other) const {
+ return _mIndex < other._mIndex ||
+ (_mIndex == other._mIndex && _mFieldId < other._mFieldId);
+ }
+
+ DEFINE_OTHER_COMPARISON_OPERATORS(C2ParamField)
+
+private:
+ C2Param::Index _mIndex;
+ _C2FieldId _mFieldId;
+};
+
+/**
+ * A shared (union) representation of numeric values
+ */
+class C2Value {
+public:
+ /// A union of supported primitive types.
+ union Primitive {
+ int32_t i32; ///< int32_t value
+ uint32_t u32; ///< uint32_t value
+ int64_t i64; ///< int64_t value
+ uint64_t u64; ///< uint64_t value
+ float fp; ///< float value
+
+ // constructors - implicit
+ Primitive(int32_t value) : i32(value) { }
+ Primitive(uint32_t value) : u32(value) { }
+ Primitive(int64_t value) : i64(value) { }
+ Primitive(uint64_t value) : u64(value) { }
+ Primitive(float value) : fp(value) { }
+
+ Primitive() : u64(0) { }
+
+ private:
+ friend class C2Value;
+ template<typename T> const T &ref() const;
+ };
+
+ enum Type {
+ NO_INIT,
+ INT32,
+ UINT32,
+ INT64,
+ UINT64,
+ FLOAT,
+ };
+
+ template<typename T> static constexpr Type typeFor();
+
+ // constructors - implicit
+ template<typename T>
+ C2Value(T value) : mType(typeFor<T>()), mValue(value) { }
+
+ C2Value() : mType(NO_INIT) { }
+
+ inline Type type() const { return mType; }
+
+ template<typename T>
+ inline bool get(T *value) const {
+ if (mType == typeFor<T>()) {
+ *value = mValue.ref<T>();
+ return true;
+ }
+ return false;
+ }
+
+private:
+ Type mType;
+ Primitive mValue;
+};
+
+template<> const int32_t &C2Value::Primitive::ref<int32_t>() const { return i32; }
+template<> const int64_t &C2Value::Primitive::ref<int64_t>() const { return i64; }
+template<> const uint32_t &C2Value::Primitive::ref<uint32_t>() const { return u32; }
+template<> const uint64_t &C2Value::Primitive::ref<uint64_t>() const { return u64; }
+template<> const float &C2Value::Primitive::ref<float>() const { return fp; }
+
+template<> constexpr C2Value::Type C2Value::typeFor<int32_t>() { return INT32; }
+template<> constexpr C2Value::Type C2Value::typeFor<int64_t>() { return INT64; }
+template<> constexpr C2Value::Type C2Value::typeFor<uint32_t>() { return UINT32; }
+template<> constexpr C2Value::Type C2Value::typeFor<uint64_t>() { return UINT64; }
+template<> constexpr C2Value::Type C2Value::typeFor<float>() { return FLOAT; }
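+
+// A usage sketch for C2Value (illustrative only): get() succeeds only when the requested type
+// matches the stored type exactly.
+//
+//   C2Value v(int32_t(42));
+//   int32_t i32; uint32_t u32;
+//   bool ok = v.get(&i32);        // true, i32 == 42
+//   bool mismatch = v.get(&u32);  // false: stored type is INT32, not UINT32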
+
+/**
+ * field descriptor. A field is uniquely defined by an index into a parameter.
+ * (Note: Stream-id is not captured as a field.)
+ *
+ * Ordering of fields is by offset. In case of structures, it is depth first,
+ * with a structure taking an index just before and in addition to its members.
+ */
+struct C2FieldDescriptor {
+//public:
+ /** field types and flags
+ * \note: only 32-bit and 64-bit fields are supported (e.g. no boolean, as that
+ * is represented using INT32).
+ */
+ enum Type : uint32_t {
+ // primitive types
+ INT32 = C2Value::INT32, ///< 32-bit signed integer
+ UINT32 = C2Value::UINT32, ///< 32-bit unsigned integer
+ INT64 = C2Value::INT64, ///< 64-bit signed integer
+ UINT64 = C2Value::UINT64, ///< 64-bit unsigned integer
+ FLOAT = C2Value::FLOAT, ///< 32-bit floating point
+
+ // array types
+ STRING = 0x100, ///< fixed-size string (POD)
+ BLOB, ///< blob. Blobs have no sub-elements and can be thought of as byte arrays;
+ ///< however, bytes cannot be individually addressed by clients.
+
+ // complex types
+ STRUCT_FLAG = 0x10000, ///< structs. Marked with this flag in addition to their baseIndex.
+ };
+
+ typedef std::pair<C2String, C2Value::Primitive> named_value_type;
+ typedef std::vector<const named_value_type> named_values_type;
+ //typedef std::pair<std::vector<C2String>, std::vector<C2Value::Primitive>> named_values_type;
+
+ /**
+ * Template specialization that returns the named values for a type.
+ *
+ * \todo hide from client.
+ *
+ * \return a vector of name-value pairs.
+ */
+ template<typename B>
+ static named_values_type namedValuesFor(const B &);
+
+ inline C2FieldDescriptor(uint32_t type, uint32_t length, C2StringLiteral name, size_t offset, size_t size)
+ : _mType((Type)type), _mLength(length), _mName(name), _mFieldId(offset, size) { }
+
+ template<typename T, class B=typename std::remove_extent<T>::type>
+ inline C2FieldDescriptor(const T* offset, const char *name)
+ : _mType(this->getType((B*)nullptr)),
+ _mLength(std::is_array<T>::value ? std::extent<T>::value : 1),
+ _mName(name),
+ _mNamedValues(namedValuesFor(*(B*)0)),
+ _mFieldId(offset) {}
+
+/*
+ template<typename T, typename B=typename std::remove_extent<T>::type>
+ inline C2FieldDescriptor<T, B, false>(T* offset, const char *name)
+ : _mType(this->getType((B*)nullptr)),
+ _mLength(std::is_array<T>::value ? std::extent<T>::value : 1),
+ _mName(name),
+ _mFieldId(offset) {}
+*/
+
+ /// \deprecated
+ template<typename T, typename S, class B=typename std::remove_extent<T>::type>
+ constexpr inline C2FieldDescriptor(S*, T S::* field, const char *name)
+ : _mType(this->getType((B*)nullptr)),
+ _mLength(std::is_array<T>::value ? std::extent<T>::value : 1),
+ _mName(name),
+ _mFieldId(&(((S*)0)->*field)) {}
+
+ /// returns the type of this field
+ inline Type type() const { return _mType; }
+ /// returns the length of the field in case it is an array. Returns 0 for
+ /// T[] arrays, returns 1 for T[1] arrays as well as if the field is not an array.
+ inline size_t length() const { return _mLength; }
+ /// returns the name of the field
+ inline C2StringLiteral name() const { return _mName; }
+
+ const named_values_type &namedValues() const { return _mNamedValues; }
+
+#if defined(FRIEND_TEST)
+ friend void PrintTo(const C2FieldDescriptor &, ::std::ostream*);
+ friend bool operator==(const C2FieldDescriptor &, const C2FieldDescriptor &);
+ FRIEND_TEST(C2ParamTest_ParamFieldList, VerifyStruct);
+#endif
+
+private:
+ const Type _mType;
+ const uint32_t _mLength; // the last member can be arbitrary length if it is T[] array,
+ // extending to the end of the parameter (this is marked with
+ // 0). T[0]-s are not fields.
+ const C2StringLiteral _mName;
+ const named_values_type _mNamedValues;
+
+ const _C2FieldId _mFieldId; // field identifier (offset and size)
+
+ // NOTE: We do not capture default value(s) here as that may depend on the component.
+ // NOTE: We also do not capture bestEffort, as 1) this should be true for most fields,
+ // 2) this is at parameter granularity.
+
+ // type resolution
+ inline static Type getType(int32_t*) { return INT32; }
+ inline static Type getType(uint32_t*) { return UINT32; }
+ inline static Type getType(int64_t*) { return INT64; }
+ inline static Type getType(uint64_t*) { return UINT64; }
+ inline static Type getType(float*) { return FLOAT; }
+ inline static Type getType(char*) { return STRING; }
+ inline static Type getType(uint8_t*) { return BLOB; }
+
+ template<typename T,
+ class=typename std::enable_if<std::is_enum<T>::value>::type>
+ inline static Type getType(T*) {
+ typename std::underlying_type<T>::type underlying(0);
+ return getType(&underlying);
+ }
+
+ // verify C2Struct by having a fieldList and a baseIndex.
+ template<typename T,
+ class=decltype(T::baseIndex + 1), class=decltype(T::fieldList)>
+ inline static Type getType(T*) {
+ static_assert(!std::is_base_of<C2Param, T>::value, "cannot use C2Params as fields");
+ return (Type)(T::baseIndex | STRUCT_FLAG);
+ }
+};
+
+#define DEFINE_NO_NAMED_VALUES_FOR(type) \
+template<> inline C2FieldDescriptor::named_values_type C2FieldDescriptor::namedValuesFor(const type &) { \
+ return named_values_type(); \
+}
+
+// We cannot specialize the constructor for enumerated types, so instead we define no named values
+// for non-enumerated integral types.
+DEFINE_NO_NAMED_VALUES_FOR(int32_t)
+DEFINE_NO_NAMED_VALUES_FOR(uint32_t)
+DEFINE_NO_NAMED_VALUES_FOR(int64_t)
+DEFINE_NO_NAMED_VALUES_FOR(uint64_t)
+DEFINE_NO_NAMED_VALUES_FOR(uint8_t)
+DEFINE_NO_NAMED_VALUES_FOR(char)
+DEFINE_NO_NAMED_VALUES_FOR(float)
+
+/**
+ * Describes the fields of a structure.
+ */
+struct C2StructDescriptor {
+public:
+ /// Returns the parameter type
+ inline C2Param::BaseIndex baseIndex() const { return _mType.baseIndex(); }
+
+ // Returns the number of fields in this param (not counting any recursive fields).
+ // Must be at least 1 for valid params.
+ inline size_t numFields() const { return _mFields.size(); }
+
+ // Returns the list of immediate fields (not counting any recursive fields).
+ typedef std::vector<const C2FieldDescriptor>::const_iterator field_iterator;
+ inline field_iterator cbegin() const { return _mFields.cbegin(); }
+ inline field_iterator cend() const { return _mFields.cend(); }
+
+ // only supplying const iterator - but these are needed for range based loops
+ inline field_iterator begin() const { return _mFields.cbegin(); }
+ inline field_iterator end() const { return _mFields.cend(); }
+
+ template<typename T>
+ inline C2StructDescriptor(T*)
+ : C2StructDescriptor(T::baseIndex, T::fieldList) { }
+
+ inline C2StructDescriptor(
+ C2Param::BaseIndex type,
+ std::initializer_list<const C2FieldDescriptor> fields)
+ : _mType(type), _mFields(fields) { }
+
+private:
+ const C2Param::BaseIndex _mType;
+ const std::vector<const C2FieldDescriptor> _mFields;
+};
+
+/**
+ * Describes parameters for a component.
+ */
+struct C2ParamDescriptor {
+public:
+ /**
+ * Returns whether setting this param is required to configure this component.
+ * This can only be true for builtin params for platform-defined components (e.g. video and
+ * audio encoders/decoders, video/audio filters).
+ * For vendor-defined components, it can be true even for vendor-defined params,
+ * but it is not recommended, in case the component becomes platform-defined.
+ */
+ inline bool isRequired() const { return _mIsRequired; }
+
+ /**
+ * Returns whether this parameter is persistent. This is always true for C2Tuning and C2Setting,
+ * but may be false for C2Info. If true, this parameter persists across frames and applies to
+ * the current and subsequent frames. If false, this C2Info parameter only applies to the
+ * current frame and is not assumed to have the same value (or even be present) on subsequent
+ * frames, unless it is specified for those frames.
+ */
+ inline bool isPersistent() const { return _mIsPersistent; }
+
+ /// Returns the name of this param.
+ /// This defaults to the underlying C2Struct's name, but could be altered for a component.
+ inline C2String name() const { return _mName; }
+
+ /// Returns the parameter type
+ /// \todo fix this
+ inline C2Param::Type type() const { return _mType; }
+
+ template<typename T>
+ inline C2ParamDescriptor(bool isRequired, C2StringLiteral name, const T*)
+ : _mIsRequired(isRequired),
+ _mIsPersistent(true),
+ _mName(name),
+ _mType(T::typeIndex) { }
+
+ inline C2ParamDescriptor(
+ bool isRequired, C2StringLiteral name, C2Param::Type type)
+ : _mIsRequired(isRequired),
+ _mIsPersistent(true),
+ _mName(name),
+ _mType(type) { }
+
+private:
+ const bool _mIsRequired;
+ const bool _mIsPersistent;
+ const C2String _mName;
+ const C2Param::Type _mType;
+};
+
+/// \ingroup internal
+/// Define a structure without baseIndex.
+#define DEFINE_C2STRUCT_NO_BASE(name) \
+public: \
+ typedef C2##name##Struct _type; /**< type name shorthand */ \
+ const static std::initializer_list<const C2FieldDescriptor> fieldList; /**< structure fields */
+
+/// Define a structure with matching baseIndex.
+#define DEFINE_C2STRUCT(name) \
+public: \
+ enum : uint32_t { baseIndex = kParamIndex##name }; \
+ DEFINE_C2STRUCT_NO_BASE(name)
+
+/// Define a flexible structure with matching baseIndex.
+#define DEFINE_FLEX_C2STRUCT(name, flexMember) \
+public: \
+ FLEX(C2##name##Struct, flexMember) \
+ enum : uint32_t { baseIndex = kParamIndex##name | C2Param::BaseIndex::_kFlexibleFlag }; \
+ DEFINE_C2STRUCT_NO_BASE(name)
+
+/// \ingroup internal
+/// Describe the fields of a templated structure.
+#define DESCRIBE_TEMPLATED_C2STRUCT(strukt, list) \
+ template<> \
+ const std::initializer_list<const C2FieldDescriptor> strukt::fieldList = list;
+
+/// \deprecated
+/// Describe the fields of a structure using an initializer list.
+#define DESCRIBE_C2STRUCT(name, list) \
+ const std::initializer_list<const C2FieldDescriptor> C2##name##Struct::fieldList = list;
+
+/**
+ * Describe a field of a structure.
+ * Field descriptors must be listed in the same order as the fields appear in the structure.
+ *
+ * There are two ways to use this macro:
+ *
+ * ~~~~~~~~~~~~~ (.cpp)
+ * struct C2VideoWidthStruct {
+ * int32_t mWidth;
+ * C2VideoWidthStruct() {} // optional default constructor
+ * C2VideoWidthStruct(int32_t _width) : mWidth(_width) {}
+ *
+ * DEFINE_AND_DESCRIBE_C2STRUCT(VideoWidth)
+ * C2FIELD(mWidth, "width")
+ * };
+ * ~~~~~~~~~~~~~
+ *
+ * ~~~~~~~~~~~~~ (.cpp)
+ * struct C2VideoWidthStruct {
+ * int32_t mWidth;
+ * C2VideoWidthStruct() = default; // optional default constructor
+ * C2VideoWidthStruct(int32_t _width) : mWidth(_width) {}
+ *
+ * DEFINE_C2STRUCT(VideoWidth)
+ * } C2_PACK;
+ *
+ * DESCRIBE_C2STRUCT(VideoWidth, {
+ * C2FIELD(mWidth, "width")
+ * })
+ * ~~~~~~~~~~~~~
+ *
+ * For flexible structures (those ending in T[]), use the flexible macros:
+ *
+ * ~~~~~~~~~~~~~ (.cpp)
+ * struct C2VideoFlexWidthsStruct {
+ * int32_t mWidths[];
+ * C2VideoFlexWidthsStruct(); // must have a default constructor
+ *
+ * private:
+ * // may have private constructors taking number of widths as the first argument
+ * // This is used by the C2Param factory methods, e.g.
+ * // C2VideoFlexWidthsGlobalParam::alloc_unique(size_t, int32_t);
+ * C2VideoFlexWidthsStruct(size_t flexCount, int32_t value) {
+ * for (size_t i = 0; i < flexCount; ++i) {
+ * mWidths[i] = value;
+ * }
+ * }
+ *
+ * // If the last argument is T[N] or std::initializer_list<T>, the flexCount will
+ * // be automatically calculated and passed by the C2Param factory methods, e.g.
+ * // int widths[] = { 1, 2, 3 };
+ * // C2VideoFlexWidthsGlobalParam::alloc_unique(widths);
+ * template<unsigned N>
+ * C2VideoFlexWidthsStruct(size_t flexCount, const int32_t(&init)[N]) {
+ * for (size_t i = 0; i < flexCount; ++i) {
+ * mWidths[i] = init[i];
+ * }
+ * }
+ *
+ * DEFINE_AND_DESCRIBE_FLEX_C2STRUCT(VideoFlexWidths, mWidths)
+ * C2FIELD(mWidths, "widths")
+ * };
+ * ~~~~~~~~~~~~~
+ *
+ * ~~~~~~~~~~~~~ (.cpp)
+ * struct C2VideoFlexWidthsStruct {
+ * int32_t mWidths[];
+ * C2VideoFlexWidthsStruct(); // must have a default constructor
+ *
+ * DEFINE_FLEX_C2STRUCT(VideoFlexWidths, mWidths)
+ * } C2_PACK;
+ *
+ * DESCRIBE_C2STRUCT(VideoFlexWidths, {
+ * C2FIELD(mWidths, "widths")
+ * })
+ * ~~~~~~~~~~~~~
+ *
+ */
+#define C2FIELD(member, name) \
+ C2FieldDescriptor(&((_type*)(nullptr))->member, name),
+
+/// \deprecated
+#define C2SOLE_FIELD(member, name) \
+ C2FieldDescriptor(&_type::member, name, 0)
+
+/// Define a structure with matching baseIndex and start describing its fields.
+/// This must be at the end of the structure definition.
+#define DEFINE_AND_DESCRIBE_C2STRUCT(name) \
+ DEFINE_C2STRUCT(name) } C2_PACK; \
+ const std::initializer_list<const C2FieldDescriptor> C2##name##Struct::fieldList = {
+
+/// Define a flexible structure with matching baseIndex and start describing its fields.
+/// This must be at the end of the structure definition.
+#define DEFINE_AND_DESCRIBE_FLEX_C2STRUCT(name, flexMember) \
+ DEFINE_FLEX_C2STRUCT(name, flexMember) } C2_PACK; \
+ const std::initializer_list<const C2FieldDescriptor> C2##name##Struct::fieldList = {
+
+/**
+ * Parameter reflector class.
+ *
+ * This class centralizes the description of parameter structures. This can be shared
+ * by multiple components as describing a parameter does not imply support of that
+ * parameter. However, each supported parameter and any dependent structures within
+ * must be described by the parameter reflector provided by a component.
+ */
+class C2ParamReflector {
+public:
+ /**
+ * Describes a parameter structure.
+ *
+ * \param[in] paramIndex the base index of the parameter structure
+ *
+ * \return the description of the parameter structure
+ * \retval nullptr if the parameter is not supported by this reflector
+ *
+ * This method shall not block and shall return immediately.
+ *
+ * \note this class does not take a set of indices because we would then prefer
+ * to also return any dependent structures, and we don't want this logic to be
+ * repeated in each reflector. Alternatively, this could just return a map of all
+ * descriptions, but we want to conserve memory if the client only wants the description
+ * of a few indices.
+ */
+ virtual std::unique_ptr<C2StructDescriptor> describe(C2Param::BaseIndex paramIndex) = 0;
+
+protected:
+ virtual ~C2ParamReflector() = default;
+};
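+
+// Client-side usage sketch: querying a reflector for the layout of a (hypothetical) structure.
+// 'reflector' stands for any C2ParamReflector implementation provided by a component.
+//   std::unique_ptr<C2StructDescriptor> desc = reflector->describe(C2VideoWidthStruct::baseIndex);
+//   if (desc) {
+//       for (const C2FieldDescriptor &field : *desc) { /* build validation/UI from the fields */ }
+//   }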
+
+/**
+ * Typed supported values for a field.
+ *
+ * This can be either a range or a set of values. The range can be linear or geometric with a
+ * clear minimum and maximum value, and can have an optional step size or geometric ratio. Values
+ * can optionally represent flags.
+ *
+ * \note Do not use flags to represent bitfields. Use individual values or separate fields instead.
+ */
+template<typename T>
+struct C2TypedFieldSupportedValues {
+//public:
+ enum Type {
+ RANGE, ///< a numeric range that can be continuous or discrete
+ VALUES, ///< a list of values
+ FLAGS ///< a list of flags that can be OR-ed
+ };
+
+ Type type;
+
+ struct {
+ T min;
+ T max;
+ T step;
+ T nom;
+ T denom;
+ } range;
+ std::vector<T> values;
+
+ C2TypedFieldSupportedValues(T min, T max, T step = T(std::is_floating_point<T>::value ? 0 : 1))
+ : type(RANGE),
+ range{min, max, step, (T)1, (T)1} { }
+
+ C2TypedFieldSupportedValues(T min, T max, T nom, T den) :
+ type(RANGE),
+ range{min, max, (T)0, nom, den} { }
+
+ C2TypedFieldSupportedValues(bool flags, std::initializer_list<T> list) :
+ type(flags ? FLAGS : VALUES),
+ values(list) {}
+};
+
+/**
+ * Generic supported values for a field.
+ *
+ * This can be either a range or a set of values. The range can be linear or geometric with a
+ * clear minimum and maximum value, and can have an optional step size or geometric ratio. Values
+ * can optionally represent flags.
+ *
+ * \note Do not use flags to represent bitfields. Use individual values or separate fields instead.
+ */
+struct C2FieldSupportedValues {
+//public:
+ enum Type {
+ RANGE, ///< a numeric range that can be continuous or discrete
+ VALUES, ///< a list of values
+ FLAGS ///< a list of flags that can be OR-ed
+ };
+
+ Type type;
+
+ typedef C2Value::Primitive Primitive;
+
+ struct {
+ Primitive min;
+ Primitive max;
+ Primitive step;
+ Primitive nom;
+ Primitive denom;
+ } range;
+ std::vector<Primitive> values;
+
+ template<typename T>
+ C2FieldSupportedValues(T min, T max, T step = T(std::is_floating_point<T>::value ? 0 : 1))
+ : type(RANGE),
+ range{min, max, step, (T)1, (T)1} { }
+
+ template<typename T>
+ C2FieldSupportedValues(T min, T max, T nom, T den) :
+ type(RANGE),
+ range{min, max, (T)0, nom, den} { }
+
+ template<typename T>
+ C2FieldSupportedValues(bool flags, std::initializer_list<T> list)
+ : type(flags ? FLAGS : VALUES),
+ range{(T)0, (T)0, (T)0, (T)0, (T)0} {
+ for(T value : list) {
+ values.emplace_back(value);
+ }
+ }
+
+ template<typename T, typename E=decltype(C2FieldDescriptor::namedValuesFor(*(T*)0))>
+ C2FieldSupportedValues(bool flags, const T*)
+ : type(flags ? FLAGS : VALUES),
+ range{(T)0, (T)0, (T)0, (T)0, (T)0} {
+ C2FieldDescriptor::named_values_type named = C2FieldDescriptor::namedValuesFor(*(T*)0);
+ for (const C2FieldDescriptor::named_value_type &item : named) {
+ values.emplace_back(item.second);
+ }
+ }
+};
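+
+// Illustrative constructions (values are arbitrary):
+//   C2FieldSupportedValues range(0, 100, 5);          // discrete range [0, 100] with step 5
+//   C2FieldSupportedValues values(false, {16, 32});   // an explicit list of allowed values
+//   C2FieldSupportedValues flags(true, {0x1, 0x2});   // a list of OR-able flags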
+
+/// @}
+
+} // namespace android
+
+#endif // C2PARAM_H_
diff --git a/media/libstagefright/codec2/include/C2ParamDef.h b/media/libstagefright/codec2/include/C2ParamDef.h
new file mode 100644
index 0000000..f369617
--- /dev/null
+++ b/media/libstagefright/codec2/include/C2ParamDef.h
@@ -0,0 +1,901 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** \file
+ * Templates used to declare parameters.
+ */
+#ifndef C2PARAM_DEF_H_
+#define C2PARAM_DEF_H_
+
+#include <type_traits>
+
+#include <C2Param.h>
+
+namespace android {
+
+/// \addtogroup Parameters
+/// @{
+
+/* ======================== UTILITY TEMPLATES FOR PARAMETER DEFINITIONS ======================== */
+
+/// \addtogroup internal
+/// @{
+
+/// Helper class that checks if a type has equality and inequality operators.
+struct C2_HIDE _C2Comparable_impl
+{
+ template<typename S, typename=decltype(S() == S())>
+ static std::true_type __testEQ(int);
+ template<typename>
+ static std::false_type __testEQ(...);
+
+ template<typename S, typename=decltype(S() != S())>
+ static std::true_type __testNE(int);
+ template<typename>
+ static std::false_type __testNE(...);
+};
+
+/**
+ * Helper template that returns whether a type has equality and inequality operators.
+ *
+ * Use as _C2Comparable<typename S>::value.
+ */
+template<typename S>
+struct C2_HIDE _C2Comparable
+ : public std::integral_constant<bool, decltype(_C2Comparable_impl::__testEQ<S>(0))::value
+ || decltype(_C2Comparable_impl::__testNE<S>(0))::value> {
+};
+
+/// Helper class that checks if a type has a baseIndex constant.
+struct C2_HIDE _C2BaseIndexHelper_impl
+{
+ template<typename S, int=S::baseIndex>
+ static std::true_type __testBaseIndex(int);
+ template<typename>
+ static std::false_type __testBaseIndex(...);
+};
+
+/// Helper template that verifies a type's baseIndex and creates it if the type does not have one.
+template<typename S, int BaseIndex,
+ bool HasBase=decltype(_C2BaseIndexHelper_impl::__testBaseIndex<S>(0))::value>
+struct C2_HIDE C2BaseIndexOverride {
+ // TODO: what if we allow structs without baseIndex?
+ static_assert(BaseIndex == S::baseIndex, "baseIndex differs from structure");
+};
+
+/// Specialization for types without a baseIndex.
+template<typename S, int BaseIndex>
+struct C2_HIDE C2BaseIndexOverride<S, BaseIndex, false> {
+public:
+ enum : uint32_t {
+ baseIndex = BaseIndex, ///< baseIndex override.
+ };
+};
+
+/// Helper template that adds a baseIndex to a type if it does not have one.
+template<typename S, int BaseIndex>
+struct C2_HIDE C2AddBaseIndex : public S, public C2BaseIndexOverride<S, BaseIndex> {};
+
+/**
+ * \brief Helper class to check struct requirements for parameters.
+ *
+ * Features:
+ * - verify default constructor, no virtual methods, and no equality operators.
+ * - expose typeIndex, and non-flex flexSize.
+ */
+template<typename S, int BaseIndex, unsigned TypeIndex>
+struct C2_HIDE C2StructCheck {
+ static_assert(
+ std::is_default_constructible<S>::value, "C2 structure must have default constructor");
+ static_assert(!std::is_polymorphic<S>::value, "C2 structure must not have virtual methods");
+ static_assert(!_C2Comparable<S>::value, "C2 structure must not have operator== or !=");
+
+public:
+ enum : uint32_t {
+ typeIndex = BaseIndex | TypeIndex
+ };
+
+protected:
+ enum : uint32_t {
+ flexSize = 0, // TODO: is this still needed? this may be confusing.
+ };
+};
+
+/// Helper class that checks if a type has an integer flexSize member.
+struct C2_HIDE _C2Flexible_impl {
+ /// specialization for types that have a flexSize member
+ template<typename S, unsigned=S::flexSize>
+ static std::true_type __testFlexSize(int);
+ template<typename>
+ static std::false_type __testFlexSize(...);
+};
+
+/// Helper template that returns whether a type has an integer flexSize member.
+template<typename S>
+struct C2_HIDE _C2Flexible
+ : public std::integral_constant<bool, decltype(_C2Flexible_impl::__testFlexSize<S>(0))::value> {
+};
+
+/// Macro to test if a type is flexible (has a flexSize member).
+#define IF_FLEXIBLE(S) ENABLE_IF(_C2Flexible<S>::value)
+/// Shorthand for std::enable_if
+#define ENABLE_IF(cond) typename std::enable_if<cond>::type
+
+/// Helper template that exposes the flexible subtype of a struct.
+template<typename S, typename E=void>
+struct C2_HIDE _C2FlexHelper {
+ typedef void flexType;
+ enum : uint32_t { flexSize = 0 };
+};
+
+/// Specialization for flexible types.
+template<typename S>
+struct C2_HIDE _C2FlexHelper<S,
+ typename std::enable_if<!std::is_void<typename S::flexMemberType>::value>::type> {
+ typedef typename _C2FlexHelper<typename S::flexMemberType>::flexType flexType;
+ enum : uint32_t { flexSize = _C2FlexHelper<typename S::flexMemberType>::flexSize };
+};
+
+/// Specialization for flex arrays.
+template<typename S>
+struct C2_HIDE _C2FlexHelper<S[],
+ typename std::enable_if<std::is_void<typename _C2FlexHelper<S>::flexType>::value>::type> {
+ typedef S flexType;
+ enum : uint32_t { flexSize = sizeof(S) };
+};
+
+/**
+ * \brief Helper class to check flexible struct requirements and add common operations.
+ *
+ * Features:
+ * - expose baseIndex and fieldList (these are normally inherited from the struct, but flexible
+ * structs cannot be used as base classes and thus cannot be inherited from)
+ * - disable copy assignment and construction (TODO: this is already done in the FLEX macro for the
+ * flexible struct, so may not be needed here)
+ */
+template<typename S, int BaseIndex, unsigned TypeIndex>
+struct C2_HIDE C2FlexStructCheck : public C2StructCheck<S, BaseIndex, TypeIndex> {
+public:
+ enum : uint32_t {
+ /// \hideinitializer
+ baseIndex = BaseIndex | C2Param::BaseIndex::_kFlexibleFlag, ///< flexible struct base-index
+ };
+
+ const static std::initializer_list<const C2FieldDescriptor> fieldList; // TODO assign here
+
+ // default constructor needed because of the disabled copy constructor
+ inline C2FlexStructCheck() = default;
+
+protected:
+ // cannot copy flexible params
+ C2FlexStructCheck(const C2FlexStructCheck<S, BaseIndex, TypeIndex> &) = delete;
+ C2FlexStructCheck& operator= (const C2FlexStructCheck<S, BaseIndex, TypeIndex> &) = delete;
+
+ // constants used for helper methods
+ enum : uint32_t {
+ /// \hideinitializer
+ flexSize = _C2FlexHelper<S>::flexSize, ///< size of flexible type
+ /// \hideinitializer
+ maxSize = (uint32_t)std::min((size_t)UINT32_MAX, SIZE_MAX), // TODO: is this always u32 max?
+ /// \hideinitializer
+ baseSize = sizeof(S) + sizeof(C2Param), ///< size of the base param
+ };
+
+ /// returns the allocated size of this param with flexCount, or 0 if it would overflow.
+ inline static size_t calcSize(size_t flexCount, size_t size = baseSize) {
+ if (flexCount <= (maxSize - size) / S::flexSize) {
+ return size + S::flexSize * flexCount;
+ }
+ return 0;
+ }
+
+ /// dynamic new operator usable for params of type S
+ inline void* operator new(size_t size, size_t flexCount) noexcept {
+ // TODO: assert(size == baseSize);
+ size = calcSize(flexCount, size);
+ if (size > 0) {
+ return ::operator new(size);
+ }
+ return nullptr;
+ }
+};
+
+// TODO: this probably does not work.
+/// Expose fieldList from the subclass.
+template<typename S, int BaseIndex, unsigned TypeIndex>
+const std::initializer_list<const C2FieldDescriptor> C2FlexStructCheck<S, BaseIndex, TypeIndex>::fieldList = S::fieldList;
+
+/// Define From() cast operators for params.
+#define DEFINE_CAST_OPERATORS(_type) \
+ inline static _type* From(C2Param *other) { \
+ return (_type*)C2Param::ifSuitable( \
+ other, sizeof(_type),_type::typeIndex, _type::flexSize, \
+ (_type::typeIndex & T::Index::kDirUndefined) != T::Index::kDirUndefined); \
+ } \
+ inline static const _type* From(const C2Param *other) { \
+ return const_cast<const _type*>(From(const_cast<C2Param *>(other))); \
+ } \
+ inline static _type* From(std::nullptr_t) { return nullptr; } \
+
+/**
+ * Define flexible allocators (alloc_shared or alloc_unique) for flexible params.
+ * - P::alloc_xyz(flexCount, args...): allocate for given flex-count.
+ * - P::alloc_xyz(args..., T[]): allocate for size of (and with) init array.
+ * - P::alloc_xyz(T[]): allocate for size of (and with) init array with no other args.
+ * - P::alloc_xyz(args..., std::initializer_list<T>): allocate for size of (and with) initializer
+ * list.
+ */
+#define DEFINE_FLEXIBLE_ALLOC(_type, S, ptr) \
+ template<typename ...Args> \
+ inline static std::ptr##_ptr<_type> alloc_##ptr(size_t flexCount, const Args(&... args)) { \
+ return std::ptr##_ptr<_type>(new(flexCount) _type(flexCount, args...)); \
+ } \
+ /* NOTE: unfortunately this is not supported by clang yet */ \
+ template<typename ...Args, typename U=typename S::flexType, unsigned N> \
+ inline static std::ptr##_ptr<_type> alloc_##ptr(const Args(&... args), const U(&init)[N]) { \
+ return std::ptr##_ptr<_type>(new(N) _type(N, args..., init)); \
+ } \
+ /* so for now, specialize for no args */ \
+ template<typename U=typename S::flexType, unsigned N> \
+ inline static std::ptr##_ptr<_type> alloc_##ptr(const U(&init)[N]) { \
+ return std::ptr##_ptr<_type>(new(N) _type(N, init)); \
+ } \
+ template<typename ...Args, typename U=typename S::flexType> \
+ inline static std::ptr##_ptr<_type> alloc_##ptr( \
+ const Args(&... args), const std::initializer_list<U> &init) { \
+ return std::ptr##_ptr<_type>(new(init.size()) _type(init.size(), args..., init)); \
+ } \
+
+/**
+ * Define flexible methods alloc_shared, alloc_unique and flexCount.
+ */
+#define DEFINE_FLEXIBLE_METHODS(_type, S) \
+ DEFINE_FLEXIBLE_ALLOC(_type, S, shared) \
+ DEFINE_FLEXIBLE_ALLOC(_type, S, unique) \
+ inline size_t flexCount() const { \
+ static_assert(sizeof(_type) == _type::baseSize, "incorrect baseSize"); \
+ size_t sz = this->size(); \
+ if (sz >= sizeof(_type)) { \
+ return (sz - sizeof(_type)) / _type::flexSize; \
+ } \
+ return 0; \
+ } \
+
+/// Mark flexible member variable and make structure flexible.
+#define FLEX(cls, m) \
+ C2_DO_NOT_COPY(cls) \
+private: \
+ C2PARAM_MAKE_FRIENDS \
+ /* default constructor with flexCount */ \
+ inline cls(size_t) : cls() {} \
+ /** \if 0 */ \
+ template<typename, typename> friend struct _C2FlexHelper; \
+ typedef decltype(m) flexMemberType; \
+public: \
+ /* constexpr static flexMemberType cls::* flexMember = &cls::m; */ \
+ typedef typename _C2FlexHelper<flexMemberType>::flexType flexType; \
+ static_assert(\
+ !std::is_void<flexType>::value, \
+ "member is not flexible, or a flexible array of a flexible type"); \
+ enum : uint32_t { flexSize = _C2FlexHelper<flexMemberType>::flexSize }; \
+ /** \endif */ \
+
+/// @}
+
+/**
+ * Global-parameter template.
+ *
+ * Base template to define a global setting/tuning or info based on a structure and
+ * an optional BaseIndex. Global parameters are not tied to a port (input or output).
+ *
+ * Parameters wrap structures by prepending a (parameter) header. The fields of the wrapped
+ * structure can be accessed directly, and constructors and potential public methods are also
+ * wrapped.
+ *
+ * \tparam T param type C2Setting, C2Tuning or C2Info
+ * \tparam S wrapped structure
+ * \tparam BaseIndex optional base-index override. Must be specified for common/reused structures.
+ */
+template<typename T, typename S, int BaseIndex=S::baseIndex, class Flex=void>
+struct C2_HIDE C2GlobalParam : public T, public S, public C2BaseIndexOverride<S, BaseIndex>,
+ public C2StructCheck<S, BaseIndex, T::indexFlags | T::Type::kDirGlobal> {
+private:
+ typedef C2GlobalParam<T, S, BaseIndex> _type;
+
+public:
+ /// Wrapper around base structure's constructor.
+ template<typename ...Args>
+ inline C2GlobalParam(const Args(&... args)) : T(sizeof(_type), _type::typeIndex), S(args...) { }
+
+ DEFINE_CAST_OPERATORS(_type)
+};
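+
+// Usage sketch (C2VideoWidthStruct is the illustrative struct from C2Param.h's C2FIELD docs;
+// the typedef name is hypothetical):
+//   typedef C2GlobalParam<C2Setting, C2VideoWidthStruct> C2VideoWidthGlobalSetting;
+//   C2VideoWidthGlobalSetting width(1920);  // forwards to C2VideoWidthStruct(int32_t)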
+
+/**
+ * Global-parameter template for flexible structures.
+ *
+ * Base template to define a global setting/tuning or info based on a flexible structure and
+ * an optional BaseIndex. Global parameters are not tied to a port (input or output).
+ *
+ * \tparam T param type C2Setting, C2Tuning or C2Info
+ * \tparam S wrapped flexible structure
+ * \tparam BaseIndex optional base-index override. Must be specified for common/reused structures.
+ *
+ * Parameters wrap structures by prepending a (parameter) header. The fields and methods of flexible
+ * structures can be accessed via the m member variable; however, the constructors of the structure
+ * are wrapped directly. (This is because flexible types cannot be subclassed.)
+ */
+template<typename T, typename S, int BaseIndex>
+struct C2_HIDE C2GlobalParam<T, S, BaseIndex, IF_FLEXIBLE(S)>
+ : public T, public C2FlexStructCheck<S, BaseIndex, T::indexFlags | T::Type::kDirGlobal> {
+private:
+ typedef C2GlobalParam<T, S, BaseIndex> _type;
+
+ /// Wrapper around base structure's constructor.
+ template<typename ...Args>
+ inline C2GlobalParam(size_t flexCount, const Args(&... args))
+ : T(_type::calcSize(flexCount), _type::typeIndex), m(flexCount, args...) { }
+
+public:
+ S m; ///< wrapped flexible structure
+
+ DEFINE_FLEXIBLE_METHODS(_type, S)
+ DEFINE_CAST_OPERATORS(_type)
+};
+
+/**
+ * Port-parameter template.
+ *
+ * Base template to define a port setting/tuning or info based on a structure and
+ * an optional BaseIndex. Port parameters are tied to a port (input or output), but not to a
+ * specific stream.
+ *
+ * \tparam T param type C2Setting, C2Tuning or C2Info
+ * \tparam S wrapped structure
+ * \tparam BaseIndex optional base-index override. Must be specified for common/reused structures.
+ *
+ * Parameters wrap structures by prepending a (parameter) header. The fields of the wrapped
+ * structure can be accessed directly, and constructors and potential public methods are also
+ * wrapped.
+ *
+ * There are 3 flavors of port parameters: unspecified, input and output. Parameters with
+ * unspecified port expose a setPort method, and add an initial port parameter to the constructor.
+ */
+template<typename T, typename S, int BaseIndex=S::baseIndex, class Flex=void>
+struct C2_HIDE C2PortParam : public T, public S, public C2BaseIndexOverride<S, BaseIndex>,
+ private C2StructCheck<S, BaseIndex, T::indexFlags | T::Index::kDirUndefined> {
+private:
+ typedef C2PortParam<T, S, BaseIndex> _type;
+
+public:
+ /// Default constructor.
+ inline C2PortParam() : T(sizeof(_type), _type::typeIndex) { }
+ template<typename ...Args>
+ /// Wrapper around base structure's constructor while specifying port/direction.
+ inline C2PortParam(bool _output, const Args(&... args))
+ : T(sizeof(_type), _output ? output::typeIndex : input::typeIndex), S(args...) { }
+ /// Set port/direction.
+ inline void setPort(bool output) { C2Param::setPort(output); }
+
+ DEFINE_CAST_OPERATORS(_type)
+
+ /// Specialization for an input port parameter.
+ struct input : public T, public S, public C2BaseIndexOverride<S, BaseIndex>,
+ public C2StructCheck<S, BaseIndex, T::indexFlags | T::Index::kDirInput> {
+ /// Wrapper around base structure's constructor.
+ template<typename ...Args>
+ inline input(const Args(&... args)) : T(sizeof(_type), input::typeIndex), S(args...) { }
+
+ DEFINE_CAST_OPERATORS(input)
+
+ };
+
+ /// Specialization for an output port parameter.
+ struct output : public T, public S, public C2BaseIndexOverride<S, BaseIndex>,
+ public C2StructCheck<S, BaseIndex, T::indexFlags | T::Index::kDirOutput> {
+ /// Wrapper around base structure's constructor.
+ template<typename ...Args>
+ inline output(const Args(&... args)) : T(sizeof(_type), output::typeIndex), S(args...) { }
+
+ DEFINE_CAST_OPERATORS(output)
+ };
+};
+
+/**
+ * Port-parameter template for flexible structures.
+ *
+ * Base template to define a port setting/tuning or info based on a flexible structure and
+ * an optional BaseIndex. Port parameters are tied to a port (input or output), but not to a
+ * specific stream.
+ *
+ * \tparam T param type C2Setting, C2Tuning or C2Info
+ * \tparam S wrapped flexible structure
+ * \tparam BaseIndex optional base-index override. Must be specified for common/reused structures.
+ *
+ * Parameters wrap structures by prepending a (parameter) header. The fields and methods of flexible
+ * structures can be accessed via the m member variable; however, the constructors of the structure
+ * are wrapped directly. (This is because flexible types cannot be subclassed.)
+ *
+ * There are 3 flavors of port parameters: unspecified, input and output. Parameters with
+ * unspecified port expose a setPort method, and add an initial port parameter to the constructor.
+ */
+template<typename T, typename S, int BaseIndex>
+struct C2_HIDE C2PortParam<T, S, BaseIndex, IF_FLEXIBLE(S)>
+ : public T, public C2FlexStructCheck<S, BaseIndex, T::indexFlags | T::Type::kDirUndefined> {
+private:
+ typedef C2PortParam<T, S, BaseIndex> _type;
+
+ /// Default constructor for basic allocation: new(flexCount) P.
+ inline C2PortParam(size_t flexCount) : T(_type::calcSize(flexCount), _type::typeIndex) { }
+ template<typename ...Args>
+ /// Wrapper around base structure's constructor while also specifying port/direction.
+ inline C2PortParam(size_t flexCount, bool _output, const Args(&... args))
+ : T(_type::calcSize(flexCount), _output ? output::typeIndex : input::typeIndex),
+ m(flexCount, args...) { }
+
+public:
+ /// Set port/direction.
+ inline void setPort(bool output) { C2Param::setPort(output); }
+
+ S m; ///< wrapped flexible structure
+
+ DEFINE_FLEXIBLE_METHODS(_type, S)
+ DEFINE_CAST_OPERATORS(_type)
+
+ /// Specialization for an input port parameter.
+ struct input : public T, public C2BaseIndexOverride<S, BaseIndex>,
+ public C2FlexStructCheck<S, BaseIndex, T::indexFlags | T::Index::kDirInput> {
+ private:
+ /// Wrapper around base structure's constructor while also specifying port/direction.
+ template<typename ...Args>
+ inline input(size_t flexCount, const Args(&... args))
+ : T(_type::calcSize(flexCount), input::typeIndex), m(flexCount, args...) { }
+
+ public:
+ S m; ///< wrapped flexible structure
+
+ DEFINE_FLEXIBLE_METHODS(input, S)
+ DEFINE_CAST_OPERATORS(input)
+ };
+
+ /// Specialization for an output port parameter.
+ struct output : public T, public C2BaseIndexOverride<S, BaseIndex>,
+ public C2FlexStructCheck<S, BaseIndex, T::indexFlags | T::Index::kDirOutput> {
+ private:
+ /// Wrapper around base structure's constructor while also specifying port/direction.
+ template<typename ...Args>
+ inline output(size_t flexCount, const Args(&... args))
+ : T(_type::calcSize(flexCount), output::typeIndex), m(flexCount, args...) { }
+
+ public:
+ S m; ///< wrapped flexible structure
+
+ DEFINE_FLEXIBLE_METHODS(output, S)
+ DEFINE_CAST_OPERATORS(output)
+ };
+};
+
+/**
+ * Stream-parameter template.
+ *
+ * Base template to define a stream setting/tuning or info based on a structure and
+ * an optional BaseIndex. Stream parameters are tied to a specific stream on a port (input or
+ * output).
+ *
+ * \tparam T param type C2Setting, C2Tuning or C2Info
+ * \tparam S wrapped structure
+ * \tparam BaseIndex optional base-index override. Must be specified for common/reused structures.
+ *
+ * Parameters wrap structures by prepending a (parameter) header. The fields of the wrapped
+ * structure can be accessed directly, and constructors and potential public methods are also
+ * wrapped.
+ *
+ * There are 3 flavors of stream parameters: unspecified port, input and output. All of these expose
+ * a setStream method and an extra initial streamID parameter for the constructor. Moreover,
+ * parameters with unspecified port expose a setPort method, and add an additional initial port
+ * parameter to the constructor.
+ */
+template<typename T, typename S, int BaseIndex=S::baseIndex, class Flex=void>
+struct C2_HIDE C2StreamParam : public T, public S, public C2BaseIndexOverride<S, BaseIndex>,
+ private C2StructCheck<S, BaseIndex,
+ T::indexFlags | T::Index::kStreamFlag | T::Index::kDirUndefined> {
+private:
+ typedef C2StreamParam<T, S, BaseIndex> _type;
+
+public:
+ /// Default constructor. Port/direction and stream-ID are undefined.
+ inline C2StreamParam() : T(sizeof(_type), _type::typeIndex) { }
+ /// Wrapper around base structure's constructor while also specifying port/direction and
+ /// stream-ID.
+ template<typename ...Args>
+ inline C2StreamParam(bool _output, unsigned stream, const Args(&... args))
+ : T(sizeof(_type), _output ? output::typeIndex : input::typeIndex, stream),
+ S(args...) { }
+ /// Set port/direction.
+ inline void setPort(bool output) { C2Param::setPort(output); }
+ /// Set stream-id. \retval true if the stream-id was successfully set.
+ inline bool setStream(unsigned stream) { return C2Param::setStream(stream); }
+
+ DEFINE_CAST_OPERATORS(_type)
+
+ /// Specialization for an input stream parameter.
+ struct input : public T, public S, public C2BaseIndexOverride<S, BaseIndex>,
+ public C2StructCheck<S, BaseIndex,
+ T::indexFlags | T::Index::kStreamFlag | T::Type::kDirInput> {
+ /// Default constructor. Stream-ID is undefined.
+ inline input() : T(sizeof(_type), input::typeIndex) { }
+ /// Wrapper around base structure's constructor while also specifying stream-ID.
+ template<typename ...Args>
+ inline input(unsigned stream, const Args(&... args))
+ : T(sizeof(_type), input::typeIndex, stream), S(args...) { }
+ /// Set stream-id. \retval true if the stream-id was successfully set.
+ inline bool setStream(unsigned stream) { return C2Param::setStream(stream); }
+
+ DEFINE_CAST_OPERATORS(input)
+ };
+
+ /// Specialization for an output stream parameter.
+ struct output : public T, public S, public C2BaseIndexOverride<S, BaseIndex>,
+ public C2StructCheck<S, BaseIndex,
+ T::indexFlags | T::Index::kStreamFlag | T::Type::kDirOutput> {
+ /// Default constructor. Stream-ID is undefined.
+ inline output() : T(sizeof(_type), output::typeIndex) { }
+ /// Wrapper around base structure's constructor while also specifying stream-ID.
+ template<typename ...Args>
+ inline output(unsigned stream, const Args(&... args))
+ : T(sizeof(_type), output::typeIndex, stream), S(args...) { }
+ /// Set stream-id. \retval true if the stream-id was successfully set.
+ inline bool setStream(unsigned stream) { return C2Param::setStream(stream); }
+
+ DEFINE_CAST_OPERATORS(output)
+ };
+};
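+
+// Usage sketch mirroring the global-parameter example above (names are hypothetical):
+//   typedef C2StreamParam<C2Tuning, C2VideoWidthStruct> C2VideoWidthStreamTuning;
+//   C2VideoWidthStreamTuning::output width(0u /* stream */, 1920);
+//   width.setStream(1u);  // rebind to another stream if needed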
+
+/**
+ * Stream-parameter template for flexible structures.
+ *
+ * Base template to define a stream setting/tuning or info based on a flexible structure and
+ * an optional BaseIndex. Stream parameters are tied to a specific stream on a port (input or
+ * output).
+ *
+ * \tparam T param type C2Setting, C2Tuning or C2Info
+ * \tparam S wrapped flexible structure
+ * \tparam BaseIndex optional base-index override. Must be specified for common/reused structures.
+ *
+ * Parameters wrap structures by prepending a (parameter) header. The fields and methods of flexible
+ * structures can be accessed via the m member variable; however, the constructors of the structure
+ * are wrapped directly. (This is because flexible types cannot be subclassed.)
+ *
+ * There are 3 flavors of stream parameters: unspecified port, input and output. All of these expose
+ * a setStream method and an extra initial streamID parameter for the constructor. Moreover,
+ * parameters with unspecified port expose a setPort method, and add an additional initial port
+ * parameter to the constructor.
+ */
+template<typename T, typename S, int BaseIndex>
+struct C2_HIDE C2StreamParam<T, S, BaseIndex, IF_FLEXIBLE(S)>
+ : public T, public C2BaseIndexOverride<S, BaseIndex>,
+ private C2FlexStructCheck<S, BaseIndex,
+ T::indexFlags | T::Index::kStreamFlag | T::Index::kDirUndefined> {
+private:
+ typedef C2StreamParam<T, S> _type;
+ /// Default constructor. Port/direction and stream-ID are undefined.
+ inline C2StreamParam(size_t flexCount) : T(_type::calcSize(flexCount), _type::typeIndex, 0u) { }
+ /// Wrapper around base structure's constructor while also specifying port/direction and
+ /// stream-ID.
+ template<typename ...Args>
+ inline C2StreamParam(size_t flexCount, bool _output, unsigned stream, const Args(&... args))
+ : T(_type::calcSize(flexCount), _output ? output::typeIndex : input::typeIndex, stream),
+ m(flexCount, args...) { }
+
+public:
+ S m; ///< wrapped flexible structure
+
+ /// Set port/direction.
+ inline void setPort(bool output) { C2Param::setPort(output); }
+ /// Set stream-id. \retval true if the stream-id was successfully set.
+ inline bool setStream(unsigned stream) { return C2Param::setStream(stream); }
+
+ DEFINE_FLEXIBLE_METHODS(_type, S)
+ DEFINE_CAST_OPERATORS(_type)
+
+ /// Specialization for an input stream parameter.
+ struct input : public T, public C2BaseIndexOverride<S, BaseIndex>,
+ public C2FlexStructCheck<S, BaseIndex,
+ T::indexFlags | T::Index::kStreamFlag | T::Type::kDirInput> {
+ private:
+ /// Default constructor. Stream-ID is undefined.
+ inline input(size_t flexCount) : T(_type::calcSize(flexCount), input::typeIndex) { }
+ /// Wrapper around base structure's constructor while also specifying stream-ID.
+ template<typename ...Args>
+ inline input(size_t flexCount, unsigned stream, const Args(&... args))
+ : T(_type::calcSize(flexCount), input::typeIndex, stream), m(flexCount, args...) { }
+
+ public:
+ S m; ///< wrapped flexible structure
+
+ /// Set stream-id. \retval true if the stream-id was successfully set.
+ inline bool setStream(unsigned stream) { return C2Param::setStream(stream); }
+
+ DEFINE_FLEXIBLE_METHODS(input, S)
+ DEFINE_CAST_OPERATORS(input)
+ };
+
+ /// Specialization for an output stream parameter.
+ struct output : public T, public C2BaseIndexOverride<S, BaseIndex>,
+ public C2FlexStructCheck<S, BaseIndex,
+ T::indexFlags | T::Index::kStreamFlag | T::Type::kDirOutput> {
+ private:
+ /// Default constructor. Stream-ID is undefined.
+ inline output(size_t flexCount) : T(_type::calcSize(flexCount), output::typeIndex) { }
+ /// Wrapper around base structure's constructor while also specifying stream-ID.
+ template<typename ...Args>
+ inline output(size_t flexCount, unsigned stream, const Args(&... args))
+ : T(_type::calcSize(flexCount), output::typeIndex, stream), m(flexCount, args...) { }
+
+ public:
+ S m; ///< wrapped flexible structure
+
+ /// Set stream-id. \retval true if the stream-id was successfully set.
+ inline bool setStream(unsigned stream) { return C2Param::setStream(stream); }
+
+ DEFINE_FLEXIBLE_METHODS(output, S)
+ DEFINE_CAST_OPERATORS(output)
+ };
+};
+
+/* ======================== SIMPLE VALUE PARAMETERS ======================== */
+
+/**
+ * \ingroup internal
+ * A structure template encapsulating a single element with default constructors and no base-index.
+ */
+template<typename T>
+struct C2SimpleValueStruct {
+ T mValue; ///< simple value of the structure
+ // Default constructor.
+ inline C2SimpleValueStruct() = default;
+ // Constructor with an initial value.
+ inline C2SimpleValueStruct(T value) : mValue(value) {}
+ DEFINE_C2STRUCT_NO_BASE(SimpleValue)
+};
+
+// TODO: move this and next to some generic place
+/**
+ * Interface to a block of (mapped) memory containing an array of some type (T).
+ */
+template<typename T>
+struct C2MemoryBlock {
+ /// \returns the number of elements in this block.
+ virtual size_t size() const = 0;
+ /// \returns a const pointer to the start of this block. Care must be taken to not read outside
+ /// the block.
+ virtual const T *data() const = 0; // TODO: should this be friend access only in some C2Memory module?
+ /// \returns a pointer to the start of this block. Care must be taken to not read or write
+ /// outside the block.
+ inline T *data() { return const_cast<T*>(data()); }
+protected:
+ // TODO: for now it should never be deleted as C2MemoryBlock
+ virtual ~C2MemoryBlock() = default;
+};
+
+/**
+ * Interface to a block of memory containing a constant (constexpr) array of some type (T).
+ */
+template<typename T>
+struct C2ConstMemoryBlock : public C2MemoryBlock<T> {
+ virtual const T * data() const { return mData; }
+ virtual size_t size() const { return mSize; }
+
+ /// Constructor.
+ template<unsigned N>
+ inline constexpr C2ConstMemoryBlock(const T(&init)[N]) : mData(init), mSize(N) {}
+
+private:
+ const T *mData;
+ const size_t mSize;
+};
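+
+// Example: exposing a static array as a memory block (values are arbitrary):
+//   static const int32_t kWidths[] = {176, 352, 704};
+//   C2ConstMemoryBlock<int32_t> widthBlock(kWidths);  // widthBlock.size() == 3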
+
+/// \addtogroup internal
+/// @{
+
+/// Helper class to initialize flexible arrays with various initializers.
+struct _C2ValueArrayHelper {
+ // char[]-s are used as null-terminated strings, so the last element is never initialized.
+
+ /// Initialize a flexible array using a constexpr memory block.
+ template<typename T>
+ static void init(T(&array)[], size_t arrayLen, const C2MemoryBlock<T> &block) {
+ // reserve last element for terminal 0 for strings
+ if (arrayLen && std::is_same<T, char>::value) {
+ --arrayLen;
+ }
+ if (block.data()) {
+ memcpy(array, block.data(), std::min(arrayLen, block.size()) * sizeof(T));
+ }
+ }
+
+ /// Initialize a flexible array using an initializer list.
+ template<typename T>
+ static void init(T(&array)[], size_t arrayLen, const std::initializer_list<T> &init) {
+ size_t ix = 0;
+ // reserve last element for terminal 0 for strings
+ if (arrayLen && std::is_same<T, char>::value) {
+ --arrayLen;
+ }
+ for (const T &item : init) {
+ if (ix == arrayLen) {
+ break;
+ }
+ array[ix++] = item;
+ }
+ }
+
+ /// Initialize a flexible array using another flexible array.
+ template<typename T, unsigned N>
+ static void init(T(&array)[], size_t arrayLen, const T(&str)[N]) {
+ // reserve last element for terminal 0 for strings
+ if (arrayLen && std::is_same<T, char>::value) {
+ --arrayLen;
+ }
+ if (arrayLen) {
+ strncpy(array, str, std::min(arrayLen, (size_t)N));
+ }
+ }
+};
+
+/**
+ * Specialization for flexible blob and string arrays. A structure template encapsulating a single
+ * flexible array member with default flexible constructors and no base-index. This type cannot be
+ * constructed on its own as its size is 0.
+ *
+ * \internal This differs from C2SimpleArrayStruct<T[]> only in that its member is named mValue to
+ * reflect that it holds a single value.
+ */
+template<typename T>
+struct C2SimpleValueStruct<T[]> {
+ static_assert(std::is_same<T, char>::value || std::is_same<T, uint8_t>::value,
+ "C2SimpleValueStruct<T[]> is only for BLOB or STRING");
+ T mValue[];
+
+ inline C2SimpleValueStruct() = default;
+ DEFINE_C2STRUCT_NO_BASE(SimpleValue)
+ FLEX(C2SimpleValueStruct, mValue)
+
+private:
+ inline C2SimpleValueStruct(size_t flexCount, const C2MemoryBlock<T> &block) {
+ _C2ValueArrayHelper::init(mValue, flexCount, block);
+ }
+
+ inline C2SimpleValueStruct(size_t flexCount, const std::initializer_list<T> &init) {
+ _C2ValueArrayHelper::init(mValue, flexCount, init);
+ }
+
+ template<unsigned N>
+ inline C2SimpleValueStruct(size_t flexCount, const T(&init)[N]) {
+ _C2ValueArrayHelper::init(mValue, flexCount, init);
+ }
+};
+
+/// @}
+
+/**
+ * A structure template encapsulating a single flexible array element of a specific type (T) with
+ * default constructors and no base-index. This type cannot be constructed on its own as its size
+ * is 0. Instead, it is meant to be used as a parameter, e.g.
+ *
+ * typedef C2StreamParam<C2Info, C2SimpleArrayStruct<C2MyFancyStruct>,
+ * kParamIndexMyFancyArrayStreamParam> C2MyFancyArrayStreamInfo;
+ */
+template<typename T>
+struct C2SimpleArrayStruct {
+ static_assert(!std::is_same<T, char>::value && !std::is_same<T, uint8_t>::value,
+ "use C2SimpleValueStruct<T[]> is for BLOB or STRING");
+
+ T mValues[]; ///< array member
+ /// Default constructor
+ inline C2SimpleArrayStruct() = default;
+ DEFINE_C2STRUCT_NO_BASE(SimpleArray)
+ FLEX(C2SimpleArrayStruct, mValues)
+
+private:
+ /// Construct from a C2MemoryBlock.
+ /// Used only by the flexible parameter allocators (alloc_unique & alloc_shared).
+ inline C2SimpleArrayStruct(size_t flexCount, const C2MemoryBlock<T> &block) {
+ _C2ValueArrayHelper::init(mValues, flexCount, block);
+ }
+
+ /// Construct from an initializer list.
+ /// Used only by the flexible parameter allocators (alloc_unique & alloc_shared).
+ inline C2SimpleArrayStruct(size_t flexCount, const std::initializer_list<T> &init) {
+ _C2ValueArrayHelper::init(mValues, flexCount, init);
+ }
+
+ /// Construct from another flexible array.
+ /// Used only by the flexible parameter allocators (alloc_unique & alloc_shared).
+ template<unsigned N>
+ inline C2SimpleArrayStruct(size_t flexCount, const T(&init)[N]) {
+ _C2ValueArrayHelper::init(mValues, flexCount, init);
+ }
+};
+
+/**
+ * \addtogroup simplevalue Simple value and array structures.
+ * @{
+ *
+ * Simple value structures.
+ *
+ * Structures containing a single simple value. These can be reused to easily define simple
+ * parameters of various types:
+ *
+ * typedef C2PortParam<C2Tuning, C2Int32Value, kParamIndexMyIntegerPortParam>
+ * C2MyIntegerPortParamTuning;
+ *
+ * They contain a single member (mValue or mValues) that is described as "value" or "values".
+ */
+/// A 32-bit signed integer parameter in mValue, described as "value"
+typedef C2SimpleValueStruct<int32_t> C2Int32Value;
+/// A 32-bit signed integer array parameter in mValues, described as "values"
+typedef C2SimpleArrayStruct<int32_t> C2Int32Array;
+/// A 32-bit unsigned integer parameter in mValue, described as "value"
+typedef C2SimpleValueStruct<uint32_t> C2Uint32Value;
+/// A 32-bit unsigned integer array parameter in mValues, described as "values"
+typedef C2SimpleArrayStruct<uint32_t> C2Uint32Array;
+/// A 64-bit signed integer parameter in mValue, described as "value"
+typedef C2SimpleValueStruct<int64_t> C2Int64Value;
+/// A 64-bit signed integer array parameter in mValues, described as "values"
+typedef C2SimpleArrayStruct<int64_t> C2Int64Array;
+/// A 64-bit unsigned integer parameter in mValue, described as "value"
+typedef C2SimpleValueStruct<uint64_t> C2Uint64Value;
+/// A 64-bit unsigned integer array parameter in mValues, described as "values"
+typedef C2SimpleArrayStruct<uint64_t> C2Uint64Array;
+/// A float parameter in mValue, described as "value"
+typedef C2SimpleValueStruct<float> C2FloatValue;
+/// A float array parameter in mValues, described as "values"
+typedef C2SimpleArrayStruct<float> C2FloatArray;
+/// A blob flexible parameter in mValue, described as "value"
+typedef C2SimpleValueStruct<uint8_t[]> C2BlobValue;
+/// A string flexible parameter in mValue, described as "value"
+typedef C2SimpleValueStruct<char[]> C2StringValue;
+
+#if 1
+template<typename T>
+const std::initializer_list<const C2FieldDescriptor> C2SimpleValueStruct<T>::fieldList = { C2FIELD(mValue, "value") };
+template<typename T>
+const std::initializer_list<const C2FieldDescriptor> C2SimpleValueStruct<T[]>::fieldList = { C2FIELD(mValue, "value") };
+template<typename T>
+const std::initializer_list<const C2FieldDescriptor> C2SimpleArrayStruct<T>::fieldList = { C2FIELD(mValues, "values") };
+#else
+// This seems to be handled by the template above
+DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleValueStruct<int32_t>, { C2FIELD(mValue, "value") });
+DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleValueStruct<uint32_t>, { C2FIELD(mValue, "value") });
+DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleValueStruct<int64_t>, { C2FIELD(mValue, "value") });
+DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleValueStruct<uint64_t>, { C2FIELD(mValue, "value") });
+DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleValueStruct<float>, { C2FIELD(mValue, "value") });
+DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleValueStruct<uint8_t[]>, { C2FIELD(mValue, "value") });
+DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleValueStruct<char[]>, { C2FIELD(mValue, "value") });
+DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleArrayStruct<int32_t>, { C2FIELD(mValues, "values") });
+DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleArrayStruct<uint32_t>, { C2FIELD(mValues, "values") });
+DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleArrayStruct<int64_t>, { C2FIELD(mValues, "values") });
+DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleArrayStruct<uint64_t>, { C2FIELD(mValues, "values") });
+DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleArrayStruct<float>, { C2FIELD(mValues, "values") });
+#endif
+
+/// @}
+
+/// @}
+
+} // namespace android
+
+#endif // C2PARAM_DEF_H_
diff --git a/media/libstagefright/codec2/include/C2Work.h b/media/libstagefright/codec2/include/C2Work.h
new file mode 100644
index 0000000..a42d11a
--- /dev/null
+++ b/media/libstagefright/codec2/include/C2Work.h
@@ -0,0 +1,162 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef C2WORK_H_
+
+#define C2WORK_H_
+
+#include <stdint.h>
+#include <stdbool.h>
+#include <C2Param.h>
+#include <C2Buffer.h>
+#include <C2Config.h>
+
+#include <memory>
+#include <list>
+#include <vector>
+
+typedef int status_t;
+
+namespace android {
+
+/// \defgroup work Work and data processing
+/// @{
+
+struct C2SettingResult {
+ enum Failure {
+ READ_ONLY, ///< parameter is read-only and cannot be set
+ MISMATCH, ///< parameter mismatches input data
+ BAD_VALUE, ///< parameter does not accept value
+ BAD_TYPE, ///< parameter is not supported
+ BAD_PORT, ///< parameter is not supported on the specific port
+ BAD_INDEX, ///< parameter is not supported on the specific stream
+ CONFLICT, ///< parameter is in conflict with another setting
+ };
+
+ C2ParamField field;
+ Failure failure;
+ std::unique_ptr<C2FieldSupportedValues> supportedValues; //< if different from normal (e.g. in conflict w/another param or input data)
+ std::list<C2ParamField> conflictingFields;
+};
+
+// ================================================================================================
+// WORK
+// ================================================================================================
+
+// node_id-s
+typedef uint32_t node_id;
+
+enum flags_t : uint32_t {
+ BUFFERFLAG_CODEC_CONFIG,
+ BUFFERFLAG_DROP_FRAME,
+ BUFFERFLAG_END_OF_STREAM,
+};
+
+enum {
+ kParamIndexWorkOrdinal,
+};
+
+struct C2WorkOrdinalStruct {
+ uint64_t timestamp;
+ uint64_t frame_index; // submission ordinal on the initial component
+ uint64_t custom_ordinal; // can be given by the component, e.g. decode order
+
+ DEFINE_AND_DESCRIBE_C2STRUCT(WorkOrdinal)
+ C2FIELD(timestamp, "timestamp")
+ C2FIELD(frame_index, "frame-index")
+ C2FIELD(custom_ordinal, "custom-ordinal")
+};
+
+struct C2BufferPack {
+//public:
+ flags_t flags;
+ C2WorkOrdinalStruct ordinal;
+ std::vector<std::shared_ptr<C2Buffer>> buffers;
+ //< for initial work item, these may also come from the parser - if provided
+ //< for output buffers, these are the responses to requestedInfos
+ std::list<std::unique_ptr<C2Info>> infos;
+ std::list<std::shared_ptr<C2InfoBuffer>> infoBuffers;
+};
+
+struct C2Worklet {
+//public:
+ // IN
+ node_id component;
+
+ std::list<std::unique_ptr<C2Param>> tunings; //< tunings to be applied before processing this
+ // worklet
+ std::list<C2Param::Type> requestedInfos;
+ std::vector<std::shared_ptr<C2BlockAllocator>> allocators; //< This vector shall be the same size as
+ //< output.buffers.
+
+ // OUT
+ C2BufferPack output;
+ std::list<std::unique_ptr<C2SettingResult>> failures;
+};
+
+/**
+ * This structure holds all the information about a single work item.
+ *
+ * This structure shall be passed by the client to the component for the first worklet. As such,
+ * worklets must not be empty. The ownership of this object is passed.
+ *
+ * input:
+ * The input data to be processed. This is provided by the client with ownership. When the work
+ * is returned, the input buffer-pack's buffer vector shall contain nullptrs.
+ *
+ * worklets:
+ * The chain of components and associated allocators, tunings and info requests that the data
+ * must pass through. If this has more than a single element, the tunnels between successive
+ * components of the worklet chain must have been (successfully) pre-registered at the time
+ * the work is submitted. Allocating the output buffers in the worklets is the responsibility
+ * of each component. Upon work submission, each output buffer-pack shall be an appropriately
+ * sized vector containing nullptrs. When the work is completed/returned to the client, the
+ * output buffer-packs of the processed worklets shall contain the buffers that were produced.
+ *
+ * worklets_processed:
+ * It shall be initialized to 0 by the client when the work is submitted.
+ * It shall contain the number of worklets that were successfully processed when the work is
+ * returned. If this is less than the number of worklets, result must not be success.
+ * It must be in the range of [0, worklets.size()].
+ *
+ * result:
+ * The final outcome of the work. If 0 when work is returned, it is assumed that all worklets
+ * have been processed.
+ */
+struct C2Work {
+//public:
+ // pre-chain infos (for portions of a tunneling chain that happened before this work-chain for
+ // this work item - due to framework-facilitated (non-tunneled) work-chaining)
+ std::list<std::pair<std::unique_ptr<C2PortMimeConfig>, std::unique_ptr<C2Info>>> preChainInfos;
+ std::list<std::pair<std::unique_ptr<C2PortMimeConfig>, std::unique_ptr<C2Buffer>>> preChainInfoBlobs;
+
+ C2BufferPack input;
+ std::list<std::unique_ptr<C2Worklet>> worklets;
+
+ uint32_t worklets_processed;
+ status_t result;
+};
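+
+// Minimal sketch of filling in a single work item for submission ('inputBuffer' and 'component'
+// are assumed to exist; allocator/tuning setup is omitted):
+//   std::unique_ptr<C2Work> work(new C2Work());
+//   work->input.ordinal.timestamp = 0;
+//   work->input.ordinal.frame_index = 0;
+//   work->input.buffers.emplace_back(inputBuffer);     // std::shared_ptr<C2Buffer>
+//   work->worklets.emplace_back(new C2Worklet());
+//   work->worklets.front()->component = component;     // node_id of the target component
+//   work->worklets_processed = 0;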
+
+struct C2WorkOutline {
+//public:
+ C2WorkOrdinalStruct ordinal;
+ std::list<node_id> chain;
+};
+
+/// @}
+
+} // namespace android
+
+#endif // C2WORK_H_
diff --git a/media/libstagefright/codec2/tests/Android.mk b/media/libstagefright/codec2/tests/Android.mk
new file mode 100644
index 0000000..49c4253
--- /dev/null
+++ b/media/libstagefright/codec2/tests/Android.mk
@@ -0,0 +1,37 @@
+# Build the unit tests.
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+LOCAL_ADDITIONAL_DEPENDENCIES := $(LOCAL_PATH)/Android.mk
+
+LOCAL_MODULE := codec2_test
+
+LOCAL_MODULE_TAGS := tests
+
+LOCAL_SRC_FILES := \
+ vndk/C2UtilTest.cpp \
+ C2_test.cpp \
+ C2Param_test.cpp \
+
+LOCAL_SHARED_LIBRARIES := \
+ libcutils \
+ libstagefright_codec2 \
+ liblog
+
+LOCAL_C_INCLUDES := \
+ frameworks/av/media/libstagefright/codec2/include \
+ frameworks/av/media/libstagefright/codec2/vndk/include \
+ $(TOP)/frameworks/native/include/media/openmax \
+
+LOCAL_CFLAGS += -Werror -Wall -std=c++14
+LOCAL_CLANG := true
+
+include $(BUILD_NATIVE_TEST)
+
+# Include subdirectory makefiles
+# ============================================================
+
+# If we're building with ONE_SHOT_MAKEFILE (mm, mmm), then what the framework
+# team really wants is to build the stuff defined by this makefile.
+ifeq (,$(ONE_SHOT_MAKEFILE))
+include $(call first-makefiles-under,$(LOCAL_PATH))
+endif
diff --git a/media/libstagefright/codec2/tests/C2Param_test.cpp b/media/libstagefright/codec2/tests/C2Param_test.cpp
new file mode 100644
index 0000000..ec82c84
--- /dev/null
+++ b/media/libstagefright/codec2/tests/C2Param_test.cpp
@@ -0,0 +1,2687 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2Param_test"
+
+#include <gtest/gtest.h>
+
+#include <util/C2ParamUtils.h>
+#include <C2ParamDef.h>
+
+namespace android {
+
+void PrintTo(const _C2FieldId &id, ::std::ostream* os) {
+ *os << "@" << id._mOffset << "+" << id._mSize;
+}
+
+void PrintTo(const C2FieldDescriptor &fd, ::std::ostream *os) {
+ using FD=C2FieldDescriptor;
+ switch (fd.type()) {
+ case FD::INT32: *os << "i32"; break;
+ case FD::INT64: *os << "i64"; break;
+ case FD::UINT32: *os << "u32"; break;
+ case FD::UINT64: *os << "u64"; break;
+ case FD::FLOAT: *os << "float"; break;
+ case FD::STRING: *os << "char"; break;
+ case FD::BLOB: *os << "u8"; break;
+ default:
+ if (fd.type() & FD::STRUCT_FLAG) {
+ *os << "struct-" << (fd.type() & ~FD::STRUCT_FLAG);
+ } else {
+ *os << "type-" << fd.type();
+ }
+ }
+ *os << " " << fd.name();
+ if (fd.length() > 1) {
+ *os << "[" << fd.length() << "]";
+ } else if (fd.length() == 0) {
+ *os << "[]";
+ }
+ *os << " (";
+ PrintTo(fd._mFieldId, os);
+ *os << "*" << fd.length() << ")";
+}
+
+enum C2ParamIndexType {
+ kParamIndexNumber,
+ kParamIndexNumbers,
+ kParamIndexNumber2,
+ kParamIndexVendorStart = C2Param::BaseIndex::kVendorStart,
+ kParamIndexVendorNumbers,
+};
+
+void ffff(int(*)(int)) {}
+
+/* ============================= STRUCT DECLARATION AND DESCRIPTION ============================= */
+
+typedef C2FieldDescriptor FD;
+
+class C2ParamTest : public ::testing::Test {
+};
+
+class C2ParamTest_ParamFieldList
+ : public ::testing::TestWithParam<std::initializer_list<const C2FieldDescriptor>> {
+};
+
+enum {
+ kParamIndexSize,
+ kParamIndexTestA,
+ kParamIndexTestB,
+ kParamIndexTestFlexS32,
+ kParamIndexTestFlexEndS32,
+ kParamIndexTestFlexS64,
+ kParamIndexTestFlexEndS64,
+ kParamIndexTestFlexSize,
+ kParamIndexTestFlexEndSize,
+};
+
+struct C2SizeStruct {
+ int32_t mNumber;
+ int32_t mHeight;
+ enum : uint32_t { baseIndex = kParamIndexSize }; // <= needed for C2FieldDescriptor
+ const static std::initializer_list<const C2FieldDescriptor> fieldList; // <= needed for C2FieldDescriptor
+ const static FD::Type TYPE = (FD::Type)(baseIndex | FD::STRUCT_FLAG);
+};
+
+DEFINE_NO_NAMED_VALUES_FOR(C2SizeStruct)
+
+// Test 1. define a structure without any helper methods
+
+bool operator==(const C2FieldDescriptor &a, const C2FieldDescriptor &b) {
+ return a.type() == b.type()
+ && a.length() == b.length()
+ && strcmp(a.name(), b.name()) == 0
+ && a._mFieldId == b._mFieldId;
+}
+
+struct C2TestStruct_A {
+ int32_t mSigned32;
+ int64_t mSigned64[2];
+ uint32_t mUnsigned32[1];
+ uint64_t mUnsigned64;
+ float mFloat;
+ C2SizeStruct mSize[3];
+ uint8_t mBlob[100];
+ char mString[100];
+ bool mYesNo[100];
+
+ const static std::initializer_list<const C2FieldDescriptor> fieldList;
+ // enum : uint32_t { baseIndex = kParamIndexTest };
+ // typedef C2TestStruct_A _type;
+} __attribute__((packed));
+
+const std::initializer_list<const C2FieldDescriptor> C2TestStruct_A::fieldList =
+ { { FD::INT32, 1, "s32", 0, 4 },
+ { FD::INT64, 2, "s64", 4, 8 },
+ { FD::UINT32, 1, "u32", 20, 4 },
+ { FD::UINT64, 1, "u64", 24, 8 },
+ { FD::FLOAT, 1, "fp", 32, 4 },
+ { C2SizeStruct::TYPE, 3, "size", 36, 8 },
+ { FD::BLOB, 100, "blob", 60, 1 },
+ { FD::STRING, 100, "str", 160, 1 },
+ { FD::BLOB, 100, "y-n", 260, 1 } };
+
+TEST_P(C2ParamTest_ParamFieldList, VerifyStruct) {
+ std::vector<const C2FieldDescriptor> fields = GetParam(), expected = C2TestStruct_A::fieldList;
+
+ // verify first field descriptor
+ EXPECT_EQ(FD::INT32, fields[0].type());
+ EXPECT_STREQ("s32", fields[0].name());
+ EXPECT_EQ(1u, fields[0].length());
+ EXPECT_EQ(_C2FieldId(0, 4), fields[0]._mFieldId);
+
+ EXPECT_EQ(expected[0], fields[0]);
+ EXPECT_EQ(expected[1], fields[1]);
+ EXPECT_EQ(expected[2], fields[2]);
+ EXPECT_EQ(expected[3], fields[3]);
+ EXPECT_EQ(expected[4], fields[4]);
+ EXPECT_EQ(expected[5], fields[5]);
+ EXPECT_EQ(expected[6], fields[6]);
+ EXPECT_EQ(expected[7], fields[7]);
+ for (size_t i = 8; i < fields.size() && i < expected.size(); ++i) {
+ EXPECT_EQ(expected[i], fields[i]);
+ }
+}
+
+INSTANTIATE_TEST_CASE_P(InitializerList, C2ParamTest_ParamFieldList, ::testing::Values(C2TestStruct_A::fieldList));
+
+// define fields using C2FieldDescriptor pointer constructor
+const std::initializer_list<const C2FieldDescriptor> C2TestStruct_A_FD_PTR_fieldList =
+ { C2FieldDescriptor(&((C2TestStruct_A*)(nullptr))->mSigned32, "s32"),
+ C2FieldDescriptor(&((C2TestStruct_A*)(nullptr))->mSigned64, "s64"),
+ C2FieldDescriptor(&((C2TestStruct_A*)(nullptr))->mUnsigned32, "u32"),
+ C2FieldDescriptor(&((C2TestStruct_A*)(nullptr))->mUnsigned64, "u64"),
+ C2FieldDescriptor(&((C2TestStruct_A*)(nullptr))->mFloat, "fp"),
+ C2FieldDescriptor(&((C2TestStruct_A*)(nullptr))->mSize, "size"),
+ C2FieldDescriptor(&((C2TestStruct_A*)(nullptr))->mBlob, "blob"),
+ C2FieldDescriptor(&((C2TestStruct_A*)(nullptr))->mString, "str"),
+ // C2FieldDescriptor(&((C2TestStruct_A*)(nullptr))->mYesNo, "y-n")
+ };
+
+INSTANTIATE_TEST_CASE_P(PointerConstructor, C2ParamTest_ParamFieldList, ::testing::Values(C2TestStruct_A_FD_PTR_fieldList));
+
+// define fields using C2FieldDescriptor member-pointer constructor
+const std::initializer_list<const C2FieldDescriptor> C2TestStruct_A_FD_MEM_PTR_fieldList =
+ { C2FieldDescriptor((C2TestStruct_A*)0, &C2TestStruct_A::mSigned32, "s32"),
+ C2FieldDescriptor((C2TestStruct_A*)0, &C2TestStruct_A::mSigned64, "s64"),
+ C2FieldDescriptor((C2TestStruct_A*)0, &C2TestStruct_A::mUnsigned32, "u32"),
+ C2FieldDescriptor((C2TestStruct_A*)0, &C2TestStruct_A::mUnsigned64, "u64"),
+ C2FieldDescriptor((C2TestStruct_A*)0, &C2TestStruct_A::mFloat, "fp"),
+ C2FieldDescriptor((C2TestStruct_A*)0, &C2TestStruct_A::mSize, "size"),
+ C2FieldDescriptor((C2TestStruct_A*)0, &C2TestStruct_A::mBlob, "blob"),
+ C2FieldDescriptor((C2TestStruct_A*)0, &C2TestStruct_A::mString, "str"),
+ // C2FieldDescriptor((C2TestStruct_A*)0, &C2TestStruct_A::mYesNo, "y-n")
+ };
+
+INSTANTIATE_TEST_CASE_P(MemberPointerConstructor, C2ParamTest_ParamFieldList, ::testing::Values(C2TestStruct_A_FD_MEM_PTR_fieldList));
+
+// Test 2. define a structure with two-step helper methods
+
+struct C2TestAStruct {
+ int32_t mSigned32;
+ int64_t mSigned64[2];
+ uint32_t mUnsigned32[1];
+ uint64_t mUnsigned64;
+ float mFloat;
+ C2SizeStruct mSize[3];
+ uint8_t mBlob[100];
+ char mString[100];
+ bool mYesNo[100];
+
+private: // test access level
+ DEFINE_C2STRUCT(TestA)
+} C2_PACK;
+
+DESCRIBE_C2STRUCT(TestA, {
+ C2FIELD(mSigned32, "s32")
+ C2FIELD(mSigned64, "s64")
+ C2FIELD(mUnsigned32, "u32")
+ C2FIELD(mUnsigned64, "u64")
+ C2FIELD(mFloat, "fp")
+ C2FIELD(mSize, "size")
+ C2FIELD(mBlob, "blob")
+ C2FIELD(mString, "str")
+ // C2FIELD(mYesNo, "y-n")
+}) // ; optional
+
+INSTANTIATE_TEST_CASE_P(DescribeStruct2Step, C2ParamTest_ParamFieldList, ::testing::Values(C2TestAStruct::fieldList));
+
+// Test 3. define a structure with one-step helper method
+
+struct C2TestBStruct {
+ int32_t mSigned32;
+ int64_t mSigned64[2];
+ uint32_t mUnsigned32[1];
+ uint64_t mUnsigned64;
+ float mFloat;
+ C2SizeStruct mSize[3];
+ uint8_t mBlob[100];
+ char mString[100];
+ bool mYesNo[100];
+
+private: // test access level
+ DEFINE_AND_DESCRIBE_C2STRUCT(TestB)
+
+ C2FIELD(mSigned32, "s32")
+ C2FIELD(mSigned64, "s64")
+ C2FIELD(mUnsigned32, "u32")
+ C2FIELD(mUnsigned64, "u64")
+ C2FIELD(mFloat, "fp")
+ C2FIELD(mSize, "size")
+ C2FIELD(mBlob, "blob")
+ C2FIELD(mString, "str")
+ // C2FIELD(mYesNo, "y-n")
+};
+
+INSTANTIATE_TEST_CASE_P(DescribeStruct1Step, C2ParamTest_ParamFieldList, ::testing::Values(C2TestBStruct::fieldList));
+
+// Test 4. flexible members
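+// Flexible structs end in a zero-length array member; its descriptor uses length 0
+// to mark the field as flexible (see the "flex" entries below).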
+
+template<typename T>
+class C2ParamTest_FlexParamFieldList : public ::testing::Test {
+protected:
+ using Type=FD::Type;
+
+ // static std::initializer_list<std::initializer_list<const C2FieldDescriptor>>
+ static std::vector<std::vector<const C2FieldDescriptor>>
+ GetLists();
+
+ constexpr static Type flexType =
+ std::is_same<T, int32_t>::value ? FD::INT32 :
+ std::is_same<T, int64_t>::value ? FD::INT64 :
+ std::is_same<T, uint32_t>::value ? FD::UINT32 :
+ std::is_same<T, uint64_t>::value ? FD::UINT64 :
+ std::is_same<T, float>::value ? FD::FLOAT :
+ std::is_same<T, uint8_t>::value ? FD::BLOB :
+ std::is_same<T, char>::value ? FD::STRING :
+ std::is_same<T, C2SizeStruct>::value ? C2SizeStruct::TYPE : (Type)0;
+ constexpr static size_t flexSize = sizeof(T);
+};
+
+typedef ::testing::Types<int32_t, int64_t, C2SizeStruct> FlexTypes;
+TYPED_TEST_CASE(C2ParamTest_FlexParamFieldList, FlexTypes);
+
+TYPED_TEST(C2ParamTest_FlexParamFieldList, VerifyStruct) {
+ for (auto a : this->GetLists()) {
+ std::vector<const C2FieldDescriptor> fields = a;
+ if (fields.size() > 1) {
+ EXPECT_EQ(2u, fields.size());
+ EXPECT_EQ(C2FieldDescriptor(FD::INT32, 1, "s32", 0, 4), fields[0]);
+ EXPECT_EQ(C2FieldDescriptor(this->flexType, 0, "flex", 4, this->flexSize),
+ fields[1]);
+ } else {
+ EXPECT_EQ(1u, fields.size());
+ EXPECT_EQ(C2FieldDescriptor(this->flexType, 0, "flex", 0, this->flexSize),
+ fields[0]);
+ }
+ }
+}
+
+struct C2TestStruct_FlexS32 {
+ int32_t mFlex[];
+
+ const static std::initializer_list<const C2FieldDescriptor> fieldList;
+ // enum : uint32_t { baseIndex = kParamIndexTestFlexS32, flexSize = 4 };
+ // typedef C2TestStruct_FlexS32 _type;
+ // typedef int32_t flexType;
+};
+
+const std::initializer_list<const C2FieldDescriptor> C2TestStruct_FlexS32::fieldList = {
+ { FD::INT32, 0, "flex", 0, 4 }
+};
+
+struct C2TestStruct_FlexEndS32 {
+ int32_t mSigned32;
+ int32_t mFlex[];
+
+ const static std::initializer_list<const C2FieldDescriptor> fieldList;
+ // enum : uint32_t { baseIndex = kParamIndexTestFlexEndS32, flexSize = 4 };
+ // typedef C2TestStruct_FlexEndS32 _type;
+ // typedef int32_t flexType;
+};
+
+const std::initializer_list<const C2FieldDescriptor> C2TestStruct_FlexEndS32::fieldList = {
+ { FD::INT32, 1, "s32", 0, 4 },
+ { FD::INT32, 0, "flex", 4, 4 },
+};
+
+const static std::initializer_list<const C2FieldDescriptor> C2TestStruct_FlexEndS32_ptr_fieldList = {
+ C2FieldDescriptor(&((C2TestStruct_FlexEndS32*)0)->mSigned32, "s32"),
+ C2FieldDescriptor(&((C2TestStruct_FlexEndS32*)0)->mFlex, "flex"),
+};
+
+struct C2TestFlexS32Struct {
+ int32_t mFlexSigned32[];
+private: // test access level
+ C2TestFlexS32Struct() {}
+
+ DEFINE_AND_DESCRIBE_FLEX_C2STRUCT(TestFlexS32, mFlexSigned32)
+ C2FIELD(mFlexSigned32, "flex")
+};
+
+struct C2TestFlexEndS32Struct {
+ int32_t mSigned32;
+ int32_t mFlexSigned32[];
+private: // test access level
+ C2TestFlexEndS32Struct() {}
+
+ DEFINE_FLEX_C2STRUCT(TestFlexEndS32, mFlexSigned32)
+} C2_PACK;
+
+DESCRIBE_C2STRUCT(TestFlexEndS32, {
+ C2FIELD(mSigned32, "s32")
+ C2FIELD(mFlexSigned32, "flex")
+}) // ; optional
+
+template<>
+std::vector<std::vector<const C2FieldDescriptor>>
+//std::initializer_list<std::initializer_list<const C2FieldDescriptor>>
+C2ParamTest_FlexParamFieldList<int32_t>::GetLists() {
+ return {
+ C2TestStruct_FlexS32::fieldList,
+ C2TestStruct_FlexEndS32::fieldList,
+ C2TestStruct_FlexEndS32_ptr_fieldList,
+ C2TestFlexS32Struct::fieldList,
+ C2TestFlexEndS32Struct::fieldList,
+ };
+}
+
+struct C2TestStruct_FlexS64 {
+ int64_t mFlexSigned64[];
+
+ const static std::initializer_list<const C2FieldDescriptor> fieldList;
+ // enum : uint32_t { baseIndex = kParamIndexTestFlexS64, flexSize = 8 };
+ // typedef C2TestStruct_FlexS64 _type;
+ // typedef int64_t flexType;
+};
+
+const std::initializer_list<const C2FieldDescriptor> C2TestStruct_FlexS64::fieldList = {
+ { FD::INT64, 0, "flex", 0, 8 }
+};
+
+struct C2TestStruct_FlexEndS64 {
+ int32_t mSigned32;
+ int64_t mSigned64Flex[];
+
+ const static std::initializer_list<const C2FieldDescriptor> fieldList;
+ // enum : uint32_t { baseIndex = kParamIndexTestFlexEndS64, flexSize = 8 };
+ // typedef C2TestStruct_FlexEndS64 _type;
+ // typedef int64_t flexType;
+};
+
+const std::initializer_list<const C2FieldDescriptor> C2TestStruct_FlexEndS64::fieldList = {
+ { FD::INT32, 1, "s32", 0, 4 },
+ { FD::INT64, 0, "flex", 4, 8 },
+};
+
+struct C2TestFlexS64Struct {
+ int64_t mFlexSigned64[];
+ C2TestFlexS64Struct() {}
+
+ DEFINE_AND_DESCRIBE_FLEX_C2STRUCT(TestFlexS64, mFlexSigned64)
+ C2FIELD(mFlexSigned64, "flex")
+};
+
+struct C2TestFlexEndS64Struct {
+ int32_t mSigned32;
+ int64_t mFlexSigned64[];
+ C2TestFlexEndS64Struct() {}
+
+ DEFINE_FLEX_C2STRUCT(TestFlexEndS64, mFlexSigned64)
+} C2_PACK;
+
+DESCRIBE_C2STRUCT(TestFlexEndS64, {
+ C2FIELD(mSigned32, "s32")
+ C2FIELD(mFlexSigned64, "flex")
+}) // ; optional
+
+template<>
+std::vector<std::vector<const C2FieldDescriptor>>
+//std::initializer_list<std::initializer_list<const C2FieldDescriptor>>
+C2ParamTest_FlexParamFieldList<int64_t>::GetLists() {
+ return {
+ C2TestStruct_FlexS64::fieldList,
+ C2TestStruct_FlexEndS64::fieldList,
+ C2TestFlexS64Struct::fieldList,
+ C2TestFlexEndS64Struct::fieldList,
+ };
+}
+
+struct C2TestStruct_FlexSize {
+ C2SizeStruct mFlexSize[];
+
+ const static std::initializer_list<const C2FieldDescriptor> fieldList;
+ // enum : uint32_t { baseIndex = kParamIndexTestFlexSize, flexSize = 8 };
+ // typedef C2TestStruct_FlexSize _type;
+ // typedef C2SizeStruct flexType;
+};
+
+const std::initializer_list<const C2FieldDescriptor> C2TestStruct_FlexSize::fieldList = {
+ { C2SizeStruct::TYPE, 0, "flex", 0, sizeof(C2SizeStruct) }
+};
+
+struct C2TestStruct_FlexEndSize {
+ int32_t mSigned32;
+ C2SizeStruct mSizeFlex[];
+
+ const static std::initializer_list<const C2FieldDescriptor> fieldList;
+ // enum : uint32_t { baseIndex = kParamIndexTestFlexEndSize, flexSize = 8 };
+ // typedef C2TestStruct_FlexEndSize _type;
+ // typedef C2SizeStruct flexType;
+};
+
+const std::initializer_list<const C2FieldDescriptor> C2TestStruct_FlexEndSize::fieldList = {
+ { FD::INT32, 1, "s32", 0, 4 },
+ { C2SizeStruct::TYPE, 0, "flex", 4, sizeof(C2SizeStruct) },
+};
+
+struct C2TestFlexSizeStruct {
+ C2SizeStruct mFlexSize[];
+ C2TestFlexSizeStruct() {}
+
+ DEFINE_AND_DESCRIBE_FLEX_C2STRUCT(TestFlexSize, mFlexSize)
+ C2FIELD(mFlexSize, "flex")
+};
+
+struct C2TestFlexEndSizeStruct {
+ int32_t mSigned32;
+ C2SizeStruct mFlexSize[];
+ C2TestFlexEndSizeStruct() {}
+
+ DEFINE_FLEX_C2STRUCT(TestFlexEndSize, mFlexSize)
+} C2_PACK;
+
+DESCRIBE_C2STRUCT(TestFlexEndSize, {
+ C2FIELD(mSigned32, "s32")
+ C2FIELD(mFlexSize, "flex")
+}) // ; optional
+
+template<>
+std::vector<std::vector<const C2FieldDescriptor>>
+//std::initializer_list<std::initializer_list<const C2FieldDescriptor>>
+C2ParamTest_FlexParamFieldList<C2SizeStruct>::GetLists() {
+ return {
+ C2TestStruct_FlexSize::fieldList,
+ C2TestStruct_FlexEndSize::fieldList,
+ C2TestFlexSizeStruct::fieldList,
+ C2TestFlexEndSizeStruct::fieldList,
+ };
+}
+
+TEST_F(C2ParamTest, FieldId) {
+ // pointer constructor
+ EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId(&((C2TestStruct_A*)0)->mSigned32));
+ EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId(&((C2TestStruct_A*)0)->mSigned64));
+ EXPECT_EQ(_C2FieldId(20, 4), _C2FieldId(&((C2TestStruct_A*)0)->mUnsigned32));
+ EXPECT_EQ(_C2FieldId(24, 8), _C2FieldId(&((C2TestStruct_A*)0)->mUnsigned64));
+ EXPECT_EQ(_C2FieldId(32, 4), _C2FieldId(&((C2TestStruct_A*)0)->mFloat));
+ EXPECT_EQ(_C2FieldId(36, 8), _C2FieldId(&((C2TestStruct_A*)0)->mSize));
+ EXPECT_EQ(_C2FieldId(60, 1), _C2FieldId(&((C2TestStruct_A*)0)->mBlob));
+ EXPECT_EQ(_C2FieldId(160, 1), _C2FieldId(&((C2TestStruct_A*)0)->mString));
+ EXPECT_EQ(_C2FieldId(260, 1), _C2FieldId(&((C2TestStruct_A*)0)->mYesNo));
+
+ EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId(&((C2TestFlexEndSizeStruct*)0)->mSigned32));
+ EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId(&((C2TestFlexEndSizeStruct*)0)->mFlexSize));
+
+ // member pointer constructor
+ EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::mSigned32));
+ EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::mSigned64));
+ EXPECT_EQ(_C2FieldId(20, 4), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::mUnsigned32));
+ EXPECT_EQ(_C2FieldId(24, 8), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::mUnsigned64));
+ EXPECT_EQ(_C2FieldId(32, 4), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::mFloat));
+ EXPECT_EQ(_C2FieldId(36, 8), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::mSize));
+ EXPECT_EQ(_C2FieldId(60, 1), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::mBlob));
+ EXPECT_EQ(_C2FieldId(160, 1), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::mString));
+ EXPECT_EQ(_C2FieldId(260, 1), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::mYesNo));
+
+ EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId((C2TestFlexEndSizeStruct*)0, &C2TestFlexEndSizeStruct::mSigned32));
+ EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId((C2TestFlexEndSizeStruct*)0, &C2TestFlexEndSizeStruct::mFlexSize));
+
+ // member pointer sans type pointer
+ EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId(&C2TestStruct_A::mSigned32));
+ EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId(&C2TestStruct_A::mSigned64));
+ EXPECT_EQ(_C2FieldId(20, 4), _C2FieldId(&C2TestStruct_A::mUnsigned32));
+ EXPECT_EQ(_C2FieldId(24, 8), _C2FieldId(&C2TestStruct_A::mUnsigned64));
+ EXPECT_EQ(_C2FieldId(32, 4), _C2FieldId(&C2TestStruct_A::mFloat));
+ EXPECT_EQ(_C2FieldId(36, 8), _C2FieldId(&C2TestStruct_A::mSize));
+ EXPECT_EQ(_C2FieldId(60, 1), _C2FieldId(&C2TestStruct_A::mBlob));
+ EXPECT_EQ(_C2FieldId(160, 1), _C2FieldId(&C2TestStruct_A::mString));
+ EXPECT_EQ(_C2FieldId(260, 1), _C2FieldId(&C2TestStruct_A::mYesNo));
+
+ EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId(&C2TestFlexEndSizeStruct::mSigned32));
+ EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId(&C2TestFlexEndSizeStruct::mFlexSize));
+
+ typedef C2GlobalParam<C2Info, C2TestAStruct> C2TestAInfo;
+ typedef C2GlobalParam<C2Info, C2TestFlexEndSizeStruct> C2TestFlexEndSizeInfo;
+
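+ // When wrapped in a C2Param, every field offset below is shifted by the 8-byte
+ // param header (_mSize at offset 0, _mIndex at offset 4) relative to the raw struct.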
+ // pointer constructor in C2Param
+ EXPECT_EQ(_C2FieldId(8, 4), _C2FieldId(&((C2TestAInfo*)0)->mSigned32));
+ EXPECT_EQ(_C2FieldId(12, 8), _C2FieldId(&((C2TestAInfo*)0)->mSigned64));
+ EXPECT_EQ(_C2FieldId(28, 4), _C2FieldId(&((C2TestAInfo*)0)->mUnsigned32));
+ EXPECT_EQ(_C2FieldId(32, 8), _C2FieldId(&((C2TestAInfo*)0)->mUnsigned64));
+ EXPECT_EQ(_C2FieldId(40, 4), _C2FieldId(&((C2TestAInfo*)0)->mFloat));
+ EXPECT_EQ(_C2FieldId(44, 8), _C2FieldId(&((C2TestAInfo*)0)->mSize));
+ EXPECT_EQ(_C2FieldId(68, 1), _C2FieldId(&((C2TestAInfo*)0)->mBlob));
+ EXPECT_EQ(_C2FieldId(168, 1), _C2FieldId(&((C2TestAInfo*)0)->mString));
+ EXPECT_EQ(_C2FieldId(268, 1), _C2FieldId(&((C2TestAInfo*)0)->mYesNo));
+
+ EXPECT_EQ(_C2FieldId(8, 4), _C2FieldId(&((C2TestFlexEndSizeInfo*)0)->m.mSigned32));
+ EXPECT_EQ(_C2FieldId(12, 8), _C2FieldId(&((C2TestFlexEndSizeInfo*)0)->m.mFlexSize));
+
+ // member pointer in C2Param
+ EXPECT_EQ(_C2FieldId(8, 4), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::mSigned32));
+ EXPECT_EQ(_C2FieldId(12, 8), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::mSigned64));
+ EXPECT_EQ(_C2FieldId(28, 4), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::mUnsigned32));
+ EXPECT_EQ(_C2FieldId(32, 8), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::mUnsigned64));
+ EXPECT_EQ(_C2FieldId(40, 4), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::mFloat));
+ EXPECT_EQ(_C2FieldId(44, 8), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::mSize));
+ EXPECT_EQ(_C2FieldId(68, 1), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::mBlob));
+ EXPECT_EQ(_C2FieldId(168, 1), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::mString));
+ EXPECT_EQ(_C2FieldId(268, 1), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::mYesNo));
+
+ // NOTE: cannot use a member pointer for flex params due to introduction of 'm'
+ // EXPECT_EQ(_C2FieldId(8, 4), _C2FieldId(&C2TestFlexEndSizeInfo::m.mSigned32));
+ // EXPECT_EQ(_C2FieldId(12, 8), _C2FieldId(&C2TestFlexEndSizeInfo::m.mFlexSize));
+}
+
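+// S32, FLX and MP are compile-time probes: their constructors contain only
+// static_asserts, so merely constructing them (below) verifies how array and
+// member-pointer types propagate through template argument deduction.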
+struct S32 {
+ template<typename T, class B=typename std::remove_extent<T>::type>
+ inline S32(const T*) {
+ static_assert(!std::is_array<T>::value, "should not be an array");
+ static_assert(std::is_same<B, int32_t>::value, "should be int32_t");
+ }
+};
+
+struct FLX {
+ template<typename U, typename T, class B=typename std::remove_extent<T>::type>
+ inline FLX(const T*, const U*) {
+ static_assert(std::is_array<T>::value, "should be an array");
+ static_assert(std::extent<T>::value == 0, "should be an array of 0 extent");
+ static_assert(std::is_same<B, U>::value, "should be type U");
+ }
+};
+
+struct MP {
+ template<typename U, typename T, typename ExpectedU, typename UnexpectedU>
+ inline MP(T U::*, const ExpectedU*, const UnexpectedU*) {
+ static_assert(!std::is_same<U, UnexpectedU>::value, "should not be member pointer of the base type");
+ static_assert(std::is_same<U, ExpectedU>::value, "should be member pointer of the derived type");
+ }
+
+ template<typename U, typename T, typename B, typename D>
+ inline MP(T D::*, const D*) { }
+};
+
+void compiledStatic_arrayTypePropagationTest() {
+ (void)S32(&((C2TestFlexEndS32Struct *)0)->mSigned32);
+ (void)FLX(&((C2TestFlexEndS32Struct *)0)->mFlexSigned32, (int32_t*)0);
+ (void)FLX(&((C2TestFlexS32Struct *)0)->mFlexSigned32, (int32_t*)0);
+
+ typedef C2GlobalParam<C2Info, C2TestAStruct> C2TestAInfo;
+
+ // TRICKY: &derivedClass::baseMember has type of baseClass::*
+ static_assert(std::is_same<decltype(&C2TestAInfo::mSigned32), int32_t C2TestAStruct::*>::value,
+ "base member pointer should have base class in type");
+
+ // therefore, member pointer expands to baseClass::* in templates
+ (void)MP(&C2TestAInfo::mSigned32,
+ (C2TestAStruct*)0 /* expected */, (C2TestAInfo*)0 /* unexpected */);
+ // but can be cast to derivedClass::*
+ (void)MP((int32_t C2TestAInfo::*)&C2TestAInfo::mSigned32,
+ (C2TestAInfo*)0 /* expected */, (C2TestAStruct*)0 /* unexpected */);
+
+ // TRICKY: baseClass::* does not autoconvert to derivedClass::* even in templates
+ // (void)MP(&C2TestAInfo::mSigned32, (C2TestAInfo*)0);
+}
+
+TEST_F(C2ParamTest, MemberPointerCast) {
+ typedef C2GlobalParam<C2Info, C2TestAStruct> C2TestAInfo;
+
+ static_assert(offsetof(C2TestAInfo, mSigned32) == 8, "offset should be 8");
+ constexpr int32_t C2TestAStruct::* s32ptr = &C2TestAInfo::mSigned32;
+ constexpr int32_t C2TestAInfo::* s32ptr_derived = (int32_t C2TestAStruct::*)&C2TestAInfo::mSigned32;
+ constexpr int32_t C2TestAInfo::* s32ptr_cast2derived = (int32_t C2TestAInfo::*)s32ptr;
+ C2TestAInfo *info = (C2TestAInfo *)256;
+ C2TestAStruct *strukt = (C2TestAStruct *)info;
+ int32_t *info_s32_derived = &(info->*s32ptr_derived);
+ int32_t *info_s32_cast2derived = &(info->*s32ptr_cast2derived);
+ int32_t *info_s32 = &(info->*s32ptr);
+ int32_t *strukt_s32 = &(strukt->*s32ptr);
+
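+ // info is placed at address 256; casting to the C2TestAStruct base adjusts past
+ // the 8-byte param header (offsetof == 8 above), so all pointers below resolve to 264.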
+ EXPECT_EQ(256u, (uintptr_t)info);
+ EXPECT_EQ(264u, (uintptr_t)strukt);
+ EXPECT_EQ(264u, (uintptr_t)info_s32_derived);
+ EXPECT_EQ(264u, (uintptr_t)info_s32_cast2derived);
+ EXPECT_EQ(264u, (uintptr_t)info_s32);
+ EXPECT_EQ(264u, (uintptr_t)strukt_s32);
+
+ typedef C2GlobalParam<C2Info, C2TestFlexEndSizeStruct> C2TestFlexEndSizeInfo;
+ static_assert(offsetof(C2TestFlexEndSizeInfo, m.mSigned32) == 8, "offset should be 8");
+ static_assert(offsetof(C2TestFlexEndSizeInfo, m.mFlexSize) == 12, "offset should be 12");
+}
+
+/* ===================================== PARAM USAGE TESTS ===================================== */
+
+struct C2NumberStruct {
+ int32_t mNumber;
+ C2NumberStruct() {}
+ C2NumberStruct(int32_t _number) : mNumber(_number) {}
+
+ DEFINE_AND_DESCRIBE_C2STRUCT(Number)
+ C2FIELD(mNumber, "number")
+};
+
+struct C2NumbersStruct {
+ int32_t mNumbers[];
+ C2NumbersStruct() {}
+
+ DEFINE_AND_DESCRIBE_FLEX_C2STRUCT(Numbers, mNumbers)
+ C2FIELD(mNumbers, "numbers")
+};
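+// C2NumbersStruct holds only its trailing flexible array, so its own size is 0;
+// storage for mNumbers is reserved when the enclosing param is allocated
+// (see the alloc_unique/alloc_shared calls in the tests below).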
+static_assert(sizeof(C2NumbersStruct) == 0, "C2NumbersStruct has incorrect size");
+
+typedef C2GlobalParam<C2Tuning, C2NumberStruct> C2NumberTuning;
+typedef C2PortParam<C2Tuning, C2NumberStruct> C2NumberPortTuning;
+typedef C2StreamParam<C2Tuning, C2NumberStruct> C2NumberStreamTuning;
+
+typedef C2GlobalParam<C2Tuning, C2NumbersStruct> C2NumbersTuning;
+typedef C2PortParam<C2Tuning, C2NumbersStruct> C2NumbersPortTuning;
+typedef C2StreamParam<C2Tuning, C2NumbersStruct> C2NumbersStreamTuning;
+
+//
+#if 0
+
+void test() {
+ C2NumberStruct s(10);
+ (void)C2NumberStruct::fieldList;
+};
+
+typedef C2StreamParam<C2Tuning, C2Int64Value, kParamIndexNumberB> C2NumberConfig4;
+typedef C2PortParam<C2Tuning, C2Int32Value, kParamIndexNumber> C2NumberConfig3;
+typedef C2GlobalParam<C2Tuning, C2StringValue, kParamIndexNumber> C2VideoNameConfig;
+
+void test3() {
+ C2NumberConfig3 s(10);
+ s.mValue = 11;
+ s = 12;
+ (void)C2NumberConfig3::fieldList;
+ std::shared_ptr<C2VideoNameConfig> n = C2VideoNameConfig::alloc_shared(25);
+ strcpy(n->m.mValue, "lajos");
+ C2NumberConfig4 t(false, 0, 11);
+ t.mValue = 15;
+};
+
+struct C2NumbersStruct {
+ int32_t mNumbers[];
+ enum { baseIndex = kParamIndexNumber };
+ const static std::initializer_list<const C2FieldDescriptor> fieldList;
+ C2NumbersStruct() {}
+
+ FLEX(C2NumbersStruct, mNumbers);
+};
+
+static_assert(sizeof(C2NumbersStruct) == 0, "yes");
+
+
+typedef C2GlobalParam<C2Info, C2NumbersStruct> C2NumbersInfo;
+
+const std::initializer_list<const C2FieldDescriptor> C2NumbersStruct::fieldList =
+// { { FD::INT32, 0, "widths" } };
+ { C2FieldDescriptor(&((C2NumbersStruct*)(nullptr))->mNumbers, "number") };
+
+typedef C2PortParam<C2Tuning, C2NumberStruct> C2NumberConfig;
+
+std::list<const C2FieldDescriptor> myList = C2NumberConfig::fieldList;
+
+ std::unique_ptr<android::C2ParamDescriptor> __test_describe(uint32_t paramType) {
+ std::list<const C2FieldDescriptor> fields = describeC2Params<C2NumberConfig>();
+
+ auto widths = C2NumbersInfo::alloc_shared(5);
+ widths->flexCount();
+ widths->m.mNumbers[4] = 1;
+
+ test();
+ test3();
+
+ C2NumberConfig outputWidth(false, 123);
+
+ C2Param::Index index(paramType);
+ switch (paramType) {
+ case C2NumberConfig::baseIndex:
+ return std::unique_ptr<C2ParamDescriptor>(new C2ParamDescriptor{
+ true /* isRequired */,
+ "number",
+ index,
+ });
+ }
+ return nullptr;
+ }
+
+
+} // namespace android
+
+#endif
+//
+
+template<typename T>
+bool canSetPort(T &o, bool output) { return o.setPort(output); }
+bool canSetPort(...) { return false; }
+
+template<typename S, typename=decltype(((S*)0)->setPort(true))>
+static std::true_type _canCallSetPort(int);
+template<typename>
+static std::false_type _canCallSetPort(...);
+#define canCallSetPort(x) decltype(_canCallSetPort<std::remove_reference<decltype(x)>::type>(0))::value
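+// canCallSetPort(x) is a SFINAE probe: the int overload of _canCallSetPort is viable
+// only when setPort(true) is well formed on a pointer to x's (possibly const) type,
+// so the macro yields true only for non-const objects of types exposing setPort().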
+
+/* ======================================= STATIC TESTS ======================================= */
+
+static_assert(_C2Comparable<int>::value, "int is not comparable");
+static_assert(!_C2Comparable<void>::value, "void is comparable");
+
+struct C2_HIDE _test0 {
+ bool operator==(const _test0&);
+ bool operator!=(const _test0&);
+};
+struct C2_HIDE _test1 {
+ bool operator==(const _test1&);
+};
+struct C2_HIDE _test2 {
+ bool operator!=(const _test2&);
+};
+static_assert(_C2Comparable<_test0>::value, "class with == and != is not comparable");
+static_assert(_C2Comparable<_test1>::value, "class with == is not comparable");
+static_assert(_C2Comparable<_test2>::value, "class with != is not comparable");
+
+/* ======================================= C2PARAM TESTS ======================================= */
+
+struct _C2ParamInspector {
+ static void StaticTest();
+ static void StaticFlexTest();
+};
+
+// TEST_F(_C2ParamInspector, StaticTest) {
+void _C2ParamInspector::StaticTest() {
+ typedef C2Param::Index I;
+
+ // C2NumberStruct: baseIndex = kIndex (args)
+ static_assert(C2NumberStruct::baseIndex == kParamIndexNumber, "bad index");
+ static_assert(sizeof(C2NumberStruct) == 4, "bad size");
+
+ // C2NumberTuning: kIndex | tun | global (args)
+ static_assert(C2NumberTuning::baseIndex == kParamIndexNumber, "bad index");
+ static_assert(C2NumberTuning::typeIndex == (kParamIndexNumber | I::kTypeTuning | I::kDirGlobal), "bad index");
+ static_assert(sizeof(C2NumberTuning) == 12, "bad size");
+
+ static_assert(offsetof(C2NumberTuning, _mSize) == 0, "bad size");
+ static_assert(offsetof(C2NumberTuning, _mIndex) == 4, "bad offset");
+ static_assert(offsetof(C2NumberTuning, mNumber) == 8, "bad offset");
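+ // i.e. a 4-byte _mSize + 4-byte _mIndex header followed by the 4-byte mNumber payload (12 bytes total)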
+
+ // C2NumberPortTuning: kIndex | tun | port (bool, args)
+ static_assert(sizeof(C2NumberPortTuning) == 12, "bad size");
+ // C2NumberPortTuning::input: kIndex | tun | port | input (args)
+ // C2NumberPortTuning::output: kIndex | tun | port | output (args)
+ static_assert(C2NumberPortTuning::input::baseIndex ==
+ kParamIndexNumber, "bad index");
+ static_assert(C2NumberPortTuning::input::typeIndex ==
+ (kParamIndexNumber | I::kTypeTuning | I::kDirInput), "bad index");
+ static_assert(C2NumberPortTuning::output::baseIndex ==
+ kParamIndexNumber, "bad index");
+ static_assert(C2NumberPortTuning::output::typeIndex ==
+ (kParamIndexNumber | I::kTypeTuning | I::kDirOutput), "bad index");
+ static_assert(sizeof(C2NumberPortTuning::input) == 12, "bad size");
+ static_assert(sizeof(C2NumberPortTuning::output) == 12, "bad size");
+ static_assert(offsetof(C2NumberPortTuning::input, _mSize) == 0, "bad size");
+ static_assert(offsetof(C2NumberPortTuning::input, _mIndex) == 4, "bad offset");
+ static_assert(offsetof(C2NumberPortTuning::input, mNumber) == 8, "bad offset");
+ static_assert(offsetof(C2NumberPortTuning::output, _mSize) == 0, "bad size");
+ static_assert(offsetof(C2NumberPortTuning::output, _mIndex) == 4, "bad offset");
+ static_assert(offsetof(C2NumberPortTuning::output, mNumber) == 8, "bad offset");
+
+ // C2NumberStreamTuning: kIndex | tun | str (bool, uint, args)
+ static_assert(sizeof(C2NumberStreamTuning) == 12u, "bad size");
+ // C2NumberStreamTuning::input kIndex | tun | str | input (int, args)
+ // C2NumberStreamTuning::output kIx | tun | str | output (int, args)
+ static_assert(C2NumberStreamTuning::input::baseIndex ==
+ kParamIndexNumber, "bad index");
+ static_assert(C2NumberStreamTuning::input::typeIndex ==
+ (kParamIndexNumber | I::kTypeTuning | I::kDirInput | I::kStreamFlag), "bad index");
+ static_assert(C2NumberStreamTuning::output::baseIndex ==
+ kParamIndexNumber, "bad index");
+ static_assert(C2NumberStreamTuning::output::typeIndex ==
+ (kParamIndexNumber | I::kTypeTuning | I::kDirOutput | I::kStreamFlag), "bad index");
+ static_assert(sizeof(C2NumberStreamTuning::input) == 12u, "bad size");
+ static_assert(sizeof(C2NumberStreamTuning::output) == 12u, "bad size");
+ static_assert(offsetof(C2NumberStreamTuning::input, _mSize) == 0, "bad size");
+ static_assert(offsetof(C2NumberStreamTuning::input, _mIndex) == 4, "bad offset");
+ static_assert(offsetof(C2NumberStreamTuning::input, mNumber) == 8, "bad offset");
+ static_assert(offsetof(C2NumberStreamTuning::output, _mSize) == 0, "bad size");
+ static_assert(offsetof(C2NumberStreamTuning::output, _mIndex) == 4, "bad offset");
+ static_assert(offsetof(C2NumberStreamTuning::output, mNumber) == 8, "bad offset");
+}
+
+void _C2ParamInspector::StaticFlexTest() {
+ typedef C2Param::Index I;
+
+ // C2NumbersStruct: baseIndex = kIndex (args)
+ static_assert(C2NumbersStruct::baseIndex == (I::kFlexibleFlag | kParamIndexNumbers), "bad index");
+ static_assert(sizeof(C2NumbersStruct) == 0, "bad size");
+
+ // C2NumbersTuning: kIndex | tun | global (args)
+ static_assert(C2NumbersTuning::baseIndex == (I::kFlexibleFlag | kParamIndexNumbers), "bad index");
+ static_assert(C2NumbersTuning::typeIndex == (I::kFlexibleFlag | kParamIndexNumbers | I::kTypeTuning | I::kDirGlobal), "bad index");
+ static_assert(sizeof(C2NumbersTuning) == 8, "bad size");
+
+ static_assert(offsetof(C2NumbersTuning, _mSize) == 0, "bad size");
+ static_assert(offsetof(C2NumbersTuning, _mIndex) == 4, "bad offset");
+ static_assert(offsetof(C2NumbersTuning, m.mNumbers) == 8, "bad offset");
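+ // flexible tunings are header-only at compile time (8 bytes); m.mNumbers starts
+ // right after the header and its storage is added at allocation time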
+
+ // C2NumbersPortTuning: kIndex | tun | port (bool, args)
+ static_assert(sizeof(C2NumbersPortTuning) == 8, "bad size");
+ // C2NumbersPortTuning::input: kIndex | tun | port | input (args)
+ // C2NumbersPortTuning::output: kIndex | tun | port | output (args)
+ static_assert(C2NumbersPortTuning::input::baseIndex ==
+ (I::kFlexibleFlag | kParamIndexNumbers), "bad index");
+ static_assert(C2NumbersPortTuning::input::typeIndex ==
+ (I::kFlexibleFlag | kParamIndexNumbers | I::kTypeTuning | I::kDirInput), "bad index");
+ static_assert(C2NumbersPortTuning::output::baseIndex ==
+ (I::kFlexibleFlag | kParamIndexNumbers), "bad index");
+ static_assert(C2NumbersPortTuning::output::typeIndex ==
+ (I::kFlexibleFlag | kParamIndexNumbers | I::kTypeTuning | I::kDirOutput), "bad index");
+ static_assert(sizeof(C2NumbersPortTuning::input) == 8, "bad size");
+ static_assert(sizeof(C2NumbersPortTuning::output) == 8, "bad size");
+ static_assert(offsetof(C2NumbersPortTuning::input, _mSize) == 0, "bad size");
+ static_assert(offsetof(C2NumbersPortTuning::input, _mIndex) == 4, "bad offset");
+ static_assert(offsetof(C2NumbersPortTuning::input, m.mNumbers) == 8, "bad offset");
+ static_assert(offsetof(C2NumbersPortTuning::output, _mSize) == 0, "bad size");
+ static_assert(offsetof(C2NumbersPortTuning::output, _mIndex) == 4, "bad offset");
+ static_assert(offsetof(C2NumbersPortTuning::output, m.mNumbers) == 8, "bad offset");
+
+ // C2NumbersStreamTuning: kIndex | tun | str (bool, uint, args)
+ static_assert(sizeof(C2NumbersStreamTuning) == 8, "bad size");
+ // C2NumbersStreamTuning::input kIndex | tun | str | input (int, args)
+ // C2NumbersStreamTuning::output kIx | tun | str | output (int, args)
+ static_assert(C2NumbersStreamTuning::input::baseIndex ==
+ (I::kFlexibleFlag | kParamIndexNumbers), "bad index");
+ static_assert(C2NumbersStreamTuning::input::typeIndex ==
+ (I::kFlexibleFlag | kParamIndexNumbers | I::kTypeTuning | I::kDirInput | I::kStreamFlag), "bad index");
+ static_assert(C2NumbersStreamTuning::output::baseIndex ==
+ (I::kFlexibleFlag | kParamIndexNumbers), "bad index");
+ static_assert(C2NumbersStreamTuning::output::typeIndex ==
+ (I::kFlexibleFlag | kParamIndexNumbers | I::kTypeTuning | I::kDirOutput | I::kStreamFlag), "bad index");
+ static_assert(sizeof(C2NumbersStreamTuning::input) == 8, "bad size");
+ static_assert(sizeof(C2NumbersStreamTuning::output) == 8, "bad size");
+ static_assert(offsetof(C2NumbersStreamTuning::input, _mSize) == 0, "bad size");
+ static_assert(offsetof(C2NumbersStreamTuning::input, _mIndex) == 4, "bad offset");
+ static_assert(offsetof(C2NumbersStreamTuning::input, m.mNumbers) == 8, "bad offset");
+ static_assert(offsetof(C2NumbersStreamTuning::output, _mSize) == 0, "bad size");
+ static_assert(offsetof(C2NumbersStreamTuning::output, _mIndex) == 4, "bad offset");
+ static_assert(offsetof(C2NumbersStreamTuning::output, m.mNumbers) == 8, "bad offset");
+}
+
+TEST_F(C2ParamTest, ParamOpsTest) {
+ const C2NumberStruct str(100);
+ C2NumberStruct bstr;
+
+ {
+ EXPECT_EQ(100, str.mNumber);
+ bstr.mNumber = 100;
+
+ C2Param::BaseIndex index = C2NumberStruct::baseIndex;
+ EXPECT_FALSE(index.isVendor());
+ EXPECT_FALSE(index.isFlexible());
+ EXPECT_EQ(index.baseIndex(), kParamIndexNumber);
+ EXPECT_EQ(index.paramIndex(), kParamIndexNumber);
+ }
+
+ const C2NumberTuning tun(100);
+ C2NumberTuning btun;
+
+ {
+ // flags & invariables
+ for (const auto &p : { tun, btun }) {
+ EXPECT_TRUE((bool)p);
+ EXPECT_FALSE(!p);
+ EXPECT_EQ(12u, p.size());
+
+ EXPECT_FALSE(p.isVendor());
+ EXPECT_FALSE(p.isFlexible());
+ EXPECT_TRUE(p.isGlobal());
+ EXPECT_FALSE(p.forInput());
+ EXPECT_FALSE(p.forOutput());
+ EXPECT_FALSE(p.forStream());
+ EXPECT_FALSE(p.forPort());
+ }
+
+ // value
+ EXPECT_EQ(100, tun.mNumber);
+ EXPECT_EQ(0, btun.mNumber);
+ EXPECT_FALSE(tun == btun);
+ EXPECT_FALSE(tun.operator==(btun));
+ EXPECT_TRUE(tun != btun);
+ EXPECT_TRUE(tun.operator!=(btun));
+ btun.mNumber = 100;
+ EXPECT_EQ(tun, btun);
+
+ // index
+ EXPECT_EQ(C2Param::Type(tun.type()).baseIndex(), C2NumberStruct::baseIndex);
+ EXPECT_EQ(C2Param::Type(tun.type()).paramIndex(), kParamIndexNumber);
+ EXPECT_EQ(tun.type(), C2NumberTuning::typeIndex);
+ EXPECT_EQ(tun.stream(), ~0u);
+
+ C2Param::BaseIndex index = C2NumberTuning::baseIndex;
+ EXPECT_FALSE(index.isVendor());
+ EXPECT_FALSE(index.isFlexible());
+ EXPECT_EQ(index.baseIndex(), kParamIndexNumber);
+ EXPECT_EQ(index.paramIndex(), kParamIndexNumber);
+
+ C2Param::Type type = C2NumberTuning::typeIndex;
+ EXPECT_FALSE(type.isVendor());
+ EXPECT_FALSE(type.isFlexible());
+ EXPECT_TRUE(type.isGlobal());
+ EXPECT_FALSE(type.forInput());
+ EXPECT_FALSE(type.forOutput());
+ EXPECT_FALSE(type.forStream());
+ EXPECT_FALSE(type.forPort());
+
+ EXPECT_EQ(C2NumberTuning::From(nullptr), nullptr);
+ EXPECT_EQ(C2NumberTuning::From(&tun), &tun);
+ EXPECT_EQ(C2NumberPortTuning::From(&tun), nullptr);
+ EXPECT_EQ(C2NumberPortTuning::input::From(&tun), nullptr);
+ EXPECT_EQ(C2NumberPortTuning::output::From(&tun), nullptr);
+ EXPECT_EQ(C2NumberStreamTuning::From(&tun), nullptr);
+ EXPECT_EQ(C2NumberStreamTuning::input::From(&tun), nullptr);
+ EXPECT_EQ(C2NumberStreamTuning::output::From(&tun), nullptr);
+ }
+
+ const C2NumberPortTuning outp1(true, 100), inp1(false, 100);
+ C2NumberPortTuning boutp1, binp1, binp3(false, 100);
+ const C2NumberPortTuning::input inp2(100);
+ C2NumberPortTuning::input binp2;
+ const C2NumberPortTuning::output outp2(100);
+ C2NumberPortTuning::output boutp2;
+
+ {
+ static_assert(canCallSetPort(binp3), "should be able to");
+ static_assert(canCallSetPort(binp1), "should be able to");
+ static_assert(!canCallSetPort(inp1), "should not be able to (const)");
+ static_assert(!canCallSetPort(inp2), "should not be able to (const & type)");
+ static_assert(!canCallSetPort(binp2), "should not be able to (type)");
+
+ // flags & invariables
+ for (const auto &p : { outp1, inp1, boutp1 }) {
+ EXPECT_EQ(12u, p.size());
+ EXPECT_FALSE(p.isVendor());
+ EXPECT_FALSE(p.isFlexible());
+ EXPECT_FALSE(p.isGlobal());
+ EXPECT_FALSE(p.forStream());
+ EXPECT_TRUE(p.forPort());
+ }
+ for (const auto &p : { inp2, binp2 }) {
+ EXPECT_EQ(12u, p.size());
+ EXPECT_FALSE(p.isVendor());
+ EXPECT_FALSE(p.isFlexible());
+ EXPECT_FALSE(p.isGlobal());
+ EXPECT_FALSE(p.forStream());
+ EXPECT_TRUE(p.forPort());
+ }
+ for (const auto &p : { outp2, boutp2 }) {
+ EXPECT_EQ(12u, p.size());
+ EXPECT_FALSE(p.isVendor());
+ EXPECT_FALSE(p.isFlexible());
+ EXPECT_FALSE(p.isGlobal());
+ EXPECT_FALSE(p.forStream());
+ EXPECT_TRUE(p.forPort());
+ }
+
+ // port specific flags & invariables
+ EXPECT_FALSE(outp1.forInput());
+ EXPECT_TRUE(outp1.forOutput());
+
+ EXPECT_TRUE(inp1.forInput());
+ EXPECT_FALSE(inp1.forOutput());
+
+ for (const auto &p : { outp1, inp1 }) {
+ EXPECT_TRUE((bool)p);
+ EXPECT_FALSE(!p);
+ EXPECT_EQ(100, p.mNumber);
+ }
+ for (const auto &p : { outp2, boutp2 }) {
+ EXPECT_TRUE((bool)p);
+ EXPECT_FALSE(!p);
+
+ EXPECT_FALSE(p.forInput());
+ EXPECT_TRUE(p.forOutput());
+ }
+ for (const auto &p : { inp2, binp2 }) {
+ EXPECT_TRUE((bool)p);
+ EXPECT_FALSE(!p);
+
+ EXPECT_TRUE(p.forInput());
+ EXPECT_FALSE(p.forOutput());
+ }
+ for (const auto &p : { boutp1 } ) {
+ EXPECT_FALSE((bool)p);
+ EXPECT_TRUE(!p);
+
+ EXPECT_FALSE(p.forInput());
+ EXPECT_FALSE(p.forOutput());
+ EXPECT_EQ(0, p.mNumber);
+ }
+
+ // values
+ EXPECT_EQ(100, inp2.mNumber);
+ EXPECT_EQ(100, outp2.mNumber);
+ EXPECT_EQ(0, binp1.mNumber);
+ EXPECT_EQ(0, binp2.mNumber);
+ EXPECT_EQ(0, boutp1.mNumber);
+ EXPECT_EQ(0, boutp2.mNumber);
+
+ EXPECT_TRUE(inp1 != outp1);
+ EXPECT_TRUE(inp1 == inp2);
+ EXPECT_TRUE(outp1 == outp2);
+ EXPECT_TRUE(binp1 == boutp1);
+ EXPECT_TRUE(binp2 != boutp2);
+
+ EXPECT_TRUE(inp1 != binp1);
+ binp1.mNumber = 100;
+ EXPECT_TRUE(inp1 != binp1);
+ binp1.setPort(false /* output */);
+ EXPECT_TRUE((bool)binp1);
+ EXPECT_FALSE(!binp1);
+ EXPECT_TRUE(inp1 == binp1);
+
+ EXPECT_TRUE(inp2 != binp2);
+ binp2.mNumber = 100;
+ EXPECT_TRUE(inp2 == binp2);
+
+ binp1.setPort(true /* output */);
+ EXPECT_TRUE(outp1 == binp1);
+
+ EXPECT_TRUE(outp1 != boutp1);
+ boutp1.mNumber = 100;
+ EXPECT_TRUE(outp1 != boutp1);
+ boutp1.setPort(true /* output */);
+ EXPECT_TRUE((bool)boutp1);
+ EXPECT_FALSE(!boutp1);
+ EXPECT_TRUE(outp1 == boutp1);
+
+ EXPECT_TRUE(outp2 != boutp2);
+ boutp2.mNumber = 100;
+ EXPECT_TRUE(outp2 == boutp2);
+
+ boutp1.setPort(false /* output */);
+ EXPECT_TRUE(inp1 == boutp1);
+
+ // index
+ EXPECT_EQ(C2Param::Type(inp1.type()).baseIndex(), C2NumberStruct::baseIndex);
+ EXPECT_EQ(C2Param::Type(inp1.type()).paramIndex(), kParamIndexNumber);
+ EXPECT_EQ(inp1.type(), C2NumberPortTuning::input::typeIndex);
+ EXPECT_EQ(inp1.stream(), ~0u);
+
+ EXPECT_EQ(C2Param::Type(inp2.type()).baseIndex(), C2NumberStruct::baseIndex);
+ EXPECT_EQ(C2Param::Type(inp2.type()).paramIndex(), kParamIndexNumber);
+ EXPECT_EQ(inp2.type(), C2NumberPortTuning::input::typeIndex);
+ EXPECT_EQ(inp2.stream(), ~0u);
+
+ EXPECT_EQ(C2Param::Type(outp1.type()).baseIndex(), C2NumberStruct::baseIndex);
+ EXPECT_EQ(C2Param::Type(outp1.type()).paramIndex(), kParamIndexNumber);
+ EXPECT_EQ(outp1.type(), C2NumberPortTuning::output::typeIndex);
+ EXPECT_EQ(outp1.stream(), ~0u);
+
+ EXPECT_EQ(C2Param::Type(outp2.type()).baseIndex(), C2NumberStruct::baseIndex);
+ EXPECT_EQ(C2Param::Type(outp2.type()).paramIndex(), kParamIndexNumber);
+ EXPECT_EQ(outp2.type(), C2NumberPortTuning::output::typeIndex);
+ EXPECT_EQ(outp2.stream(), ~0u);
+
+ C2Param::BaseIndex index = C2NumberPortTuning::input::typeIndex;
+ EXPECT_FALSE(index.isVendor());
+ EXPECT_FALSE(index.isFlexible());
+ EXPECT_EQ(index.baseIndex(), kParamIndexNumber);
+ EXPECT_EQ(index.paramIndex(), kParamIndexNumber);
+
+ index = C2NumberPortTuning::output::typeIndex;
+ EXPECT_FALSE(index.isVendor());
+ EXPECT_FALSE(index.isFlexible());
+ EXPECT_EQ(index.baseIndex(), kParamIndexNumber);
+ EXPECT_EQ(index.paramIndex(), kParamIndexNumber);
+
+ C2Param::Type type = C2NumberPortTuning::input::typeIndex;
+ EXPECT_FALSE(type.isVendor());
+ EXPECT_FALSE(type.isFlexible());
+ EXPECT_FALSE(type.isGlobal());
+ EXPECT_TRUE(type.forInput());
+ EXPECT_FALSE(type.forOutput());
+ EXPECT_FALSE(type.forStream());
+ EXPECT_TRUE(type.forPort());
+
+ type = C2NumberPortTuning::output::typeIndex;
+ EXPECT_FALSE(type.isVendor());
+ EXPECT_FALSE(type.isFlexible());
+ EXPECT_FALSE(type.isGlobal());
+ EXPECT_FALSE(type.forInput());
+ EXPECT_TRUE(type.forOutput());
+ EXPECT_FALSE(type.forStream());
+ EXPECT_TRUE(type.forPort());
+
+ EXPECT_EQ(C2NumberPortTuning::From(nullptr), nullptr);
+ EXPECT_EQ(C2NumberPortTuning::input::From(nullptr), nullptr);
+ EXPECT_EQ(C2NumberPortTuning::output::From(nullptr), nullptr);
+ EXPECT_EQ(C2NumberTuning::From(&inp1), nullptr);
+ EXPECT_EQ(C2NumberTuning::From(&inp2), nullptr);
+ EXPECT_EQ(C2NumberTuning::From(&outp1), nullptr);
+ EXPECT_EQ(C2NumberTuning::From(&outp2), nullptr);
+ EXPECT_EQ(C2NumberPortTuning::From(&inp1), &inp1);
+ EXPECT_EQ(C2NumberPortTuning::From(&inp2), (C2NumberPortTuning*)&inp2);
+ EXPECT_EQ(C2NumberPortTuning::From(&outp1), &outp1);
+ EXPECT_EQ(C2NumberPortTuning::From(&outp2), (C2NumberPortTuning*)&outp2);
+ EXPECT_EQ(C2NumberPortTuning::input::From(&inp1), (C2NumberPortTuning::input*)&inp1);
+ EXPECT_EQ(C2NumberPortTuning::input::From(&inp2), &inp2);
+ EXPECT_EQ(C2NumberPortTuning::input::From(&outp1), nullptr);
+ EXPECT_EQ(C2NumberPortTuning::input::From(&outp2), nullptr);
+ EXPECT_EQ(C2NumberPortTuning::output::From(&inp1), nullptr);
+ EXPECT_EQ(C2NumberPortTuning::output::From(&inp2), nullptr);
+ EXPECT_EQ(C2NumberPortTuning::output::From(&outp1), (C2NumberPortTuning::output*)&outp1);
+ EXPECT_EQ(C2NumberPortTuning::output::From(&outp2), &outp2);
+ EXPECT_EQ(C2NumberStreamTuning::From(&inp1), nullptr);
+ EXPECT_EQ(C2NumberStreamTuning::From(&inp2), nullptr);
+ EXPECT_EQ(C2NumberStreamTuning::From(&outp1), nullptr);
+ EXPECT_EQ(C2NumberStreamTuning::From(&outp2), nullptr);
+ EXPECT_EQ(C2NumberStreamTuning::input::From(&inp1), nullptr);
+ EXPECT_EQ(C2NumberStreamTuning::input::From(&inp2), nullptr);
+ EXPECT_EQ(C2NumberStreamTuning::input::From(&outp1), nullptr);
+ EXPECT_EQ(C2NumberStreamTuning::input::From(&outp2), nullptr);
+ EXPECT_EQ(C2NumberStreamTuning::output::From(&inp1), nullptr);
+ EXPECT_EQ(C2NumberStreamTuning::output::From(&inp2), nullptr);
+ EXPECT_EQ(C2NumberStreamTuning::output::From(&outp1), nullptr);
+ EXPECT_EQ(C2NumberStreamTuning::output::From(&outp2), nullptr);
+ }
+
+ const C2NumberStreamTuning outs1(true, 1u, 100), ins1(false, 1u, 100);
+ C2NumberStreamTuning bouts1, bins1, bins3(false, 1u, 100);
+ const C2NumberStreamTuning::input ins2(1u, 100);
+ C2NumberStreamTuning::input bins2;
+ const C2NumberStreamTuning::output outs2(1u, 100);
+ C2NumberStreamTuning::output bouts2;
+
+ {
+ static_assert(canCallSetPort(bins3), "should be able to");
+ static_assert(canCallSetPort(bins1), "should be able to");
+ static_assert(!canCallSetPort(ins1), "should not be able to (const)");
+ static_assert(!canCallSetPort(ins2), "should not be able to (const & type)");
+ static_assert(!canCallSetPort(bins2), "should not be able to (type)");
+
+ // flags & invariables
+ for (const auto &p : { outs1, ins1, bouts1 }) {
+ EXPECT_EQ(12u, p.size());
+ EXPECT_FALSE(p.isVendor());
+ EXPECT_FALSE(p.isFlexible());
+ EXPECT_FALSE(p.isGlobal());
+ EXPECT_TRUE(p.forStream());
+ EXPECT_FALSE(p.forPort());
+ }
+ for (const auto &p : { ins2, bins2 }) {
+ EXPECT_EQ(12u, p.size());
+ EXPECT_FALSE(p.isVendor());
+ EXPECT_FALSE(p.isFlexible());
+ EXPECT_FALSE(p.isGlobal());
+ EXPECT_TRUE(p.forStream());
+ EXPECT_FALSE(p.forPort());
+ }
+ for (const auto &p : { outs2, bouts2 }) {
+ EXPECT_EQ(12u, p.size());
+ EXPECT_FALSE(p.isVendor());
+ EXPECT_FALSE(p.isFlexible());
+ EXPECT_FALSE(p.isGlobal());
+ EXPECT_TRUE(p.forStream());
+ EXPECT_FALSE(p.forPort());
+ }
+
+ // port specific flags & invariables
+ EXPECT_FALSE(outs1.forInput());
+ EXPECT_TRUE(outs1.forOutput());
+
+ EXPECT_TRUE(ins1.forInput());
+ EXPECT_FALSE(ins1.forOutput());
+
+ for (const auto &p : { outs1, ins1 }) {
+ EXPECT_TRUE((bool)p);
+ EXPECT_FALSE(!p);
+ EXPECT_EQ(100, p.mNumber);
+ EXPECT_EQ(1u, p.stream());
+ }
+ for (const auto &p : { outs2, bouts2 }) {
+ EXPECT_TRUE((bool)p);
+ EXPECT_FALSE(!p);
+
+ EXPECT_FALSE(p.forInput());
+ EXPECT_TRUE(p.forOutput());
+ }
+ for (const auto &p : { ins2, bins2 }) {
+ EXPECT_TRUE((bool)p);
+ EXPECT_FALSE(!p);
+
+ EXPECT_TRUE(p.forInput());
+ EXPECT_FALSE(p.forOutput());
+ }
+ for (const auto &p : { bouts1 } ) {
+ EXPECT_FALSE((bool)p);
+ EXPECT_TRUE(!p);
+
+ EXPECT_FALSE(p.forInput());
+ EXPECT_FALSE(p.forOutput());
+ EXPECT_EQ(0, p.mNumber);
+ }
+
+ // values
+ EXPECT_EQ(100, ins2.mNumber);
+ EXPECT_EQ(100, outs2.mNumber);
+ EXPECT_EQ(0, bins1.mNumber);
+ EXPECT_EQ(0, bins2.mNumber);
+ EXPECT_EQ(0, bouts1.mNumber);
+ EXPECT_EQ(0, bouts2.mNumber);
+
+ EXPECT_EQ(1u, ins2.stream());
+ EXPECT_EQ(1u, outs2.stream());
+ EXPECT_EQ(0u, bins1.stream());
+ EXPECT_EQ(0u, bins2.stream());
+ EXPECT_EQ(0u, bouts1.stream());
+ EXPECT_EQ(0u, bouts2.stream());
+
+ EXPECT_TRUE(ins1 != outs1);
+ EXPECT_TRUE(ins1 == ins2);
+ EXPECT_TRUE(outs1 == outs2);
+ EXPECT_TRUE(bins1 == bouts1);
+ EXPECT_TRUE(bins2 != bouts2);
+
+ EXPECT_TRUE(ins1 != bins1);
+ bins1.mNumber = 100;
+ EXPECT_TRUE(ins1 != bins1);
+ bins1.setPort(false /* output */);
+ EXPECT_TRUE(ins1 != bins1);
+ bins1.setStream(1u);
+ EXPECT_TRUE(ins1 == bins1);
+
+ EXPECT_TRUE(ins2 != bins2);
+ bins2.mNumber = 100;
+ EXPECT_TRUE(ins2 != bins2);
+ bins2.setStream(1u);
+ EXPECT_TRUE(ins2 == bins2);
+
+ bins1.setPort(true /* output */);
+ EXPECT_TRUE(outs1 == bins1);
+
+ EXPECT_TRUE(outs1 != bouts1);
+ bouts1.mNumber = 100;
+ EXPECT_TRUE(outs1 != bouts1);
+ bouts1.setPort(true /* output */);
+ EXPECT_TRUE(outs1 != bouts1);
+ bouts1.setStream(1u);
+ EXPECT_TRUE(outs1 == bouts1);
+
+ EXPECT_TRUE(outs2 != bouts2);
+ bouts2.mNumber = 100;
+ EXPECT_TRUE(outs2 != bouts2);
+ bouts2.setStream(1u);
+ EXPECT_TRUE(outs2 == bouts2);
+
+ bouts1.setPort(false /* output */);
+ EXPECT_TRUE(ins1 == bouts1);
+
+ // index
+ EXPECT_EQ(C2Param::Type(ins1.type()).baseIndex(), C2NumberStruct::baseIndex);
+ EXPECT_EQ(C2Param::Type(ins1.type()).paramIndex(), kParamIndexNumber);
+ EXPECT_EQ(ins1.type(), C2NumberStreamTuning::input::typeIndex);
+
+ EXPECT_EQ(C2Param::Type(ins2.type()).baseIndex(), C2NumberStruct::baseIndex);
+ EXPECT_EQ(C2Param::Type(ins2.type()).paramIndex(), kParamIndexNumber);
+ EXPECT_EQ(ins2.type(), C2NumberStreamTuning::input::typeIndex);
+
+ EXPECT_EQ(C2Param::Type(outs1.type()).baseIndex(), C2NumberStruct::baseIndex);
+ EXPECT_EQ(C2Param::Type(outs1.type()).paramIndex(), kParamIndexNumber);
+ EXPECT_EQ(outs1.type(), C2NumberStreamTuning::output::typeIndex);
+
+ EXPECT_EQ(C2Param::Type(outs2.type()).baseIndex(), C2NumberStruct::baseIndex);
+ EXPECT_EQ(C2Param::Type(outs2.type()).paramIndex(), kParamIndexNumber);
+ EXPECT_EQ(outs2.type(), C2NumberStreamTuning::output::typeIndex);
+
+ C2Param::BaseIndex index = C2NumberStreamTuning::input::typeIndex;
+ EXPECT_FALSE(index.isVendor());
+ EXPECT_FALSE(index.isFlexible());
+ EXPECT_EQ(index.baseIndex(), kParamIndexNumber);
+ EXPECT_EQ(index.paramIndex(), kParamIndexNumber);
+
+ index = C2NumberStreamTuning::output::typeIndex;
+ EXPECT_FALSE(index.isVendor());
+ EXPECT_FALSE(index.isFlexible());
+ EXPECT_EQ(index.baseIndex(), kParamIndexNumber);
+ EXPECT_EQ(index.paramIndex(), kParamIndexNumber);
+
+ C2Param::Type type = C2NumberStreamTuning::input::typeIndex;
+ EXPECT_FALSE(type.isVendor());
+ EXPECT_FALSE(type.isFlexible());
+ EXPECT_FALSE(type.isGlobal());
+ EXPECT_TRUE(type.forInput());
+ EXPECT_FALSE(type.forOutput());
+ EXPECT_TRUE(type.forStream());
+ EXPECT_FALSE(type.forPort());
+
+ type = C2NumberStreamTuning::output::typeIndex;
+ EXPECT_FALSE(type.isVendor());
+ EXPECT_FALSE(type.isFlexible());
+ EXPECT_FALSE(type.isGlobal());
+ EXPECT_FALSE(type.forInput());
+ EXPECT_TRUE(type.forOutput());
+ EXPECT_TRUE(type.forStream());
+ EXPECT_FALSE(type.forPort());
+
+ EXPECT_EQ(C2NumberPortTuning::From(nullptr), nullptr);
+ EXPECT_EQ(C2NumberPortTuning::input::From(nullptr), nullptr);
+ EXPECT_EQ(C2NumberPortTuning::output::From(nullptr), nullptr);
+ EXPECT_EQ(C2NumberTuning::From(&ins1), nullptr);
+ EXPECT_EQ(C2NumberTuning::From(&ins2), nullptr);
+ EXPECT_EQ(C2NumberTuning::From(&outs1), nullptr);
+ EXPECT_EQ(C2NumberTuning::From(&outs2), nullptr);
+ EXPECT_EQ(C2NumberPortTuning::From(&ins1), nullptr);
+ EXPECT_EQ(C2NumberPortTuning::From(&ins2), nullptr);
+ EXPECT_EQ(C2NumberPortTuning::From(&outs1), nullptr);
+ EXPECT_EQ(C2NumberPortTuning::From(&outs2), nullptr);
+ EXPECT_EQ(C2NumberPortTuning::input::From(&ins1), nullptr);
+ EXPECT_EQ(C2NumberPortTuning::input::From(&ins2), nullptr);
+ EXPECT_EQ(C2NumberPortTuning::input::From(&outs1), nullptr);
+ EXPECT_EQ(C2NumberPortTuning::input::From(&outs2), nullptr);
+ EXPECT_EQ(C2NumberPortTuning::output::From(&ins1), nullptr);
+ EXPECT_EQ(C2NumberPortTuning::output::From(&ins2), nullptr);
+ EXPECT_EQ(C2NumberPortTuning::output::From(&outs1), nullptr);
+ EXPECT_EQ(C2NumberPortTuning::output::From(&outs2), nullptr);
+ EXPECT_EQ(C2NumberStreamTuning::From(&ins1), &ins1);
+ EXPECT_EQ(C2NumberStreamTuning::From(&ins2), (C2NumberStreamTuning*)&ins2);
+ EXPECT_EQ(C2NumberStreamTuning::From(&outs1), &outs1);
+ EXPECT_EQ(C2NumberStreamTuning::From(&outs2), (C2NumberStreamTuning*)&outs2);
+ EXPECT_EQ(C2NumberStreamTuning::input::From(&ins1), (C2NumberStreamTuning::input*)&ins1);
+ EXPECT_EQ(C2NumberStreamTuning::input::From(&ins2), &ins2);
+ EXPECT_EQ(C2NumberStreamTuning::input::From(&outs1), nullptr);
+ EXPECT_EQ(C2NumberStreamTuning::input::From(&outs2), nullptr);
+ EXPECT_EQ(C2NumberStreamTuning::output::From(&ins1), nullptr);
+ EXPECT_EQ(C2NumberStreamTuning::output::From(&ins2), nullptr);
+ EXPECT_EQ(C2NumberStreamTuning::output::From(&outs1), (C2NumberStreamTuning::output*)&outs1);
+ EXPECT_EQ(C2NumberStreamTuning::output::From(&outs2), &outs2);
+
+ }
+
+ {
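+ // round-trip a raw param blob { size = 12, type, value } via C2Param::From();
+ // the checks below verify that a buffer size not matching the embedded size yields nullptr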
+ uint32_t videoWidth[] = { 12u, C2NumberStreamTuning::output::typeIndex, 100 };
+ C2Param *p1 = C2Param::From(videoWidth, sizeof(videoWidth));
+ EXPECT_NE(p1, nullptr);
+ EXPECT_EQ(12u, p1->size());
+ EXPECT_EQ(p1->type(), C2NumberStreamTuning::output::typeIndex);
+
+ p1 = C2Param::From(videoWidth, sizeof(videoWidth) + 2);
+ EXPECT_EQ(p1, nullptr);
+
+ p1 = C2Param::From(videoWidth, sizeof(videoWidth) - 2);
+ EXPECT_EQ(p1, nullptr);
+
+ p1 = C2Param::From(videoWidth, 3);
+ EXPECT_EQ(p1, nullptr);
+
+ p1 = C2Param::From(videoWidth, 0);
+ EXPECT_EQ(p1, nullptr);
+ }
+}
+
+void StaticTestAddBaseIndex() {
+ struct nobase {};
+ struct base { enum : uint32_t { baseIndex = 1 }; };
+ static_assert(C2AddBaseIndex<nobase, 2>::baseIndex == 2, "should be 2");
+ static_assert(C2AddBaseIndex<base, 1>::baseIndex == 1, "should be 1");
+}
+
+class TestFlexHelper {
+ struct _Flex {
+ int32_t a;
+ char b[];
+ _Flex() {}
+ FLEX(_Flex, b);
+ };
+
+ struct _BoFlex {
+ _Flex a;
+ _BoFlex() {}
+ FLEX(_BoFlex, a);
+ };
+
+ struct _NonFlex {
+ };
+
+
+ static void StaticTest() {
+ static_assert(std::is_same<_C2FlexHelper<char>::flexType, void>::value, "should be void");
+ static_assert(std::is_same<_C2FlexHelper<char[]>::flexType, char>::value, "should be char");
+ static_assert(std::is_same<_C2FlexHelper<_Flex>::flexType, char>::value, "should be char");
+
+ static_assert(std::is_same<_C2FlexHelper<_BoFlex>::flexType, char>::value, "should be char");
+
+ static_assert(_C2Flexible<_Flex>::value, "should be flexible");
+ static_assert(!_C2Flexible<_NonFlex>::value, "should not be flexible");
+ }
+};
+
+TEST_F(C2ParamTest, FlexParamOpsTest) {
+// const C2NumbersStruct str{100};
+ C2NumbersStruct bstr;
+ {
+// EXPECT_EQ(100, str->m.mNumbers[0]);
+ (void)&bstr.mNumbers[0];
+
+ C2Param::BaseIndex index = C2NumbersStruct::baseIndex;
+ EXPECT_FALSE(index.isVendor());
+ EXPECT_TRUE(index.isFlexible());
+ EXPECT_EQ(index.baseIndex(), kParamIndexNumbers | C2Param::BaseIndex::_kFlexibleFlag);
+ EXPECT_EQ(index.paramIndex(), kParamIndexNumbers);
+ }
+
+ std::unique_ptr<C2NumbersTuning> tun_ = C2NumbersTuning::alloc_unique(1);
+ tun_->m.mNumbers[0] = 100;
+ std::unique_ptr<const C2NumbersTuning> tun = std::move(tun_);
+ std::shared_ptr<C2NumbersTuning> btun = C2NumbersTuning::alloc_shared(1);
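+ // flexible params are created via alloc_unique()/alloc_shared(); the leading argument
+ // (1 here) sizes the trailing mNumbers array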
+
+ {
+ // flags & invariables
+ const C2NumbersTuning *T[] = { tun.get(), btun.get() };
+ for (const auto p : T) {
+ EXPECT_TRUE((bool)(*p));
+ EXPECT_FALSE(!(*p));
+ EXPECT_EQ(12u, p->size());
+
+ EXPECT_FALSE(p->isVendor());
+ EXPECT_TRUE(p->isFlexible());
+ EXPECT_TRUE(p->isGlobal());
+ EXPECT_FALSE(p->forInput());
+ EXPECT_FALSE(p->forOutput());
+ EXPECT_FALSE(p->forStream());
+ EXPECT_FALSE(p->forPort());
+ }
+
+ // value
+ EXPECT_EQ(100, tun->m.mNumbers[0]);
+ EXPECT_EQ(0, btun->m.mNumbers[0]);
+ EXPECT_FALSE(*tun == *btun);
+ EXPECT_FALSE(tun->operator==(*btun));
+ EXPECT_TRUE(*tun != *btun);
+ EXPECT_TRUE(tun->operator!=(*btun));
+ btun->m.mNumbers[0] = 100;
+ EXPECT_EQ(*tun, *btun);
+
+ // index
+ EXPECT_EQ(C2Param::Type(tun->type()).baseIndex(), C2NumbersStruct::baseIndex);
+ EXPECT_EQ(C2Param::Type(tun->type()).paramIndex(), kParamIndexNumbers);
+ EXPECT_EQ(tun->type(), C2NumbersTuning::typeIndex);
+ EXPECT_EQ(tun->stream(), ~0u);
+
+ C2Param::BaseIndex index = C2NumbersTuning::baseIndex;
+ EXPECT_FALSE(index.isVendor());
+ EXPECT_TRUE(index.isFlexible());
+ EXPECT_EQ(index.baseIndex(), kParamIndexNumbers | C2Param::BaseIndex::_kFlexibleFlag);
+ EXPECT_EQ(index.paramIndex(), kParamIndexNumbers);
+
+ C2Param::Type type = C2NumbersTuning::typeIndex;
+ EXPECT_FALSE(type.isVendor());
+ EXPECT_TRUE(type.isFlexible());
+ EXPECT_TRUE(type.isGlobal());
+ EXPECT_FALSE(type.forInput());
+ EXPECT_FALSE(type.forOutput());
+ EXPECT_FALSE(type.forStream());
+ EXPECT_FALSE(type.forPort());
+
+ EXPECT_EQ(C2NumbersTuning::From(nullptr), nullptr);
+ EXPECT_EQ(C2NumbersTuning::From(tun.get()), tun.get());
+ EXPECT_EQ(C2NumbersPortTuning::From(tun.get()), nullptr);
+ EXPECT_EQ(C2NumbersPortTuning::input::From(tun.get()), nullptr);
+ EXPECT_EQ(C2NumbersPortTuning::output::From(tun.get()), nullptr);
+ EXPECT_EQ(C2NumbersStreamTuning::From(tun.get()), nullptr);
+ EXPECT_EQ(C2NumbersStreamTuning::input::From(tun.get()), nullptr);
+ EXPECT_EQ(C2NumbersStreamTuning::output::From(tun.get()), nullptr);
+ }
+
+ std::unique_ptr<C2NumbersPortTuning> outp1_(C2NumbersPortTuning::alloc_unique(1, true)),
+ inp1_ = C2NumbersPortTuning::alloc_unique(1, false);
+ outp1_->m.mNumbers[0] = 100;
+ inp1_->m.mNumbers[0] = 100;
+ std::unique_ptr<const C2NumbersPortTuning> outp1 = std::move(outp1_);
+ std::unique_ptr<const C2NumbersPortTuning> inp1 = std::move(inp1_);
+ std::shared_ptr<C2NumbersPortTuning> boutp1(C2NumbersPortTuning::alloc_shared(1)),
+ binp1 = C2NumbersPortTuning::alloc_shared(1),
+ binp3 = C2NumbersPortTuning::alloc_shared(1, false);
+ binp3->m.mNumbers[0] = 100;
+ std::unique_ptr<C2NumbersPortTuning::input> inp2_(C2NumbersPortTuning::input::alloc_unique(1));
+ inp2_->m.mNumbers[0] = 100;
+ std::unique_ptr<const C2NumbersPortTuning::input> inp2 = std::move(inp2_);
+ std::shared_ptr<C2NumbersPortTuning::input> binp2(C2NumbersPortTuning::input::alloc_shared(1));
+ std::unique_ptr<C2NumbersPortTuning::output> outp2_(C2NumbersPortTuning::output::alloc_unique(1));
+ outp2_->m.mNumbers[0] = 100;
+ std::unique_ptr<const C2NumbersPortTuning::output> outp2 = std::move(outp2_);
+ std::shared_ptr<C2NumbersPortTuning::output> boutp2(C2NumbersPortTuning::output::alloc_shared(1));
+
+ {
+ static_assert(canCallSetPort(*binp3), "should be able to");
+ static_assert(canCallSetPort(*binp1), "should be able to");
+ static_assert(!canCallSetPort(*inp1), "should not be able to (const)");
+ static_assert(!canCallSetPort(*inp2), "should not be able to (const & type)");
+ static_assert(!canCallSetPort(*binp2), "should not be able to (type)");
+
+ // flags & invariables
+ const C2NumbersPortTuning *P[] = { outp1.get(), inp1.get(), boutp1.get() };
+ for (const auto p : P) {
+ EXPECT_EQ(12u, p->size());
+ EXPECT_FALSE(p->isVendor());
+ EXPECT_TRUE(p->isFlexible());
+ EXPECT_FALSE(p->isGlobal());
+ EXPECT_FALSE(p->forStream());
+ EXPECT_TRUE(p->forPort());
+ }
+ const C2NumbersPortTuning::input *PI[] = { inp2.get(), binp2.get() };
+ for (const auto p : PI) {
+ EXPECT_EQ(12u, p->size());
+ EXPECT_FALSE(p->isVendor());
+ EXPECT_TRUE(p->isFlexible());
+ EXPECT_FALSE(p->isGlobal());
+ EXPECT_FALSE(p->forStream());
+ EXPECT_TRUE(p->forPort());
+ }
+ const C2NumbersPortTuning::output *PO[] = { outp2.get(), boutp2.get() };
+ for (const auto p : PO) {
+ EXPECT_EQ(12u, p->size());
+ EXPECT_FALSE(p->isVendor());
+ EXPECT_TRUE(p->isFlexible());
+ EXPECT_FALSE(p->isGlobal());
+ EXPECT_FALSE(p->forStream());
+ EXPECT_TRUE(p->forPort());
+ }
+
+ // port specific flags & invariables
+ EXPECT_FALSE(outp1->forInput());
+ EXPECT_TRUE(outp1->forOutput());
+
+ EXPECT_TRUE(inp1->forInput());
+ EXPECT_FALSE(inp1->forOutput());
+
+ const C2NumbersPortTuning *P2[] = { outp1.get(), inp1.get() };
+ for (const auto p : P2) {
+ EXPECT_TRUE((bool)(*p));
+ EXPECT_FALSE(!(*p));
+ EXPECT_EQ(100, p->m.mNumbers[0]);
+ }
+ for (const auto p : PO) {
+ EXPECT_TRUE((bool)(*p));
+ EXPECT_FALSE(!(*p));
+
+ EXPECT_FALSE(p->forInput());
+ EXPECT_TRUE(p->forOutput());
+ }
+ for (const auto p : PI) {
+ EXPECT_TRUE((bool)(*p));
+ EXPECT_FALSE(!(*p));
+
+ EXPECT_TRUE(p->forInput());
+ EXPECT_FALSE(p->forOutput());
+ }
+ const C2NumbersPortTuning *P3[] = { boutp1.get() };
+ for (const auto p : P3) {
+ EXPECT_FALSE((bool)(*p));
+ EXPECT_TRUE(!(*p));
+
+ EXPECT_FALSE(p->forInput());
+ EXPECT_FALSE(p->forOutput());
+ EXPECT_EQ(0, p->m.mNumbers[0]);
+ }
+
+ // values
+ EXPECT_EQ(100, inp2->m.mNumbers[0]);
+ EXPECT_EQ(100, outp2->m.mNumbers[0]);
+ EXPECT_EQ(0, binp1->m.mNumbers[0]);
+ EXPECT_EQ(0, binp2->m.mNumbers[0]);
+ EXPECT_EQ(0, boutp1->m.mNumbers[0]);
+ EXPECT_EQ(0, boutp2->m.mNumbers[0]);
+
+ EXPECT_TRUE(*inp1 != *outp1);
+ EXPECT_TRUE(*inp1 == *inp2);
+ EXPECT_TRUE(*outp1 == *outp2);
+ EXPECT_TRUE(*binp1 == *boutp1);
+ EXPECT_TRUE(*binp2 != *boutp2);
+
+ EXPECT_TRUE(*inp1 != *binp1);
+ binp1->m.mNumbers[0] = 100;
+ EXPECT_TRUE(*inp1 != *binp1);
+ binp1->setPort(false /* output */);
+ EXPECT_TRUE((bool)*binp1);
+ EXPECT_FALSE(!*binp1);
+ EXPECT_TRUE(*inp1 == *binp1);
+
+ EXPECT_TRUE(*inp2 != *binp2);
+ binp2->m.mNumbers[0] = 100;
+ EXPECT_TRUE(*inp2 == *binp2);
+
+ binp1->setPort(true /* output */);
+ EXPECT_TRUE(*outp1 == *binp1);
+
+ EXPECT_TRUE(*outp1 != *boutp1);
+ boutp1->m.mNumbers[0] = 100;
+ EXPECT_TRUE(*outp1 != *boutp1);
+ boutp1->setPort(true /* output */);
+ EXPECT_TRUE((bool)*boutp1);
+ EXPECT_FALSE(!*boutp1);
+ EXPECT_TRUE(*outp1 == *boutp1);
+
+ EXPECT_TRUE(*outp2 != *boutp2);
+ boutp2->m.mNumbers[0] = 100;
+ EXPECT_TRUE(*outp2 == *boutp2);
+
+ boutp1->setPort(false /* output */);
+ EXPECT_TRUE(*inp1 == *boutp1);
+
+ // index
+ EXPECT_EQ(C2Param::Type(inp1->type()).baseIndex(), C2NumbersStruct::baseIndex);
+ EXPECT_EQ(C2Param::Type(inp1->type()).paramIndex(), kParamIndexNumbers);
+ EXPECT_EQ(inp1->type(), C2NumbersPortTuning::input::typeIndex);
+ EXPECT_EQ(inp1->stream(), ~0u);
+
+ EXPECT_EQ(C2Param::Type(inp2->type()).baseIndex(), C2NumbersStruct::baseIndex);
+ EXPECT_EQ(C2Param::Type(inp2->type()).paramIndex(), kParamIndexNumbers);
+ EXPECT_EQ(inp2->type(), C2NumbersPortTuning::input::typeIndex);
+ EXPECT_EQ(inp2->stream(), ~0u);
+
+ EXPECT_EQ(C2Param::Type(outp1->type()).baseIndex(), C2NumbersStruct::baseIndex);
+ EXPECT_EQ(C2Param::Type(outp1->type()).paramIndex(), kParamIndexNumbers);
+ EXPECT_EQ(outp1->type(), C2NumbersPortTuning::output::typeIndex);
+ EXPECT_EQ(outp1->stream(), ~0u);
+
+ EXPECT_EQ(C2Param::Type(outp2->type()).baseIndex(), C2NumbersStruct::baseIndex);
+ EXPECT_EQ(C2Param::Type(outp2->type()).paramIndex(), kParamIndexNumbers);
+ EXPECT_EQ(outp2->type(), C2NumbersPortTuning::output::typeIndex);
+ EXPECT_EQ(outp2->stream(), ~0u);
+
+ C2Param::BaseIndex index = C2NumbersPortTuning::input::typeIndex;
+ EXPECT_FALSE(index.isVendor());
+ EXPECT_TRUE(index.isFlexible());
+ EXPECT_EQ(index.baseIndex(), kParamIndexNumbers | C2Param::BaseIndex::_kFlexibleFlag);
+ EXPECT_EQ(index.paramIndex(), kParamIndexNumbers);
+
+ index = C2NumbersPortTuning::output::typeIndex;
+ EXPECT_FALSE(index.isVendor());
+ EXPECT_TRUE(index.isFlexible());
+ EXPECT_EQ(index.baseIndex(), kParamIndexNumbers | C2Param::BaseIndex::_kFlexibleFlag);
+ EXPECT_EQ(index.paramIndex(), kParamIndexNumbers);
+
+ C2Param::Type type = C2NumbersPortTuning::input::typeIndex;
+ EXPECT_FALSE(type.isVendor());
+ EXPECT_TRUE(type.isFlexible());
+ EXPECT_FALSE(type.isGlobal());
+ EXPECT_TRUE(type.forInput());
+ EXPECT_FALSE(type.forOutput());
+ EXPECT_FALSE(type.forStream());
+ EXPECT_TRUE(type.forPort());
+
+ type = C2NumbersPortTuning::output::typeIndex;
+ EXPECT_FALSE(type.isVendor());
+ EXPECT_TRUE(type.isFlexible());
+ EXPECT_FALSE(type.isGlobal());
+ EXPECT_FALSE(type.forInput());
+ EXPECT_TRUE(type.forOutput());
+ EXPECT_FALSE(type.forStream());
+ EXPECT_TRUE(type.forPort());
+
+ EXPECT_EQ(C2NumbersPortTuning::From(nullptr), nullptr);
+ EXPECT_EQ(C2NumbersPortTuning::input::From(nullptr), nullptr);
+ EXPECT_EQ(C2NumbersPortTuning::output::From(nullptr), nullptr);
+ EXPECT_EQ(C2NumbersTuning::From(inp1.get()), nullptr);
+ EXPECT_EQ(C2NumbersTuning::From(inp2.get()), nullptr);
+ EXPECT_EQ(C2NumbersTuning::From(outp1.get()), nullptr);
+ EXPECT_EQ(C2NumbersTuning::From(outp2.get()), nullptr);
+ EXPECT_EQ(C2NumbersPortTuning::From(inp1.get()), inp1.get());
+ EXPECT_EQ(C2NumbersPortTuning::From(inp2.get()), (C2NumbersPortTuning*)inp2.get());
+ EXPECT_EQ(C2NumbersPortTuning::From(outp1.get()), outp1.get());
+ EXPECT_EQ(C2NumbersPortTuning::From(outp2.get()), (C2NumbersPortTuning*)outp2.get());
+ EXPECT_EQ(C2NumbersPortTuning::input::From(inp1.get()), (C2NumbersPortTuning::input*)inp1.get());
+ EXPECT_EQ(C2NumbersPortTuning::input::From(inp2.get()), inp2.get());
+ EXPECT_EQ(C2NumbersPortTuning::input::From(outp1.get()), nullptr);
+ EXPECT_EQ(C2NumbersPortTuning::input::From(outp2.get()), nullptr);
+ EXPECT_EQ(C2NumbersPortTuning::output::From(inp1.get()), nullptr);
+ EXPECT_EQ(C2NumbersPortTuning::output::From(inp2.get()), nullptr);
+ EXPECT_EQ(C2NumbersPortTuning::output::From(outp1.get()), (C2NumbersPortTuning::output*)outp1.get());
+ EXPECT_EQ(C2NumbersPortTuning::output::From(outp2.get()), outp2.get());
+ EXPECT_EQ(C2NumbersStreamTuning::From(inp1.get()), nullptr);
+ EXPECT_EQ(C2NumbersStreamTuning::From(inp2.get()), nullptr);
+ EXPECT_EQ(C2NumbersStreamTuning::From(outp1.get()), nullptr);
+ EXPECT_EQ(C2NumbersStreamTuning::From(outp2.get()), nullptr);
+ EXPECT_EQ(C2NumbersStreamTuning::input::From(inp1.get()), nullptr);
+ EXPECT_EQ(C2NumbersStreamTuning::input::From(inp2.get()), nullptr);
+ EXPECT_EQ(C2NumbersStreamTuning::input::From(outp1.get()), nullptr);
+ EXPECT_EQ(C2NumbersStreamTuning::input::From(outp2.get()), nullptr);
+ EXPECT_EQ(C2NumbersStreamTuning::output::From(inp1.get()), nullptr);
+ EXPECT_EQ(C2NumbersStreamTuning::output::From(inp2.get()), nullptr);
+ EXPECT_EQ(C2NumbersStreamTuning::output::From(outp1.get()), nullptr);
+ EXPECT_EQ(C2NumbersStreamTuning::output::From(outp2.get()), nullptr);
+
+ }
+
+ std::unique_ptr<C2NumbersStreamTuning> outs1_(C2NumbersStreamTuning::alloc_unique(1, true, 1u));
+ outs1_->m.mNumbers[0] = 100;
+ std::unique_ptr<const C2NumbersStreamTuning> outs1 = std::move(outs1_);
+ std::unique_ptr<C2NumbersStreamTuning> ins1_(C2NumbersStreamTuning::alloc_unique(1, false, 1u));
+ ins1_->m.mNumbers[0] = 100;
+ std::unique_ptr<const C2NumbersStreamTuning> ins1 = std::move(ins1_);
+ std::shared_ptr<C2NumbersStreamTuning> bouts1(C2NumbersStreamTuning::alloc_shared(1));
+ std::shared_ptr<C2NumbersStreamTuning> bins1(C2NumbersStreamTuning::alloc_shared(1));
+ std::shared_ptr<C2NumbersStreamTuning> bins3(C2NumbersStreamTuning::alloc_shared(1, false, 1u));
+ bins3->m.mNumbers[0] = 100;
+ std::unique_ptr<C2NumbersStreamTuning::input> ins2_(C2NumbersStreamTuning::input::alloc_unique(1, 1u));
+ ins2_->m.mNumbers[0] = 100;
+ std::unique_ptr<const C2NumbersStreamTuning::input> ins2 = std::move(ins2_);
+ std::shared_ptr<C2NumbersStreamTuning::input> bins2(C2NumbersStreamTuning::input::alloc_shared(1));
+ std::unique_ptr<C2NumbersStreamTuning::output> outs2_(C2NumbersStreamTuning::output::alloc_unique(1, 1u));
+ outs2_->m.mNumbers[0] = 100;
+ std::unique_ptr<const C2NumbersStreamTuning::output> outs2 = std::move(outs2_);
+ std::shared_ptr<C2NumbersStreamTuning::output> bouts2(C2NumbersStreamTuning::output::alloc_shared(1));
+
+ {
+ static_assert(canCallSetPort(*bins3), "should be able to");
+ static_assert(canCallSetPort(*bins1), "should be able to");
+ static_assert(!canCallSetPort(*ins1), "should not be able to (const)");
+ static_assert(!canCallSetPort(*ins2), "should not be able to (const & type)");
+ static_assert(!canCallSetPort(*bins2), "should not be able to (type)");
+
+ // flags & invariants
+ const C2NumbersStreamTuning *S[] = { outs1.get(), ins1.get(), bouts1.get() };
+ for (const auto p : S) {
+ EXPECT_EQ(12u, p->size());
+ EXPECT_FALSE(p->isVendor());
+ EXPECT_TRUE(p->isFlexible());
+ EXPECT_FALSE(p->isGlobal());
+ EXPECT_TRUE(p->forStream());
+ EXPECT_FALSE(p->forPort());
+ }
+ const C2NumbersStreamTuning::input *SI[] = { ins2.get(), bins2.get() };
+ for (const auto p : SI) {
+ EXPECT_EQ(12u, p->size());
+ EXPECT_FALSE(p->isVendor());
+ EXPECT_TRUE(p->isFlexible());
+ EXPECT_FALSE(p->isGlobal());
+ EXPECT_TRUE(p->forStream());
+ EXPECT_FALSE(p->forPort());
+ }
+ const C2NumbersStreamTuning::output *SO[] = { outs2.get(), bouts2.get() };
+ for (const auto p : SO) {
+ EXPECT_EQ(12u, p->size());
+ EXPECT_FALSE(p->isVendor());
+ EXPECT_TRUE(p->isFlexible());
+ EXPECT_FALSE(p->isGlobal());
+ EXPECT_TRUE(p->forStream());
+ EXPECT_FALSE(p->forPort());
+ }
+
+ // port-specific flags & invariants
+ EXPECT_FALSE(outs1->forInput());
+ EXPECT_TRUE(outs1->forOutput());
+
+ EXPECT_TRUE(ins1->forInput());
+ EXPECT_FALSE(ins1->forOutput());
+
+ const C2NumbersStreamTuning *S2[] = { outs1.get(), ins1.get() };
+ for (const auto p : S2) {
+ EXPECT_TRUE((bool)(*p));
+ EXPECT_FALSE(!(*p));
+ EXPECT_EQ(100, p->m.mNumbers[0]);
+ EXPECT_EQ(1u, p->stream());
+ }
+ for (const auto p : SO) {
+ EXPECT_TRUE((bool)(*p));
+ EXPECT_FALSE(!(*p));
+
+ EXPECT_FALSE(p->forInput());
+ EXPECT_TRUE(p->forOutput());
+ }
+ for (const auto p : SI) {
+ EXPECT_TRUE((bool)(*p));
+ EXPECT_FALSE(!(*p));
+
+ EXPECT_TRUE(p->forInput());
+ EXPECT_FALSE(p->forOutput());
+ }
+ const C2NumbersStreamTuning *S3[] = { bouts1.get() };
+ for (const auto p : S3) {
+ EXPECT_FALSE((bool)(*p));
+ EXPECT_TRUE(!(*p));
+
+ EXPECT_FALSE(p->forInput());
+ EXPECT_FALSE(p->forOutput());
+ EXPECT_EQ(0, p->m.mNumbers[0]);
+ }
+
+ // values
+ EXPECT_EQ(100, ins2->m.mNumbers[0]);
+ EXPECT_EQ(100, outs2->m.mNumbers[0]);
+ EXPECT_EQ(0, bins1->m.mNumbers[0]);
+ EXPECT_EQ(0, bins2->m.mNumbers[0]);
+ EXPECT_EQ(0, bouts1->m.mNumbers[0]);
+ EXPECT_EQ(0, bouts2->m.mNumbers[0]);
+
+ EXPECT_EQ(1u, ins2->stream());
+ EXPECT_EQ(1u, outs2->stream());
+ EXPECT_EQ(0u, bins1->stream());
+ EXPECT_EQ(0u, bins2->stream());
+ EXPECT_EQ(0u, bouts1->stream());
+ EXPECT_EQ(0u, bouts2->stream());
+
+ EXPECT_TRUE(*ins1 != *outs1);
+ EXPECT_TRUE(*ins1 == *ins2);
+ EXPECT_TRUE(*outs1 == *outs2);
+ EXPECT_TRUE(*bins1 == *bouts1);
+ EXPECT_TRUE(*bins2 != *bouts2);
+
+ EXPECT_TRUE(*ins1 != *bins1);
+ bins1->m.mNumbers[0] = 100;
+ EXPECT_TRUE(*ins1 != *bins1);
+ bins1->setPort(false /* output */);
+ EXPECT_TRUE(*ins1 != *bins1);
+ bins1->setStream(1u);
+ EXPECT_TRUE(*ins1 == *bins1);
+
+ EXPECT_TRUE(*ins2 != *bins2);
+ bins2->m.mNumbers[0] = 100;
+ EXPECT_TRUE(*ins2 != *bins2);
+ bins2->setStream(1u);
+ EXPECT_TRUE(*ins2 == *bins2);
+
+ bins1->setPort(true /* output */);
+ EXPECT_TRUE(*outs1 == *bins1);
+
+ EXPECT_TRUE(*outs1 != *bouts1);
+ bouts1->m.mNumbers[0] = 100;
+ EXPECT_TRUE(*outs1 != *bouts1);
+ bouts1->setPort(true /* output */);
+ EXPECT_TRUE(*outs1 != *bouts1);
+ bouts1->setStream(1u);
+ EXPECT_TRUE(*outs1 == *bouts1);
+
+ EXPECT_TRUE(*outs2 != *bouts2);
+ bouts2->m.mNumbers[0] = 100;
+ EXPECT_TRUE(*outs2 != *bouts2);
+ bouts2->setStream(1u);
+ EXPECT_TRUE(*outs2 == *bouts2);
+
+ bouts1->setPort(false /* output */);
+ EXPECT_TRUE(*ins1 == *bouts1);
+
+ // index
+ EXPECT_EQ(C2Param::Type(ins1->type()).baseIndex(), C2NumbersStruct::baseIndex);
+ EXPECT_EQ(C2Param::Type(ins1->type()).paramIndex(), kParamIndexNumbers);
+ EXPECT_EQ(ins1->type(), C2NumbersStreamTuning::input::typeIndex);
+
+ EXPECT_EQ(C2Param::Type(ins2->type()).baseIndex(), C2NumbersStruct::baseIndex);
+ EXPECT_EQ(C2Param::Type(ins2->type()).paramIndex(), kParamIndexNumbers);
+ EXPECT_EQ(ins2->type(), C2NumbersStreamTuning::input::typeIndex);
+
+ EXPECT_EQ(C2Param::Type(outs1->type()).baseIndex(), C2NumbersStruct::baseIndex);
+ EXPECT_EQ(C2Param::Type(outs1->type()).paramIndex(), kParamIndexNumbers);
+ EXPECT_EQ(outs1->type(), C2NumbersStreamTuning::output::typeIndex);
+
+ EXPECT_EQ(C2Param::Type(outs2->type()).baseIndex(), C2NumbersStruct::baseIndex);
+ EXPECT_EQ(C2Param::Type(outs2->type()).paramIndex(), kParamIndexNumbers);
+ EXPECT_EQ(outs2->type(), C2NumbersStreamTuning::output::typeIndex);
+
+ C2Param::BaseIndex index = C2NumbersStreamTuning::input::typeIndex;
+ EXPECT_FALSE(index.isVendor());
+ EXPECT_TRUE(index.isFlexible());
+ EXPECT_EQ(index.baseIndex(), kParamIndexNumbers | C2Param::BaseIndex::_kFlexibleFlag);
+ EXPECT_EQ(index.paramIndex(), kParamIndexNumbers);
+
+ index = C2NumbersStreamTuning::output::typeIndex;
+ EXPECT_FALSE(index.isVendor());
+ EXPECT_TRUE(index.isFlexible());
+ EXPECT_EQ(index.baseIndex(), kParamIndexNumbers | C2Param::BaseIndex::_kFlexibleFlag);
+ EXPECT_EQ(index.paramIndex(), kParamIndexNumbers);
+
+ C2Param::Type type = C2NumbersStreamTuning::input::typeIndex;
+ EXPECT_FALSE(type.isVendor());
+ EXPECT_TRUE(type.isFlexible());
+ EXPECT_FALSE(type.isGlobal());
+ EXPECT_TRUE(type.forInput());
+ EXPECT_FALSE(type.forOutput());
+ EXPECT_TRUE(type.forStream());
+ EXPECT_FALSE(type.forPort());
+
+ type = C2NumbersStreamTuning::output::typeIndex;
+ EXPECT_FALSE(type.isVendor());
+ EXPECT_TRUE(type.isFlexible());
+ EXPECT_FALSE(type.isGlobal());
+ EXPECT_FALSE(type.forInput());
+ EXPECT_TRUE(type.forOutput());
+ EXPECT_TRUE(type.forStream());
+ EXPECT_FALSE(type.forPort());
+
+ EXPECT_EQ(C2NumbersPortTuning::From(nullptr), nullptr);
+ EXPECT_EQ(C2NumbersPortTuning::input::From(nullptr), nullptr);
+ EXPECT_EQ(C2NumbersPortTuning::output::From(nullptr), nullptr);
+ EXPECT_EQ(C2NumbersTuning::From(ins1.get()), nullptr);
+ EXPECT_EQ(C2NumbersTuning::From(ins2.get()), nullptr);
+ EXPECT_EQ(C2NumbersTuning::From(outs1.get()), nullptr);
+ EXPECT_EQ(C2NumbersTuning::From(outs2.get()), nullptr);
+ EXPECT_EQ(C2NumbersPortTuning::From(ins1.get()), nullptr);
+ EXPECT_EQ(C2NumbersPortTuning::From(ins2.get()), nullptr);
+ EXPECT_EQ(C2NumbersPortTuning::From(outs1.get()), nullptr);
+ EXPECT_EQ(C2NumbersPortTuning::From(outs2.get()), nullptr);
+ EXPECT_EQ(C2NumbersPortTuning::input::From(ins1.get()), nullptr);
+ EXPECT_EQ(C2NumbersPortTuning::input::From(ins2.get()), nullptr);
+ EXPECT_EQ(C2NumbersPortTuning::input::From(outs1.get()), nullptr);
+ EXPECT_EQ(C2NumbersPortTuning::input::From(outs2.get()), nullptr);
+ EXPECT_EQ(C2NumbersPortTuning::output::From(ins1.get()), nullptr);
+ EXPECT_EQ(C2NumbersPortTuning::output::From(ins2.get()), nullptr);
+ EXPECT_EQ(C2NumbersPortTuning::output::From(outs1.get()), nullptr);
+ EXPECT_EQ(C2NumbersPortTuning::output::From(outs2.get()), nullptr);
+ EXPECT_EQ(C2NumbersStreamTuning::From(ins1.get()), ins1.get());
+ EXPECT_EQ(C2NumbersStreamTuning::From(ins2.get()), (C2NumbersStreamTuning*)ins2.get());
+ EXPECT_EQ(C2NumbersStreamTuning::From(outs1.get()), outs1.get());
+ EXPECT_EQ(C2NumbersStreamTuning::From(outs2.get()), (C2NumbersStreamTuning*)outs2.get());
+ EXPECT_EQ(C2NumbersStreamTuning::input::From(ins1.get()), (C2NumbersStreamTuning::input*)ins1.get());
+ EXPECT_EQ(C2NumbersStreamTuning::input::From(ins2.get()), ins2.get());
+ EXPECT_EQ(C2NumbersStreamTuning::input::From(outs1.get()), nullptr);
+ EXPECT_EQ(C2NumbersStreamTuning::input::From(outs2.get()), nullptr);
+ EXPECT_EQ(C2NumbersStreamTuning::output::From(ins1.get()), nullptr);
+ EXPECT_EQ(C2NumbersStreamTuning::output::From(ins2.get()), nullptr);
+ EXPECT_EQ(C2NumbersStreamTuning::output::From(outs1.get()), (C2NumbersStreamTuning::output*)outs1.get());
+ EXPECT_EQ(C2NumbersStreamTuning::output::From(outs2.get()), outs2.get());
+
+ }
+
+ {
+ C2Int32Value int32Value(INT32_MIN);
+ static_assert(std::is_same<decltype(int32Value.mValue), int32_t>::value, "should be int32_t");
+ EXPECT_EQ(INT32_MIN, int32Value.mValue);
+ std::list<const C2FieldDescriptor> fields = int32Value.fieldList;
+ EXPECT_EQ(1u, fields.size());
+ EXPECT_EQ(FD::INT32, fields.cbegin()->type());
+ EXPECT_EQ(1u, fields.cbegin()->length());
+ EXPECT_EQ(C2String("value"), fields.cbegin()->name());
+ }
+
+ {
+ C2Uint32Value uint32Value(UINT32_MAX);
+ static_assert(std::is_same<decltype(uint32Value.mValue), uint32_t>::value, "should be uint32_t");
+ EXPECT_EQ(UINT32_MAX, uint32Value.mValue);
+ std::list<const C2FieldDescriptor> fields = uint32Value.fieldList;
+ EXPECT_EQ(1u, fields.size());
+ EXPECT_EQ(FD::UINT32, fields.cbegin()->type());
+ EXPECT_EQ(1u, fields.cbegin()->length());
+ EXPECT_EQ(C2String("value"), fields.cbegin()->name());
+ }
+
+ {
+ C2Int64Value int64Value(INT64_MIN);
+ static_assert(std::is_same<decltype(int64Value.mValue), int64_t>::value, "should be int64_t");
+ EXPECT_EQ(INT64_MIN, int64Value.mValue);
+ std::list<const C2FieldDescriptor> fields = int64Value.fieldList;
+ EXPECT_EQ(1u, fields.size());
+ EXPECT_EQ(FD::INT64, fields.cbegin()->type());
+ EXPECT_EQ(1u, fields.cbegin()->length());
+ EXPECT_EQ(C2String("value"), fields.cbegin()->name());
+ }
+
+ {
+ C2Uint64Value uint64Value(UINT64_MAX);
+ static_assert(std::is_same<decltype(uint64Value.mValue), uint64_t>::value, "should be uint64_t");
+ EXPECT_EQ(UINT64_MAX, uint64Value.mValue);
+ std::list<const C2FieldDescriptor> fields = uint64Value.fieldList;
+ EXPECT_EQ(1u, fields.size());
+ EXPECT_EQ(FD::UINT64, fields.cbegin()->type());
+ EXPECT_EQ(1u, fields.cbegin()->length());
+ EXPECT_EQ(C2String("value"), fields.cbegin()->name());
+ }
+
+ {
+ C2FloatValue floatValue(123.4f);
+ static_assert(std::is_same<decltype(floatValue.mValue), float>::value, "should be float");
+ EXPECT_EQ(123.4f, floatValue.mValue);
+ std::list<const C2FieldDescriptor> fields = floatValue.fieldList;
+ EXPECT_EQ(1u, fields.size());
+ EXPECT_EQ(FD::FLOAT, fields.cbegin()->type());
+ EXPECT_EQ(1u, fields.cbegin()->length());
+ EXPECT_EQ(C2String("value"), fields.cbegin()->name());
+ }
+
+ {
+ uint8_t initValue[] = "ABCD";
+ typedef C2GlobalParam<C2Setting, C2BlobValue, 0> BlobSetting;
+ std::unique_ptr<BlobSetting> blobValue = BlobSetting::alloc_unique(6, C2ConstMemoryBlock<uint8_t>(initValue));
+ static_assert(std::is_same<decltype(blobValue->m.mValue), uint8_t[]>::value, "should be uint8_t[]");
+ EXPECT_EQ(0, memcmp(blobValue->m.mValue, "ABCD\0", 6));
+ EXPECT_EQ(6u, blobValue->flexCount());
+ std::list<const C2FieldDescriptor> fields = blobValue->fieldList;
+ EXPECT_EQ(1u, fields.size());
+ EXPECT_EQ(FD::BLOB, fields.cbegin()->type());
+ EXPECT_EQ(0u, fields.cbegin()->length());
+ EXPECT_EQ(C2String("value"), fields.cbegin()->name());
+
+ blobValue = BlobSetting::alloc_unique(3, C2ConstMemoryBlock<uint8_t>(initValue));
+ EXPECT_EQ(0, memcmp(blobValue->m.mValue, "ABC", 3));
+ EXPECT_EQ(3u, blobValue->flexCount());
+ }
+
+ {
+ constexpr char initValue[] = "ABCD";
+ typedef C2GlobalParam<C2Setting, C2StringValue, 0> StringSetting;
+ std::unique_ptr<StringSetting> stringValue = StringSetting::alloc_unique(6, C2ConstMemoryBlock<char>(initValue));
+ stringValue = StringSetting::alloc_unique(6, initValue);
+ static_assert(std::is_same<decltype(stringValue->m.mValue), char[]>::value, "should be char[]");
+ EXPECT_EQ(0, memcmp(stringValue->m.mValue, "ABCD\0", 6));
+ EXPECT_EQ(6u, stringValue->flexCount());
+ std::list<const C2FieldDescriptor> fields = stringValue->fieldList;
+ EXPECT_EQ(1u, fields.size());
+ EXPECT_EQ(FD::STRING, fields.cbegin()->type());
+ EXPECT_EQ(0u, fields.cbegin()->length());
+ EXPECT_EQ(C2String("value"), fields.cbegin()->name());
+
+ stringValue = StringSetting::alloc_unique(3, C2ConstMemoryBlock<char>(initValue));
+ EXPECT_EQ(0, memcmp(stringValue->m.mValue, "AB", 3));
+ EXPECT_EQ(3u, stringValue->flexCount());
+
+ stringValue = StringSetting::alloc_unique(11, "initValue");
+ EXPECT_EQ(0, memcmp(stringValue->m.mValue, "initValue\0", 11));
+ EXPECT_EQ(11u, stringValue->flexCount());
+
+ stringValue = StringSetting::alloc_unique(initValue);
+ EXPECT_EQ(0, memcmp(stringValue->m.mValue, "ABCD", 5));
+ EXPECT_EQ(5u, stringValue->flexCount());
+
+ stringValue = StringSetting::alloc_unique({ 'A', 'B', 'C', 'D' });
+ EXPECT_EQ(0, memcmp(stringValue->m.mValue, "ABC", 4));
+ EXPECT_EQ(4u, stringValue->flexCount());
+ }
+
+ {
+ uint32_t videoWidth[] = { 12u, C2NumbersStreamTuning::output::typeIndex, 100 };
+ C2Param *p1 = C2Param::From(videoWidth, sizeof(videoWidth));
+ EXPECT_NE(nullptr, p1);
+ EXPECT_EQ(12u, p1->size());
+ EXPECT_EQ(C2NumbersStreamTuning::output::typeIndex, p1->type());
+
+ C2NumbersStreamTuning::output *vst = C2NumbersStreamTuning::output::From(p1);
+ EXPECT_NE(nullptr, vst);
+ if (vst) {
+ EXPECT_EQ(1u, vst->flexCount());
+ EXPECT_EQ(100, vst->m.mNumbers[0]);
+ }
+
+ p1 = C2Param::From(videoWidth, sizeof(videoWidth) + 2);
+ EXPECT_EQ(nullptr, p1);
+
+ p1 = C2Param::From(videoWidth, sizeof(videoWidth) - 2);
+ EXPECT_EQ(nullptr, p1);
+
+ p1 = C2Param::From(videoWidth, 3);
+ EXPECT_EQ(nullptr, p1);
+
+ p1 = C2Param::From(videoWidth, 0);
+ EXPECT_EQ(nullptr, p1);
+ }
+
+ {
+ uint32_t videoWidth[] = { 16u, C2NumbersPortTuning::input::typeIndex, 101, 102 };
+
+ C2Param *p1 = C2Param::From(videoWidth, sizeof(videoWidth));
+ EXPECT_NE(nullptr, p1);
+ EXPECT_EQ(16u, p1->size());
+ EXPECT_EQ(C2NumbersPortTuning::input::typeIndex, p1->type());
+
+ C2NumbersPortTuning::input *vpt = C2NumbersPortTuning::input::From(p1);
+ EXPECT_NE(nullptr, vpt);
+ if (vpt) {
+ EXPECT_EQ(2u, vpt->flexCount());
+ EXPECT_EQ(101, vpt->m.mNumbers[0]);
+ EXPECT_EQ(102, vpt->m.mNumbers[1]);
+ }
+
+ p1 = C2Param::From(videoWidth, sizeof(videoWidth) + 2);
+ EXPECT_EQ(nullptr, p1);
+
+ p1 = C2Param::From(videoWidth, sizeof(videoWidth) - 2);
+ EXPECT_EQ(nullptr, p1);
+
+ p1 = C2Param::From(videoWidth, 3);
+ EXPECT_EQ(nullptr, p1);
+
+ p1 = C2Param::From(videoWidth, 0);
+ EXPECT_EQ(nullptr, p1);
+ }
+}
+
+// ***********************
+
+}
+
+#include <util/C2ParamUtils.h>
+#include <C2Config.h>
+#include <C2Component.h>
+#include <unordered_map>
+
+namespace android {
+
+C2ENUM(
+ MetadataType, int32_t,
+ kInvalid = -1,
+ kNone = 0,
+ kGralloc,
+ kNativeHandle,
+ kANativeWindow,
+ kCamera,
+)
+
+enum {
+ kParamIndexVideoConfig = 0x1234,
+};
+
+struct C2VideoConfigStruct {
+ int32_t mWidth;
+ uint32_t mHeight;
+ MetadataType mMetadataType;
+ int32_t mSupportedFormats[];
+
+ C2VideoConfigStruct() {}
+
+ DEFINE_AND_DESCRIBE_FLEX_C2STRUCT(VideoConfig, mSupportedFormats)
+ C2FIELD(mWidth, "width")
+ C2FIELD(mHeight, "height")
+ C2FIELD(mMetadataType, "metadata-type")
+ C2FIELD(mSupportedFormats, "formats")
+};
+
+typedef C2PortParam<C2Tuning, C2VideoConfigStruct> C2VideoConfigPortTuning;
+
+class MyReflector : public C2ParamReflector {
+private:
+ std::unique_ptr<C2VideoConfigPortTuning::input> inputVideoConfigTuning;
+ std::unique_ptr<C2VideoConfigPortTuning::output> outputVideoConfigTuning;
+
+public:
+ void describeSupportedValues() {
+ C2TypedFieldSupportedValues<int32_t> supportedWidths(16, 1920, 8);
+ C2FieldSupportedValues supportedWidths2(16, 1920, 8);
+
+
+ std::list<C2FieldSupportedValues> supported;
+ //supported.emplace_push(inputVideoConfigTuning->mNumber, range(16, 1920, 8));
+ //supported.emplace_push(inputVideoConfigTuning->mHeight, range(16, 1088, 8));
+ //supported.emplace_push(inputVideoConfigTuning->mMetadataType, all_enums);
+ //supported.emplace_push(inputVideoConfigTuning->mSupportedFormats, { 0, 1, 5, 7 });
+ }
+
+ virtual std::unique_ptr<android::C2StructDescriptor> describe(C2Param::BaseIndex paramType) {
+ switch (paramType.baseIndex()) {
+ case C2VideoConfigPortTuning::baseIndex:
+ return std::unique_ptr<C2StructDescriptor>(new C2StructDescriptor{
+ paramType.baseIndex(),
+ C2VideoConfigPortTuning::fieldList,
+ });
+ }
+ return nullptr;
+ }
+};
+
+class MyComponentInstance : public C2ComponentInterface {
+public:
+ virtual C2String getName() const {
+ /// \todo this seems too specific
+ return "sample.interface";
+ };
+
+ virtual node_id getId() const {
+ /// \todo how are these shared?
+ return 0;
+ }
+
+ virtual status_t commit_sm(
+ const std::vector<C2Param* const> &params,
+ std::vector<std::unique_ptr<C2SettingResult>>* const failures) {
+ (void)params;
+ (void)failures;
+ return C2_UNSUPPORTED;
+ }
+
+ virtual status_t config_nb(
+ const std::vector<C2Param* const> &params,
+ std::vector<std::unique_ptr<C2SettingResult>>* const failures) {
+ (void)params;
+ (void)failures;
+ return C2_UNSUPPORTED;
+ }
+
+ virtual status_t createTunnel_sm(node_id targetComponent) {
+ (void)targetComponent;
+ return C2_UNSUPPORTED;
+ }
+
+ virtual status_t query_nb(
+ const std::vector<C2Param* const> &stackParams,
+ const std::vector<C2Param::Index> &heapParamIndices,
+ std::vector<std::unique_ptr<C2Param>>* const heapParams) const {
+ for (C2Param* const param : stackParams) {
+ if (!*param) { // param is already invalid - remember it
+ continue;
+ }
+
+ // note: this does not handle stream params (should use index...)
+ if (!mMyParams.count(param->type())) {
+ continue; // not my param
+ }
+
+ C2Param & myParam = mMyParams.find(param->type())->second;
+ if (myParam.size() != param->size()) { // incorrect size
+ param->invalidate();
+ continue;
+ }
+
+ param->updateFrom(myParam);
+ }
+
+ for (const C2Param::Index index : heapParamIndices) {
+ if (mMyParams.count(index)) {
+ C2Param & myParam = mMyParams.find(index)->second;
+ std::unique_ptr<C2Param> paramCopy(C2Param::From(&myParam, myParam.size()));
+ heapParams->push_back(std::move(paramCopy));
+ }
+ }
+
+ return C2_OK;
+ }
+
+ std::unordered_map<uint32_t, C2Param &> mMyParams;
+
+ C2ComponentDomainInfo mDomainInfo;
+
+ MyComponentInstance() {
+ mMyParams.insert({mDomainInfo.type(), mDomainInfo});
+ }
+
+ virtual status_t releaseTunnel_sm(node_id targetComponent) {
+ (void)targetComponent;
+ return C2_UNSUPPORTED;
+ }
+
+ class MyParamReflector : public C2ParamReflector {
+ const MyComponentInstance *instance;
+
+ public:
+ MyParamReflector(const MyComponentInstance *i) : instance(i) { }
+
+ virtual std::unique_ptr<C2StructDescriptor> describe(C2Param::BaseIndex paramIndex) {
+ switch (paramIndex.baseIndex()) {
+ case decltype(instance->mDomainInfo)::baseIndex:
+ default:
+ return std::unique_ptr<C2StructDescriptor>(new C2StructDescriptor{
+ instance->mDomainInfo.type(),
+ decltype(instance->mDomainInfo)::fieldList,
+ });
+ }
+ return nullptr;
+ }
+ };
+
+ virtual status_t getSupportedValues(
+ const std::vector<const C2ParamField> fields,
+ std::vector<C2FieldSupportedValues>* const values) const {
+ for (const C2ParamField &field : fields) {
+ if (field == C2ParamField(&mDomainInfo, &C2ComponentDomainInfo::mValue)) {
+ values->push_back(C2FieldSupportedValues(
+ false /* flag */,
+ &mDomainInfo.mValue
+ //,
+ //{(int32_t)C2DomainVideo}
+ ));
+ }
+ }
+ return C2_OK;
+ }
+
+ virtual std::shared_ptr<C2ParamReflector> getParamReflector() const {
+ return std::shared_ptr<C2ParamReflector>(new MyParamReflector(this));
+ }
+
+ virtual status_t getSupportedParams(std::vector<std::shared_ptr<C2ParamDescriptor>> * const params) const {
+ params->push_back(std::make_shared<C2ParamDescriptor>(
+ true /* required */, "_domain", &mDomainInfo));
+ return C2_OK;
+ }
+
+ status_t getSupportedParams2(std::vector<std::shared_ptr<C2ParamDescriptor>> * const params) {
+ params->push_back(std::shared_ptr<C2ParamDescriptor>(
+ new C2ParamDescriptor(true /* required */, "_domain", &mDomainInfo)));
+ return C2_OK;
+ }
+
+};
+
+template<typename E, bool S=std::is_enum<E>::value>
+struct getter {
+ int32_t get(const C2FieldSupportedValues::Primitive &p, int32_t*) {
+ return p.i32;
+ }
+ int64_t get(const C2FieldSupportedValues::Primitive &p, int64_t*) {
+ return p.i64;
+ }
+ uint32_t get(const C2FieldSupportedValues::Primitive &p, uint32_t*) {
+ return p.u32;
+ }
+ uint64_t get(const C2FieldSupportedValues::Primitive &p, uint64_t*) {
+ return p.u64;
+ }
+ float get(const C2FieldSupportedValues::Primitive &p, float*) {
+ return p.fp;
+ }
+};
+
+template<typename E>
+struct getter<E, true> {
+ typename std::underlying_type<E>::type get(const C2FieldSupportedValues::Primitive &p, E*) {
+ using u=typename std::underlying_type<E>::type;
+ return getter<u>().get(p, (u*)0);
+ }
+};
+
+template<typename T, bool E=std::is_enum<T>::value>
+struct lax_underlying_type {
+ typedef typename std::underlying_type<T>::type type;
+};
+
+template<typename T>
+struct lax_underlying_type<T, false> {
+ typedef T type;
+};
+
+template<typename E>
+typename lax_underlying_type<E>::type get(
+ const C2FieldSupportedValues::Primitive &p, E*) {
+ return getter<E>().get(p, (E*)0);
+}
+
+template<typename T>
+void dumpFSV(const C2FieldSupportedValues &sv, T*t) {
+ using namespace std;
+ cout << (std::is_enum<T>::value ? (std::is_signed<typename std::underlying_type<T>::type>::value ? "i" : "u")
+ : std::is_integral<T>::value ? std::is_signed<T>::value ? "i" : "u" : "f")
+ << (8 * sizeof(T));
+ if (sv.type == sv.RANGE) {
+ cout << ".range(" << get(sv.range.min, t);
+ if (get(sv.range.step, t) != std::is_integral<T>::value) {
+ cout << ":" << get(sv.range.step, t);
+ }
+ if (get(sv.range.nom, t) != 1 || get(sv.range.denom, t) != 1) {
+ cout << ":" << get(sv.range.nom, t) << "/" << get(sv.range.denom, t);
+ }
+ cout << get(sv.range.max, t) << ")";
+ }
+ if (sv.values.size()) {
+ cout << (sv.type == sv.FLAGS ? ".flags(" : ".list(");
+ const char *sep = "";
+ for (const C2FieldSupportedValues::Primitive &p : sv.values) {
+ cout << sep << get(p, t);
+ sep = ",";
+ }
+ cout << ")";
+ }
+ cout << endl;
+}
+
+void dumpType(C2Param::Type type) {
+ using namespace std;
+ cout << (type.isVendor() ? "Vendor" : "C2");
+ if (type.forInput()) {
+ cout << "Input";
+ } else if (type.forOutput()) {
+ cout << "Output";
+ } else if (type.forPort() && !type.forStream()) {
+ cout << "Port";
+ }
+ if (type.forStream()) {
+ cout << "Stream";
+ }
+
+ if (type.isFlexible()) {
+ cout << "Flex";
+ }
+
+ cout << type.paramIndex();
+
+ switch (type.kind()) {
+ case C2Param::INFO: cout << "Info"; break;
+ case C2Param::SETTING: cout << "Setting"; break;
+ case C2Param::TUNING: cout << "Tuning"; break;
+ case C2Param::STRUCT: cout << "Struct"; break;
+ default: cout << "Kind" << (int32_t)type.kind(); break;
+ }
+}
+
+void dumpType(C2Param::BaseIndex type) {
+ using namespace std;
+ cout << (type.isVendor() ? "Vendor" : "C2");
+ if (type.isFlexible()) {
+ cout << "Flex";
+ }
+
+ cout << type.paramIndex() << "Struct";
+}
+
+void dumpType(FD::Type type) {
+ using namespace std;
+ switch (type) {
+ case FD::BLOB: cout << "blob "; break;
+ case FD::FLOAT: cout << "float "; break;
+ case FD::INT32: cout << "int32_t "; break;
+ case FD::INT64: cout << "int64_t "; break;
+ case FD::UINT32: cout << "uint32_t "; break;
+ case FD::UINT64: cout << "uint64_t "; break;
+ case FD::STRING: cout << "char "; break;
+ default:
+ cout << "struct ";
+ dumpType((C2Param::Type)type);
+ break;
+ }
+}
+
+void dumpStruct(const C2StructDescriptor &sd) {
+ using namespace std;
+ cout << "struct ";
+ dumpType(sd.baseIndex());
+ cout << " {" << endl;
+ //C2FieldDescriptor &f;
+ for (const C2FieldDescriptor &f : sd) {
+ PrintTo(f, &cout);
+ cout << endl;
+
+ if (f.namedValues().size()) {
+ cout << ".named(";
+ const char *sep = "";
+ for (const FD::named_value_type &p : f.namedValues()) {
+ cout << sep << p.first << "=";
+ switch (f.type()) {
+ case C2Value::INT32: cout << get(p.second, (int32_t *)0); break;
+ case C2Value::INT64: cout << get(p.second, (int64_t *)0); break;
+ case C2Value::UINT32: cout << get(p.second, (uint32_t *)0); break;
+ case C2Value::UINT64: cout << get(p.second, (uint64_t *)0); break;
+ case C2Value::FLOAT: cout << get(p.second, (float *)0); break;
+ default: cout << "???"; break;
+ }
+ sep = ",";
+ }
+ cout << ")";
+ }
+ }
+
+ cout << "};" << endl;
+}
+
+void dumpDesc(const C2ParamDescriptor &pd) {
+ using namespace std;
+ if (pd.isRequired()) {
+ cout << "required ";
+ }
+ if (pd.isPersistent()) {
+ cout << "persistent ";
+ }
+ cout << "struct ";
+ dumpType(pd.type());
+ cout << " " << pd.name() << ";" << endl;
+}
+
+TEST_F(C2ParamTest, ReflectorTest) {
+ C2ComponentDomainInfo domainInfo;
+ std::shared_ptr<C2ComponentInterface> comp(new MyComponentInstance);
+ std::vector<C2FieldSupportedValues> values;
+
+ std::unique_ptr<C2StructDescriptor> desc{
+ comp->getParamReflector()->describe(C2ComponentDomainInfo::indexFlags)};
+ dumpStruct(*desc);
+
+ EXPECT_EQ(
+ C2_OK,
+ comp->getSupportedValues(
+ { C2ParamField(&domainInfo, &C2ComponentDomainInfo::mValue) },
+ &values)
+ );
+
+ for (const C2FieldSupportedValues &sv : values) {
+ dumpFSV(sv, &domainInfo.mValue);
+ }
+}
+
+C2ENUM(Enum1, uint32_t,
+ Enum1Value1,
+ Enum1Value2,
+ Enum1Value4 = Enum1Value2 + 2,
+);
+
+C2ENUM_CUSTOM_PREFIX(Enum2, uint32_t, "Enum",
+ Enum2Value1,
+ Enum2Value2,
+ Enum2Value4 = Enum1Value2 + 2,
+);
+
+C2ENUM_CUSTOM_NAMES(Enum3, uint8_t,
+ ({ { "value1", Enum3Value1 },
+ { "value2", Enum3Value2 },
+ { "value4", Enum3Value4 },
+ { "invalid", Invalid } }),
+ Enum3Value1,
+ Enum3Value2,
+ Enum3Value4 = Enum3Value2 + 2,
+ Invalid,
+);
+
+TEST_F(C2ParamTest, EnumUtilsTest) {
+ std::vector<std::pair<C2String, Enum3>> pairs ( { { "value1", Enum3Value1 },
+ { "value2", Enum3Value2 },
+ { "value4", Enum3Value4 },
+ { "invalid", Invalid } });
+ Enum3 e3;
+ FD::namedValuesFor(e3);
+}
+
+TEST_F(C2ParamTest, ParamUtilsTest) {
+ // upper case
+ EXPECT_EQ("yes", C2ParamUtils::camelCaseToDashed("YES"));
+ EXPECT_EQ("no", C2ParamUtils::camelCaseToDashed("NO"));
+ EXPECT_EQ("yes-no", C2ParamUtils::camelCaseToDashed("YES_NO"));
+ EXPECT_EQ("yes-no", C2ParamUtils::camelCaseToDashed("YES__NO"));
+ EXPECT_EQ("a2dp", C2ParamUtils::camelCaseToDashed("A2DP"));
+ EXPECT_EQ("mp2-ts", C2ParamUtils::camelCaseToDashed("MP2_TS"));
+ EXPECT_EQ("block-2d", C2ParamUtils::camelCaseToDashed("BLOCK_2D"));
+ EXPECT_EQ("mpeg-2-ts", C2ParamUtils::camelCaseToDashed("MPEG_2_TS"));
+ EXPECT_EQ("_hidden-value", C2ParamUtils::camelCaseToDashed("_HIDDEN_VALUE"));
+ EXPECT_EQ("__hidden-value2", C2ParamUtils::camelCaseToDashed("__HIDDEN_VALUE2"));
+ EXPECT_EQ("__hidden-value-2", C2ParamUtils::camelCaseToDashed("__HIDDEN_VALUE_2"));
+
+ // camel case
+ EXPECT_EQ("yes", C2ParamUtils::camelCaseToDashed("Yes"));
+ EXPECT_EQ("no", C2ParamUtils::camelCaseToDashed("No"));
+ EXPECT_EQ("yes-no", C2ParamUtils::camelCaseToDashed("YesNo"));
+ EXPECT_EQ("yes-no", C2ParamUtils::camelCaseToDashed("Yes_No"));
+ EXPECT_EQ("mp2-ts", C2ParamUtils::camelCaseToDashed("MP2Ts"));
+ EXPECT_EQ("block-2d", C2ParamUtils::camelCaseToDashed("Block2D"));
+ EXPECT_EQ("mpeg-2-ts", C2ParamUtils::camelCaseToDashed("Mpeg2ts"));
+ EXPECT_EQ("_hidden-value", C2ParamUtils::camelCaseToDashed("_HiddenValue"));
+ EXPECT_EQ("__hidden-value-2", C2ParamUtils::camelCaseToDashed("__HiddenValue2"));
+
+ // mixed case
+ EXPECT_EQ("mp2t-s", C2ParamUtils::camelCaseToDashed("MP2T_s"));
+ EXPECT_EQ("block-2d", C2ParamUtils::camelCaseToDashed("Block_2D"));
+ EXPECT_EQ("block-2-d", C2ParamUtils::camelCaseToDashed("Block2_D"));
+ EXPECT_EQ("mpeg-2-ts", C2ParamUtils::camelCaseToDashed("Mpeg_2ts"));
+ EXPECT_EQ("mpeg-2-ts", C2ParamUtils::camelCaseToDashed("Mpeg_2_TS"));
+ EXPECT_EQ("_hidden-value", C2ParamUtils::camelCaseToDashed("_Hidden__VALUE"));
+ EXPECT_EQ("__hidden-value-2", C2ParamUtils::camelCaseToDashed("__HiddenValue_2"));
+ EXPECT_EQ("_2", C2ParamUtils::camelCaseToDashed("_2"));
+ EXPECT_EQ("__23", C2ParamUtils::camelCaseToDashed("__23"));
+}
+
+TEST_F(C2ParamTest, C2ValueTest) {
+ C2Value val;
+ int32_t i32 = -32;
+ int64_t i64 = -64;
+ uint32_t u32 = 32;
+ uint64_t u64 = 64;
+ float fp = 1.5f;
+
+ EXPECT_EQ(C2Value::NO_INIT, val.type());
+ EXPECT_EQ(false, val.get(&i32));
+ EXPECT_EQ(-32, i32);
+ EXPECT_EQ(false, val.get(&i64));
+ EXPECT_EQ(-64, i64);
+ EXPECT_EQ(false, val.get(&u32));
+ EXPECT_EQ(32u, u32);
+ EXPECT_EQ(false, val.get(&u64));
+ EXPECT_EQ(64u, u64);
+ EXPECT_EQ(false, val.get(&fp));
+ EXPECT_EQ(1.5f, fp);
+
+ val = int32_t(-3216);
+ EXPECT_EQ(C2Value::INT32, val.type());
+ EXPECT_EQ(true, val.get(&i32));
+ EXPECT_EQ(-3216, i32);
+ EXPECT_EQ(false, val.get(&i64));
+ EXPECT_EQ(-64, i64);
+ EXPECT_EQ(false, val.get(&u32));
+ EXPECT_EQ(32u, u32);
+ EXPECT_EQ(false, val.get(&u64));
+ EXPECT_EQ(64u, u64);
+ EXPECT_EQ(false, val.get(&fp));
+ EXPECT_EQ(1.5f, fp);
+
+ val = uint32_t(3216);
+ EXPECT_EQ(C2Value::UINT32, val.type());
+ EXPECT_EQ(false, val.get(&i32));
+ EXPECT_EQ(-3216, i32);
+ EXPECT_EQ(false, val.get(&i64));
+ EXPECT_EQ(-64, i64);
+ EXPECT_EQ(true, val.get(&u32));
+ EXPECT_EQ(3216u, u32);
+ EXPECT_EQ(false, val.get(&u64));
+ EXPECT_EQ(64u, u64);
+ EXPECT_EQ(false, val.get(&fp));
+ EXPECT_EQ(1.5f, fp);
+
+ val = int64_t(-6432);
+ EXPECT_EQ(C2Value::INT64, val.type());
+ EXPECT_EQ(false, val.get(&i32));
+ EXPECT_EQ(-3216, i32);
+ EXPECT_EQ(true, val.get(&i64));
+ EXPECT_EQ(-6432, i64);
+ EXPECT_EQ(false, val.get(&u32));
+ EXPECT_EQ(3216u, u32);
+ EXPECT_EQ(false, val.get(&u64));
+ EXPECT_EQ(64u, u64);
+ EXPECT_EQ(false, val.get(&fp));
+ EXPECT_EQ(1.5f, fp);
+
+ val = uint64_t(6432);
+ EXPECT_EQ(C2Value::UINT64, val.type());
+ EXPECT_EQ(false, val.get(&i32));
+ EXPECT_EQ(-3216, i32);
+ EXPECT_EQ(false, val.get(&i64));
+ EXPECT_EQ(-6432, i64);
+ EXPECT_EQ(false, val.get(&u32));
+ EXPECT_EQ(3216u, u32);
+ EXPECT_EQ(true, val.get(&u64));
+ EXPECT_EQ(6432u, u64);
+ EXPECT_EQ(false, val.get(&fp));
+ EXPECT_EQ(1.5f, fp);
+
+ val = 15.25f;
+ EXPECT_EQ(C2Value::FLOAT, val.type());
+ EXPECT_EQ(false, val.get(&i32));
+ EXPECT_EQ(-3216, i32);
+ EXPECT_EQ(false, val.get(&i64));
+ EXPECT_EQ(-6432, i64);
+ EXPECT_EQ(false, val.get(&u32));
+ EXPECT_EQ(3216u, u32);
+ EXPECT_EQ(false, val.get(&u64));
+ EXPECT_EQ(6432u, u64);
+ EXPECT_EQ(true, val.get(&fp));
+ EXPECT_EQ(15.25f, fp);
+}
+
+} // namespace android
diff --git a/media/libstagefright/codec2/tests/C2_test.cpp b/media/libstagefright/codec2/tests/C2_test.cpp
new file mode 100644
index 0000000..92a3d91
--- /dev/null
+++ b/media/libstagefright/codec2/tests/C2_test.cpp
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2_test"
+
+#include <gtest/gtest.h>
+
+#include <C2.h>
+
+namespace android {
+
+/* ======================================= STATIC TESTS ======================================= */
+
+template<int N>
+struct c2_const_checker
+{
+ inline constexpr static int num() { return N; }
+};
+
+constexpr auto min_i32_i32 = c2_min(int32_t(1), int32_t(2));
+static_assert(std::is_same<decltype(min_i32_i32), const int32_t>::value, "should be int32_t");
+constexpr auto min_i32_i64 = c2_min(int32_t(3), int64_t(2));
+static_assert(std::is_same<decltype(min_i32_i64), const int64_t>::value, "should be int64_t");
+constexpr auto min_i8_i32 = c2_min(int8_t(0xff), int32_t(0xffffffff));
+static_assert(std::is_same<decltype(min_i8_i32), const int32_t>::value, "should be int32_t");
+
+static_assert(c2_const_checker<min_i32_i32>::num() == 1, "should be 1");
+static_assert(c2_const_checker<min_i32_i64>::num() == 2, "should be 2");
+static_assert(c2_const_checker<min_i8_i32>::num() == 0xffffffff, "should be 0xffffffff");
+
+constexpr auto min_u32_u32 = c2_min(uint32_t(1), uint32_t(2));
+static_assert(std::is_same<decltype(min_u32_u32), const uint32_t>::value, "should be uint32_t");
+constexpr auto min_u32_u64 = c2_min(uint32_t(3), uint64_t(2));
+static_assert(std::is_same<decltype(min_u32_u64), const uint32_t>::value, "should be uint32_t");
+constexpr auto min_u32_u8 = c2_min(uint32_t(0xffffffff), uint8_t(0xff));
+static_assert(std::is_same<decltype(min_u32_u8), const uint8_t>::value, "should be uint8_t");
+
+static_assert(c2_const_checker<min_u32_u32>::num() == 1, "should be 1");
+static_assert(c2_const_checker<min_u32_u64>::num() == 2, "should be 2");
+static_assert(c2_const_checker<min_u32_u8>::num() == 0xff, "should be 0xff");
+
+constexpr auto max_i32_i32 = c2_max(int32_t(1), int32_t(2));
+static_assert(std::is_same<decltype(max_i32_i32), const int32_t>::value, "should be int32_t");
+constexpr auto max_i32_i64 = c2_max(int32_t(3), int64_t(2));
+static_assert(std::is_same<decltype(max_i32_i64), const int64_t>::value, "should be int64_t");
+constexpr auto max_i8_i32 = c2_max(int8_t(0xff), int32_t(0xffffffff));
+static_assert(std::is_same<decltype(max_i8_i32), const int32_t>::value, "should be int32_t");
+
+static_assert(c2_const_checker<max_i32_i32>::num() == 2, "should be 2");
+static_assert(c2_const_checker<max_i32_i64>::num() == 3, "should be 3");
+static_assert(c2_const_checker<max_i8_i32>::num() == 0xffffffff, "should be 0xffffffff");
+
+constexpr auto max_u32_u32 = c2_max(uint32_t(1), uint32_t(2));
+static_assert(std::is_same<decltype(max_u32_u32), const uint32_t>::value, "should be uint32_t");
+constexpr auto max_u32_u64 = c2_max(uint32_t(3), uint64_t(2));
+static_assert(std::is_same<decltype(max_u32_u64), const uint64_t>::value, "should be uint64_t");
+constexpr auto max_u32_u8 = c2_max(uint32_t(0x7fffffff), uint8_t(0xff));
+static_assert(std::is_same<decltype(max_u32_u8), const uint32_t>::value, "should be uint32_t");
+
+static_assert(c2_const_checker<max_u32_u32>::num() == 2, "should be 2");
+static_assert(c2_const_checker<max_u32_u64>::num() == 3, "should be 3");
+static_assert(c2_const_checker<max_u32_u8>::num() == 0x7fffffff, "should be 0x7fffffff");
+
+} // namespace android
diff --git a/media/libstagefright/codec2/tests/vndk/C2UtilTest.cpp b/media/libstagefright/codec2/tests/vndk/C2UtilTest.cpp
new file mode 100644
index 0000000..7a1374b
--- /dev/null
+++ b/media/libstagefright/codec2/tests/vndk/C2UtilTest.cpp
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <util/_C2MacroUtils.h>
+
+/** \file
+ * Tests for vndk/util.
+ */
+
+/* --------------------------------------- _C2MacroUtils --------------------------------------- */
+
+static_assert(0 == _C2_ARGC(), "should be 0");
+static_assert(1 == _C2_ARGC(1), "should be 1");
+static_assert(2 == _C2_ARGC(1, 2), "should be 2");
+static_assert(64 == _C2_ARGC(
+ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
+ 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48,
+ 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64), "should be 64");
+
+static_assert(0 == _C2_ARGC(,), "should be 0");
+static_assert(1 == _C2_ARGC(1,), "should be 1");
+static_assert(2 == _C2_ARGC(1, 2,), "should be 2");
+static_assert(64 == _C2_ARGC(
+ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
+ 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48,
+ 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64,), "should be 64");
+
diff --git a/media/libstagefright/codec2/vndk/include/util/C2ParamUtils.h b/media/libstagefright/codec2/vndk/include/util/C2ParamUtils.h
new file mode 100644
index 0000000..edae303
--- /dev/null
+++ b/media/libstagefright/codec2/vndk/include/util/C2ParamUtils.h
@@ -0,0 +1,302 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef C2UTILS_PARAM_UTILS_H_
+#define C2UTILS_PARAM_UTILS_H_
+
+#include <C2Param.h>
+#include <util/_C2MacroUtils.h>
+
+#include <iostream>
+
+/** \file
+ * Utilities for parameter handling to be used by Codec2 implementations.
+ */
+
+namespace android {
+
+/// \cond INTERNAL
+
+/* ---------------------------- UTILITIES FOR ENUMERATION REFLECTION ---------------------------- */
+
+/**
+ * Utility class that allows ignoring enum value assignment (e.g. both '(_C2EnumConst)kValue = x'
+ * and '(_C2EnumConst)kValue' evaluate to kValue).
+ */
+template<typename T>
+class _C2EnumConst {
+public:
+ // implicit conversion from T
+ inline _C2EnumConst(T value) : _mValue(value) {}
+ // implicit conversion to T
+ inline operator T() { return _mValue; }
+ // implicit conversion to C2Value::Primitive
+ inline operator C2Value::Primitive() { return (T)_mValue; }
+ // ignore assignment and return T here to avoid implicit conversion to T later
+ inline T &operator =(T value __unused) { return _mValue; }
+private:
+ T _mValue;
+};
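+
+// A minimal sketch of the trick above (kA and kB are hypothetical enum constants, assumed here
+// only for illustration): because operator= discards its argument, both spellings yield kA:
+//   int32_t a = (_C2EnumConst<int32_t>)kA;        // a == kA
+//   int32_t b = (_C2EnumConst<int32_t>)kA = kB;   // b == kA as well; the "= kB" is ignored
+// This is what lets _C2_GET_ENUM_VALUE below evaluate enumerator definitions that carry an
+// explicit "= value" initializer.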
+
+/// mapper to get name of enum
+/// \note this will contain any initialization, which we will remove when converting to lower-case
+#define _C2_GET_ENUM_NAME(x, y) #x
+/// mapper to get value of enum
+#define _C2_GET_ENUM_VALUE(x, type) (_C2EnumConst<type>)x
+
+/// \endcond
+
+#define DEFINE_C2_ENUM_VALUE_AUTO_HELPER(name, type, prefix, ...) \
+template<> C2FieldDescriptor::named_values_type C2FieldDescriptor::namedValuesFor(const name &r __unused) { \
+ return C2ParamUtils::sanitizeEnumValues( \
+ std::vector<C2Value::Primitive> { _C2_MAP(_C2_GET_ENUM_VALUE, type, __VA_ARGS__) }, \
+ { _C2_MAP(_C2_GET_ENUM_NAME, type, __VA_ARGS__) }, \
+ prefix); \
+}
+
+#define DEFINE_C2_ENUM_VALUE_CUSTOM_HELPER(name, type, names, ...) \
+template<> C2FieldDescriptor::named_values_type C2FieldDescriptor::namedValuesFor(const name &r __unused) { \
+ return C2ParamUtils::customEnumValues( \
+ std::vector<std::pair<C2StringLiteral, name>> names); \
+}
+
+
+class C2ParamUtils {
+private:
+ static size_t countLeadingUnderscores(C2StringLiteral a) {
+ size_t i = 0;
+ while (a[i] == '_') {
+ ++i;
+ }
+ return i;
+ }
+
+ static size_t countMatching(C2StringLiteral a, const C2String &b) {
+ for (size_t i = 0; i < b.size(); ++i) {
+ if (!a[i] || a[i] != b[i]) {
+ return i;
+ }
+ }
+ return b.size();
+ }
+
+ // ABCDef => abc-def
+ // ABCD2ef => abcd2-ef // 0
+ // ABCD2Ef => abcd2-ef // -1
+ // AbcDef => abc-def // -1
+ // Abc2Def => abc-2def
+ // Abc2def => abc-2-def
+ // _Yo => _yo
+ // _yo => _yo
+ // C2_yo => c2-yo
+ // C2__yo => c2-yo
+
+ static C2String camelCaseToDashed(C2String name) {
+ enum {
+ kNone = '.',
+ kLower = 'a',
+ kUpper = 'A',
+ kDigit = '1',
+ kDash = '-',
+ kUnderscore = '_',
+ } type = kNone;
+ size_t word_start = 0;
+ for (size_t ix = 0; ix < name.size(); ++ix) {
+ /* std::cout << name.substr(0, word_start) << "|"
+ << name.substr(word_start, ix - word_start) << "["
+ << name.substr(ix, 1) << "]" << name.substr(ix + 1)
+ << ": " << (char)type << std::endl; */
+ if (isupper(name[ix])) {
+ if (type == kLower) {
+ name.insert(ix++, 1, '-');
+ word_start = ix;
+ }
+ name[ix] = tolower(name[ix]);
+ type = kUpper;
+ } else if (islower(name[ix])) {
+ if (type == kDigit && ix > 0) {
+ name.insert(ix++, 1, '-');
+ word_start = ix;
+ } else if (type == kUpper && ix > word_start + 1) {
+ name.insert(ix++ - 1, 1, '-');
+ word_start = ix - 1;
+ }
+ type = kLower;
+ } else if (isdigit(name[ix])) {
+ if (type == kLower) {
+ name.insert(ix++, 1, '-');
+ word_start = ix;
+ }
+ type = kDigit;
+ } else if (name[ix] == '_') {
+ if (type == kDash) {
+ name.erase(ix--, 1);
+ } else if (type != kNone && type != kUnderscore) {
+ name[ix] = '-';
+ type = kDash;
+ word_start = ix + 1;
+ } else {
+ type = kUnderscore;
+ word_start = ix + 1;
+ }
+ } else {
+ name.resize(ix);
+ }
+ }
+ // std::cout << "=> " << name << std::endl;
+ return name;
+ }
+
+ static std::vector<C2String> sanitizeEnumValueNames(
+ const std::vector<C2StringLiteral> names,
+ C2StringLiteral _prefix = NULL) {
+ std::vector<C2String> sanitizedNames;
+ C2String prefix;
+ size_t extraUnderscores = 0;
+ bool first = true;
+ if (_prefix) {
+ extraUnderscores = countLeadingUnderscores(_prefix);
+ prefix = _prefix + extraUnderscores;
+ first = false;
+ // std::cout << "prefix:" << prefix << ", underscores:" << extraUnderscores << std::endl;
+ }
+
+ // calculate prefix and minimum leading underscores
+ for (C2StringLiteral s : names) {
+ // std::cout << s << std::endl;
+ size_t underscores = countLeadingUnderscores(s);
+ if (first) {
+ extraUnderscores = underscores;
+ prefix = s + underscores;
+ first = false;
+ } else {
+ size_t matching = countMatching(
+ s + underscores,
+ prefix);
+ prefix.resize(matching);
+ extraUnderscores = std::min(underscores, extraUnderscores);
+ }
+ // std::cout << "prefix:" << prefix << ", underscores:" << extraUnderscores << std::endl;
+ if (prefix.size() == 0 && extraUnderscores == 0) {
+ break;
+ }
+ }
+
+ // we swallow the first underscore after upper case prefixes
+ bool upperCasePrefix = true;
+ for (size_t i = 0; i < prefix.size(); ++i) {
+ if (islower(prefix[i])) {
+ upperCasePrefix = false;
+ break;
+ }
+ }
+
+ for (C2StringLiteral s : names) {
+ size_t underscores = countLeadingUnderscores(s);
+ C2String sanitized = C2String(s, underscores - extraUnderscores);
+ sanitized.append(s + prefix.size() + underscores +
+ (upperCasePrefix && s[prefix.size() + underscores] == '_'));
+ sanitizedNames.push_back(camelCaseToDashed(sanitized));
+ }
+
+ for (C2String s : sanitizedNames) {
+ std::cout << s << std::endl;
+ }
+
+ return sanitizedNames;
+ }
+
+ friend class C2ParamTest_ParamUtilsTest_Test;
+
+public:
+ static std::vector<C2String> getEnumValuesFromString(C2StringLiteral value) {
+ std::vector<C2String> foundNames;
+ size_t pos = 0, len = strlen(value);
+ do {
+ size_t endPos = strcspn(value + pos, " ,=") + pos;
+ if (endPos > pos) {
+ foundNames.emplace_back(value + pos, endPos - pos);
+ }
+ if (value[endPos] && value[endPos] != ',') {
+ endPos += strcspn(value + endPos, ",");
+ }
+ pos = strspn(value + endPos, " ,") + endPos;
+ } while (pos < len);
+ return foundNames;
+ }
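+
+ // For example (illustrative input only): getEnumValuesFromString("kValue1, kValue2 = 5, kValue3")
+ // returns { "kValue1", "kValue2", "kValue3" }; each name ends at the first ' ', ',' or '=',
+ // and any "= value" initializer text is skipped.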
+
+ template<typename T>
+ static C2FieldDescriptor::named_values_type sanitizeEnumValues(
+ std::vector<T> values,
+ std::vector<C2StringLiteral> names,
+ C2StringLiteral prefix = NULL) {
+ C2FieldDescriptor::named_values_type namedValues;
+ std::vector<C2String> sanitizedNames = sanitizeEnumValueNames(names, prefix);
+ for (size_t i = 0; i < values.size() && i < sanitizedNames.size(); ++i) {
+ namedValues.emplace_back(sanitizedNames[i], values[i]);
+ }
+ return namedValues;
+ }
+
+ template<typename E>
+ static C2FieldDescriptor::named_values_type customEnumValues(
+ std::vector<std::pair<C2StringLiteral, E>> items) {
+ C2FieldDescriptor::named_values_type namedValues;
+ for (auto &item : items) {
+ namedValues.emplace_back(item.first, item.second);
+ }
+ return namedValues;
+ }
+};
+
+/* ---------------------------- UTILITIES FOR PARAMETER REFLECTION ---------------------------- */
+
+/* ======================== UTILITY TEMPLATES FOR PARAMETER REFLECTION ======================== */
+
+#if 1
+template<typename... Params>
+class C2_HIDE _C2Tuple { };
+
+C2_HIDE
+void addC2Params(std::list<const C2FieldDescriptor> &, _C2Tuple<> *) {
+}
+
+template<typename T, typename... Params>
+C2_HIDE
+void addC2Params(std::list<const C2FieldDescriptor> &fields, _C2Tuple<T, Params...> *)
+{
+ //C2Param::index_t index = T::baseIndex;
+ //(void)index;
+ fields.insert(fields.end(), T::fieldList);
+ addC2Params(fields, (_C2Tuple<Params...> *)nullptr);
+}
+
+template<typename... Params>
+C2_HIDE
+std::list<const C2FieldDescriptor> describeC2Params() {
+ std::list<const C2FieldDescriptor> fields;
+ addC2Params(fields, (_C2Tuple<Params...> *)nullptr);
+ return fields;
+}
+
+#endif
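+
+// A minimal usage sketch (MyParamType is a stand-in for any parameter type that exposes a static
+// fieldList, like the tuning types in the tests; it is not defined in this header):
+//   std::list<const C2FieldDescriptor> fields = describeC2Params<MyParamType>();
+// The resulting list concatenates the field descriptors of each listed type, in order.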
+
+/* ---------------------------- UTILITIES FOR ENUMERATION REFLECTION ---------------------------- */
+
+} // namespace android
+
+#endif // C2UTILS_PARAM_UTILS_H_
+
diff --git a/media/libstagefright/codec2/vndk/include/util/_C2MacroUtils.h b/media/libstagefright/codec2/vndk/include/util/_C2MacroUtils.h
new file mode 100644
index 0000000..04e9ba5
--- /dev/null
+++ b/media/libstagefright/codec2/vndk/include/util/_C2MacroUtils.h
@@ -0,0 +1,162 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef C2UTILS_MACRO_UTILS_H_
+#define C2UTILS_MACRO_UTILS_H_
+
+/** \file
+ * Macro utilities for the utils library used by Codec2 implementations.
+ */
+
+/// \if 0
+
+/* --------------------------------- VARIABLE ARGUMENT COUNTING --------------------------------- */
+
+// remove empty arguments - _C2_ARG() expands to '', while _C2_ARG(x) expands to ', x'
+// _C2_ARGn(...) does the same for n arguments
+#define _C2_ARG(...) , ##__VA_ARGS__
+#define _C2_ARG2(_1, _2) _C2_ARG(_1) _C2_ARG(_2)
+#define _C2_ARG4(_1, _2, _3, _4) _C2_ARG2(_1, _2) _C2_ARG2(_3, _4)
+#define _C2_ARG8(_1, _2, _3, _4, _5, _6, _7, _8) _C2_ARG4(_1, _2, _3, _4) _C2_ARG4(_5, _6, _7, _8)
+#define _C2_ARG16(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16) \
+ _C2_ARG8(_1, _2, _3, _4, _5, _6, _7, _8) _C2_ARG8(_9, _10, _11, _12, _13, _14, _15, _16)
+
+// return the 65th argument
+#define _C2_ARGC_3(_, _0, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, \
+ _17, _18, _19, _20, _21, _22, _23, _24, _25, _26, _27, _28, _29, _30, _31, _32, \
+ _33, _34, _35, _36, _37, _38, _39, _40, _41, _42, _43, _44, _45, _46, _47, _48, \
+ _49, _50, _51, _52, _53, _54, _55, _56, _57, _58, _59, _60, _61, _62, _63, _64, ...) _64
+
+/// \endif
+
+/**
+ * Returns the number of arguments.
+ */
+// We do this by prepending one argument and appending 65 designed values such that the 65th
+// element will be the number of arguments.
+#define _C2_ARGC(...) _C2_ARGC_1(0, ##__VA_ARGS__, \
+ 64, 63, 62, 61, 60, 59, 58, 57, 56, 55, 54, 53, 52, 51, 50, 49, 48, 47, 46, 45, 44, 43, \
+ 42, 41, 40, 39, 38, 37, 36, 35, 34, 33, 32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, \
+ 20, 19, 18, 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0)
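+
+// For instance, _C2_ARGC(a, b) expands to _C2_ARGC_1(0, a, b, 64, 63, ..., 1, 0); after the
+// empty-argument removal done by _C2_ARGC_1/_C2_ARGC_2 below, _C2_ARGC_3 above picks the 65th
+// value after the leading marker, which is 2 (see also the static_asserts in C2UtilTest.cpp).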
+
+/// \if 0
+
+// step 1. remove empty arguments - this is needed to allow trailing comma in enum definitions
+// (NOTE: we don't know which argument will have this trailing comma so we have to try all)
+#define _C2_ARGC_1(_, _0, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, \
+ _17, _18, _19, _20, _21, _22, _23, _24, _25, _26, _27, _28, _29, _30, _31, _32, \
+ _33, _34, _35, _36, _37, _38, _39, _40, _41, _42, _43, _44, _45, _46, _47, _48, \
+ _49, _50, _51, _52, _53, _54, _55, _56, _57, _58, _59, _60, _61, _62, _63, _64, ...) \
+ _C2_ARGC_2(_ _C2_ARG(_0) \
+ _C2_ARG16(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16) \
+ _C2_ARG16(_17, _18, _19, _20, _21, _22, _23, _24, _25, _26, _27, _28, _29, _30, _31, _32) \
+ _C2_ARG16(_33, _34, _35, _36, _37, _38, _39, _40, _41, _42, _43, _44, _45, _46, _47, _48) \
+ _C2_ARG16(_49, _50, _51, _52, _53, _54, _55, _56, _57, _58, _59, _60, _61, _62, _63, _64), \
+ ##__VA_ARGS__)
+
+// step 2. this is needed as removed arguments cannot be passed directly as empty into a macro
+#define _C2_ARGC_2(...) _C2_ARGC_3(__VA_ARGS__)
+
+/// \endif
+
+/* -------------------------------- VARIABLE ARGUMENT CONVERSION -------------------------------- */
+
+/// \if 0
+
+// macros that convert _1, _2, _3, ... to fn(_1, arg), fn(_2, arg), fn(_3, arg), ...
+#define _C2_MAP_64(fn, arg, head, ...) fn(head, arg), _C2_MAP_63(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_63(fn, arg, head, ...) fn(head, arg), _C2_MAP_62(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_62(fn, arg, head, ...) fn(head, arg), _C2_MAP_61(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_61(fn, arg, head, ...) fn(head, arg), _C2_MAP_60(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_60(fn, arg, head, ...) fn(head, arg), _C2_MAP_59(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_59(fn, arg, head, ...) fn(head, arg), _C2_MAP_58(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_58(fn, arg, head, ...) fn(head, arg), _C2_MAP_57(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_57(fn, arg, head, ...) fn(head, arg), _C2_MAP_56(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_56(fn, arg, head, ...) fn(head, arg), _C2_MAP_55(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_55(fn, arg, head, ...) fn(head, arg), _C2_MAP_54(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_54(fn, arg, head, ...) fn(head, arg), _C2_MAP_53(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_53(fn, arg, head, ...) fn(head, arg), _C2_MAP_52(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_52(fn, arg, head, ...) fn(head, arg), _C2_MAP_51(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_51(fn, arg, head, ...) fn(head, arg), _C2_MAP_50(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_50(fn, arg, head, ...) fn(head, arg), _C2_MAP_49(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_49(fn, arg, head, ...) fn(head, arg), _C2_MAP_48(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_48(fn, arg, head, ...) fn(head, arg), _C2_MAP_47(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_47(fn, arg, head, ...) fn(head, arg), _C2_MAP_46(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_46(fn, arg, head, ...) fn(head, arg), _C2_MAP_45(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_45(fn, arg, head, ...) fn(head, arg), _C2_MAP_44(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_44(fn, arg, head, ...) fn(head, arg), _C2_MAP_43(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_43(fn, arg, head, ...) fn(head, arg), _C2_MAP_42(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_42(fn, arg, head, ...) fn(head, arg), _C2_MAP_41(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_41(fn, arg, head, ...) fn(head, arg), _C2_MAP_40(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_40(fn, arg, head, ...) fn(head, arg), _C2_MAP_39(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_39(fn, arg, head, ...) fn(head, arg), _C2_MAP_38(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_38(fn, arg, head, ...) fn(head, arg), _C2_MAP_37(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_37(fn, arg, head, ...) fn(head, arg), _C2_MAP_36(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_36(fn, arg, head, ...) fn(head, arg), _C2_MAP_35(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_35(fn, arg, head, ...) fn(head, arg), _C2_MAP_34(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_34(fn, arg, head, ...) fn(head, arg), _C2_MAP_33(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_33(fn, arg, head, ...) fn(head, arg), _C2_MAP_32(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_32(fn, arg, head, ...) fn(head, arg), _C2_MAP_31(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_31(fn, arg, head, ...) fn(head, arg), _C2_MAP_30(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_30(fn, arg, head, ...) fn(head, arg), _C2_MAP_29(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_29(fn, arg, head, ...) fn(head, arg), _C2_MAP_28(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_28(fn, arg, head, ...) fn(head, arg), _C2_MAP_27(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_27(fn, arg, head, ...) fn(head, arg), _C2_MAP_26(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_26(fn, arg, head, ...) fn(head, arg), _C2_MAP_25(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_25(fn, arg, head, ...) fn(head, arg), _C2_MAP_24(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_24(fn, arg, head, ...) fn(head, arg), _C2_MAP_23(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_23(fn, arg, head, ...) fn(head, arg), _C2_MAP_22(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_22(fn, arg, head, ...) fn(head, arg), _C2_MAP_21(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_21(fn, arg, head, ...) fn(head, arg), _C2_MAP_20(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_20(fn, arg, head, ...) fn(head, arg), _C2_MAP_19(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_19(fn, arg, head, ...) fn(head, arg), _C2_MAP_18(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_18(fn, arg, head, ...) fn(head, arg), _C2_MAP_17(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_17(fn, arg, head, ...) fn(head, arg), _C2_MAP_16(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_16(fn, arg, head, ...) fn(head, arg), _C2_MAP_15(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_15(fn, arg, head, ...) fn(head, arg), _C2_MAP_14(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_14(fn, arg, head, ...) fn(head, arg), _C2_MAP_13(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_13(fn, arg, head, ...) fn(head, arg), _C2_MAP_12(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_12(fn, arg, head, ...) fn(head, arg), _C2_MAP_11(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_11(fn, arg, head, ...) fn(head, arg), _C2_MAP_10(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_10(fn, arg, head, ...) fn(head, arg), _C2_MAP_9(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_9(fn, arg, head, ...) fn(head, arg), _C2_MAP_8(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_8(fn, arg, head, ...) fn(head, arg), _C2_MAP_7(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_7(fn, arg, head, ...) fn(head, arg), _C2_MAP_6(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_6(fn, arg, head, ...) fn(head, arg), _C2_MAP_5(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_5(fn, arg, head, ...) fn(head, arg), _C2_MAP_4(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_4(fn, arg, head, ...) fn(head, arg), _C2_MAP_3(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_3(fn, arg, head, ...) fn(head, arg), _C2_MAP_2(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_2(fn, arg, head, ...) fn(head, arg), _C2_MAP_1(fn, arg, ##__VA_ARGS__)
+#define _C2_MAP_1(fn, arg, head, ...) fn(head, arg)
+
+/// \endif
+
+/**
+ * Maps each argument using another macro x -> fn(x, arg)
+ */
+// use wrapper to call the proper mapper based on the number of arguments
+#define _C2_MAP(fn, arg, ...) _C2_MAP__(_C2_ARGC(__VA_ARGS__), fn, arg, ##__VA_ARGS__)
+
+/// \if 0
+
+// evaluate _n so it becomes a number
+#define _C2_MAP__(_n, fn, arg, ...) _C2_MAP_(_n, fn, arg, __VA_ARGS__)
+// call the proper mapper
+#define _C2_MAP_(_n, fn, arg, ...) _C2_MAP_##_n (fn, arg, __VA_ARGS__)
+
+/// \endif
+
+#endif // C2UTILS_MACRO_UTILS_H_
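For reference, a minimal usage sketch of the new _C2_MAP helper (hypothetical, not part of the patch; it assumes _C2_ARGC(...) expands to the count of its variadic arguments, as the earlier part of this header implies):

    // _SQUARE is a hypothetical mapper: fn(head, arg) -> ((head) * (head)); arg is unused here.
    #define _SQUARE(x, unused) ((x) * (x))

    // _C2_MAP(_SQUARE, 0, 1, 2, 3) evaluates _C2_ARGC(1, 2, 3) to 3, dispatches to
    // _C2_MAP_3, and expands to: ((1) * (1)), ((2) * (2)), ((3) * (3))
    int squares[] = { _C2_MAP(_SQUARE, 0, 1, 2, 3) };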
diff --git a/media/libstagefright/codecs/aacdec/Android.mk b/media/libstagefright/codecs/aacdec/Android.mk
index 84ea708..6490f8f 100644
--- a/media/libstagefright/codecs/aacdec/Android.mk
+++ b/media/libstagefright/codecs/aacdec/Android.mk
@@ -19,7 +19,6 @@
LOCAL_CFLAGS :=
LOCAL_CFLAGS += -Werror
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow unsigned-integer-overflow
LOCAL_STATIC_LIBRARIES := libFraunhoferAAC
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
index 1c5e3c6..9fbdb72 100644
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
+++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
@@ -62,6 +62,7 @@
OMX_AUDIO_AACObjectHE_PS,
OMX_AUDIO_AACObjectLD,
OMX_AUDIO_AACObjectELD,
+ OMX_AUDIO_AACObjectER_Scalable,
};
SoftAAC2::SoftAAC2(
diff --git a/media/libstagefright/codecs/aacenc/Android.mk b/media/libstagefright/codecs/aacenc/Android.mk
index 71c374b..2f34e83 100644
--- a/media/libstagefright/codecs/aacenc/Android.mk
+++ b/media/libstagefright/codecs/aacenc/Android.mk
@@ -78,7 +78,6 @@
$(LOCAL_PATH)/basic_op
LOCAL_CFLAGS += -Werror
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow unsigned-integer-overflow
include $(BUILD_STATIC_LIBRARY)
@@ -106,7 +105,6 @@
LOCAL_CFLAGS :=
LOCAL_CFLAGS += -Werror
- LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow unsigned-integer-overflow
LOCAL_STATIC_LIBRARIES := libFraunhoferAAC
@@ -132,7 +130,6 @@
LOCAL_CFLAGS := -DOSCL_IMPORT_REF=
LOCAL_CFLAGS += -Werror
- LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow unsigned-integer-overflow
LOCAL_STATIC_LIBRARIES := \
diff --git a/media/libstagefright/codecs/amrnb/common/Android.mk b/media/libstagefright/codecs/amrnb/common/Android.mk
index 15220a4..0bb5724 100644
--- a/media/libstagefright/codecs/amrnb/common/Android.mk
+++ b/media/libstagefright/codecs/amrnb/common/Android.mk
@@ -68,7 +68,6 @@
-D"OSCL_UNUSED_ARG(x)=(void)(x)" -DOSCL_IMPORT_REF= -DOSCL_EXPORT_REF=
LOCAL_CFLAGS += -Werror
-LOCAL_CLANG := true
#addressing b/25409744
#LOCAL_SANITIZE := signed-integer-overflow unsigned-integer-overflow
diff --git a/media/libstagefright/codecs/amrnb/dec/Android.mk b/media/libstagefright/codecs/amrnb/dec/Android.mk
index 7967ec3..3959b80 100644
--- a/media/libstagefright/codecs/amrnb/dec/Android.mk
+++ b/media/libstagefright/codecs/amrnb/dec/Android.mk
@@ -48,7 +48,6 @@
-D"OSCL_UNUSED_ARG(x)=(void)(x)" -DOSCL_IMPORT_REF=
LOCAL_CFLAGS += -Werror
-LOCAL_CLANG := true
#LOCAL_SANITIZE := signed-integer-overflow
LOCAL_MODULE := libstagefright_amrnbdec
@@ -73,7 +72,6 @@
LOCAL_CFLAGS := -DOSCL_IMPORT_REF=
LOCAL_CFLAGS += -Werror
-LOCAL_CLANG := true
#LOCAL_SANITIZE := signed-integer-overflow
LOCAL_STATIC_LIBRARIES := \
@@ -104,7 +102,6 @@
LOCAL_SHARED_LIBRARIES := \
libstagefright_amrnb_common libaudioutils liblog
-LOCAL_CLANG := true
#LOCAL_SANITIZE := signed-integer-overflow
LOCAL_MODULE := libstagefright_amrnbdec_test
diff --git a/media/libstagefright/codecs/amrnb/enc/Android.mk b/media/libstagefright/codecs/amrnb/enc/Android.mk
index f8a41af..af1efb9 100644
--- a/media/libstagefright/codecs/amrnb/enc/Android.mk
+++ b/media/libstagefright/codecs/amrnb/enc/Android.mk
@@ -70,7 +70,6 @@
-D"OSCL_UNUSED_ARG(x)=(void)(x)"
LOCAL_CFLAGS += -Werror
-LOCAL_CLANG := true
#addressing b/25409744
#LOCAL_SANITIZE := signed-integer-overflow
@@ -94,7 +93,6 @@
$(LOCAL_PATH)/../common
LOCAL_CFLAGS += -Werror
-LOCAL_CLANG := true
#addressing b/25409744
#LOCAL_SANITIZE := signed-integer-overflow
@@ -127,7 +125,6 @@
LOCAL_SHARED_LIBRARIES := \
libstagefright_amrnb_common
-LOCAL_CLANG := true
#addressing b/25409744
#LOCAL_SANITIZE := signed-integer-overflow
diff --git a/media/libstagefright/codecs/amrwb/Android.mk b/media/libstagefright/codecs/amrwb/Android.mk
index 1649c4a..73a1751 100644
--- a/media/libstagefright/codecs/amrwb/Android.mk
+++ b/media/libstagefright/codecs/amrwb/Android.mk
@@ -51,7 +51,6 @@
-D"OSCL_UNUSED_ARG(x)=(void)(x)" -DOSCL_IMPORT_REF=
LOCAL_CFLAGS += -Werror
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow
LOCAL_MODULE := libstagefright_amrwbdec
@@ -74,7 +73,6 @@
LOCAL_SHARED_LIBRARIES := \
libaudioutils
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow
LOCAL_MODULE := libstagefright_amrwbdec_test
diff --git a/media/libstagefright/codecs/amrwbenc/Android.mk b/media/libstagefright/codecs/amrwbenc/Android.mk
index 026006e..3395fc1 100644
--- a/media/libstagefright/codecs/amrwbenc/Android.mk
+++ b/media/libstagefright/codecs/amrwbenc/Android.mk
@@ -102,7 +102,6 @@
$(LOCAL_PATH)/inc
LOCAL_CFLAGS += -Werror
-LOCAL_CLANG := true
#LOCAL_SANITIZE := signed-integer-overflow
include $(BUILD_STATIC_LIBRARY)
@@ -120,7 +119,6 @@
frameworks/native/include/media/openmax
LOCAL_CFLAGS += -Werror
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow
LOCAL_STATIC_LIBRARIES := \
diff --git a/media/libstagefright/codecs/amrwbenc/SampleCode/AMRWB_E_SAMPLE.c b/media/libstagefright/codecs/amrwbenc/SampleCode/AMRWB_E_SAMPLE.c
index 0cb0097..7c094f3 100644
--- a/media/libstagefright/codecs/amrwbenc/SampleCode/AMRWB_E_SAMPLE.c
+++ b/media/libstagefright/codecs/amrwbenc/SampleCode/AMRWB_E_SAMPLE.c
@@ -90,7 +90,7 @@
VO_AUDIO_CODECAPI AudioAPI;
VO_MEM_OPERATOR moper;
VO_CODEC_INIT_USERDATA useData;
- VO_HANDLE hCodec;
+ VO_HANDLE hCodec = NULL;
VO_CODECBUFFER inData;
VO_CODECBUFFER outData;
VO_AUDIO_OUTPUTINFO outFormat;
@@ -211,7 +211,7 @@
if(returnCode == 0)
{
framenum++;
- printf(" Frames processed: %hd\r", framenum);
+ printf(" Frames processed: %d\r", framenum);
if(framenum == 1)
{
fwrite(OutputBuf, 1, outData.Length + size1, fdst);
diff --git a/media/libstagefright/codecs/avc/common/Android.mk b/media/libstagefright/codecs/avc/common/Android.mk
index ed0f8ca..ecf0884 100644
--- a/media/libstagefright/codecs/avc/common/Android.mk
+++ b/media/libstagefright/codecs/avc/common/Android.mk
@@ -17,7 +17,6 @@
$(LOCAL_PATH)/include
LOCAL_CFLAGS += -Werror
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/avc/enc/Android.mk b/media/libstagefright/codecs/avc/enc/Android.mk
index 511c9f2..cfca608 100644
--- a/media/libstagefright/codecs/avc/enc/Android.mk
+++ b/media/libstagefright/codecs/avc/enc/Android.mk
@@ -31,7 +31,6 @@
-DOSCL_IMPORT_REF= -D"OSCL_UNUSED_ARG(x)=(void)(x)" -DOSCL_EXPORT_REF=
LOCAL_CFLAGS += -Werror
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow
include $(BUILD_STATIC_LIBRARY)
@@ -60,7 +59,7 @@
libstagefright_avcenc
LOCAL_SHARED_LIBRARIES := \
- libstagefright \
+ libmedia \
libstagefright_avc_common \
libstagefright_foundation \
libstagefright_omx \
@@ -72,7 +71,6 @@
LOCAL_MODULE_TAGS := optional
LOCAL_CFLAGS += -Werror
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow
include $(BUILD_SHARED_LIBRARY)
@@ -92,7 +90,6 @@
LOCAL_CFLAGS := \
-DOSCL_IMPORT_REF= -DOSCL_UNUSED_ARG= -DOSCL_EXPORT_REF=
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow
LOCAL_STATIC_LIBRARIES := \
diff --git a/media/libstagefright/codecs/avcdec/Android.mk b/media/libstagefright/codecs/avcdec/Android.mk
index ef0dbfd..9da8a6f 100644
--- a/media/libstagefright/codecs/avcdec/Android.mk
+++ b/media/libstagefright/codecs/avcdec/Android.mk
@@ -14,13 +14,12 @@
LOCAL_C_INCLUDES += $(TOP)/frameworks/av/media/libstagefright/include
LOCAL_C_INCLUDES += $(TOP)/frameworks/native/include/media/openmax
-LOCAL_SHARED_LIBRARIES := libstagefright
+LOCAL_SHARED_LIBRARIES := libmedia
LOCAL_SHARED_LIBRARIES += libstagefright_omx
LOCAL_SHARED_LIBRARIES += libstagefright_foundation
LOCAL_SHARED_LIBRARIES += libutils
LOCAL_SHARED_LIBRARIES += liblog
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow
LOCAL_LDFLAGS := -Wl,-Bsymbolic
diff --git a/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp b/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
index 8694c73..e2bba25 100644
--- a/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
+++ b/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
@@ -49,58 +49,10 @@
(IVD_CONTROL_API_COMMAND_TYPE_T)IH264D_CMD_CTL_SET_NUM_CORES
static const CodecProfileLevel kProfileLevels[] = {
- { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel1 },
- { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel1b },
- { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel11 },
- { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel12 },
- { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel13 },
- { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel2 },
- { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel21 },
- { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel22 },
- { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel3 },
- { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel31 },
- { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel32 },
- { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel4 },
- { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel41 },
- { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel42 },
- { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel5 },
- { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel51 },
{ OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel52 },
- { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel1 },
- { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel1b },
- { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel11 },
- { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel12 },
- { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel13 },
- { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel2 },
- { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel21 },
- { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel22 },
- { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel3 },
- { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel31 },
- { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel32 },
- { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel4 },
- { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel41 },
- { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel42 },
- { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel5 },
- { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel51 },
{ OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel52 },
- { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel1 },
- { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel1b },
- { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel11 },
- { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel12 },
- { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel13 },
- { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel2 },
- { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel21 },
- { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel22 },
- { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel3 },
- { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel31 },
- { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel32 },
- { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel4 },
- { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel41 },
- { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel42 },
- { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel5 },
- { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel51 },
{ OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel52 },
};
diff --git a/media/libstagefright/codecs/avcenc/Android.mk b/media/libstagefright/codecs/avcenc/Android.mk
index 523036a..1b1a1a0 100644
--- a/media/libstagefright/codecs/avcenc/Android.mk
+++ b/media/libstagefright/codecs/avcenc/Android.mk
@@ -12,17 +12,14 @@
LOCAL_C_INCLUDES := $(TOP)/external/libavc/encoder
LOCAL_C_INCLUDES += $(TOP)/external/libavc/common
LOCAL_C_INCLUDES += $(TOP)/frameworks/av/media/libstagefright/include
-LOCAL_C_INCLUDES += $(TOP)/frameworks/native/include/media/openmax
-LOCAL_C_INCLUDES += $(TOP)/frameworks/av/media/libstagefright/include
LOCAL_C_INCLUDES += $(TOP)/frameworks/native/include/media/hardware
LOCAL_C_INCLUDES += $(TOP)/frameworks/native/include/media/openmax
-LOCAL_SHARED_LIBRARIES := libstagefright
+LOCAL_SHARED_LIBRARIES := libmedia
LOCAL_SHARED_LIBRARIES += libstagefright_omx
LOCAL_SHARED_LIBRARIES += libutils
LOCAL_SHARED_LIBRARIES += liblog
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow
LOCAL_LDFLAGS := -Wl,-Bsymbolic
diff --git a/media/libstagefright/codecs/flac/enc/Android.mk b/media/libstagefright/codecs/flac/enc/Android.mk
index 7e6e015..a3c37fb 100644
--- a/media/libstagefright/codecs/flac/enc/Android.mk
+++ b/media/libstagefright/codecs/flac/enc/Android.mk
@@ -10,11 +10,10 @@
external/flac/include
LOCAL_CFLAGS += -Werror
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow unsigned-integer-overflow
LOCAL_SHARED_LIBRARIES := \
- libstagefright libstagefright_omx libstagefright_foundation libutils liblog
+ libmedia libstagefright_omx libstagefright_foundation libutils liblog
LOCAL_STATIC_LIBRARIES := \
libFLAC \
diff --git a/media/libstagefright/codecs/g711/dec/Android.mk b/media/libstagefright/codecs/g711/dec/Android.mk
index 8e5a2ff..11978a1 100644
--- a/media/libstagefright/codecs/g711/dec/Android.mk
+++ b/media/libstagefright/codecs/g711/dec/Android.mk
@@ -9,13 +9,12 @@
frameworks/native/include/media/openmax
LOCAL_SHARED_LIBRARIES := \
- libstagefright libstagefright_omx libutils liblog
+ libmedia libstagefright_omx libutils liblog
LOCAL_MODULE := libstagefright_soft_g711dec
LOCAL_MODULE_TAGS := optional
LOCAL_CFLAGS += -Werror
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow unsigned-integer-overflow
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/gsm/dec/Android.mk b/media/libstagefright/codecs/gsm/dec/Android.mk
index a225c31..eed1348 100644
--- a/media/libstagefright/codecs/gsm/dec/Android.mk
+++ b/media/libstagefright/codecs/gsm/dec/Android.mk
@@ -10,11 +10,10 @@
external/libgsm/inc
LOCAL_CFLAGS += -Werror
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow unsigned-integer-overflow
LOCAL_SHARED_LIBRARIES := \
- libstagefright libstagefright_omx libutils liblog
+ libmedia libstagefright_omx libutils liblog
LOCAL_STATIC_LIBRARIES := \
libgsm
diff --git a/media/libstagefright/codecs/hevcdec/Android.mk b/media/libstagefright/codecs/hevcdec/Android.mk
index 78c4637..83e377c 100644
--- a/media/libstagefright/codecs/hevcdec/Android.mk
+++ b/media/libstagefright/codecs/hevcdec/Android.mk
@@ -13,10 +13,9 @@
LOCAL_C_INCLUDES += $(TOP)/external/libhevc/common
LOCAL_C_INCLUDES += $(TOP)/frameworks/av/media/libstagefright/include
LOCAL_C_INCLUDES += $(TOP)/frameworks/native/include/media/openmax
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow
-LOCAL_SHARED_LIBRARIES := libstagefright
+LOCAL_SHARED_LIBRARIES := libmedia
LOCAL_SHARED_LIBRARIES += libstagefright_omx
LOCAL_SHARED_LIBRARIES += libstagefright_foundation
LOCAL_SHARED_LIBRARIES += libutils
diff --git a/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp b/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
index 5c70387..a3fd336 100644
--- a/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
+++ b/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
@@ -48,14 +48,6 @@
(IVD_CONTROL_API_COMMAND_TYPE_T)IHEVCD_CXA_CMD_CTL_SET_NUM_CORES
static const CodecProfileLevel kProfileLevels[] = {
- { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel1 },
- { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel2 },
- { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel21 },
- { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel3 },
- { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel31 },
- { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel4 },
- { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel41 },
- { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel5 },
{ OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel51 },
};
diff --git a/media/libstagefright/codecs/m4v_h263/dec/Android.mk b/media/libstagefright/codecs/m4v_h263/dec/Android.mk
index eb39b44..e83d24d 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/Android.mk
+++ b/media/libstagefright/codecs/m4v_h263/dec/Android.mk
@@ -47,7 +47,6 @@
LOCAL_CFLAGS := -DOSCL_EXPORT_REF= -DOSCL_IMPORT_REF=
LOCAL_CFLAGS += -Werror
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow
include $(BUILD_STATIC_LIBRARY)
@@ -71,13 +70,12 @@
libstagefright_m4vh263dec
LOCAL_SHARED_LIBRARIES := \
- libstagefright libstagefright_omx libstagefright_foundation libutils liblog
+ libmedia libstagefright_omx libstagefright_foundation libutils liblog
LOCAL_MODULE := libstagefright_soft_mpeg4dec
LOCAL_MODULE_TAGS := optional
LOCAL_CFLAGS += -Werror
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/m4v_h263/enc/Android.mk b/media/libstagefright/codecs/m4v_h263/enc/Android.mk
index 9f52538..7b706fe 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/Android.mk
+++ b/media/libstagefright/codecs/m4v_h263/enc/Android.mk
@@ -34,7 +34,6 @@
$(TOP)/frameworks/native/include/media/openmax
LOCAL_CFLAGS += -Werror
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow
include $(BUILD_STATIC_LIBRARY)
@@ -64,7 +63,7 @@
libstagefright_m4vh263enc
LOCAL_SHARED_LIBRARIES := \
- libstagefright \
+ libmedia \
libstagefright_omx \
libutils \
liblog \
@@ -73,7 +72,6 @@
LOCAL_MODULE_TAGS := optional
LOCAL_CFLAGS += -Werror
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow
include $(BUILD_SHARED_LIBRARY)
@@ -89,7 +87,6 @@
$(LOCAL_PATH)/include
LOCAL_CFLAGS := -DOSCL_EXPORT_REF= -DOSCL_IMPORT_REF= -DBX_RC
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow
LOCAL_STATIC_LIBRARIES := \
diff --git a/media/libstagefright/codecs/mp3dec/Android.mk b/media/libstagefright/codecs/mp3dec/Android.mk
index 11581c1..62dce35 100644
--- a/media/libstagefright/codecs/mp3dec/Android.mk
+++ b/media/libstagefright/codecs/mp3dec/Android.mk
@@ -54,7 +54,6 @@
-D"OSCL_UNUSED_ARG(x)=(void)(x)"
LOCAL_CFLAGS += -Werror
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow
LOCAL_MODULE := libstagefright_mp3dec
@@ -77,11 +76,10 @@
$(LOCAL_PATH)/include
LOCAL_CFLAGS += -Werror
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow
LOCAL_SHARED_LIBRARIES := \
- libstagefright libstagefright_omx libstagefright_foundation libutils liblog
+ libmedia libstagefright_omx libstagefright_foundation libutils liblog
LOCAL_STATIC_LIBRARIES := \
libstagefright_mp3dec
@@ -103,7 +101,6 @@
$(LOCAL_PATH)/test/include \
$(call include-path-for, audio-utils)
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow
LOCAL_STATIC_LIBRARIES := \
libstagefright_mp3dec libsndfile
diff --git a/media/libstagefright/codecs/mpeg2dec/Android.mk b/media/libstagefright/codecs/mpeg2dec/Android.mk
index f1c1719..65a081e 100644
--- a/media/libstagefright/codecs/mpeg2dec/Android.mk
+++ b/media/libstagefright/codecs/mpeg2dec/Android.mk
@@ -14,14 +14,13 @@
LOCAL_C_INCLUDES += $(TOP)/frameworks/av/media/libstagefright/include
LOCAL_C_INCLUDES += $(TOP)/frameworks/native/include/media/openmax
-LOCAL_SHARED_LIBRARIES := libstagefright
+LOCAL_SHARED_LIBRARIES := libmedia
LOCAL_SHARED_LIBRARIES += libstagefright_omx
LOCAL_SHARED_LIBRARIES += libstagefright_foundation
LOCAL_SHARED_LIBRARIES += libutils
LOCAL_SHARED_LIBRARIES += liblog
LOCAL_LDFLAGS := -Wl,-Bsymbolic
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp b/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp
index 5ed037a..ce28faf 100644
--- a/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp
+++ b/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp
@@ -47,14 +47,8 @@
(IVD_CONTROL_API_COMMAND_TYPE_T)IMPEG2D_CMD_CTL_SET_NUM_CORES
static const CodecProfileLevel kProfileLevels[] = {
- { OMX_VIDEO_MPEG2ProfileSimple, OMX_VIDEO_MPEG2LevelLL },
- { OMX_VIDEO_MPEG2ProfileSimple, OMX_VIDEO_MPEG2LevelML },
- { OMX_VIDEO_MPEG2ProfileSimple, OMX_VIDEO_MPEG2LevelH14 },
{ OMX_VIDEO_MPEG2ProfileSimple, OMX_VIDEO_MPEG2LevelHL },
- { OMX_VIDEO_MPEG2ProfileMain , OMX_VIDEO_MPEG2LevelLL },
- { OMX_VIDEO_MPEG2ProfileMain , OMX_VIDEO_MPEG2LevelML },
- { OMX_VIDEO_MPEG2ProfileMain , OMX_VIDEO_MPEG2LevelH14 },
{ OMX_VIDEO_MPEG2ProfileMain , OMX_VIDEO_MPEG2LevelHL },
};
diff --git a/media/libstagefright/codecs/on2/dec/Android.mk b/media/libstagefright/codecs/on2/dec/Android.mk
index 76f7600..de1e937 100644
--- a/media/libstagefright/codecs/on2/dec/Android.mk
+++ b/media/libstagefright/codecs/on2/dec/Android.mk
@@ -15,13 +15,12 @@
libvpx
LOCAL_SHARED_LIBRARIES := \
- libstagefright libstagefright_omx libstagefright_foundation libutils liblog
+ libmedia libstagefright_omx libstagefright_foundation libutils liblog
LOCAL_MODULE := libstagefright_soft_vpxdec
LOCAL_MODULE_TAGS := optional
LOCAL_CFLAGS += -Werror
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow unsigned-integer-overflow
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/on2/enc/Android.mk b/media/libstagefright/codecs/on2/enc/Android.mk
index 522672b..5c85bbd 100644
--- a/media/libstagefright/codecs/on2/enc/Android.mk
+++ b/media/libstagefright/codecs/on2/enc/Android.mk
@@ -2,7 +2,9 @@
include $(CLEAR_VARS)
LOCAL_SRC_FILES := \
- SoftVPXEncoder.cpp
+ SoftVPXEncoder.cpp \
+ SoftVP8Encoder.cpp \
+ SoftVP9Encoder.cpp
LOCAL_C_INCLUDES := \
$(TOP)/external/libvpx/libvpx \
@@ -11,14 +13,13 @@
frameworks/av/media/libstagefright/include \
frameworks/native/include/media/openmax \
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow unsigned-integer-overflow
LOCAL_STATIC_LIBRARIES := \
libvpx
LOCAL_SHARED_LIBRARIES := \
- libstagefright libstagefright_omx libstagefright_foundation libutils liblog \
+ libmedia libstagefright_omx libstagefright_foundation libutils liblog \
LOCAL_MODULE := libstagefright_soft_vpxenc
LOCAL_MODULE_TAGS := optional
diff --git a/media/libstagefright/codecs/on2/enc/SoftVP8Encoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVP8Encoder.cpp
new file mode 100644
index 0000000..04737a9
--- /dev/null
+++ b/media/libstagefright/codecs/on2/enc/SoftVP8Encoder.cpp
@@ -0,0 +1,162 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "SoftVP8Encoder"
+#include "SoftVP8Encoder.h"
+
+#include <utils/Log.h>
+#include <utils/misc.h>
+
+#include <media/hardware/HardwareAPI.h>
+#include <media/hardware/MetadataBufferType.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaDefs.h>
+
+#ifndef INT32_MAX
+#define INT32_MAX 2147483647
+#endif
+
+namespace android {
+
+static const CodecProfileLevel kVp8ProfileLevels[] = {
+ { OMX_VIDEO_VP8ProfileMain, OMX_VIDEO_VP8Level_Version0 },
+ { OMX_VIDEO_VP8ProfileMain, OMX_VIDEO_VP8Level_Version1 },
+ { OMX_VIDEO_VP8ProfileMain, OMX_VIDEO_VP8Level_Version2 },
+ { OMX_VIDEO_VP8ProfileMain, OMX_VIDEO_VP8Level_Version3 },
+};
+
+SoftVP8Encoder::SoftVP8Encoder(const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component)
+ : SoftVPXEncoder(
+ name, callbacks, appData, component, "video_encoder.vp8",
+ OMX_VIDEO_CodingVP8, MEDIA_MIMETYPE_VIDEO_VP8, 2,
+ kVp8ProfileLevels, NELEM(kVp8ProfileLevels)),
+ mDCTPartitions(0),
+ mLevel(OMX_VIDEO_VP8Level_Version0) {
+}
+
+void SoftVP8Encoder::setCodecSpecificInterface() {
+ mCodecInterface = vpx_codec_vp8_cx();
+}
+
+void SoftVP8Encoder::setCodecSpecificConfiguration() {
+ switch (mLevel) {
+ case OMX_VIDEO_VP8Level_Version0:
+ mCodecConfiguration->g_profile = 0;
+ break;
+
+ case OMX_VIDEO_VP8Level_Version1:
+ mCodecConfiguration->g_profile = 1;
+ break;
+
+ case OMX_VIDEO_VP8Level_Version2:
+ mCodecConfiguration->g_profile = 2;
+ break;
+
+ case OMX_VIDEO_VP8Level_Version3:
+ mCodecConfiguration->g_profile = 3;
+ break;
+
+ default:
+ mCodecConfiguration->g_profile = 0;
+ }
+}
+
+vpx_codec_err_t SoftVP8Encoder::setCodecSpecificControls() {
+ vpx_codec_err_t codec_return = vpx_codec_control(mCodecContext,
+ VP8E_SET_TOKEN_PARTITIONS,
+ mDCTPartitions);
+ if (codec_return != VPX_CODEC_OK) {
+ ALOGE("Error setting dct partitions for vpx encoder.");
+ }
+ return codec_return;
+}
+
+OMX_ERRORTYPE SoftVP8Encoder::internalGetParameter(OMX_INDEXTYPE index,
+ OMX_PTR param) {
+ // can include extension index OMX_INDEXEXTTYPE
+ const int32_t indexFull = index;
+
+ switch (indexFull) {
+ case OMX_IndexParamVideoVp8:
+ return internalGetVp8Params(
+ (OMX_VIDEO_PARAM_VP8TYPE *)param);
+
+ default:
+ return SoftVPXEncoder::internalGetParameter(index, param);
+ }
+}
+
+OMX_ERRORTYPE SoftVP8Encoder::internalSetParameter(OMX_INDEXTYPE index,
+ const OMX_PTR param) {
+ // can include extension index OMX_INDEXEXTTYPE
+ const int32_t indexFull = index;
+
+ switch (indexFull) {
+ case OMX_IndexParamVideoVp8:
+ return internalSetVp8Params(
+ (const OMX_VIDEO_PARAM_VP8TYPE *)param);
+
+ default:
+ return SoftVPXEncoder::internalSetParameter(index, param);
+ }
+}
+
+OMX_ERRORTYPE SoftVP8Encoder::internalGetVp8Params(
+ OMX_VIDEO_PARAM_VP8TYPE* vp8Params) {
+ if (vp8Params->nPortIndex != kOutputPortIndex) {
+ return OMX_ErrorUnsupportedIndex;
+ }
+
+ vp8Params->eProfile = OMX_VIDEO_VP8ProfileMain;
+ vp8Params->eLevel = mLevel;
+ vp8Params->bErrorResilientMode = mErrorResilience;
+ vp8Params->nDCTPartitions = mDCTPartitions;
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SoftVP8Encoder::internalSetVp8Params(
+ const OMX_VIDEO_PARAM_VP8TYPE* vp8Params) {
+ if (vp8Params->nPortIndex != kOutputPortIndex) {
+ return OMX_ErrorUnsupportedIndex;
+ }
+
+ if (vp8Params->eProfile != OMX_VIDEO_VP8ProfileMain) {
+ return OMX_ErrorBadParameter;
+ }
+
+ if (vp8Params->eLevel == OMX_VIDEO_VP8Level_Version0 ||
+ vp8Params->eLevel == OMX_VIDEO_VP8Level_Version1 ||
+ vp8Params->eLevel == OMX_VIDEO_VP8Level_Version2 ||
+ vp8Params->eLevel == OMX_VIDEO_VP8Level_Version3) {
+ mLevel = vp8Params->eLevel;
+ } else {
+ return OMX_ErrorBadParameter;
+ }
+
+ mErrorResilience = vp8Params->bErrorResilientMode;
+ if (vp8Params->nDCTPartitions <= kMaxDCTPartitions) {
+ mDCTPartitions = vp8Params->nDCTPartitions;
+ } else {
+ return OMX_ErrorBadParameter;
+ }
+ return OMX_ErrorNone;
+}
+
+} // namespace android
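A rough client-side sketch of the VP8-specific handlers above (hypothetical, not part of the patch; it assumes the standard OMX IL headers, an already-initialized component handle, and that a real client would also fill nVersion via the usual InitOMXParams-style helper):

    #include <string.h>
    #include <OMX_Core.h>
    #include <OMX_Video.h>
    #include <OMX_VideoExt.h>

    // Configure the VP8 knobs routed through internalGetVp8Params()/internalSetVp8Params().
    static OMX_ERRORTYPE configureVp8(OMX_HANDLETYPE hComponent) {
        OMX_VIDEO_PARAM_VP8TYPE vp8Params;
        memset(&vp8Params, 0, sizeof(vp8Params));
        vp8Params.nSize = sizeof(vp8Params);
        vp8Params.nPortIndex = 1;  // output port (kOutputPortIndex in the component)
        OMX_ERRORTYPE err = OMX_GetParameter(
                hComponent, OMX_IndexParamVideoVp8, &vp8Params);
        if (err != OMX_ErrorNone) {
            return err;
        }
        vp8Params.eLevel = OMX_VIDEO_VP8Level_Version0;
        vp8Params.nDCTPartitions = 2;  // accepted only while <= kMaxDCTPartitions (3)
        return OMX_SetParameter(hComponent, OMX_IndexParamVideoVp8, &vp8Params);
    }

Anything outside the ranges checked above (a profile other than OMX_VIDEO_VP8ProfileMain, an unknown level, or more than 3 DCT partitions) is rejected with OMX_ErrorBadParameter.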
diff --git a/media/libstagefright/codecs/on2/enc/SoftVP8Encoder.h b/media/libstagefright/codecs/on2/enc/SoftVP8Encoder.h
new file mode 100644
index 0000000..b4904bf
--- /dev/null
+++ b/media/libstagefright/codecs/on2/enc/SoftVP8Encoder.h
@@ -0,0 +1,95 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SOFT_VP8_ENCODER_H_
+
+#define SOFT_VP8_ENCODER_H_
+
+#include "SoftVPXEncoder.h"
+
+#include <OMX_VideoExt.h>
+#include <OMX_IndexExt.h>
+
+#include <hardware/gralloc.h>
+
+#include "vpx/vpx_encoder.h"
+#include "vpx/vpx_codec.h"
+#include "vpx/vp8cx.h"
+
+namespace android {
+
+// Exposes a vp8 encoder as an OMX Component
+//
+// In addition to the base class settings, only the following encoder settings
+// are available:
+// - token partitioning
+struct SoftVP8Encoder : public SoftVPXEncoder {
+ SoftVP8Encoder(const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component);
+
+protected:
+ // Returns current values for requested OMX
+ // parameters
+ virtual OMX_ERRORTYPE internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR param);
+
+ // Validates, extracts and stores relevant OMX
+ // parameters
+ virtual OMX_ERRORTYPE internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR param);
+
+ // Populates |mCodecInterface| with codec specific settings.
+ virtual void setCodecSpecificInterface();
+
+ // Sets codec specific configuration.
+ virtual void setCodecSpecificConfiguration();
+
+ // Initializes codec specific encoder settings.
+ virtual vpx_codec_err_t setCodecSpecificControls();
+
+ // Gets vp8 specific parameters.
+ OMX_ERRORTYPE internalGetVp8Params(
+ OMX_VIDEO_PARAM_VP8TYPE* vp8Params);
+
+ // Handles vp8 specific parameters.
+ OMX_ERRORTYPE internalSetVp8Params(
+ const OMX_VIDEO_PARAM_VP8TYPE* vp8Params);
+
+private:
+ // Max value supported for DCT partitions
+ static const uint32_t kMaxDCTPartitions = 3;
+
+ // vp8 specific configuration parameter
+ // that enables token partitioning of
+ // the stream into substreams
+ int32_t mDCTPartitions;
+
+ // Encoder profile corresponding to OMX level parameter
+ //
+ // The inconsistency in the naming is caused by the
+ // OMX spec referring to vpx profiles (g_profile)
+ // as "levels" while using the name "profile" for
+ // something else.
+ OMX_VIDEO_VP8LEVELTYPE mLevel;
+
+ DISALLOW_EVIL_CONSTRUCTORS(SoftVP8Encoder);
+};
+
+} // namespace android
+
+#endif // SOFT_VP8_ENCODER_H_
diff --git a/media/libstagefright/codecs/on2/enc/SoftVP9Encoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVP9Encoder.cpp
new file mode 100644
index 0000000..4c7290d
--- /dev/null
+++ b/media/libstagefright/codecs/on2/enc/SoftVP9Encoder.cpp
@@ -0,0 +1,162 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "SoftVP9Encoder"
+#include "SoftVP9Encoder.h"
+
+#include <utils/Log.h>
+#include <utils/misc.h>
+
+#include <media/hardware/HardwareAPI.h>
+#include <media/hardware/MetadataBufferType.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaDefs.h>
+
+namespace android {
+
+static const CodecProfileLevel kVp9ProfileLevels[] = {
+ { OMX_VIDEO_VP9Profile0, OMX_VIDEO_VP9Level41 },
+};
+
+SoftVP9Encoder::SoftVP9Encoder(
+ const char *name, const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData,
+ OMX_COMPONENTTYPE **component)
+ : SoftVPXEncoder(
+ name, callbacks, appData, component, "video_encoder.vp9",
+ OMX_VIDEO_CodingVP9, MEDIA_MIMETYPE_VIDEO_VP9, 4,
+ kVp9ProfileLevels, NELEM(kVp9ProfileLevels)),
+ mLevel(OMX_VIDEO_VP9Level1),
+ mTileColumns(0),
+ mFrameParallelDecoding(OMX_FALSE) {
+}
+
+void SoftVP9Encoder::setCodecSpecificInterface() {
+ mCodecInterface = vpx_codec_vp9_cx();
+}
+
+void SoftVP9Encoder::setCodecSpecificConfiguration() {
+ mCodecConfiguration->g_profile = 0;
+}
+
+vpx_codec_err_t SoftVP9Encoder::setCodecSpecificControls() {
+ vpx_codec_err_t codecReturn = vpx_codec_control(
+ mCodecContext, VP9E_SET_TILE_COLUMNS, mTileColumns);
+ if (codecReturn != VPX_CODEC_OK) {
+ ALOGE("Error setting VP9E_SET_TILE_COLUMNS to %d. vpx_codec_control() "
+ "returned %d", mTileColumns, codecReturn);
+ return codecReturn;
+ }
+ codecReturn = vpx_codec_control(
+ mCodecContext, VP9E_SET_FRAME_PARALLEL_DECODING,
+ mFrameParallelDecoding);
+ if (codecReturn != VPX_CODEC_OK) {
+ ALOGE("Error setting VP9E_SET_FRAME_PARALLEL_DECODING to %d."
+ "vpx_codec_control() returned %d", mFrameParallelDecoding,
+ codecReturn);
+ return codecReturn;
+ }
+ // For VP9, we always set CPU_USED to 8 (because the realtime default is 0
+ // which is too slow).
+ codecReturn = vpx_codec_control(mCodecContext, VP8E_SET_CPUUSED, 8);
+ if (codecReturn != VPX_CODEC_OK) {
+ ALOGE("Error setting VP8E_SET_CPUUSED to 8. vpx_codec_control() "
+ "returned %d", codecReturn);
+ return codecReturn;
+ }
+ return codecReturn;
+}
+
+OMX_ERRORTYPE SoftVP9Encoder::internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR param) {
+ // can include extension index OMX_INDEXEXTTYPE
+ const int32_t indexFull = index;
+
+ switch (indexFull) {
+ case OMX_IndexParamVideoVp9:
+ return internalGetVp9Params(
+ (OMX_VIDEO_PARAM_VP9TYPE *)param);
+
+ default:
+ return SoftVPXEncoder::internalGetParameter(index, param);
+ }
+}
+
+OMX_ERRORTYPE SoftVP9Encoder::internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR param) {
+ // can include extension index OMX_INDEXEXTTYPE
+ const int32_t indexFull = index;
+
+ switch (indexFull) {
+ case OMX_IndexParamVideoVp9:
+ return internalSetVp9Params(
+ (const OMX_VIDEO_PARAM_VP9TYPE *)param);
+
+ default:
+ return SoftVPXEncoder::internalSetParameter(index, param);
+ }
+}
+
+OMX_ERRORTYPE SoftVP9Encoder::internalGetVp9Params(
+ OMX_VIDEO_PARAM_VP9TYPE *vp9Params) {
+ if (vp9Params->nPortIndex != kOutputPortIndex) {
+ return OMX_ErrorUnsupportedIndex;
+ }
+
+ vp9Params->eProfile = OMX_VIDEO_VP9Profile0;
+ vp9Params->eLevel = mLevel;
+ vp9Params->bErrorResilientMode = mErrorResilience;
+ vp9Params->nTileColumns = mTileColumns;
+ vp9Params->bEnableFrameParallelDecoding = mFrameParallelDecoding;
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SoftVP9Encoder::internalSetVp9Params(
+ const OMX_VIDEO_PARAM_VP9TYPE *vp9Params) {
+ if (vp9Params->nPortIndex != kOutputPortIndex) {
+ return OMX_ErrorUnsupportedIndex;
+ }
+
+ if (vp9Params->eProfile != OMX_VIDEO_VP9Profile0) {
+ return OMX_ErrorBadParameter;
+ }
+
+ if (vp9Params->eLevel == OMX_VIDEO_VP9Level1 ||
+ vp9Params->eLevel == OMX_VIDEO_VP9Level11 ||
+ vp9Params->eLevel == OMX_VIDEO_VP9Level2 ||
+ vp9Params->eLevel == OMX_VIDEO_VP9Level21 ||
+ vp9Params->eLevel == OMX_VIDEO_VP9Level3 ||
+ vp9Params->eLevel == OMX_VIDEO_VP9Level31 ||
+ vp9Params->eLevel == OMX_VIDEO_VP9Level4 ||
+ vp9Params->eLevel == OMX_VIDEO_VP9Level41 ||
+ vp9Params->eLevel == OMX_VIDEO_VP9Level5 ||
+ vp9Params->eLevel == OMX_VIDEO_VP9Level51 ||
+ vp9Params->eLevel == OMX_VIDEO_VP9Level52 ||
+ vp9Params->eLevel == OMX_VIDEO_VP9Level6 ||
+ vp9Params->eLevel == OMX_VIDEO_VP9Level61 ||
+ vp9Params->eLevel == OMX_VIDEO_VP9Level62) {
+ mLevel = vp9Params->eLevel;
+ } else {
+ return OMX_ErrorBadParameter;
+ }
+
+ mErrorResilience = vp9Params->bErrorResilientMode;
+ mTileColumns = vp9Params->nTileColumns;
+ mFrameParallelDecoding = vp9Params->bEnableFrameParallelDecoding;
+ return OMX_ErrorNone;
+}
+
+} // namespace android
diff --git a/media/libstagefright/codecs/on2/enc/SoftVP9Encoder.h b/media/libstagefright/codecs/on2/enc/SoftVP9Encoder.h
new file mode 100644
index 0000000..85df69a
--- /dev/null
+++ b/media/libstagefright/codecs/on2/enc/SoftVP9Encoder.h
@@ -0,0 +1,93 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SOFT_VP9_ENCODER_H_
+
+#define SOFT_VP9_ENCODER_H_
+
+#include "SoftVPXEncoder.h"
+
+#include <OMX_VideoExt.h>
+#include <OMX_IndexExt.h>
+
+#include <hardware/gralloc.h>
+
+#include "vpx/vpx_encoder.h"
+#include "vpx/vpx_codec.h"
+#include "vpx/vp8cx.h"
+
+namespace android {
+
+// Exposes a VP9 encoder as an OMX Component
+//
+// In addition to the base class settings, only the following encoder settings
+// are available:
+// - tile rows
+// - tile columns
+// - frame parallel mode
+struct SoftVP9Encoder : public SoftVPXEncoder {
+ SoftVP9Encoder(const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component);
+
+protected:
+ // Returns current values for requested OMX
+ // parameters
+ virtual OMX_ERRORTYPE internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR param);
+
+ // Validates, extracts and stores relevant OMX
+ // parameters
+ virtual OMX_ERRORTYPE internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR param);
+
+ // Populates |mCodecInterface| with codec specific settings.
+ virtual void setCodecSpecificInterface();
+
+ // Sets codec specific configuration.
+ virtual void setCodecSpecificConfiguration();
+
+ // Initializes codec specific encoder settings.
+ virtual vpx_codec_err_t setCodecSpecificControls();
+
+ // Gets vp9 specific parameters.
+ OMX_ERRORTYPE internalGetVp9Params(
+ OMX_VIDEO_PARAM_VP9TYPE* vp9Params);
+
+ // Handles vp9 specific parameters.
+ OMX_ERRORTYPE internalSetVp9Params(
+ const OMX_VIDEO_PARAM_VP9TYPE* vp9Params);
+
+private:
+ // Encoder profile corresponding to OMX level parameter
+ //
+ // The inconsistency in the naming is caused by
+ // OMX spec referring vpx profiles (g_profile)
+ // as "levels" whereas using the name "profile" for
+ // something else.
+ OMX_VIDEO_VP9LEVELTYPE mLevel;
+
+ int32_t mTileColumns;
+
+ OMX_BOOL mFrameParallelDecoding;
+
+ DISALLOW_EVIL_CONSTRUCTORS(SoftVP9Encoder);
+};
+
+} // namespace android
+
+#endif // SOFT_VP9_ENCODER_H_
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
index 5edfbb5..5609032 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
@@ -18,6 +18,9 @@
#define LOG_TAG "SoftVPXEncoder"
#include "SoftVPXEncoder.h"
+#include "SoftVP8Encoder.h"
+#include "SoftVP9Encoder.h"
+
#include <utils/Log.h>
#include <utils/misc.h>
@@ -42,7 +45,6 @@
params->nVersion.s.nStep = 0;
}
-
static int GetCPUCoreCount() {
int cpuCoreCount = 1;
#if defined(_SC_NPROCESSORS_ONLN)
@@ -55,30 +57,26 @@
return cpuCoreCount;
}
-static const CodecProfileLevel kProfileLevels[] = {
- { OMX_VIDEO_VP8ProfileMain, OMX_VIDEO_VP8Level_Version0 },
- { OMX_VIDEO_VP8ProfileMain, OMX_VIDEO_VP8Level_Version1 },
- { OMX_VIDEO_VP8ProfileMain, OMX_VIDEO_VP8Level_Version2 },
- { OMX_VIDEO_VP8ProfileMain, OMX_VIDEO_VP8Level_Version3 },
-};
-
SoftVPXEncoder::SoftVPXEncoder(const char *name,
const OMX_CALLBACKTYPE *callbacks,
OMX_PTR appData,
- OMX_COMPONENTTYPE **component)
+ OMX_COMPONENTTYPE **component,
+ const char* role,
+ OMX_VIDEO_CODINGTYPE codingType,
+ const char* mimeType,
+ int32_t minCompressionRatio,
+ const CodecProfileLevel *profileLevels,
+ size_t numProfileLevels)
: SoftVideoEncoderOMXComponent(
- name, "video_encoder.vp8", OMX_VIDEO_CodingVP8,
- kProfileLevels, NELEM(kProfileLevels),
+ name, role, codingType, profileLevels, numProfileLevels,
176 /* width */, 144 /* height */,
callbacks, appData, component),
mCodecContext(NULL),
mCodecConfiguration(NULL),
mCodecInterface(NULL),
mBitrateUpdated(false),
- mBitrateControlMode(VPX_VBR), // variable bitrate
- mDCTPartitions(0),
+ mBitrateControlMode(VPX_VBR),
mErrorResilience(OMX_FALSE),
- mLevel(OMX_VIDEO_VP8Level_Version0),
mKeyFrameInterval(0),
mMinQuantizer(0),
mMaxQuantizer(0),
@@ -96,10 +94,9 @@
initPorts(
kNumBuffers, kNumBuffers, kMinOutputBufferSize,
- MEDIA_MIMETYPE_VIDEO_VP8, 2 /* minCompressionRatio */);
+ mimeType, minCompressionRatio);
}
-
SoftVPXEncoder::~SoftVPXEncoder() {
releaseEncoder();
}
@@ -108,18 +105,18 @@
vpx_codec_err_t codec_return;
status_t result = UNKNOWN_ERROR;
- mCodecInterface = vpx_codec_vp8_cx();
+ setCodecSpecificInterface();
if (mCodecInterface == NULL) {
goto CLEAN_UP;
}
- ALOGD("VP8: initEncoder. BRMode: %u. TSLayers: %zu. KF: %u. QP: %u - %u",
+ ALOGD("VPx: initEncoder. BRMode: %u. TSLayers: %zu. KF: %u. QP: %u - %u",
(uint32_t)mBitrateControlMode, mTemporalLayers, mKeyFrameInterval,
mMinQuantizer, mMaxQuantizer);
mCodecConfiguration = new vpx_codec_enc_cfg_t;
codec_return = vpx_codec_enc_config_default(mCodecInterface,
mCodecConfiguration,
- 0); // Codec specific flags
+ 0);
if (codec_return != VPX_CODEC_OK) {
ALOGE("Error populating default configuration for vpx encoder.");
@@ -131,27 +128,6 @@
mCodecConfiguration->g_threads = GetCPUCoreCount();
mCodecConfiguration->g_error_resilient = mErrorResilience;
- switch (mLevel) {
- case OMX_VIDEO_VP8Level_Version0:
- mCodecConfiguration->g_profile = 0;
- break;
-
- case OMX_VIDEO_VP8Level_Version1:
- mCodecConfiguration->g_profile = 1;
- break;
-
- case OMX_VIDEO_VP8Level_Version2:
- mCodecConfiguration->g_profile = 2;
- break;
-
- case OMX_VIDEO_VP8Level_Version3:
- mCodecConfiguration->g_profile = 3;
- break;
-
- default:
- mCodecConfiguration->g_profile = 0;
- }
-
// OMX timebase unit is microsecond
// g_timebase is in seconds (i.e. 1/1000000 seconds)
mCodecConfiguration->g_timebase.num = 1;
@@ -253,7 +229,6 @@
goto CLEAN_UP;
}
}
-
// Set bitrate values for each layer
for (size_t i = 0; i < mCodecConfiguration->ts_number_layers; i++) {
mCodecConfiguration->ts_target_bitrate[i] =
@@ -271,7 +246,7 @@
if (mMaxQuantizer > 0) {
mCodecConfiguration->rc_max_quantizer = mMaxQuantizer;
}
-
+ setCodecSpecificConfiguration();
mCodecContext = new vpx_codec_ctx_t;
codec_return = vpx_codec_enc_init(mCodecContext,
mCodecInterface,
@@ -283,14 +258,6 @@
goto CLEAN_UP;
}
- codec_return = vpx_codec_control(mCodecContext,
- VP8E_SET_TOKEN_PARTITIONS,
- mDCTPartitions);
- if (codec_return != VPX_CODEC_OK) {
- ALOGE("Error setting dct partitions for vpx encoder.");
- goto CLEAN_UP;
- }
-
// Extra CBR settings
if (mBitrateControlMode == VPX_CBR) {
codec_return = vpx_codec_control(mCodecContext,
@@ -318,6 +285,13 @@
}
}
+ codec_return = setCodecSpecificControls();
+
+ if (codec_return != VPX_CODEC_OK) {
+ // The codec specific method would have logged the error.
+ goto CLEAN_UP;
+ }
+
if (mColorFormat != OMX_COLOR_FormatYUV420Planar || mInputDataIsMeta) {
free(mConversionBuffer);
mConversionBuffer = NULL;
@@ -338,7 +312,6 @@
return result;
}
-
status_t SoftVPXEncoder::releaseEncoder() {
if (mCodecContext != NULL) {
vpx_codec_destroy(mCodecContext);
@@ -362,7 +335,6 @@
return OK;
}
-
OMX_ERRORTYPE SoftVPXEncoder::internalGetParameter(OMX_INDEXTYPE index,
OMX_PTR param) {
// can include extension index OMX_INDEXEXTTYPE
@@ -393,54 +365,15 @@
return OMX_ErrorNone;
}
- // VP8 specific parameters that use extension headers
- case OMX_IndexParamVideoVp8: {
- OMX_VIDEO_PARAM_VP8TYPE *vp8Params =
- (OMX_VIDEO_PARAM_VP8TYPE *)param;
-
- if (!isValidOMXParam(vp8Params)) {
- return OMX_ErrorBadParameter;
- }
-
- if (vp8Params->nPortIndex != kOutputPortIndex) {
- return OMX_ErrorUnsupportedIndex;
- }
-
- vp8Params->eProfile = OMX_VIDEO_VP8ProfileMain;
- vp8Params->eLevel = mLevel;
- vp8Params->nDCTPartitions = mDCTPartitions;
- vp8Params->bErrorResilientMode = mErrorResilience;
- return OMX_ErrorNone;
- }
-
- case OMX_IndexParamVideoAndroidVp8Encoder: {
- OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *vp8AndroidParams =
- (OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *)param;
-
- if (!isValidOMXParam(vp8AndroidParams)) {
- return OMX_ErrorBadParameter;
- }
-
- if (vp8AndroidParams->nPortIndex != kOutputPortIndex) {
- return OMX_ErrorUnsupportedIndex;
- }
-
- vp8AndroidParams->nKeyFrameInterval = mKeyFrameInterval;
- vp8AndroidParams->eTemporalPattern = mTemporalPatternType;
- vp8AndroidParams->nTemporalLayerCount = mTemporalLayers;
- vp8AndroidParams->nMinQuantizer = mMinQuantizer;
- vp8AndroidParams->nMaxQuantizer = mMaxQuantizer;
- memcpy(vp8AndroidParams->nTemporalLayerBitrateRatio,
- mTemporalLayerBitrateRatio, sizeof(mTemporalLayerBitrateRatio));
- return OMX_ErrorNone;
- }
+ case OMX_IndexParamVideoAndroidVp8Encoder:
+ return internalGetAndroidVpxParams(
+ (OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *)param);
default:
return SoftVideoEncoderOMXComponent::internalGetParameter(index, param);
}
}
-
OMX_ERRORTYPE SoftVPXEncoder::internalSetParameter(OMX_INDEXTYPE index,
const OMX_PTR param) {
// can include extension index OMX_INDEXEXTTYPE
@@ -458,27 +391,9 @@
return internalSetBitrateParams(bitRate);
}
- case OMX_IndexParamVideoVp8: {
- const OMX_VIDEO_PARAM_VP8TYPE *vp8Params =
- (const OMX_VIDEO_PARAM_VP8TYPE*) param;
-
- if (!isValidOMXParam(vp8Params)) {
- return OMX_ErrorBadParameter;
- }
-
- return internalSetVp8Params(vp8Params);
- }
-
- case OMX_IndexParamVideoAndroidVp8Encoder: {
- const OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *vp8AndroidParams =
- (const OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE*) param;
-
- if (!isValidOMXParam(vp8AndroidParams)) {
- return OMX_ErrorBadParameter;
- }
-
- return internalSetAndroidVp8Params(vp8AndroidParams);
- }
+ case OMX_IndexParamVideoAndroidVp8Encoder:
+ return internalSetAndroidVpxParams(
+ (const OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *)param);
default:
return SoftVideoEncoderOMXComponent::internalSetParameter(index, param);
@@ -530,77 +445,21 @@
}
}
-OMX_ERRORTYPE SoftVPXEncoder::internalSetVp8Params(
- const OMX_VIDEO_PARAM_VP8TYPE* vp8Params) {
- if (vp8Params->nPortIndex != kOutputPortIndex) {
+OMX_ERRORTYPE SoftVPXEncoder::internalGetBitrateParams(
+ OMX_VIDEO_PARAM_BITRATETYPE* bitrate) {
+ if (bitrate->nPortIndex != kOutputPortIndex) {
return OMX_ErrorUnsupportedIndex;
}
- if (vp8Params->eProfile != OMX_VIDEO_VP8ProfileMain) {
- return OMX_ErrorBadParameter;
- }
+ bitrate->nTargetBitrate = mBitrate;
- if (vp8Params->eLevel == OMX_VIDEO_VP8Level_Version0 ||
- vp8Params->eLevel == OMX_VIDEO_VP8Level_Version1 ||
- vp8Params->eLevel == OMX_VIDEO_VP8Level_Version2 ||
- vp8Params->eLevel == OMX_VIDEO_VP8Level_Version3) {
- mLevel = vp8Params->eLevel;
+ if (mBitrateControlMode == VPX_VBR) {
+ bitrate->eControlRate = OMX_Video_ControlRateVariable;
+ } else if (mBitrateControlMode == VPX_CBR) {
+ bitrate->eControlRate = OMX_Video_ControlRateConstant;
} else {
- return OMX_ErrorBadParameter;
+ return OMX_ErrorUnsupportedSetting;
}
-
- if (vp8Params->nDCTPartitions <= kMaxDCTPartitions) {
- mDCTPartitions = vp8Params->nDCTPartitions;
- } else {
- return OMX_ErrorBadParameter;
- }
-
- mErrorResilience = vp8Params->bErrorResilientMode;
- return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE SoftVPXEncoder::internalSetAndroidVp8Params(
- const OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE* vp8AndroidParams) {
- if (vp8AndroidParams->nPortIndex != kOutputPortIndex) {
- return OMX_ErrorUnsupportedIndex;
- }
- if (vp8AndroidParams->eTemporalPattern != OMX_VIDEO_VPXTemporalLayerPatternNone &&
- vp8AndroidParams->eTemporalPattern != OMX_VIDEO_VPXTemporalLayerPatternWebRTC) {
- return OMX_ErrorBadParameter;
- }
- if (vp8AndroidParams->nTemporalLayerCount > OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS) {
- return OMX_ErrorBadParameter;
- }
- if (vp8AndroidParams->nMinQuantizer > vp8AndroidParams->nMaxQuantizer) {
- return OMX_ErrorBadParameter;
- }
-
- mTemporalPatternType = vp8AndroidParams->eTemporalPattern;
- if (vp8AndroidParams->eTemporalPattern == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) {
- mTemporalLayers = vp8AndroidParams->nTemporalLayerCount;
- } else if (vp8AndroidParams->eTemporalPattern == OMX_VIDEO_VPXTemporalLayerPatternNone) {
- mTemporalLayers = 0;
- }
- // Check the bitrate distribution between layers is in increasing order
- if (mTemporalLayers > 1) {
- for (size_t i = 0; i < mTemporalLayers - 1; i++) {
- if (vp8AndroidParams->nTemporalLayerBitrateRatio[i + 1] <=
- vp8AndroidParams->nTemporalLayerBitrateRatio[i]) {
- ALOGE("Wrong bitrate ratio - should be in increasing order.");
- return OMX_ErrorBadParameter;
- }
- }
- }
- mKeyFrameInterval = vp8AndroidParams->nKeyFrameInterval;
- mMinQuantizer = vp8AndroidParams->nMinQuantizer;
- mMaxQuantizer = vp8AndroidParams->nMaxQuantizer;
- memcpy(mTemporalLayerBitrateRatio, vp8AndroidParams->nTemporalLayerBitrateRatio,
- sizeof(mTemporalLayerBitrateRatio));
- ALOGD("VP8: internalSetAndroidVp8Params. BRMode: %u. TS: %zu. KF: %u."
- " QP: %u - %u BR0: %u. BR1: %u. BR2: %u",
- (uint32_t)mBitrateControlMode, mTemporalLayers, mKeyFrameInterval,
- mMinQuantizer, mMaxQuantizer, mTemporalLayerBitrateRatio[0],
- mTemporalLayerBitrateRatio[1], mTemporalLayerBitrateRatio[2]);
return OMX_ErrorNone;
}
@@ -623,71 +482,134 @@
return OMX_ErrorNone;
}
+OMX_ERRORTYPE SoftVPXEncoder::internalGetAndroidVpxParams(
+ OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *vpxAndroidParams) {
+ if (vpxAndroidParams->nPortIndex != kOutputPortIndex) {
+ return OMX_ErrorUnsupportedIndex;
+ }
+
+ vpxAndroidParams->nKeyFrameInterval = mKeyFrameInterval;
+ vpxAndroidParams->eTemporalPattern = mTemporalPatternType;
+ vpxAndroidParams->nTemporalLayerCount = mTemporalLayers;
+ vpxAndroidParams->nMinQuantizer = mMinQuantizer;
+ vpxAndroidParams->nMaxQuantizer = mMaxQuantizer;
+ memcpy(vpxAndroidParams->nTemporalLayerBitrateRatio,
+ mTemporalLayerBitrateRatio, sizeof(mTemporalLayerBitrateRatio));
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SoftVPXEncoder::internalSetAndroidVpxParams(
+ const OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *vpxAndroidParams) {
+ if (vpxAndroidParams->nPortIndex != kOutputPortIndex) {
+ return OMX_ErrorUnsupportedIndex;
+ }
+ if (vpxAndroidParams->eTemporalPattern != OMX_VIDEO_VPXTemporalLayerPatternNone &&
+ vpxAndroidParams->eTemporalPattern != OMX_VIDEO_VPXTemporalLayerPatternWebRTC) {
+ return OMX_ErrorBadParameter;
+ }
+ if (vpxAndroidParams->nTemporalLayerCount > OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS) {
+ return OMX_ErrorBadParameter;
+ }
+ if (vpxAndroidParams->nMinQuantizer > vpxAndroidParams->nMaxQuantizer) {
+ return OMX_ErrorBadParameter;
+ }
+
+ mTemporalPatternType = vpxAndroidParams->eTemporalPattern;
+ if (vpxAndroidParams->eTemporalPattern == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) {
+ mTemporalLayers = vpxAndroidParams->nTemporalLayerCount;
+ } else if (vpxAndroidParams->eTemporalPattern == OMX_VIDEO_VPXTemporalLayerPatternNone) {
+ mTemporalLayers = 0;
+ }
+ // Check the bitrate distribution between layers is in increasing order
+ if (mTemporalLayers > 1) {
+ for (size_t i = 0; i < mTemporalLayers - 1; i++) {
+ if (vpxAndroidParams->nTemporalLayerBitrateRatio[i + 1] <=
+ vpxAndroidParams->nTemporalLayerBitrateRatio[i]) {
+ ALOGE("Wrong bitrate ratio - should be in increasing order.");
+ return OMX_ErrorBadParameter;
+ }
+ }
+ }
+ mKeyFrameInterval = vpxAndroidParams->nKeyFrameInterval;
+ mMinQuantizer = vpxAndroidParams->nMinQuantizer;
+ mMaxQuantizer = vpxAndroidParams->nMaxQuantizer;
+ memcpy(mTemporalLayerBitrateRatio, vpxAndroidParams->nTemporalLayerBitrateRatio,
+ sizeof(mTemporalLayerBitrateRatio));
+ ALOGD("VPx: internalSetAndroidVpxParams. BRMode: %u. TS: %zu. KF: %u."
+ " QP: %u - %u BR0: %u. BR1: %u. BR2: %u",
+ (uint32_t)mBitrateControlMode, mTemporalLayers, mKeyFrameInterval,
+ mMinQuantizer, mMaxQuantizer, mTemporalLayerBitrateRatio[0],
+ mTemporalLayerBitrateRatio[1], mTemporalLayerBitrateRatio[2]);
+ return OMX_ErrorNone;
+}
+
vpx_enc_frame_flags_t SoftVPXEncoder::getEncodeFlags() {
vpx_enc_frame_flags_t flags = 0;
- int patternIdx = mTemporalPatternIdx % mTemporalPatternLength;
- mTemporalPatternIdx++;
- switch (mTemporalPattern[patternIdx]) {
- case kTemporalUpdateLast:
- flags |= VP8_EFLAG_NO_UPD_GF;
- flags |= VP8_EFLAG_NO_UPD_ARF;
- flags |= VP8_EFLAG_NO_REF_GF;
- flags |= VP8_EFLAG_NO_REF_ARF;
- break;
- case kTemporalUpdateGoldenWithoutDependency:
- flags |= VP8_EFLAG_NO_REF_GF;
- // Deliberately no break here.
- case kTemporalUpdateGolden:
- flags |= VP8_EFLAG_NO_REF_ARF;
- flags |= VP8_EFLAG_NO_UPD_ARF;
- flags |= VP8_EFLAG_NO_UPD_LAST;
- break;
- case kTemporalUpdateAltrefWithoutDependency:
- flags |= VP8_EFLAG_NO_REF_ARF;
- flags |= VP8_EFLAG_NO_REF_GF;
- // Deliberately no break here.
- case kTemporalUpdateAltref:
- flags |= VP8_EFLAG_NO_UPD_GF;
- flags |= VP8_EFLAG_NO_UPD_LAST;
- break;
- case kTemporalUpdateNoneNoRefAltref:
- flags |= VP8_EFLAG_NO_REF_ARF;
- // Deliberately no break here.
- case kTemporalUpdateNone:
- flags |= VP8_EFLAG_NO_UPD_GF;
- flags |= VP8_EFLAG_NO_UPD_ARF;
- flags |= VP8_EFLAG_NO_UPD_LAST;
- flags |= VP8_EFLAG_NO_UPD_ENTROPY;
- break;
- case kTemporalUpdateNoneNoRefGoldenRefAltRef:
- flags |= VP8_EFLAG_NO_REF_GF;
- flags |= VP8_EFLAG_NO_UPD_GF;
- flags |= VP8_EFLAG_NO_UPD_ARF;
- flags |= VP8_EFLAG_NO_UPD_LAST;
- flags |= VP8_EFLAG_NO_UPD_ENTROPY;
- break;
- case kTemporalUpdateGoldenWithoutDependencyRefAltRef:
- flags |= VP8_EFLAG_NO_REF_GF;
- flags |= VP8_EFLAG_NO_UPD_ARF;
- flags |= VP8_EFLAG_NO_UPD_LAST;
- break;
- case kTemporalUpdateLastRefAltRef:
- flags |= VP8_EFLAG_NO_UPD_GF;
- flags |= VP8_EFLAG_NO_UPD_ARF;
- flags |= VP8_EFLAG_NO_REF_GF;
- break;
- case kTemporalUpdateGoldenRefAltRef:
- flags |= VP8_EFLAG_NO_UPD_ARF;
- flags |= VP8_EFLAG_NO_UPD_LAST;
- break;
- case kTemporalUpdateLastAndGoldenRefAltRef:
- flags |= VP8_EFLAG_NO_UPD_ARF;
- flags |= VP8_EFLAG_NO_REF_GF;
- break;
- case kTemporalUpdateLastRefAll:
- flags |= VP8_EFLAG_NO_UPD_ARF;
- flags |= VP8_EFLAG_NO_UPD_GF;
- break;
+ if (mTemporalPatternLength > 0) {
+ int patternIdx = mTemporalPatternIdx % mTemporalPatternLength;
+ mTemporalPatternIdx++;
+ switch (mTemporalPattern[patternIdx]) {
+ case kTemporalUpdateLast:
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_REF_GF;
+ flags |= VP8_EFLAG_NO_REF_ARF;
+ break;
+ case kTemporalUpdateGoldenWithoutDependency:
+ flags |= VP8_EFLAG_NO_REF_GF;
+ // Deliberately no break here.
+ case kTemporalUpdateGolden:
+ flags |= VP8_EFLAG_NO_REF_ARF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ break;
+ case kTemporalUpdateAltrefWithoutDependency:
+ flags |= VP8_EFLAG_NO_REF_ARF;
+ flags |= VP8_EFLAG_NO_REF_GF;
+ // Deliberately no break here.
+ case kTemporalUpdateAltref:
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ break;
+ case kTemporalUpdateNoneNoRefAltref:
+ flags |= VP8_EFLAG_NO_REF_ARF;
+ // Deliberately no break here.
+ case kTemporalUpdateNone:
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ flags |= VP8_EFLAG_NO_UPD_ENTROPY;
+ break;
+ case kTemporalUpdateNoneNoRefGoldenRefAltRef:
+ flags |= VP8_EFLAG_NO_REF_GF;
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ flags |= VP8_EFLAG_NO_UPD_ENTROPY;
+ break;
+ case kTemporalUpdateGoldenWithoutDependencyRefAltRef:
+ flags |= VP8_EFLAG_NO_REF_GF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ break;
+ case kTemporalUpdateLastRefAltRef:
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_REF_GF;
+ break;
+ case kTemporalUpdateGoldenRefAltRef:
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ break;
+ case kTemporalUpdateLastAndGoldenRefAltRef:
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_REF_GF;
+ break;
+ case kTemporalUpdateLastRefAll:
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ break;
+ }
}
return flags;
}
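Editor's note: the only behavioral change in this hunk is the new mTemporalPatternLength check, which keeps the modulo well defined when no temporal pattern is configured. A minimal standalone analogue (hypothetical names, not AOSP code):

    // With patternLength == 0 the old code computed idx % 0, which is undefined
    // behavior; the guard makes the "no layering" case select no flags at all.
    static int nextTemporalPatternIndex(int currentIdx, int patternLength) {
        if (patternLength <= 0) {
            return -1;  // caller skips flag selection entirely
        }
        return currentIdx % patternLength;
    }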
@@ -765,10 +687,7 @@
vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, mWidth, mHeight,
kInputBufferAlignment, (uint8_t *)source);
- vpx_enc_frame_flags_t flags = 0;
- if (mTemporalPatternLength > 0) {
- flags = getEncodeFlags();
- }
+ vpx_enc_frame_flags_t flags = getEncodeFlags();
if (mKeyFrameRequested) {
flags |= VPX_EFLAG_FORCE_KF;
mKeyFrameRequested = false;
@@ -779,7 +698,7 @@
vpx_codec_err_t res = vpx_codec_enc_config_set(mCodecContext,
mCodecConfiguration);
if (res != VPX_CODEC_OK) {
- ALOGE("vp8 encoder failed to update bitrate: %s",
+ ALOGE("vpx encoder failed to update bitrate: %s",
vpx_codec_err_to_string(res));
notify(OMX_EventError,
OMX_ErrorUndefined,
@@ -849,9 +768,15 @@
} // namespace android
-
android::SoftOMXComponent *createSoftOMXComponent(
const char *name, const OMX_CALLBACKTYPE *callbacks,
OMX_PTR appData, OMX_COMPONENTTYPE **component) {
- return new android::SoftVPXEncoder(name, callbacks, appData, component);
+ if (!strcmp(name, "OMX.google.vp8.encoder")) {
+ return new android::SoftVP8Encoder(name, callbacks, appData, component);
+ } else if (!strcmp(name, "OMX.google.vp9.encoder")) {
+ return new android::SoftVP9Encoder(name, callbacks, appData, component);
+ } else {
+ CHECK(!"Unknown component");
+ }
+ return NULL;
}
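A hedged usage sketch of the reworked entry point (illustration only, assuming the SoftVPXEncoder headers; the callback table is normally filled in by the soft-OMX plugin, and the component names must match the media_codecs XML entries added later in this patch):

    // Hypothetical caller:
    static android::SoftOMXComponent *makeVp9Encoder() {
        static OMX_CALLBACKTYPE callbacks = {};  // normally populated by the plugin
        OMX_COMPONENTTYPE *handle = NULL;
        // "OMX.google.vp8.encoder" yields a SoftVP8Encoder instead;
        // any other name trips the CHECK above.
        return createSoftOMXComponent(
                "OMX.google.vp9.encoder", &callbacks, NULL /* appData */, &handle);
    }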
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
index cd0a0cf..86e71da 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
@@ -31,18 +31,18 @@
namespace android {
-// Exposes a vpx encoder as an OMX Component
+// Base class for a VPX Encoder OMX Component
//
// Boilerplate for callback bindings are taken care
// by the base class SimpleSoftOMXComponent and its
// parent SoftOMXComponent.
//
-// Only following encoder settings are available
+// Only the following encoder settings are available (codec-specific settings
+// might be available in the sub-classes):
// - target bitrate
// - rate control (constant / variable)
// - frame rate
// - error resilience
-// - token partitioning
// - reconstruction & loop filters (g_profile)
//
// Only following color formats are recognized
@@ -54,7 +54,7 @@
// - encoding deadline is realtime
// - multithreaded encoding utilizes a number of threads equal
// to online cpu's available
-// - the algorithm interface for encoder is vp8
+// - the algorithm interface for the encoder is decided by the sub-class in use
// - fractional bits of frame rate is discarded
// - OMX timestamps are in microseconds, therefore
// encoder timebase is fixed to 1/1000000
@@ -63,7 +63,13 @@
SoftVPXEncoder(const char *name,
const OMX_CALLBACKTYPE *callbacks,
OMX_PTR appData,
- OMX_COMPONENTTYPE **component);
+ OMX_COMPONENTTYPE **component,
+ const char* role,
+ OMX_VIDEO_CODINGTYPE codingType,
+ const char* mimeType,
+ int32_t minCompressionRatio,
+ const CodecProfileLevel *profileLevels,
+ size_t numProfileLevels);
protected:
virtual ~SoftVPXEncoder();
@@ -87,7 +93,44 @@
// encoding of the frame
virtual void onQueueFilled(OMX_U32 portIndex);
-private:
+ // Initializes vpx encoder with available settings.
+ status_t initEncoder();
+
+ // Populates mCodecInterface with codec specific settings.
+ virtual void setCodecSpecificInterface() = 0;
+
+ // Sets codec specific configuration.
+ virtual void setCodecSpecificConfiguration() = 0;
+
+ // Sets codec specific encoder controls.
+ virtual vpx_codec_err_t setCodecSpecificControls() = 0;
+
+ // Get current encode flags.
+ virtual vpx_enc_frame_flags_t getEncodeFlags();
+
+ // Releases the vpx encoder instance, along with its
+ // associated data structures.
+ //
+ // Unless called earlier, this is handled by the
+ // dtor.
+ status_t releaseEncoder();
+
+ // Get bitrate parameters.
+ virtual OMX_ERRORTYPE internalGetBitrateParams(
+ OMX_VIDEO_PARAM_BITRATETYPE* bitrate);
+
+ // Updates bitrate to reflect port settings.
+ virtual OMX_ERRORTYPE internalSetBitrateParams(
+ const OMX_VIDEO_PARAM_BITRATETYPE* bitrate);
+
+ // Gets Android vpx specific parameters.
+ OMX_ERRORTYPE internalGetAndroidVpxParams(
+ OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *vpxAndroidParams);
+
+ // Handles Android vpx specific parameters.
+ OMX_ERRORTYPE internalSetAndroidVpxParams(
+ const OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *vpxAndroidParams);
+
enum TemporalReferences {
// For 1 layer case: reference all (last, golden, and alt ref), but only
// update last.
@@ -137,9 +180,6 @@
static const uint32_t kInputBufferAlignment = 1;
static const uint32_t kOutputBufferAlignment = 2;
- // Max value supported for DCT partitions
- static const uint32_t kMaxDCTPartitions = 3;
-
// Number of supported input color formats
static const uint32_t kNumberOfSupportedColorFormats = 3;
@@ -161,23 +201,10 @@
// Bitrate control mode, either constant or variable
vpx_rc_mode mBitrateControlMode;
- // vp8 specific configuration parameter
- // that enables token partitioning of
- // the stream into substreams
- int32_t mDCTPartitions;
-
// Parameter that denotes whether error resilience
// is enabled in encoder
OMX_BOOL mErrorResilience;
- // Encoder profile corresponding to OMX level parameter
- //
- // The inconsistency in the naming is caused by
- // OMX spec referring vpx profiles (g_profile)
- // as "levels" whereas using the name "profile" for
- // something else.
- OMX_VIDEO_VP8LEVELTYPE mLevel;
-
// Key frame interval in frames
uint32_t mKeyFrameInterval;
@@ -216,31 +243,6 @@
bool mKeyFrameRequested;
- // Initializes vpx encoder with available settings.
- status_t initEncoder();
-
- // Releases vpx encoder instance, with it's associated
- // data structures.
- //
- // Unless called earlier, this is handled by the
- // dtor.
- status_t releaseEncoder();
-
- // Get current encode flags
- vpx_enc_frame_flags_t getEncodeFlags();
-
- // Updates bitrate to reflect port settings.
- OMX_ERRORTYPE internalSetBitrateParams(
- const OMX_VIDEO_PARAM_BITRATETYPE* bitrate);
-
- // Handles vp8 specific parameters.
- OMX_ERRORTYPE internalSetVp8Params(
- const OMX_VIDEO_PARAM_VP8TYPE* vp8Params);
-
- // Handles Android vp8 specific parameters.
- OMX_ERRORTYPE internalSetAndroidVp8Params(
- const OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE* vp8AndroidParams);
-
DISALLOW_EVIL_CONSTRUCTORS(SoftVPXEncoder);
};
diff --git a/media/libstagefright/codecs/on2/h264dec/Android.mk b/media/libstagefright/codecs/on2/h264dec/Android.mk
index 7159674..f28e17b 100644
--- a/media/libstagefright/codecs/on2/h264dec/Android.mk
+++ b/media/libstagefright/codecs/on2/h264dec/Android.mk
@@ -95,11 +95,10 @@
$(LOCAL_PATH)/./omxdl/arm_neon/vc/m4p10/api
endif
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow
LOCAL_SHARED_LIBRARIES := \
- libstagefright libstagefright_omx libstagefright_foundation libutils liblog \
+ libmedia libstagefright_omx libstagefright_foundation libutils liblog \
LOCAL_MODULE := libstagefright_soft_h264dec
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/ARM_DELIVERY.TXT b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/ARM_DELIVERY.TXT
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/ARM_MANIFEST.TXT b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/ARM_MANIFEST.TXT
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/filelist_vc.txt b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/filelist_vc.txt
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/comm/src/omxVCCOMM_Copy16x16_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/comm/src/omxVCCOMM_Copy16x16_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/comm/src/omxVCCOMM_Copy8x8_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/comm/src/omxVCCOMM_Copy8x8_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/comm/src/omxVCCOMM_ExpandFrame_I_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/comm/src/omxVCCOMM_ExpandFrame_I_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_Average_4x_Align_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_Average_4x_Align_unsafe_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_DeblockingChroma_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_DeblockingChroma_unsafe_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_DeblockingLuma_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_DeblockingLuma_unsafe_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_DecodeCoeffsToPair_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_DecodeCoeffsToPair_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_DequantTables_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_DequantTables_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_Align_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_Align_unsafe_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_Copy_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_Copy_unsafe_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_DiagCopy_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_DiagCopy_unsafe_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfDiagHorVer4x4_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfDiagHorVer4x4_unsafe_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfDiagVerHor4x4_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfDiagVerHor4x4_unsafe_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfHor4x4_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfHor4x4_unsafe_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfVer4x4_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfVer4x4_unsafe_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_Interpolate_Chroma_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_Interpolate_Chroma_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_QuantTables_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_QuantTables_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_TransformResidual4x4_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_TransformResidual4x4_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_UnpackBlock4x4_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_UnpackBlock4x4_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_DequantTransformResidualFromPairAndAdd_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_DequantTransformResidualFromPairAndAdd_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_HorEdge_I_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_HorEdge_I_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_VerEdge_I_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_VerEdge_I_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_InterpolateLuma_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_InterpolateLuma_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_PredictIntraChroma_8x8_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_PredictIntraChroma_8x8_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_PredictIntra_16x16_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_PredictIntra_16x16_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_PredictIntra_4x4_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_PredictIntra_4x4_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_TransformDequantChromaDCFromPair_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_TransformDequantChromaDCFromPair_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_TransformDequantLumaDCFromPair_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_TransformDequantLumaDCFromPair_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_Clip8_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_Clip8_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_DecodeVLCZigzag_AC_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_DecodeVLCZigzag_AC_unsafe_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_SetPredDir_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_SetPredDir_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodePadMV_PVOP_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodePadMV_PVOP_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_Inter_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_Inter_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraACVLC_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraACVLC_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraDCVLC_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraDCVLC_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_FindMVpred_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_FindMVpred_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_IDCT8x8blk_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_IDCT8x8blk_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_MCReconBlock_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_MCReconBlock_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_PredictReconCoefIntra_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_PredictReconCoefIntra_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_QuantInvInter_I_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_QuantInvInter_I_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_QuantInvIntra_I_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_QuantInvIntra_I_s.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/source/arm11_asm/h264bsd_interpolate_chroma_hor.s b/media/libstagefright/codecs/on2/h264dec/source/arm11_asm/h264bsd_interpolate_chroma_hor.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/source/arm11_asm/h264bsd_interpolate_chroma_hor_ver.s b/media/libstagefright/codecs/on2/h264dec/source/arm11_asm/h264bsd_interpolate_chroma_hor_ver.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/source/arm11_asm/h264bsd_interpolate_chroma_ver.s b/media/libstagefright/codecs/on2/h264dec/source/arm11_asm/h264bsd_interpolate_chroma_ver.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/source/arm11_asm/h264bsd_interpolate_hor_half.s b/media/libstagefright/codecs/on2/h264dec/source/arm11_asm/h264bsd_interpolate_hor_half.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/source/arm11_asm/h264bsd_interpolate_hor_quarter.s b/media/libstagefright/codecs/on2/h264dec/source/arm11_asm/h264bsd_interpolate_hor_quarter.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/source/arm11_asm/h264bsd_interpolate_hor_ver_quarter.s b/media/libstagefright/codecs/on2/h264dec/source/arm11_asm/h264bsd_interpolate_hor_ver_quarter.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/source/arm11_asm/h264bsd_interpolate_mid_hor.s b/media/libstagefright/codecs/on2/h264dec/source/arm11_asm/h264bsd_interpolate_mid_hor.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/source/arm11_asm/h264bsd_interpolate_ver_half.s b/media/libstagefright/codecs/on2/h264dec/source/arm11_asm/h264bsd_interpolate_ver_half.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/on2/h264dec/source/arm11_asm/h264bsd_interpolate_ver_quarter.s b/media/libstagefright/codecs/on2/h264dec/source/arm11_asm/h264bsd_interpolate_ver_quarter.s
old mode 100755
new mode 100644
diff --git a/media/libstagefright/codecs/opus/dec/Android.mk b/media/libstagefright/codecs/opus/dec/Android.mk
index f272763..7b0ad2c 100644
--- a/media/libstagefright/codecs/opus/dec/Android.mk
+++ b/media/libstagefright/codecs/opus/dec/Android.mk
@@ -10,10 +10,9 @@
frameworks/native/include/media/openmax \
LOCAL_SHARED_LIBRARIES := \
- libopus libstagefright libstagefright_omx \
+ libopus libmedia libstagefright_omx \
libstagefright_foundation libutils liblog
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow unsigned-integer-overflow
LOCAL_MODULE := libstagefright_soft_opusdec
diff --git a/media/libstagefright/codecs/raw/Android.mk b/media/libstagefright/codecs/raw/Android.mk
index e454c84..caed2cc 100644
--- a/media/libstagefright/codecs/raw/Android.mk
+++ b/media/libstagefright/codecs/raw/Android.mk
@@ -9,7 +9,6 @@
frameworks/native/include/media/openmax
LOCAL_CFLAGS += -Werror
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow unsigned-integer-overflow
LOCAL_SHARED_LIBRARIES := \
diff --git a/media/libstagefright/codecs/vorbis/dec/Android.mk b/media/libstagefright/codecs/vorbis/dec/Android.mk
index 039be6f..3d72d3a 100644
--- a/media/libstagefright/codecs/vorbis/dec/Android.mk
+++ b/media/libstagefright/codecs/vorbis/dec/Android.mk
@@ -10,14 +10,13 @@
frameworks/native/include/media/openmax \
LOCAL_SHARED_LIBRARIES := \
- libvorbisidec libstagefright libstagefright_omx \
+ libvorbisidec libmedia libstagefright_omx \
libstagefright_foundation libutils liblog
LOCAL_MODULE := libstagefright_soft_vorbisdec
LOCAL_MODULE_TAGS := optional
LOCAL_CFLAGS += -Werror
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow unsigned-integer-overflow
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/colorconversion/Android.mk b/media/libstagefright/colorconversion/Android.mk
index 0bf9701..ba74740 100644
--- a/media/libstagefright/colorconversion/Android.mk
+++ b/media/libstagefright/colorconversion/Android.mk
@@ -14,7 +14,6 @@
libyuv_static \
LOCAL_CFLAGS += -Werror
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow
LOCAL_MODULE:= libstagefright_color_conversion
diff --git a/media/libstagefright/data/media_codecs_google_video.xml b/media/libstagefright/data/media_codecs_google_video.xml
index b03c769..ce164a2 100644
--- a/media/libstagefright/data/media_codecs_google_video.xml
+++ b/media/libstagefright/data/media_codecs_google_video.xml
@@ -101,5 +101,12 @@
<Limit name="bitrate" range="1-40000000" />
<Feature name="bitrate-modes" value="VBR,CBR" />
</MediaCodec>
+ <MediaCodec name="OMX.google.vp9.encoder" type="video/x-vnd.on2.vp9">
+ <!-- profiles and levels: ProfileMain : Level_Version0-3 -->
+ <Limit name="size" min="2x2" max="2048x2048" />
+ <Limit name="alignment" value="2x2" />
+ <Limit name="bitrate" range="1-40000000" />
+ <Feature name="bitrate-modes" value="VBR,CBR" />
+ </MediaCodec>
</Encoders>
</Included>
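With the new <MediaCodec> entry, the software VP9 encoder becomes discoverable by its MIME type. A hedged NDK-side sketch (assumes libmediandk and a device shipping this media_codecs_google_video.xml):

    #include <media/NdkMediaCodec.h>

    static bool hasVp9Encoder() {
        AMediaCodec *codec = AMediaCodec_createEncoderByType("video/x-vnd.on2.vp9");
        if (codec == NULL) {
            return false;  // no encoder registered for this MIME type
        }
        AMediaCodec_delete(codec);
        return true;
    }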
diff --git a/media/libstagefright/filters/Android.mk b/media/libstagefright/filters/Android.mk
index bd75a88..f8e8352 100644
--- a/media/libstagefright/filters/Android.mk
+++ b/media/libstagefright/filters/Android.mk
@@ -21,7 +21,6 @@
LOCAL_C_INCLUDES += $(intermediates)
LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-LOCAL_CLANG := true
LOCAL_SHARED_LIBRARIES := libmedia
diff --git a/media/libstagefright/filters/IntrinsicBlurFilter.cpp b/media/libstagefright/filters/IntrinsicBlurFilter.cpp
index cbcf699..e00afd9 100644
--- a/media/libstagefright/filters/IntrinsicBlurFilter.cpp
+++ b/media/libstagefright/filters/IntrinsicBlurFilter.cpp
@@ -19,7 +19,7 @@
#include <utils/Log.h>
-#include <media/stagefright/foundation/ABuffer.h>
+#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -88,7 +88,7 @@
}
status_t IntrinsicBlurFilter::processBuffers(
- const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer) {
+ const sp<MediaCodecBuffer> &srcBuffer, const sp<MediaCodecBuffer> &outBuffer) {
mAllocIn->copy1DRangeFrom(0, mWidth * mHeight, srcBuffer->data());
mBlur->forEach(mAllocOut);
mAllocOut->copy1DRangeTo(0, mWidth * mHeight, outBuffer->data());
diff --git a/media/libstagefright/filters/IntrinsicBlurFilter.h b/media/libstagefright/filters/IntrinsicBlurFilter.h
index 4707ab7..a2aabfa 100644
--- a/media/libstagefright/filters/IntrinsicBlurFilter.h
+++ b/media/libstagefright/filters/IntrinsicBlurFilter.h
@@ -31,7 +31,7 @@
virtual void reset();
virtual status_t setParameters(const sp<AMessage> &msg);
virtual status_t processBuffers(
- const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer);
+ const sp<MediaCodecBuffer> &srcBuffer, const sp<MediaCodecBuffer> &outBuffer);
protected:
virtual ~IntrinsicBlurFilter() {};
diff --git a/media/libstagefright/filters/MediaFilter.cpp b/media/libstagefright/filters/MediaFilter.cpp
index cd69418..30e3643 100644
--- a/media/libstagefright/filters/MediaFilter.cpp
+++ b/media/libstagefright/filters/MediaFilter.cpp
@@ -31,6 +31,8 @@
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaFilter.h>
+#include <media/MediaCodecBuffer.h>
+
#include <gui/BufferItem.h>
#include "ColorConvert.h"
@@ -40,6 +42,8 @@
#include "SaturationFilter.h"
#include "ZeroFilter.h"
+#include "../include/SharedMemoryBuffer.h"
+
namespace android {
// parameter: number of input and output buffers
@@ -195,7 +199,7 @@
}
void MediaFilter::PortDescription::addBuffer(
- IOMX::buffer_id id, const sp<ABuffer> &buffer) {
+ IOMX::buffer_id id, const sp<MediaCodecBuffer> &buffer) {
mBufferIDs.push_back(id);
mBuffers.push_back(buffer);
}
@@ -208,7 +212,7 @@
return mBufferIDs.itemAt(index);
}
-sp<ABuffer> MediaFilter::PortDescription::bufferAt(size_t index) const {
+sp<MediaCodecBuffer> MediaFilter::PortDescription::bufferAt(size_t index) const {
return mBuffers.itemAt(index);
}
@@ -250,7 +254,8 @@
info.mBufferID = i;
info.mGeneration = mGeneration;
info.mOutputFlags = 0;
- info.mData = new ABuffer(mem->pointer(), bufferSize);
+ info.mData = new SharedMemoryBuffer(
+ isInput ? mInputFormat : mOutputFormat, mem);
info.mData->meta()->setInt64("timeUs", 0);
mBuffers[portIndex].push_back(info);
@@ -314,7 +319,7 @@
notify->setInt32("buffer-id", info->mBufferID);
info->mData->meta()->clear();
- notify->setBuffer("buffer", info->mData);
+ notify->setObject("buffer", info->mData);
sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, this);
reply->setInt32("buffer-id", info->mBufferID);
@@ -334,7 +339,7 @@
notify->setInt32("what", CodecBase::kWhatDrainThisBuffer);
notify->setInt32("buffer-id", info->mBufferID);
notify->setInt32("flags", info->mOutputFlags);
- notify->setBuffer("buffer", info->mData);
+ notify->setObject("buffer", info->mData);
sp<AMessage> reply = new AMessage(kWhatOutputBufferDrained, this);
reply->setInt32("buffer-id", info->mBufferID);
@@ -355,25 +360,6 @@
ALOGV("Sent kWhatEOS.");
}
-void MediaFilter::sendFormatChange() {
- sp<AMessage> notify = mNotify->dup();
-
- notify->setInt32("what", kWhatOutputFormatChanged);
-
- AString mime;
- CHECK(mOutputFormat->findString("mime", &mime));
- notify->setString("mime", mime.c_str());
-
- notify->setInt32("stride", mStride);
- notify->setInt32("slice-height", mSliceHeight);
- notify->setInt32("color-format", mColorFormatOut);
- notify->setRect("crop", 0, 0, mStride - 1, mSliceHeight - 1);
- notify->setInt32("width", mWidth);
- notify->setInt32("height", mHeight);
-
- notify->post();
-}
-
void MediaFilter::requestFillEmptyInput() {
if (mPortEOS[kPortIndexInput]) {
return;
@@ -548,8 +534,6 @@
notify->post();
mState = CONFIGURED;
ALOGV("Handled kWhatConfigureComponent.");
-
- sendFormatChange();
}
void MediaFilter::onStart() {
@@ -597,11 +581,12 @@
CHECK_EQ(info->mStatus, BufferInfo::OWNED_BY_UPSTREAM);
info->mStatus = BufferInfo::OWNED_BY_US;
- sp<ABuffer> buffer;
+ sp<MediaCodecBuffer> buffer;
int32_t err = OK;
bool eos = false;
- if (!msg->findBuffer("buffer", &buffer)) {
+ sp<RefBase> obj;
+ if (!msg->findObject("buffer", &obj)) {
// these are unfilled buffers returned by client
CHECK(msg->findInt32("err", &err));
@@ -616,6 +601,8 @@
}
buffer.clear();
+ } else {
+ buffer = static_cast<MediaCodecBuffer *>(obj.get());
}
int32_t isCSD;
@@ -768,7 +755,8 @@
// TODO: check input format and convert only if necessary
// copy RGBA graphic buffer into temporary ARGB input buffer
BufferInfo *inputInfo = new BufferInfo;
- inputInfo->mData = new ABuffer(buf->getWidth() * buf->getHeight() * 4);
+ inputInfo->mData = new MediaCodecBuffer(
+ mInputFormat, new ABuffer(buf->getWidth() * buf->getHeight() * 4));
ALOGV("Copying surface data into temp buffer.");
convertRGBAToARGB(
(uint8_t*)bufPtr, buf->getWidth(), buf->getHeight(),
diff --git a/media/libstagefright/filters/RSFilter.cpp b/media/libstagefright/filters/RSFilter.cpp
index b569945..225a375 100644
--- a/media/libstagefright/filters/RSFilter.cpp
+++ b/media/libstagefright/filters/RSFilter.cpp
@@ -19,7 +19,7 @@
#include <utils/Log.h>
-#include <media/stagefright/foundation/ABuffer.h>
+#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -85,7 +85,7 @@
}
status_t RSFilter::processBuffers(
- const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer) {
+ const sp<MediaCodecBuffer> &srcBuffer, const sp<MediaCodecBuffer> &outBuffer) {
mAllocIn->copy1DRangeFrom(0, mWidth * mHeight, srcBuffer->data());
mCallback->processBuffers(mAllocIn.get(), mAllocOut.get());
mAllocOut->copy1DRangeTo(0, mWidth * mHeight, outBuffer->data());
diff --git a/media/libstagefright/filters/RSFilter.h b/media/libstagefright/filters/RSFilter.h
index c5b5074..3326284 100644
--- a/media/libstagefright/filters/RSFilter.h
+++ b/media/libstagefright/filters/RSFilter.h
@@ -35,7 +35,7 @@
virtual void reset();
virtual status_t setParameters(const sp<AMessage> &msg);
virtual status_t processBuffers(
- const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer);
+ const sp<MediaCodecBuffer> &srcBuffer, const sp<MediaCodecBuffer> &outBuffer);
protected:
virtual ~RSFilter();
diff --git a/media/libstagefright/filters/SaturationFilter.cpp b/media/libstagefright/filters/SaturationFilter.cpp
index ba5f75a..0a1df05 100644
--- a/media/libstagefright/filters/SaturationFilter.cpp
+++ b/media/libstagefright/filters/SaturationFilter.cpp
@@ -19,7 +19,7 @@
#include <utils/Log.h>
-#include <media/stagefright/foundation/ABuffer.h>
+#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -88,7 +88,7 @@
}
status_t SaturationFilter::processBuffers(
- const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer) {
+ const sp<MediaCodecBuffer> &srcBuffer, const sp<MediaCodecBuffer> &outBuffer) {
mAllocIn->copy1DRangeFrom(0, mWidth * mHeight, srcBuffer->data());
mScript->forEach_root(mAllocIn, mAllocOut);
mAllocOut->copy1DRangeTo(0, mWidth * mHeight, outBuffer->data());
diff --git a/media/libstagefright/filters/SaturationFilter.h b/media/libstagefright/filters/SaturationFilter.h
index 0545021..317e469 100644
--- a/media/libstagefright/filters/SaturationFilter.h
+++ b/media/libstagefright/filters/SaturationFilter.h
@@ -33,7 +33,7 @@
virtual void reset();
virtual status_t setParameters(const sp<AMessage> &msg);
virtual status_t processBuffers(
- const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer);
+ const sp<MediaCodecBuffer> &srcBuffer, const sp<MediaCodecBuffer> &outBuffer);
protected:
virtual ~SaturationFilter() {};
diff --git a/media/libstagefright/filters/SimpleFilter.h b/media/libstagefright/filters/SimpleFilter.h
index 4cd37ef..a3c2d76 100644
--- a/media/libstagefright/filters/SimpleFilter.h
+++ b/media/libstagefright/filters/SimpleFilter.h
@@ -21,11 +21,11 @@
#include <utils/Errors.h>
#include <utils/RefBase.h>
-struct ABuffer;
-struct AMessage;
-
namespace android {
+struct AMessage;
+class MediaCodecBuffer;
+
struct SimpleFilter : public RefBase {
public:
SimpleFilter() : mWidth(0), mHeight(0), mStride(0), mSliceHeight(0),
@@ -37,7 +37,7 @@
virtual void reset() = 0;
virtual status_t setParameters(const sp<AMessage> &msg) = 0;
virtual status_t processBuffers(
- const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer) = 0;
+ const sp<MediaCodecBuffer> &srcBuffer, const sp<MediaCodecBuffer> &outBuffer) = 0;
protected:
int32_t mWidth, mHeight;
diff --git a/media/libstagefright/filters/ZeroFilter.cpp b/media/libstagefright/filters/ZeroFilter.cpp
index 3f1243c..74b94b7 100644
--- a/media/libstagefright/filters/ZeroFilter.cpp
+++ b/media/libstagefright/filters/ZeroFilter.cpp
@@ -17,7 +17,7 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "ZeroFilter"
-#include <media/stagefright/foundation/ABuffer.h>
+#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -38,7 +38,7 @@
}
status_t ZeroFilter::processBuffers(
- const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer) {
+ const sp<MediaCodecBuffer> &srcBuffer, const sp<MediaCodecBuffer> &outBuffer) {
// assuming identical input & output buffers, since we're a copy filter
if (mInvertData) {
uint32_t* src = (uint32_t*)srcBuffer->data();
diff --git a/media/libstagefright/filters/ZeroFilter.h b/media/libstagefright/filters/ZeroFilter.h
index bd34dfb..f941cc8 100644
--- a/media/libstagefright/filters/ZeroFilter.h
+++ b/media/libstagefright/filters/ZeroFilter.h
@@ -29,7 +29,7 @@
virtual void reset() {};
virtual status_t setParameters(const sp<AMessage> &msg);
virtual status_t processBuffers(
- const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer);
+ const sp<MediaCodecBuffer> &srcBuffer, const sp<MediaCodecBuffer> &outBuffer);
protected:
virtual ~ZeroFilter() {};
diff --git a/media/libstagefright/foundation/ALooperRoster.cpp b/media/libstagefright/foundation/ALooperRoster.cpp
index 5f11fb6..8a7c3eb 100644
--- a/media/libstagefright/foundation/ALooperRoster.cpp
+++ b/media/libstagefright/foundation/ALooperRoster.cpp
@@ -100,7 +100,7 @@
}
}
-static void makeFourCC(uint32_t fourcc, char *s) {
+static void makeFourCC(uint32_t fourcc, char *s, size_t bufsz) {
s[0] = (fourcc >> 24) & 0xff;
if (s[0]) {
s[1] = (fourcc >> 16) & 0xff;
@@ -108,7 +108,7 @@
s[3] = fourcc & 0xff;
s[4] = 0;
} else {
- sprintf(s, "%u", fourcc);
+ snprintf(s, bufsz, "%u", fourcc);
}
}
@@ -146,7 +146,7 @@
if (verboseStats) {
for (size_t j = 0; j < handler->mMessages.size(); j++) {
char fourcc[15];
- makeFourCC(handler->mMessages.keyAt(j), fourcc);
+ makeFourCC(handler->mMessages.keyAt(j), fourcc, sizeof(fourcc));
s.appendFormat("\n %s: %u",
fourcc,
handler->mMessages.valueAt(j));
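These sprintf-to-snprintf conversions bound every write to the caller-supplied buffer size. A self-contained analogue of the patched helper (the leading size guard is added for the illustration and is not in the patch):

    #include <cstdint>
    #include <cstdio>

    static void fourCCToString(uint32_t fourcc, char *s, size_t bufsz) {
        if (bufsz < 5) {                 // need four characters plus the terminator
            if (bufsz > 0) s[0] = '\0';
            return;
        }
        s[0] = (fourcc >> 24) & 0xff;
        if (s[0]) {
            s[1] = (fourcc >> 16) & 0xff;
            s[2] = (fourcc >> 8) & 0xff;
            s[3] = fourcc & 0xff;
            s[4] = '\0';
        } else {
            snprintf(s, bufsz, "%u", fourcc);  // numeric fallback, now bounded
        }
    }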
diff --git a/media/libstagefright/foundation/AString.cpp b/media/libstagefright/foundation/AString.cpp
index b167543..04fac19 100644
--- a/media/libstagefright/foundation/AString.cpp
+++ b/media/libstagefright/foundation/AString.cpp
@@ -14,6 +14,9 @@
* limitations under the License.
*/
+#define LOG_TAG "AString"
+#include <utils/Log.h>
+
#include <ctype.h>
#include <stdarg.h>
#include <stdio.h>
@@ -40,14 +43,24 @@
: mData(NULL),
mSize(0),
mAllocSize(1) {
- setTo(s);
+ if (!s) {
+ ALOGW("ctor got NULL, using empty string instead");
+ clear();
+ } else {
+ setTo(s);
+ }
}
AString::AString(const char *s, size_t size)
: mData(NULL),
mSize(0),
mAllocSize(1) {
- setTo(s, size);
+ if (!s) {
+ ALOGW("ctor got NULL, using empty string instead");
+ clear();
+ } else {
+ setTo(s, size);
+ }
}
AString::AString(const String8 &from)
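The constructors now tolerate a NULL input: a warning is logged and the string stays empty rather than handing NULL to setTo(). A minimal standalone analogue of the defensive pattern:

    #include <string>

    // A null C string maps to an empty string instead of being passed to
    // strlen()/memcpy(), which would be undefined behavior.
    static std::string fromCString(const char *s) {
        return s ? std::string(s) : std::string();
    }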
diff --git a/media/libstagefright/foundation/ColorUtils.cpp b/media/libstagefright/foundation/ColorUtils.cpp
index d7439b2..88a8351 100644
--- a/media/libstagefright/foundation/ColorUtils.cpp
+++ b/media/libstagefright/foundation/ColorUtils.cpp
@@ -343,6 +343,23 @@
}
// static
+ColorAspects ColorUtils::unpackToColorAspects(uint32_t packed) {
+ ColorAspects aspects;
+ aspects.mRange = (ColorAspects::Range)((packed >> 24) & 0xFF);
+ aspects.mPrimaries = (ColorAspects::Primaries)((packed >> 16) & 0xFF);
+ aspects.mMatrixCoeffs = (ColorAspects::MatrixCoeffs)((packed >> 8) & 0xFF);
+ aspects.mTransfer = (ColorAspects::Transfer)(packed & 0xFF);
+
+ return aspects;
+}
+
+// static
+uint32_t ColorUtils::packToU32(const ColorAspects &aspects) {
+ return (aspects.mRange << 24) | (aspects.mPrimaries << 16)
+ | (aspects.mMatrixCoeffs << 8) | aspects.mTransfer;
+}
+
+// static
void ColorUtils::setDefaultCodecColorAspectsIfNeeded(
ColorAspects &aspects, int32_t width, int32_t height) {
ColorAspects::MatrixCoeffs coeffs;
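A standalone round-trip sketch of the packing scheme introduced above (plain integer fields stand in for the ColorAspects enums; each field is assumed to fit in one byte, matching the 8-bit shifts in the patch):

    #include <cassert>
    #include <cstdint>

    struct Aspects { uint8_t range, primaries, matrixCoeffs, transfer; };

    // Most significant byte holds range, least significant holds transfer.
    static uint32_t pack(const Aspects &a) {
        return (uint32_t(a.range) << 24) | (uint32_t(a.primaries) << 16)
                | (uint32_t(a.matrixCoeffs) << 8) | a.transfer;
    }

    static Aspects unpack(uint32_t packed) {
        return { uint8_t(packed >> 24), uint8_t(packed >> 16),
                 uint8_t(packed >> 8), uint8_t(packed) };
    }

    int main() {
        Aspects a{2, 1, 6, 3};
        Aspects b = unpack(pack(a));
        assert(a.range == b.range && a.primaries == b.primaries
                && a.matrixCoeffs == b.matrixCoeffs && a.transfer == b.transfer);
        return 0;
    }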
diff --git a/media/libstagefright/foundation/hexdump.cpp b/media/libstagefright/foundation/hexdump.cpp
index a44d832..872c5f3 100644
--- a/media/libstagefright/foundation/hexdump.cpp
+++ b/media/libstagefright/foundation/hexdump.cpp
@@ -49,7 +49,7 @@
appendIndent(&line, indent);
char tmp[32];
- sprintf(tmp, "%08lx: ", (unsigned long)offset);
+ snprintf(tmp, sizeof(tmp), "%08lx: ", (unsigned long)offset);
line.append(tmp);
@@ -60,7 +60,7 @@
if (offset + i >= size) {
line.append(" ");
} else {
- sprintf(tmp, "%02x ", data[offset + i]);
+ snprintf(tmp, sizeof(tmp), "%02x ", data[offset + i]);
line.append(tmp);
}
}
diff --git a/media/libstagefright/http/Android.mk b/media/libstagefright/http/Android.mk
index 33b8361..a7bd6a2 100644
--- a/media/libstagefright/http/Android.mk
+++ b/media/libstagefright/http/Android.mk
@@ -22,7 +22,6 @@
LOCAL_CFLAGS += -Wno-multichar
LOCAL_CFLAGS += -Werror -Wall
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/httplive/Android.mk b/media/libstagefright/httplive/Android.mk
index 2c985fc..6a71b7c 100644
--- a/media/libstagefright/httplive/Android.mk
+++ b/media/libstagefright/httplive/Android.mk
@@ -14,7 +14,6 @@
$(TOP)/frameworks/native/include/media/openmax
LOCAL_CFLAGS += -Werror -Wall
-LOCAL_CLANG := true
LOCAL_SANITIZE := unsigned-integer-overflow signed-integer-overflow
LOCAL_SHARED_LIBRARIES := \
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
index 8b9472e..e654a01 100644
--- a/media/libstagefright/httplive/LiveSession.cpp
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -518,9 +518,10 @@
return err;
}
-status_t LiveSession::seekTo(int64_t timeUs) {
+status_t LiveSession::seekTo(int64_t timeUs, bool precise) {
sp<AMessage> msg = new AMessage(kWhatSeek, this);
msg->setInt64("timeUs", timeUs);
+ msg->setInt32("precise", precise);
sp<AMessage> response;
status_t err = msg->postAndAwaitResponse(&response);
@@ -1441,7 +1442,10 @@
void LiveSession::onSeek(const sp<AMessage> &msg) {
int64_t timeUs;
+ int32_t precise;
CHECK(msg->findInt64("timeUs", &timeUs));
+ CHECK(msg->findInt32("precise", &precise));
+ // TODO: add "precise" to changeConfiguration.
changeConfiguration(timeUs);
}
diff --git a/media/libstagefright/httplive/LiveSession.h b/media/libstagefright/httplive/LiveSession.h
index 65a824e..4dc529c 100644
--- a/media/libstagefright/httplive/LiveSession.h
+++ b/media/libstagefright/httplive/LiveSession.h
@@ -86,7 +86,7 @@
status_t disconnect();
// Blocks until seek is complete.
- status_t seekTo(int64_t timeUs);
+ status_t seekTo(int64_t timeUs, bool precise);
status_t getDuration(int64_t *durationUs) const;
size_t getTrackCount() const;
diff --git a/media/libstagefright/httplive/PlaylistFetcher.cpp b/media/libstagefright/httplive/PlaylistFetcher.cpp
index 7ad7fee..bbcea51 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.cpp
+++ b/media/libstagefright/httplive/PlaylistFetcher.cpp
@@ -956,6 +956,38 @@
return false;
}
+void PlaylistFetcher::initSeqNumberForLiveStream(
+ int32_t &firstSeqNumberInPlaylist,
+ int32_t &lastSeqNumberInPlaylist) {
+ // start at least 3 target durations from the end.
+ int64_t timeFromEnd = 0;
+ size_t index = mPlaylist->size();
+ sp<AMessage> itemMeta;
+ int64_t itemDurationUs;
+ int32_t targetDuration;
+ if (mPlaylist->meta()->findInt32("target-duration", &targetDuration)) {
+ do {
+ --index;
+ if (!mPlaylist->itemAt(index, NULL /* uri */, &itemMeta)
+ || !itemMeta->findInt64("durationUs", &itemDurationUs)) {
+ ALOGW("item or itemDurationUs missing");
+ mSeqNumber = lastSeqNumberInPlaylist - 3;
+ break;
+ }
+
+ timeFromEnd += itemDurationUs;
+ mSeqNumber = firstSeqNumberInPlaylist + index;
+ } while (timeFromEnd < targetDuration * 3E6 && index > 0);
+ } else {
+ ALOGW("target-duration missing");
+ mSeqNumber = lastSeqNumberInPlaylist - 3;
+ }
+
+ if (mSeqNumber < firstSeqNumberInPlaylist) {
+ mSeqNumber = firstSeqNumberInPlaylist;
+ }
+}
+
bool PlaylistFetcher::initDownloadState(
AString &uri,
sp<AMessage> &itemMeta,
@@ -982,11 +1014,8 @@
if (mSegmentStartTimeUs < 0) {
if (!mPlaylist->isComplete() && !mPlaylist->isEvent()) {
- // If this is a live session, start 3 segments from the end on connect
- mSeqNumber = lastSeqNumberInPlaylist - 3;
- if (mSeqNumber < firstSeqNumberInPlaylist) {
- mSeqNumber = firstSeqNumberInPlaylist;
- }
+ // this is a live session
+ initSeqNumberForLiveStream(firstSeqNumberInPlaylist, lastSeqNumberInPlaylist);
} else {
// When seeking mSegmentStartTimeUs is unavailable (< 0), we
// use mStartTimeUs (client supplied timestamp) to determine both start segment
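The new helper starts a live stream roughly three target durations before the live edge instead of always at lastSeqNumber - 3. A standalone analogue over a vector of per-segment durations (hypothetical helper, microsecond units assumed):

    #include <cstdint>
    #include <vector>

    static size_t liveStartIndex(const std::vector<int64_t> &segmentDurationsUs,
                                 int64_t targetDurationUs) {
        int64_t timeFromEnd = 0;
        size_t index = segmentDurationsUs.size();
        // Walk backwards until at least three target durations are buffered
        // behind the chosen start segment, or the playlist runs out.
        while (index > 0 && timeFromEnd < 3 * targetDurationUs) {
            --index;
            timeFromEnd += segmentDurationsUs[index];
        }
        return index;  // first segment to fetch; 0 when the playlist is short
    }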
diff --git a/media/libstagefright/httplive/PlaylistFetcher.h b/media/libstagefright/httplive/PlaylistFetcher.h
index c8ca457..ee7d3a1 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.h
+++ b/media/libstagefright/httplive/PlaylistFetcher.h
@@ -218,6 +218,9 @@
void onStop(const sp<AMessage> &msg);
void onMonitorQueue();
void onDownloadNext();
+ void initSeqNumberForLiveStream(
+ int32_t &firstSeqNumberInPlaylist,
+ int32_t &lastSeqNumberInPlaylist);
bool initDownloadState(
AString &uri,
sp<AMessage> &itemMeta,
diff --git a/media/libstagefright/id3/Android.mk b/media/libstagefright/id3/Android.mk
index bd2e459..19ada73 100644
--- a/media/libstagefright/id3/Android.mk
+++ b/media/libstagefright/id3/Android.mk
@@ -5,7 +5,6 @@
ID3.cpp
LOCAL_CFLAGS += -Werror -Wall
-LOCAL_CLANG := true
LOCAL_SANITIZE := unsigned-integer-overflow signed-integer-overflow
LOCAL_SHARED_LIBRARIES := libmedia
@@ -22,7 +21,6 @@
testid3.cpp
LOCAL_CFLAGS += -Werror -Wall
-LOCAL_CLANG := true
LOCAL_SHARED_LIBRARIES := \
libstagefright libutils liblog libbinder libstagefright_foundation
diff --git a/media/libstagefright/id3/ID3.cpp b/media/libstagefright/id3/ID3.cpp
index a0eb630..9105084 100644
--- a/media/libstagefright/id3/ID3.cpp
+++ b/media/libstagefright/id3/ID3.cpp
@@ -517,7 +517,7 @@
if (mOffset == 126 || mOffset == 127) {
// Special treatment for the track number and genre.
char tmp[16];
- sprintf(tmp, "%d", (int)*frameData);
+ snprintf(tmp, sizeof(tmp), "%d", (int)*frameData);
id->setTo(tmp);
return;
diff --git a/media/libstagefright/include/DataConverter.h b/media/libstagefright/include/DataConverter.h
index 8d67921..60ebad1 100644
--- a/media/libstagefright/include/DataConverter.h
+++ b/media/libstagefright/include/DataConverter.h
@@ -24,18 +24,18 @@
namespace android {
-struct ABuffer;
+class MediaCodecBuffer;
// DataConverter base class, defaults to memcpy
struct DataConverter : public RefBase {
virtual size_t sourceSize(size_t targetSize); // will clamp to SIZE_MAX
virtual size_t targetSize(size_t sourceSize); // will clamp to SIZE_MAX
- status_t convert(const sp<ABuffer> &source, sp<ABuffer> &target);
+ status_t convert(const sp<MediaCodecBuffer> &source, sp<MediaCodecBuffer> &target);
virtual ~DataConverter();
protected:
- virtual status_t safeConvert(const sp<ABuffer> &source, sp<ABuffer> &target);
+ virtual status_t safeConvert(const sp<MediaCodecBuffer> &source, sp<MediaCodecBuffer> &target);
};
// SampleConverterBase uses a ratio to calculate the source and target sizes
@@ -45,7 +45,7 @@
virtual size_t targetSize(size_t sourceSize);
protected:
- virtual status_t safeConvert(const sp<ABuffer> &source, sp<ABuffer> &target) = 0;
+ virtual status_t safeConvert(const sp<MediaCodecBuffer> &source, sp<MediaCodecBuffer> &target) = 0;
// sourceSize = sourceSampleSize / targetSampleSize * targetSize
SampleConverterBase(uint32_t sourceSampleSize, uint32_t targetSampleSize)
@@ -61,7 +61,7 @@
static AudioConverter *Create(AudioEncoding source, AudioEncoding target);
protected:
- virtual status_t safeConvert(const sp<ABuffer> &src, sp<ABuffer> &tgt);
+ virtual status_t safeConvert(const sp<MediaCodecBuffer> &source, sp<MediaCodecBuffer> &target);
private:
AudioConverter(
diff --git a/media/libstagefright/include/MPEG4Extractor.h b/media/libstagefright/include/MPEG4Extractor.h
index 89ad137..fa05886 100644
--- a/media/libstagefright/include/MPEG4Extractor.h
+++ b/media/libstagefright/include/MPEG4Extractor.h
@@ -140,6 +140,9 @@
Track *findTrackByMimePrefix(const char *mimePrefix);
+ status_t parseAC3SampleEntry(off64_t offset);
+ status_t parseAC3SpecificBox(off64_t offset, uint16_t sampleRate);
+
MPEG4Extractor(const MPEG4Extractor &);
MPEG4Extractor &operator=(const MPEG4Extractor &);
};
diff --git a/media/libstagefright/include/OMX.h b/media/libstagefright/include/OMX.h
index 6c073f0..9edd0de 100644
--- a/media/libstagefright/include/OMX.h
+++ b/media/libstagefright/include/OMX.h
@@ -31,158 +31,28 @@
public:
OMX();
- virtual bool livesLocally(node_id node, pid_t pid);
-
virtual status_t listNodes(List<ComponentInfo> *list);
virtual status_t allocateNode(
const char *name, const sp<IOMXObserver> &observer,
- sp<IBinder> *nodeBinder,
- node_id *node);
-
- virtual status_t freeNode(node_id node);
-
- virtual status_t sendCommand(
- node_id node, OMX_COMMANDTYPE cmd, OMX_S32 param);
-
- virtual status_t getParameter(
- node_id node, OMX_INDEXTYPE index,
- void *params, size_t size);
-
- virtual status_t setParameter(
- node_id node, OMX_INDEXTYPE index,
- const void *params, size_t size);
-
- virtual status_t getConfig(
- node_id node, OMX_INDEXTYPE index,
- void *params, size_t size);
-
- virtual status_t setConfig(
- node_id node, OMX_INDEXTYPE index,
- const void *params, size_t size);
-
- virtual status_t getState(
- node_id node, OMX_STATETYPE* state);
-
- virtual status_t enableNativeBuffers(
- node_id node, OMX_U32 port_index, OMX_BOOL graphic, OMX_BOOL enable);
-
- virtual status_t getGraphicBufferUsage(
- node_id node, OMX_U32 port_index, OMX_U32* usage);
-
- virtual status_t storeMetaDataInBuffers(
- node_id node, OMX_U32 port_index, OMX_BOOL enable, MetadataBufferType *type);
-
- virtual status_t prepareForAdaptivePlayback(
- node_id node, OMX_U32 portIndex, OMX_BOOL enable,
- OMX_U32 max_frame_width, OMX_U32 max_frame_height);
-
- virtual status_t configureVideoTunnelMode(
- node_id node, OMX_U32 portIndex, OMX_BOOL tunneled,
- OMX_U32 audioHwSync, native_handle_t **sidebandHandle);
-
- virtual status_t useBuffer(
- node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms,
- buffer_id *buffer, OMX_U32 allottedSize);
-
- virtual status_t useGraphicBuffer(
- node_id node, OMX_U32 port_index,
- const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer);
-
- virtual status_t updateGraphicBufferInMeta(
- node_id node, OMX_U32 port_index,
- const sp<GraphicBuffer> &graphicBuffer, buffer_id buffer);
-
- virtual status_t updateNativeHandleInMeta(
- node_id node, OMX_U32 port_index,
- const sp<NativeHandle> &nativeHandle, buffer_id buffer);
+ sp<IOMXNode> *omxNode);
virtual status_t createInputSurface(
- node_id node, OMX_U32 port_index, android_dataspace dataSpace,
sp<IGraphicBufferProducer> *bufferProducer,
- MetadataBufferType *type);
-
- virtual status_t createPersistentInputSurface(
- sp<IGraphicBufferProducer> *bufferProducer,
- sp<IGraphicBufferConsumer> *bufferConsumer);
-
- virtual status_t setInputSurface(
- node_id node, OMX_U32 port_index,
- const sp<IGraphicBufferConsumer> &bufferConsumer,
- MetadataBufferType *type);
-
- virtual status_t signalEndOfInputStream(node_id node);
-
- virtual status_t allocateSecureBuffer(
- node_id node, OMX_U32 port_index, size_t size,
- buffer_id *buffer, void **buffer_data, sp<NativeHandle> *native_handle);
-
- virtual status_t allocateBufferWithBackup(
- node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms,
- buffer_id *buffer, OMX_U32 allottedSize);
-
- virtual status_t freeBuffer(
- node_id node, OMX_U32 port_index, buffer_id buffer);
-
- virtual status_t fillBuffer(node_id node, buffer_id buffer, int fenceFd);
-
- virtual status_t emptyBuffer(
- node_id node,
- buffer_id buffer,
- OMX_U32 range_offset, OMX_U32 range_length,
- OMX_U32 flags, OMX_TICKS timestamp, int fenceFd);
-
- virtual status_t getExtensionIndex(
- node_id node,
- const char *parameter_name,
- OMX_INDEXTYPE *index);
-
- virtual status_t setInternalOption(
- node_id node,
- OMX_U32 port_index,
- InternalOptionType type,
- const void *data,
- size_t size);
+ sp<IGraphicBufferSource> *bufferSource);
virtual void binderDied(const wp<IBinder> &the_late_who);
- virtual bool isSecure(IOMX::node_id node);
-
- OMX_ERRORTYPE OnEvent(
- node_id node,
- OMX_IN OMX_EVENTTYPE eEvent,
- OMX_IN OMX_U32 nData1,
- OMX_IN OMX_U32 nData2,
- OMX_IN OMX_PTR pEventData);
-
- OMX_ERRORTYPE OnEmptyBufferDone(
- node_id node, buffer_id buffer, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer, int fenceFd);
-
- OMX_ERRORTYPE OnFillBufferDone(
- node_id node, buffer_id buffer, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer, int fenceFd);
-
- void invalidateNodeID(node_id node);
+ status_t freeNode(const sp<OMXNodeInstance>& instance);
protected:
virtual ~OMX();
private:
- struct CallbackDispatcherThread;
- struct CallbackDispatcher;
-
Mutex mLock;
OMXMaster *mMaster;
- size_t mNodeCounter;
- KeyedVector<wp<IBinder>, OMXNodeInstance *> mLiveNodes;
- KeyedVector<node_id, OMXNodeInstance *> mNodeIDToInstance;
- KeyedVector<node_id, sp<CallbackDispatcher> > mDispatchers;
-
- node_id makeNodeID_l(OMXNodeInstance *instance);
- OMXNodeInstance *findInstance(node_id node);
- sp<CallbackDispatcher> findDispatcher(node_id node);
-
- void invalidateNodeID_l(node_id node);
+ KeyedVector<wp<IBinder>, sp<OMXNodeInstance> > mLiveNodes;
OMX(const OMX &);
OMX &operator=(const OMX &);
diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/include/OMXNodeInstance.h
index 6411267..85ee4ee 100644
--- a/media/libstagefright/include/OMXNodeInstance.h
+++ b/media/libstagefright/include/OMXNodeInstance.h
@@ -25,22 +25,21 @@
#include <utils/threads.h>
namespace android {
-
+class IOMXBufferSource;
class IOMXObserver;
struct OMXMaster;
-class GraphicBufferSource;
+class OMXBuffer;
-struct OMXNodeInstance {
+struct OMXNodeInstance : public BnOMXNode {
OMXNodeInstance(
OMX *owner, const sp<IOMXObserver> &observer, const char *name);
- void setHandle(OMX::node_id node_id, OMX_HANDLETYPE handle);
+ void setHandle(OMX_HANDLETYPE handle);
- OMX *owner();
+ OMX_HANDLETYPE handle();
sp<IOMXObserver> observer();
- OMX::node_id nodeID();
- status_t freeNode(OMXMaster *master);
+ status_t freeNode() override;
status_t sendCommand(OMX_COMMANDTYPE cmd, OMX_S32 param);
status_t getParameter(OMX_INDEXTYPE index, void *params, size_t size);
@@ -51,8 +50,6 @@
status_t getConfig(OMX_INDEXTYPE index, void *params, size_t size);
status_t setConfig(OMX_INDEXTYPE index, const void *params, size_t size);
- status_t getState(OMX_STATETYPE* state);
-
status_t enableNativeBuffers(OMX_U32 portIndex, OMX_BOOL graphic, OMX_BOOL enable);
status_t getGraphicBufferUsage(OMX_U32 portIndex, OMX_U32* usage);
@@ -68,101 +65,66 @@
OMX_U32 portIndex, OMX_BOOL tunneled,
OMX_U32 audioHwSync, native_handle_t **sidebandHandle);
- status_t useBuffer(
- OMX_U32 portIndex, const sp<IMemory> ¶ms,
- OMX::buffer_id *buffer, OMX_U32 allottedSize);
-
- status_t useGraphicBuffer(
- OMX_U32 portIndex, const sp<GraphicBuffer> &graphicBuffer,
- OMX::buffer_id *buffer);
-
- status_t updateGraphicBufferInMeta(
- OMX_U32 portIndex, const sp<GraphicBuffer> &graphicBuffer,
- OMX::buffer_id buffer);
-
- status_t updateNativeHandleInMeta(
- OMX_U32 portIndex, const sp<NativeHandle> &nativeHandle,
- OMX::buffer_id buffer);
-
- status_t createInputSurface(
- OMX_U32 portIndex, android_dataspace dataSpace,
- sp<IGraphicBufferProducer> *bufferProducer,
- MetadataBufferType *type);
-
- static status_t createPersistentInputSurface(
- sp<IGraphicBufferProducer> *bufferProducer,
- sp<IGraphicBufferConsumer> *bufferConsumer);
-
status_t setInputSurface(
- OMX_U32 portIndex, const sp<IGraphicBufferConsumer> &bufferConsumer,
- MetadataBufferType *type);
-
- status_t signalEndOfInputStream();
-
- void signalEvent(OMX_EVENTTYPE event, OMX_U32 arg1, OMX_U32 arg2);
+ const sp<IOMXBufferSource> &bufferSource);
status_t allocateSecureBuffer(
OMX_U32 portIndex, size_t size, OMX::buffer_id *buffer,
void **buffer_data, sp<NativeHandle> *native_handle);
- status_t allocateBufferWithBackup(
- OMX_U32 portIndex, const sp<IMemory> ¶ms,
- OMX::buffer_id *buffer, OMX_U32 allottedSize);
+ status_t useBuffer(
+ OMX_U32 portIndex, const OMXBuffer &omxBuf, buffer_id *buffer);
- status_t freeBuffer(OMX_U32 portIndex, OMX::buffer_id buffer);
+ status_t freeBuffer(
+ OMX_U32 portIndex, buffer_id buffer);
- status_t fillBuffer(OMX::buffer_id buffer, int fenceFd);
+ status_t fillBuffer(
+ buffer_id buffer, const OMXBuffer &omxBuf, int fenceFd = -1);
status_t emptyBuffer(
- OMX::buffer_id buffer,
- OMX_U32 rangeOffset, OMX_U32 rangeLength,
- OMX_U32 flags, OMX_TICKS timestamp, int fenceFd);
-
- status_t emptyGraphicBuffer(
- OMX_BUFFERHEADERTYPE *header, const sp<GraphicBuffer> &buffer,
- OMX_U32 flags, OMX_TICKS timestamp, int fenceFd);
+ buffer_id buffer, const OMXBuffer &omxBuf,
+ OMX_U32 flags, OMX_TICKS timestamp, int fenceFd = -1);
status_t getExtensionIndex(
const char *parameterName, OMX_INDEXTYPE *index);
- status_t setInternalOption(
- OMX_U32 portIndex,
- IOMX::InternalOptionType type,
- const void *data,
- size_t size);
+ status_t setQuirks(OMX_U32 quirks);
bool isSecure() const {
return mIsSecure;
}
+ status_t dispatchMessage(const omx_message &msg) override;
+
// handles messages and removes them from the list
void onMessages(std::list<omx_message> &messages);
- void onMessage(const omx_message &msg);
- void onObserverDied(OMXMaster *master);
- void onGetHandleFailed();
+ void onObserverDied();
void onEvent(OMX_EVENTTYPE event, OMX_U32 arg1, OMX_U32 arg2);
static OMX_CALLBACKTYPE kCallbacks;
private:
+ struct CallbackDispatcherThread;
+ struct CallbackDispatcher;
+
Mutex mLock;
OMX *mOwner;
- OMX::node_id mNodeID;
OMX_HANDLETYPE mHandle;
sp<IOMXObserver> mObserver;
+ sp<CallbackDispatcher> mDispatcher;
bool mDying;
bool mSailed; // configuration is set (no more meta-mode changes)
bool mQueriedProhibitedExtensions;
SortedVector<OMX_INDEXTYPE> mProhibitedExtensions;
bool mIsSecure;
+ uint32_t mQuirks;
- // Lock only covers mGraphicBufferSource. We can't always use mLock
- // because of rare instances where we'd end up locking it recursively.
- Mutex mGraphicBufferSourceLock;
- // Access this through getGraphicBufferSource().
- sp<GraphicBufferSource> mGraphicBufferSource;
-
+ // Lock only covers mOMXBufferSource and mOMXOutputListener. We can't always
+ // use mLock because of rare instances where we'd end up locking it recursively.
+ Mutex mOMXBufferSourceLock;
+ // Access these through getBufferSource().
+ sp<IOMXBufferSource> mOMXBufferSource;
struct ActiveBuffer {
OMX_U32 mPortIndex;
@@ -184,6 +146,14 @@
};
SecureBufferType mSecureBufferType[2];
+ // The following are OMX parameters managed by us (instead of the component)
+ // OMX_IndexParamMaxFrameDurationForBitrateControl
+ KeyedVector<int64_t, int64_t> mOriginalTimeUs;
+ bool mRestorePtsFailed;
+ int64_t mMaxTimestampGapUs;
+ int64_t mPrevOriginalTimeUs;
+ int64_t mPrevModifiedTimeUs;
+
// For debug support
char *mName;
int DEBUG;
@@ -210,9 +180,43 @@
bool isProhibitedIndex_l(OMX_INDEXTYPE index);
+ status_t useBuffer(
+ OMX_U32 portIndex, const sp<IMemory> &params,
+ OMX::buffer_id *buffer, OMX_U32 allottedSize);
+
+ status_t useBuffer_l(
+ OMX_U32 portIndex, const sp<IMemory> &params,
+ OMX::buffer_id *buffer, OMX_U32 allottedSize);
+
+ status_t useGraphicBuffer(
+ OMX_U32 portIndex, const sp<GraphicBuffer> &graphicBuffer,
+ OMX::buffer_id *buffer);
+
+ status_t useGraphicBufferWithMetadata_l(
+ OMX_U32 portIndex, const sp<GraphicBuffer> &graphicBuffer,
+ OMX::buffer_id *buffer);
+
status_t useGraphicBuffer2_l(
OMX_U32 portIndex, const sp<GraphicBuffer> &graphicBuffer,
OMX::buffer_id *buffer);
+
+ status_t emptyBuffer(
+ OMX::buffer_id buffer,
+ OMX_U32 rangeOffset, OMX_U32 rangeLength,
+ OMX_U32 flags, OMX_TICKS timestamp, int fenceFd);
+
+ status_t emptyGraphicBuffer(
+ OMX::buffer_id buffer, const sp<GraphicBuffer> &graphicBuffer,
+ OMX_U32 flags, OMX_TICKS timestamp, int fenceFd);
+
+ status_t emptyNativeHandleBuffer(
+ OMX::buffer_id buffer, const sp<NativeHandle> &nativeHandle,
+ OMX_U32 flags, OMX_TICKS timestamp, int fenceFd);
+
+ status_t emptyBuffer_l(
+ OMX_BUFFERHEADERTYPE *header,
+ OMX_U32 flags, OMX_TICKS timestamp, intptr_t debugAddr, int fenceFd);
+
static OMX_ERRORTYPE OnEvent(
OMX_IN OMX_HANDLETYPE hComponent,
OMX_IN OMX_PTR pAppData,
@@ -243,10 +247,6 @@
int retrieveFenceFromMeta_l(
OMX_BUFFERHEADERTYPE *header, OMX_U32 portIndex);
- status_t emptyBuffer_l(
- OMX_BUFFERHEADERTYPE *header,
- OMX_U32 flags, OMX_TICKS timestamp, intptr_t debugAddr, int fenceFd);
-
// Updates the graphic buffer handle in the metadata buffer for |buffer| and |header| to
// |graphicBuffer|'s handle. If |updateCodecBuffer| is true, the update will happen in
// the actual codec buffer (use this if not using emptyBuffer (with no _l) later to
@@ -254,18 +254,27 @@
// buffer.)
status_t updateGraphicBufferInMeta_l(
OMX_U32 portIndex, const sp<GraphicBuffer> &graphicBuffer,
- OMX::buffer_id buffer, OMX_BUFFERHEADERTYPE *header, bool updateCodecBuffer);
+ OMX::buffer_id buffer, OMX_BUFFERHEADERTYPE *header);
- status_t createGraphicBufferSource(
- OMX_U32 portIndex, const sp<IGraphicBufferConsumer> &consumer /* nullable */,
- MetadataBufferType *type);
- sp<GraphicBufferSource> getGraphicBufferSource();
- void setGraphicBufferSource(const sp<GraphicBufferSource> &bufferSource);
+ status_t updateNativeHandleInMeta_l(
+ OMX_U32 portIndex, const sp<NativeHandle> &nativeHandle,
+ OMX::buffer_id buffer, OMX_BUFFERHEADERTYPE *header);
+
+ sp<IOMXBufferSource> getBufferSource();
+ void setBufferSource(const sp<IOMXBufferSource> &bufferSource);
+ // Called when omx_message::FILL_BUFFER_DONE is received. (Currently the
+ // buffer source will fix timestamp in the header if needed.)
+ void codecBufferFilled(omx_message &msg);
// Handles |msg|, and may modify it. Returns true iff completely handled it and
// |msg| does not need to be sent to the event listener.
bool handleMessage(omx_message &msg);
+ bool handleDataSpaceChanged(omx_message &msg);
+
+ status_t setMaxPtsGapUs(const void *params, size_t size);
+ int64_t getCodecTimestamp(OMX_TICKS timestamp);
+
OMXNodeInstance(const OMXNodeInstance &);
OMXNodeInstance &operator=(const OMXNodeInstance &);
};
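The node's buffer API above now funnels everything through plain buffer_id handles plus an OMXBuffer descriptor (useBuffer/fillBuffer/emptyBuffer), replacing the header-pointer and allottedSize variants. A minimal sketch of the intended call shape follows; it assumes the caller already holds an OMXBuffer (shared-memory, graphic or preset backed) and a node in the right state, and is illustrative rather than code from this patch.

    // Illustrative only: drive one buffer through the new OMXBuffer-based calls.
    // 'portIndex' and 'omxBuf' are supplied by the caller; error handling is minimal.
    status_t runOneBuffer(OMXNodeInstance *node, OMX_U32 portIndex, const OMXBuffer &omxBuf) {
        IOMX::buffer_id id;
        status_t err = node->useBuffer(portIndex, omxBuf, &id);
        if (err != OK) {
            return err;
        }
        // Output port: ask the component to fill it (fenceFd defaults to -1).
        err = node->fillBuffer(id, omxBuf);
        if (err != OK) {
            node->freeBuffer(portIndex, id);
            return err;
        }
        // ... wait for the FILL_BUFFER_DONE omx_message before touching 'id' again,
        // then hand the slot back to the component.
        return node->freeBuffer(portIndex, id);
    }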
diff --git a/media/libstagefright/include/SecureBuffer.h b/media/libstagefright/include/SecureBuffer.h
new file mode 100644
index 0000000..cf7933a
--- /dev/null
+++ b/media/libstagefright/include/SecureBuffer.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2016, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SECURE_BUFFER_H_
+
+#define SECURE_BUFFER_H_
+
+#include <media/ICrypto.h>
+#include <media/MediaCodecBuffer.h>
+
+namespace android {
+
+class NativeHandle;
+
+/**
+ * Secure MediaCodecBuffer implementation.
+ *
+ * For classes outside of MediaCodec, this buffer is an opaque buffer only with
+ * the size information. For decryption, it exposes underlying handle/pointer
+ * and its type, which can be fed to ICrypto::decrypt().
+ */
+class SecureBuffer : public MediaCodecBuffer {
+public:
+ SecureBuffer(const sp<AMessage> &format, const void *ptr, size_t size);
+ SecureBuffer(const sp<AMessage> &format, const sp<NativeHandle> &handle, size_t size);
+
+ virtual ~SecureBuffer() = default;
+
+ void *getDestinationPointer();
+ ICrypto::DestinationType getDestinationType();
+
+private:
+ SecureBuffer() = delete;
+
+ const void *mPointer;
+ const sp<NativeHandle> mHandle;
+};
+
+} // namespace android
+
+#endif // SECURE_BUFFER_H_
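For orientation (not part of the patch): the two constructors above correspond to the two destination kinds a decryptor can write into, a raw pointer or an opaque native handle. A hedged sketch of building and querying both flavors; the format entries, sizes, and include paths are placeholders.

    // Hypothetical sketch: constructing and querying the two SecureBuffer flavors.
    #include <media/stagefright/foundation/AMessage.h>
    #include <utils/NativeHandle.h>
    #include "SecureBuffer.h"   // in-tree: media/libstagefright/include/

    static void sketchSecureBuffers(const sp<NativeHandle> &secureHandle, void *opaquePtr) {
        sp<AMessage> format = new AMessage;      // normally supplied by MediaCodec
        format->setString("mime", "video/avc");  // placeholder format entry

        // Non-secure variant: the decryptor writes through a raw pointer.
        sp<SecureBuffer> plain = new SecureBuffer(format, opaquePtr, 4096 /* size */);

        // Secure variant: the destination is an opaque native handle.
        sp<SecureBuffer> secure = new SecureBuffer(format, secureHandle, 4096 /* size */);

        // Decrypt-side code only needs the destination and its type:
        void *dst = secure->getDestinationPointer();
        ICrypto::DestinationType dstType = secure->getDestinationType();
        (void)plain; (void)dst; (void)dstType;
    }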
diff --git a/media/libstagefright/include/SharedMemoryBuffer.h b/media/libstagefright/include/SharedMemoryBuffer.h
new file mode 100644
index 0000000..1d7f7a6
--- /dev/null
+++ b/media/libstagefright/include/SharedMemoryBuffer.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2016, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SHARED_MEMORY_BUFFER_H_
+
+#define SHARED_MEMORY_BUFFER_H_
+
+#include <media/MediaCodecBuffer.h>
+
+namespace android {
+
+struct AMessage;
+class IMemory;
+
+/**
+ * MediaCodecBuffer implementation based on IMemory.
+ */
+class SharedMemoryBuffer : public MediaCodecBuffer {
+public:
+ SharedMemoryBuffer(const sp<AMessage> &format, const sp<IMemory> &mem);
+
+ virtual ~SharedMemoryBuffer() = default;
+
+private:
+ SharedMemoryBuffer() = delete;
+
+ const sp<IMemory> mMemory;
+};
+
+} // namespace android
+
+#endif // SHARED_MEMORY_BUFFER_H_
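A small usage sketch (assumption-level, not from this patch): allocating the backing IMemory with a MemoryDealer and wrapping it as a codec buffer.

    // Illustrative only: create a shared-memory backed codec buffer.
    #include <binder/MemoryDealer.h>
    #include <media/stagefright/foundation/AMessage.h>
    #include "SharedMemoryBuffer.h"   // in-tree: media/libstagefright/include/

    static sp<SharedMemoryBuffer> makeSharedInputBuffer(size_t capacity) {
        sp<AMessage> format = new AMessage;   // real callers pass the codec's input format
        sp<MemoryDealer> dealer = new MemoryDealer(capacity, "SharedMemoryBufferSketch");
        sp<IMemory> mem = dealer->allocate(capacity);
        if (mem == NULL) {
            return NULL;                      // allocation failed
        }
        return new SharedMemoryBuffer(format, mem);
    }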
diff --git a/media/libstagefright/include/avc_utils.h b/media/libstagefright/include/avc_utils.h
index b2ef360..d05906a 100644
--- a/media/libstagefright/include/avc_utils.h
+++ b/media/libstagefright/include/avc_utils.h
@@ -18,6 +18,7 @@
#define AVC_UTILS_H_
+#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <utils/Errors.h>
@@ -84,6 +85,7 @@
sp<MetaData> MakeAVCCodecSpecificData(const sp<ABuffer> &accessUnit);
bool IsIDR(const sp<ABuffer> &accessUnit);
+bool IsIDR(const sp<MediaCodecBuffer> &accessUnit);
bool IsAVCReferenceFrame(const sp<ABuffer> &accessUnit);
uint32_t FindAVCLayerId(const uint8_t *data, size_t size);
diff --git a/media/libstagefright/matroska/Android.mk b/media/libstagefright/matroska/Android.mk
index 89d7ff2..7dd0863 100644
--- a/media/libstagefright/matroska/Android.mk
+++ b/media/libstagefright/matroska/Android.mk
@@ -10,7 +10,6 @@
$(TOP)/frameworks/av/media/libstagefright/include \
LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-LOCAL_CLANG := true
LOCAL_SANITIZE := unsigned-integer-overflow signed-integer-overflow
LOCAL_SHARED_LIBRARIES := libmedia
diff --git a/media/libstagefright/matroska/MatroskaExtractor.cpp b/media/libstagefright/matroska/MatroskaExtractor.cpp
index 8e82486..a974671 100644
--- a/media/libstagefright/matroska/MatroskaExtractor.cpp
+++ b/media/libstagefright/matroska/MatroskaExtractor.cpp
@@ -377,6 +377,16 @@
*actualFrameTimeUs = -1ll;
+ if (seekTimeUs > INT64_MAX / 1000ll ||
+ seekTimeUs < INT64_MIN / 1000ll ||
+ (mExtractor->mSeekPreRollNs > 0 &&
+ (seekTimeUs * 1000ll) < INT64_MIN + mExtractor->mSeekPreRollNs) ||
+ (mExtractor->mSeekPreRollNs < 0 &&
+ (seekTimeUs * 1000ll) > INT64_MAX + mExtractor->mSeekPreRollNs)) {
+ ALOGE("cannot seek to %lld", (long long) seekTimeUs);
+ return;
+ }
+
const int64_t seekTimeNs = seekTimeUs * 1000ll - mExtractor->mSeekPreRollNs;
mkvparser::Segment* const pSegment = mExtractor->mSegment;
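The new guard above rejects seek targets whose microsecond-to-nanosecond conversion, or the subsequent pre-roll subtraction, would wrap int64_t (INT64_MAX / 1000 is roughly 9.2e15 microseconds, so only corrupt requests trip it). Restated as a standalone predicate for readability; a sketch, not code from the patch.

    #include <stdint.h>

    // True when seekTimeUs * 1000 - seekPreRollNs can be computed without overflow.
    static bool seekTargetIsRepresentable(int64_t seekTimeUs, int64_t seekPreRollNs) {
        if (seekTimeUs > INT64_MAX / 1000ll || seekTimeUs < INT64_MIN / 1000ll) {
            return false;                              // the *1000 itself would wrap
        }
        int64_t seekTimeNs = seekTimeUs * 1000ll;
        if (seekPreRollNs > 0 && seekTimeNs < INT64_MIN + seekPreRollNs) {
            return false;                              // subtraction would underflow
        }
        if (seekPreRollNs < 0 && seekTimeNs > INT64_MAX + seekPreRollNs) {
            return false;                              // subtraction would overflow
        }
        return true;
    }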
@@ -605,16 +615,27 @@
int64_t timeUs = mBlockIter.blockTimeUs();
for (int i = 0; i < block->GetFrameCount(); ++i) {
+ MatroskaExtractor::TrackInfo *trackInfo = &mExtractor->mTracks.editItemAt(mTrackIndex);
const mkvparser::Block::Frame &frame = block->GetFrame(i);
+ size_t len = frame.len;
+ if (SIZE_MAX - len < trackInfo->mHeaderLen) {
+ return ERROR_MALFORMED;
+ }
- MediaBuffer *mbuf = new MediaBuffer(frame.len);
+ len += trackInfo->mHeaderLen;
+ MediaBuffer *mbuf = new MediaBuffer(len);
+ uint8_t *data = static_cast<uint8_t *>(mbuf->data());
+ if (trackInfo->mHeader) {
+ memcpy(data, trackInfo->mHeader, trackInfo->mHeaderLen);
+ }
+
mbuf->meta_data()->setInt64(kKeyTime, timeUs);
mbuf->meta_data()->setInt32(kKeyIsSyncFrame, block->IsKey());
- status_t err = frame.Read(mExtractor->mReader, static_cast<uint8_t *>(mbuf->data()));
+ status_t err = frame.Read(mExtractor->mReader, data + trackInfo->mHeaderLen);
if (err == OK
&& mExtractor->mIsWebm
- && mExtractor->mTracks.itemAt(mTrackIndex).mEncrypted) {
+ && trackInfo->mEncrypted) {
err = setWebmBlockCryptoInfo(mbuf);
}
@@ -1164,6 +1185,42 @@
}
}
+status_t MatroskaExtractor::initTrackInfo(
+ const mkvparser::Track *track, const sp<MetaData> &meta, TrackInfo *trackInfo) {
+ trackInfo->mTrackNum = track->GetNumber();
+ trackInfo->mMeta = meta;
+ trackInfo->mExtractor = this;
+ trackInfo->mEncrypted = false;
+ trackInfo->mHeader = NULL;
+ trackInfo->mHeaderLen = 0;
+
+ for(size_t i = 0; i < track->GetContentEncodingCount(); i++) {
+ const mkvparser::ContentEncoding *encoding = track->GetContentEncodingByIndex(i);
+ for(size_t j = 0; j < encoding->GetEncryptionCount(); j++) {
+ const mkvparser::ContentEncoding::ContentEncryption *encryption;
+ encryption = encoding->GetEncryptionByIndex(j);
+ trackInfo->mMeta->setData(kKeyCryptoKey, 0, encryption->key_id, encryption->key_id_len);
+ trackInfo->mEncrypted = true;
+ break;
+ }
+
+ for(size_t j = 0; j < encoding->GetCompressionCount(); j++) {
+ const mkvparser::ContentEncoding::ContentCompression *compression;
+ compression = encoding->GetCompressionByIndex(j);
+ ALOGV("compression algo %llu settings_len %lld",
+ compression->algo, compression->settings_len);
+ if (compression->algo == 3
+ && compression->settings
+ && compression->settings_len > 0) {
+ trackInfo->mHeader = compression->settings;
+ trackInfo->mHeaderLen = compression->settings_len;
+ }
+ }
+ }
+
+ return OK;
+}
+
void MatroskaExtractor::addTracks() {
const mkvparser::Tracks *tracks = mSegment->GetTracks();
@@ -1288,21 +1345,7 @@
mTracks.push();
size_t n = mTracks.size() - 1;
TrackInfo *trackInfo = &mTracks.editItemAt(n);
- trackInfo->mTrackNum = track->GetNumber();
- trackInfo->mMeta = meta;
- trackInfo->mExtractor = this;
-
- trackInfo->mEncrypted = false;
- for(size_t i = 0; i < track->GetContentEncodingCount() && !trackInfo->mEncrypted; i++) {
- const mkvparser::ContentEncoding *encoding = track->GetContentEncodingByIndex(i);
- for(size_t j = 0; j < encoding->GetEncryptionCount(); j++) {
- const mkvparser::ContentEncoding::ContentEncryption *encryption;
- encryption = encoding->GetEncryptionByIndex(j);
- meta->setData(kKeyCryptoKey, 0, encryption->key_id, encryption->key_id_len);
- trackInfo->mEncrypted = true;
- break;
- }
- }
+ initTrackInfo(track, meta, trackInfo);
if (!strcmp("V_MPEG4/ISO/AVC", codecID) && codecPrivateSize == 0) {
// Attempt to recover from AVC track without codec private data
diff --git a/media/libstagefright/matroska/MatroskaExtractor.h b/media/libstagefright/matroska/MatroskaExtractor.h
index 588bd39..19775ce 100644
--- a/media/libstagefright/matroska/MatroskaExtractor.h
+++ b/media/libstagefright/matroska/MatroskaExtractor.h
@@ -63,6 +63,12 @@
const MatroskaExtractor *mExtractor;
Vector<const mkvparser::CuePoint*> mCuePoints;
+ // mHeader points to memory managed by mkvparser;
+ // mHeader would be deleted when mSegment is deleted
+ // in ~MatroskaExtractor.
+ unsigned char *mHeader;
+ size_t mHeaderLen;
+
const mkvparser::Track* getTrack() const;
const mkvparser::CuePoint::TrackPosition *find(long long timeNs) const;
};
@@ -79,6 +85,7 @@
int64_t mSeekPreRollNs;
status_t synthesizeAVCC(TrackInfo *trackInfo, size_t index);
+ status_t initTrackInfo(const mkvparser::Track *track, const sp<MetaData> &meta, TrackInfo *trackInfo);
void addTracks();
void findThumbnails();
void getColorInformation(const mkvparser::VideoTrack *vtrack, sp<MetaData> &meta);
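initTrackInfo() above records ContentCompression settings when the algorithm is 3 (header stripping), and the block read path prepends those stored bytes to every frame it returns, guarding the length addition against overflow. The reassembly, restated with plain buffers as a hedged sketch:

    #include <stdint.h>
    #include <string.h>
    #include <vector>

    // Illustrative only: rebuild one header-stripped frame. 'header'/'headerLen'
    // play the role of TrackInfo::mHeader/mHeaderLen; 'payload' is the block
    // frame as stored in the file.
    static bool reassembleStrippedFrame(
            const unsigned char *header, size_t headerLen,
            const unsigned char *payload, size_t payloadLen,
            std::vector<unsigned char> *out) {
        if (SIZE_MAX - payloadLen < headerLen) {
            return false;                     // mirrors the ERROR_MALFORMED check
        }
        out->resize(headerLen + payloadLen);
        if (headerLen > 0) {
            memcpy(out->data(), header, headerLen);
        }
        memcpy(out->data() + headerLen, payload, payloadLen);
        return true;
    }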
diff --git a/media/libstagefright/mpeg2ts/Android.mk b/media/libstagefright/mpeg2ts/Android.mk
index 66722a8..92c386c 100644
--- a/media/libstagefright/mpeg2ts/Android.mk
+++ b/media/libstagefright/mpeg2ts/Android.mk
@@ -14,7 +14,6 @@
$(TOP)/frameworks/native/include/media/openmax
LOCAL_CFLAGS += -Werror -Wall
-LOCAL_CLANG := true
LOCAL_SANITIZE := unsigned-integer-overflow signed-integer-overflow
LOCAL_SHARED_LIBRARIES := libmedia
diff --git a/media/libstagefright/omx/Android.mk b/media/libstagefright/omx/Android.mk
index 8cbfc0d..72f8043 100644
--- a/media/libstagefright/omx/Android.mk
+++ b/media/libstagefright/omx/Android.mk
@@ -32,7 +32,6 @@
LOCAL_MODULE:= libstagefright_omx
LOCAL_CFLAGS += -Werror -Wall
-LOCAL_CLANG := true
LOCAL_SANITIZE := unsigned-integer-overflow signed-integer-overflow
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp
index 267f24d..4909100 100644
--- a/media/libstagefright/omx/GraphicBufferSource.cpp
+++ b/media/libstagefright/omx/GraphicBufferSource.cpp
@@ -23,10 +23,6 @@
#define STRINGIFY_ENUMS // for asString in HardwareAPI.h/VideoAPI.h
#include "GraphicBufferSource.h"
-#include "OMXUtils.h"
-
-#include <OMX_Core.h>
-#include <OMX_IndexExt.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/ColorUtils.h>
@@ -35,108 +31,33 @@
#include <ui/GraphicBuffer.h>
#include <gui/BufferItem.h>
#include <HardwareAPI.h>
+#include "omx/OMXUtils.h"
+#include <OMX_Component.h>
+#include <OMX_IndexExt.h>
+#include "OMXBuffer.h"
#include <inttypes.h>
#include "FrameDropper.h"
namespace android {
-static const bool EXTRA_CHECK = true;
-
static const OMX_U32 kPortIndexInput = 0;
-GraphicBufferSource::PersistentProxyListener::PersistentProxyListener(
- const wp<IGraphicBufferConsumer> &consumer,
- const wp<ConsumerListener>& consumerListener) :
- mConsumerListener(consumerListener),
- mConsumer(consumer) {}
-
-GraphicBufferSource::PersistentProxyListener::~PersistentProxyListener() {}
-
-void GraphicBufferSource::PersistentProxyListener::onFrameAvailable(
- const BufferItem& item) {
- sp<ConsumerListener> listener(mConsumerListener.promote());
- if (listener != NULL) {
- listener->onFrameAvailable(item);
- } else {
- sp<IGraphicBufferConsumer> consumer(mConsumer.promote());
- if (consumer == NULL) {
- return;
- }
- BufferItem bi;
- status_t err = consumer->acquireBuffer(&bi, 0);
- if (err != OK) {
- ALOGE("PersistentProxyListener: acquireBuffer failed (%d)", err);
- return;
- }
-
- err = consumer->detachBuffer(bi.mSlot);
- if (err != OK) {
- ALOGE("PersistentProxyListener: detachBuffer failed (%d)", err);
- return;
- }
-
- err = consumer->attachBuffer(&bi.mSlot, bi.mGraphicBuffer);
- if (err != OK) {
- ALOGE("PersistentProxyListener: attachBuffer failed (%d)", err);
- return;
- }
-
- err = consumer->releaseBuffer(bi.mSlot, 0,
- EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, bi.mFence);
- if (err != OK) {
- ALOGE("PersistentProxyListener: releaseBuffer failed (%d)", err);
- }
- }
-}
-
-void GraphicBufferSource::PersistentProxyListener::onFrameReplaced(
- const BufferItem& item) {
- sp<ConsumerListener> listener(mConsumerListener.promote());
- if (listener != NULL) {
- listener->onFrameReplaced(item);
- }
-}
-
-void GraphicBufferSource::PersistentProxyListener::onBuffersReleased() {
- sp<ConsumerListener> listener(mConsumerListener.promote());
- if (listener != NULL) {
- listener->onBuffersReleased();
- }
-}
-
-void GraphicBufferSource::PersistentProxyListener::onSidebandStreamChanged() {
- sp<ConsumerListener> listener(mConsumerListener.promote());
- if (listener != NULL) {
- listener->onSidebandStreamChanged();
- }
-}
-
-GraphicBufferSource::GraphicBufferSource(
- OMXNodeInstance* nodeInstance,
- uint32_t bufferWidth,
- uint32_t bufferHeight,
- uint32_t bufferCount,
- uint32_t consumerUsage,
- const sp<IGraphicBufferConsumer> &consumer) :
+GraphicBufferSource::GraphicBufferSource() :
mInitCheck(UNKNOWN_ERROR),
- mNodeInstance(nodeInstance),
mExecuting(false),
mSuspended(false),
mLastDataSpace(HAL_DATASPACE_UNKNOWN),
- mIsPersistent(false),
- mConsumer(consumer),
mNumFramesAvailable(0),
mNumBufferAcquired(0),
mEndOfStream(false),
mEndOfStreamSent(false),
- mMaxTimestampGapUs(-1ll),
mPrevOriginalTimeUs(-1ll),
- mPrevModifiedTimeUs(-1ll),
mSkipFramesBeforeNs(-1ll),
mRepeatAfterUs(-1ll),
mRepeatLastFrameGeneration(0),
mRepeatLastFrameTimestamp(-1ll),
+ mRepeatLastFrameCount(0),
mLatestBufferId(-1),
mLatestBufferFrameNum(0),
mLatestBufferFence(Fence::NO_FENCE),
@@ -146,48 +67,21 @@
mPrevCaptureUs(-1ll),
mPrevFrameUs(-1ll),
mInputBufferTimeOffsetUs(0ll) {
+ ALOGV("GraphicBufferSource");
- ALOGV("GraphicBufferSource w=%u h=%u c=%u",
- bufferWidth, bufferHeight, bufferCount);
+ String8 name("GraphicBufferSource");
- if (bufferWidth == 0 || bufferHeight == 0) {
- ALOGE("Invalid dimensions %ux%u", bufferWidth, bufferHeight);
- mInitCheck = BAD_VALUE;
- return;
- }
+ BufferQueue::createBufferQueue(&mProducer, &mConsumer);
+ mConsumer->setConsumerName(name);
- if (mConsumer == NULL) {
- String8 name("GraphicBufferSource");
-
- BufferQueue::createBufferQueue(&mProducer, &mConsumer);
- mConsumer->setConsumerName(name);
-
- // use consumer usage bits queried from encoder, but always add HW_VIDEO_ENCODER
- // for backward compatibility.
- consumerUsage |= GRALLOC_USAGE_HW_VIDEO_ENCODER;
- mConsumer->setConsumerUsageBits(consumerUsage);
-
- mInitCheck = mConsumer->setMaxAcquiredBufferCount(bufferCount);
- if (mInitCheck != NO_ERROR) {
- ALOGE("Unable to set BQ max acquired buffer count to %u: %d",
- bufferCount, mInitCheck);
- return;
- }
- } else {
- mIsPersistent = true;
- }
- mConsumer->setDefaultBufferSize(bufferWidth, bufferHeight);
// Note that we can't create an sp<...>(this) in a ctor that will not keep a
// reference once the ctor ends, as that would cause the refcount of 'this'
// dropping to 0 at the end of the ctor. Since all we need is a wp<...>
// that's what we create.
- wp<BufferQueue::ConsumerListener> listener = static_cast<BufferQueue::ConsumerListener*>(this);
- sp<IConsumerListener> proxy;
- if (!mIsPersistent) {
- proxy = new BufferQueue::ProxyConsumerListener(listener);
- } else {
- proxy = new PersistentProxyListener(mConsumer, listener);
- }
+ wp<BufferQueue::ConsumerListener> listener =
+ static_cast<BufferQueue::ConsumerListener*>(this);
+ sp<IConsumerListener> proxy =
+ new BufferQueue::ProxyConsumerListener(listener);
mInitCheck = mConsumer->consumerConnect(proxy, false);
if (mInitCheck != NO_ERROR) {
@@ -202,15 +96,14 @@
}
GraphicBufferSource::~GraphicBufferSource() {
+ ALOGV("~GraphicBufferSource");
if (mLatestBufferId >= 0) {
- releaseBuffer(
- mLatestBufferId, mLatestBufferFrameNum,
- mBufferSlot[mLatestBufferId], mLatestBufferFence);
+ releaseBuffer(mLatestBufferId, mLatestBufferFrameNum, mLatestBufferFence);
}
if (mNumBufferAcquired != 0) {
ALOGW("potential buffer leak (acquired %d)", mNumBufferAcquired);
}
- if (mConsumer != NULL && !mIsPersistent) {
+ if (mConsumer != NULL) {
status_t err = mConsumer->consumerDisconnect();
if (err != NO_ERROR) {
ALOGW("consumerDisconnect failed: %d", err);
@@ -218,7 +111,7 @@
}
}
-void GraphicBufferSource::omxExecuting() {
+Status GraphicBufferSource::onOmxExecuting() {
Mutex::Autolock autoLock(mMutex);
ALOGV("--> executing; avail=%zu, codec vec size=%zd",
mNumFramesAvailable, mCodecBuffers.size());
@@ -267,9 +160,11 @@
msg->post(mRepeatAfterUs);
}
}
+
+ return Status::ok();
}
-void GraphicBufferSource::omxIdle() {
+Status GraphicBufferSource::onOmxIdle() {
ALOGV("omxIdle");
Mutex::Autolock autoLock(mMutex);
@@ -279,9 +174,10 @@
// not loaded->idle.
mExecuting = false;
}
+ return Status::ok();
}
-void GraphicBufferSource::omxLoaded(){
+Status GraphicBufferSource::onOmxLoaded(){
Mutex::Autolock autoLock(mMutex);
if (!mExecuting) {
// This can happen if something failed very early.
@@ -296,53 +192,77 @@
mLooper.clear();
}
- ALOGV("--> loaded; avail=%zu eos=%d eosSent=%d",
- mNumFramesAvailable, mEndOfStream, mEndOfStreamSent);
+ ALOGV("--> loaded; avail=%zu eos=%d eosSent=%d acquired=%d",
+ mNumFramesAvailable, mEndOfStream, mEndOfStreamSent, mNumBufferAcquired);
- // Codec is no longer executing. Discard all codec-related state.
+ // Codec is no longer executing. Releasing all buffers to bq.
+ for (int i = (int)mCodecBuffers.size() - 1; i >= 0; --i) {
+ if (mCodecBuffers[i].mGraphicBuffer != NULL) {
+ int id = mCodecBuffers[i].mSlot;
+ if (id != mLatestBufferId) {
+ ALOGV("releasing buffer for codec: slot=%d, useCount=%d, latest=%d",
+ id, mBufferUseCount[id], mLatestBufferId);
+ sp<Fence> fence = new Fence(-1);
+ releaseBuffer(id, mCodecBuffers[i].mFrameNumber, fence);
+ mBufferUseCount[id] = 0;
+ }
+ }
+ }
+ // Also release the latest buffer
+ if (mLatestBufferId >= 0) {
+ releaseBuffer(mLatestBufferId, mLatestBufferFrameNum, mLatestBufferFence);
+ mBufferUseCount[mLatestBufferId] = 0;
+ mLatestBufferId = -1;
+ }
+
mCodecBuffers.clear();
- // TODO: scan mCodecBuffers to verify that all mGraphicBuffer entries
- // are null; complain if not
-
+ mOMXNode.clear();
mExecuting = false;
+
+ return Status::ok();
}
-void GraphicBufferSource::addCodecBuffer(OMX_BUFFERHEADERTYPE* header) {
+Status GraphicBufferSource::onInputBufferAdded(int32_t bufferID) {
Mutex::Autolock autoLock(mMutex);
if (mExecuting) {
// This should never happen -- buffers can only be allocated when
// transitioning from "loaded" to "idle".
ALOGE("addCodecBuffer: buffer added while executing");
- return;
+ return Status::fromServiceSpecificError(INVALID_OPERATION);
}
- ALOGV("addCodecBuffer h=%p size=%" PRIu32 " p=%p",
- header, header->nAllocLen, header->pBuffer);
+ ALOGV("addCodecBuffer: bufferID=%u", bufferID);
+
CodecBuffer codecBuffer;
- codecBuffer.mHeader = header;
+ codecBuffer.mBufferID = bufferID;
mCodecBuffers.add(codecBuffer);
+ return Status::ok();
}
-void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header, int fenceFd) {
+Status GraphicBufferSource::onInputBufferEmptied(
+ int32_t bufferID, const OMXFenceParcelable &fenceParcel) {
+ int fenceFd = fenceParcel.get();
+
Mutex::Autolock autoLock(mMutex);
if (!mExecuting) {
- return;
- }
-
- int cbi = findMatchingCodecBuffer_l(header);
- if (cbi < 0) {
- // This should never happen.
- ALOGE("codecBufferEmptied: buffer not recognized (h=%p)", header);
if (fenceFd >= 0) {
::close(fenceFd);
}
- return;
+ return Status::fromServiceSpecificError(INVALID_OPERATION);
}
- ALOGV("codecBufferEmptied h=%p size=%" PRIu32 " filled=%" PRIu32 " p=%p",
- header, header->nAllocLen, header->nFilledLen,
- header->pBuffer);
+ int cbi = findMatchingCodecBuffer_l(bufferID);
+ if (cbi < 0) {
+ // This should never happen.
+ ALOGE("codecBufferEmptied: buffer not recognized (bufferID=%u)", bufferID);
+ if (fenceFd >= 0) {
+ ::close(fenceFd);
+ }
+ return Status::fromServiceSpecificError(BAD_VALUE);
+ }
+
+ ALOGV("codecBufferEmptied: bufferID=%u, cbi=%d", bufferID, cbi);
CodecBuffer& codecBuffer(mCodecBuffers.editItemAt(cbi));
// header->nFilledLen may not be the original value, so we can't compare
@@ -361,33 +281,7 @@
if (fenceFd >= 0) {
::close(fenceFd);
}
- return;
- }
-
- if (EXTRA_CHECK && header->nAllocLen >= sizeof(MetadataBufferType)) {
- // Pull the graphic buffer handle back out of the buffer, and confirm
- // that it matches expectations.
- OMX_U8* data = header->pBuffer;
- MetadataBufferType type = *(MetadataBufferType *)data;
- if (type == kMetadataBufferTypeGrallocSource
- && header->nAllocLen >= sizeof(VideoGrallocMetadata)) {
- VideoGrallocMetadata &grallocMeta = *(VideoGrallocMetadata *)data;
- if (grallocMeta.pHandle != codecBuffer.mGraphicBuffer->handle) {
- // should never happen
- ALOGE("codecBufferEmptied: buffer's handle is %p, expected %p",
- grallocMeta.pHandle, codecBuffer.mGraphicBuffer->handle);
- CHECK(!"codecBufferEmptied: mismatched buffer");
- }
- } else if (type == kMetadataBufferTypeANWBuffer
- && header->nAllocLen >= sizeof(VideoNativeMetadata)) {
- VideoNativeMetadata &nativeMeta = *(VideoNativeMetadata *)data;
- if (nativeMeta.pBuffer != codecBuffer.mGraphicBuffer->getNativeBuffer()) {
- // should never happen
- ALOGE("codecBufferEmptied: buffer is %p, expected %p",
- nativeMeta.pBuffer, codecBuffer.mGraphicBuffer->getNativeBuffer());
- CHECK(!"codecBufferEmptied: mismatched buffer");
- }
- }
+ return Status::fromServiceSpecificError(BAD_VALUE);
}
// Find matching entry in our cached copy of the BufferQueue slots.
@@ -399,19 +293,19 @@
mBufferSlot[id]->handle == codecBuffer.mGraphicBuffer->handle) {
mBufferUseCount[id]--;
- ALOGV("codecBufferEmptied: slot=%d, cbi=%d, useCount=%d, handle=%p",
- id, cbi, mBufferUseCount[id], mBufferSlot[id]->handle);
-
if (mBufferUseCount[id] < 0) {
ALOGW("mBufferUseCount for bq slot %d < 0 (=%d)", id, mBufferUseCount[id]);
mBufferUseCount[id] = 0;
}
if (id != mLatestBufferId && mBufferUseCount[id] == 0) {
- releaseBuffer(id, codecBuffer.mFrameNumber, mBufferSlot[id], fence);
+ releaseBuffer(id, codecBuffer.mFrameNumber, fence);
}
+ ALOGV("codecBufferEmptied: slot=%d, cbi=%d, useCount=%d, acquired=%d, handle=%p",
+ id, cbi, mBufferUseCount[id], mNumBufferAcquired, mBufferSlot[id]->handle);
} else {
- ALOGV("codecBufferEmptied: no match for emptied buffer in cbi %d",
- cbi);
+ ALOGV("codecBufferEmptied: no match for emptied buffer, "
+ "slot=%d, cbi=%d, useCount=%d, acquired=%d",
+ id, cbi, mBufferUseCount[id], mNumBufferAcquired);
// we will not reuse codec buffer, so there is no need to wait for fence
}
@@ -439,75 +333,7 @@
mRepeatBufferDeferred = false;
}
- return;
-}
-
-void GraphicBufferSource::codecBufferFilled(OMX_BUFFERHEADERTYPE* header) {
- Mutex::Autolock autoLock(mMutex);
-
- if (mMaxTimestampGapUs > 0ll
- && !(header->nFlags & OMX_BUFFERFLAG_CODECCONFIG)) {
- ssize_t index = mOriginalTimeUs.indexOfKey(header->nTimeStamp);
- if (index >= 0) {
- ALOGV("OUT timestamp: %lld -> %lld",
- static_cast<long long>(header->nTimeStamp),
- static_cast<long long>(mOriginalTimeUs[index]));
- header->nTimeStamp = mOriginalTimeUs[index];
- mOriginalTimeUs.removeItemsAt(index);
- } else {
- // giving up the effort as encoder doesn't appear to preserve pts
- ALOGW("giving up limiting timestamp gap (pts = %lld)",
- header->nTimeStamp);
- mMaxTimestampGapUs = -1ll;
- }
- if (mOriginalTimeUs.size() > BufferQueue::NUM_BUFFER_SLOTS) {
- // something terribly wrong must have happened, giving up...
- ALOGE("mOriginalTimeUs has too many entries (%zu)",
- mOriginalTimeUs.size());
- mMaxTimestampGapUs = -1ll;
- }
- }
-}
-
-void GraphicBufferSource::suspend(bool suspend) {
- Mutex::Autolock autoLock(mMutex);
-
- if (suspend) {
- mSuspended = true;
-
- while (mNumFramesAvailable > 0) {
- BufferItem item;
- status_t err = mConsumer->acquireBuffer(&item, 0);
-
- if (err == BufferQueue::NO_BUFFER_AVAILABLE) {
- // shouldn't happen.
- ALOGW("suspend: frame was not available");
- break;
- } else if (err != OK) {
- ALOGW("suspend: acquireBuffer returned err=%d", err);
- break;
- }
-
- ++mNumBufferAcquired;
- --mNumFramesAvailable;
-
- releaseBuffer(item.mSlot, item.mFrameNumber,
- item.mGraphicBuffer, item.mFence);
- }
- return;
- }
-
- mSuspended = false;
-
- if (mExecuting && mNumFramesAvailable == 0 && mRepeatBufferDeferred) {
- if (repeatLatestBuffer_l()) {
- ALOGV("suspend/deferred repeatLatestBuffer_l SUCCESS");
-
- mRepeatBufferDeferred = false;
- } else {
- ALOGV("suspend/deferred repeatLatestBuffer_l FAILURE");
- }
- }
+ return Status::ok();
}
void GraphicBufferSource::onDataSpaceChanged_l(
@@ -516,67 +342,15 @@
mLastDataSpace = dataSpace;
if (ColorUtils::convertDataSpaceToV0(dataSpace)) {
- ColorAspects aspects = mColorAspects; // initially requested aspects
+ omx_message msg;
+ msg.type = omx_message::EVENT;
+ msg.fenceFd = -1;
+ msg.u.event_data.event = OMX_EventDataSpaceChanged;
+ msg.u.event_data.data1 = mLastDataSpace;
+ msg.u.event_data.data2 = ColorUtils::packToU32(mColorAspects);
+ msg.u.event_data.data3 = pixelFormat;
- // request color aspects to encode
- OMX_INDEXTYPE index;
- status_t err = mNodeInstance->getExtensionIndex(
- "OMX.google.android.index.describeColorAspects", &index);
- if (err == OK) {
- // V0 dataspace
- DescribeColorAspectsParams params;
- InitOMXParams(&params);
- params.nPortIndex = kPortIndexInput;
- params.nDataSpace = mLastDataSpace;
- params.nPixelFormat = pixelFormat;
- params.bDataSpaceChanged = OMX_TRUE;
- params.sAspects = mColorAspects;
-
- err = mNodeInstance->getConfig(index, &params, sizeof(params));
- if (err == OK) {
- aspects = params.sAspects;
- ALOGD("Codec resolved it to (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
- params.sAspects.mRange, asString(params.sAspects.mRange),
- params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
- params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
- params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
- err, asString(err));
- } else {
- params.sAspects = aspects;
- err = OK;
- }
- params.bDataSpaceChanged = OMX_FALSE;
- for (int triesLeft = 2; --triesLeft >= 0; ) {
- status_t err = mNodeInstance->setConfig(index, &params, sizeof(params));
- if (err == OK) {
- err = mNodeInstance->getConfig(index, &params, sizeof(params));
- }
- if (err != OK || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(
- params.sAspects, aspects)) {
- // if we can't set or get color aspects, still communicate dataspace to client
- break;
- }
-
- ALOGW_IF(triesLeft == 0, "Codec repeatedly changed requested ColorAspects.");
- }
- }
-
- ALOGV("Set color aspects to (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
- aspects.mRange, asString(aspects.mRange),
- aspects.mPrimaries, asString(aspects.mPrimaries),
- aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs),
- aspects.mTransfer, asString(aspects.mTransfer),
- err, asString(err));
-
- // signal client that the dataspace has changed; this will update the output format
- // TODO: we should tie this to an output buffer somehow, and signal the change
- // just before the output buffer is returned to the client, but there are many
- // ways this could fail (e.g. flushing), and we are not yet supporting this scenario.
-
- mNodeInstance->signalEvent(
- OMX_EventDataSpaceChanged, dataSpace,
- (aspects.mRange << 24) | (aspects.mPrimaries << 16)
- | (aspects.mMatrixCoeffs << 8) | aspects.mTransfer);
+ mOMXNode->dispatchMessage(msg);
}
}
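onDataSpaceChanged_l() now just packages the new dataspace, the requested color aspects (packed via ColorUtils::packToU32) and the pixel format into an OMX_EventDataSpaceChanged message and hands it to the node. A sketch of how a listener on the other end could unpack it (illustrative, not from this patch; assumes the same headers as GraphicBufferSource.cpp):

    // Illustrative only: unpack the event built above on the receiving side.
    void handleDataSpaceChanged(const omx_message &msg) {
        if (msg.type != omx_message::EVENT
                || msg.u.event_data.event != OMX_EventDataSpaceChanged) {
            return;
        }
        android_dataspace dataSpace = (android_dataspace)msg.u.event_data.data1;
        ColorAspects aspects = ColorUtils::unpackToColorAspects(msg.u.event_data.data2);
        android_pixel_format pixelFormat = (android_pixel_format)msg.u.event_data.data3;
        ALOGV("dataspace changed: dataSpace=%#x pixelFormat=%#x range=%s",
                dataSpace, pixelFormat, asString(aspects.mRange));
    }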
@@ -598,34 +372,19 @@
ALOGV("fillCodecBuffer_l: acquiring buffer, avail=%zu",
mNumFramesAvailable);
BufferItem item;
- status_t err = mConsumer->acquireBuffer(&item, 0);
- if (err == BufferQueue::NO_BUFFER_AVAILABLE) {
- // shouldn't happen
- ALOGW("fillCodecBuffer_l: frame was not available");
- return false;
- } else if (err != OK) {
- // now what? fake end-of-stream?
- ALOGW("fillCodecBuffer_l: acquireBuffer returned err=%d", err);
+ status_t err = acquireBuffer(&item);
+ if (err != OK) {
+ ALOGE("fillCodecBuffer_l: acquireBuffer returned err=%d", err);
return false;
}
- mNumBufferAcquired++;
mNumFramesAvailable--;
- // If this is the first time we're seeing this buffer, add it to our
- // slot table.
- if (item.mGraphicBuffer != NULL) {
- ALOGV("fillCodecBuffer_l: setting mBufferSlot %d", item.mSlot);
- mBufferSlot[item.mSlot] = item.mGraphicBuffer;
- mBufferUseCount[item.mSlot] = 0;
- }
-
if (item.mDataSpace != mLastDataSpace) {
onDataSpaceChanged_l(
item.mDataSpace, (android_pixel_format)mBufferSlot[item.mSlot]->getPixelFormat());
}
-
err = UNKNOWN_ERROR;
// only submit sample if start time is unspecified, or sample
@@ -650,10 +409,18 @@
if (err != OK) {
ALOGV("submitBuffer_l failed, releasing bq slot %d", item.mSlot);
- releaseBuffer(item.mSlot, item.mFrameNumber, item.mGraphicBuffer, item.mFence);
+ releaseBuffer(item.mSlot, item.mFrameNumber, item.mFence);
} else {
- ALOGV("buffer submitted (bq %d, cbi %d)", item.mSlot, cbi);
- setLatestBuffer_l(item, dropped);
+ // Don't set the last buffer id if we're not repeating,
+ // we'll be holding on to the last buffer for nothing.
+ if (mRepeatAfterUs > 0ll) {
+ setLatestBuffer_l(item);
+ }
+ if (!dropped) {
+ ++mBufferUseCount[item.mSlot];
+ }
+ ALOGV("buffer submitted: slot=%d, cbi=%d, useCount=%d, acquired=%d",
+ item.mSlot, cbi, mBufferUseCount[item.mSlot], mNumBufferAcquired);
}
return true;
@@ -722,24 +489,16 @@
return true;
}
-void GraphicBufferSource::setLatestBuffer_l(
- const BufferItem &item, bool dropped) {
- if (mLatestBufferId >= 0) {
- if (mBufferUseCount[mLatestBufferId] == 0) {
- releaseBuffer(mLatestBufferId, mLatestBufferFrameNum,
- mBufferSlot[mLatestBufferId], mLatestBufferFence);
- // mLatestBufferFence will be set to new fence just below
- }
+void GraphicBufferSource::setLatestBuffer_l(const BufferItem &item) {
+ if (mLatestBufferId >= 0 && mBufferUseCount[mLatestBufferId] == 0) {
+ releaseBuffer(mLatestBufferId, mLatestBufferFrameNum, mLatestBufferFence);
+ // mLatestBufferFence will be set to new fence just below
}
mLatestBufferId = item.mSlot;
mLatestBufferFrameNum = item.mFrameNumber;
mRepeatLastFrameTimestamp = item.mTimestamp + mRepeatAfterUs * 1000;
- if (!dropped) {
- ++mBufferUseCount[item.mSlot];
- }
-
ALOGV("setLatestBuffer_l: slot=%d, useCount=%d",
item.mSlot, mBufferUseCount[item.mSlot]);
@@ -754,34 +513,8 @@
}
}
-status_t GraphicBufferSource::signalEndOfInputStream() {
- Mutex::Autolock autoLock(mMutex);
- ALOGV("signalEndOfInputStream: exec=%d avail=%zu eos=%d",
- mExecuting, mNumFramesAvailable, mEndOfStream);
-
- if (mEndOfStream) {
- ALOGE("EOS was already signaled");
- return INVALID_OPERATION;
- }
-
- // Set the end-of-stream flag. If no frames are pending from the
- // BufferQueue, and a codec buffer is available, and we're executing,
- // we initiate the EOS from here. Otherwise, we'll let
- // codecBufferEmptied() (or omxExecuting) do it.
- //
- // Note: if there are no pending frames and all codec buffers are
- // available, we *must* submit the EOS from here or we'll just
- // stall since no future events are expected.
- mEndOfStream = true;
-
- if (mExecuting && mNumFramesAvailable == 0) {
- submitEndOfInputStream_l();
- }
-
- return OK;
-}
-
-int64_t GraphicBufferSource::getTimestamp(const BufferItem &item) {
+bool GraphicBufferSource::getTimestamp(
+ const BufferItem &item, int64_t *codecTimeUs) {
int64_t timeUs = item.mTimestamp / 1000;
timeUs += mInputBufferTimeOffsetUs;
@@ -801,7 +534,7 @@
if (nFrames <= 0) {
// skip this frame as it's too close to previous capture
ALOGV("skipping frame, timeUs %lld", static_cast<long long>(timeUs));
- return -1;
+ return false;
}
mPrevCaptureUs = mPrevCaptureUs + nFrames * mTimePerCaptureUs;
mPrevFrameUs += mTimePerFrameUs * nFrames;
@@ -812,49 +545,29 @@
static_cast<long long>(mPrevCaptureUs),
static_cast<long long>(mPrevFrameUs));
- return mPrevFrameUs;
+ *codecTimeUs = mPrevFrameUs;
+ return true;
} else {
int64_t originalTimeUs = timeUs;
if (originalTimeUs <= mPrevOriginalTimeUs) {
// Drop the frame if it's going backward in time. Bad timestamp
// could disrupt encoder's rate control completely.
ALOGW("Dropping frame that's going backward in time");
- return -1;
- }
-
- if (mMaxTimestampGapUs > 0ll) {
- //TODO: Fix the case when mMaxTimestampGapUs and mTimePerCaptureUs are both set.
-
- /* Cap timestamp gap between adjacent frames to specified max
- *
- * In the scenario of cast mirroring, encoding could be suspended for
- * prolonged periods. Limiting the pts gap to workaround the problem
- * where encoder's rate control logic produces huge frames after a
- * long period of suspension.
- */
- if (mPrevOriginalTimeUs >= 0ll) {
- int64_t timestampGapUs = originalTimeUs - mPrevOriginalTimeUs;
- timeUs = (timestampGapUs < mMaxTimestampGapUs ?
- timestampGapUs : mMaxTimestampGapUs) + mPrevModifiedTimeUs;
- }
- mOriginalTimeUs.add(timeUs, originalTimeUs);
- ALOGV("IN timestamp: %lld -> %lld",
- static_cast<long long>(originalTimeUs),
- static_cast<long long>(timeUs));
+ return false;
}
mPrevOriginalTimeUs = originalTimeUs;
- mPrevModifiedTimeUs = timeUs;
}
- return timeUs;
+ *codecTimeUs = timeUs;
+ return true;
}
status_t GraphicBufferSource::submitBuffer_l(const BufferItem &item, int cbi) {
ALOGV("submitBuffer_l: slot=%d, cbi=%d", item.mSlot, cbi);
- int64_t timeUs = getTimestamp(item);
- if (timeUs < 0ll) {
+ int64_t codecTimeUs;
+ if (!getTimestamp(item, &codecTimeUs)) {
return UNKNOWN_ERROR;
}
@@ -863,19 +576,21 @@
codecBuffer.mSlot = item.mSlot;
codecBuffer.mFrameNumber = item.mFrameNumber;
- OMX_BUFFERHEADERTYPE* header = codecBuffer.mHeader;
- sp<GraphicBuffer> buffer = codecBuffer.mGraphicBuffer;
- status_t err = mNodeInstance->emptyGraphicBuffer(
- header, buffer, OMX_BUFFERFLAG_ENDOFFRAME, timeUs,
- item.mFence->isValid() ? item.mFence->dup() : -1);
+ IOMX::buffer_id bufferID = codecBuffer.mBufferID;
+ const sp<GraphicBuffer> &buffer = codecBuffer.mGraphicBuffer;
+ int fenceID = item.mFence->isValid() ? item.mFence->dup() : -1;
+
+ status_t err = mOMXNode->emptyBuffer(
+ bufferID, buffer, OMX_BUFFERFLAG_ENDOFFRAME, codecTimeUs, fenceID);
+
if (err != OK) {
- ALOGW("WARNING: emptyNativeWindowBuffer failed: 0x%x", err);
+ ALOGW("WARNING: emptyGraphicBuffer failed: 0x%x", err);
codecBuffer.mGraphicBuffer = NULL;
return err;
}
- ALOGV("emptyNativeWindowBuffer succeeded, h=%p p=%p buf=%p bufhandle=%p",
- header, header->pBuffer, buffer->getNativeBuffer(), buffer->handle);
+ ALOGV("emptyGraphicBuffer succeeded, bufferID=%u buf=%p bufhandle=%p",
+ bufferID, buffer->getNativeBuffer(), buffer->handle);
return OK;
}
@@ -896,16 +611,17 @@
// to stick a placeholder into codecBuffer.mGraphicBuffer to mark it as
// in-use.
CodecBuffer& codecBuffer(mCodecBuffers.editItemAt(cbi));
+ IOMX::buffer_id bufferID = codecBuffer.mBufferID;
- OMX_BUFFERHEADERTYPE* header = codecBuffer.mHeader;
- status_t err = mNodeInstance->emptyGraphicBuffer(
- header, NULL /* buffer */, OMX_BUFFERFLAG_ENDOFFRAME | OMX_BUFFERFLAG_EOS,
+ status_t err = mOMXNode->emptyBuffer(
+ bufferID, (sp<GraphicBuffer>)NULL,
+ OMX_BUFFERFLAG_ENDOFFRAME | OMX_BUFFERFLAG_EOS,
0 /* timestamp */, -1 /* fenceFd */);
if (err != OK) {
ALOGW("emptyDirectBuffer EOS failed: 0x%x", err);
} else {
- ALOGV("submitEndOfInputStream_l: buffer submitted, header=%p cbi=%d",
- header, cbi);
+ ALOGV("submitEndOfInputStream_l: buffer submitted, bufferID=%u cbi=%d",
+ bufferID, cbi);
mEndOfStreamSent = true;
}
}
@@ -921,44 +637,48 @@
return -1;
}
-int GraphicBufferSource::findMatchingCodecBuffer_l(
- const OMX_BUFFERHEADERTYPE* header) {
+int GraphicBufferSource::findMatchingCodecBuffer_l(IOMX::buffer_id bufferID) {
for (int i = (int)mCodecBuffers.size() - 1; i>= 0; --i) {
- if (mCodecBuffers[i].mHeader == header) {
+ if (mCodecBuffers[i].mBufferID == bufferID) {
return i;
}
}
return -1;
}
+status_t GraphicBufferSource::acquireBuffer(BufferItem *bi) {
+ status_t err = mConsumer->acquireBuffer(bi, 0);
+ if (err == BufferQueue::NO_BUFFER_AVAILABLE) {
+ // shouldn't happen
+ ALOGW("acquireBuffer: frame was not available");
+ return err;
+ } else if (err != OK) {
+ ALOGW("acquireBuffer: failed with err=%d", err);
+ return err;
+ }
+ // If this is the first time we're seeing this buffer, add it to our
+ // slot table.
+ if (bi->mGraphicBuffer != NULL) {
+ ALOGV("acquireBuffer: setting mBufferSlot %d", bi->mSlot);
+ mBufferSlot[bi->mSlot] = bi->mGraphicBuffer;
+ mBufferUseCount[bi->mSlot] = 0;
+ }
+ mNumBufferAcquired++;
+ return OK;
+}
+
/*
- * Releases an acquired buffer back to the consumer for either persistent
- * or non-persistent surfaces.
+ * Releases an acquired buffer back to the consumer.
*
- * id: buffer slot to release (in persistent case the id might be changed)
+ * id: buffer slot to release
* frameNum: frame number of the frame being released
- * buffer: GraphicBuffer pointer to release (note this must not be & as we
- * will clear the original mBufferSlot in persistent case)
- * Use NOLINT to suppress warning on the copy of 'buffer'.
* fence: fence of the frame being released
*/
void GraphicBufferSource::releaseBuffer(
- int &id, uint64_t frameNum,
- const sp<GraphicBuffer> buffer, const sp<Fence> &fence) { // NOLINT
+ int id, uint64_t frameNum, const sp<Fence> &fence) {
ALOGV("releaseBuffer: slot=%d", id);
- if (mIsPersistent) {
- mConsumer->detachBuffer(id);
- mBufferSlot[id] = NULL;
-
- if (mConsumer->attachBuffer(&id, buffer) == OK) {
- mConsumer->releaseBuffer(
- id, 0, EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, fence);
- }
- } else {
- mConsumer->releaseBuffer(
- id, frameNum, EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, fence);
- }
- id = -1; // invalidate id
+ mConsumer->releaseBuffer(
+ id, frameNum, EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, fence);
mNumBufferAcquired--;
}
@@ -969,7 +689,7 @@
ALOGV("onFrameAvailable exec=%d avail=%zu",
mExecuting, mNumFramesAvailable);
- if (mEndOfStream || mSuspended) {
+ if (mOMXNode == NULL || mEndOfStream || mSuspended) {
if (mEndOfStream) {
// This should only be possible if a new buffer was queued after
// EOS was signaled, i.e. the app is misbehaving.
@@ -980,20 +700,11 @@
}
BufferItem item;
- status_t err = mConsumer->acquireBuffer(&item, 0);
+ status_t err = acquireBuffer(&item);
if (err == OK) {
- mNumBufferAcquired++;
-
- // If this is the first time we're seeing this buffer, add it to our
- // slot table.
- if (item.mGraphicBuffer != NULL) {
- ALOGV("onFrameAvailable: setting mBufferSlot %d", item.mSlot);
- mBufferSlot[item.mSlot] = item.mGraphicBuffer;
- mBufferUseCount[item.mSlot] = 0;
- }
-
- releaseBuffer(item.mSlot, item.mFrameNumber,
- item.mGraphicBuffer, item.mFence);
+ releaseBuffer(item.mSlot, item.mFrameNumber, item.mFence);
+ } else {
+ ALOGE("onFrameAvailable: acquireBuffer returned err=%d", err);
}
return;
}
@@ -1022,6 +733,18 @@
for (int i = 0; i < BufferQueue::NUM_BUFFER_SLOTS; i++) {
if ((slotMask & 0x01) != 0) {
+ // Last buffer (if set) is always acquired even if its use count
+ // is 0, because we could have skipped that frame but kept it for
+ // repeating. Otherwise a buffer is only acquired if use count>0.
+ if (mBufferSlot[i] != NULL &&
+ (mBufferUseCount[i] > 0 || mLatestBufferId == i)) {
+ ALOGV("releasing acquired buffer: slot=%d, useCount=%d, latest=%d",
+ i, mBufferUseCount[i], mLatestBufferId);
+ mNumBufferAcquired--;
+ }
+ if (mLatestBufferId == i) {
+ mLatestBufferId = -1;
+ }
mBufferSlot[i] = NULL;
mBufferUseCount[i] = 0;
}
@@ -1034,95 +757,245 @@
ALOG_ASSERT(false, "GraphicBufferSource can't consume sideband streams");
}
-void GraphicBufferSource::setDefaultDataSpace(android_dataspace dataSpace) {
- // no need for mutex as we are not yet running
- ALOGD("setting dataspace: %#x", dataSpace);
- mConsumer->setDefaultBufferDataSpace(dataSpace);
- mLastDataSpace = dataSpace;
+Status GraphicBufferSource::configure(
+ const sp<IOMXNode>& omxNode, int32_t dataSpace) {
+ if (omxNode == NULL) {
+ return Status::fromServiceSpecificError(BAD_VALUE);
+ }
+
+ // Do setInputSurface() first, the node will try to enable metadata
+ // mode on input, and does necessary error checking. If this fails,
+ // we can't use this input surface on the node.
+ status_t err = omxNode->setInputSurface(this);
+ if (err != NO_ERROR) {
+ ALOGE("Unable to set input surface: %d", err);
+ return Status::fromServiceSpecificError(err);
+ }
+
+ // use consumer usage bits queried from encoder, but always add
+ // HW_VIDEO_ENCODER for backward compatibility.
+ uint32_t consumerUsage;
+ if (omxNode->getParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
+ &consumerUsage, sizeof(consumerUsage)) != OK) {
+ consumerUsage = 0;
+ }
+
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = kPortIndexInput;
+
+ err = omxNode->getParameter(
+ OMX_IndexParamPortDefinition, &def, sizeof(def));
+ if (err != NO_ERROR) {
+ ALOGE("Failed to get port definition: %d", err);
+ return Status::fromServiceSpecificError(UNKNOWN_ERROR);
+ }
+
+ // Call setMaxAcquiredBufferCount without lock.
+ // setMaxAcquiredBufferCount could call back to onBuffersReleased
+ // if the buffer count change results in releasing of existing buffers,
+ // which would lead to deadlock.
+ err = mConsumer->setMaxAcquiredBufferCount(def.nBufferCountActual);
+ if (err != NO_ERROR) {
+ ALOGE("Unable to set BQ max acquired buffer count to %u: %d",
+ def.nBufferCountActual, err);
+ return Status::fromServiceSpecificError(err);
+ }
+
+ {
+ Mutex::Autolock autoLock(mMutex);
+ mOMXNode = omxNode;
+
+ err = mConsumer->setDefaultBufferSize(
+ def.format.video.nFrameWidth,
+ def.format.video.nFrameHeight);
+ if (err != NO_ERROR) {
+ ALOGE("Unable to set BQ default buffer size to %ux%u: %d",
+ def.format.video.nFrameWidth,
+ def.format.video.nFrameHeight,
+ err);
+ return Status::fromServiceSpecificError(err);
+ }
+
+ consumerUsage |= GRALLOC_USAGE_HW_VIDEO_ENCODER;
+ mConsumer->setConsumerUsageBits(consumerUsage);
+
+ // Sets the default buffer data space
+ ALOGD("setting dataspace: %#x, acquired=%d", dataSpace, mNumBufferAcquired);
+ mConsumer->setDefaultBufferDataSpace((android_dataspace)dataSpace);
+ mLastDataSpace = (android_dataspace)dataSpace;
+
+ mExecuting = false;
+ mSuspended = false;
+ mEndOfStream = false;
+ mEndOfStreamSent = false;
+ mPrevOriginalTimeUs = -1ll;
+ mSkipFramesBeforeNs = -1ll;
+ mRepeatAfterUs = -1ll;
+ mRepeatLastFrameGeneration = 0;
+ mRepeatLastFrameTimestamp = -1ll;
+ mRepeatLastFrameCount = 0;
+ mLatestBufferId = -1;
+ mLatestBufferFrameNum = 0;
+ mLatestBufferFence = Fence::NO_FENCE;
+ mRepeatBufferDeferred = false;
+ mTimePerCaptureUs = -1ll;
+ mTimePerFrameUs = -1ll;
+ mPrevCaptureUs = -1ll;
+ mPrevFrameUs = -1ll;
+ mInputBufferTimeOffsetUs = 0;
+ }
+
+ return Status::ok();
}
-status_t GraphicBufferSource::setRepeatPreviousFrameDelayUs(
- int64_t repeatAfterUs) {
+Status GraphicBufferSource::setSuspend(bool suspend) {
+ ALOGV("setSuspend=%d", suspend);
+
+ Mutex::Autolock autoLock(mMutex);
+
+ if (suspend) {
+ mSuspended = true;
+
+ while (mNumFramesAvailable > 0) {
+ BufferItem item;
+ status_t err = acquireBuffer(&item);
+
+ if (err != OK) {
+ ALOGE("setSuspend: acquireBuffer returned err=%d", err);
+ break;
+ }
+
+ --mNumFramesAvailable;
+
+ releaseBuffer(item.mSlot, item.mFrameNumber, item.mFence);
+ }
+ return Status::ok();
+ }
+
+ mSuspended = false;
+
+ if (mExecuting && mNumFramesAvailable == 0 && mRepeatBufferDeferred) {
+ if (repeatLatestBuffer_l()) {
+ ALOGV("suspend/deferred repeatLatestBuffer_l SUCCESS");
+
+ mRepeatBufferDeferred = false;
+ } else {
+ ALOGV("suspend/deferred repeatLatestBuffer_l FAILURE");
+ }
+ }
+ return Status::ok();
+}
+
+Status GraphicBufferSource::setRepeatPreviousFrameDelayUs(int64_t repeatAfterUs) {
+ ALOGV("setRepeatPreviousFrameDelayUs: delayUs=%lld", (long long)repeatAfterUs);
+
Mutex::Autolock autoLock(mMutex);
if (mExecuting || repeatAfterUs <= 0ll) {
- return INVALID_OPERATION;
+ return Status::fromServiceSpecificError(INVALID_OPERATION);
}
mRepeatAfterUs = repeatAfterUs;
-
- return OK;
+ return Status::ok();
}
-status_t GraphicBufferSource::setMaxTimestampGapUs(int64_t maxGapUs) {
- Mutex::Autolock autoLock(mMutex);
-
- if (mExecuting || maxGapUs <= 0ll) {
- return INVALID_OPERATION;
- }
-
- mMaxTimestampGapUs = maxGapUs;
-
- return OK;
-}
-
-status_t GraphicBufferSource::setInputBufferTimeOffset(int64_t timeOffsetUs) {
+Status GraphicBufferSource::setTimeOffsetUs(int64_t timeOffsetUs) {
Mutex::Autolock autoLock(mMutex);
// timeOffsetUs must be negative for adjustment.
if (timeOffsetUs >= 0ll) {
- return INVALID_OPERATION;
+ return Status::fromServiceSpecificError(INVALID_OPERATION);
}
mInputBufferTimeOffsetUs = timeOffsetUs;
- return OK;
+ return Status::ok();
}
-status_t GraphicBufferSource::setMaxFps(float maxFps) {
+Status GraphicBufferSource::setMaxFps(float maxFps) {
+ ALOGV("setMaxFps: maxFps=%lld", (long long)maxFps);
+
Mutex::Autolock autoLock(mMutex);
if (mExecuting) {
- return INVALID_OPERATION;
+ return Status::fromServiceSpecificError(INVALID_OPERATION);
}
mFrameDropper = new FrameDropper();
status_t err = mFrameDropper->setMaxFrameRate(maxFps);
if (err != OK) {
mFrameDropper.clear();
- return err;
+ return Status::fromServiceSpecificError(err);
}
- return OK;
+ return Status::ok();
}
-void GraphicBufferSource::setSkipFramesBeforeUs(int64_t skipFramesBeforeUs) {
+Status GraphicBufferSource::setStartTimeUs(int64_t skipFramesBeforeUs) {
+ ALOGV("setStartTimeUs: skipFramesBeforeUs=%lld", (long long)skipFramesBeforeUs);
+
Mutex::Autolock autoLock(mMutex);
mSkipFramesBeforeNs =
(skipFramesBeforeUs > 0) ? (skipFramesBeforeUs * 1000) : -1ll;
+
+ return Status::ok();
}
-status_t GraphicBufferSource::setTimeLapseConfig(const TimeLapseConfig &config) {
+Status GraphicBufferSource::setTimeLapseConfig(int64_t timePerFrameUs, int64_t timePerCaptureUs) {
+ ALOGV("setTimeLapseConfig: timePerFrameUs=%lld, timePerCaptureUs=%lld",
+ (long long)timePerFrameUs, (long long)timePerCaptureUs);
+
Mutex::Autolock autoLock(mMutex);
- if (mExecuting || config.mTimePerFrameUs <= 0ll || config.mTimePerCaptureUs <= 0ll) {
- return INVALID_OPERATION;
+ if (mExecuting || timePerFrameUs <= 0ll || timePerCaptureUs <= 0ll) {
+ return Status::fromServiceSpecificError(INVALID_OPERATION);
}
- mTimePerFrameUs = config.mTimePerFrameUs;
- mTimePerCaptureUs = config.mTimePerCaptureUs;
+ mTimePerFrameUs = timePerFrameUs;
+ mTimePerCaptureUs = timePerCaptureUs;
- return OK;
+ return Status::ok();
}
-void GraphicBufferSource::setColorAspects(const ColorAspects &aspects) {
+Status GraphicBufferSource::setColorAspects(int32_t aspectsPacked) {
Mutex::Autolock autoLock(mMutex);
- mColorAspects = aspects;
+ mColorAspects = ColorUtils::unpackToColorAspects(aspectsPacked);
ALOGD("requesting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s))",
- aspects.mRange, asString(aspects.mRange),
- aspects.mPrimaries, asString(aspects.mPrimaries),
- aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs),
- aspects.mTransfer, asString(aspects.mTransfer));
+ mColorAspects.mRange, asString(mColorAspects.mRange),
+ mColorAspects.mPrimaries, asString(mColorAspects.mPrimaries),
+ mColorAspects.mMatrixCoeffs, asString(mColorAspects.mMatrixCoeffs),
+ mColorAspects.mTransfer, asString(mColorAspects.mTransfer));
+
+ return Status::ok();
+}
+
+Status GraphicBufferSource::signalEndOfInputStream() {
+ Mutex::Autolock autoLock(mMutex);
+ ALOGV("signalEndOfInputStream: exec=%d avail=%zu eos=%d",
+ mExecuting, mNumFramesAvailable, mEndOfStream);
+
+ if (mEndOfStream) {
+ ALOGE("EOS was already signaled");
+ return Status::fromStatusT(INVALID_OPERATION);
+ }
+
+ // Set the end-of-stream flag. If no frames are pending from the
+ // BufferQueue, and a codec buffer is available, and we're executing,
+ // we initiate the EOS from here. Otherwise, we'll let
+ // codecBufferEmptied() (or omxExecuting) do it.
+ //
+ // Note: if there are no pending frames and all codec buffers are
+ // available, we *must* submit the EOS from here or we'll just
+ // stall since no future events are expected.
+ mEndOfStream = true;
+
+ if (mExecuting && mNumFramesAvailable == 0) {
+ submitEndOfInputStream_l();
+ }
+
+ return Status::ok();
}
void GraphicBufferSource::onMessageReceived(const sp<AMessage> &msg) {
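Taken together, the new binder surface replaces the old constructor arguments (width/height/count/usage) with a configure() step against the node plus per-feature setters. A hedged end-to-end sketch of the client side; the dataspace and timing values are examples only, not taken from this patch.

    // Illustrative only: wire a new GraphicBufferSource to an already-allocated
    // encoder node. Error handling is reduced to the essentials.
    status_t wireInputSurface(const sp<IOMXNode> &omxNode) {
        sp<GraphicBufferSource> source = new GraphicBufferSource();

        // configure() calls omxNode->setInputSurface(this), reads the input port
        // definition, and sizes/parameterizes the BufferQueue accordingly.
        Status status = source->configure(omxNode, HAL_DATASPACE_BT709);
        if (!status.isOk()) {
            return UNKNOWN_ERROR;
        }

        // Optional knobs; those that check mExecuting must be set before the
        // node transitions to Executing.
        source->setRepeatPreviousFrameDelayUs(100000);   // re-queue last frame after 100 ms
        source->setTimeLapseConfig(
                33333 /* timePerFrameUs */, 1000000 /* timePerCaptureUs */);  // ~30x time lapse
        source->setStartTimeUs(systemTime(SYSTEM_TIME_MONOTONIC) / 1000);

        // ... producer queues frames via the source's IGraphicBufferProducer ...
        source->signalEndOfInputStream();
        return OK;
    }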
diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/GraphicBufferSource.h
index aa4ceb3..80fe078 100644
--- a/media/libstagefright/omx/GraphicBufferSource.h
+++ b/media/libstagefright/omx/GraphicBufferSource.h
@@ -22,15 +22,20 @@
#include <gui/BufferQueue.h>
#include <utils/RefBase.h>
-#include <OMX_Core.h>
#include <VideoAPI.h>
-#include "../include/OMXNodeInstance.h"
+#include <media/IOMX.h>
+#include <media/OMXFenceParcelable.h>
#include <media/stagefright/foundation/ABase.h>
#include <media/stagefright/foundation/AHandlerReflector.h>
#include <media/stagefright/foundation/ALooper.h>
+#include <android/BnGraphicBufferSource.h>
+#include <android/BnOMXBufferSource.h>
+
namespace android {
+using ::android::binder::Status;
+
struct FrameDropper;
/*
@@ -49,16 +54,11 @@
* before the codec is in the "executing" state, so we need to queue
* things up until we're ready to go.
*/
-class GraphicBufferSource : public BufferQueue::ConsumerListener {
+class GraphicBufferSource : public BnGraphicBufferSource,
+ public BnOMXBufferSource,
+ public BufferQueue::ConsumerListener {
public:
- GraphicBufferSource(
- OMXNodeInstance* nodeInstance,
- uint32_t bufferWidth,
- uint32_t bufferHeight,
- uint32_t bufferCount,
- uint32_t consumerUsage,
- const sp<IGraphicBufferConsumer> &consumer = NULL
- );
+ GraphicBufferSource();
virtual ~GraphicBufferSource();
@@ -74,44 +74,42 @@
return mProducer;
}
- // Sets the default buffer data space
- void setDefaultDataSpace(android_dataspace dataSpace);
-
// This is called when OMX transitions to OMX_StateExecuting, which means
// we can start handing it buffers. If we already have buffers of data
// sitting in the BufferQueue, this will send them to the codec.
- void omxExecuting();
+ Status onOmxExecuting() override;
// This is called when OMX transitions to OMX_StateIdle, indicating that
// the codec is meant to return all buffers back to the client for them
// to be freed. Do NOT submit any more buffers to the component.
- void omxIdle();
+ Status onOmxIdle() override;
// This is called when OMX transitions to OMX_StateLoaded, indicating that
// we are shutting down.
- void omxLoaded();
+ Status onOmxLoaded() override;
// A "codec buffer", i.e. a buffer that can be used to pass data into
// the encoder, has been allocated. (This call does not call back into
// OMXNodeInstance.)
- void addCodecBuffer(OMX_BUFFERHEADERTYPE* header);
+ Status onInputBufferAdded(int32_t bufferID) override;
// Called from OnEmptyBufferDone. If we have a BQ buffer available,
// fill it with a new frame of data; otherwise, just mark it as available.
- void codecBufferEmptied(OMX_BUFFERHEADERTYPE* header, int fenceFd);
+ Status onInputBufferEmptied(
+ int32_t bufferID, const OMXFenceParcelable& fenceParcel) override;
- // Called when omx_message::FILL_BUFFER_DONE is received. (Currently the
- // buffer source will fix timestamp in the header if needed.)
- void codecBufferFilled(OMX_BUFFERHEADERTYPE* header);
+ // Configure the buffer source to be used with an OMX node with the default
+ // data space.
+ Status configure(const sp<IOMXNode>& omxNode, int32_t dataSpace) override;
// This is called after the last input frame has been submitted. We
// need to submit an empty buffer with the EOS flag set. If we don't
// have a codec buffer ready, we just set the mEndOfStream flag.
- status_t signalEndOfInputStream();
+ Status signalEndOfInputStream() override;
// If suspend is true, all incoming buffers (including those currently
// in the BufferQueue) will be discarded until the suspension is lifted.
- void suspend(bool suspend);
+ Status setSuspend(bool suspend) override;
// Specifies the interval after which we requeue the buffer previously
// queued to the encoder. This is useful in the case of surface flinger
@@ -120,40 +118,26 @@
// the decoder on the remote end would be unable to decode the latest frame.
// This API must be called before transitioning the encoder to "executing"
// state and once this behaviour is specified it cannot be reset.
- status_t setRepeatPreviousFrameDelayUs(int64_t repeatAfterUs);
-
- // When set, the timestamp fed to the encoder will be modified such that
- // the gap between two adjacent frames is capped at maxGapUs. Timestamp
- // will be restored to the original when the encoded frame is returned to
- // the client.
- // This is to solve a problem in certain real-time streaming case, where
- // encoder's rate control logic produces huge frames after a long period
- // of suspension on input.
- status_t setMaxTimestampGapUs(int64_t maxGapUs);
+ Status setRepeatPreviousFrameDelayUs(int64_t repeatAfterUs) override;
// Sets the input buffer timestamp offset.
// When set, the sample's timestamp will be adjusted with the timeOffsetUs.
- status_t setInputBufferTimeOffset(int64_t timeOffsetUs);
+ Status setTimeOffsetUs(int64_t timeOffsetUs) override;
// When set, the max frame rate fed to the encoder will be capped at maxFps.
- status_t setMaxFps(float maxFps);
-
- struct TimeLapseConfig {
- int64_t mTimePerFrameUs; // the time (us) between two frames for playback
- int64_t mTimePerCaptureUs; // the time (us) between two frames for capture
- };
+ Status setMaxFps(float maxFps) override;
// Sets the time lapse (or slow motion) parameters.
// When set, the sample's timestamp will be modified to playback framerate,
// and capture timestamp will be modified to capture rate.
- status_t setTimeLapseConfig(const TimeLapseConfig &config);
+ Status setTimeLapseConfig(int64_t timePerFrameUs, int64_t timePerCaptureUs) override;
// Sets the start time us (in system time), samples before which should
// be dropped and not submitted to encoder
- void setSkipFramesBeforeUs(int64_t startTimeUs);
+ Status setStartTimeUs(int64_t startTimeUs) override;
// Sets the desired color aspects, e.g. to be used when producer does not specify a dataspace.
- void setColorAspects(const ColorAspects &aspects);
+ Status setColorAspects(int32_t aspectsPacked) override;
protected:
// BufferQueue::ConsumerListener interface, called when a new frame of
@@ -162,48 +146,24 @@
// into the codec buffer, and call Empty[This]Buffer. If we're not yet
// executing or there's no codec buffer available, we just increment
// mNumFramesAvailable and return.
- virtual void onFrameAvailable(const BufferItem& item);
+ void onFrameAvailable(const BufferItem& item) override;
// BufferQueue::ConsumerListener interface, called when the client has
// released one or more GraphicBuffers. We clear out the appropriate
// set of mBufferSlot entries.
- virtual void onBuffersReleased();
+ void onBuffersReleased() override;
// BufferQueue::ConsumerListener interface, called when the client has
// changed the sideband stream. GraphicBufferSource doesn't handle sideband
// streams so this is a no-op (and should never be called).
- virtual void onSidebandStreamChanged();
+ void onSidebandStreamChanged() override;
private:
- // PersistentProxyListener is similar to BufferQueue::ProxyConsumerListener
- // except that it returns (acquire/detach/re-attache/release) buffers
- // in onFrameAvailable() if the actual consumer object is no longer valid.
- //
- // This class is used in persistent input surface case to prevent buffer
- // loss when onFrameAvailable() is received while we don't have a valid
- // consumer around.
- class PersistentProxyListener : public BnConsumerListener {
- public:
- PersistentProxyListener(
- const wp<IGraphicBufferConsumer> &consumer,
- const wp<ConsumerListener>& consumerListener);
- virtual ~PersistentProxyListener();
- virtual void onFrameAvailable(const BufferItem& item) override;
- virtual void onFrameReplaced(const BufferItem& item) override;
- virtual void onBuffersReleased() override;
- virtual void onSidebandStreamChanged() override;
- private:
- // mConsumerListener is a weak reference to the IConsumerListener.
- wp<ConsumerListener> mConsumerListener;
- // mConsumer is a weak reference to the IGraphicBufferConsumer, use
- // a weak ref to avoid circular ref between mConsumer and this class
- wp<IGraphicBufferConsumer> mConsumer;
- };
// Keep track of codec input buffers. They may either be available
// (mGraphicBuffer == NULL) or in use by the codec.
struct CodecBuffer {
- OMX_BUFFERHEADERTYPE* mHeader;
+ IOMX::buffer_id mBufferID;
// buffer producer's frame-number for buffer
uint64_t mFrameNumber;
@@ -224,7 +184,7 @@
}
// Finds the mCodecBuffers entry that matches. Returns -1 if not found.
- int findMatchingCodecBuffer_l(const OMX_BUFFERHEADERTYPE* header);
+ int findMatchingCodecBuffer_l(IOMX::buffer_id bufferID);
// Fills a codec buffer with a frame from the BufferQueue. This must
// only be called when we know that a frame of data is ready (i.e. we're
@@ -243,14 +203,15 @@
// doing anything if we don't have a codec buffer available.
void submitEndOfInputStream_l();
- // Release buffer to the consumer
- void releaseBuffer(
- int &id, uint64_t frameNum,
- const sp<GraphicBuffer> buffer, const sp<Fence> &fence);
+ // Acquire buffer from the consumer
+ status_t acquireBuffer(BufferItem *bi);
- void setLatestBuffer_l(const BufferItem &item, bool dropped);
+ // Release buffer to the consumer
+ void releaseBuffer(int id, uint64_t frameNum, const sp<Fence> &fence);
+
+ void setLatestBuffer_l(const BufferItem &item);
bool repeatLatestBuffer_l();
- int64_t getTimestamp(const BufferItem &item);
+ bool getTimestamp(const BufferItem &item, int64_t *codecTimeUs);
// called when the data space of the input buffer changes
void onDataSpaceChanged_l(android_dataspace dataSpace, android_pixel_format pixelFormat);
@@ -261,8 +222,8 @@
// Used to report constructor failure.
status_t mInitCheck;
- // Pointer back to the object that contains us. We send buffers here.
- OMXNodeInstance* mNodeInstance;
+ // Pointer back to the IOMXNode that created us. We send buffers here.
+ sp<IOMXNode> mOMXNode;
// Set by omxExecuting() / omxIdling().
bool mExecuting;
@@ -275,7 +236,6 @@
// Our BufferQueue interfaces. mProducer is passed to the producer through
// getIGraphicBufferProducer, and mConsumer is used internally to retrieve
// the buffers queued by the producer.
- bool mIsPersistent;
sp<IGraphicBufferProducer> mProducer;
sp<IGraphicBufferConsumer> mConsumer;
@@ -310,10 +270,7 @@
kRepeatLastFrameCount = 10,
};
- KeyedVector<int64_t, int64_t> mOriginalTimeUs;
- int64_t mMaxTimestampGapUs;
int64_t mPrevOriginalTimeUs;
- int64_t mPrevModifiedTimeUs;
int64_t mSkipFramesBeforeNs;
sp<FrameDropper> mFrameDropper;
@@ -342,7 +299,6 @@
int64_t mInputBufferTimeOffsetUs;
- MetadataBufferType mMetadataBufferType;
ColorAspects mColorAspects;
void onMessageReceived(const sp<AMessage> &msg);
diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp
index f7058d7..7907c62 100644
--- a/media/libstagefright/omx/OMX.cpp
+++ b/media/libstagefright/omx/OMX.cpp
@@ -26,149 +26,19 @@
#include "../include/OMXNodeInstance.h"
-#include <binder/IMemory.h>
#include <media/stagefright/foundation/ADebug.h>
-#include <utils/threads.h>
+#include "GraphicBufferSource.h"
#include "OMXMaster.h"
#include "OMXUtils.h"
-#include <OMX_AsString.h>
-#include <OMX_Component.h>
-#include <OMX_VideoExt.h>
-
namespace android {
// node ids are created by concatenating the pid with a 16-bit counter
static size_t kMaxNodeInstances = (1 << 16);
-////////////////////////////////////////////////////////////////////////////////
-
-// This provides the underlying Thread used by CallbackDispatcher.
-// Note that deriving CallbackDispatcher from Thread does not work.
-
-struct OMX::CallbackDispatcherThread : public Thread {
- explicit CallbackDispatcherThread(CallbackDispatcher *dispatcher)
- : mDispatcher(dispatcher) {
- }
-
-private:
- CallbackDispatcher *mDispatcher;
-
- bool threadLoop();
-
- CallbackDispatcherThread(const CallbackDispatcherThread &);
- CallbackDispatcherThread &operator=(const CallbackDispatcherThread &);
-};
-
-////////////////////////////////////////////////////////////////////////////////
-
-struct OMX::CallbackDispatcher : public RefBase {
- explicit CallbackDispatcher(OMXNodeInstance *owner);
-
- // Posts |msg| to the listener's queue. If |realTime| is true, the listener thread is notified
- // that a new message is available on the queue. Otherwise, the message stays on the queue, but
- // the listener is not notified of it. It will process this message when a subsequent message
- // is posted with |realTime| set to true.
- void post(const omx_message &msg, bool realTime = true);
-
- bool loop();
-
-protected:
- virtual ~CallbackDispatcher();
-
-private:
- Mutex mLock;
-
- OMXNodeInstance *mOwner;
- bool mDone;
- Condition mQueueChanged;
- std::list<omx_message> mQueue;
-
- sp<CallbackDispatcherThread> mThread;
-
- void dispatch(std::list<omx_message> &messages);
-
- CallbackDispatcher(const CallbackDispatcher &);
- CallbackDispatcher &operator=(const CallbackDispatcher &);
-};
-
-OMX::CallbackDispatcher::CallbackDispatcher(OMXNodeInstance *owner)
- : mOwner(owner),
- mDone(false) {
- mThread = new CallbackDispatcherThread(this);
- mThread->run("OMXCallbackDisp", ANDROID_PRIORITY_FOREGROUND);
-}
-
-OMX::CallbackDispatcher::~CallbackDispatcher() {
- {
- Mutex::Autolock autoLock(mLock);
-
- mDone = true;
- mQueueChanged.signal();
- }
-
- // A join on self can happen if the last ref to CallbackDispatcher
- // is released within the CallbackDispatcherThread loop
- status_t status = mThread->join();
- if (status != WOULD_BLOCK) {
- // Other than join to self, the only other error return codes are
- // whatever readyToRun() returns, and we don't override that
- CHECK_EQ(status, (status_t)NO_ERROR);
- }
-}
-
-void OMX::CallbackDispatcher::post(const omx_message &msg, bool realTime) {
- Mutex::Autolock autoLock(mLock);
-
- mQueue.push_back(msg);
- if (realTime) {
- mQueueChanged.signal();
- }
-}
-
-void OMX::CallbackDispatcher::dispatch(std::list<omx_message> &messages) {
- if (mOwner == NULL) {
- ALOGV("Would have dispatched a message to a node that's already gone.");
- return;
- }
- mOwner->onMessages(messages);
-}
-
-bool OMX::CallbackDispatcher::loop() {
- for (;;) {
- std::list<omx_message> messages;
-
- {
- Mutex::Autolock autoLock(mLock);
- while (!mDone && mQueue.empty()) {
- mQueueChanged.wait(mLock);
- }
-
- if (mDone) {
- break;
- }
-
- messages.swap(mQueue);
- }
-
- dispatch(messages);
- }
-
- return false;
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-bool OMX::CallbackDispatcherThread::threadLoop() {
- return mDispatcher->loop();
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
OMX::OMX()
- : mMaster(new OMXMaster),
- mNodeCounter(0) {
+ : mMaster(new OMXMaster) {
}
OMX::~OMX() {
@@ -177,7 +47,7 @@
}
void OMX::binderDied(const wp<IBinder> &the_late_who) {
- OMXNodeInstance *instance;
+ sp<OMXNodeInstance> instance;
{
Mutex::Autolock autoLock(mLock);
@@ -192,24 +62,9 @@
instance = mLiveNodes.editValueAt(index);
mLiveNodes.removeItemsAt(index);
-
- index = mDispatchers.indexOfKey(instance->nodeID());
- CHECK(index >= 0);
- mDispatchers.removeItemsAt(index);
-
- invalidateNodeID_l(instance->nodeID());
}
- instance->onObserverDied(mMaster);
-}
-
-bool OMX::isSecure(node_id node) {
- OMXNodeInstance *instance = findInstance(node);
- return (instance == NULL ? false : instance->isSecure());
-}
-
-bool OMX::livesLocally(node_id /* node */, pid_t pid) {
- return pid == getpid();
+ instance->onObserverDied();
}
status_t OMX::listNodes(List<ComponentInfo> *list) {
@@ -242,48 +97,38 @@
status_t OMX::allocateNode(
const char *name, const sp<IOMXObserver> &observer,
- sp<IBinder> *nodeBinder, node_id *node) {
+ sp<IOMXNode> *omxNode) {
Mutex::Autolock autoLock(mLock);
- *node = 0;
- if (nodeBinder != NULL) {
- *nodeBinder = NULL;
- }
+ omxNode->clear();
- if (mNodeIDToInstance.size() == kMaxNodeInstances) {
- // all possible node IDs are in use
+ if (mLiveNodes.size() == kMaxNodeInstances) {
return NO_MEMORY;
}
- OMXNodeInstance *instance = new OMXNodeInstance(this, observer, name);
+ sp<OMXNodeInstance> instance = new OMXNodeInstance(this, observer, name);
OMX_COMPONENTTYPE *handle;
OMX_ERRORTYPE err = mMaster->makeComponentInstance(
name, &OMXNodeInstance::kCallbacks,
- instance, &handle);
+ instance.get(), &handle);
if (err != OMX_ErrorNone) {
ALOGE("FAILED to allocate omx component '%s' err=%s(%#x)", name, asString(err), err);
- instance->onGetHandleFailed();
-
return StatusFromOMXError(err);
}
-
- *node = makeNodeID_l(instance);
- mDispatchers.add(*node, new CallbackDispatcher(instance));
-
- instance->setHandle(*node, handle);
+ instance->setHandle(handle);
mLiveNodes.add(IInterface::asBinder(observer), instance);
IInterface::asBinder(observer)->linkToDeath(this);
+ *omxNode = instance;
+
return OK;
}
-status_t OMX::freeNode(node_id node) {
- OMXNodeInstance *instance = findInstance(node);
-
+status_t OMX::freeNode(const sp<OMXNodeInstance> &instance) {
if (instance == NULL) {
return OK;
}
@@ -301,466 +146,35 @@
IInterface::asBinder(instance->observer())->unlinkToDeath(this);
- status_t err = instance->freeNode(mMaster);
-
- {
- Mutex::Autolock autoLock(mLock);
- ssize_t index = mDispatchers.indexOfKey(node);
- CHECK(index >= 0);
- mDispatchers.removeItemsAt(index);
+ OMX_ERRORTYPE err = OMX_ErrorNone;
+ if (instance->handle() != NULL) {
+ err = mMaster->destroyComponentInstance(
+ static_cast<OMX_COMPONENTTYPE *>(instance->handle()));
}
- return err;
-}
-
-status_t OMX::sendCommand(
- node_id node, OMX_COMMANDTYPE cmd, OMX_S32 param) {
- OMXNodeInstance *instance = findInstance(node);
-
- if (instance == NULL) {
- return NAME_NOT_FOUND;
- }
-
- return instance->sendCommand(cmd, param);
-}
-
-status_t OMX::getParameter(
- node_id node, OMX_INDEXTYPE index,
- void *params, size_t size) {
- ALOGV("getParameter(%u %#x %p %zd)", node, index, params, size);
- OMXNodeInstance *instance = findInstance(node);
-
- if (instance == NULL) {
- return NAME_NOT_FOUND;
- }
-
- return instance->getParameter(
- index, params, size);
-}
-
-status_t OMX::setParameter(
- node_id node, OMX_INDEXTYPE index,
- const void *params, size_t size) {
- ALOGV("setParameter(%u %#x %p %zd)", node, index, params, size);
- OMXNodeInstance *instance = findInstance(node);
-
- if (instance == NULL) {
- return NAME_NOT_FOUND;
- }
-
- return instance->setParameter(
- index, params, size);
-}
-
-status_t OMX::getConfig(
- node_id node, OMX_INDEXTYPE index,
- void *params, size_t size) {
- OMXNodeInstance *instance = findInstance(node);
-
- if (instance == NULL) {
- return NAME_NOT_FOUND;
- }
-
- return instance->getConfig(
- index, params, size);
-}
-
-status_t OMX::setConfig(
- node_id node, OMX_INDEXTYPE index,
- const void *params, size_t size) {
- OMXNodeInstance *instance = findInstance(node);
-
- if (instance == NULL) {
- return NAME_NOT_FOUND;
- }
-
- return instance->setConfig(
- index, params, size);
-}
-
-status_t OMX::getState(
- node_id node, OMX_STATETYPE* state) {
- OMXNodeInstance *instance = findInstance(node);
-
- if (instance == NULL) {
- return NAME_NOT_FOUND;
- }
-
- return instance->getState(
- state);
-}
-
-status_t OMX::enableNativeBuffers(
- node_id node, OMX_U32 port_index, OMX_BOOL graphic, OMX_BOOL enable) {
- OMXNodeInstance *instance = findInstance(node);
-
- if (instance == NULL) {
- return NAME_NOT_FOUND;
- }
-
- return instance->enableNativeBuffers(port_index, graphic, enable);
-}
-
-status_t OMX::getGraphicBufferUsage(
- node_id node, OMX_U32 port_index, OMX_U32* usage) {
- OMXNodeInstance *instance = findInstance(node);
-
- if (instance == NULL) {
- return NAME_NOT_FOUND;
- }
-
- return instance->getGraphicBufferUsage(port_index, usage);
-}
-
-status_t OMX::storeMetaDataInBuffers(
- node_id node, OMX_U32 port_index, OMX_BOOL enable, MetadataBufferType *type) {
- OMXNodeInstance *instance = findInstance(node);
-
- if (instance == NULL) {
- return NAME_NOT_FOUND;
- }
-
- return instance->storeMetaDataInBuffers(port_index, enable, type);
-}
-
-status_t OMX::prepareForAdaptivePlayback(
- node_id node, OMX_U32 portIndex, OMX_BOOL enable,
- OMX_U32 maxFrameWidth, OMX_U32 maxFrameHeight) {
- OMXNodeInstance *instance = findInstance(node);
-
- if (instance == NULL) {
- return NAME_NOT_FOUND;
- }
-
- return instance->prepareForAdaptivePlayback(
- portIndex, enable, maxFrameWidth, maxFrameHeight);
-}
-
-status_t OMX::configureVideoTunnelMode(
- node_id node, OMX_U32 portIndex, OMX_BOOL tunneled,
- OMX_U32 audioHwSync, native_handle_t **sidebandHandle) {
- OMXNodeInstance *instance = findInstance(node);
-
- if (instance == NULL) {
- return NAME_NOT_FOUND;
- }
-
- return instance->configureVideoTunnelMode(
- portIndex, tunneled, audioHwSync, sidebandHandle);
-}
-
-status_t OMX::useBuffer(
- node_id node, OMX_U32 port_index, const sp<IMemory> &params,
- buffer_id *buffer, OMX_U32 allottedSize) {
- OMXNodeInstance *instance = findInstance(node);
-
- if (instance == NULL) {
- return NAME_NOT_FOUND;
- }
-
- return instance->useBuffer(
- port_index, params, buffer, allottedSize);
-}
-
-status_t OMX::useGraphicBuffer(
- node_id node, OMX_U32 port_index,
- const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer) {
- OMXNodeInstance *instance = findInstance(node);
-
- if (instance == NULL) {
- return NAME_NOT_FOUND;
- }
-
- return instance->useGraphicBuffer(
- port_index, graphicBuffer, buffer);
-}
-
-status_t OMX::updateGraphicBufferInMeta(
- node_id node, OMX_U32 port_index,
- const sp<GraphicBuffer> &graphicBuffer, buffer_id buffer) {
- OMXNodeInstance *instance = findInstance(node);
-
- if (instance == NULL) {
- return NAME_NOT_FOUND;
- }
-
- return instance->updateGraphicBufferInMeta(
- port_index, graphicBuffer, buffer);
-}
-
-status_t OMX::updateNativeHandleInMeta(
- node_id node, OMX_U32 port_index,
- const sp<NativeHandle> &nativeHandle, buffer_id buffer) {
- OMXNodeInstance *instance = findInstance(node);
-
- if (instance == NULL) {
- return NAME_NOT_FOUND;
- }
-
- return instance->updateNativeHandleInMeta(
- port_index, nativeHandle, buffer);
+ return StatusFromOMXError(err);
}
status_t OMX::createInputSurface(
- node_id node, OMX_U32 port_index, android_dataspace dataSpace,
- sp<IGraphicBufferProducer> *bufferProducer, MetadataBufferType *type) {
- OMXNodeInstance *instance = findInstance(node);
-
- if (instance == NULL) {
- return NAME_NOT_FOUND;
- }
-
- return instance->createInputSurface(
- port_index, dataSpace, bufferProducer, type);
-}
-
-status_t OMX::createPersistentInputSurface(
sp<IGraphicBufferProducer> *bufferProducer,
- sp<IGraphicBufferConsumer> *bufferConsumer) {
- return OMXNodeInstance::createPersistentInputSurface(
- bufferProducer, bufferConsumer);
-}
-
-status_t OMX::setInputSurface(
- node_id node, OMX_U32 port_index,
- const sp<IGraphicBufferConsumer> &bufferConsumer, MetadataBufferType *type) {
- OMXNodeInstance *instance = findInstance(node);
-
- if (instance == NULL) {
- return NAME_NOT_FOUND;
+ sp<IGraphicBufferSource> *bufferSource) {
+ if (bufferProducer == NULL || bufferSource == NULL) {
+ ALOGE("b/25884056");
+ return BAD_VALUE;
}
- return instance->setInputSurface(port_index, bufferConsumer, type);
-}
-
-
-status_t OMX::signalEndOfInputStream(node_id node) {
- OMXNodeInstance *instance = findInstance(node);
-
- if (instance == NULL) {
- return NAME_NOT_FOUND;
+ sp<GraphicBufferSource> graphicBufferSource = new GraphicBufferSource();
+ status_t err = graphicBufferSource->initCheck();
+ if (err != OK) {
+ ALOGE("Failed to create persistent input surface: %s (%d)",
+ strerror(-err), err);
+ return err;
}
- return instance->signalEndOfInputStream();
-}
+ *bufferProducer = graphicBufferSource->getIGraphicBufferProducer();
+ *bufferSource = graphicBufferSource;
-status_t OMX::allocateSecureBuffer(
- node_id node, OMX_U32 port_index, size_t size,
- buffer_id *buffer, void **buffer_data, sp<NativeHandle> *native_handle) {
- OMXNodeInstance *instance = findInstance(node);
-
- if (instance == NULL) {
- return NAME_NOT_FOUND;
- }
-
- return instance->allocateSecureBuffer(
- port_index, size, buffer, buffer_data, native_handle);
-}
-
-status_t OMX::allocateBufferWithBackup(
- node_id node, OMX_U32 port_index, const sp<IMemory> &params,
- buffer_id *buffer, OMX_U32 allottedSize) {
- OMXNodeInstance *instance = findInstance(node);
-
- if (instance == NULL) {
- return NAME_NOT_FOUND;
- }
-
- return instance->allocateBufferWithBackup(
- port_index, params, buffer, allottedSize);
-}
-
-status_t OMX::freeBuffer(node_id node, OMX_U32 port_index, buffer_id buffer) {
- OMXNodeInstance *instance = findInstance(node);
-
- if (instance == NULL) {
- return NAME_NOT_FOUND;
- }
-
- return instance->freeBuffer(
- port_index, buffer);
-}
-
-status_t OMX::fillBuffer(node_id node, buffer_id buffer, int fenceFd) {
- OMXNodeInstance *instance = findInstance(node);
-
- if (instance == NULL) {
- return NAME_NOT_FOUND;
- }
-
- return instance->fillBuffer(buffer, fenceFd);
-}
-
-status_t OMX::emptyBuffer(
- node_id node,
- buffer_id buffer,
- OMX_U32 range_offset, OMX_U32 range_length,
- OMX_U32 flags, OMX_TICKS timestamp, int fenceFd) {
- OMXNodeInstance *instance = findInstance(node);
-
- if (instance == NULL) {
- return NAME_NOT_FOUND;
- }
-
- return instance->emptyBuffer(
- buffer, range_offset, range_length, flags, timestamp, fenceFd);
-}
-
-status_t OMX::getExtensionIndex(
- node_id node,
- const char *parameter_name,
- OMX_INDEXTYPE *index) {
- OMXNodeInstance *instance = findInstance(node);
-
- if (instance == NULL) {
- return NAME_NOT_FOUND;
- }
-
- return instance->getExtensionIndex(
- parameter_name, index);
-}
-
-status_t OMX::setInternalOption(
- node_id node,
- OMX_U32 port_index,
- InternalOptionType type,
- const void *data,
- size_t size) {
- OMXNodeInstance *instance = findInstance(node);
-
- if (instance == NULL) {
- return NAME_NOT_FOUND;
- }
-
- return instance->setInternalOption(port_index, type, data, size);
-}
-
-OMX_ERRORTYPE OMX::OnEvent(
- node_id node,
- OMX_IN OMX_EVENTTYPE eEvent,
- OMX_IN OMX_U32 nData1,
- OMX_IN OMX_U32 nData2,
- OMX_IN OMX_PTR pEventData) {
- ALOGV("OnEvent(%d, %" PRIu32", %" PRIu32 ")", eEvent, nData1, nData2);
- OMXNodeInstance *instance = findInstance(node);
-
- if (instance == NULL) {
- return OMX_ErrorComponentNotFound;
- }
-
- // Forward to OMXNodeInstance.
- instance->onEvent(eEvent, nData1, nData2);
-
- sp<OMX::CallbackDispatcher> dispatcher = findDispatcher(node);
-
- // output rendered events are not processed as regular events until they hit the observer
- if (eEvent == OMX_EventOutputRendered) {
- if (pEventData == NULL) {
- return OMX_ErrorBadParameter;
- }
-
- // process data from array
- OMX_VIDEO_RENDEREVENTTYPE *renderData = (OMX_VIDEO_RENDEREVENTTYPE *)pEventData;
- for (size_t i = 0; i < nData1; ++i) {
- omx_message msg;
- msg.type = omx_message::FRAME_RENDERED;
- msg.node = node;
- msg.fenceFd = -1;
- msg.u.render_data.timestamp = renderData[i].nMediaTimeUs;
- msg.u.render_data.nanoTime = renderData[i].nSystemTimeNs;
-
- dispatcher->post(msg, false /* realTime */);
- }
- return OMX_ErrorNone;
- }
-
- omx_message msg;
- msg.type = omx_message::EVENT;
- msg.node = node;
- msg.fenceFd = -1;
- msg.u.event_data.event = eEvent;
- msg.u.event_data.data1 = nData1;
- msg.u.event_data.data2 = nData2;
-
- dispatcher->post(msg, true /* realTime */);
-
- return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE OMX::OnEmptyBufferDone(
- node_id node, buffer_id buffer, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer, int fenceFd) {
- ALOGV("OnEmptyBufferDone buffer=%p", pBuffer);
-
- omx_message msg;
- msg.type = omx_message::EMPTY_BUFFER_DONE;
- msg.node = node;
- msg.fenceFd = fenceFd;
- msg.u.buffer_data.buffer = buffer;
-
- findDispatcher(node)->post(msg);
-
- return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE OMX::OnFillBufferDone(
- node_id node, buffer_id buffer, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer, int fenceFd) {
- ALOGV("OnFillBufferDone buffer=%p", pBuffer);
-
- omx_message msg;
- msg.type = omx_message::FILL_BUFFER_DONE;
- msg.node = node;
- msg.fenceFd = fenceFd;
- msg.u.extended_buffer_data.buffer = buffer;
- msg.u.extended_buffer_data.range_offset = pBuffer->nOffset;
- msg.u.extended_buffer_data.range_length = pBuffer->nFilledLen;
- msg.u.extended_buffer_data.flags = pBuffer->nFlags;
- msg.u.extended_buffer_data.timestamp = pBuffer->nTimeStamp;
-
- findDispatcher(node)->post(msg);
-
- return OMX_ErrorNone;
-}
-
-OMX::node_id OMX::makeNodeID_l(OMXNodeInstance *instance) {
- // mLock is already held.
-
- node_id prefix = node_id(getpid() << 16);
- node_id node = 0;
- do {
- if (++mNodeCounter >= kMaxNodeInstances) {
- mNodeCounter = 0; // OK to use because we're combining with the pid
- }
- node = node_id(prefix | mNodeCounter);
- } while (mNodeIDToInstance.indexOfKey(node) >= 0);
- mNodeIDToInstance.add(node, instance);
-
- return node;
-}
-
-OMXNodeInstance *OMX::findInstance(node_id node) {
- Mutex::Autolock autoLock(mLock);
-
- ssize_t index = mNodeIDToInstance.indexOfKey(node);
-
- return index < 0 ? NULL : mNodeIDToInstance.valueAt(index);
-}
-
-sp<OMX::CallbackDispatcher> OMX::findDispatcher(node_id node) {
- Mutex::Autolock autoLock(mLock);
-
- ssize_t index = mDispatchers.indexOfKey(node);
-
- return index < 0 ? NULL : mDispatchers.valueAt(index);
-}
-
-void OMX::invalidateNodeID(node_id node) {
- Mutex::Autolock autoLock(mLock);
- invalidateNodeID_l(node);
-}
-
-void OMX::invalidateNodeID_l(node_id node) {
- // mLock is held.
- mNodeIDToInstance.removeItem(node);
+ return OK;
}
} // namespace android
diff --git a/media/libstagefright/omx/OMXMaster.cpp b/media/libstagefright/omx/OMXMaster.cpp
index 6132a2c..ac9b0c3 100644
--- a/media/libstagefright/omx/OMXMaster.cpp
+++ b/media/libstagefright/omx/OMXMaster.cpp
@@ -32,26 +32,23 @@
OMXMaster::OMXMaster()
: mVendorLibHandle(NULL) {
- mProcessName[0] = 0;
- if (mProcessName[0] == 0) {
- pid_t pid = getpid();
- char filename[20];
- snprintf(filename, sizeof(filename), "/proc/%d/comm", pid);
- int fd = open(filename, O_RDONLY);
- if (fd < 0) {
- ALOGW("couldn't determine process name");
- sprintf(mProcessName, "<unknown>");
- } else {
- ssize_t len = read(fd, mProcessName, sizeof(mProcessName));
- if (len < 2) {
- ALOGW("couldn't determine process name");
- sprintf(mProcessName, "<unknown>");
- } else {
- // the name is newline terminated, so erase the newline
- mProcessName[len - 1] = 0;
- }
- close(fd);
- }
+ pid_t pid = getpid();
+ char filename[20];
+ snprintf(filename, sizeof(filename), "/proc/%d/comm", pid);
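+ // /proc/<pid>/comm holds the process name, terminated by a newline.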
+ int fd = open(filename, O_RDONLY);
+ if (fd < 0) {
+ ALOGW("couldn't determine process name");
+ strlcpy(mProcessName, "<unknown>", sizeof(mProcessName));
+ } else {
+ ssize_t len = read(fd, mProcessName, sizeof(mProcessName));
+ if (len < 2) {
+ ALOGW("couldn't determine process name");
+ strlcpy(mProcessName, "<unknown>", sizeof(mProcessName));
+ } else {
+ // the name is newline terminated, so erase the newline
+ mProcessName[len - 1] = 0;
+ }
+ close(fd);
}
addVendorPlugin();
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index 355a2dd..fdc9d7f 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -23,10 +23,11 @@
#include "../include/OMXNodeInstance.h"
#include "OMXMaster.h"
#include "OMXUtils.h"
-#include "GraphicBufferSource.h"
+#include <android/IOMXBufferSource.h>
#include <OMX_Component.h>
#include <OMX_IndexExt.h>
+#include <OMX_VideoExt.h>
#include <OMX_AsString.h>
#include <binder/IMemory.h>
@@ -35,26 +36,37 @@
#include <HardwareAPI.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ColorUtils.h>
#include <media/stagefright/MediaErrors.h>
#include <utils/misc.h>
#include <utils/NativeHandle.h>
+#include <media/OMXBuffer.h>
static const OMX_U32 kPortIndexInput = 0;
static const OMX_U32 kPortIndexOutput = 1;
-#define CLOGW(fmt, ...) ALOGW("[%x:%s] " fmt, mNodeID, mName, ##__VA_ARGS__)
+// Quirk still supported, even though deprecated
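+// When a port's bit is set in mQuirks, useBuffer_l() allocates the codec
+// buffer with OMX_AllocateBuffer instead of wrapping client memory via
+// OMX_UseBuffer.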
+enum Quirks {
+ kRequiresAllocateBufferOnInputPorts = 1,
+ kRequiresAllocateBufferOnOutputPorts = 2,
+
+ kQuirksMask = kRequiresAllocateBufferOnInputPorts
+ | kRequiresAllocateBufferOnOutputPorts,
+};
+
+#define CLOGW(fmt, ...) ALOGW("[%p:%s] " fmt, mHandle, mName, ##__VA_ARGS__)
#define CLOG_ERROR_IF(cond, fn, err, fmt, ...) \
- ALOGE_IF(cond, #fn "(%x:%s, " fmt ") ERROR: %s(%#x)", \
- mNodeID, mName, ##__VA_ARGS__, asString(err), err)
+ ALOGE_IF(cond, #fn "(%p:%s, " fmt ") ERROR: %s(%#x)", \
+ mHandle, mName, ##__VA_ARGS__, asString(err), err)
#define CLOG_ERROR(fn, err, fmt, ...) CLOG_ERROR_IF(true, fn, err, fmt, ##__VA_ARGS__)
#define CLOG_IF_ERROR(fn, err, fmt, ...) \
CLOG_ERROR_IF((err) != OMX_ErrorNone, fn, err, fmt, ##__VA_ARGS__)
#define CLOGI_(level, fn, fmt, ...) \
- ALOGI_IF(DEBUG >= (level), #fn "(%x:%s, " fmt ")", mNodeID, mName, ##__VA_ARGS__)
+ ALOGI_IF(DEBUG >= (level), #fn "(%p:%s, " fmt ")", mHandle, mName, ##__VA_ARGS__)
#define CLOGD_(level, fn, fmt, ...) \
- ALOGD_IF(DEBUG >= (level), #fn "(%x:%s, " fmt ")", mNodeID, mName, ##__VA_ARGS__)
+ ALOGD_IF(DEBUG >= (level), #fn "(%p:%s, " fmt ")", mHandle, mName, ##__VA_ARGS__)
#define CLOG_LIFE(fn, fmt, ...) CLOGI_(ADebug::kDebugLifeCycle, fn, fmt, ##__VA_ARGS__)
#define CLOG_STATE(fn, fmt, ...) CLOGI_(ADebug::kDebugState, fn, fmt, ##__VA_ARGS__)
@@ -62,7 +74,7 @@
#define CLOG_INTERNAL(fn, fmt, ...) CLOGD_(ADebug::kDebugInternalState, fn, fmt, ##__VA_ARGS__)
#define CLOG_DEBUG_IF(cond, fn, fmt, ...) \
- ALOGD_IF(cond, #fn "(%x, " fmt ")", mNodeID, ##__VA_ARGS__)
+ ALOGD_IF(cond, #fn "(%p, " fmt ")", mHandle, ##__VA_ARGS__)
#define CLOG_BUFFER(fn, fmt, ...) \
CLOG_DEBUG_IF(DEBUG >= ADebug::kDebugAll, fn, fmt, ##__VA_ARGS__)
@@ -94,18 +106,16 @@
struct BufferMeta {
explicit BufferMeta(
- const sp<IMemory> &mem, OMX_U32 portIndex, bool copyToOmx,
- bool copyFromOmx, OMX_U8 *backup)
+ const sp<IMemory> &mem, OMX_U32 portIndex, bool copy, OMX_U8 *backup)
: mMem(mem),
- mCopyFromOmx(copyFromOmx),
- mCopyToOmx(copyToOmx),
+ mCopyFromOmx(portIndex == kPortIndexOutput && copy),
+ mCopyToOmx(portIndex == kPortIndexInput && copy),
mPortIndex(portIndex),
mBackup(backup) {
}
- explicit BufferMeta(size_t size, OMX_U32 portIndex)
- : mSize(size),
- mCopyFromOmx(false),
+ explicit BufferMeta(OMX_U32 portIndex)
+ : mCopyFromOmx(false),
mCopyToOmx(false),
mPortIndex(portIndex),
mBackup(NULL) {
@@ -125,7 +135,7 @@
}
// check component returns proper range
- sp<ABuffer> codec = getBuffer(header, false /* backup */, true /* limit */);
+ sp<ABuffer> codec = getBuffer(header, true /* limit */);
memcpy((OMX_U8 *)mMem->pointer() + header->nOffset, codec->data(), codec->size());
}
@@ -140,14 +150,9 @@
header->nFilledLen);
}
- // return either the codec or the backup buffer
- sp<ABuffer> getBuffer(const OMX_BUFFERHEADERTYPE *header, bool backup, bool limit) {
- sp<ABuffer> buf;
- if (backup && mMem != NULL) {
- buf = new ABuffer(mMem->pointer(), mMem->size());
- } else {
- buf = new ABuffer(header->pBuffer, header->nAllocLen);
- }
+ // return the codec buffer
+ sp<ABuffer> getBuffer(const OMX_BUFFERHEADERTYPE *header, bool limit) {
+ sp<ABuffer> buf = new ABuffer(header->pBuffer, header->nAllocLen);
if (limit) {
if (header->nOffset + header->nFilledLen > header->nOffset
&& header->nOffset + header->nFilledLen <= header->nAllocLen) {
@@ -179,7 +184,6 @@
sp<GraphicBuffer> mGraphicBuffer;
sp<NativeHandle> mNativeHandle;
sp<IMemory> mMem;
- size_t mSize;
bool mCopyFromOmx;
bool mCopyToOmx;
OMX_U32 mPortIndex;
@@ -203,16 +207,144 @@
}
}
+////////////////////////////////////////////////////////////////////////////////
+
+// This provides the underlying Thread used by CallbackDispatcher.
+// Note that deriving CallbackDispatcher from Thread does not work.
+
+struct OMXNodeInstance::CallbackDispatcherThread : public Thread {
+ explicit CallbackDispatcherThread(CallbackDispatcher *dispatcher)
+ : mDispatcher(dispatcher) {
+ }
+
+private:
+ CallbackDispatcher *mDispatcher;
+
+ bool threadLoop();
+
+ CallbackDispatcherThread(const CallbackDispatcherThread &);
+ CallbackDispatcherThread &operator=(const CallbackDispatcherThread &);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct OMXNodeInstance::CallbackDispatcher : public RefBase {
+ explicit CallbackDispatcher(const sp<OMXNodeInstance> &owner);
+
+ // Posts |msg| to the listener's queue. If |realTime| is true, the listener thread is notified
+ // that a new message is available on the queue. Otherwise, the message stays on the queue, but
+ // the listener is not notified of it. It will process this message when a subsequent message
+ // is posted with |realTime| set to true.
+ void post(const omx_message &msg, bool realTime = true);
+
+ bool loop();
+
+protected:
+ virtual ~CallbackDispatcher();
+
+private:
+ Mutex mLock;
+
+ sp<OMXNodeInstance> const mOwner;
+ bool mDone;
+ Condition mQueueChanged;
+ std::list<omx_message> mQueue;
+
+ sp<CallbackDispatcherThread> mThread;
+
+ void dispatch(std::list<omx_message> &messages);
+
+ CallbackDispatcher(const CallbackDispatcher &);
+ CallbackDispatcher &operator=(const CallbackDispatcher &);
+};
+
+OMXNodeInstance::CallbackDispatcher::CallbackDispatcher(const sp<OMXNodeInstance> &owner)
+ : mOwner(owner),
+ mDone(false) {
+ mThread = new CallbackDispatcherThread(this);
+ mThread->run("OMXCallbackDisp", ANDROID_PRIORITY_FOREGROUND);
+}
+
+OMXNodeInstance::CallbackDispatcher::~CallbackDispatcher() {
+ {
+ Mutex::Autolock autoLock(mLock);
+
+ mDone = true;
+ mQueueChanged.signal();
+ }
+
+ // A join on self can happen if the last ref to CallbackDispatcher
+ // is released within the CallbackDispatcherThread loop
+ status_t status = mThread->join();
+ if (status != WOULD_BLOCK) {
+ // Other than join to self, the only other error return codes are
+ // whatever readyToRun() returns, and we don't override that
+ CHECK_EQ(status, (status_t)NO_ERROR);
+ }
+}
+
+void OMXNodeInstance::CallbackDispatcher::post(const omx_message &msg, bool realTime) {
+ Mutex::Autolock autoLock(mLock);
+
+ mQueue.push_back(msg);
+ if (realTime) {
+ mQueueChanged.signal();
+ }
+}
+
+void OMXNodeInstance::CallbackDispatcher::dispatch(std::list<omx_message> &messages) {
+ if (mOwner == NULL) {
+ ALOGV("Would have dispatched a message to a node that's already gone.");
+ return;
+ }
+ mOwner->onMessages(messages);
+}
+
+bool OMXNodeInstance::CallbackDispatcher::loop() {
+ for (;;) {
+ std::list<omx_message> messages;
+
+ {
+ Mutex::Autolock autoLock(mLock);
+ while (!mDone && mQueue.empty()) {
+ mQueueChanged.wait(mLock);
+ }
+
+ if (mDone) {
+ break;
+ }
+
+ messages.swap(mQueue);
+ }
+
+ dispatch(messages);
+ }
+
+ return false;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+bool OMXNodeInstance::CallbackDispatcherThread::threadLoop() {
+ return mDispatcher->loop();
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
OMXNodeInstance::OMXNodeInstance(
OMX *owner, const sp<IOMXObserver> &observer, const char *name)
: mOwner(owner),
- mNodeID(0),
mHandle(NULL),
mObserver(observer),
mDying(false),
mSailed(false),
mQueriedProhibitedExtensions(false),
- mBufferIDCount(0)
+ mQuirks(0),
+ mBufferIDCount(0),
+ mRestorePtsFailed(false),
+ mMaxTimestampGapUs(-1ll),
+ mPrevOriginalTimeUs(-1ll),
+ mPrevModifiedTimeUs(-1ll)
{
mName = ADebug::GetDebugName(name);
DEBUG = ADebug::GetDebugLevelFromProperty(name, "debug.stagefright.omx-debug");
@@ -234,44 +366,42 @@
CHECK(mHandle == NULL);
}
-void OMXNodeInstance::setHandle(OMX::node_id node_id, OMX_HANDLETYPE handle) {
- mNodeID = node_id;
+void OMXNodeInstance::setHandle(OMX_HANDLETYPE handle) {
CLOG_LIFE(allocateNode, "handle=%p", handle);
CHECK(mHandle == NULL);
mHandle = handle;
+ if (handle != NULL) {
+ mDispatcher = new CallbackDispatcher(this);
+ }
}
-sp<GraphicBufferSource> OMXNodeInstance::getGraphicBufferSource() {
- Mutex::Autolock autoLock(mGraphicBufferSourceLock);
- return mGraphicBufferSource;
+sp<IOMXBufferSource> OMXNodeInstance::getBufferSource() {
+ Mutex::Autolock autoLock(mOMXBufferSourceLock);
+ return mOMXBufferSource;
}
-void OMXNodeInstance::setGraphicBufferSource(
- const sp<GraphicBufferSource>& bufferSource) {
- Mutex::Autolock autoLock(mGraphicBufferSourceLock);
- CLOG_INTERNAL(setGraphicBufferSource, "%p", bufferSource.get());
- mGraphicBufferSource = bufferSource;
+void OMXNodeInstance::setBufferSource(const sp<IOMXBufferSource>& bufferSource) {
+ Mutex::Autolock autoLock(mOMXBufferSourceLock);
+ CLOG_INTERNAL(setBufferSource, "%p", bufferSource.get());
+ mOMXBufferSource = bufferSource;
}
-OMX *OMXNodeInstance::owner() {
- return mOwner;
+OMX_HANDLETYPE OMXNodeInstance::handle() {
+ return mHandle;
}
sp<IOMXObserver> OMXNodeInstance::observer() {
return mObserver;
}
-OMX::node_id OMXNodeInstance::nodeID() {
- return mNodeID;
-}
+status_t OMXNodeInstance::freeNode() {
-status_t OMXNodeInstance::freeNode(OMXMaster *master) {
CLOG_LIFE(freeNode, "handle=%p", mHandle);
static int32_t kMaxNumIterations = 10;
// exit if we have already freed the node
if (mHandle == NULL) {
- return OK;
+ return mOwner->freeNode(this);
}
// Transition the node from its current state all the way down
@@ -350,43 +480,35 @@
LOG_ALWAYS_FATAL("unknown state %s(%#x).", asString(state), state);
break;
}
+ status_t err = mOwner->freeNode(this);
- ALOGV("[%x:%s] calling destroyComponentInstance", mNodeID, mName);
- OMX_ERRORTYPE err = master->destroyComponentInstance(
- static_cast<OMX_COMPONENTTYPE *>(mHandle));
+ mDispatcher.clear();
+ mOMXBufferSource.clear();
mHandle = NULL;
CLOG_IF_ERROR(freeNode, err, "");
free(mName);
mName = NULL;
- mOwner->invalidateNodeID(mNodeID);
- mNodeID = 0;
-
ALOGV("OMXNodeInstance going away.");
- delete this;
- return StatusFromOMXError(err);
+ return err;
}
status_t OMXNodeInstance::sendCommand(
OMX_COMMANDTYPE cmd, OMX_S32 param) {
- if (cmd == OMX_CommandStateSet) {
- // There are no configurations past first StateSet command.
- mSailed = true;
- }
- const sp<GraphicBufferSource> bufferSource(getGraphicBufferSource());
+ const sp<IOMXBufferSource> bufferSource(getBufferSource());
if (bufferSource != NULL && cmd == OMX_CommandStateSet) {
if (param == OMX_StateIdle) {
// Initiating transition from Executing -> Idle
// ACodec is waiting for all buffers to be returned, do NOT
// submit any more buffers to the codec.
- bufferSource->omxIdle();
+ bufferSource->onOmxIdle();
} else if (param == OMX_StateLoaded) {
// Initiating transition from Idle/Executing -> Loaded
// Buffers are about to be freed.
- bufferSource->omxLoaded();
- setGraphicBufferSource(NULL);
+ bufferSource->onOmxLoaded();
+ setBufferSource(NULL);
}
// fall through
@@ -394,6 +516,11 @@
Mutex::Autolock autoLock(mLock);
+ if (cmd == OMX_CommandStateSet) {
+ // There are no configurations past first StateSet command.
+ mSailed = true;
+ }
+
// bump internal-state debug level for 2 input and output frames past a command
{
Mutex::Autolock _l(mDebugLock);
@@ -422,18 +549,17 @@
"OMX.google.android.index.getAndroidNativeBufferUsage",
};
- if ((index > OMX_IndexComponentStartUnused && index <= OMX_IndexParamStandardComponentRole)
- || (index > OMX_IndexPortStartUnused && index <= OMX_IndexParamCompBufferSupplier)
- || (index > OMX_IndexAudioStartUnused && index <= OMX_IndexConfigAudioChannelVolume)
- || (index > OMX_IndexVideoStartUnused && index <= OMX_IndexConfigVideoNalSize)
- || (index > OMX_IndexCommonStartUnused
- && index <= OMX_IndexConfigCommonTransitionEffect)
+ if ((index > OMX_IndexComponentStartUnused && index < OMX_IndexComponentEndUnused)
+ || (index > OMX_IndexPortStartUnused && index < OMX_IndexPortEndUnused)
+ || (index > OMX_IndexAudioStartUnused && index < OMX_IndexAudioEndUnused)
+ || (index > OMX_IndexVideoStartUnused && index < OMX_IndexVideoEndUnused)
+ || (index > OMX_IndexCommonStartUnused && index < OMX_IndexCommonEndUnused)
|| (index > (OMX_INDEXTYPE)OMX_IndexExtAudioStartUnused
- && index <= (OMX_INDEXTYPE)OMX_IndexParamAudioProfileQuerySupported)
+ && index < (OMX_INDEXTYPE)OMX_IndexExtAudioEndUnused)
|| (index > (OMX_INDEXTYPE)OMX_IndexExtVideoStartUnused
- && index <= (OMX_INDEXTYPE)OMX_IndexConfigAndroidVideoTemporalLayering)
+ && index < (OMX_INDEXTYPE)OMX_IndexExtVideoEndUnused)
|| (index > (OMX_INDEXTYPE)OMX_IndexExtOtherStartUnused
- && index <= (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits)) {
+ && index < (OMX_INDEXTYPE)OMX_IndexExtOtherEndUnused)) {
return false;
}
@@ -474,6 +600,10 @@
OMX_INDEXEXTTYPE extIndex = (OMX_INDEXEXTTYPE)index;
CLOG_CONFIG(setParameter, "%s(%#x), %zu@%p)", asString(extIndex), index, size, params);
+ if (extIndex == OMX_IndexParamMaxFrameDurationForBitrateControl) {
+ return setMaxPtsGapUs(params, size);
+ }
+
if (isProhibitedIndex_l(index)) {
android_errorWriteLog(0x534e4554, "29422020");
return BAD_INDEX;
@@ -520,14 +650,6 @@
return StatusFromOMXError(err);
}
-status_t OMXNodeInstance::getState(OMX_STATETYPE* state) {
- Mutex::Autolock autoLock(mLock);
-
- OMX_ERRORTYPE err = OMX_GetState(mHandle, state);
- CLOG_IF_ERROR(getState, err, "");
- return StatusFromOMXError(err);
-}
-
status_t OMXNodeInstance::enableNativeBuffers(
OMX_U32 portIndex, OMX_BOOL graphic, OMX_BOOL enable) {
if (portIndex >= NELEM(mSecureBufferType)) {
@@ -780,6 +902,21 @@
}
status_t OMXNodeInstance::useBuffer(
+ OMX_U32 portIndex,
+ const OMXBuffer &omxBuffer, OMX::buffer_id *buffer) {
+ // TODO: the allotted size is probably no longer needed.
+ if (omxBuffer.mBufferType == OMXBuffer::kBufferTypeSharedMem) {
+ return useBuffer(portIndex, omxBuffer.mMem, buffer, omxBuffer.mAllottedSize);
+ }
+
+ if (omxBuffer.mBufferType == OMXBuffer::kBufferTypeANWBuffer) {
+ return useGraphicBuffer(portIndex, omxBuffer.mGraphicBuffer, buffer);
+ }
+
+ return BAD_VALUE;
+}
+
+status_t OMXNodeInstance::useBuffer(
OMX_U32 portIndex, const sp<IMemory> &params,
OMX::buffer_id *buffer, OMX_U32 allottedSize) {
if (params == NULL || buffer == NULL) {
@@ -792,36 +929,80 @@
return BAD_VALUE;
}
- // metadata buffers are not connected cross process
- // use a backup buffer instead of the actual buffer
- BufferMeta *buffer_meta;
- bool useBackup = mMetadataType[portIndex] != kMetadataBufferTypeInvalid;
- OMX_U8 *data = static_cast<OMX_U8 *>(params->pointer());
- // allocate backup buffer
- if (useBackup) {
- data = new (std::nothrow) OMX_U8[allottedSize];
- if (data == NULL) {
- return NO_MEMORY;
- }
- memset(data, 0, allottedSize);
+ return useBuffer_l(portIndex, params, buffer, allottedSize);
+}
+status_t OMXNodeInstance::useBuffer_l(
+ OMX_U32 portIndex, const sp<IMemory> &params,
+ OMX::buffer_id *buffer, OMX_U32 allottedSize) {
+ BufferMeta *buffer_meta;
+ OMX_BUFFERHEADERTYPE *header;
+ OMX_ERRORTYPE err = OMX_ErrorNone;
+ bool isMetadata = mMetadataType[portIndex] != kMetadataBufferTypeInvalid;
+ bool isOutputGraphicMetadata = (portIndex == kPortIndexOutput) &&
+ (mMetadataType[portIndex] == kMetadataBufferTypeGrallocSource ||
+ mMetadataType[portIndex] == kMetadataBufferTypeANWBuffer);
+
+ uint32_t requiresAllocateBufferBit =
+ (portIndex == kPortIndexInput)
+ ? kRequiresAllocateBufferOnInputPorts
+ : kRequiresAllocateBufferOnOutputPorts;
+
+ // we use useBuffer for output metadata regardless of quirks
+ if (!isOutputGraphicMetadata && (mQuirks & requiresAllocateBufferBit)) {
+ // metadata buffers are not connected cross process; only copy if not meta.
buffer_meta = new BufferMeta(
- params, portIndex, false /* copyToOmx */, false /* copyFromOmx */, data);
+ params, portIndex, !isMetadata /* copy */, NULL /* data */);
+
+ err = OMX_AllocateBuffer(
+ mHandle, &header, portIndex, buffer_meta, allottedSize);
+
+ if (err != OMX_ErrorNone) {
+ CLOG_ERROR(allocateBuffer, err,
+ SIMPLE_BUFFER(portIndex, (size_t)allottedSize,
+ params != NULL ? params->pointer() : NULL));
+ }
} else {
- buffer_meta = new BufferMeta(
- params, portIndex, false /* copyToOmx */, false /* copyFromOmx */, NULL);
+ OMX_U8 *data = NULL;
+
+ // metadata buffers are not connected cross process
+ // use a backup buffer instead of the actual buffer
+ if (isMetadata) {
+ // TODO: this logic is very fishy, should it be removed?
+ // if we are not connecting the buffers, the sizes must match
+ if (params != NULL && allottedSize != params->size()) {
+ CLOG_ERROR(useBuffer, BAD_VALUE, SIMPLE_BUFFER(portIndex, (size_t)allottedSize, data));
+ return BAD_VALUE;
+ }
+
+ data = new (std::nothrow) OMX_U8[allottedSize];
+ if (data == NULL) {
+ return NO_MEMORY;
+ }
+ memset(data, 0, allottedSize);
+
+ buffer_meta = new BufferMeta(
+ params, portIndex, false /* copy */, data);
+ } else {
+ // NULL params is allowed only in metadata mode.
+ CHECK(params != NULL);
+ data = static_cast<OMX_U8 *>(params->pointer());
+
+ buffer_meta = new BufferMeta(
+ params, portIndex, false /* copy */, NULL);
+ }
+
+ err = OMX_UseBuffer(
+ mHandle, &header, portIndex, buffer_meta,
+ allottedSize, data);
+
+ if (err != OMX_ErrorNone) {
+ CLOG_ERROR(useBuffer, err, SIMPLE_BUFFER(
+ portIndex, (size_t)allottedSize, data));
+ }
}
- OMX_BUFFERHEADERTYPE *header;
-
- OMX_ERRORTYPE err = OMX_UseBuffer(
- mHandle, &header, portIndex, buffer_meta,
- allottedSize, data);
-
if (err != OMX_ErrorNone) {
- CLOG_ERROR(useBuffer, err, SIMPLE_BUFFER(
- portIndex, (size_t)allottedSize, data));
-
delete buffer_meta;
buffer_meta = NULL;
@@ -836,9 +1017,9 @@
addActiveBuffer(portIndex, *buffer);
- sp<GraphicBufferSource> bufferSource(getGraphicBufferSource());
+ sp<IOMXBufferSource> bufferSource(getBufferSource());
if (bufferSource != NULL && portIndex == kPortIndexInput) {
- bufferSource->addCodecBuffer(header);
+ bufferSource->onInputBufferAdded(*buffer);
}
CLOG_BUFFER(useBuffer, NEW_BUFFER_FMT(
@@ -911,6 +1092,13 @@
}
Mutex::Autolock autoLock(mLock);
+ // First, see if we're in metadata mode. We could be running an experiment to simulate
+ // legacy behavior (preallocated buffers) on devices that support metadata.
+ if (mMetadataType[portIndex] != kMetadataBufferTypeInvalid) {
+ return useGraphicBufferWithMetadata_l(
+ portIndex, graphicBuffer, buffer);
+ }
+
// See if the newer version of the extension is present.
OMX_INDEXTYPE index;
if (OMX_GetExtensionIndex(
@@ -966,9 +1154,36 @@
return OK;
}
+status_t OMXNodeInstance::useGraphicBufferWithMetadata_l(
+ OMX_U32 portIndex, const sp<GraphicBuffer> &graphicBuffer,
+ OMX::buffer_id *buffer) {
+ if (portIndex != kPortIndexOutput) {
+ return BAD_VALUE;
+ }
+
+ OMX_U32 allottedSize = 0;
+ if (mMetadataType[portIndex] == kMetadataBufferTypeGrallocSource) {
+ allottedSize = sizeof(VideoGrallocMetadata);
+ } else if (mMetadataType[portIndex] == kMetadataBufferTypeANWBuffer) {
+ allottedSize = sizeof(VideoNativeMetadata);
+ } else {
+ return BAD_VALUE;
+ }
+
+ status_t err = useBuffer_l(portIndex, NULL, buffer, allottedSize);
+ if (err != OK) {
+ return err;
+ }
+
+ OMX_BUFFERHEADERTYPE *header = findBufferHeader(*buffer, portIndex);
+
+ return updateGraphicBufferInMeta_l(portIndex, graphicBuffer, *buffer, header);
+
+}
+
status_t OMXNodeInstance::updateGraphicBufferInMeta_l(
OMX_U32 portIndex, const sp<GraphicBuffer>& graphicBuffer,
- OMX::buffer_id buffer, OMX_BUFFERHEADERTYPE *header, bool updateCodecBuffer) {
+ OMX::buffer_id buffer, OMX_BUFFERHEADERTYPE *header) {
// No need to check |graphicBuffer| since NULL is valid for it as below.
if (header == NULL) {
ALOGE("b/25884056");
@@ -980,14 +1195,9 @@
}
BufferMeta *bufferMeta = (BufferMeta *)(header->pAppPrivate);
- sp<ABuffer> data = bufferMeta->getBuffer(
- header, !updateCodecBuffer /* backup */, false /* limit */);
+ sp<ABuffer> data = bufferMeta->getBuffer(header, false /* limit */);
bufferMeta->setGraphicBuffer(graphicBuffer);
MetadataBufferType metaType = mMetadataType[portIndex];
- // we use gralloc source only in the codec buffers
- if (metaType == kMetadataBufferTypeGrallocSource && !updateCodecBuffer) {
- metaType = kMetadataBufferTypeANWBuffer;
- }
if (metaType == kMetadataBufferTypeGrallocSource
&& data->capacity() >= sizeof(VideoGrallocMetadata)) {
VideoGrallocMetadata &metadata = *(VideoGrallocMetadata *)(data->data());
@@ -1011,21 +1221,9 @@
return OK;
}
-status_t OMXNodeInstance::updateGraphicBufferInMeta(
- OMX_U32 portIndex, const sp<GraphicBuffer>& graphicBuffer,
- OMX::buffer_id buffer) {
- Mutex::Autolock autoLock(mLock);
- OMX_BUFFERHEADERTYPE *header = findBufferHeader(buffer, portIndex);
- // update backup buffer for input, codec buffer for output
- return updateGraphicBufferInMeta_l(
- portIndex, graphicBuffer, buffer, header,
- true /* updateCodecBuffer */);
-}
-
-status_t OMXNodeInstance::updateNativeHandleInMeta(
- OMX_U32 portIndex, const sp<NativeHandle>& nativeHandle, OMX::buffer_id buffer) {
- Mutex::Autolock autoLock(mLock);
- OMX_BUFFERHEADERTYPE *header = findBufferHeader(buffer, portIndex);
+status_t OMXNodeInstance::updateNativeHandleInMeta_l(
+ OMX_U32 portIndex, const sp<NativeHandle>& nativeHandle,
+ OMX::buffer_id buffer, OMX_BUFFERHEADERTYPE *header) {
// No need to check |nativeHandle| since NULL is valid for it as below.
if (header == NULL) {
ALOGE("b/25884056");
@@ -1037,9 +1235,7 @@
}
BufferMeta *bufferMeta = (BufferMeta *)(header->pAppPrivate);
- // update backup buffer
- sp<ABuffer> data = bufferMeta->getBuffer(
- header, false /* backup */, false /* limit */);
+ sp<ABuffer> data = bufferMeta->getBuffer(header, false /* limit */);
bufferMeta->setNativeHandle(nativeHandle);
if (mMetadataType[portIndex] == kMetadataBufferTypeNativeHandleSource
&& data->capacity() >= sizeof(VideoNativeHandleMetadata)) {
@@ -1059,32 +1255,23 @@
return OK;
}
-status_t OMXNodeInstance::createGraphicBufferSource(
- OMX_U32 portIndex, const sp<IGraphicBufferConsumer> &bufferConsumer, MetadataBufferType *type) {
+status_t OMXNodeInstance::setInputSurface(
+ const sp<IOMXBufferSource> &bufferSource) {
+ Mutex::Autolock autolock(mLock);
+
status_t err;
// only allow graphic source on input port, when there are no allocated buffers yet
- if (portIndex != kPortIndexInput) {
- android_errorWriteLog(0x534e4554, "29422020");
- return BAD_VALUE;
- } else if (mNumPortBuffers[portIndex] > 0) {
+ if (mNumPortBuffers[kPortIndexInput] > 0) {
android_errorWriteLog(0x534e4554, "29422020");
return INVALID_OPERATION;
}
- const sp<GraphicBufferSource> surfaceCheck = getGraphicBufferSource();
- if (surfaceCheck != NULL) {
- if (portIndex < NELEM(mMetadataType) && type != NULL) {
- *type = mMetadataType[portIndex];
- }
+ if (getBufferSource() != NULL) {
return ALREADY_EXISTS;
}
- // Input buffers will hold meta-data (ANativeWindowBuffer references).
- if (type != NULL) {
- *type = kMetadataBufferTypeANWBuffer;
- }
- err = storeMetaDataInBuffers_l(portIndex, OMX_TRUE, type);
+ err = storeMetaDataInBuffers_l(kPortIndexInput, OMX_TRUE, NULL);
if (err != OK) {
return err;
}
@@ -1093,13 +1280,13 @@
// codec was configured.
OMX_PARAM_PORTDEFINITIONTYPE def;
InitOMXParams(&def);
- def.nPortIndex = portIndex;
+ def.nPortIndex = kPortIndexInput;
OMX_ERRORTYPE oerr = OMX_GetParameter(
mHandle, OMX_IndexParamPortDefinition, &def);
if (oerr != OMX_ErrorNone) {
OMX_INDEXTYPE index = OMX_IndexParamPortDefinition;
- CLOG_ERROR(getParameter, oerr, "%s(%#x): %s:%u",
- asString(index), index, portString(portIndex), portIndex);
+ CLOG_ERROR(getParameter, oerr, "%s(%#x): %s:%u", asString(index),
+ index, portString(kPortIndexInput), kPortIndexInput);
return UNKNOWN_ERROR;
}
@@ -1110,103 +1297,18 @@
return INVALID_OPERATION;
}
- uint32_t usageBits;
- oerr = OMX_GetParameter(
- mHandle, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, &usageBits);
- if (oerr != OMX_ErrorNone) {
- usageBits = 0;
- }
-
- sp<GraphicBufferSource> bufferSource = new GraphicBufferSource(this,
- def.format.video.nFrameWidth,
- def.format.video.nFrameHeight,
- def.nBufferCountActual,
- usageBits,
- bufferConsumer);
-
- if ((err = bufferSource->initCheck()) != OK) {
- return err;
- }
- setGraphicBufferSource(bufferSource);
-
- return OK;
-}
-
-status_t OMXNodeInstance::createInputSurface(
- OMX_U32 portIndex, android_dataspace dataSpace,
- sp<IGraphicBufferProducer> *bufferProducer, MetadataBufferType *type) {
- if (bufferProducer == NULL) {
- ALOGE("b/25884056");
+ if (def.format.video.nFrameWidth == 0
+ || def.format.video.nFrameHeight == 0) {
+ ALOGE("Invalid video dimension %ux%u",
+ def.format.video.nFrameWidth,
+ def.format.video.nFrameHeight);
return BAD_VALUE;
}
- Mutex::Autolock autolock(mLock);
- status_t err = createGraphicBufferSource(portIndex, NULL /* bufferConsumer */, type);
-
- if (err != OK) {
- return err;
- }
-
- mGraphicBufferSource->setDefaultDataSpace(dataSpace);
-
- *bufferProducer = mGraphicBufferSource->getIGraphicBufferProducer();
+ setBufferSource(bufferSource);
return OK;
}
-//static
-status_t OMXNodeInstance::createPersistentInputSurface(
- sp<IGraphicBufferProducer> *bufferProducer,
- sp<IGraphicBufferConsumer> *bufferConsumer) {
- if (bufferProducer == NULL || bufferConsumer == NULL) {
- ALOGE("b/25884056");
- return BAD_VALUE;
- }
- String8 name("GraphicBufferSource");
-
- sp<IGraphicBufferProducer> producer;
- sp<IGraphicBufferConsumer> consumer;
- BufferQueue::createBufferQueue(&producer, &consumer);
- consumer->setConsumerName(name);
- consumer->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER);
-
- sp<BufferQueue::ProxyConsumerListener> proxy =
- new BufferQueue::ProxyConsumerListener(NULL);
- status_t err = consumer->consumerConnect(proxy, false);
- if (err != NO_ERROR) {
- ALOGE("Error connecting to BufferQueue: %s (%d)",
- strerror(-err), err);
- return err;
- }
-
- *bufferProducer = producer;
- *bufferConsumer = consumer;
-
- return OK;
-}
-
-status_t OMXNodeInstance::setInputSurface(
- OMX_U32 portIndex, const sp<IGraphicBufferConsumer> &bufferConsumer,
- MetadataBufferType *type) {
- Mutex::Autolock autolock(mLock);
- return createGraphicBufferSource(portIndex, bufferConsumer, type);
-}
-
-void OMXNodeInstance::signalEvent(OMX_EVENTTYPE event, OMX_U32 arg1, OMX_U32 arg2) {
- mOwner->OnEvent(mNodeID, event, arg1, arg2, NULL);
-}
-
-status_t OMXNodeInstance::signalEndOfInputStream() {
- // For non-Surface input, the MediaCodec should convert the call to a
- // pair of requests (dequeue input buffer, queue input buffer with EOS
- // flag set). Seems easier than doing the equivalent from here.
- sp<GraphicBufferSource> bufferSource(getGraphicBufferSource());
- if (bufferSource == NULL) {
- CLOGW("signalEndOfInputStream can only be used with Surface input");
- return INVALID_OPERATION;
- }
- return bufferSource->signalEndOfInputStream();
-}
-
status_t OMXNodeInstance::allocateSecureBuffer(
OMX_U32 portIndex, size_t size, OMX::buffer_id *buffer,
void **buffer_data, sp<NativeHandle> *native_handle) {
@@ -1223,7 +1325,7 @@
Mutex::Autolock autoLock(mLock);
- BufferMeta *buffer_meta = new BufferMeta(size, portIndex);
+ BufferMeta *buffer_meta = new BufferMeta(portIndex);
OMX_BUFFERHEADERTYPE *header;
@@ -1254,9 +1356,9 @@
addActiveBuffer(portIndex, *buffer);
- sp<GraphicBufferSource> bufferSource(getGraphicBufferSource());
+ sp<IOMXBufferSource> bufferSource(getBufferSource());
if (bufferSource != NULL && portIndex == kPortIndexInput) {
- bufferSource->addCodecBuffer(header);
+ bufferSource->onInputBufferAdded(*buffer);
}
CLOG_BUFFER(allocateSecureBuffer, NEW_BUFFER_FMT(
*buffer, portIndex, "%zu@%p:%p", size, *buffer_data,
@@ -1265,60 +1367,6 @@
return OK;
}
-status_t OMXNodeInstance::allocateBufferWithBackup(
- OMX_U32 portIndex, const sp<IMemory> &params,
- OMX::buffer_id *buffer, OMX_U32 allottedSize) {
- if (params == NULL || buffer == NULL) {
- ALOGE("b/25884056");
- return BAD_VALUE;
- }
-
- Mutex::Autolock autoLock(mLock);
- if (allottedSize > params->size() || portIndex >= NELEM(mNumPortBuffers)) {
- return BAD_VALUE;
- }
-
- // metadata buffers are not connected cross process; only copy if not meta
- bool copy = mMetadataType[portIndex] == kMetadataBufferTypeInvalid;
-
- BufferMeta *buffer_meta = new BufferMeta(
- params, portIndex,
- (portIndex == kPortIndexInput) && copy /* copyToOmx */,
- (portIndex == kPortIndexOutput) && copy /* copyFromOmx */,
- NULL /* data */);
-
- OMX_BUFFERHEADERTYPE *header;
-
- OMX_ERRORTYPE err = OMX_AllocateBuffer(
- mHandle, &header, portIndex, buffer_meta, allottedSize);
- if (err != OMX_ErrorNone) {
- CLOG_ERROR(allocateBufferWithBackup, err,
- SIMPLE_BUFFER(portIndex, (size_t)allottedSize, params->pointer()));
- delete buffer_meta;
- buffer_meta = NULL;
-
- *buffer = 0;
-
- return StatusFromOMXError(err);
- }
-
- CHECK_EQ(header->pAppPrivate, buffer_meta);
-
- *buffer = makeBufferID(header);
-
- addActiveBuffer(portIndex, *buffer);
-
- sp<GraphicBufferSource> bufferSource(getGraphicBufferSource());
- if (bufferSource != NULL && portIndex == kPortIndexInput) {
- bufferSource->addCodecBuffer(header);
- }
-
- CLOG_BUFFER(allocateBufferWithBackup, NEW_BUFFER_FMT(*buffer, portIndex, "%zu@%p :> %u@%p",
- params->size(), params->pointer(), allottedSize, header->pBuffer));
-
- return OK;
-}
-
status_t OMXNodeInstance::freeBuffer(
OMX_U32 portIndex, OMX::buffer_id buffer) {
Mutex::Autolock autoLock(mLock);
@@ -1343,7 +1391,8 @@
return StatusFromOMXError(err);
}
-status_t OMXNodeInstance::fillBuffer(OMX::buffer_id buffer, int fenceFd) {
+status_t OMXNodeInstance::fillBuffer(
+ OMX::buffer_id buffer, const OMXBuffer &omxBuffer, int fenceFd) {
Mutex::Autolock autoLock(mLock);
OMX_BUFFERHEADERTYPE *header = findBufferHeader(buffer, kPortIndexOutput);
@@ -1351,6 +1400,20 @@
ALOGE("b/25884056");
return BAD_VALUE;
}
+
+ if (omxBuffer.mBufferType == OMXBuffer::kBufferTypeANWBuffer) {
+ status_t err = updateGraphicBufferInMeta_l(
+ kPortIndexOutput, omxBuffer.mGraphicBuffer, buffer, header);
+
+ if (err != OK) {
+ CLOG_ERROR(fillBuffer, err, FULL_BUFFER(
+ (intptr_t)header->pBuffer, header, fenceFd));
+ return err;
+ }
+ } else if (omxBuffer.mBufferType != OMXBuffer::kBufferTypePreset) {
+ return BAD_VALUE;
+ }
+
header->nFilledLen = 0;
header->nOffset = 0;
header->nFlags = 0;
@@ -1378,13 +1441,34 @@
}
status_t OMXNodeInstance::emptyBuffer(
+ buffer_id buffer, const OMXBuffer &omxBuffer,
+ OMX_U32 flags, OMX_TICKS timestamp, int fenceFd) {
+ if (omxBuffer.mBufferType == OMXBuffer::kBufferTypePreset) {
+ return emptyBuffer(
+ buffer, 0, omxBuffer.mRangeLength, flags, timestamp, fenceFd);
+ }
+
+ if (omxBuffer.mBufferType == OMXBuffer::kBufferTypeANWBuffer) {
+ return emptyGraphicBuffer(
+ buffer, omxBuffer.mGraphicBuffer, flags, timestamp, fenceFd);
+ }
+
+ if (omxBuffer.mBufferType == OMXBuffer::kBufferTypeNativeHandle) {
+ return emptyNativeHandleBuffer(
+ buffer, omxBuffer.mNativeHandle, flags, timestamp, fenceFd);
+ }
+
+ return BAD_VALUE;
+}
+
+status_t OMXNodeInstance::emptyBuffer(
OMX::buffer_id buffer,
OMX_U32 rangeOffset, OMX_U32 rangeLength,
OMX_U32 flags, OMX_TICKS timestamp, int fenceFd) {
Mutex::Autolock autoLock(mLock);
// no emptybuffer if using input surface
- if (getGraphicBufferSource() != NULL) {
+ if (getBufferSource() != NULL) {
android_errorWriteLog(0x534e4554, "29422020");
return INVALID_OPERATION;
}
@@ -1531,24 +1615,26 @@
// like emptyBuffer, but the data is already in header->pBuffer
status_t OMXNodeInstance::emptyGraphicBuffer(
- OMX_BUFFERHEADERTYPE *header, const sp<GraphicBuffer> &graphicBuffer,
+ OMX::buffer_id buffer, const sp<GraphicBuffer> &graphicBuffer,
OMX_U32 flags, OMX_TICKS timestamp, int fenceFd) {
+ Mutex::Autolock autoLock(mLock);
+
+ OMX_BUFFERHEADERTYPE *header = findBufferHeader(buffer, kPortIndexInput);
if (header == NULL) {
ALOGE("b/25884056");
return BAD_VALUE;
}
- Mutex::Autolock autoLock(mLock);
- OMX::buffer_id buffer = findBufferID(header);
status_t err = updateGraphicBufferInMeta_l(
- kPortIndexInput, graphicBuffer, buffer, header,
- true /* updateCodecBuffer */);
+ kPortIndexInput, graphicBuffer, buffer, header);
if (err != OK) {
CLOG_ERROR(emptyGraphicBuffer, err, FULL_BUFFER(
(intptr_t)header->pBuffer, header, fenceFd));
return err;
}
+ int64_t codecTimeUs = getCodecTimestamp(timestamp);
+
header->nOffset = 0;
if (graphicBuffer == NULL) {
header->nFilledLen = 0;
@@ -1557,9 +1643,102 @@
} else {
header->nFilledLen = sizeof(VideoNativeMetadata);
}
+ return emptyBuffer_l(header, flags, codecTimeUs, (intptr_t)header->pBuffer, fenceFd);
+}
+
+status_t OMXNodeInstance::setMaxPtsGapUs(const void *params, size_t size) {
+ if (params == NULL || size != sizeof(OMX_PARAM_U32TYPE)) {
+ CLOG_ERROR(setMaxPtsGapUs, BAD_VALUE, "invalid params (%p,%zu)", params, size);
+ return BAD_VALUE;
+ }
+
+ mMaxTimestampGapUs = (int64_t)((OMX_PARAM_U32TYPE*)params)->nU32;
+
+ return OK;
+}
+
+int64_t OMXNodeInstance::getCodecTimestamp(OMX_TICKS timestamp) {
+ int64_t originalTimeUs = timestamp;
+
+ if (mMaxTimestampGapUs > 0ll) {
+ /* Cap timestamp gap between adjacent frames to specified max
+ *
+ * In the scenario of cast mirroring, encoding could be suspended for
+ * prolonged periods. Limiting the pts gap works around the problem
+ * where the encoder's rate control logic produces huge frames after a
+ * long period of suspension.
+ */
+ if (mPrevOriginalTimeUs >= 0ll) {
+ int64_t timestampGapUs = originalTimeUs - mPrevOriginalTimeUs;
+ timestamp = (timestampGapUs < mMaxTimestampGapUs ?
+ timestampGapUs : mMaxTimestampGapUs) + mPrevModifiedTimeUs;
+ }
+ ALOGV("IN timestamp: %lld -> %lld",
+ static_cast<long long>(originalTimeUs),
+ static_cast<long long>(timestamp));
+ }
+
+ mPrevOriginalTimeUs = originalTimeUs;
+ mPrevModifiedTimeUs = timestamp;
+
+ if (mMaxTimestampGapUs > 0ll && !mRestorePtsFailed) {
+ mOriginalTimeUs.add(timestamp, originalTimeUs);
+ }
+
+ return timestamp;
+}
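
A rough standalone sketch of the capping arithmetic above (a simplified model with illustrative names and values, not the AOSP implementation):

    // Simplified model of the PTS-gap capping applied in getCodecTimestamp().
    #include <algorithm>
    #include <cstdint>
    #include <cstdio>

    struct PtsCapper {
        int64_t maxGapUs;             // value installed via setMaxPtsGapUs()
        int64_t prevOriginalUs = -1;
        int64_t prevModifiedUs = -1;

        int64_t cap(int64_t originalUs) {
            int64_t modifiedUs = originalUs;
            if (maxGapUs > 0 && prevOriginalUs >= 0) {
                // clamp the gap, measured against the previous *modified* timestamp
                modifiedUs = std::min(originalUs - prevOriginalUs, maxGapUs) + prevModifiedUs;
            }
            prevOriginalUs = originalUs;
            prevModifiedUs = modifiedUs;
            return modifiedUs;
        }
    };

    int main() {
        PtsCapper c{100000};                           // cap gaps at 100 ms
        printf("%lld\n", (long long)c.cap(0));         // 0
        printf("%lld\n", (long long)c.cap(33000));     // 33000 (gap below the cap)
        printf("%lld\n", (long long)c.cap(5033000));   // 133000 (5 s gap capped to 100 ms)
    }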
+
+status_t OMXNodeInstance::emptyNativeHandleBuffer(
+ OMX::buffer_id buffer, const sp<NativeHandle> &nativeHandle,
+ OMX_U32 flags, OMX_TICKS timestamp, int fenceFd) {
+ Mutex::Autolock autoLock(mLock);
+
+ OMX_BUFFERHEADERTYPE *header = findBufferHeader(buffer, kPortIndexInput);
+ if (header == NULL) {
+ ALOGE("b/25884056");
+ return BAD_VALUE;
+ }
+
+ status_t err = updateNativeHandleInMeta_l(
+ kPortIndexInput, nativeHandle, buffer, header);
+ if (err != OK) {
+ CLOG_ERROR(emptyNativeHandleBuffer, err, FULL_BUFFER(
+ (intptr_t)header->pBuffer, header, fenceFd));
+ return err;
+ }
+
+ header->nOffset = 0;
+ header->nFilledLen = (nativeHandle == NULL) ? 0 : sizeof(VideoNativeMetadata);
+
return emptyBuffer_l(header, flags, timestamp, (intptr_t)header->pBuffer, fenceFd);
}
+void OMXNodeInstance::codecBufferFilled(omx_message &msg) {
+ Mutex::Autolock autoLock(mBufferIDLock);
+
+ if (mMaxTimestampGapUs <= 0ll || mRestorePtsFailed) {
+ return;
+ }
+
+ OMX_U32 &flags = msg.u.extended_buffer_data.flags;
+ OMX_TICKS &timestamp = msg.u.extended_buffer_data.timestamp;
+
+ if (!(flags & OMX_BUFFERFLAG_CODECCONFIG)) {
+ ssize_t index = mOriginalTimeUs.indexOfKey(timestamp);
+ if (index >= 0) {
+ ALOGV("OUT timestamp: %lld -> %lld",
+ static_cast<long long>(timestamp),
+ static_cast<long long>(mOriginalTimeUs[index]));
+ timestamp = mOriginalTimeUs[index];
+ mOriginalTimeUs.removeItemsAt(index);
+ } else {
+ // giving up the effort as encoder doesn't appear to preserve pts
+ ALOGW("giving up limiting timestamp gap (pts = %lld)", timestamp);
+ mRestorePtsFailed = true;
+ }
+ }
+}
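
And a correspondingly simplified sketch of the restore step performed in codecBufferFilled() above, reusing the example numbers from the previous sketch (illustrative only, not the AOSP data structures):

    // Simplified restore step: map the capped (modified) PTS back to the original one.
    #include <cstdint>
    #include <cstdio>
    #include <map>

    int main() {
        std::map<int64_t, int64_t> originalUs;  // modified PTS -> original PTS
        originalUs[133000] = 5033000;           // recorded when the input buffer was queued
        auto it = originalUs.find(133000);      // keyed by the PTS seen on the output buffer
        if (it != originalUs.end()) {
            printf("restored pts = %lld\n", (long long)it->second);  // 5033000
            originalUs.erase(it);
        } else {
            // a miss means the codec did not preserve PTS; the change above then
            // gives up restoring (mRestorePtsFailed)
        }
    }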
+
status_t OMXNodeInstance::getExtensionIndex(
const char *parameterName, OMX_INDEXTYPE *index) {
Mutex::Autolock autoLock(mLock);
@@ -1570,133 +1749,22 @@
return StatusFromOMXError(err);
}
-inline static const char *asString(IOMX::InternalOptionType i, const char *def = "??") {
- switch (i) {
- case IOMX::INTERNAL_OPTION_SUSPEND: return "SUSPEND";
- case IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY:
- return "REPEAT_PREVIOUS_FRAME_DELAY";
- case IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP: return "MAX_TIMESTAMP_GAP";
- case IOMX::INTERNAL_OPTION_MAX_FPS: return "MAX_FPS";
- case IOMX::INTERNAL_OPTION_START_TIME: return "START_TIME";
- case IOMX::INTERNAL_OPTION_TIME_LAPSE: return "TIME_LAPSE";
- case IOMX::INTERNAL_OPTION_TIME_OFFSET: return "TIME_OFFSET";
- default: return def;
- }
+status_t OMXNodeInstance::dispatchMessage(const omx_message &msg) {
+ mDispatcher->post(msg, true /*realTime*/);
+ return OK;
}
-template<typename T>
-static bool getInternalOption(
- const void *data, size_t size, T *out) {
- if (size != sizeof(T)) {
- return false;
+status_t OMXNodeInstance::setQuirks(OMX_U32 quirks) {
+ if (quirks & ~kQuirksMask) {
+ return BAD_VALUE;
}
- *out = *(T*)data;
- return true;
-}
-status_t OMXNodeInstance::setInternalOption(
- OMX_U32 portIndex,
- IOMX::InternalOptionType type,
- const void *data,
- size_t size) {
- CLOG_CONFIG(setInternalOption, "%s(%d): %s:%u %zu@%p",
- asString(type), type, portString(portIndex), portIndex, size, data);
- switch (type) {
- case IOMX::INTERNAL_OPTION_SUSPEND:
- case IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY:
- case IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP:
- case IOMX::INTERNAL_OPTION_MAX_FPS:
- case IOMX::INTERNAL_OPTION_START_TIME:
- case IOMX::INTERNAL_OPTION_TIME_LAPSE:
- case IOMX::INTERNAL_OPTION_TIME_OFFSET:
- case IOMX::INTERNAL_OPTION_COLOR_ASPECTS:
- {
- const sp<GraphicBufferSource> &bufferSource =
- getGraphicBufferSource();
+ mQuirks = quirks;
- if (bufferSource == NULL || portIndex != kPortIndexInput) {
- CLOGW("setInternalOption is only for Surface input");
- return ERROR_UNSUPPORTED;
- }
-
- if (type == IOMX::INTERNAL_OPTION_SUSPEND) {
- bool suspend;
- if (!getInternalOption(data, size, &suspend)) {
- return INVALID_OPERATION;
- }
-
- CLOG_CONFIG(setInternalOption, "suspend=%d", suspend);
- bufferSource->suspend(suspend);
- } else if (type == IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY) {
- int64_t delayUs;
- if (!getInternalOption(data, size, &delayUs)) {
- return INVALID_OPERATION;
- }
-
- CLOG_CONFIG(setInternalOption, "delayUs=%lld", (long long)delayUs);
- return bufferSource->setRepeatPreviousFrameDelayUs(delayUs);
- } else if (type == IOMX::INTERNAL_OPTION_TIME_OFFSET) {
- int64_t timeOffsetUs;
- if (!getInternalOption(data, size, &timeOffsetUs)) {
- return INVALID_OPERATION;
- }
- CLOG_CONFIG(setInternalOption, "bufferOffsetUs=%lld", (long long)timeOffsetUs);
- return bufferSource->setInputBufferTimeOffset(timeOffsetUs);
- } else if (type == IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP) {
- int64_t maxGapUs;
- if (!getInternalOption(data, size, &maxGapUs)) {
- return INVALID_OPERATION;
- }
-
- CLOG_CONFIG(setInternalOption, "gapUs=%lld", (long long)maxGapUs);
- return bufferSource->setMaxTimestampGapUs(maxGapUs);
- } else if (type == IOMX::INTERNAL_OPTION_MAX_FPS) {
- float maxFps;
- if (!getInternalOption(data, size, &maxFps)) {
- return INVALID_OPERATION;
- }
-
- CLOG_CONFIG(setInternalOption, "maxFps=%f", maxFps);
- return bufferSource->setMaxFps(maxFps);
- } else if (type == IOMX::INTERNAL_OPTION_START_TIME) {
- int64_t skipFramesBeforeUs;
- if (!getInternalOption(data, size, &skipFramesBeforeUs)) {
- return INVALID_OPERATION;
- }
-
- CLOG_CONFIG(setInternalOption, "beforeUs=%lld", (long long)skipFramesBeforeUs);
- bufferSource->setSkipFramesBeforeUs(skipFramesBeforeUs);
- } else if (type == IOMX::INTERNAL_OPTION_TIME_LAPSE) {
- GraphicBufferSource::TimeLapseConfig config;
- if (!getInternalOption(data, size, &config)) {
- return INVALID_OPERATION;
- }
-
- CLOG_CONFIG(setInternalOption, "perFrameUs=%lld perCaptureUs=%lld",
- (long long)config.mTimePerFrameUs, (long long)config.mTimePerCaptureUs);
-
- return bufferSource->setTimeLapseConfig(config);
- } else if (type == IOMX::INTERNAL_OPTION_COLOR_ASPECTS) {
- ColorAspects aspects;
- if (!getInternalOption(data, size, &aspects)) {
- return INVALID_OPERATION;
- }
-
- CLOG_CONFIG(setInternalOption, "setting color aspects");
- bufferSource->setColorAspects(aspects);
- }
-
- return OK;
- }
-
- default:
- return ERROR_UNSUPPORTED;
- }
+ return OK;
}
bool OMXNodeInstance::handleMessage(omx_message &msg) {
- const sp<GraphicBufferSource>& bufferSource(getGraphicBufferSource());
-
if (msg.type == omx_message::FILL_BUFFER_DONE) {
OMX_BUFFERHEADERTYPE *buffer =
findBufferHeader(msg.u.extended_buffer_data.buffer, kPortIndexOutput);
@@ -1726,12 +1794,8 @@
}
buffer_meta->CopyFromOMX(buffer);
- if (bufferSource != NULL) {
- // fix up the buffer info (especially timestamp) if needed
- bufferSource->codecBufferFilled(buffer);
-
- msg.u.extended_buffer_data.timestamp = buffer->nTimeStamp;
- }
+ // fix up the buffer info (especially timestamp) if needed
+ codecBufferFilled(msg);
} else if (msg.type == omx_message::EMPTY_BUFFER_DONE) {
OMX_BUFFERHEADERTYPE *buffer =
findBufferHeader(msg.u.buffer_data.buffer, kPortIndexInput);
@@ -1747,20 +1811,100 @@
EBD, WITH_STATS(EMPTY_BUFFER(msg.u.buffer_data.buffer, buffer, msg.fenceFd)));
}
+ const sp<IOMXBufferSource> bufferSource(getBufferSource());
+
if (bufferSource != NULL) {
- // This is one of the buffers used exclusively by
- // GraphicBufferSource.
+ // This is one of the buffers used exclusively by IOMXBufferSource.
// Don't dispatch a message back to ACodec, since it doesn't
// know that anyone asked to have the buffer emptied and will
// be very confused.
- bufferSource->codecBufferEmptied(buffer, msg.fenceFd);
+ bufferSource->onInputBufferEmptied(
+ msg.u.buffer_data.buffer, OMXFenceParcelable(msg.fenceFd));
return true;
}
+ } else if (msg.type == omx_message::EVENT &&
+ msg.u.event_data.event == OMX_EventDataSpaceChanged) {
+ handleDataSpaceChanged(msg);
}
return false;
}
+bool OMXNodeInstance::handleDataSpaceChanged(omx_message &msg) {
+ android_dataspace dataSpace = (android_dataspace) msg.u.event_data.data1;
+ android_dataspace origDataSpace = dataSpace;
+
+ if (!ColorUtils::convertDataSpaceToV0(dataSpace)) {
+ // Do not process the data space change, don't notify client either
+ return true;
+ }
+
+ android_pixel_format pixelFormat = (android_pixel_format)msg.u.event_data.data3;
+
+ ColorAspects requestedAspects = ColorUtils::unpackToColorAspects(msg.u.event_data.data2);
+ ColorAspects aspects = requestedAspects; // initially requested aspects
+
+ // request color aspects to encode
+ OMX_INDEXTYPE index;
+ status_t err = getExtensionIndex(
+ "OMX.google.android.index.describeColorAspects", &index);
+ if (err == OK) {
+ // V0 dataspace
+ DescribeColorAspectsParams params;
+ InitOMXParams(&params);
+ params.nPortIndex = kPortIndexInput;
+ params.nDataSpace = origDataSpace;
+ params.nPixelFormat = pixelFormat;
+ params.bDataSpaceChanged = OMX_TRUE;
+ params.sAspects = requestedAspects;
+
+ err = getConfig(index, &params, sizeof(params));
+ if (err == OK) {
+ aspects = params.sAspects;
+ ALOGD("Codec resolved it to (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
+ params.sAspects.mRange, asString(params.sAspects.mRange),
+ params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
+ params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
+ params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
+ err, asString(err));
+ } else {
+ params.sAspects = aspects;
+ err = OK;
+ }
+ params.bDataSpaceChanged = OMX_FALSE;
+ for (int triesLeft = 2; --triesLeft >= 0; ) {
+ status_t err = setConfig(index, &params, sizeof(params));
+ if (err == OK) {
+ err = getConfig(index, &params, sizeof(params));
+ }
+ if (err != OK || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(
+ params.sAspects, aspects)) {
+ // if we can't set or get color aspects, still communicate dataspace to client
+ break;
+ }
+
+ ALOGW_IF(triesLeft == 0, "Codec repeatedly changed requested ColorAspects.");
+ }
+ }
+
+ ALOGV("Set color aspects to (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
+ aspects.mRange, asString(aspects.mRange),
+ aspects.mPrimaries, asString(aspects.mPrimaries),
+ aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs),
+ aspects.mTransfer, asString(aspects.mTransfer),
+ err, asString(err));
+
+ // signal client that the dataspace has changed; this will update the output format
+ // TODO: we should tie this to an output buffer somehow, and signal the change
+ // just before the output buffer is returned to the client, but there are many
+ // ways this could fail (e.g. flushing), and we are not yet supporting this scenario.
+
+ msg.u.event_data.data1 = (OMX_U32) dataSpace;
+ msg.u.event_data.data2 = (OMX_U32) ColorUtils::packToU32(aspects);
+
+ return false;
+}
+
void OMXNodeInstance::onMessages(std::list<omx_message> &messages) {
for (std::list<omx_message>::iterator it = messages.begin(); it != messages.end(); ) {
if (handleMessage(*it)) {
@@ -1775,15 +1919,11 @@
}
}
-void OMXNodeInstance::onObserverDied(OMXMaster *master) {
+void OMXNodeInstance::onObserverDied() {
ALOGE("!!! Observer died. Quickly, do something, ... anything...");
// Try to force shutdown of the node and hope for the best.
- freeNode(master);
-}
-
-void OMXNodeInstance::onGetHandleFailed() {
- delete this;
+ freeNode();
}
// OMXNodeInstance::OnEvent calls OMX::OnEvent, which then calls here.
@@ -1827,13 +1967,13 @@
CLOGI_(level, onEvent, "%s(%x), %s(%x), %s(%x)",
asString(event), event, arg1String, arg1, arg2String, arg2);
- const sp<GraphicBufferSource>& bufferSource(getGraphicBufferSource());
+ const sp<IOMXBufferSource> bufferSource(getBufferSource());
if (bufferSource != NULL
&& event == OMX_EventCmdComplete
&& arg1 == OMX_CommandStateSet
&& arg2 == OMX_StateExecuting) {
- bufferSource->omxExecuting();
+ bufferSource->onOmxExecuting();
}
// allow configuration if we return to the loaded state
@@ -1860,8 +2000,39 @@
if (instance->mDying) {
return OMX_ErrorNone;
}
- return instance->owner()->OnEvent(
- instance->nodeID(), eEvent, nData1, nData2, pEventData);
+
+ instance->onEvent(eEvent, nData1, nData2);
+
+ // output rendered events are not processed as regular events until they hit the observer
+ if (eEvent == OMX_EventOutputRendered) {
+ if (pEventData == NULL) {
+ return OMX_ErrorBadParameter;
+ }
+
+ // process data from array
+ OMX_VIDEO_RENDEREVENTTYPE *renderData = (OMX_VIDEO_RENDEREVENTTYPE *)pEventData;
+ for (size_t i = 0; i < nData1; ++i) {
+ omx_message msg;
+ msg.type = omx_message::FRAME_RENDERED;
+ msg.fenceFd = -1;
+ msg.u.render_data.timestamp = renderData[i].nMediaTimeUs;
+ msg.u.render_data.nanoTime = renderData[i].nSystemTimeNs;
+
+ instance->mDispatcher->post(msg, false /* realTime */);
+ }
+ return OMX_ErrorNone;
+ }
+
+ omx_message msg;
+ msg.type = omx_message::EVENT;
+ msg.fenceFd = -1;
+ msg.u.event_data.event = eEvent;
+ msg.u.event_data.data1 = nData1;
+ msg.u.event_data.data2 = nData2;
+
+ instance->mDispatcher->post(msg, true /* realTime */);
+
+ return OMX_ErrorNone;
}
// static
@@ -1878,8 +2049,14 @@
return OMX_ErrorNone;
}
int fenceFd = instance->retrieveFenceFromMeta_l(pBuffer, kPortIndexOutput);
- return instance->owner()->OnEmptyBufferDone(instance->nodeID(),
- instance->findBufferID(pBuffer), pBuffer, fenceFd);
+
+ omx_message msg;
+ msg.type = omx_message::EMPTY_BUFFER_DONE;
+ msg.fenceFd = fenceFd;
+ msg.u.buffer_data.buffer = instance->findBufferID(pBuffer);
+ instance->mDispatcher->post(msg);
+
+ return OMX_ErrorNone;
}
// static
@@ -1896,8 +2073,18 @@
return OMX_ErrorNone;
}
int fenceFd = instance->retrieveFenceFromMeta_l(pBuffer, kPortIndexOutput);
- return instance->owner()->OnFillBufferDone(instance->nodeID(),
- instance->findBufferID(pBuffer), pBuffer, fenceFd);
+
+ omx_message msg;
+ msg.type = omx_message::FILL_BUFFER_DONE;
+ msg.fenceFd = fenceFd;
+ msg.u.extended_buffer_data.buffer = instance->findBufferID(pBuffer);
+ msg.u.extended_buffer_data.range_offset = pBuffer->nOffset;
+ msg.u.extended_buffer_data.range_length = pBuffer->nFilledLen;
+ msg.u.extended_buffer_data.flags = pBuffer->nFlags;
+ msg.u.extended_buffer_data.timestamp = pBuffer->nTimeStamp;
+ instance->mDispatcher->post(msg);
+
+ return OMX_ErrorNone;
}
void OMXNodeInstance::addActiveBuffer(OMX_U32 portIndex, OMX::buffer_id id) {
diff --git a/media/libstagefright/omx/OMXUtils.cpp b/media/libstagefright/omx/OMXUtils.cpp
index 799696c..38aad39 100644
--- a/media/libstagefright/omx/OMXUtils.cpp
+++ b/media/libstagefright/omx/OMXUtils.cpp
@@ -20,7 +20,10 @@
#include <string.h>
#include <media/hardware/HardwareAPI.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/MediaErrors.h>
+#include <media/MediaDefs.h>
#include "OMXUtils.h"
namespace android {
@@ -101,5 +104,273 @@
/**************************************************************************************************/
+const char *GetComponentRole(bool isEncoder, const char *mime) {
+ struct MimeToRole {
+ const char *mime;
+ const char *decoderRole;
+ const char *encoderRole;
+ };
+
+ static const MimeToRole kMimeToRole[] = {
+ { MEDIA_MIMETYPE_AUDIO_MPEG,
+ "audio_decoder.mp3", "audio_encoder.mp3" },
+ { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
+ "audio_decoder.mp1", "audio_encoder.mp1" },
+ { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
+ "audio_decoder.mp2", "audio_encoder.mp2" },
+ { MEDIA_MIMETYPE_AUDIO_AMR_NB,
+ "audio_decoder.amrnb", "audio_encoder.amrnb" },
+ { MEDIA_MIMETYPE_AUDIO_AMR_WB,
+ "audio_decoder.amrwb", "audio_encoder.amrwb" },
+ { MEDIA_MIMETYPE_AUDIO_AAC,
+ "audio_decoder.aac", "audio_encoder.aac" },
+ { MEDIA_MIMETYPE_AUDIO_VORBIS,
+ "audio_decoder.vorbis", "audio_encoder.vorbis" },
+ { MEDIA_MIMETYPE_AUDIO_OPUS,
+ "audio_decoder.opus", "audio_encoder.opus" },
+ { MEDIA_MIMETYPE_AUDIO_G711_MLAW,
+ "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" },
+ { MEDIA_MIMETYPE_AUDIO_G711_ALAW,
+ "audio_decoder.g711alaw", "audio_encoder.g711alaw" },
+ { MEDIA_MIMETYPE_VIDEO_AVC,
+ "video_decoder.avc", "video_encoder.avc" },
+ { MEDIA_MIMETYPE_VIDEO_HEVC,
+ "video_decoder.hevc", "video_encoder.hevc" },
+ { MEDIA_MIMETYPE_VIDEO_MPEG4,
+ "video_decoder.mpeg4", "video_encoder.mpeg4" },
+ { MEDIA_MIMETYPE_VIDEO_H263,
+ "video_decoder.h263", "video_encoder.h263" },
+ { MEDIA_MIMETYPE_VIDEO_VP8,
+ "video_decoder.vp8", "video_encoder.vp8" },
+ { MEDIA_MIMETYPE_VIDEO_VP9,
+ "video_decoder.vp9", "video_encoder.vp9" },
+ { MEDIA_MIMETYPE_AUDIO_RAW,
+ "audio_decoder.raw", "audio_encoder.raw" },
+ { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
+ "video_decoder.dolby-vision", "video_encoder.dolby-vision" },
+ { MEDIA_MIMETYPE_AUDIO_FLAC,
+ "audio_decoder.flac", "audio_encoder.flac" },
+ { MEDIA_MIMETYPE_AUDIO_MSGSM,
+ "audio_decoder.gsm", "audio_encoder.gsm" },
+ { MEDIA_MIMETYPE_VIDEO_MPEG2,
+ "video_decoder.mpeg2", "video_encoder.mpeg2" },
+ { MEDIA_MIMETYPE_AUDIO_AC3,
+ "audio_decoder.ac3", "audio_encoder.ac3" },
+ { MEDIA_MIMETYPE_AUDIO_EAC3,
+ "audio_decoder.eac3", "audio_encoder.eac3" },
+ };
+
+ static const size_t kNumMimeToRole =
+ sizeof(kMimeToRole) / sizeof(kMimeToRole[0]);
+
+ size_t i;
+ for (i = 0; i < kNumMimeToRole; ++i) {
+ if (!strcasecmp(mime, kMimeToRole[i].mime)) {
+ break;
+ }
+ }
+
+ if (i == kNumMimeToRole) {
+ return NULL;
+ }
+
+ return isEncoder ? kMimeToRole[i].encoderRole
+ : kMimeToRole[i].decoderRole;
+}
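
A rough usage sketch of the helper above (a fragment rather than a full program, assuming OMXUtils.h is included; MEDIA_MIMETYPE_VIDEO_AVC is "video/avc"):

    // Hypothetical caller; GetComponentRole() returns NULL for MIME types missing from the table.
    const char *role = GetComponentRole(false /* isEncoder */, "video/avc");
    // role == "video_decoder.avc"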
+
+status_t SetComponentRole(const sp<IOMXNode> &omxNode, const char *role) {
+ OMX_PARAM_COMPONENTROLETYPE roleParams;
+ InitOMXParams(&roleParams);
+
+ strncpy((char *)roleParams.cRole,
+ role, OMX_MAX_STRINGNAME_SIZE - 1);
+
+ roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';
+
+ return omxNode->setParameter(
+ OMX_IndexParamStandardComponentRole,
+ &roleParams, sizeof(roleParams));
+}
+
+bool DescribeDefaultColorFormat(DescribeColorFormat2Params &params) {
+ MediaImage2 &image = params.sMediaImage;
+ memset(&image, 0, sizeof(image));
+
+ image.mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
+ image.mNumPlanes = 0;
+
+ const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat;
+ image.mWidth = params.nFrameWidth;
+ image.mHeight = params.nFrameHeight;
+
+ // only supporting YUV420
+ if (fmt != OMX_COLOR_FormatYUV420Planar &&
+ fmt != OMX_COLOR_FormatYUV420PackedPlanar &&
+ fmt != OMX_COLOR_FormatYUV420SemiPlanar &&
+ fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar &&
+ fmt != (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YV12) {
+ ALOGW("do not know color format 0x%x = %d", fmt, fmt);
+ return false;
+ }
+
+ // TEMPORARY FIX for some vendors that advertise sliceHeight as 0
+ if (params.nStride != 0 && params.nSliceHeight == 0) {
+ ALOGW("using sliceHeight=%u instead of what codec advertised (=0)",
+ params.nFrameHeight);
+ params.nSliceHeight = params.nFrameHeight;
+ }
+
+ // we need stride and slice-height to be non-zero and sensible. These values were chosen to
+ // prevent integer overflows further down the line, and do not indicate support for
+ // 32kx32k video.
+ if (params.nStride == 0 || params.nSliceHeight == 0
+ || params.nStride > 32768 || params.nSliceHeight > 32768) {
+ ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u",
+ fmt, fmt, params.nStride, params.nSliceHeight);
+ return false;
+ }
+
+ // set-up YUV format
+ image.mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV;
+ image.mNumPlanes = 3;
+ image.mBitDepth = 8;
+ image.mBitDepthAllocated = 8;
+ image.mPlane[image.Y].mOffset = 0;
+ image.mPlane[image.Y].mColInc = 1;
+ image.mPlane[image.Y].mRowInc = params.nStride;
+ image.mPlane[image.Y].mHorizSubsampling = 1;
+ image.mPlane[image.Y].mVertSubsampling = 1;
+
+ switch ((int)fmt) {
+ case HAL_PIXEL_FORMAT_YV12:
+ if (params.bUsingNativeBuffers) {
+ size_t ystride = align(params.nStride, 16);
+ size_t cstride = align(params.nStride / 2, 16);
+ image.mPlane[image.Y].mRowInc = ystride;
+
+ image.mPlane[image.V].mOffset = ystride * params.nSliceHeight;
+ image.mPlane[image.V].mColInc = 1;
+ image.mPlane[image.V].mRowInc = cstride;
+ image.mPlane[image.V].mHorizSubsampling = 2;
+ image.mPlane[image.V].mVertSubsampling = 2;
+
+ image.mPlane[image.U].mOffset = image.mPlane[image.V].mOffset
+ + (cstride * params.nSliceHeight / 2);
+ image.mPlane[image.U].mColInc = 1;
+ image.mPlane[image.U].mRowInc = cstride;
+ image.mPlane[image.U].mHorizSubsampling = 2;
+ image.mPlane[image.U].mVertSubsampling = 2;
+ break;
+ } else {
+ // fall through as YV12 is used for YUV420Planar by some codecs
+ }
+
+ case OMX_COLOR_FormatYUV420Planar:
+ case OMX_COLOR_FormatYUV420PackedPlanar:
+ image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
+ image.mPlane[image.U].mColInc = 1;
+ image.mPlane[image.U].mRowInc = params.nStride / 2;
+ image.mPlane[image.U].mHorizSubsampling = 2;
+ image.mPlane[image.U].mVertSubsampling = 2;
+
+ image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset
+ + (params.nStride * params.nSliceHeight / 4);
+ image.mPlane[image.V].mColInc = 1;
+ image.mPlane[image.V].mRowInc = params.nStride / 2;
+ image.mPlane[image.V].mHorizSubsampling = 2;
+ image.mPlane[image.V].mVertSubsampling = 2;
+ break;
+
+ case OMX_COLOR_FormatYUV420SemiPlanar:
+ // FIXME: NV21 for sw-encoder, NV12 for decoder and hw-encoder
+ case OMX_COLOR_FormatYUV420PackedSemiPlanar:
+ // NV12
+ image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
+ image.mPlane[image.U].mColInc = 2;
+ image.mPlane[image.U].mRowInc = params.nStride;
+ image.mPlane[image.U].mHorizSubsampling = 2;
+ image.mPlane[image.U].mVertSubsampling = 2;
+
+ image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset + 1;
+ image.mPlane[image.V].mColInc = 2;
+ image.mPlane[image.V].mRowInc = params.nStride;
+ image.mPlane[image.V].mHorizSubsampling = 2;
+ image.mPlane[image.V].mVertSubsampling = 2;
+ break;
+
+ default:
+ TRESPASS();
+ }
+ return true;
+}
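
For the planar case above, a small standalone check of where the planes land for an illustrative 1920x1080 buffer with stride 1920 and slice height 1088 (not part of the change):

    // Worked example of the YUV420Planar plane layout computed above (illustrative values).
    #include <cstdio>

    int main() {
        unsigned stride = 1920, sliceHeight = 1088;
        unsigned yOffset = 0;
        unsigned uOffset = stride * sliceHeight;                // 2088960
        unsigned vOffset = uOffset + stride * sliceHeight / 4;  // 2611200
        unsigned chromaRowInc = stride / 2;                     // 960
        printf("Y@%u U@%u V@%u chroma rowInc=%u\n", yOffset, uOffset, vOffset, chromaRowInc);
    }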
+
+bool DescribeColorFormat(
+ const sp<IOMXNode> &omxNode,
+ DescribeColorFormat2Params &describeParams)
+{
+ OMX_INDEXTYPE describeColorFormatIndex;
+ if (omxNode->getExtensionIndex(
+ "OMX.google.android.index.describeColorFormat",
+ &describeColorFormatIndex) == OK) {
+ DescribeColorFormatParams describeParamsV1(describeParams);
+ if (omxNode->getParameter(
+ describeColorFormatIndex,
+ &describeParamsV1, sizeof(describeParamsV1)) == OK) {
+ describeParams.initFromV1(describeParamsV1);
+ return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
+ }
+ } else if (omxNode->getExtensionIndex(
+ "OMX.google.android.index.describeColorFormat2", &describeColorFormatIndex) == OK
+ && omxNode->getParameter(
+ describeColorFormatIndex, &describeParams, sizeof(describeParams)) == OK) {
+ return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
+ }
+
+ return DescribeDefaultColorFormat(describeParams);
+}
+
+// static
+bool IsFlexibleColorFormat(
+ const sp<IOMXNode> &omxNode,
+ uint32_t colorFormat, bool usingNativeBuffers, OMX_U32 *flexibleEquivalent) {
+ DescribeColorFormat2Params describeParams;
+ InitOMXParams(&describeParams);
+ describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat;
+ // reasonable dummy values
+ describeParams.nFrameWidth = 128;
+ describeParams.nFrameHeight = 128;
+ describeParams.nStride = 128;
+ describeParams.nSliceHeight = 128;
+ describeParams.bUsingNativeBuffers = (OMX_BOOL)usingNativeBuffers;
+
+ CHECK(flexibleEquivalent != NULL);
+
+ if (!DescribeColorFormat(omxNode, describeParams)) {
+ return false;
+ }
+
+ const MediaImage2 &img = describeParams.sMediaImage;
+ if (img.mType == MediaImage2::MEDIA_IMAGE_TYPE_YUV) {
+ if (img.mNumPlanes != 3
+ || img.mPlane[img.Y].mHorizSubsampling != 1
+ || img.mPlane[img.Y].mVertSubsampling != 1) {
+ return false;
+ }
+
+ // YUV 420
+ if (img.mPlane[img.U].mHorizSubsampling == 2
+ && img.mPlane[img.U].mVertSubsampling == 2
+ && img.mPlane[img.V].mHorizSubsampling == 2
+ && img.mPlane[img.V].mVertSubsampling == 2) {
+ // possible flexible YUV420 format
+ if (img.mBitDepth <= 8) {
+ *flexibleEquivalent = OMX_COLOR_FormatYUV420Flexible;
+ return true;
+ }
+ }
+ }
+ return false;
+}
+
} // namespace android
diff --git a/media/libstagefright/omx/OMXUtils.h b/media/libstagefright/omx/OMXUtils.h
index 0c5e537..3f533ff 100644
--- a/media/libstagefright/omx/OMXUtils.h
+++ b/media/libstagefright/omx/OMXUtils.h
@@ -17,6 +17,8 @@
#ifndef OMX_UTILS_H_
#define OMX_UTILS_H_
+#include <media/IOMX.h>
+
/***** DO NOT USE THIS INCLUDE!!! INTERNAL ONLY!!! UNLESS YOU RESIDE IN media/libstagefright *****/
// OMXUtils contains omx-specific utility functions for stagefright/omx library
@@ -36,6 +38,19 @@
status_t StatusFromOMXError(OMX_ERRORTYPE err);
+const char *GetComponentRole(bool isEncoder, const char *mime);
+status_t SetComponentRole(const sp<IOMXNode> &omxNode, const char *role);
+
+struct DescribeColorFormat2Params;
+
+bool IsFlexibleColorFormat(
+ const sp<IOMXNode> &omxNode, uint32_t colorFormat,
+ bool usingNativeBuffers, OMX_U32 *flexibleEquivalent);
+bool DescribeDefaultColorFormat(DescribeColorFormat2Params &describeParams);
+bool DescribeColorFormat(
+ const sp<IOMXNode> &omxNode,
+ DescribeColorFormat2Params &describeParams);
+
} // namespace android
#endif
diff --git a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
index 7c975f7..761b425 100644
--- a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
+++ b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
@@ -77,19 +77,34 @@
switch (index) {
case OMX_IndexParamPortDefinition:
{
- portIndex = ((OMX_PARAM_PORTDEFINITIONTYPE *)params)->nPortIndex;
+ const OMX_PARAM_PORTDEFINITIONTYPE *portDefs =
+ (const OMX_PARAM_PORTDEFINITIONTYPE *) params;
+ if (!isValidOMXParam(portDefs)) {
+ return false;
+ }
+ portIndex = portDefs->nPortIndex;
break;
}
case OMX_IndexParamAudioPcm:
{
- portIndex = ((OMX_AUDIO_PARAM_PCMMODETYPE *)params)->nPortIndex;
+ const OMX_AUDIO_PARAM_PCMMODETYPE *pcmMode =
+ (const OMX_AUDIO_PARAM_PCMMODETYPE *) params;
+ if (!isValidOMXParam(pcmMode)) {
+ return false;
+ }
+ portIndex = pcmMode->nPortIndex;
break;
}
case OMX_IndexParamAudioAac:
{
- portIndex = ((OMX_AUDIO_PARAM_AACPROFILETYPE *)params)->nPortIndex;
+ const OMX_AUDIO_PARAM_AACPROFILETYPE *aacMode =
+ (const OMX_AUDIO_PARAM_AACPROFILETYPE *) params;
+ if (!isValidOMXParam(aacMode)) {
+ return false;
+ }
+ portIndex = aacMode->nPortIndex;
break;
}
diff --git a/media/libstagefright/omx/SoftOMXPlugin.cpp b/media/libstagefright/omx/SoftOMXPlugin.cpp
old mode 100755
new mode 100644
index 0f9c00c..a773ca2
--- a/media/libstagefright/omx/SoftOMXPlugin.cpp
+++ b/media/libstagefright/omx/SoftOMXPlugin.cpp
@@ -56,6 +56,7 @@
{ "OMX.google.vp8.decoder", "vpxdec", "video_decoder.vp8" },
{ "OMX.google.vp9.decoder", "vpxdec", "video_decoder.vp9" },
{ "OMX.google.vp8.encoder", "vpxenc", "video_encoder.vp8" },
+ { "OMX.google.vp9.encoder", "vpxenc", "video_encoder.vp9" },
{ "OMX.google.raw.decoder", "rawdec", "audio_decoder.raw" },
{ "OMX.google.flac.encoder", "flacenc", "audio_encoder.flac" },
{ "OMX.google.gsm.decoder", "gsmdec", "audio_decoder.gsm" },
diff --git a/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
index 0f9c118..adf846a 100644
--- a/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
@@ -480,18 +480,25 @@
unsigned green = src[greenOffset];
unsigned blue = src[blueOffset];
- // using ITU-R BT.601 conversion matrix
+ // Using ITU-R BT.601-7 (03/2011)
+ // 2.5.1: Ey' = ( 0.299*R + 0.587*G + 0.114*B)
+ // 2.5.2: ECr' = ( 0.701*R - 0.587*G - 0.114*B) / 1.402
+ // ECb' = (-0.299*R - 0.587*G + 0.886*B) / 1.772
+ // 2.5.3: Y = 219 * Ey' + 16
+ // Cr = 224 * ECr' + 128
+ // Cb = 224 * ECb' + 128
+
unsigned luma =
- ((red * 66 + green * 129 + blue * 25) >> 8) + 16;
+ ((red * 65 + green * 129 + blue * 25 + 128) >> 8) + 16;
dstY[x] = luma;
if ((x & 1) == 0 && (y & 1) == 0) {
unsigned U =
- ((-red * 38 - green * 74 + blue * 112) >> 8) + 128;
+ ((-red * 38 - green * 74 + blue * 112 + 128) >> 8) + 128;
unsigned V =
- ((red * 112 - green * 94 - blue * 18) >> 8) + 128;
+ ((red * 112 - green * 94 - blue * 18 + 128) >> 8) + 128;
dstU[x >> 1] = U;
dstV[x >> 1] = V;
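
The net effect of the "+ 128" terms added above is round-to-nearest instead of truncation in the fixed-point conversion; a quick standalone check for a mid-gray pixel (illustrative, not part of the change):

    // Rounding check for RGB (128, 128, 128) with the luma coefficients used above.
    #include <cstdio>

    int main() {
        unsigned r = 128, g = 128, b = 128;
        unsigned truncated = ((r * 65 + g * 129 + b * 25) >> 8) + 16;        // 125
        unsigned rounded   = ((r * 65 + g * 129 + b * 25 + 128) >> 8) + 16;  // 126
        // BT.601 reference: 219 * (128 / 255.0) + 16 = 125.9, so rounding lands closer.
        printf("truncated=%u rounded=%u\n", truncated, rounded);
    }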
diff --git a/media/libstagefright/omx/tests/Android.mk b/media/libstagefright/omx/tests/Android.mk
index 02e97f1..ec14eb7 100644
--- a/media/libstagefright/omx/tests/Android.mk
+++ b/media/libstagefright/omx/tests/Android.mk
@@ -12,7 +12,6 @@
$(TOP)/frameworks/native/include/media/openmax
LOCAL_CFLAGS += -Werror -Wall
-LOCAL_CLANG := true
LOCAL_MODULE := omx_tests
@@ -39,6 +38,5 @@
frameworks/av/media/libstagefright/omx \
LOCAL_CFLAGS += -Werror -Wall
-LOCAL_CLANG := true
include $(BUILD_NATIVE_TEST)
diff --git a/media/libstagefright/omx/tests/OMXHarness.cpp b/media/libstagefright/omx/tests/OMXHarness.cpp
index 50bb0de..935d7bf 100644
--- a/media/libstagefright/omx/tests/OMXHarness.cpp
+++ b/media/libstagefright/omx/tests/OMXHarness.cpp
@@ -38,11 +38,31 @@
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/SimpleDecodingSource.h>
+#include <media/OMXBuffer.h>
#define DEFAULT_TIMEOUT 500000
namespace android {
+/////////////////////////////////////////////////////////////////////
+
+struct Harness::CodecObserver : public BnOMXObserver {
+ CodecObserver(const sp<Harness> &harness, int32_t gen)
+ : mHarness(harness), mGeneration(gen) {}
+
+ void onMessages(const std::list<omx_message> &messages) override;
+
+private:
+ sp<Harness> mHarness;
+ int32_t mGeneration;
+};
+
+void Harness::CodecObserver::onMessages(const std::list<omx_message> &messages) {
+ mHarness->handleMessages(mGeneration, messages);
+}
+
+/////////////////////////////////////////////////////////////////////
+
Harness::Harness()
: mInitCheck(NO_INIT) {
mInitCheck = initOMX();
@@ -64,18 +84,17 @@
return mOMX != 0 ? OK : NO_INIT;
}
-void Harness::onMessages(const std::list<omx_message> &messages) {
+void Harness::handleMessages(int32_t gen, const std::list<omx_message> &messages) {
Mutex::Autolock autoLock(mLock);
for (std::list<omx_message>::const_iterator it = messages.cbegin(); it != messages.cend(); ) {
mMessageQueue.push_back(*it++);
+ mLastMsgGeneration = gen;
}
mMessageAddedCondition.signal();
}
-status_t Harness::dequeueMessageForNode(
- IOMX::node_id node, omx_message *msg, int64_t timeoutUs) {
- return dequeueMessageForNodeIgnoringBuffers(
- node, NULL, NULL, msg, timeoutUs);
+status_t Harness::dequeueMessageForNode(omx_message *msg, int64_t timeoutUs) {
+ return dequeueMessageForNodeIgnoringBuffers(NULL, NULL, msg, timeoutUs);
}
// static
@@ -120,7 +139,6 @@
}
status_t Harness::dequeueMessageForNodeIgnoringBuffers(
- IOMX::node_id node,
Vector<Buffer> *inputBuffers,
Vector<Buffer> *outputBuffers,
omx_message *msg, int64_t timeoutUs) {
@@ -128,21 +146,22 @@
for (;;) {
Mutex::Autolock autoLock(mLock);
+ // Messages are queued in batches; if the last batch queued is
+ // from a node that has already expired, discard those messages.
+ if (mLastMsgGeneration < mCurGeneration) {
+ mMessageQueue.clear();
+ }
List<omx_message>::iterator it = mMessageQueue.begin();
while (it != mMessageQueue.end()) {
- if ((*it).node == node) {
- if (handleBufferMessage(*it, inputBuffers, outputBuffers)) {
- it = mMessageQueue.erase(it);
- continue;
- }
-
- *msg = *it;
- mMessageQueue.erase(it);
-
- return OK;
+ if (handleBufferMessage(*it, inputBuffers, outputBuffers)) {
+ it = mMessageQueue.erase(it);
+ continue;
}
- ++it;
+ *msg = *it;
+ mMessageQueue.erase(it);
+
+ return OK;
}
status_t err = (timeoutUs < 0)
@@ -158,16 +177,15 @@
}
status_t Harness::getPortDefinition(
- IOMX::node_id node, OMX_U32 portIndex,
- OMX_PARAM_PORTDEFINITIONTYPE *def) {
+ OMX_U32 portIndex, OMX_PARAM_PORTDEFINITIONTYPE *def) {
def->nSize = sizeof(*def);
def->nVersion.s.nVersionMajor = 1;
def->nVersion.s.nVersionMinor = 0;
def->nVersion.s.nRevision = 0;
def->nVersion.s.nStep = 0;
def->nPortIndex = portIndex;
- return mOMX->getParameter(
- node, OMX_IndexParamPortDefinition, def, sizeof(*def));
+ return mOMXNode->getParameter(
+ OMX_IndexParamPortDefinition, def, sizeof(*def));
}
#define EXPECT(condition, info) \
@@ -180,12 +198,11 @@
status_t Harness::allocatePortBuffers(
const sp<MemoryDealer> &dealer,
- IOMX::node_id node, OMX_U32 portIndex,
- Vector<Buffer> *buffers) {
+ OMX_U32 portIndex, Vector<Buffer> *buffers) {
buffers->clear();
OMX_PARAM_PORTDEFINITIONTYPE def;
- status_t err = getPortDefinition(node, portIndex, &def);
+ status_t err = getPortDefinition(portIndex, &def);
EXPECT_SUCCESS(err, "getPortDefinition");
for (OMX_U32 i = 0; i < def.nBufferCountActual; ++i) {
@@ -194,9 +211,8 @@
buffer.mFlags = 0;
CHECK(buffer.mMemory != NULL);
- err = mOMX->allocateBufferWithBackup(
- node, portIndex, buffer.mMemory, &buffer.mID, buffer.mMemory->size());
- EXPECT_SUCCESS(err, "allocateBuffer");
+ err = mOMXNode->useBuffer(portIndex, buffer.mMemory, &buffer.mID);
+ EXPECT_SUCCESS(err, "useBuffer");
buffers->push(buffer);
}
@@ -204,7 +220,7 @@
return OK;
}
-status_t Harness::setRole(IOMX::node_id node, const char *role) {
+status_t Harness::setRole(const char *role) {
OMX_PARAM_COMPONENTROLETYPE params;
params.nSize = sizeof(params);
params.nVersion.s.nVersionMajor = 1;
@@ -214,31 +230,31 @@
strncpy((char *)params.cRole, role, OMX_MAX_STRINGNAME_SIZE - 1);
params.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';
- return mOMX->setParameter(
- node, OMX_IndexParamStandardComponentRole,
+ return mOMXNode->setParameter(
+ OMX_IndexParamStandardComponentRole,
&params, sizeof(params));
}
struct NodeReaper {
- NodeReaper(const sp<Harness> &harness, IOMX::node_id node)
+ NodeReaper(const sp<Harness> &harness, const sp<IOMXNode> &omxNode)
: mHarness(harness),
- mNode(node) {
+ mOMXNode(omxNode) {
}
~NodeReaper() {
- if (mNode != 0) {
- mHarness->mOMX->freeNode(mNode);
- mNode = 0;
+ if (mOMXNode != 0) {
+ mOMXNode->freeNode();
+ mOMXNode = NULL;
}
}
void disarm() {
- mNode = 0;
+ mOMXNode = NULL;
}
private:
sp<Harness> mHarness;
- IOMX::node_id mNode;
+ sp<IOMXNode> mOMXNode;
NodeReaper(const NodeReaper &);
NodeReaper &operator=(const NodeReaper &);
@@ -264,23 +280,23 @@
}
sp<MemoryDealer> dealer = new MemoryDealer(16 * 1024 * 1024, "OMXHarness");
- IOMX::node_id node;
- status_t err =
- mOMX->allocateNode(componentName, this, NULL, &node);
+ sp<CodecObserver> observer = new CodecObserver(this, ++mCurGeneration);
+
+ status_t err = mOMX->allocateNode(componentName, observer, &mOMXNode);
EXPECT_SUCCESS(err, "allocateNode");
- NodeReaper reaper(this, node);
+ NodeReaper reaper(this, mOMXNode);
- err = setRole(node, componentRole);
+ err = setRole(componentRole);
EXPECT_SUCCESS(err, "setRole");
// Initiate transition Loaded->Idle
- err = mOMX->sendCommand(node, OMX_CommandStateSet, OMX_StateIdle);
+ err = mOMXNode->sendCommand(OMX_CommandStateSet, OMX_StateIdle);
EXPECT_SUCCESS(err, "sendCommand(go-to-Idle)");
omx_message msg;
- err = dequeueMessageForNode(node, &msg, DEFAULT_TIMEOUT);
+ err = dequeueMessageForNode(&msg, DEFAULT_TIMEOUT);
// Make sure node doesn't just transition to idle before we are done
// allocating all input and output buffers.
EXPECT(err == TIMED_OUT,
@@ -289,17 +305,17 @@
// Now allocate buffers.
Vector<Buffer> inputBuffers;
- err = allocatePortBuffers(dealer, node, 0, &inputBuffers);
+ err = allocatePortBuffers(dealer, 0, &inputBuffers);
EXPECT_SUCCESS(err, "allocatePortBuffers(input)");
- err = dequeueMessageForNode(node, &msg, DEFAULT_TIMEOUT);
+ err = dequeueMessageForNode(&msg, DEFAULT_TIMEOUT);
CHECK_EQ(err, (status_t)TIMED_OUT);
Vector<Buffer> outputBuffers;
- err = allocatePortBuffers(dealer, node, 1, &outputBuffers);
+ err = allocatePortBuffers(dealer, 1, &outputBuffers);
EXPECT_SUCCESS(err, "allocatePortBuffers(output)");
- err = dequeueMessageForNode(node, &msg, DEFAULT_TIMEOUT);
+ err = dequeueMessageForNode(&msg, DEFAULT_TIMEOUT);
EXPECT(err == OK
&& msg.type == omx_message::EVENT
&& msg.u.event_data.event == OMX_EventCmdComplete
@@ -309,10 +325,10 @@
"after all input and output buffers were allocated.");
// Initiate transition Idle->Executing
- err = mOMX->sendCommand(node, OMX_CommandStateSet, OMX_StateExecuting);
+ err = mOMXNode->sendCommand(OMX_CommandStateSet, OMX_StateExecuting);
EXPECT_SUCCESS(err, "sendCommand(go-to-Executing)");
- err = dequeueMessageForNode(node, &msg, DEFAULT_TIMEOUT);
+ err = dequeueMessageForNode(&msg, DEFAULT_TIMEOUT);
EXPECT(err == OK
&& msg.type == omx_message::EVENT
&& msg.u.event_data.event == OMX_EventCmdComplete
@@ -322,17 +338,17 @@
"executing state.");
for (size_t i = 0; i < outputBuffers.size(); ++i) {
- err = mOMX->fillBuffer(node, outputBuffers[i].mID);
+ err = mOMXNode->fillBuffer(outputBuffers[i].mID, OMXBuffer::sPreset);
EXPECT_SUCCESS(err, "fillBuffer");
outputBuffers.editItemAt(i).mFlags |= kBufferBusy;
}
- err = mOMX->sendCommand(node, OMX_CommandFlush, 1);
+ err = mOMXNode->sendCommand(OMX_CommandFlush, 1);
EXPECT_SUCCESS(err, "sendCommand(flush-output-port)");
err = dequeueMessageForNodeIgnoringBuffers(
- node, &inputBuffers, &outputBuffers, &msg, DEFAULT_TIMEOUT);
+ &inputBuffers, &outputBuffers, &msg, DEFAULT_TIMEOUT);
EXPECT(err == OK
&& msg.type == omx_message::EVENT
&& msg.u.event_data.event == OMX_EventCmdComplete
@@ -347,18 +363,18 @@
}
for (size_t i = 0; i < outputBuffers.size(); ++i) {
- err = mOMX->fillBuffer(node, outputBuffers[i].mID);
+ err = mOMXNode->fillBuffer(outputBuffers[i].mID, OMXBuffer::sPreset);
EXPECT_SUCCESS(err, "fillBuffer");
outputBuffers.editItemAt(i).mFlags |= kBufferBusy;
}
// Initiate transition Executing->Idle
- err = mOMX->sendCommand(node, OMX_CommandStateSet, OMX_StateIdle);
+ err = mOMXNode->sendCommand(OMX_CommandStateSet, OMX_StateIdle);
EXPECT_SUCCESS(err, "sendCommand(go-to-Idle)");
err = dequeueMessageForNodeIgnoringBuffers(
- node, &inputBuffers, &outputBuffers, &msg, DEFAULT_TIMEOUT);
+ &inputBuffers, &outputBuffers, &msg, DEFAULT_TIMEOUT);
EXPECT(err == OK
&& msg.type == omx_message::EVENT
&& msg.u.event_data.event == OMX_EventCmdComplete
@@ -382,28 +398,28 @@
}
// Initiate transition Idle->Loaded
- err = mOMX->sendCommand(node, OMX_CommandStateSet, OMX_StateLoaded);
+ err = mOMXNode->sendCommand(OMX_CommandStateSet, OMX_StateLoaded);
EXPECT_SUCCESS(err, "sendCommand(go-to-Loaded)");
// Make sure node doesn't just transition to loaded before we are done
// freeing all input and output buffers.
- err = dequeueMessageForNode(node, &msg, DEFAULT_TIMEOUT);
+ err = dequeueMessageForNode(&msg, DEFAULT_TIMEOUT);
CHECK_EQ(err, (status_t)TIMED_OUT);
for (size_t i = 0; i < inputBuffers.size(); ++i) {
- err = mOMX->freeBuffer(node, 0, inputBuffers[i].mID);
+ err = mOMXNode->freeBuffer(0, inputBuffers[i].mID);
EXPECT_SUCCESS(err, "freeBuffer");
}
- err = dequeueMessageForNode(node, &msg, DEFAULT_TIMEOUT);
+ err = dequeueMessageForNode(&msg, DEFAULT_TIMEOUT);
CHECK_EQ(err, (status_t)TIMED_OUT);
for (size_t i = 0; i < outputBuffers.size(); ++i) {
- err = mOMX->freeBuffer(node, 1, outputBuffers[i].mID);
+ err = mOMXNode->freeBuffer(1, outputBuffers[i].mID);
EXPECT_SUCCESS(err, "freeBuffer");
}
- err = dequeueMessageForNode(node, &msg, DEFAULT_TIMEOUT);
+ err = dequeueMessageForNode(&msg, DEFAULT_TIMEOUT);
EXPECT(err == OK
&& msg.type == omx_message::EVENT
&& msg.u.event_data.event == OMX_EventCmdComplete
@@ -412,12 +428,12 @@
"Component did not properly transition to from idle to "
"loaded state after freeing all input and output buffers.");
- err = mOMX->freeNode(node);
+ err = mOMXNode->freeNode();
EXPECT_SUCCESS(err, "freeNode");
reaper.disarm();
- node = 0;
+ mOMXNode = NULL;
return OK;
}
diff --git a/media/libstagefright/omx/tests/OMXHarness.h b/media/libstagefright/omx/tests/OMXHarness.h
index 1ebf3aa..0fe00a6 100644
--- a/media/libstagefright/omx/tests/OMXHarness.h
+++ b/media/libstagefright/omx/tests/OMXHarness.h
@@ -29,7 +29,7 @@
class MemoryDealer;
-struct Harness : public BnOMXObserver {
+struct Harness : public RefBase {
enum BufferFlags {
kBufferBusy = 1
};
@@ -43,25 +43,21 @@
status_t initCheck() const;
- status_t dequeueMessageForNode(
- IOMX::node_id node, omx_message *msg, int64_t timeoutUs = -1);
+ status_t dequeueMessageForNode(omx_message *msg, int64_t timeoutUs = -1);
status_t dequeueMessageForNodeIgnoringBuffers(
- IOMX::node_id node,
Vector<Buffer> *inputBuffers,
Vector<Buffer> *outputBuffers,
omx_message *msg, int64_t timeoutUs = -1);
status_t getPortDefinition(
- IOMX::node_id node, OMX_U32 portIndex,
- OMX_PARAM_PORTDEFINITIONTYPE *def);
+ OMX_U32 portIndex, OMX_PARAM_PORTDEFINITIONTYPE *def);
status_t allocatePortBuffers(
const sp<MemoryDealer> &dealer,
- IOMX::node_id node, OMX_U32 portIndex,
- Vector<Buffer> *buffers);
+ OMX_U32 portIndex, Vector<Buffer> *buffers);
- status_t setRole(IOMX::node_id node, const char *role);
+ status_t setRole(const char *role);
status_t testStateTransitions(
const char *componentName, const char *componentRole);
@@ -74,20 +70,22 @@
status_t testAll();
- virtual void onMessages(const std::list<omx_message> &messages);
-
protected:
virtual ~Harness();
private:
friend struct NodeReaper;
+ struct CodecObserver;
Mutex mLock;
status_t mInitCheck;
sp<IOMX> mOMX;
+ sp<IOMXNode> mOMXNode;
List<omx_message> mMessageQueue;
Condition mMessageAddedCondition;
+ int32_t mLastMsgGeneration;
+ int32_t mCurGeneration;
status_t initOMX();
@@ -96,6 +94,8 @@
Vector<Buffer> *inputBuffers,
Vector<Buffer> *outputBuffers);
+ void handleMessages(int32_t gen, const std::list<omx_message> &messages);
+
Harness(const Harness &);
Harness &operator=(const Harness &);
};
diff --git a/media/libstagefright/rtsp/APacketSource.cpp b/media/libstagefright/rtsp/APacketSource.cpp
index cfafaa7..8ba9e02 100644
--- a/media/libstagefright/rtsp/APacketSource.cpp
+++ b/media/libstagefright/rtsp/APacketSource.cpp
@@ -418,7 +418,7 @@
if (sessionDesc->getDurationUs(&durationUs)) {
mFormat->setInt64(kKeyDuration, durationUs);
} else {
- mFormat->setInt64(kKeyDuration, 60 * 60 * 1000000ll);
+ mFormat->setInt64(kKeyDuration, -1ll);
}
mInitCheck = OK;
diff --git a/media/libstagefright/rtsp/ASessionDescription.cpp b/media/libstagefright/rtsp/ASessionDescription.cpp
index 8b0331a..325084c 100644
--- a/media/libstagefright/rtsp/ASessionDescription.cpp
+++ b/media/libstagefright/rtsp/ASessionDescription.cpp
@@ -17,7 +17,6 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "ASessionDescription"
#include <utils/Log.h>
-#include <cutils/log.h>
#include "ASessionDescription.h"
@@ -212,7 +211,7 @@
*PT = x;
- char key[32];
+ char key[20];
snprintf(key, sizeof(key), "a=rtpmap:%lu", x);
if (findAttribute(index, key, desc)) {
snprintf(key, sizeof(key), "a=fmtp:%lu", x);
@@ -231,11 +230,8 @@
*width = 0;
*height = 0;
- char key[33];
+ char key[20];
snprintf(key, sizeof(key), "a=framesize:%lu", PT);
- if (PT > 9999999) {
- android_errorWriteLog(0x534e4554, "25747670");
- }
AString value;
if (!findAttribute(index, key, &value)) {
return false;
diff --git a/media/libstagefright/rtsp/Android.mk b/media/libstagefright/rtsp/Android.mk
index 35301ce..3472e49 100644
--- a/media/libstagefright/rtsp/Android.mk
+++ b/media/libstagefright/rtsp/Android.mk
@@ -32,7 +32,6 @@
endif
LOCAL_CFLAGS += -Werror -Wall
-LOCAL_CLANG := true
LOCAL_SANITIZE := unsigned-integer-overflow signed-integer-overflow
LOCAL_ADDITIONAL_DEPENDENCIES := $(LOCAL_PATH)/Android.mk
@@ -58,7 +57,6 @@
$(TOP)/frameworks/native/include/media/openmax
LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow
LOCAL_MODULE_TAGS := optional
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index 76e2e6e..5505aa4 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -25,6 +25,7 @@
#endif
#include <utils/Log.h>
+#include <cutils/properties.h> // for property_get
#include "APacketSource.h"
#include "ARTPConnection.h"
@@ -807,11 +808,7 @@
result = UNKNOWN_ERROR;
} else {
parsePlayResponse(response);
-
- sp<AMessage> timeout = new AMessage('tiou', this);
- mCheckTimeoutGeneration++;
- timeout->setInt32("tioucheck", mCheckTimeoutGeneration);
- timeout->post(kStartupTimeoutUs);
+ postTimeout();
}
}
@@ -1153,10 +1150,7 @@
// Post new timeout in order to make sure to use
// fake timestamps if no new Sender Reports arrive
- sp<AMessage> timeout = new AMessage('tiou', this);
- mCheckTimeoutGeneration++;
- timeout->setInt32("tioucheck", mCheckTimeoutGeneration);
- timeout->post(kStartupTimeoutUs);
+ postTimeout();
}
}
@@ -1248,10 +1242,7 @@
// Start new timeoutgeneration to avoid getting timeout
// before PLAY response arrive
- sp<AMessage> timeout = new AMessage('tiou', this);
- mCheckTimeoutGeneration++;
- timeout->setInt32("tioucheck", mCheckTimeoutGeneration);
- timeout->post(kStartupTimeoutUs);
+ postTimeout();
int64_t timeUs;
CHECK(msg->findInt64("time", &timeUs));
@@ -1305,10 +1296,7 @@
// Post new timeout in order to make sure to use
// fake timestamps if no new Sender Reports arrive
- sp<AMessage> timeout = new AMessage('tiou', this);
- mCheckTimeoutGeneration++;
- timeout->setInt32("tioucheck", mCheckTimeoutGeneration);
- timeout->post(kStartupTimeoutUs);
+ postTimeout();
ssize_t i = response->mHeaders.indexOfKey("rtp-info");
CHECK_GE(i, 0);
@@ -1964,6 +1952,16 @@
msg->post();
}
+ void postTimeout() {
+ sp<AMessage> timeout = new AMessage('tiou', this);
+ mCheckTimeoutGeneration++;
+ timeout->setInt32("tioucheck", mCheckTimeoutGeneration);
+
+ int64_t startupTimeoutUs;
+ startupTimeoutUs = property_get_int64("media.rtsp.timeout-us", kStartupTimeoutUs);
+ timeout->post(startupTimeoutUs);
+ }
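+ // With this helper in place, the RTSP startup timeout can be overridden at runtime through
+ // the media.rtsp.timeout-us system property (for example, adb shell setprop
+ // media.rtsp.timeout-us 20000000 on a debuggable build is one way to set it); when the
+ // property is unset, property_get_int64() falls back to kStartupTimeoutUs.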
+
DISALLOW_EVIL_CONSTRUCTORS(MyHandler);
};
diff --git a/media/libstagefright/tests/Android.mk b/media/libstagefright/tests/Android.mk
index d1c9d36..a93770a 100644
--- a/media/libstagefright/tests/Android.mk
+++ b/media/libstagefright/tests/Android.mk
@@ -33,7 +33,6 @@
$(TOP)/frameworks/native/include/media/hardware \
LOCAL_CFLAGS += -Werror -Wall
-LOCAL_CLANG := true
LOCAL_32_BIT_ONLY := true
@@ -65,7 +64,6 @@
$(TOP)/frameworks/native/include/media/openmax \
LOCAL_CFLAGS += -Werror -Wall
-LOCAL_CLANG := true
include $(BUILD_NATIVE_TEST)
@@ -95,7 +93,6 @@
LOCAL_32_BIT_ONLY := true
LOCAL_CFLAGS += -Werror -Wall
-LOCAL_CLANG := true
include $(BUILD_NATIVE_TEST)
diff --git a/media/libstagefright/timedtext/Android.mk b/media/libstagefright/timedtext/Android.mk
index 8d128b8..0b0facf 100644
--- a/media/libstagefright/timedtext/Android.mk
+++ b/media/libstagefright/timedtext/Android.mk
@@ -5,7 +5,6 @@
TextDescriptions.cpp \
LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow
LOCAL_C_INCLUDES:= \
diff --git a/media/libstagefright/webm/Android.mk b/media/libstagefright/webm/Android.mk
index ce580ae..096fd07 100644
--- a/media/libstagefright/webm/Android.mk
+++ b/media/libstagefright/webm/Android.mk
@@ -4,7 +4,6 @@
LOCAL_CPPFLAGS += -D__STDINT_LIMITS
LOCAL_CFLAGS += -Werror -Wall
-LOCAL_CLANG := true
LOCAL_SANITIZE := unsigned-integer-overflow signed-integer-overflow
LOCAL_SRC_FILES:= EbmlUtil.cpp \
diff --git a/media/libstagefright/wifi-display/Android.mk b/media/libstagefright/wifi-display/Android.mk
index c87875d..c5322a4 100644
--- a/media/libstagefright/wifi-display/Android.mk
+++ b/media/libstagefright/wifi-display/Android.mk
@@ -31,7 +31,6 @@
libutils \
LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-LOCAL_CLANG := true
LOCAL_SANITIZE := signed-integer-overflow
LOCAL_MODULE:= libstagefright_wfd
diff --git a/media/libstagefright/wifi-display/source/Converter.cpp b/media/libstagefright/wifi-display/source/Converter.cpp
index 471152e..273af18 100644
--- a/media/libstagefright/wifi-display/source/Converter.cpp
+++ b/media/libstagefright/wifi-display/source/Converter.cpp
@@ -26,6 +26,7 @@
#include <cutils/properties.h>
#include <gui/Surface.h>
#include <media/ICrypto.h>
+#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -720,7 +721,7 @@
#endif
sp<ABuffer> buffer;
- sp<ABuffer> outbuf = mEncoderOutputBuffers.itemAt(bufferIndex);
+ sp<MediaCodecBuffer> outbuf = mEncoderOutputBuffers.itemAt(bufferIndex);
if (outbuf->meta()->findPointer("handle", (void**)&handle) &&
handle != NULL) {
diff --git a/media/libstagefright/wifi-display/source/Converter.h b/media/libstagefright/wifi-display/source/Converter.h
index b182990..ad95ab5 100644
--- a/media/libstagefright/wifi-display/source/Converter.h
+++ b/media/libstagefright/wifi-display/source/Converter.h
@@ -25,6 +25,7 @@
struct ABuffer;
class IGraphicBufferProducer;
struct MediaCodec;
+class MediaCodecBuffer;
#define ENABLE_SILENCE_DETECTION 0
@@ -106,8 +107,8 @@
sp<IGraphicBufferProducer> mGraphicBufferProducer;
- Vector<sp<ABuffer> > mEncoderInputBuffers;
- Vector<sp<ABuffer> > mEncoderOutputBuffers;
+ Vector<sp<MediaCodecBuffer> > mEncoderInputBuffers;
+ Vector<sp<MediaCodecBuffer> > mEncoderOutputBuffers;
List<size_t> mAvailEncoderInputIndices;
diff --git a/media/mtp/MtpDataPacket.cpp b/media/mtp/MtpDataPacket.cpp
index 0381edf..0356753 100644
--- a/media/mtp/MtpDataPacket.cpp
+++ b/media/mtp/MtpDataPacket.cpp
@@ -16,17 +16,38 @@
#define LOG_TAG "MtpDataPacket"
+#include "MtpDataPacket.h"
+
+#include <algorithm>
+#include <fcntl.h>
#include <stdio.h>
#include <sys/types.h>
-#include <fcntl.h>
-
#include <usbhost/usbhost.h>
-
-#include "MtpDataPacket.h"
#include "MtpStringBuffer.h"
namespace android {
+namespace {
+// Reads the exact |count| bytes from |fd| to |buf|.
+// Returns |count| if it succeeds in reading the bytes. Otherwise returns -1. If it reaches EOF,
+// the function regards it as an error.
+ssize_t readExactBytes(int fd, void* buf, size_t count) {
+ if (count > SSIZE_MAX) {
+ return -1;
+ }
+ size_t read_count = 0;
+ while (read_count < count) {
+ int result = read(fd, static_cast<int8_t*>(buf) + read_count, count - read_count);
+ // Assume that EOF is error.
+ if (result <= 0) {
+ return -1;
+ }
+ read_count += result;
+ }
+ return read_count == count ? count : -1;
+}
+} // namespace
+
MtpDataPacket::MtpDataPacket()
: MtpPacket(MTP_BUFFER_SIZE), // MAX_USBFS_BUFFER_SIZE
mOffset(MTP_CONTAINER_HEADER_SIZE)
@@ -498,7 +519,7 @@
// Wait for result of readDataAsync
int MtpDataPacket::readDataWait(struct usb_device *device) {
- struct usb_request *req = usb_request_wait(device);
+ struct usb_request *req = usb_request_wait(device, -1);
return (req ? req->actual_length : -1);
}
@@ -511,29 +532,104 @@
return length;
}
-int MtpDataPacket::writeDataHeader(struct usb_request *request, uint32_t length) {
- MtpPacket::putUInt32(MTP_CONTAINER_LENGTH_OFFSET, length);
- MtpPacket::putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_DATA);
- request->buffer = mBuffer;
- request->buffer_length = MTP_CONTAINER_HEADER_SIZE;
- int ret = transfer(request);
- return (ret < 0 ? ret : 0);
-}
+int MtpDataPacket::write(struct usb_request *request, UrbPacketDivisionMode divisionMode) {
+ if (mPacketSize < MTP_CONTAINER_HEADER_SIZE || mPacketSize > MTP_BUFFER_SIZE) {
+ ALOGE("Illegal packet size.");
+ return -1;
+ }
-int MtpDataPacket::write(struct usb_request *request) {
MtpPacket::putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize);
MtpPacket::putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_DATA);
- request->buffer = mBuffer;
- request->buffer_length = mPacketSize;
- int ret = transfer(request);
- return (ret < 0 ? ret : 0);
+
+ size_t processedBytes = 0;
+ while (processedBytes < mPacketSize) {
+ const size_t write_size =
+ processedBytes == 0 && divisionMode == FIRST_PACKET_ONLY_HEADER ?
+ MTP_CONTAINER_HEADER_SIZE : mPacketSize - processedBytes;
+ request->buffer = mBuffer + processedBytes;
+ request->buffer_length = write_size;
+ const int result = transfer(request);
+ if (result < 0) {
+ ALOGE("Failed to write bytes to the device.");
+ return -1;
+ }
+ processedBytes += result;
+ }
+
+ return processedBytes == mPacketSize ? processedBytes : -1;
}
-int MtpDataPacket::write(struct usb_request *request, void* buffer, uint32_t length) {
- request->buffer = buffer;
- request->buffer_length = length;
- int ret = transfer(request);
- return (ret < 0 ? ret : 0);
+int MtpDataPacket::write(struct usb_request *request,
+ UrbPacketDivisionMode divisionMode,
+ int fd,
+ size_t payloadSize) {
+ // Obtain the greatest multiple of minimum packet size that is not greater than
+ // MTP_BUFFER_SIZE.
+ if (request->max_packet_size <= 0) {
+ ALOGE("Cannot determine bulk transfer size due to illegal max packet size %d.",
+ request->max_packet_size);
+ return -1;
+ }
+ const size_t maxBulkTransferSize =
+ MTP_BUFFER_SIZE - (MTP_BUFFER_SIZE % request->max_packet_size);
+ const size_t containerLength = payloadSize + MTP_CONTAINER_HEADER_SIZE;
+ size_t processedBytes = 0;
+ bool readError = false;
+
+ // Bind the packet with given request.
+ request->buffer = mBuffer;
+ allocate(maxBulkTransferSize);
+
+ while (processedBytes < containerLength) {
+ size_t bulkTransferSize = 0;
+
+ // prepare header.
+ const bool headerSent = processedBytes != 0;
+ if (!headerSent) {
+ MtpPacket::putUInt32(MTP_CONTAINER_LENGTH_OFFSET, containerLength);
+ MtpPacket::putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_DATA);
+ bulkTransferSize += MTP_CONTAINER_HEADER_SIZE;
+ }
+
+ // Prepare payload.
+ if (headerSent || divisionMode == FIRST_PACKET_HAS_PAYLOAD) {
+ const size_t processedPayloadBytes =
+ headerSent ? processedBytes - MTP_CONTAINER_HEADER_SIZE : 0;
+ const size_t maxRead = payloadSize - processedPayloadBytes;
+ const size_t maxWrite = maxBulkTransferSize - bulkTransferSize;
+ const size_t bulkTransferPayloadSize = std::min(maxRead, maxWrite);
+ // Read the payload from the file descriptor.
+ if (!readError) {
+ const ssize_t result = readExactBytes(
+ fd,
+ mBuffer + bulkTransferSize,
+ bulkTransferPayloadSize);
+ if (result < 0) {
+ ALOGE("Found an error while reading data from FD. Send 0 data instead.");
+ readError = true;
+ }
+ }
+ if (readError) {
+ memset(mBuffer + bulkTransferSize, 0, bulkTransferPayloadSize);
+ }
+ bulkTransferSize += bulkTransferPayloadSize;
+ }
+
+ // Bulk transfer.
+ mPacketSize = bulkTransferSize;
+ request->buffer_length = bulkTransferSize;
+ const int result = transfer(request);
+ if (result != static_cast<ssize_t>(bulkTransferSize)) {
+ // Cannot recover writing error.
+ ALOGE("Found an error while write data to MtpDevice.");
+ return -1;
+ }
+
+ // Update variables.
+ processedBytes += bulkTransferSize;
+ }
+
+ return readError ? -1 : processedBytes;
}
#endif // MTP_HOST
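
The chunked write above caps every URB at the greatest multiple of the endpoint's max packet size that still fits in MTP_BUFFER_SIZE. A standalone sketch of that size computation, with illustrative constants standing in for the real MTP_BUFFER_SIZE, MTP_CONTAINER_HEADER_SIZE, and request->max_packet_size:

#include <cstddef>
#include <cstdio>

// Illustrative stand-ins; the real values are MTP_BUFFER_SIZE,
// MTP_CONTAINER_HEADER_SIZE, and the usb_request's max_packet_size.
constexpr size_t kMtpBufferSize = 16384;
constexpr size_t kContainerHeaderSize = 12;

// Largest bulk transfer that is a whole multiple of the endpoint's max packet size.
size_t maxBulkTransferSize(size_t maxPacketSize) {
    return kMtpBufferSize - (kMtpBufferSize % maxPacketSize);
}

int main() {
    // 512 is a typical bulk endpoint max packet size; the real code reads it
    // from the usb_request at runtime.
    std::printf("max bulk transfer size = %zu bytes\n", maxBulkTransferSize(512));
    return 0;
}
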
diff --git a/media/mtp/MtpDataPacket.h b/media/mtp/MtpDataPacket.h
index 6240f28..82e0ee4 100644
--- a/media/mtp/MtpDataPacket.h
+++ b/media/mtp/MtpDataPacket.h
@@ -93,7 +93,6 @@
inline void putEmptyString() { putUInt8(0); }
inline void putEmptyArray() { putUInt32(0); }
-
#ifdef MTP_DEVICE
// fill our buffer with data from the given file descriptor
int read(int fd);
@@ -110,9 +109,15 @@
int readDataWait(struct usb_device *device);
int readDataHeader(struct usb_request *ep);
- int writeDataHeader(struct usb_request *ep, uint32_t length);
- int write(struct usb_request *ep);
- int write(struct usb_request *ep, void* buffer, uint32_t length);
+ // Writes a whole data packet, including its payload, to the endpoint given by |request|.
+ // |divisionMode| specifies whether the header and the payload are split into separate bulk
+ // transfers; see |UrbPacketDivisionMode| for the meaning of each value. Returns the number of
+ // bytes sent to the device (including the header) on success, or -1 on failure.
+ int write(struct usb_request *request, UrbPacketDivisionMode divisionMode);
+ // Same as the previous write method, but reads the payload from |fd|. If |size| is larger than
+ // MTP_BUFFER_SIZE, the data is sent with multiple bulk transfer requests.
+ int write(struct usb_request *request, UrbPacketDivisionMode divisionMode,
+ int fd, size_t size);
#endif
inline bool hasData() const { return mPacketSize > MTP_CONTAINER_HEADER_SIZE; }
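
A hedged usage sketch of the two overloads documented above; it assumes the mtp headers and an already-opened device, so it is illustrative rather than buildable on its own:

// Sketch only. `data` has been filled via the put* helpers, `req` is a usb_request
// bound to the bulk-out endpoint, `srcFd` is an open file descriptor, and `mode`
// mirrors the division mode the responder used for its own data packets
// (MtpDevice::mPacketDivisionMode).
int sent       = data.write(req, mode);                       // in-memory packet
int sentFromFd = data.write(req, mode, srcFd, payloadSize);   // payload streamed from fd
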
diff --git a/media/mtp/MtpDevice.cpp b/media/mtp/MtpDevice.cpp
index bd89a51..82a2627 100644
--- a/media/mtp/MtpDevice.cpp
+++ b/media/mtp/MtpDevice.cpp
@@ -39,6 +39,12 @@
namespace android {
+namespace {
+
+static constexpr int USB_CONTROL_TRANSFER_TIMEOUT_MS = 200;
+
+} // namespace
+
#if 0
static bool isMtpDevice(uint16_t vendor, uint16_t product) {
// Sandisk Sansa Fuze
@@ -84,15 +90,18 @@
interface->bInterfaceSubClass == 1 && // Still Image Capture
interface->bInterfaceProtocol == 1) // Picture Transfer Protocol (PIMA 15470)
{
- char* manufacturerName = usb_device_get_manufacturer_name(device);
- char* productName = usb_device_get_product_name(device);
+ char* manufacturerName = usb_device_get_manufacturer_name(device,
+ USB_CONTROL_TRANSFER_TIMEOUT_MS);
+ char* productName = usb_device_get_product_name(device,
+ USB_CONTROL_TRANSFER_TIMEOUT_MS);
ALOGD("Found camera: \"%s\" \"%s\"\n", manufacturerName, productName);
free(manufacturerName);
free(productName);
} else if (interface->bInterfaceClass == 0xFF &&
interface->bInterfaceSubClass == 0xFF &&
interface->bInterfaceProtocol == 0) {
- char* interfaceName = usb_device_get_string(device, interface->iInterface);
+ char* interfaceName = usb_device_get_string(device, interface->iInterface,
+ USB_CONTROL_TRANSFER_TIMEOUT_MS);
if (!interfaceName) {
continue;
} else if (strcmp(interfaceName, "MTP")) {
@@ -102,8 +111,10 @@
free(interfaceName);
// Looks like an android style MTP device
- char* manufacturerName = usb_device_get_manufacturer_name(device);
- char* productName = usb_device_get_product_name(device);
+ char* manufacturerName = usb_device_get_manufacturer_name(device,
+ USB_CONTROL_TRANSFER_TIMEOUT_MS);
+ char* productName = usb_device_get_product_name(device,
+ USB_CONTROL_TRANSFER_TIMEOUT_MS);
ALOGD("Found MTP device: \"%s\" \"%s\"\n", manufacturerName, productName);
free(manufacturerName);
free(productName);
@@ -220,7 +231,10 @@
mTransactionID(0),
mReceivedResponse(false),
mProcessingEvent(false),
- mCurrentEventHandle(0)
+ mCurrentEventHandle(0),
+ mLastSendObjectInfoTransactionID(0),
+ mLastSendObjectInfoObjectHandle(0),
+ mPacketDivisionMode(FIRST_PACKET_HAS_PAYLOAD)
{
mRequestIn1 = usb_request_new(device, ep_in);
mRequestIn2 = usb_request_new(device, ep_in);
@@ -490,6 +504,8 @@
if (sendRequest(MTP_OPERATION_SEND_OBJECT_INFO) && sendData()) {
MtpResponseCode ret = readResponse();
if (ret == MTP_RESPONSE_OK) {
+ mLastSendObjectInfoTransactionID = mRequest.getTransactionID();
+ mLastSendObjectInfoObjectHandle = mResponse.getParameter(3);
info->mStorageID = mResponse.getParameter(1);
info->mParent = mResponse.getParameter(2);
info->mHandle = mResponse.getParameter(3);
@@ -502,31 +518,21 @@
bool MtpDevice::sendObject(MtpObjectHandle handle, int size, int srcFD) {
Mutex::Autolock autoLock(mMutex);
- int remaining = size;
- mRequest.reset();
- mRequest.setParameter(1, handle);
- bool error = false;
- if (sendRequest(MTP_OPERATION_SEND_OBJECT)) {
- // send data header
- writeDataHeader(MTP_OPERATION_SEND_OBJECT, remaining);
-
- // USB writes greater than 16K don't work
- char buffer[MTP_BUFFER_SIZE];
- while (remaining > 0) {
- int count = read(srcFD, buffer, sizeof(buffer));
- if (count > 0) {
- if (mData.write(mRequestOut, buffer, count) < 0) {
- error = true;
- }
- // FIXME check error
- remaining -= count;
- } else {
- break;
- }
- }
+ if (mLastSendObjectInfoTransactionID + 1 != mTransactionID ||
+ mLastSendObjectInfoObjectHandle != handle) {
+ ALOGE("A sendObject request must follow the sendObjectInfo request.");
+ return false;
}
- MtpResponseCode ret = readResponse();
- return (remaining == 0 && ret == MTP_RESPONSE_OK && !error);
+
+ mRequest.reset();
+ if (sendRequest(MTP_OPERATION_SEND_OBJECT)) {
+ mData.setOperationCode(mRequest.getOperationCode());
+ mData.setTransactionID(mRequest.getTransactionID());
+ const int writeResult = mData.write(mRequestOut, mPacketDivisionMode, srcFD, size);
+ const MtpResponseCode ret = readResponse();
+ return ret == MTP_RESPONSE_OK && writeResult > 0;
+ }
+ return false;
}
bool MtpDevice::deleteObject(MtpObjectHandle handle) {
@@ -698,8 +704,8 @@
return false;
}
- // If object size 0 byte, the remote device can reply response packet
- // without sending any data packets.
+ // If the object size is 0 bytes, the remote device may reply with a response packet without
+ // sending any data packets.
if (mData.getContainerType() == MTP_CONTAINER_TYPE_RESPONSE) {
mResponse.copyFrom(mData);
return mResponse.getResponseCode() == MTP_RESPONSE_OK;
@@ -722,6 +728,14 @@
{
int initialDataLength = 0;
void* const initialData = mData.getData(&initialDataLength);
+ if (fullLength > MTP_CONTAINER_HEADER_SIZE && initialDataLength == 0) {
+ // According to the MTP spec, the responder (MTP device) can choose between two ways of
+ // sending data: a) the first packet contains the header and as much of the payload as
+ // possible, or b) the first packet contains only the header. The initiator (MTP host) needs
+ // to remember which way the responder chose and send subsequent data in the same way.
+ ALOGD("Found a short packet that contains only a header.");
+ mPacketDivisionMode = FIRST_PACKET_ONLY_HEADER;
+ }
if (initialData) {
if (initialDataLength > 0) {
if (!callback(initialData, offset, initialDataLength, clientData)) {
@@ -845,7 +859,7 @@
ALOGV("sendData\n");
mData.setOperationCode(mRequest.getOperationCode());
mData.setTransactionID(mRequest.getTransactionID());
- int ret = mData.write(mRequestOut);
+ int ret = mData.write(mRequestOut, mPacketDivisionMode);
mData.dump();
return (ret >= 0);
}
@@ -872,12 +886,6 @@
}
}
-bool MtpDevice::writeDataHeader(MtpOperationCode operation, int dataLength) {
- mData.setOperationCode(operation);
- mData.setTransactionID(mRequest.getTransactionID());
- return (!mData.writeDataHeader(mRequestOut, dataLength));
-}
-
MtpResponseCode MtpDevice::readResponse() {
ALOGV("readResponse\n");
if (mReceivedResponse) {
diff --git a/media/mtp/MtpDevice.h b/media/mtp/MtpDevice.h
index 4be44cf..a9a3e0e 100644
--- a/media/mtp/MtpDevice.h
+++ b/media/mtp/MtpDevice.h
@@ -62,11 +62,18 @@
bool mProcessingEvent;
int mCurrentEventHandle;
+ // Used to check that a sendObject request immediately follows the last sendObjectInfo request.
+ MtpTransactionID mLastSendObjectInfoTransactionID;
+ MtpObjectHandle mLastSendObjectInfoObjectHandle;
+
// to ensure only one MTP transaction at a time
Mutex mMutex;
Mutex mEventMutex;
Mutex mEventMutexForInterrupt;
+ // Remember the device's packet division mode.
+ UrbPacketDivisionMode mPacketDivisionMode;
+
public:
typedef bool (*ReadObjectCallback)
(void* data, uint32_t offset, uint32_t length, void* clientData);
@@ -100,7 +107,7 @@
bool sendObject(MtpObjectHandle handle, int size, int srcFD);
bool deleteObject(MtpObjectHandle handle);
MtpObjectHandle getParent(MtpObjectHandle handle);
- MtpObjectHandle getStorageID(MtpObjectHandle handle);
+ MtpStorageID getStorageID(MtpObjectHandle handle);
MtpObjectPropertyList* getObjectPropsSupported(MtpObjectFormat format);
diff --git a/media/mtp/MtpDeviceInfo.cpp b/media/mtp/MtpDeviceInfo.cpp
index 3e1dff7..3d5cb06 100644
--- a/media/mtp/MtpDeviceInfo.cpp
+++ b/media/mtp/MtpDeviceInfo.cpp
@@ -69,6 +69,7 @@
if (!packet.getString(string)) return false;
mVendorExtensionDesc = strdup((const char *)string);
+ if (!mVendorExtensionDesc) return false;
if (!packet.getUInt16(mFunctionalMode)) return false;
mOperations = packet.getAUInt16();
@@ -84,12 +85,16 @@
if (!packet.getString(string)) return false;
mManufacturer = strdup((const char *)string);
+ if (!mManufacturer) return false;
if (!packet.getString(string)) return false;
mModel = strdup((const char *)string);
+ if (!mModel) return false;
if (!packet.getString(string)) return false;
mVersion = strdup((const char *)string);
+ if (!mVersion) return false;
if (!packet.getString(string)) return false;
mSerial = strdup((const char *)string);
+ if (!mSerial) return false;
return true;
}
diff --git a/media/mtp/MtpEventPacket.cpp b/media/mtp/MtpEventPacket.cpp
index 8e13ea9..d9ef311 100644
--- a/media/mtp/MtpEventPacket.cpp
+++ b/media/mtp/MtpEventPacket.cpp
@@ -66,7 +66,7 @@
}
int MtpEventPacket::readResponse(struct usb_device *device) {
- struct usb_request* const req = usb_request_wait(device);
+ struct usb_request* const req = usb_request_wait(device, -1);
if (req) {
mPacketSize = req->actual_length;
return req->actual_length;
diff --git a/media/mtp/MtpObjectInfo.cpp b/media/mtp/MtpObjectInfo.cpp
index 0573104..43b745f 100644
--- a/media/mtp/MtpObjectInfo.cpp
+++ b/media/mtp/MtpObjectInfo.cpp
@@ -77,6 +77,7 @@
if (!packet.getString(string)) return false;
mName = strdup((const char *)string);
+ if (!mName) return false;
if (!packet.getString(string)) return false;
if (parseDateTime((const char*)string, time))
@@ -88,6 +89,7 @@
if (!packet.getString(string)) return false;
mKeywords = strdup((const char *)string);
+ if (!mKeywords) return false;
return true;
}
diff --git a/media/mtp/MtpPacket.cpp b/media/mtp/MtpPacket.cpp
index 35ecb4f..3dd4248 100644
--- a/media/mtp/MtpPacket.cpp
+++ b/media/mtp/MtpPacket.cpp
@@ -70,8 +70,8 @@
char* bufptr = buffer;
for (size_t i = 0; i < mPacketSize; i++) {
- sprintf(bufptr, "%02X ", mBuffer[i]);
- bufptr += strlen(bufptr);
+ bufptr += snprintf(bufptr, sizeof(buffer) - (bufptr - buffer), "%02X ",
+ mBuffer[i]);
if (i % DUMP_BYTES_PER_ROW == (DUMP_BYTES_PER_ROW - 1)) {
ALOGV("%s", buffer);
bufptr = buffer;
diff --git a/media/mtp/MtpStorageInfo.cpp b/media/mtp/MtpStorageInfo.cpp
index 5d4ebbf..8801a38 100644
--- a/media/mtp/MtpStorageInfo.cpp
+++ b/media/mtp/MtpStorageInfo.cpp
@@ -58,8 +58,10 @@
if (!packet.getString(string)) return false;
mStorageDescription = strdup((const char *)string);
+ if (!mStorageDescription) return false;
if (!packet.getString(string)) return false;
mVolumeIdentifier = strdup((const char *)string);
+ if (!mVolumeIdentifier) return false;
return true;
}
diff --git a/media/mtp/MtpTypes.h b/media/mtp/MtpTypes.h
index 720c854..c749c66 100644
--- a/media/mtp/MtpTypes.h
+++ b/media/mtp/MtpTypes.h
@@ -73,6 +73,13 @@
typedef String8 MtpString;
+enum UrbPacketDivisionMode {
+ // The first packet contains only the header.
+ FIRST_PACKET_ONLY_HEADER,
+ // The first packet contains the header and as much payload as possible.
+ FIRST_PACKET_HAS_PAYLOAD
+};
+
}; // namespace android
#endif // _MTP_TYPES_H
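
A small standalone sketch of how the two modes affect the first bulk transfer of a container (the constant is a stand-in for MTP_CONTAINER_HEADER_SIZE; the real logic lives in MtpDataPacket::write):

#include <algorithm>
#include <cstddef>

constexpr size_t kHeaderSize = 12;  // stand-in for MTP_CONTAINER_HEADER_SIZE

// Bytes carried by the first URB of a container, depending on the division mode.
size_t firstTransferSize(bool firstPacketOnlyHeader, size_t containerLength,
                         size_t maxTransfer) {
    if (firstPacketOnlyHeader) {
        return kHeaderSize;                          // header alone; payload follows separately
    }
    return std::min(containerLength, maxTransfer);   // header plus as much payload as fits
}
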
diff --git a/media/ndk/Android.mk b/media/ndk/Android.mk
index a4f999f..74729e4 100644
--- a/media/ndk/Android.mk
+++ b/media/ndk/Android.mk
@@ -45,6 +45,7 @@
LOCAL_SHARED_LIBRARIES := \
libbinder \
libmedia \
+ libmediadrm \
libstagefright \
libstagefright_foundation \
liblog \
diff --git a/media/ndk/NdkMediaCodec.cpp b/media/ndk/NdkMediaCodec.cpp
index 3d1eca1..22c90e2 100644
--- a/media/ndk/NdkMediaCodec.cpp
+++ b/media/ndk/NdkMediaCodec.cpp
@@ -30,10 +30,10 @@
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaErrors.h>
+#include <media/MediaCodecBuffer.h>
using namespace android;
@@ -268,13 +268,17 @@
EXPORT
uint8_t* AMediaCodec_getInputBuffer(AMediaCodec *mData, size_t idx, size_t *out_size) {
- android::Vector<android::sp<android::ABuffer> > abufs;
+ android::Vector<android::sp<android::MediaCodecBuffer> > abufs;
if (mData->mCodec->getInputBuffers(&abufs) == 0) {
size_t n = abufs.size();
if (idx >= n) {
ALOGE("buffer index %zu out of range", idx);
return NULL;
}
+ if (abufs[idx] == NULL) {
+ ALOGE("buffer index %zu is NULL", idx);
+ return NULL;
+ }
if (out_size != NULL) {
*out_size = abufs[idx]->capacity();
}
@@ -286,7 +290,7 @@
EXPORT
uint8_t* AMediaCodec_getOutputBuffer(AMediaCodec *mData, size_t idx, size_t *out_size) {
- android::Vector<android::sp<android::ABuffer> > abufs;
+ android::Vector<android::sp<android::MediaCodecBuffer> > abufs;
if (mData->mCodec->getOutputBuffers(&abufs) == 0) {
size_t n = abufs.size();
if (idx >= n) {
diff --git a/media/ndk/NdkMediaDrm.cpp b/media/ndk/NdkMediaDrm.cpp
index 166e6f1..cdce932 100644
--- a/media/ndk/NdkMediaDrm.cpp
+++ b/media/ndk/NdkMediaDrm.cpp
@@ -99,11 +99,12 @@
break;
default:
ALOGE("Invalid event DrmPlugin::EventType %d, ignored", (int)eventType);
- return;
+ goto cleanup;
}
(*mListener)(mObj, &sessionId, ndkEventType, extra, data, dataSize);
+ cleanup:
delete [] sessionId.ptr;
delete [] data;
}
diff --git a/media/utils/Android.mk b/media/utils/Android.mk
index f482d1a..21d1b5b 100644
--- a/media/utils/Android.mk
+++ b/media/utils/Android.mk
@@ -20,6 +20,7 @@
BatteryNotifier.cpp \
ISchedulingPolicyService.cpp \
MemoryLeakTrackUtil.cpp \
+ ProcessInfo.cpp \
SchedulingPolicyService.cpp
LOCAL_SHARED_LIBRARIES := \
diff --git a/media/utils/BatteryNotifier.cpp b/media/utils/BatteryNotifier.cpp
index 341d391..7a7321f 100644
--- a/media/utils/BatteryNotifier.cpp
+++ b/media/utils/BatteryNotifier.cpp
@@ -29,7 +29,7 @@
BatteryNotifier::getInstance().onBatteryStatServiceDied();
}
-BatteryNotifier::BatteryNotifier() : mVideoRefCount(0), mAudioRefCount(0) {}
+BatteryNotifier::BatteryNotifier() {}
BatteryNotifier::~BatteryNotifier() {
Mutex::Autolock _l(mLock);
@@ -38,67 +38,73 @@
}
}
-void BatteryNotifier::noteStartVideo() {
+void BatteryNotifier::noteStartVideo(int uid) {
Mutex::Autolock _l(mLock);
sp<IBatteryStats> batteryService = getBatteryService_l();
- if (mVideoRefCount == 0 && batteryService != nullptr) {
- batteryService->noteStartVideo(AID_MEDIA);
+ if (mVideoRefCounts[uid] == 0 && batteryService != nullptr) {
+ batteryService->noteStartVideo(uid);
}
- mVideoRefCount++;
+ mVideoRefCounts[uid]++;
}
-void BatteryNotifier::noteStopVideo() {
+void BatteryNotifier::noteStopVideo(int uid) {
Mutex::Autolock _l(mLock);
- if (mVideoRefCount == 0) {
- ALOGW("%s: video refcount is broken.", __FUNCTION__);
+ if (mVideoRefCounts.find(uid) == mVideoRefCounts.end()) {
+ ALOGW("%s: video refcount is broken for uid(%d).", __FUNCTION__, (int)uid);
return;
}
sp<IBatteryStats> batteryService = getBatteryService_l();
- mVideoRefCount--;
- if (mVideoRefCount == 0 && batteryService != nullptr) {
- batteryService->noteStopVideo(AID_MEDIA);
+ mVideoRefCounts[uid]--;
+ if (mVideoRefCounts[uid] == 0) {
+ if (batteryService != nullptr) {
+ batteryService->noteStopVideo(uid);
+ }
+ mVideoRefCounts.erase(uid);
}
}
void BatteryNotifier::noteResetVideo() {
Mutex::Autolock _l(mLock);
sp<IBatteryStats> batteryService = getBatteryService_l();
- mVideoRefCount = 0;
+ mVideoRefCounts.clear();
if (batteryService != nullptr) {
batteryService->noteResetVideo();
}
}
-void BatteryNotifier::noteStartAudio() {
+void BatteryNotifier::noteStartAudio(int uid) {
Mutex::Autolock _l(mLock);
sp<IBatteryStats> batteryService = getBatteryService_l();
- if (mAudioRefCount == 0 && batteryService != nullptr) {
- batteryService->noteStartAudio(AID_AUDIOSERVER);
+ if (mAudioRefCounts[uid] == 0 && batteryService != nullptr) {
+ batteryService->noteStartAudio(uid);
}
- mAudioRefCount++;
+ mAudioRefCounts[uid]++;
}
-void BatteryNotifier::noteStopAudio() {
+void BatteryNotifier::noteStopAudio(int uid) {
Mutex::Autolock _l(mLock);
- if (mAudioRefCount == 0) {
- ALOGW("%s: audio refcount is broken.", __FUNCTION__);
+ if (mAudioRefCounts.find(uid) == mAudioRefCounts.end()) {
+ ALOGW("%s: audio refcount is broken for uid(%d).", __FUNCTION__, (int)uid);
return;
}
sp<IBatteryStats> batteryService = getBatteryService_l();
- mAudioRefCount--;
- if (mAudioRefCount == 0 && batteryService != nullptr) {
- batteryService->noteStopAudio(AID_AUDIOSERVER);
+ mAudioRefCounts[uid]--;
+ if (mAudioRefCounts[uid] == 0) {
+ if (batteryService != nullptr) {
+ batteryService->noteStopAudio(uid);
+ }
+ mAudioRefCounts.erase(uid);
}
}
void BatteryNotifier::noteResetAudio() {
Mutex::Autolock _l(mLock);
sp<IBatteryStats> batteryService = getBatteryService_l();
- mAudioRefCount = 0;
+ mAudioRefCounts.clear();
if (batteryService != nullptr) {
batteryService->noteResetAudio();
}
@@ -176,7 +182,7 @@
Mutex::Autolock _l(mLock);
mBatteryStatService.clear();
mDeathNotifier.clear();
- // Do not reset mVideoRefCount and mAudioRefCount here. The ref
+ // Do not reset mVideoRefCounts and mAudioRefCounts here. The ref
// counting is independent of the battery service availability.
// We need this if battery service becomes available after media
// started.
@@ -205,11 +211,13 @@
// Notify start now if mediaserver or audioserver is already started.
// 1) mediaserver and audioserver is started before batterystats service
// 2) batterystats server may have crashed.
- if (mVideoRefCount > 0) {
- mBatteryStatService->noteStartVideo(AID_MEDIA);
+ std::map<int, int>::iterator it = mVideoRefCounts.begin();
+ for (; it != mVideoRefCounts.end(); ++it) {
+ mBatteryStatService->noteStartVideo(it->first);
}
- if (mAudioRefCount > 0) {
- mBatteryStatService->noteStartAudio(AID_AUDIOSERVER);
+ it = mAudioRefCounts.begin();
+ for (; it != mAudioRefCounts.end(); ++it) {
+ mBatteryStatService->noteStartAudio(it->first);
}
// TODO: Notify for camera and flashlight state as well?
}
diff --git a/media/libstagefright/ProcessInfo.cpp b/media/utils/ProcessInfo.cpp
similarity index 100%
rename from media/libstagefright/ProcessInfo.cpp
rename to media/utils/ProcessInfo.cpp
diff --git a/media/utils/include/mediautils/BatteryNotifier.h b/media/utils/include/mediautils/BatteryNotifier.h
index 49048042..2ba4c76 100644
--- a/media/utils/include/mediautils/BatteryNotifier.h
+++ b/media/utils/include/mediautils/BatteryNotifier.h
@@ -37,11 +37,11 @@
public:
~BatteryNotifier();
- void noteStartVideo();
- void noteStopVideo();
+ void noteStartVideo(int uid);
+ void noteStopVideo(int uid);
void noteResetVideo();
- void noteStartAudio();
- void noteStopAudio();
+ void noteStartAudio(int uid);
+ void noteStopAudio(int uid);
void noteResetAudio();
void noteFlashlightOn(const String8& id, int uid);
void noteFlashlightOff(const String8& id, int uid);
@@ -58,8 +58,8 @@
};
Mutex mLock;
- int mVideoRefCount;
- int mAudioRefCount;
+ std::map<int, int> mVideoRefCounts;
+ std::map<int, int> mAudioRefCounts;
std::map<std::pair<String8, int>, bool> mFlashlightState;
std::map<std::pair<String8, int>, bool> mCameraState;
sp<IBatteryStats> mBatteryStatService;
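
The per-uid refcounting these maps enable, reduced to a standalone sketch (illustrative only; the real notifier also holds mLock and forwards the start/stop transitions to IBatteryStats):

#include <map>

// Mirrors the noteStartVideo()/noteStopVideo() bookkeeping on mVideoRefCounts.
class UidRefCounter {
public:
    // Returns true when `uid` goes from 0 to 1 references, i.e. a "start" should be reported.
    bool noteStart(int uid) { return mCounts[uid]++ == 0; }

    // Returns true when `uid` drops back to 0 references, i.e. a "stop" should be reported.
    bool noteStop(int uid) {
        auto it = mCounts.find(uid);
        if (it == mCounts.end()) return false;  // refcount is broken for this uid; ignore
        if (--it->second == 0) {
            mCounts.erase(it);
            return true;
        }
        return false;
    }

private:
    std::map<int, int> mCounts;  // uid -> outstanding start count
};
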
diff --git a/radio/IRadio.cpp b/radio/IRadio.cpp
index 0881a91..ebf3859 100644
--- a/radio/IRadio.cpp
+++ b/radio/IRadio.cpp
@@ -112,7 +112,7 @@
if (status == NO_ERROR) {
status = (status_t)reply.readInt32();
if (status == NO_ERROR) {
- int muteread = reply.readInt32();
+ int32_t muteread = reply.readInt32();
*mute = muteread != 0;
}
}
@@ -145,12 +145,12 @@
return status;
}
- virtual status_t tune(unsigned int channel, unsigned int subChannel)
+ virtual status_t tune(uint32_t channel, uint32_t subChannel)
{
Parcel data, reply;
data.writeInterfaceToken(IRadio::getInterfaceDescriptor());
- data.writeInt32(channel);
- data.writeInt32(subChannel);
+ data.writeUint32(channel);
+ data.writeUint32(subChannel);
status_t status = remote()->transact(TUNE, data, &reply);
if (status == NO_ERROR) {
status = (status_t)reply.readInt32();
@@ -177,27 +177,29 @@
}
radio_metadata_t *metadata = info->metadata;
data.writeInterfaceToken(IRadio::getInterfaceDescriptor());
+ if (metadata != NULL) {
+ data.writeUint32(1);
+ } else {
+ data.writeUint32(0);
+ }
status_t status = remote()->transact(GET_PROGRAM_INFORMATION, data, &reply);
if (status == NO_ERROR) {
status = (status_t)reply.readInt32();
if (status == NO_ERROR) {
reply.read(info, sizeof(struct radio_program_info));
+ // restore local metadata pointer
info->metadata = metadata;
- if (metadata == NULL) {
- return status;
+
+ uint32_t metadataSize = reply.readUint32();
+ if ((metadata != NULL) && (metadataSize != 0)) {
+ radio_metadata_t *newMetadata = (radio_metadata_t *)malloc(metadataSize);
+ if (newMetadata == NULL) {
+ return NO_MEMORY;
+ }
+ reply.read(newMetadata, metadataSize);
+ status = radio_metadata_add_metadata(&info->metadata, newMetadata);
+ free(newMetadata);
}
- size_t size = (size_t)reply.readInt32();
- if (size == 0) {
- return status;
- }
- metadata =
- (radio_metadata_t *)calloc(size / sizeof(unsigned int), sizeof(unsigned int));
- if (metadata == NULL) {
- return NO_MEMORY;
- }
- reply.read(metadata, size);
- status = radio_metadata_add_metadata(&info->metadata, metadata);
- free(metadata);
}
}
return status;
@@ -288,8 +290,8 @@
}
case TUNE: {
CHECK_INTERFACE(IRadio, data, reply);
- unsigned int channel = (unsigned int)data.readInt32();
- unsigned int subChannel = (unsigned int)data.readInt32();
+ uint32_t channel = data.readUint32();
+ uint32_t subChannel = data.readUint32();
status_t status = tune(channel, subChannel);
reply->writeInt32(status);
return NO_ERROR;
@@ -303,22 +305,27 @@
case GET_PROGRAM_INFORMATION: {
CHECK_INTERFACE(IRadio, data, reply);
struct radio_program_info info;
-
- status_t status = radio_metadata_allocate(&info.metadata, 0, 0);
- if (status != NO_ERROR) {
- return status;
+ status_t status;
+ // Query metadata only if requested by the remote side.
+ if (data.readUint32() == 1) {
+ status = radio_metadata_allocate(&info.metadata, 0, 0);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ } else {
+ info.metadata = NULL;
}
status = getProgramInformation(&info);
+
reply->writeInt32(status);
if (status == NO_ERROR) {
reply->write(&info, sizeof(struct radio_program_info));
- int count = radio_metadata_get_count(info.metadata);
- if (count > 0) {
+ if ((info.metadata != NULL) && (radio_metadata_get_count(info.metadata) > 0)) {
size_t size = radio_metadata_get_size(info.metadata);
- reply->writeInt32(size);
+ reply->writeUint32((uint32_t)size);
reply->write(info.metadata, size);
} else {
- reply->writeInt32(0);
+ reply->writeUint32(0);
}
}
radio_metadata_deallocate(info.metadata);
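
A condensed restatement of the reply layout this hunk establishes for GET_PROGRAM_INFORMATION, taken directly from the BnRadio::onTransact code above and shown only to make the parcel format explicit (not buildable on its own; it assumes the Android Parcel and radio_metadata headers):

// reply: int32 status
//        raw radio_program_info bytes      (only when status == NO_ERROR)
//        uint32 metadata size, or 0
//        <size> bytes of radio_metadata_t  (only when size != 0)
void writeProgramInfoReply(android::Parcel* reply, status_t status,
                           const radio_program_info& info) {
    reply->writeInt32(status);
    if (status != NO_ERROR) return;
    reply->write(&info, sizeof(struct radio_program_info));
    if (info.metadata != NULL && radio_metadata_get_count(info.metadata) > 0) {
        const size_t size = radio_metadata_get_size(info.metadata);
        reply->writeUint32((uint32_t)size);
        reply->write(info.metadata, size);
    } else {
        reply->writeUint32(0);
    }
}
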
diff --git a/radio/IRadioService.cpp b/radio/IRadioService.cpp
index be7d21e..72e3a61 100644
--- a/radio/IRadioService.cpp
+++ b/radio/IRadioService.cpp
@@ -16,8 +16,7 @@
*/
#define LOG_TAG "BpRadioService"
-//
-#define LOG_NDEBUG 0
+//#define LOG_NDEBUG 0
#include <utils/Log.h>
#include <utils/Errors.h>
@@ -58,12 +57,12 @@
}
Parcel data, reply;
data.writeInterfaceToken(IRadioService::getInterfaceDescriptor());
- unsigned int numModulesReq = (properties == NULL) ? 0 : *numModules;
+ uint32_t numModulesReq = (properties == NULL) ? 0 : *numModules;
data.writeInt32(numModulesReq);
status_t status = remote()->transact(LIST_MODULES, data, &reply);
if (status == NO_ERROR) {
status = (status_t)reply.readInt32();
- *numModules = (unsigned int)reply.readInt32();
+ *numModules = (uint32_t)reply.readInt32();
}
ALOGV("listModules() status %d got *numModules %d", status, *numModules);
if (status == NO_ERROR) {
@@ -120,11 +119,11 @@
switch(code) {
case LIST_MODULES: {
CHECK_INTERFACE(IRadioService, data, reply);
- unsigned int numModulesReq = data.readInt32();
+ uint32_t numModulesReq = data.readInt32();
if (numModulesReq > MAX_ITEMS_PER_LIST) {
numModulesReq = MAX_ITEMS_PER_LIST;
}
- unsigned int numModules = numModulesReq;
+ uint32_t numModules = numModulesReq;
struct radio_properties *properties =
(struct radio_properties *)calloc(numModulesReq,
sizeof(struct radio_properties));
diff --git a/radio/Radio.cpp b/radio/Radio.cpp
index 3c04fb0..fa39589 100644
--- a/radio/Radio.cpp
+++ b/radio/Radio.cpp
@@ -240,20 +240,31 @@
return;
}
+ // The event layout in shared memory is:
+ // sizeof(struct radio_event) bytes : the event itself
+ // 4 bytes : metadata size or 0
+ // N bytes : metadata if present
struct radio_event *event = (struct radio_event *)eventMemory->pointer();
+ uint32_t metadataOffset = sizeof(struct radio_event) + sizeof(uint32_t);
+ uint32_t metadataSize = *(uint32_t *)((uint8_t *)event + metadataOffset - sizeof(uint32_t));
+
// restore local metadata pointer from offset
switch (event->type) {
case RADIO_EVENT_TUNED:
case RADIO_EVENT_AF_SWITCH:
- if (event->info.metadata != NULL) {
+ if (metadataSize != 0) {
event->info.metadata =
- (radio_metadata_t *)((char *)event + (size_t)event->info.metadata);
+ (radio_metadata_t *)((uint8_t *)event + metadataOffset);
+ } else {
+ event->info.metadata = 0;
}
break;
case RADIO_EVENT_METADATA:
- if (event->metadata != NULL) {
+ if (metadataSize != 0) {
event->metadata =
- (radio_metadata_t *)((char *)event + (size_t)event->metadata);
+ (radio_metadata_t *)((uint8_t *)event + metadataOffset);
+ } else {
+ event->metadata = 0;
}
break;
default:
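
A standalone sketch of the shared-memory layout described by the comment above (the struct is a stand-in for the real radio_event; only its size matters for the offset math):

#include <cstddef>
#include <cstdint>

struct FakeRadioEvent { uint32_t type; uint32_t status; };  // stand-in for struct radio_event

// Layout: [event struct][uint32 metadata size][metadata bytes ...]
const uint8_t* metadataFromEvent(const uint8_t* base, uint32_t* outSize) {
    const size_t sizeFieldOffset = sizeof(FakeRadioEvent);
    const size_t metadataOffset  = sizeFieldOffset + sizeof(uint32_t);
    *outSize = *reinterpret_cast<const uint32_t*>(base + sizeFieldOffset);
    return (*outSize != 0) ? base + metadataOffset : nullptr;
}
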
diff --git a/services/audioflinger/Android.mk b/services/audioflinger/Android.mk
index 5c28e46..e2a93ad 100644
--- a/services/audioflinger/Android.mk
+++ b/services/audioflinger/Android.mk
@@ -31,16 +31,17 @@
AudioMixer.cpp.arm \
BufferProviders.cpp \
PatchPanel.cpp \
- StateQueue.cpp
+ StateQueue.cpp \
+ BufLog.cpp
LOCAL_C_INCLUDES := \
$(TOPDIR)frameworks/av/services/audiopolicy \
$(TOPDIR)frameworks/av/services/medialog \
$(TOPDIR)external/sonic \
- $(call include-path-for, audio-effects) \
$(call include-path-for, audio-utils)
LOCAL_SHARED_LIBRARIES := \
+ libaudiohal \
libaudioresampler \
libaudiospdif \
libaudioutils \
@@ -52,9 +53,6 @@
libmedialogservice \
libmediautils \
libnbaio \
- libhardware \
- libhardware_legacy \
- libeffects \
libpowermanager \
libserviceutility \
libsonic \
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 60093cc..4d2049e 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -31,6 +31,11 @@
#include <utils/Log.h>
#include <utils/Trace.h>
#include <binder/Parcel.h>
+#include <media/audiohal/DeviceHalInterface.h>
+#include <media/audiohal/DevicesFactoryHalInterface.h>
+#include <media/audiohal/EffectsFactoryHalInterface.h>
+#include <media/AudioParameter.h>
+#include <media/TypeConverter.h>
#include <memunreachable/memunreachable.h>
#include <utils/String16.h>
#include <utils/threads.h>
@@ -40,7 +45,6 @@
#include <cutils/properties.h>
#include <system/audio.h>
-#include <hardware/audio.h>
#include "AudioMixer.h"
#include "AudioFlinger.h"
@@ -48,10 +52,9 @@
#include <media/AudioResamplerPublic.h>
-#include <media/EffectsFactoryApi.h>
-#include <audio_effects/effect_visualizer.h>
-#include <audio_effects/effect_ns.h>
-#include <audio_effects/effect_aec.h>
+#include <system/audio_effects/effect_visualizer.h>
+#include <system/audio_effects/effect_ns.h>
+#include <system/audio_effects/effect_aec.h>
#include <audio_utils/primitives.h>
@@ -65,6 +68,9 @@
#include <mediautils/BatteryNotifier.h>
#include <private/android_filesystem_config.h>
+//#define BUFLOG_NDEBUG 0
+#include <BufLog.h>
+
// ----------------------------------------------------------------------------
// Note: the following macro is used for extremely verbose logging message. In
@@ -85,6 +91,7 @@
static const char kDeadlockedString[] = "AudioFlinger may be deadlocked\n";
static const char kHardwareLockedString[] = "Hardware lock is taken\n";
static const char kClientLockedString[] = "Client lock is taken\n";
+static const char kNoEffectsFactory[] = "Effects Factory is absent\n";
nsecs_t AudioFlinger::mStandbyTimeInNsecs = kDefaultStandbyTimeInNsecs;
@@ -107,67 +114,10 @@
// ----------------------------------------------------------------------------
-const char *formatToString(audio_format_t format) {
- switch (audio_get_main_format(format)) {
- case AUDIO_FORMAT_PCM:
- switch (format) {
- case AUDIO_FORMAT_PCM_16_BIT: return "pcm16";
- case AUDIO_FORMAT_PCM_8_BIT: return "pcm8";
- case AUDIO_FORMAT_PCM_32_BIT: return "pcm32";
- case AUDIO_FORMAT_PCM_8_24_BIT: return "pcm8.24";
- case AUDIO_FORMAT_PCM_FLOAT: return "pcmfloat";
- case AUDIO_FORMAT_PCM_24_BIT_PACKED: return "pcm24";
- default:
- break;
- }
- break;
- case AUDIO_FORMAT_MP3: return "mp3";
- case AUDIO_FORMAT_AMR_NB: return "amr-nb";
- case AUDIO_FORMAT_AMR_WB: return "amr-wb";
- case AUDIO_FORMAT_AAC: return "aac";
- case AUDIO_FORMAT_HE_AAC_V1: return "he-aac-v1";
- case AUDIO_FORMAT_HE_AAC_V2: return "he-aac-v2";
- case AUDIO_FORMAT_VORBIS: return "vorbis";
- case AUDIO_FORMAT_OPUS: return "opus";
- case AUDIO_FORMAT_AC3: return "ac-3";
- case AUDIO_FORMAT_E_AC3: return "e-ac-3";
- case AUDIO_FORMAT_IEC61937: return "iec61937";
- case AUDIO_FORMAT_DTS: return "dts";
- case AUDIO_FORMAT_DTS_HD: return "dts-hd";
- case AUDIO_FORMAT_DOLBY_TRUEHD: return "dolby-truehd";
- default:
- break;
- }
- return "unknown";
-}
-
-static int load_audio_interface(const char *if_name, audio_hw_device_t **dev)
-{
- const hw_module_t *mod;
- int rc;
-
- rc = hw_get_module_by_class(AUDIO_HARDWARE_MODULE_ID, if_name, &mod);
- ALOGE_IF(rc, "%s couldn't load audio hw module %s.%s (%s)", __func__,
- AUDIO_HARDWARE_MODULE_ID, if_name, strerror(-rc));
- if (rc) {
- goto out;
- }
- rc = audio_hw_device_open(mod, dev);
- ALOGE_IF(rc, "%s couldn't open audio hw device in %s.%s (%s)", __func__,
- AUDIO_HARDWARE_MODULE_ID, if_name, strerror(-rc));
- if (rc) {
- goto out;
- }
- if ((*dev)->common.version < AUDIO_DEVICE_API_VERSION_MIN) {
- ALOGE("%s wrong audio hw device version %04x", __func__, (*dev)->common.version);
- rc = BAD_VALUE;
- goto out;
- }
- return 0;
-
-out:
- *dev = NULL;
- return rc;
+std::string formatToString(audio_format_t format) {
+ std::string result;
+ FormatConverter::toString(format, result);
+ return result;
}
// ----------------------------------------------------------------------------
@@ -205,6 +155,9 @@
// in bad state, reset the state upon service start.
BatteryNotifier::getInstance().noteResetAudio();
+ mDevicesFactoryHal = DevicesFactoryHalInterface::create();
+ mEffectsFactoryHal = EffectsFactoryHalInterface::create();
+
#ifdef TEE_SINK
char value[PROPERTY_VALUE_MAX];
(void) property_get("ro.debuggable", value, "0");
@@ -263,7 +216,6 @@
for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
// no mHardwareLock needed, as there are no other references to this
- audio_hw_device_close(mAudioHwDevs.valueAt(i)->hwDevice());
delete mAudioHwDevs.valueAt(i);
}
@@ -302,10 +254,12 @@
// then try to find a module supporting the requested device.
for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
AudioHwDevice *audioHwDevice = mAudioHwDevs.valueAt(i);
- audio_hw_device_t *dev = audioHwDevice->hwDevice();
- if ((dev->get_supported_devices != NULL) &&
- (dev->get_supported_devices(dev) & devices) == devices)
+ sp<DeviceHalInterface> dev = audioHwDevice->hwDevice();
+ uint32_t supportedDevices;
+ if (dev->getSupportedDevices(&supportedDevices) == OK &&
+ (supportedDevices & devices) == devices) {
return audioHwDevice;
+ }
}
} else {
// check a match for the requested module handle
@@ -419,7 +373,12 @@
write(fd, result.string(), result.size());
}
- EffectDumpEffects(fd);
+ if (mEffectsFactoryHal != 0) {
+ mEffectsFactoryHal->dumpEffects(fd);
+ } else {
+ String8 result(kNoEffectsFactory);
+ write(fd, result.string(), result.size());
+ }
dumpClients(fd, args);
if (clientLocked) {
@@ -447,8 +406,8 @@
}
// dump all hardware devs
for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
- audio_hw_device_t *dev = mAudioHwDevs.valueAt(i)->hwDevice();
- dev->dump(dev, fd);
+ sp<DeviceHalInterface> dev = mAudioHwDevs.valueAt(i)->hwDevice();
+ dev->dump(fd);
}
#ifdef TEE_SINK
@@ -458,6 +417,8 @@
}
#endif
+ BUFLOG_RESET;
+
if (locked) {
mLock.unlock();
}
@@ -810,7 +771,7 @@
mHardwareStatus = AUDIO_HW_SET_MASTER_VOLUME;
if (dev->canSetMasterVolume()) {
- dev->hwDevice()->set_master_volume(dev->hwDevice(), value);
+ dev->hwDevice()->setMasterVolume(value);
}
mHardwareStatus = AUDIO_HW_IDLE;
}
@@ -847,9 +808,9 @@
{ // scope for the lock
AutoMutex lock(mHardwareLock);
- audio_hw_device_t *dev = mPrimaryHardwareDev->hwDevice();
+ sp<DeviceHalInterface> dev = mPrimaryHardwareDev->hwDevice();
mHardwareStatus = AUDIO_HW_SET_MODE;
- ret = dev->set_mode(dev, mode);
+ ret = dev->setMode(mode);
mHardwareStatus = AUDIO_HW_IDLE;
}
@@ -878,8 +839,8 @@
AutoMutex lock(mHardwareLock);
mHardwareStatus = AUDIO_HW_SET_MIC_MUTE;
for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
- audio_hw_device_t *dev = mAudioHwDevs.valueAt(i)->hwDevice();
- status_t result = dev->set_mic_mute(dev, state);
+ sp<DeviceHalInterface> dev = mAudioHwDevs.valueAt(i)->hwDevice();
+ status_t result = dev->setMicMute(state);
if (result != NO_ERROR) {
ret = result;
}
@@ -899,8 +860,8 @@
AutoMutex lock(mHardwareLock);
mHardwareStatus = AUDIO_HW_GET_MIC_MUTE;
for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
- audio_hw_device_t *dev = mAudioHwDevs.valueAt(i)->hwDevice();
- status_t result = dev->get_mic_mute(dev, &state);
+ sp<DeviceHalInterface> dev = mAudioHwDevs.valueAt(i)->hwDevice();
+ status_t result = dev->getMicMute(&state);
if (result == NO_ERROR) {
mute = mute && state;
}
@@ -932,7 +893,7 @@
mHardwareStatus = AUDIO_HW_SET_MASTER_MUTE;
if (dev->canSetMasterMute()) {
- dev->hwDevice()->set_master_mute(dev->hwDevice(), muted);
+ dev->hwDevice()->setMasterMute(muted);
}
mHardwareStatus = AUDIO_HW_IDLE;
}
@@ -1110,8 +1071,8 @@
AutoMutex lock(mHardwareLock);
mHardwareStatus = AUDIO_HW_SET_PARAMETER;
for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
- audio_hw_device_t *dev = mAudioHwDevs.valueAt(i)->hwDevice();
- status_t result = dev->set_parameters(dev, keyValuePairs.string());
+ sp<DeviceHalInterface> dev = mAudioHwDevs.valueAt(i)->hwDevice();
+ status_t result = dev->setParameters(keyValuePairs);
// return success if at least one audio device accepts the parameters as not all
// HALs are requested to support all parameters. If no audio device supports the
// requested parameters, the last error is reported.
@@ -1124,8 +1085,8 @@
// disable AEC and NS if the device is a BT SCO headset supporting those pre processings
AudioParameter param = AudioParameter(keyValuePairs);
String8 value;
- if (param.get(String8(AUDIO_PARAMETER_KEY_BT_NREC), value) == NO_ERROR) {
- bool btNrecIsOff = (value == AUDIO_PARAMETER_VALUE_OFF);
+ if (param.get(String8(AudioParameter::keyBtNrec), value) == NO_ERROR) {
+ bool btNrecIsOff = (value == AudioParameter::valueOff);
if (mBtNrecIsOff != btNrecIsOff) {
for (size_t i = 0; i < mRecordThreads.size(); i++) {
sp<RecordThread> thread = mRecordThreads.valueAt(i);
@@ -1149,7 +1110,7 @@
}
String8 screenState;
if (param.get(String8(AudioParameter::keyScreenState), screenState) == NO_ERROR) {
- bool isOff = screenState == "off";
+ bool isOff = (screenState == AudioParameter::valueOff);
if (isOff != (AudioFlinger::mScreenState & 1)) {
AudioFlinger::mScreenState = ((AudioFlinger::mScreenState & ~1) + 2) | isOff;
}
@@ -1192,16 +1153,16 @@
String8 out_s8;
for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
- char *s;
+ String8 s;
+ status_t result;
{
AutoMutex lock(mHardwareLock);
mHardwareStatus = AUDIO_HW_GET_PARAMETER;
- audio_hw_device_t *dev = mAudioHwDevs.valueAt(i)->hwDevice();
- s = dev->get_parameters(dev, keys.string());
+ sp<DeviceHalInterface> dev = mAudioHwDevs.valueAt(i)->hwDevice();
+ result = dev->getParameters(keys, &s);
mHardwareStatus = AUDIO_HW_IDLE;
}
- out_s8 += String8(s ? s : "");
- free(s);
+ if (result == OK) out_s8 += s;
}
return out_s8;
}
@@ -1238,14 +1199,14 @@
proposed.channel_mask = channelMask;
proposed.format = format;
- audio_hw_device_t *dev = mPrimaryHardwareDev->hwDevice();
+ sp<DeviceHalInterface> dev = mPrimaryHardwareDev->hwDevice();
size_t frames;
for (;;) {
// Note: config is currently a const parameter for get_input_buffer_size()
// but we use a copy from proposed in case config changes from the call.
config = proposed;
- frames = dev->get_input_buffer_size(dev, &config);
- if (frames != 0) {
+ status_t result = dev->getInputBufferSize(&config, &frames);
+ if (result == OK && frames != 0) {
break; // hal success, config is the result
}
// change one parameter of the configuration each iteration to a more "common" value
@@ -1292,9 +1253,9 @@
}
AutoMutex lock(mHardwareLock);
- audio_hw_device_t *dev = mPrimaryHardwareDev->hwDevice();
+ sp<DeviceHalInterface> dev = mPrimaryHardwareDev->hwDevice();
mHardwareStatus = AUDIO_HW_SET_VOICE_VOLUME;
- ret = dev->set_voice_volume(dev, value);
+ ret = dev->setVoiceVolume(value);
mHardwareStatus = AUDIO_HW_IDLE;
return ret;
@@ -1632,16 +1593,16 @@
}
}
- audio_hw_device_t *dev;
+ sp<DeviceHalInterface> dev;
- int rc = load_audio_interface(name, &dev);
+ int rc = mDevicesFactoryHal->openDevice(name, &dev);
if (rc) {
ALOGE("loadHwModule() error %d loading module %s", rc, name);
return AUDIO_MODULE_HANDLE_NONE;
}
mHardwareStatus = AUDIO_HW_INIT;
- rc = dev->init_check(dev);
+ rc = dev->initCheck();
mHardwareStatus = AUDIO_HW_IDLE;
if (rc) {
ALOGE("loadHwModule() init check error %d for module %s", rc, name);
@@ -1659,32 +1620,26 @@
if (0 == mAudioHwDevs.size()) {
mHardwareStatus = AUDIO_HW_GET_MASTER_VOLUME;
- if (NULL != dev->get_master_volume) {
- float mv;
- if (OK == dev->get_master_volume(dev, &mv)) {
- mMasterVolume = mv;
- }
+ float mv;
+ if (OK == dev->getMasterVolume(&mv)) {
+ mMasterVolume = mv;
}
mHardwareStatus = AUDIO_HW_GET_MASTER_MUTE;
- if (NULL != dev->get_master_mute) {
- bool mm;
- if (OK == dev->get_master_mute(dev, &mm)) {
- mMasterMute = mm;
- }
+ bool mm;
+ if (OK == dev->getMasterMute(&mm)) {
+ mMasterMute = mm;
}
}
mHardwareStatus = AUDIO_HW_SET_MASTER_VOLUME;
- if ((NULL != dev->set_master_volume) &&
- (OK == dev->set_master_volume(dev, mMasterVolume))) {
+ if (OK == dev->setMasterVolume(mMasterVolume)) {
flags = static_cast<AudioHwDevice::Flags>(flags |
AudioHwDevice::AHWD_CAN_SET_MASTER_VOLUME);
}
mHardwareStatus = AUDIO_HW_SET_MASTER_MUTE;
- if ((NULL != dev->set_master_mute) &&
- (OK == dev->set_master_mute(dev, mMasterMute))) {
+ if (OK == dev->setMasterMute(mMasterMute)) {
flags = static_cast<AudioHwDevice::Flags>(flags |
AudioHwDevice::AHWD_CAN_SET_MASTER_MUTE);
}
@@ -1695,8 +1650,7 @@
audio_module_handle_t handle = (audio_module_handle_t) nextUniqueId(AUDIO_UNIQUE_ID_USE_MODULE);
mAudioHwDevs.add(handle, new AudioHwDevice(handle, name, dev, flags));
- ALOGI("loadHwModule() Loaded %s audio interface from %s (%s) handle %d",
- name, dev->common.module->name, dev->common.module->id, handle);
+ ALOGI("loadHwModule() Loaded %s audio interface, handle %d", name, handle);
return handle;
@@ -1746,16 +1700,18 @@
return mHwAvSyncIds.valueAt(index);
}
- audio_hw_device_t *dev = mPrimaryHardwareDev->hwDevice();
+ sp<DeviceHalInterface> dev = mPrimaryHardwareDev->hwDevice();
if (dev == NULL) {
return AUDIO_HW_SYNC_INVALID;
}
- char *reply = dev->get_parameters(dev, AUDIO_PARAMETER_HW_AV_SYNC);
- AudioParameter param = AudioParameter(String8(reply));
- free(reply);
+ String8 reply;
+ AudioParameter param;
+ if (dev->getParameters(String8(AudioParameter::keyHwAvSync), &reply) == OK) {
+ param = AudioParameter(reply);
+ }
int value;
- if (param.getInt(String8(AUDIO_PARAMETER_HW_AV_SYNC), value) != NO_ERROR) {
+ if (param.getInt(String8(AudioParameter::keyHwAvSync), value) != NO_ERROR) {
ALOGW("getAudioHwSyncForSession error getting sync for session %d", sessionId);
return AUDIO_HW_SYNC_INVALID;
}
@@ -1777,7 +1733,7 @@
uint32_t sessions = thread->hasAudioSession(sessionId);
if (sessions & ThreadBase::TRACK_SESSION) {
AudioParameter param = AudioParameter();
- param.addInt(String8(AUDIO_PARAMETER_STREAM_HW_AV_SYNC), value);
+ param.addInt(String8(AudioParameter::keyStreamHwAvSync), value);
thread->setParameters(param.toString());
break;
}
@@ -1815,7 +1771,7 @@
audio_hw_sync_t syncId = mHwAvSyncIds.valueAt(index);
ALOGV("setAudioHwSyncForSession_l found ID %d for session %d", syncId, sessionId);
AudioParameter param = AudioParameter();
- param.addInt(String8(AUDIO_PARAMETER_STREAM_HW_AV_SYNC), syncId);
+ param.addInt(String8(AudioParameter::keyStreamHwAvSync), syncId);
thread->setParameters(param.toString());
}
}
@@ -1937,7 +1893,7 @@
AutoMutex lock(mHardwareLock);
mHardwareStatus = AUDIO_HW_SET_MODE;
- mPrimaryHardwareDev->hwDevice()->set_mode(mPrimaryHardwareDev->hwDevice(), mMode);
+ mPrimaryHardwareDev->hwDevice()->setMode(mMode);
mHardwareStatus = AUDIO_HW_IDLE;
}
return NO_ERROR;
@@ -2033,7 +1989,6 @@
AudioStreamOut *out = thread->clearOutput();
ALOG_ASSERT(out != NULL, "out shouldn't be NULL");
// from now on thread->mOutput is NULL
- out->hwDev()->close_output_stream(out->hwDev(), out->stream);
delete out;
}
@@ -2128,13 +2083,13 @@
}
audio_config_t halconfig = *config;
- audio_hw_device_t *inHwHal = inHwDev->hwDevice();
- audio_stream_in_t *inStream = NULL;
- status_t status = inHwHal->open_input_stream(inHwHal, *input, devices, &halconfig,
- &inStream, flags, address.string(), source);
+ sp<DeviceHalInterface> inHwHal = inHwDev->hwDevice();
+ sp<StreamInHalInterface> inStream;
+ status_t status = inHwHal->openInputStream(
+ *input, devices, &halconfig, flags, address.string(), source, &inStream);
ALOGV("openInput_l() openInputStream returned input %p, SamplingRate %d"
", Format %#x, Channels %x, flags %#x, status %d addr %s",
- inStream,
+ inStream.get(),
halconfig.sample_rate,
halconfig.format,
halconfig.channel_mask,
@@ -2151,13 +2106,13 @@
(audio_channel_count_from_in_mask(config->channel_mask) <= FCC_8)) {
// FIXME describe the change proposed by HAL (save old values so we can log them here)
ALOGV("openInput_l() reopening with proposed sampling rate and channel mask");
- inStream = NULL;
- status = inHwHal->open_input_stream(inHwHal, *input, devices, &halconfig,
- &inStream, flags, address.string(), source);
+ inStream.clear();
+ status = inHwHal->openInputStream(
+ *input, devices, &halconfig, flags, address.string(), source, &inStream);
// FIXME log this new status; HAL should not propose any further changes
}
- if (status == NO_ERROR && inStream != NULL) {
+ if (status == NO_ERROR && inStream != 0) {
#ifdef TEE_SINK
// Try to re-use most recently used Pipe to archive a copy of input for dumpsys,
@@ -2304,7 +2259,6 @@
AudioStreamIn *in = thread->clearInput();
ALOG_ASSERT(in != NULL, "in shouldn't be NULL");
// from now on thread->mInput is NULL
- in->hwDev()->close_input_stream(in->hwDev(), in->stream);
delete in;
}
@@ -2596,24 +2550,39 @@
// Effect management
// ----------------------------------------------------------------------------
+sp<EffectsFactoryHalInterface> AudioFlinger::getEffectsFactory() {
+ return mEffectsFactoryHal;
+}
status_t AudioFlinger::queryNumberEffects(uint32_t *numEffects) const
{
Mutex::Autolock _l(mLock);
- return EffectQueryNumberEffects(numEffects);
+ if (mEffectsFactoryHal.get()) {
+ return mEffectsFactoryHal->queryNumberEffects(numEffects);
+ } else {
+ return -ENODEV;
+ }
}
status_t AudioFlinger::queryEffect(uint32_t index, effect_descriptor_t *descriptor) const
{
Mutex::Autolock _l(mLock);
- return EffectQueryEffect(index, descriptor);
+ if (mEffectsFactoryHal.get()) {
+ return mEffectsFactoryHal->getDescriptor(index, descriptor);
+ } else {
+ return -ENODEV;
+ }
}
status_t AudioFlinger::getEffectDescriptor(const effect_uuid_t *pUuid,
effect_descriptor_t *descriptor) const
{
Mutex::Autolock _l(mLock);
- return EffectGetDescriptor(pUuid, descriptor);
+ if (mEffectsFactoryHal.get()) {
+ return mEffectsFactoryHal->getDescriptor(pUuid, descriptor);
+ } else {
+ return -ENODEV;
+ }
}
@@ -2633,8 +2602,8 @@
effect_descriptor_t desc;
pid_t pid = IPCThreadState::self()->getCallingPid();
- ALOGV("createEffect pid %d, effectClient %p, priority %d, sessionId %d, io %d",
- pid, effectClient.get(), priority, sessionId, io);
+ ALOGV("createEffect pid %d, effectClient %p, priority %d, sessionId %d, io %d, factory %p",
+ pid, effectClient.get(), priority, sessionId, io, mEffectsFactoryHal.get());
if (pDesc == NULL) {
lStatus = BAD_VALUE;
@@ -2654,10 +2623,15 @@
goto Exit;
}
+ if (mEffectsFactoryHal == 0) {
+ lStatus = NO_INIT;
+ goto Exit;
+ }
+
{
- if (!EffectIsNullUuid(&pDesc->uuid)) {
+ if (!EffectsFactoryHalInterface::isNullUuid(&pDesc->uuid)) {
// if uuid is specified, request effect descriptor
- lStatus = EffectGetDescriptor(&pDesc->uuid, &desc);
+ lStatus = mEffectsFactoryHal->getDescriptor(&pDesc->uuid, &desc);
if (lStatus < 0) {
ALOGW("createEffect() error %d from EffectGetDescriptor", lStatus);
goto Exit;
@@ -2665,7 +2639,7 @@
} else {
// if uuid is not specified, look for an available implementation
// of the required type in effect factory
- if (EffectIsNullUuid(&pDesc->type)) {
+ if (EffectsFactoryHalInterface::isNullUuid(&pDesc->type)) {
ALOGW("createEffect() no effect type");
lStatus = BAD_VALUE;
goto Exit;
@@ -2675,13 +2649,13 @@
d.flags = 0; // prevent compiler warning
bool found = false;
- lStatus = EffectQueryNumberEffects(&numEffects);
+ lStatus = mEffectsFactoryHal->queryNumberEffects(&numEffects);
if (lStatus < 0) {
ALOGW("createEffect() error %d from EffectQueryNumberEffects", lStatus);
goto Exit;
}
for (uint32_t i = 0; i < numEffects; i++) {
- lStatus = EffectQueryEffect(i, &desc);
+ lStatus = mEffectsFactoryHal->getDescriptor(i, &desc);
if (lStatus < 0) {
ALOGW("createEffect() error %d from EffectQueryEffect", lStatus);
continue;
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index c4b89f8..e9c0f93 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -43,9 +43,9 @@
#include <binder/MemoryDealer.h>
#include <system/audio.h>
-#include <hardware/audio.h>
-#include <hardware/audio_policy.h>
+#include <system/audio_policy.h>
+#include <media/audiohal/StreamHalInterface.h>
#include <media/AudioBufferProvider.h>
#include <media/ExtendedAudioBufferProvider.h>
@@ -71,6 +71,9 @@
class AudioMixer;
class AudioBuffer;
class AudioResampler;
+class DeviceHalInterface;
+class DevicesFactoryHalInterface;
+class EffectsFactoryHalInterface;
class FastMixer;
class PassthruBufferProvider;
class ServerProxy;
@@ -271,6 +274,7 @@
sp<NBLog::Writer> newWriter_l(size_t size, const char *name);
void unregisterWriter(const sp<NBLog::Writer>& writer);
+ sp<EffectsFactoryHalInterface> getEffectsFactory();
private:
static const size_t kLogMemorySize = 40 * 1024;
sp<MemoryDealer> mLogMemoryDealer; // == 0 when NBLog is disabled
@@ -611,12 +615,12 @@
struct AudioStreamIn {
AudioHwDevice* const audioHwDev;
- audio_stream_in_t* const stream;
+ sp<StreamInHalInterface> stream;
audio_input_flags_t flags;
- audio_hw_device_t* hwDev() const { return audioHwDev->hwDevice(); }
+ sp<DeviceHalInterface> hwDev() const { return audioHwDev->hwDevice(); }
- AudioStreamIn(AudioHwDevice *dev, audio_stream_in_t *in, audio_input_flags_t flags) :
+ AudioStreamIn(AudioHwDevice *dev, sp<StreamInHalInterface> in, audio_input_flags_t flags) :
audioHwDev(dev), stream(in), flags(flags) {}
};
@@ -645,6 +649,8 @@
AudioHwDevice* mPrimaryHardwareDev; // mAudioHwDevs[0] or NULL
DefaultKeyedVector<audio_module_handle_t, AudioHwDevice*> mAudioHwDevs;
+ sp<DevicesFactoryHalInterface> mDevicesFactoryHal;
+
// for dump, indicates which hardware operation is currently in progress (but not stream ops)
enum hardware_call_state {
AUDIO_HW_IDLE = 0, // no operation in progress
@@ -760,16 +766,17 @@
nsecs_t mGlobalEffectEnableTime; // when a global effect was last enabled
sp<PatchPanel> mPatchPanel;
+ sp<EffectsFactoryHalInterface> mEffectsFactoryHal;
bool mSystemReady;
};
#undef INCLUDING_FROM_AUDIOFLINGER_H
-const char *formatToString(audio_format_t format);
-String8 inputFlagsToString(audio_input_flags_t flags);
-String8 outputFlagsToString(audio_output_flags_t flags);
-String8 devicesToString(audio_devices_t devices);
+std::string formatToString(audio_format_t format);
+std::string inputFlagsToString(audio_input_flags_t flags);
+std::string outputFlagsToString(audio_output_flags_t flags);
+std::string devicesToString(audio_devices_t devices);
const char *sourceToString(audio_source_t source);
// ----------------------------------------------------------------------------
diff --git a/services/audioflinger/AudioHwDevice.cpp b/services/audioflinger/AudioHwDevice.cpp
index 7494930..b109d06 100644
--- a/services/audioflinger/AudioHwDevice.cpp
+++ b/services/audioflinger/AudioHwDevice.cpp
@@ -18,7 +18,7 @@
#define LOG_TAG "AudioHwDevice"
//#define LOG_NDEBUG 0
-#include <hardware/audio.h>
+#include <system/audio.h>
#include <utils/Log.h>
#include <audio_utils/spdif/SPDIFEncoder.h>
@@ -93,5 +93,10 @@
return status;
}
+bool AudioHwDevice::supportsAudioPatches() const {
+ bool result;
+ return mHwDevice->supportsAudioPatches(&result) == OK ? result : false;
+}
+
}; // namespace android
diff --git a/services/audioflinger/AudioHwDevice.h b/services/audioflinger/AudioHwDevice.h
index b9f65c1..eb826c6 100644
--- a/services/audioflinger/AudioHwDevice.h
+++ b/services/audioflinger/AudioHwDevice.h
@@ -22,11 +22,10 @@
#include <stdlib.h>
#include <sys/types.h>
-#include <hardware/audio.h>
+#include <media/audiohal/DeviceHalInterface.h>
#include <utils/Errors.h>
#include <system/audio.h>
-
namespace android {
class AudioStreamOut;
@@ -40,7 +39,7 @@
AudioHwDevice(audio_module_handle_t handle,
const char *moduleName,
- audio_hw_device_t *hwDevice,
+ sp<DeviceHalInterface> hwDevice,
Flags flags)
: mHandle(handle)
, mModuleName(strdup(moduleName))
@@ -58,8 +57,7 @@
audio_module_handle_t handle() const { return mHandle; }
const char *moduleName() const { return mModuleName; }
- audio_hw_device_t *hwDevice() const { return mHwDevice; }
- uint32_t version() const { return mHwDevice->common.version; }
+ sp<DeviceHalInterface> hwDevice() const { return mHwDevice; }
/** This method creates and opens the audio hardware output stream.
* The "address" parameter qualifies the "devices" audio device type if needed.
@@ -76,10 +74,12 @@
struct audio_config *config,
const char *address);
+ bool supportsAudioPatches() const;
+
private:
const audio_module_handle_t mHandle;
const char * const mModuleName;
- audio_hw_device_t * const mHwDevice;
+ sp<DeviceHalInterface> mHwDevice;
const Flags mFlags;
};
diff --git a/services/audioflinger/AudioMixer.h b/services/audioflinger/AudioMixer.h
index e788ac3..540caac 100644
--- a/services/audioflinger/AudioMixer.h
+++ b/services/audioflinger/AudioMixer.h
@@ -21,7 +21,6 @@
#include <stdint.h>
#include <sys/types.h>
-#include <hardware/audio_effect.h>
#include <media/AudioBufferProvider.h>
#include <media/AudioResamplerPublic.h>
#include <media/nbaio/NBLog.h>
diff --git a/services/audioflinger/AudioResamplerDyn.cpp b/services/audioflinger/AudioResamplerDyn.cpp
index b7ca5d9..21914b9 100644
--- a/services/audioflinger/AudioResamplerDyn.cpp
+++ b/services/audioflinger/AudioResamplerDyn.cpp
@@ -149,6 +149,15 @@
}
template<typename TC, typename TI, typename TO>
+void AudioResamplerDyn<TC, TI, TO>::InBuffer::reset()
+{
+ // clear resampler state
+ if (mState != nullptr) {
+ memset(mState, 0, mStateCount * sizeof(TI));
+ }
+}
+
+template<typename TC, typename TI, typename TO>
void AudioResamplerDyn<TC, TI, TO>::Constants::set(
int L, int halfNumCoefs, int inSampleRate, int outSampleRate)
{
@@ -528,6 +537,9 @@
mBuffer.frameCount = inFrameCount;
provider->getNextBuffer(&mBuffer);
if (mBuffer.raw == NULL) {
+ // We are either at the end of playback or in an underrun situation.
+ // Reset the input buffer state to prevent a pop noise in the next buffer.
+ mInBuffer.reset();
goto resample_exit;
}
inFrameCount -= mBuffer.frameCount;
diff --git a/services/audioflinger/AudioResamplerDyn.h b/services/audioflinger/AudioResamplerDyn.h
index 3b1c381..a5ea821 100644
--- a/services/audioflinger/AudioResamplerDyn.h
+++ b/services/audioflinger/AudioResamplerDyn.h
@@ -96,6 +96,8 @@
inline void readAdvance(TI*& impulse, const int halfNumCoefs,
const TI* const in, const size_t inputIndex);
+ void reset();
+
private:
// tuning parameter guidelines: 2 <= multiple <= 8
static const int kStateSizeMultipleOfFilterLength = 4;
diff --git a/services/audioflinger/AudioStreamOut.cpp b/services/audioflinger/AudioStreamOut.cpp
index 6026bbb..1d4b3fe 100644
--- a/services/audioflinger/AudioStreamOut.cpp
+++ b/services/audioflinger/AudioStreamOut.cpp
@@ -18,7 +18,9 @@
#define LOG_TAG "AudioFlinger"
//#define LOG_NDEBUG 0
-#include <hardware/audio.h>
+#include <media/audiohal/DeviceHalInterface.h>
+#include <media/audiohal/StreamHalInterface.h>
+#include <system/audio.h>
#include <utils/Log.h>
#include "AudioHwDevice.h"
@@ -40,19 +42,23 @@
{
}
-audio_hw_device_t *AudioStreamOut::hwDev() const
+AudioStreamOut::~AudioStreamOut()
+{
+}
+
+sp<DeviceHalInterface> AudioStreamOut::hwDev() const
{
return audioHwDev->hwDevice();
}
status_t AudioStreamOut::getRenderPosition(uint64_t *frames)
{
- if (stream == NULL) {
+ if (stream == 0) {
return NO_INIT;
}
uint32_t halPosition = 0;
- status_t status = stream->get_render_position(stream, &halPosition);
+ status_t status = stream->getRenderPosition(&halPosition);
if (status != NO_ERROR) {
return status;
}
@@ -84,12 +90,12 @@
status_t AudioStreamOut::getPresentationPosition(uint64_t *frames, struct timespec *timestamp)
{
- if (stream == NULL) {
+ if (stream == 0) {
return NO_INIT;
}
uint64_t halPosition = 0;
- status_t status = stream->get_presentation_position(stream, &halPosition, timestamp);
+ status_t status = stream->getPresentationPosition(&halPosition, timestamp);
if (status != NO_ERROR) {
return status;
}
@@ -115,24 +121,23 @@
struct audio_config *config,
const char *address)
{
- audio_stream_out_t *outStream;
+ sp<StreamOutHalInterface> outStream;
audio_output_flags_t customFlags = (config->format == AUDIO_FORMAT_IEC61937)
? (audio_output_flags_t)(flags | AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO)
: flags;
- int status = hwDev()->open_output_stream(
- hwDev(),
+ int status = hwDev()->openOutputStream(
handle,
devices,
customFlags,
config,
- &outStream,
- address);
+ address,
+ &outStream);
ALOGV("AudioStreamOut::open(), HAL returned "
" stream %p, sampleRate %d, Format %#x, "
"channelMask %#x, status %d",
- outStream,
+ outStream.get(),
config->sample_rate,
config->format,
config->channel_mask,
@@ -144,21 +149,20 @@
struct audio_config customConfig = *config;
customConfig.format = AUDIO_FORMAT_PCM_16_BIT;
- status = hwDev()->open_output_stream(
- hwDev(),
+ status = hwDev()->openOutputStream(
handle,
devices,
customFlags,
&customConfig,
- &outStream,
- address);
+ address,
+ &outStream);
ALOGV("AudioStreamOut::open(), treat IEC61937 as PCM, status = %d", status);
}
if (status == NO_ERROR) {
stream = outStream;
mHalFormatHasProportionalFrames = audio_has_proportional_frames(config->format);
- mHalFrameSize = audio_stream_out_frame_size(stream);
+ status = stream->getFrameSize(&mHalFrameSize);
}
return status;
@@ -166,47 +170,46 @@
audio_format_t AudioStreamOut::getFormat() const
{
- return stream->common.get_format(&stream->common);
+ audio_format_t result;
+ return stream->getFormat(&result) == OK ? result : AUDIO_FORMAT_INVALID;
}
uint32_t AudioStreamOut::getSampleRate() const
{
- return stream->common.get_sample_rate(&stream->common);
+ uint32_t result;
+ return stream->getSampleRate(&result) == OK ? result : 0;
}
audio_channel_mask_t AudioStreamOut::getChannelMask() const
{
- return stream->common.get_channels(&stream->common);
+ audio_channel_mask_t result;
+ return stream->getChannelMask(&result) == OK ? result : AUDIO_CHANNEL_INVALID;
}
int AudioStreamOut::flush()
{
- ALOG_ASSERT(stream != NULL);
mRenderPosition = 0;
mFramesWritten = 0;
mFramesWrittenAtStandby = 0;
- if (stream->flush != NULL) {
- return stream->flush(stream);
- }
- return NO_ERROR;
+ status_t result = stream->flush();
+ return result != INVALID_OPERATION ? result : NO_ERROR;
}
int AudioStreamOut::standby()
{
- ALOG_ASSERT(stream != NULL);
mRenderPosition = 0;
mFramesWrittenAtStandby = mFramesWritten;
- return stream->common.standby(&stream->common);
+ return stream->standby();
}
ssize_t AudioStreamOut::write(const void *buffer, size_t numBytes)
{
- ALOG_ASSERT(stream != NULL);
- ssize_t bytesWritten = stream->write(stream, buffer, numBytes);
- if (bytesWritten > 0 && mHalFrameSize > 0) {
+ size_t bytesWritten;
+ status_t result = stream->write(buffer, numBytes, &bytesWritten);
+ if (result == OK && bytesWritten > 0 && mHalFrameSize > 0) {
mFramesWritten += bytesWritten / mHalFrameSize;
}
- return bytesWritten;
+ return result == OK ? bytesWritten : result;
}
} // namespace android
diff --git a/services/audioflinger/AudioStreamOut.h b/services/audioflinger/AudioStreamOut.h
index 768f537..b16b1af 100644
--- a/services/audioflinger/AudioStreamOut.h
+++ b/services/audioflinger/AudioStreamOut.h
@@ -23,11 +23,11 @@
#include <system/audio.h>
-#include "AudioStreamOut.h"
-
namespace android {
class AudioHwDevice;
+class DeviceHalInterface;
+class StreamOutHalInterface;
/**
* Managed access to a HAL output stream.
@@ -38,10 +38,10 @@
// For emphasis, we could also make all pointers to them be "const *",
// but that would clutter the code unnecessarily.
AudioHwDevice * const audioHwDev;
- audio_stream_out_t *stream;
+ sp<StreamOutHalInterface> stream;
const audio_output_flags_t flags;
- audio_hw_device_t *hwDev() const;
+ sp<DeviceHalInterface> hwDev() const;
AudioStreamOut(AudioHwDevice *dev, audio_output_flags_t flags);
@@ -51,7 +51,7 @@
struct audio_config *config,
const char *address);
- virtual ~AudioStreamOut() { }
+ virtual ~AudioStreamOut();
// Get the bottom 32-bits of the 64-bit render position.
status_t getRenderPosition(uint32_t *frames);
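
Note on the AudioStreamOut changes above: direct audio_stream_out_t function-pointer calls are replaced by sp<StreamOutHalInterface> methods that return a status_t and deliver the value through an out-parameter. A minimal caller-side sketch of that pattern, assuming the same getSampleRate() signature used in this patch (the helper name and fallback value are illustrative only, not part of the change):

    // Query a HAL stream property defensively, mirroring the
    // AudioStreamOut::getSampleRate() change in this patch.
    uint32_t querySampleRateOrDefault(const sp<StreamOutHalInterface>& stream,
                                      uint32_t fallback) {
        if (stream == 0) {
            return fallback; // stream not opened yet
        }
        uint32_t rate = 0;
        return stream->getSampleRate(&rate) == OK ? rate : fallback;
    }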
diff --git a/services/audioflinger/BufLog.cpp b/services/audioflinger/BufLog.cpp
new file mode 100644
index 0000000..9680eb5
--- /dev/null
+++ b/services/audioflinger/BufLog.cpp
@@ -0,0 +1,195 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#include "BufLog.h"
+#define LOG_TAG "BufLog"
+//#define LOG_NDEBUG 0
+
+#include <errno.h>
+#include "log/log.h"
+#include <pthread.h>
+#include <stdio.h>
+#include <string.h>
+
+#define MIN(a, b) ((a) < (b) ? (a) : (b))
+
+// ------------------------------
+// BufLogSingleton
+// ------------------------------
+pthread_once_t onceControl = PTHREAD_ONCE_INIT;
+
+BufLog *BufLogSingleton::mInstance = NULL;
+
+void BufLogSingleton::initOnce() {
+ mInstance = new BufLog();
+ ALOGW("=====================================\n" \
+ "Warning: BUFLOG is defined in some part of your code.\n" \
+ "This will create large audio dumps in %s.\n" \
+ "=====================================\n", BUFLOG_BASE_PATH);
+}
+
+BufLog *BufLogSingleton::instance() {
+ pthread_once(&onceControl, initOnce);
+ return mInstance;
+}
+
+bool BufLogSingleton::instanceExists() {
+ return mInstance != NULL;
+}
+
+// ------------------------------
+// BufLog
+// ------------------------------
+
+BufLog::BufLog() {
+ memset(mStreams, 0, sizeof(mStreams));
+}
+
+BufLog::~BufLog() {
+ android::Mutex::Autolock autoLock(mLock);
+
+ for (unsigned int id = 0; id < BUFLOG_MAXSTREAMS; id++) {
+ BufLogStream *pBLStream = mStreams[id];
+ if (pBLStream != NULL) {
+ delete pBLStream;
+ mStreams[id] = NULL;
+ }
+ }
+}
+
+size_t BufLog::write(int streamid, const char *tag, int format, int channels,
+ int samplingRate, size_t maxBytes, const void *buf, size_t size) {
+ unsigned int id = streamid % BUFLOG_MAXSTREAMS;
+ android::Mutex::Autolock autoLock(mLock);
+
+ BufLogStream *pBLStream = mStreams[id];
+
+ if (pBLStream == NULL) {
+ pBLStream = mStreams[id] = new BufLogStream(id, tag, format, channels,
+ samplingRate, maxBytes);
+ ALOG_ASSERT(pBLStream != NULL, "BufLogStream Failed to be created");
+ }
+
+ return pBLStream->write(buf, size);
+}
+
+void BufLog::reset() {
+ android::Mutex::Autolock autoLock(mLock);
+ ALOGV("Resetting all BufLogs");
+ int count = 0;
+
+ for (unsigned int id = 0; id < BUFLOG_MAXSTREAMS; id++) {
+ BufLogStream *pBLStream = mStreams[id];
+ if (pBLStream != NULL) {
+ delete pBLStream;
+ mStreams[id] = NULL;
+ count++;
+ }
+ }
+ ALOGV("Reset %d BufLogs", count);
+}
+
+// ------------------------------
+// BufLogStream
+// ------------------------------
+
+BufLogStream::BufLogStream(unsigned int id,
+ const char *tag,
+ unsigned int format,
+ unsigned int channels,
+ unsigned int samplingRate,
+ size_t maxBytes = 0) : mId(id), mFormat(format), mChannels(channels),
+ mSamplingRate(samplingRate), mMaxBytes(maxBytes) {
+ mByteCount = 0l;
+ mPaused = false;
+ if (tag != NULL) {
+ strncpy(mTag, tag, BUFLOGSTREAM_MAX_TAGSIZE);
+ } else {
+ mTag[0] = 0;
+ }
+ ALOGV("Creating BufLogStream id:%d tag:%s format:%d ch:%d sr:%d maxbytes:%zu", mId, mTag,
+ mFormat, mChannels, mSamplingRate, mMaxBytes);
+
+ // open file(s), info about tag, format, etc.
+ // timestamp
+ char timeStr[16]; //size 16: format %Y%m%d%H%M%S 14 chars + string null terminator
+ struct timeval tv;
+ gettimeofday(&tv, NULL);
+ struct tm tm;
+ localtime_r(&tv.tv_sec, &tm);
+ strftime(timeStr, sizeof(timeStr), "%Y%m%d%H%M%S", &tm);
+ char logPath[BUFLOG_MAX_PATH_SIZE];
+ snprintf(logPath, BUFLOG_MAX_PATH_SIZE, "%s/%s_%d_%s_%d_%d_%d.raw", BUFLOG_BASE_PATH, timeStr,
+ mId, mTag, mFormat, mChannels, mSamplingRate);
+ ALOGV("data output: %s", logPath);
+
+ mFile = fopen(logPath, "wb");
+ if (mFile != NULL) {
+ ALOGV("Success creating file at: %p", mFile);
+ } else {
+ ALOGE("Error: could not create file BufLogStream %s", strerror(errno));
+ }
+}
+
+void BufLogStream::closeStream_l() {
+ ALOGV("Closing BufLogStream id:%d tag:%s", mId, mTag);
+ if (mFile != NULL) {
+ fclose(mFile);
+ mFile = NULL;
+ }
+}
+
+BufLogStream::~BufLogStream() {
+ ALOGV("Destroying BufLogStream id:%d tag:%s", mId, mTag);
+ android::Mutex::Autolock autoLock(mLock);
+ closeStream_l();
+}
+
+size_t BufLogStream::write(const void *buf, size_t size) {
+
+ size_t bytes = 0;
+ if (!mPaused && mFile != NULL) {
+ if (size > 0 && buf != NULL) {
+ android::Mutex::Autolock autoLock(mLock);
+ if (mMaxBytes > 0) {
+ size = MIN(size, mMaxBytes - mByteCount);
+ }
+ bytes = fwrite(buf, 1, size, mFile);
+ mByteCount += bytes;
+ if (mMaxBytes > 0 && mMaxBytes == mByteCount) {
+ closeStream_l();
+ }
+ }
+ ALOGV("wrote %zu/%zu bytes to BufLogStream %d tag:%s. Total Bytes: %zu", bytes, size, mId,
+ mTag, mByteCount);
+ } else {
+ ALOGV("Warning: trying to write to %s BufLogStream id:%d tag:%s",
+ mPaused ? "paused" : "closed", mId, mTag);
+ }
+ return bytes;
+}
+
+bool BufLogStream::setPause(bool pause) {
+ bool old = mPaused;
+ mPaused = pause;
+ return old;
+}
+
+void BufLogStream::finalize() {
+ android::Mutex::Autolock autoLock(mLock);
+ closeStream_l();
+}
diff --git a/services/audioflinger/BufLog.h b/services/audioflinger/BufLog.h
new file mode 100644
index 0000000..1b402f4
--- /dev/null
+++ b/services/audioflinger/BufLog.h
@@ -0,0 +1,199 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_BUFLOG_H
+#define ANDROID_AUDIO_BUFLOG_H
+
+/*
+ * BUFLOG creates up to BUFLOG_MAXSTREAMS simultaneous streams [0:15] of audio buffer data
+ * and saves them to disk. The files are stored in the path specified in BUFLOG_BASE_PATH and
+ * are named following this format:
+ * YYYYMMDDHHMMSS_id_tag_format_channels_samplingrate.raw
+ *
+ * Normally we strip BUFLOG dumps from release builds.
+ * You can modify this (for example with "#define BUFLOG_NDEBUG 0"
+ * at the top of your source file) to change that behavior.
+ *
+ * usage:
+ * - Add this to the top of the source file you want to debug:
+ * #define BUFLOG_NDEBUG 0
+ * #include "BufLog.h"
+ *
+ * - dump an audio buffer
+ * BUFLOG(buff_id, buff_tag, format, channels, sampling_rate, max_bytes, buff_pointer, buff_size);
+ *
+ * buff_id: int [0:15] buffer id. If a buffer doesn't exist, it is created the first time it is used.
+ * buff_tag: char* string tag used on stream filename and logs
+ * format: int Audio format (audio_format_t see audio.h)
+ * channels: int Channel Count
+ * sampling_rate: int Sampling rate in Hz. e.g. 8000, 16000, 44100, 48000, etc
+ * max_bytes: int [0 or positive number]
+ * Maximum size of the file (in bytes) to be output.
+ * If the value is 0, no limit.
+ * buff_pointer: void * Pointer to audio buffer.
+ * buff_size: int Size (in bytes) of the current audio buffer to be stored.
+ *
+ *
+ * Example usage:
+ * int format = mConfig.outputCfg.format;
+ * int channels = audio_channel_count_from_out_mask(mConfig.outputCfg.channels);
+ * int samplingRate = mConfig.outputCfg.samplingRate;
+ * int frameCount = mConfig.outputCfg.buffer.frameCount;
+ * int frameSize = audio_bytes_per_sample((audio_format_t)format) * channels;
+ * int buffSize = frameCount * frameSize;
+ * long maxBytes = 10 * samplingRate * frameSize; //10 seconds max
+ * BUFLOG(11, "loudnes_enhancer_out", format, channels, samplingRate, maxBytes,
+ * mConfig.outputCfg.buffer.raw, buffSize);
+ *
+ * Other macros:
+ * BUFLOG_EXISTS returns true if there is an instance of BufLog
+ *
+ * BUFLOG_RESET If an instance of BufLog exists, it stops the capture and closes all
+ * streams.
+ * If BUFLOG(..) is called again, new streams are created.
+ */
+
+#ifndef BUFLOG_NDEBUG
+#ifdef NDEBUG
+#define BUFLOG_NDEBUG 1
+#else
+#define BUFLOG_NDEBUG 0
+#endif
+#endif
+
+/*
+ * Simplified macro to send a buffer.
+ */
+#ifndef BUFLOG
+#define __BUFLOG(STREAMID, TAG, FORMAT, CHANNELS, SAMPLINGRATE, MAXBYTES, BUF, SIZE) \
+ BufLogSingleton::instance()->write(STREAMID, TAG, FORMAT, CHANNELS, SAMPLINGRATE, MAXBYTES, \
+ BUF, SIZE)
+#if BUFLOG_NDEBUG
+#define BUFLOG(STREAMID, TAG, FORMAT, CHANNELS, SAMPLINGRATE, MAXBYTES, BUF, SIZE) \
+ do { if (0) { } } while (0)
+#else
+#define BUFLOG(STREAMID, TAG, FORMAT, CHANNELS, SAMPLINGRATE, MAXBYTES, BUF, SIZE) \
+ __BUFLOG(STREAMID, TAG, FORMAT, CHANNELS, SAMPLINGRATE, MAXBYTES, BUF, SIZE)
+#endif
+#endif
+
+#ifndef BUFLOG_EXISTS
+#define BUFLOG_EXISTS BufLogSingleton::instanceExists()
+#endif
+
+#ifndef BUFLOG_RESET
+#define BUFLOG_RESET do { if (BufLogSingleton::instanceExists()) { \
+ BufLogSingleton::instance()->reset(); } } while (0)
+#endif
+
+
+#include <stdint.h>
+#include <stdio.h>
+#include <sys/types.h>
+#include <utils/Mutex.h>
+
+//BufLog configuration
+#define BUFLOGSTREAM_MAX_TAGSIZE 32
+#define BUFLOG_BASE_PATH "/data/misc/audioserver"
+#define BUFLOG_MAX_PATH_SIZE 300
+
+class BufLogStream {
+public:
+ BufLogStream(unsigned int id,
+ const char *tag,
+ unsigned int format,
+ unsigned int channels,
+ unsigned int samplingRate,
+ size_t maxBytes);
+ ~BufLogStream();
+
+ // write buffer to stream
+ // buf: pointer to buffer
+ // size: number of bytes to write
+ size_t write(const void *buf, size_t size);
+
+ // pause/resume stream
+ // pause: true = paused, false = not paused
+ // return value: previous state of stream (paused or not).
+ bool setPause(bool pause);
+
+ // will stop the stream and close any open file
+ // the stream can't be reopened; instead, a new stream (and file) should be created.
+ void finalize();
+
+private:
+ bool mPaused;
+ const unsigned int mId;
+ char mTag[BUFLOGSTREAM_MAX_TAGSIZE + 1];
+ const unsigned int mFormat;
+ const unsigned int mChannels;
+ const unsigned int mSamplingRate;
+ const size_t mMaxBytes;
+ size_t mByteCount;
+ FILE *mFile;
+ mutable android::Mutex mLock;
+
+ void closeStream_l();
+};
+
+
+class BufLog {
+public:
+ BufLog();
+ ~BufLog();
+ BufLog(BufLog const&) {};
+
+ // streamid: int [0:BUFLOG_MAXSTREAMS-1] buffer id.
+ // If a buffer doesn't exist, it is created the first time it is referenced
+ // tag: char* string tag used on stream filename and logs
+ // format: int Audio format (audio_format_t see audio.h)
+ // channels: int Channel Count
+ // samplingRate: int Sampling rate in Hz. e.g. 8000, 16000, 44100, 48000, etc
+ // maxBytes: int [0 or positive number]
+ // Maximum size of the file (in bytes) to be output.
+ // If the value is 0, no limit.
+ // size: int Size (in bytes) of the current audio buffer to be written.
+ // buf: void * Pointer to audio buffer.
+ size_t write(int streamid,
+ const char *tag,
+ int format,
+ int channels,
+ int samplingRate,
+ size_t maxBytes,
+ const void *buf,
+ size_t size);
+
+ // reset will stop and close all active streams, thus finalizing any open file.
+ // New streams will be created if write() is called again.
+ void reset();
+
+protected:
+ static const unsigned int BUFLOG_MAXSTREAMS = 16;
+ BufLogStream *mStreams[BUFLOG_MAXSTREAMS];
+ mutable android::Mutex mLock;
+};
+
+class BufLogSingleton {
+public:
+ static BufLog *instance();
+ static bool instanceExists();
+
+private:
+ static void initOnce();
+ static BufLog *mInstance;
+};
+
+#endif //ANDROID_AUDIO_BUFLOG_H
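
As a quick orientation for the new BufLog.h header, a minimal usage sketch follows (not part of the patch; the function, tag, and stream id below are illustrative, while the macro names and argument order come from BufLog.h as added above):

    #define BUFLOG_NDEBUG 0   // enable dumps in this translation unit
    #include "BufLog.h"

    void dumpMixBuffer(const void *buf, size_t sizeBytes,
                       int format, int channels, int sampleRate) {
        // stream id 3 and tag "mix_out" are arbitrary; maxBytes == 0 means no size limit
        BUFLOG(3, "mix_out", format, channels, sampleRate, 0 /*maxBytes*/, buf, sizeBytes);
    }

    // Later, BUFLOG_RESET closes all streams; new dump files are created on the next BUFLOG call.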
diff --git a/services/audioflinger/BufferProviders.cpp b/services/audioflinger/BufferProviders.cpp
index 7b6dfcb..ba5f7b6 100644
--- a/services/audioflinger/BufferProviders.cpp
+++ b/services/audioflinger/BufferProviders.cpp
@@ -17,11 +17,12 @@
#define LOG_TAG "BufferProvider"
//#define LOG_NDEBUG 0
-#include <audio_effects/effect_downmix.h>
#include <audio_utils/primitives.h>
#include <audio_utils/format.h>
+#include <media/audiohal/EffectHalInterface.h>
+#include <media/audiohal/EffectsFactoryHalInterface.h>
#include <media/AudioResamplerPublic.h>
-#include <media/EffectsFactoryApi.h>
+#include <system/audio_effects/effect_downmix.h>
#include <utils/Log.h>
@@ -145,13 +146,22 @@
ALOGV("DownmixerBufferProvider(%p)(%#x, %#x, %#x %u %d)",
this, inputChannelMask, outputChannelMask, format,
sampleRate, sessionId);
- if (!sIsMultichannelCapable
- || EffectCreate(&sDwnmFxDesc.uuid,
- sessionId,
- SESSION_ID_INVALID_AND_IGNORED,
- &mDownmixHandle) != 0) {
+ if (!sIsMultichannelCapable) {
+ ALOGE("DownmixerBufferProvider() error: not multichannel capable");
+ return;
+ }
+ mEffectsFactory = EffectsFactoryHalInterface::create();
+ if (mEffectsFactory == 0) {
+ ALOGE("DownmixerBufferProvider() error: could not obtain the effects factory");
+ return;
+ }
+ if (mEffectsFactory->createEffect(&sDwnmFxDesc.uuid,
+ sessionId,
+ SESSION_ID_INVALID_AND_IGNORED,
+ &mDownmixInterface) != 0) {
ALOGE("DownmixerBufferProvider() error creating downmixer effect");
- mDownmixHandle = NULL;
+ mDownmixInterface.clear();
+ mEffectsFactory.clear();
return;
}
// channel input configuration will be overridden per-track
@@ -173,28 +183,28 @@
uint32_t replySize = sizeof(int);
// Configure downmixer
- status_t status = (*mDownmixHandle)->command(mDownmixHandle,
+ status_t status = mDownmixInterface->command(
EFFECT_CMD_SET_CONFIG /*cmdCode*/, sizeof(effect_config_t) /*cmdSize*/,
&mDownmixConfig /*pCmdData*/,
&replySize, &cmdStatus /*pReplyData*/);
if (status != 0 || cmdStatus != 0) {
ALOGE("DownmixerBufferProvider() error %d cmdStatus %d while configuring downmixer",
status, cmdStatus);
- EffectRelease(mDownmixHandle);
- mDownmixHandle = NULL;
+ mDownmixInterface.clear();
+ mEffectsFactory.clear();
return;
}
// Enable downmixer
replySize = sizeof(int);
- status = (*mDownmixHandle)->command(mDownmixHandle,
+ status = mDownmixInterface->command(
EFFECT_CMD_ENABLE /*cmdCode*/, 0 /*cmdSize*/, NULL /*pCmdData*/,
&replySize, &cmdStatus /*pReplyData*/);
if (status != 0 || cmdStatus != 0) {
ALOGE("DownmixerBufferProvider() error %d cmdStatus %d while enabling downmixer",
status, cmdStatus);
- EffectRelease(mDownmixHandle);
- mDownmixHandle = NULL;
+ mDownmixInterface.clear();
+ mEffectsFactory.clear();
return;
}
@@ -211,15 +221,15 @@
param->vsize = sizeof(downmix_type_t);
memcpy(param->data + psizePadded, &downmixType, param->vsize);
replySize = sizeof(int);
- status = (*mDownmixHandle)->command(mDownmixHandle,
+ status = mDownmixInterface->command(
EFFECT_CMD_SET_PARAM /* cmdCode */, downmixParamSize /* cmdSize */,
param /*pCmdData*/, &replySize, &cmdStatus /*pReplyData*/);
free(param);
if (status != 0 || cmdStatus != 0) {
ALOGE("DownmixerBufferProvider() error %d cmdStatus %d while setting downmix type",
status, cmdStatus);
- EffectRelease(mDownmixHandle);
- mDownmixHandle = NULL;
+ mDownmixInterface.clear();
+ mEffectsFactory.clear();
return;
}
ALOGV("DownmixerBufferProvider() downmix type set to %d", (int) downmixType);
@@ -228,8 +238,6 @@
DownmixerBufferProvider::~DownmixerBufferProvider()
{
ALOGV("~DownmixerBufferProvider (%p)", this);
- EffectRelease(mDownmixHandle);
- mDownmixHandle = NULL;
}
void DownmixerBufferProvider::copyFrames(void *dst, const void *src, size_t frames)
@@ -239,7 +247,7 @@
mDownmixConfig.outputCfg.buffer.frameCount = frames;
mDownmixConfig.outputCfg.buffer.raw = dst;
// may be in-place if src == dst.
- status_t res = (*mDownmixHandle)->process(mDownmixHandle,
+ status_t res = mDownmixInterface->process(
&mDownmixConfig.inputCfg.buffer, &mDownmixConfig.outputCfg.buffer);
ALOGE_IF(res != OK, "DownmixBufferProvider error %d", res);
}
@@ -248,8 +256,13 @@
/*static*/ status_t DownmixerBufferProvider::init()
{
// find multichannel downmix effect if we have to play multichannel content
+ sp<EffectsFactoryHalInterface> effectsFactory = EffectsFactoryHalInterface::create();
+ if (effectsFactory == 0) {
+ ALOGE("AudioMixer() error: could not obtain the effects factory");
+ return NO_INIT;
+ }
uint32_t numEffects = 0;
- int ret = EffectQueryNumberEffects(&numEffects);
+ int ret = effectsFactory->queryNumberEffects(&numEffects);
if (ret != 0) {
ALOGE("AudioMixer() error %d querying number of effects", ret);
return NO_INIT;
@@ -257,7 +270,7 @@
ALOGV("EffectQueryNumberEffects() numEffects=%d", numEffects);
for (uint32_t i = 0 ; i < numEffects ; i++) {
- if (EffectQueryEffect(i, &sDwnmFxDesc) == 0) {
+ if (effectsFactory->getDescriptor(i, &sDwnmFxDesc) == 0) {
ALOGV("effect %d is called %s", i, sDwnmFxDesc.name);
if (memcmp(&sDwnmFxDesc.type, EFFECT_UIID_DOWNMIX, sizeof(effect_uuid_t)) == 0) {
ALOGI("found effect \"%s\" from %s",
diff --git a/services/audioflinger/BufferProviders.h b/services/audioflinger/BufferProviders.h
index abd43c6..2a857fe 100644
--- a/services/audioflinger/BufferProviders.h
+++ b/services/audioflinger/BufferProviders.h
@@ -20,13 +20,17 @@
#include <stdint.h>
#include <sys/types.h>
-#include <hardware/audio_effect.h>
#include <media/AudioBufferProvider.h>
#include <system/audio.h>
+#include <system/audio_effect.h>
#include <sonic.h>
+#include <utils/StrongPointer.h>
namespace android {
+class EffectHalInterface;
+class EffectsFactoryHalInterface;
+
// ----------------------------------------------------------------------------
class PassthruBufferProvider : public AudioBufferProvider {
@@ -97,12 +101,13 @@
//Overrides
virtual void copyFrames(void *dst, const void *src, size_t frames);
- bool isValid() const { return mDownmixHandle != NULL; }
+ bool isValid() const { return mDownmixInterface.get() != NULL; }
static status_t init();
static bool isMultichannelCapable() { return sIsMultichannelCapable; }
protected:
- effect_handle_t mDownmixHandle;
+ sp<EffectsFactoryHalInterface> mEffectsFactory;
+ sp<EffectHalInterface> mDownmixInterface;
effect_config_t mDownmixConfig;
// effect descriptor for the downmixer used by the mixer
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 3675998..3a04651 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -21,10 +21,11 @@
#include "Configuration.h"
#include <utils/Log.h>
-#include <audio_effects/effect_visualizer.h>
#include <audio_utils/primitives.h>
#include <private/media/AudioEffectShared.h>
-#include <media/EffectsFactoryApi.h>
+#include <media/audiohal/EffectHalInterface.h>
+#include <media/audiohal/EffectsFactoryHalInterface.h>
+#include <system/audio_effects/effect_visualizer.h>
#include "AudioFlinger.h"
#include "ServiceUtilities.h"
@@ -64,7 +65,6 @@
mThread(thread), mChain(chain), mId(id), mSessionId(sessionId),
mDescriptor(*desc),
// mConfig is set by configure() and not used before then
- mEffectInterface(NULL),
mStatus(NO_INIT), mState(IDLE),
// mMaxDisableWaitCnt is set by configure() and not used before then
// mDisableWaitCnt is set by process() and updateState() and not used before then
@@ -75,7 +75,15 @@
int lStatus;
// create effect engine from effect factory
- mStatus = EffectCreate(&desc->uuid, sessionId, thread->id(), &mEffectInterface);
+ mStatus = -ENODEV;
+ sp<AudioFlinger> audioFlinger = mAudioFlinger.promote();
+ if (audioFlinger != 0) {
+ sp<EffectsFactoryHalInterface> effectsFactory = audioFlinger->getEffectsFactory();
+ if (effectsFactory != 0) {
+ mStatus = effectsFactory->createEffect(
+ &desc->uuid, sessionId, thread->id(), &mEffectInterface);
+ }
+ }
if (mStatus != NO_ERROR) {
return;
@@ -86,21 +94,20 @@
goto Error;
}
- ALOGV("Constructor success name %s, Interface %p", mDescriptor.name, mEffectInterface);
+ ALOGV("Constructor success name %s, Interface %p", mDescriptor.name, mEffectInterface.get());
return;
Error:
- EffectRelease(mEffectInterface);
- mEffectInterface = NULL;
+ mEffectInterface.clear();
ALOGV("Constructor Error %d", mStatus);
}
AudioFlinger::EffectModule::~EffectModule()
{
ALOGV("Destructor %p", this);
- if (mEffectInterface != NULL) {
+ if (mEffectInterface != 0) {
remove_effect_from_hal_l();
// release effect engine
- EffectRelease(mEffectInterface);
+ mEffectInterface.clear();
}
}
@@ -266,7 +273,7 @@
{
Mutex::Autolock _l(mLock);
- if (mState == DESTROYED || mEffectInterface == NULL ||
+ if (mState == DESTROYED || mEffectInterface == 0 ||
mConfig.inputCfg.buffer.raw == NULL ||
mConfig.outputCfg.buffer.raw == NULL) {
return;
@@ -282,9 +289,7 @@
int ret;
if (isProcessImplemented()) {
// do the actual processing in the effect engine
- ret = (*mEffectInterface)->process(mEffectInterface,
- &mConfig.inputCfg.buffer,
- &mConfig.outputCfg.buffer);
+ ret = mEffectInterface->process(&mConfig.inputCfg.buffer, &mConfig.outputCfg.buffer);
} else {
if (mConfig.inputCfg.buffer.raw != mConfig.outputCfg.buffer.raw) {
size_t frameCnt = mConfig.inputCfg.buffer.frameCount * FCC_2; //always stereo here
@@ -330,10 +335,10 @@
void AudioFlinger::EffectModule::reset_l()
{
- if (mStatus != NO_ERROR || mEffectInterface == NULL) {
+ if (mStatus != NO_ERROR || mEffectInterface == 0) {
return;
}
- (*mEffectInterface)->command(mEffectInterface, EFFECT_CMD_RESET, 0, NULL, 0, NULL);
+ mEffectInterface->command(EFFECT_CMD_RESET, 0, NULL, 0, NULL);
}
status_t AudioFlinger::EffectModule::configure()
@@ -343,7 +348,7 @@
uint32_t size;
audio_channel_mask_t channelMask;
- if (mEffectInterface == NULL) {
+ if (mEffectInterface == 0) {
status = NO_INIT;
goto exit;
}
@@ -408,12 +413,11 @@
status_t cmdStatus;
size = sizeof(int);
- status = (*mEffectInterface)->command(mEffectInterface,
- EFFECT_CMD_SET_CONFIG,
- sizeof(effect_config_t),
- &mConfig,
- &size,
- &cmdStatus);
+ status = mEffectInterface->command(EFFECT_CMD_SET_CONFIG,
+ sizeof(effect_config_t),
+ &mConfig,
+ &size,
+ &cmdStatus);
if (status == 0) {
status = cmdStatus;
}
@@ -435,12 +439,11 @@
}
*((int32_t *)p->data + 1)= latency;
- (*mEffectInterface)->command(mEffectInterface,
- EFFECT_CMD_SET_PARAM,
- sizeof(effect_param_t) + 8,
- &buf32,
- &size,
- &cmdStatus);
+ mEffectInterface->command(EFFECT_CMD_SET_PARAM,
+ sizeof(effect_param_t) + 8,
+ &buf32,
+ &size,
+ &cmdStatus);
}
mMaxDisableWaitCnt = (MAX_DISABLE_TIME_MS * mConfig.outputCfg.samplingRate) /
@@ -454,17 +457,16 @@
status_t AudioFlinger::EffectModule::init()
{
Mutex::Autolock _l(mLock);
- if (mEffectInterface == NULL) {
+ if (mEffectInterface == 0) {
return NO_INIT;
}
status_t cmdStatus;
uint32_t size = sizeof(status_t);
- status_t status = (*mEffectInterface)->command(mEffectInterface,
- EFFECT_CMD_INIT,
- 0,
- NULL,
- &size,
- &cmdStatus);
+ status_t status = mEffectInterface->command(EFFECT_CMD_INIT,
+ 0,
+ NULL,
+ &size,
+ &cmdStatus);
if (status == 0) {
status = cmdStatus;
}
@@ -477,9 +479,10 @@
(mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC) {
sp<ThreadBase> thread = mThread.promote();
if (thread != 0) {
- audio_stream_t *stream = thread->stream();
- if (stream != NULL) {
- stream->add_audio_effect(stream, mEffectInterface);
+ sp<StreamHalInterface> stream = thread->stream();
+ if (stream != 0) {
+ status_t result = stream->addEffect(mEffectInterface);
+ ALOGE_IF(result != OK, "Error when adding effect: %d", result);
}
}
}
@@ -505,7 +508,7 @@
status_t AudioFlinger::EffectModule::start_l()
{
- if (mEffectInterface == NULL) {
+ if (mEffectInterface == 0) {
return NO_INIT;
}
if (mStatus != NO_ERROR) {
@@ -513,12 +516,11 @@
}
status_t cmdStatus;
uint32_t size = sizeof(status_t);
- status_t status = (*mEffectInterface)->command(mEffectInterface,
- EFFECT_CMD_ENABLE,
- 0,
- NULL,
- &size,
- &cmdStatus);
+ status_t status = mEffectInterface->command(EFFECT_CMD_ENABLE,
+ 0,
+ NULL,
+ &size,
+ &cmdStatus);
if (status == 0) {
status = cmdStatus;
}
@@ -536,7 +538,7 @@
status_t AudioFlinger::EffectModule::stop_l()
{
- if (mEffectInterface == NULL) {
+ if (mEffectInterface == 0) {
return NO_INIT;
}
if (mStatus != NO_ERROR) {
@@ -544,12 +546,11 @@
}
status_t cmdStatus = NO_ERROR;
uint32_t size = sizeof(status_t);
- status_t status = (*mEffectInterface)->command(mEffectInterface,
- EFFECT_CMD_DISABLE,
- 0,
- NULL,
- &size,
- &cmdStatus);
+ status_t status = mEffectInterface->command(EFFECT_CMD_DISABLE,
+ 0,
+ NULL,
+ &size,
+ &cmdStatus);
if (status == NO_ERROR) {
status = cmdStatus;
}
@@ -565,9 +566,10 @@
(mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC) {
sp<ThreadBase> thread = mThread.promote();
if (thread != 0) {
- audio_stream_t *stream = thread->stream();
- if (stream != NULL) {
- stream->remove_audio_effect(stream, mEffectInterface);
+ sp<StreamHalInterface> stream = thread->stream();
+ if (stream != 0) {
+ status_t result = stream->removeEffect(mEffectInterface);
+ ALOGE_IF(result != OK, "Error when removing effect: %d", result);
}
}
}
@@ -588,9 +590,9 @@
void *pReplyData)
{
Mutex::Autolock _l(mLock);
- ALOGVV("command(), cmdCode: %d, mEffectInterface: %p", cmdCode, mEffectInterface);
+ ALOGVV("command(), cmdCode: %d, mEffectInterface: %p", cmdCode, mEffectInterface.get());
- if (mState == DESTROYED || mEffectInterface == NULL) {
+ if (mState == DESTROYED || mEffectInterface == 0) {
return NO_INIT;
}
if (mStatus != NO_ERROR) {
@@ -625,12 +627,11 @@
android_errorWriteLog(0x534e4554, "30204301");
return -EINVAL;
}
- status_t status = (*mEffectInterface)->command(mEffectInterface,
- cmdCode,
- cmdSize,
- pCmdData,
- replySize,
- pReplyData);
+ status_t status = mEffectInterface->command(cmdCode,
+ cmdSize,
+ pCmdData,
+ replySize,
+ pReplyData);
if (cmdCode != EFFECT_CMD_GET_PARAM && status == NO_ERROR) {
uint32_t size = (replySize == NULL) ? 0 : *replySize;
for (size_t i = 1; i < mHandles.size(); i++) {
@@ -752,12 +753,11 @@
if (controller) {
pVolume = volume;
}
- status = (*mEffectInterface)->command(mEffectInterface,
- EFFECT_CMD_SET_VOLUME,
- size,
- volume,
- &size,
- pVolume);
+ status = mEffectInterface->command(EFFECT_CMD_SET_VOLUME,
+ size,
+ volume,
+ &size,
+ pVolume);
if (controller && status == NO_ERROR && size == sizeof(volume)) {
*left = volume[0];
*right = volume[1];
@@ -782,12 +782,11 @@
uint32_t size = sizeof(status_t);
uint32_t cmd = audio_is_output_devices(device) ? EFFECT_CMD_SET_DEVICE :
EFFECT_CMD_SET_INPUT_DEVICE;
- status = (*mEffectInterface)->command(mEffectInterface,
- cmd,
- sizeof(uint32_t),
- &device,
- &size,
- &cmdStatus);
+ status = mEffectInterface->command(cmd,
+ sizeof(uint32_t),
+ &device,
+ &size,
+ &cmdStatus);
}
return status;
}
@@ -802,12 +801,11 @@
if ((mDescriptor.flags & EFFECT_FLAG_AUDIO_MODE_MASK) == EFFECT_FLAG_AUDIO_MODE_IND) {
status_t cmdStatus;
uint32_t size = sizeof(status_t);
- status = (*mEffectInterface)->command(mEffectInterface,
- EFFECT_CMD_SET_AUDIO_MODE,
- sizeof(audio_mode_t),
- &mode,
- &size,
- &cmdStatus);
+ status = mEffectInterface->command(EFFECT_CMD_SET_AUDIO_MODE,
+ sizeof(audio_mode_t),
+ &mode,
+ &size,
+ &cmdStatus);
if (status == NO_ERROR) {
status = cmdStatus;
}
@@ -824,12 +822,11 @@
status_t status = NO_ERROR;
if ((mDescriptor.flags & EFFECT_FLAG_AUDIO_SOURCE_MASK) == EFFECT_FLAG_AUDIO_SOURCE_IND) {
uint32_t size = 0;
- status = (*mEffectInterface)->command(mEffectInterface,
- EFFECT_CMD_SET_AUDIO_SOURCE,
- sizeof(audio_source_t),
- &source,
- &size,
- NULL);
+ status = mEffectInterface->command(EFFECT_CMD_SET_AUDIO_SOURCE,
+ sizeof(audio_source_t),
+ &source,
+ &size,
+ NULL);
}
return status;
}
@@ -876,12 +873,11 @@
cmd.isOffload = offloaded;
cmd.ioHandle = io;
- status = (*mEffectInterface)->command(mEffectInterface,
- EFFECT_CMD_OFFLOAD,
- sizeof(effect_offload_param_t),
- &cmd,
- &size,
- &cmdStatus);
+ status = mEffectInterface->command(EFFECT_CMD_OFFLOAD,
+ sizeof(effect_offload_param_t),
+ &cmd,
+ &size,
+ &cmdStatus);
if (status == NO_ERROR) {
status = cmdStatus;
}
@@ -1024,7 +1020,7 @@
result.append("\t\tSession Status State Engine:\n");
snprintf(buffer, SIZE, "\t\t%05d %03d %03d %p\n",
- mSessionId, mStatus, mState, mEffectInterface);
+ mSessionId, mStatus, mState, mEffectInterface.get());
result.append(buffer);
result.append("\t\tDescriptor:\n");
@@ -1060,7 +1056,7 @@
mConfig.inputCfg.samplingRate,
mConfig.inputCfg.channels,
mConfig.inputCfg.format,
- formatToString((audio_format_t)mConfig.inputCfg.format),
+ formatToString((audio_format_t)mConfig.inputCfg.format).c_str(),
mConfig.inputCfg.buffer.raw);
result.append(buffer);
@@ -1072,7 +1068,7 @@
mConfig.outputCfg.samplingRate,
mConfig.outputCfg.channels,
mConfig.outputCfg.format,
- formatToString((audio_format_t)mConfig.outputCfg.format));
+ formatToString((audio_format_t)mConfig.outputCfg.format).c_str());
result.append(buffer);
snprintf(buffer, SIZE, "\t\t%zu Clients:\n", mHandles.size());
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index 96cb607..a19b7fd 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -148,7 +148,7 @@
const audio_session_t mSessionId; // audio session ID
const effect_descriptor_t mDescriptor;// effect descriptor received from effect engine
effect_config_t mConfig; // input and output audio configuration
- effect_handle_t mEffectInterface; // Effect module C API
+ sp<EffectHalInterface> mEffectInterface; // Effect module HAL
status_t mStatus; // initialization status
effect_state mState; // current activation state
Vector<EffectHandle *> mHandles; // list of client handles
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index bee17fd..591a49e 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -202,9 +202,9 @@
if (hwModule != AUDIO_MODULE_HANDLE_NONE) {
ssize_t index = audioflinger->mAudioHwDevs.indexOfKey(hwModule);
if (index >= 0) {
- audio_hw_device_t *hwDevice =
+ sp<DeviceHalInterface> hwDevice =
audioflinger->mAudioHwDevs.valueAt(index)->hwDevice();
- hwDevice->release_audio_patch(hwDevice, halHandle);
+ hwDevice->releaseAudioPatch(halHandle);
}
}
}
@@ -247,11 +247,11 @@
// - special patch request with 2 sources (reuse one existing output mix) OR
// - Device to device AND
// - source HW module != destination HW module OR
- // - audio HAL version < 3.0
+ // - audio HAL does not support audio patches creation
if ((patch->num_sources == 2) ||
((patch->sinks[0].type == AUDIO_PORT_TYPE_DEVICE) &&
((patch->sinks[0].ext.device.hw_module != srcModule) ||
- (audioHwDevice->version() < AUDIO_DEVICE_API_VERSION_3_0)))) {
+ !audioHwDevice->supportsAudioPatches()))) {
if (patch->num_sources == 2) {
if (patch->sources[1].type != AUDIO_PORT_TYPE_MIX ||
(patch->num_sinks != 0 && patch->sinks[0].ext.device.hw_module !=
@@ -339,18 +339,13 @@
}
status = thread->sendCreateAudioPatchConfigEvent(patch, &halHandle);
} else {
- if (audioHwDevice->version() < AUDIO_DEVICE_API_VERSION_3_0) {
- status = INVALID_OPERATION;
- goto exit;
- }
-
- audio_hw_device_t *hwDevice = audioHwDevice->hwDevice();
- status = hwDevice->create_audio_patch(hwDevice,
- patch->num_sources,
- patch->sources,
- patch->num_sinks,
- patch->sinks,
- &halHandle);
+ sp<DeviceHalInterface> hwDevice = audioHwDevice->hwDevice();
+ status = hwDevice->createAudioPatch(patch->num_sources,
+ patch->sources,
+ patch->num_sinks,
+ patch->sinks,
+ &halHandle);
+ if (status == INVALID_OPERATION) goto exit;
}
}
} break;
@@ -388,7 +383,7 @@
}
if (thread == audioflinger->primaryPlaybackThread_l()) {
AudioParameter param = AudioParameter();
- param.addInt(String8(AUDIO_PARAMETER_STREAM_ROUTING), (int)type);
+ param.addInt(String8(AudioParameter::keyRouting), (int)type);
audioflinger->broacastParametersToRecordThreads_l(param.toString());
}
@@ -619,12 +614,8 @@
status = thread->sendReleaseAudioPatchConfigEvent(removedPatch->mHalHandle);
} else {
AudioHwDevice *audioHwDevice = audioflinger->mAudioHwDevs.valueAt(index);
- if (audioHwDevice->version() < AUDIO_DEVICE_API_VERSION_3_0) {
- status = INVALID_OPERATION;
- break;
- }
- audio_hw_device_t *hwDevice = audioHwDevice->hwDevice();
- status = hwDevice->release_audio_patch(hwDevice, removedPatch->mHalHandle);
+ sp<DeviceHalInterface> hwDevice = audioHwDevice->hwDevice();
+ status = hwDevice->releaseAudioPatch(removedPatch->mHalHandle);
}
} break;
case AUDIO_PORT_TYPE_MIX: {
@@ -687,13 +678,7 @@
}
AudioHwDevice *audioHwDevice = audioflinger->mAudioHwDevs.valueAt(index);
- if (audioHwDevice->version() >= AUDIO_DEVICE_API_VERSION_3_0) {
- audio_hw_device_t *hwDevice = audioHwDevice->hwDevice();
- return hwDevice->set_audio_port_config(hwDevice, config);
- } else {
- return INVALID_OPERATION;
- }
- return NO_ERROR;
+ return audioHwDevice->hwDevice()->setAudioPortConfig(config);
}
} // namespace android
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 5601bde..0bcb9a0 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -32,7 +32,7 @@
void *buffer,
const sp<IMemory>& sharedBuffer,
audio_session_t sessionId,
- int uid,
+ uid_t uid,
audio_output_flags_t flags,
track_type type);
virtual ~Track();
@@ -166,7 +166,7 @@
// 'volatile' means accessed without lock or
// barrier, but is read/written atomically
bool mIsInvalid; // non-resettable latch, set by invalidate()
- AudioTrackServerProxy* mAudioTrackServerProxy;
+ sp<AudioTrackServerProxy> mAudioTrackServerProxy;
bool mResumeToStopping; // track was paused in stopping state.
bool mFlushHwPending; // track requests for thread flush
audio_output_flags_t mFlags;
@@ -188,7 +188,7 @@
audio_format_t format,
audio_channel_mask_t channelMask,
size_t frameCount,
- int uid);
+ uid_t uid);
virtual ~OutputTrack();
virtual status_t start(AudioSystem::sync_event_t event =
@@ -214,8 +214,8 @@
Vector < Buffer* > mBufferQueue;
AudioBufferProvider::Buffer mOutBuffer;
bool mActive;
- DuplicatingThread* const mSourceThread; // for waitTimeMs() in write()
- AudioTrackClientProxy* mClientProxy;
+ DuplicatingThread* const mSourceThread; // for waitTimeMs() in write()
+ sp<AudioTrackClientProxy> mClientProxy;
}; // end of OutputTrack
// playback track, used by PatchPanel
diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h
index 123e033..883ff6b 100644
--- a/services/audioflinger/RecordTracks.h
+++ b/services/audioflinger/RecordTracks.h
@@ -30,7 +30,7 @@
size_t frameCount,
void *buffer,
audio_session_t sessionId,
- int uid,
+ uid_t uid,
audio_input_flags_t flags,
track_type type);
virtual ~RecordTrack();
diff --git a/services/audioflinger/SpdifStreamOut.cpp b/services/audioflinger/SpdifStreamOut.cpp
index 004a068..a44ab2a 100644
--- a/services/audioflinger/SpdifStreamOut.cpp
+++ b/services/audioflinger/SpdifStreamOut.cpp
@@ -17,13 +17,12 @@
#define LOG_TAG "AudioFlinger"
//#define LOG_NDEBUG 0
-#include <hardware/audio.h>
+#include <system/audio.h>
#include <utils/Log.h>
#include <audio_utils/spdif/SPDIFEncoder.h>
#include "AudioHwDevice.h"
-#include "AudioStreamOut.h"
#include "SpdifStreamOut.h"
namespace android {
diff --git a/services/audioflinger/SpdifStreamOut.h b/services/audioflinger/SpdifStreamOut.h
index c870250..fc9bb6e 100644
--- a/services/audioflinger/SpdifStreamOut.h
+++ b/services/audioflinger/SpdifStreamOut.h
@@ -23,9 +23,7 @@
#include <system/audio.h>
-#include "AudioHwDevice.h"
#include "AudioStreamOut.h"
-#include "SpdifStreamOut.h"
#include <audio_utils/spdif/SPDIFEncoder.h>
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 8a4fbb5..1d7b946 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -29,17 +29,19 @@
#include <cutils/properties.h>
#include <media/AudioParameter.h>
#include <media/AudioResamplerPublic.h>
+#include <media/TypeConverter.h>
#include <utils/Log.h>
#include <utils/Trace.h>
#include <private/media/AudioTrackShared.h>
-#include <hardware/audio.h>
-#include <audio_effects/effect_ns.h>
-#include <audio_effects/effect_aec.h>
+#include <private/android_filesystem_config.h>
#include <audio_utils/conversion.h>
#include <audio_utils/primitives.h>
#include <audio_utils/format.h>
#include <audio_utils/minifloat.h>
+#include <system/audio_effects/effect_ns.h>
+#include <system/audio_effects/effect_aec.h>
+#include <system/audio.h>
// NBAIO implementations
#include <media/nbaio/AudioStreamInSource.h>
@@ -143,6 +145,12 @@
// Direct output thread minimum sleep time in idle or active(underrun) state
static const nsecs_t kDirectMinSleepTimeUs = 10000;
+// The universal constant for the ubiquitous 20ms value. The value of 20ms seems to provide a good
+// balance between power consumption and latency, and allows threads to be scheduled reliably
+// by the CFS scheduler.
+// FIXME Express other hardcoded references to 20ms with references to this constant and move
+// it appropriately.
+#define FMS_20 20
// Whether to use fast mixer
static const enum {
@@ -442,168 +450,28 @@
}
}
-String8 devicesToString(audio_devices_t devices)
+std::string devicesToString(audio_devices_t devices)
{
- static const struct mapping {
- audio_devices_t mDevices;
- const char * mString;
- } mappingsOut[] = {
- {AUDIO_DEVICE_OUT_EARPIECE, "EARPIECE"},
- {AUDIO_DEVICE_OUT_SPEAKER, "SPEAKER"},
- {AUDIO_DEVICE_OUT_WIRED_HEADSET, "WIRED_HEADSET"},
- {AUDIO_DEVICE_OUT_WIRED_HEADPHONE, "WIRED_HEADPHONE"},
- {AUDIO_DEVICE_OUT_BLUETOOTH_SCO, "BLUETOOTH_SCO"},
- {AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET, "BLUETOOTH_SCO_HEADSET"},
- {AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT, "BLUETOOTH_SCO_CARKIT"},
- {AUDIO_DEVICE_OUT_BLUETOOTH_A2DP, "BLUETOOTH_A2DP"},
- {AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES,"BLUETOOTH_A2DP_HEADPHONES"},
- {AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER, "BLUETOOTH_A2DP_SPEAKER"},
- {AUDIO_DEVICE_OUT_AUX_DIGITAL, "AUX_DIGITAL"},
- {AUDIO_DEVICE_OUT_HDMI, "HDMI"},
- {AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET,"ANLG_DOCK_HEADSET"},
- {AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET,"DGTL_DOCK_HEADSET"},
- {AUDIO_DEVICE_OUT_USB_ACCESSORY, "USB_ACCESSORY"},
- {AUDIO_DEVICE_OUT_USB_DEVICE, "USB_DEVICE"},
- {AUDIO_DEVICE_OUT_TELEPHONY_TX, "TELEPHONY_TX"},
- {AUDIO_DEVICE_OUT_LINE, "LINE"},
- {AUDIO_DEVICE_OUT_HDMI_ARC, "HDMI_ARC"},
- {AUDIO_DEVICE_OUT_SPDIF, "SPDIF"},
- {AUDIO_DEVICE_OUT_FM, "FM"},
- {AUDIO_DEVICE_OUT_AUX_LINE, "AUX_LINE"},
- {AUDIO_DEVICE_OUT_SPEAKER_SAFE, "SPEAKER_SAFE"},
- {AUDIO_DEVICE_OUT_IP, "IP"},
- {AUDIO_DEVICE_OUT_BUS, "BUS"},
- {AUDIO_DEVICE_NONE, "NONE"}, // must be last
- }, mappingsIn[] = {
- {AUDIO_DEVICE_IN_COMMUNICATION, "COMMUNICATION"},
- {AUDIO_DEVICE_IN_AMBIENT, "AMBIENT"},
- {AUDIO_DEVICE_IN_BUILTIN_MIC, "BUILTIN_MIC"},
- {AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET, "BLUETOOTH_SCO_HEADSET"},
- {AUDIO_DEVICE_IN_WIRED_HEADSET, "WIRED_HEADSET"},
- {AUDIO_DEVICE_IN_AUX_DIGITAL, "AUX_DIGITAL"},
- {AUDIO_DEVICE_IN_VOICE_CALL, "VOICE_CALL"},
- {AUDIO_DEVICE_IN_TELEPHONY_RX, "TELEPHONY_RX"},
- {AUDIO_DEVICE_IN_BACK_MIC, "BACK_MIC"},
- {AUDIO_DEVICE_IN_REMOTE_SUBMIX, "REMOTE_SUBMIX"},
- {AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET, "ANLG_DOCK_HEADSET"},
- {AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET, "DGTL_DOCK_HEADSET"},
- {AUDIO_DEVICE_IN_USB_ACCESSORY, "USB_ACCESSORY"},
- {AUDIO_DEVICE_IN_USB_DEVICE, "USB_DEVICE"},
- {AUDIO_DEVICE_IN_FM_TUNER, "FM_TUNER"},
- {AUDIO_DEVICE_IN_TV_TUNER, "TV_TUNER"},
- {AUDIO_DEVICE_IN_LINE, "LINE"},
- {AUDIO_DEVICE_IN_SPDIF, "SPDIF"},
- {AUDIO_DEVICE_IN_BLUETOOTH_A2DP, "BLUETOOTH_A2DP"},
- {AUDIO_DEVICE_IN_LOOPBACK, "LOOPBACK"},
- {AUDIO_DEVICE_IN_IP, "IP"},
- {AUDIO_DEVICE_IN_BUS, "BUS"},
- {AUDIO_DEVICE_NONE, "NONE"}, // must be last
- };
- String8 result;
- audio_devices_t allDevices = AUDIO_DEVICE_NONE;
- const mapping *entry;
+ std::string result;
if (devices & AUDIO_DEVICE_BIT_IN) {
- devices &= ~AUDIO_DEVICE_BIT_IN;
- entry = mappingsIn;
+ InputDeviceConverter::maskToString(devices, result);
} else {
- entry = mappingsOut;
- }
- for ( ; entry->mDevices != AUDIO_DEVICE_NONE; entry++) {
- allDevices = (audio_devices_t) (allDevices | entry->mDevices);
- if (devices & entry->mDevices) {
- if (!result.isEmpty()) {
- result.append("|");
- }
- result.append(entry->mString);
- }
- }
- if (devices & ~allDevices) {
- if (!result.isEmpty()) {
- result.append("|");
- }
- result.appendFormat("0x%X", devices & ~allDevices);
- }
- if (result.isEmpty()) {
- result.append(entry->mString);
+ OutputDeviceConverter::maskToString(devices, result);
}
return result;
}
-String8 inputFlagsToString(audio_input_flags_t flags)
+std::string inputFlagsToString(audio_input_flags_t flags)
{
- static const struct mapping {
- audio_input_flags_t mFlag;
- const char * mString;
- } mappings[] = {
- {AUDIO_INPUT_FLAG_FAST, "FAST"},
- {AUDIO_INPUT_FLAG_HW_HOTWORD, "HW_HOTWORD"},
- {AUDIO_INPUT_FLAG_RAW, "RAW"},
- {AUDIO_INPUT_FLAG_SYNC, "SYNC"},
- {AUDIO_INPUT_FLAG_NONE, "NONE"}, // must be last
- };
- String8 result;
- audio_input_flags_t allFlags = AUDIO_INPUT_FLAG_NONE;
- const mapping *entry;
- for (entry = mappings; entry->mFlag != AUDIO_INPUT_FLAG_NONE; entry++) {
- allFlags = (audio_input_flags_t) (allFlags | entry->mFlag);
- if (flags & entry->mFlag) {
- if (!result.isEmpty()) {
- result.append("|");
- }
- result.append(entry->mString);
- }
- }
- if (flags & ~allFlags) {
- if (!result.isEmpty()) {
- result.append("|");
- }
- result.appendFormat("0x%X", flags & ~allFlags);
- }
- if (result.isEmpty()) {
- result.append(entry->mString);
- }
+ std::string result;
+ InputFlagConverter::maskToString(flags, result);
return result;
}
-String8 outputFlagsToString(audio_output_flags_t flags)
+std::string outputFlagsToString(audio_output_flags_t flags)
{
- static const struct mapping {
- audio_output_flags_t mFlag;
- const char * mString;
- } mappings[] = {
- {AUDIO_OUTPUT_FLAG_DIRECT, "DIRECT"},
- {AUDIO_OUTPUT_FLAG_PRIMARY, "PRIMARY"},
- {AUDIO_OUTPUT_FLAG_FAST, "FAST"},
- {AUDIO_OUTPUT_FLAG_DEEP_BUFFER, "DEEP_BUFFER"},
- {AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD,"COMPRESS_OFFLOAD"},
- {AUDIO_OUTPUT_FLAG_NON_BLOCKING, "NON_BLOCKING"},
- {AUDIO_OUTPUT_FLAG_HW_AV_SYNC, "HW_AV_SYNC"},
- {AUDIO_OUTPUT_FLAG_RAW, "RAW"},
- {AUDIO_OUTPUT_FLAG_SYNC, "SYNC"},
- {AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO, "IEC958_NONAUDIO"},
- {AUDIO_OUTPUT_FLAG_NONE, "NONE"}, // must be last
- };
- String8 result;
- audio_output_flags_t allFlags = AUDIO_OUTPUT_FLAG_NONE;
- const mapping *entry;
- for (entry = mappings; entry->mFlag != AUDIO_OUTPUT_FLAG_NONE; entry++) {
- allFlags = (audio_output_flags_t) (allFlags | entry->mFlag);
- if (flags & entry->mFlag) {
- if (!result.isEmpty()) {
- result.append("|");
- }
- result.append(entry->mString);
- }
- }
- if (flags & ~allFlags) {
- if (!result.isEmpty()) {
- result.append("|");
- }
- result.appendFormat("0x%X", flags & ~allFlags);
- }
- if (result.isEmpty()) {
- result.append(entry->mString);
- }
+ std::string result;
+ OutputFlagConverter::maskToString(flags, result);
return result;
}
@@ -763,12 +631,12 @@
sp<ConfigEvent> configEvent;
AudioParameter param(keyValuePair);
int value;
- if (param.getInt(String8(AUDIO_PARAMETER_MONO_OUTPUT), value) == NO_ERROR) {
+ if (param.getInt(String8(AudioParameter::keyMonoOutput), value) == NO_ERROR) {
setMasterMono_l(value != 0);
if (param.size() == 1) {
return NO_ERROR; // should be a solo parameter - we don't pass down
}
- param.remove(String8(AUDIO_PARAMETER_MONO_OUTPUT));
+ param.remove(String8(AudioParameter::keyMonoOutput));
configEvent = new SetParameterConfigEvent(param.toString());
} else {
configEvent = new SetParameterConfigEvent(keyValuePair);
@@ -937,12 +805,12 @@
dprintf(fd, " Standby: %s\n", mStandby ? "yes" : "no");
dprintf(fd, " Sample rate: %u Hz\n", mSampleRate);
dprintf(fd, " HAL frame count: %zu\n", mFrameCount);
- dprintf(fd, " HAL format: 0x%x (%s)\n", mHALFormat, formatToString(mHALFormat));
+ dprintf(fd, " HAL format: 0x%x (%s)\n", mHALFormat, formatToString(mHALFormat).c_str());
dprintf(fd, " HAL buffer size: %zu bytes\n", mBufferSize);
dprintf(fd, " Channel count: %u\n", mChannelCount);
dprintf(fd, " Channel mask: 0x%08x (%s)\n", mChannelMask,
channelMaskToString(mChannelMask, mType != RECORD).string());
- dprintf(fd, " Processing format: 0x%x (%s)\n", mFormat, formatToString(mFormat));
+ dprintf(fd, " Processing format: 0x%x (%s)\n", mFormat, formatToString(mFormat).c_str());
dprintf(fd, " Processing frame size: %zu bytes\n", mFrameSize);
dprintf(fd, " Pending config events:");
size_t numConfig = mConfigEvents.size();
@@ -955,8 +823,8 @@
} else {
dprintf(fd, " none\n");
}
- dprintf(fd, " Output device: %#x (%s)\n", mOutDevice, devicesToString(mOutDevice).string());
- dprintf(fd, " Input device: %#x (%s)\n", mInDevice, devicesToString(mInDevice).string());
+ dprintf(fd, " Output device: %#x (%s)\n", mOutDevice, devicesToString(mOutDevice).c_str());
+ dprintf(fd, " Input device: %#x (%s)\n", mInDevice, devicesToString(mInDevice).c_str());
dprintf(fd, " Audio source: %d (%s)\n", mAudioSource, sourceToString(mAudioSource));
if (locked) {
@@ -1034,7 +902,8 @@
}
if (!mNotifiedBatteryStart) {
- BatteryNotifier::getInstance().noteStartAudio();
+ // TODO: call this function for each track when it becomes active.
+ BatteryNotifier::getInstance().noteStartAudio(AID_AUDIOSERVER);
mNotifiedBatteryStart = true;
}
gBoottime.acquire(mWakeLockToken);
@@ -1061,16 +930,12 @@
}
if (mNotifiedBatteryStart) {
- BatteryNotifier::getInstance().noteStopAudio();
+ // TODO: call this function for each track when it becomes inactive.
+ BatteryNotifier::getInstance().noteStopAudio(AID_AUDIOSERVER);
mNotifiedBatteryStart = false;
}
}
-void AudioFlinger::ThreadBase::updateWakeLockUids(const SortedVector<int> &uids) {
- Mutex::Autolock _l(mLock);
- updateWakeLockUids_l(uids);
-}
-
void AudioFlinger::ThreadBase::getPowerManager_l() {
if (mSystemReady && mPowerManager == 0) {
// use checkService() to avoid blocking if power service is not up yet
@@ -1096,10 +961,10 @@
return;
}
if (mPowerManager != 0) {
- sp<IBinder> binder = new BBinder();
- status_t status;
- status = mPowerManager->updateWakeLockUids(mWakeLockToken, uids.size(), uids.array(),
- true /* FIXME force oneway contrary to .aidl */);
+ std::vector<int> uidsAsInt(uids.begin(), uids.end()); // powermanager expects uids as ints
+ status_t status = mPowerManager->updateWakeLockUids(
+ mWakeLockToken, uidsAsInt.size(), uidsAsInt.data(),
+ true /* FIXME force oneway contrary to .aidl */);
ALOGV("updateWakeLockUids_l() %s status %d", mThreadName, status);
}
}
@@ -1824,8 +1689,8 @@
dprintf(fd, " Standby delay ns=%lld\n", (long long)mStandbyDelayNs);
AudioStreamOut *output = mOutput;
audio_output_flags_t flags = output != NULL ? output->flags : AUDIO_OUTPUT_FLAG_NONE;
- String8 flagsAsString = outputFlagsToString(flags);
- dprintf(fd, " AudioStreamOut: %p flags %#x (%s)\n", output, flags, flagsAsString.string());
+ dprintf(fd, " AudioStreamOut: %p flags %#x (%s)\n",
+ output, flags, outputFlagsToString(flags).c_str());
dprintf(fd, " Frames written: %lld\n", (long long)mFramesWritten);
dprintf(fd, " Suspended frames: %lld\n", (long long)mSuspendedFrames);
if (mPipeSink.get() != nullptr) {
@@ -1833,7 +1698,7 @@
}
if (output != nullptr) {
dprintf(fd, " Hal stream dump:\n");
- (void)output->stream->common.dump(&output->stream->common, fd);
+ (void)output->stream->dump(fd);
}
}
@@ -1850,7 +1715,8 @@
ALOGV(" preExit()");
// FIXME this is using hard-coded strings but in the future, this functionality will be
// converted to use audio HAL extensions required to support tunneling
- mOutput->stream->common.set_parameters(&mOutput->stream->common, "exiting=1");
+ status_t result = mOutput->stream->setParameters(String8("exiting=1"));
+ ALOGE_IF(result != OK, "Error when setting parameters on exit: %d", result);
}
// PlaybackThread::createTrack_l() must be called with AudioFlinger::mLock held
@@ -1865,7 +1731,7 @@
audio_session_t sessionId,
audio_output_flags_t *flags,
pid_t tid,
- int uid,
+ uid_t uid,
status_t *status)
{
size_t frameCount = *pFrameCount;
@@ -1957,7 +1823,12 @@
&& audio_has_proportional_frames(format) && sharedBuffer == 0) {
// this must match AudioTrack.cpp calculateMinFrameCount().
// TODO: Move to a common library
- uint32_t latencyMs = mOutput->stream->get_latency(mOutput->stream);
+ uint32_t latencyMs = 0;
+ lStatus = mOutput->stream->getLatency(&latencyMs);
+ if (lStatus != OK) {
+ ALOGE("Error when retrieving output stream latency: %d", lStatus);
+ goto Exit;
+ }
uint32_t minBufCount = latencyMs / ((1000 * mNormalFrameCount) / mSampleRate);
if (minBufCount < 2) {
minBufCount = 2;
@@ -2088,11 +1959,11 @@
}
uint32_t AudioFlinger::PlaybackThread::latency_l() const
{
- if (initCheck() == NO_ERROR) {
- return correctLatency_l(mOutput->stream->get_latency(mOutput->stream));
- } else {
- return 0;
+ uint32_t latency;
+ if (initCheck() == NO_ERROR && mOutput->stream->getLatency(&latency) == OK) {
+ return correctLatency_l(latency);
}
+ return 0;
}
void AudioFlinger::PlaybackThread::setMasterVolume(float value)
@@ -2257,14 +2128,11 @@
String8 AudioFlinger::PlaybackThread::getParameters(const String8& keys)
{
Mutex::Autolock _l(mLock);
- if (initCheck() != NO_ERROR) {
- return String8();
+ String8 out_s8;
+ if (initCheck() == NO_ERROR && mOutput->stream->getParameters(keys, &out_s8) == OK) {
+ return out_s8;
}
-
- char *s = mOutput->stream->common.get_parameters(&mOutput->stream->common, keys.string());
- const String8 out_s8(s);
- free(s);
- return out_s8;
+ return String8();
}
void AudioFlinger::PlaybackThread::ioConfigChanged(audio_io_config_event event, pid_t pid) {
@@ -2293,21 +2161,18 @@
mAudioFlinger->ioConfigChanged(event, desc, pid);
}
-void AudioFlinger::PlaybackThread::writeCallback()
+void AudioFlinger::PlaybackThread::onWriteReady()
{
- ALOG_ASSERT(mCallbackThread != 0);
mCallbackThread->resetWriteBlocked();
}
-void AudioFlinger::PlaybackThread::drainCallback()
+void AudioFlinger::PlaybackThread::onDrainReady()
{
- ALOG_ASSERT(mCallbackThread != 0);
mCallbackThread->resetDraining();
}
-void AudioFlinger::PlaybackThread::errorCallback()
+void AudioFlinger::PlaybackThread::onError()
{
- ALOG_ASSERT(mCallbackThread != 0);
mCallbackThread->setAsyncError();
}
@@ -2331,30 +2196,6 @@
}
}
-// static
-int AudioFlinger::PlaybackThread::asyncCallback(stream_callback_event_t event,
- void *param __unused,
- void *cookie)
-{
- AudioFlinger::PlaybackThread *me = (AudioFlinger::PlaybackThread *)cookie;
- ALOGV("asyncCallback() event %d", event);
- switch (event) {
- case STREAM_CBK_EVENT_WRITE_READY:
- me->writeCallback();
- break;
- case STREAM_CBK_EVENT_DRAIN_READY:
- me->drainCallback();
- break;
- case STREAM_CBK_EVENT_ERROR:
- me->errorCallback();
- break;
- default:
- ALOGW("asyncCallback() unknown event %d", event);
- break;
- }
- return 0;
-}
-
void AudioFlinger::PlaybackThread::readOutputParameters_l()
{
// unfortunately we have no way of recovering from errors here, hence the LOG_ALWAYS_FATAL
@@ -2371,7 +2212,8 @@
mChannelCount = audio_channel_count_from_out_mask(mChannelMask);
// Get actual HAL format.
- mHALFormat = mOutput->stream->common.get_format(&mOutput->stream->common);
+ status_t result = mOutput->stream->getFormat(&mHALFormat);
+ LOG_ALWAYS_FATAL_IF(result != OK, "Error when retrieving output stream format: %d", result);
// Get format from the shim, which will be different than the HAL format
// if playing compressed audio over HDMI passthrough.
mFormat = mOutput->getFormat();
@@ -2384,17 +2226,17 @@
mFormat);
}
mFrameSize = mOutput->getFrameSize();
- mBufferSize = mOutput->stream->common.get_buffer_size(&mOutput->stream->common);
+ result = mOutput->stream->getBufferSize(&mBufferSize);
+ LOG_ALWAYS_FATAL_IF(result != OK,
+ "Error when retrieving output stream buffer size: %d", result);
mFrameCount = mBufferSize / mFrameSize;
if (mFrameCount & 15) {
ALOGW("HAL output buffer size is %zu frames but AudioMixer requires multiples of 16 frames",
mFrameCount);
}
- if ((mOutput->flags & AUDIO_OUTPUT_FLAG_NON_BLOCKING) &&
- (mOutput->stream->set_callback != NULL)) {
- if (mOutput->stream->set_callback(mOutput->stream,
- AudioFlinger::PlaybackThread::asyncCallback, this) == 0) {
+ if (mOutput->flags & AUDIO_OUTPUT_FLAG_NON_BLOCKING) {
+ if (mOutput->stream->setCallback(this) == OK) {
mUseAsyncWrite = true;
mCallbackThread = new AudioFlinger::AsyncCallbackThread(this);
}
@@ -2402,14 +2244,15 @@
mHwSupportsPause = false;
if (mOutput->flags & AUDIO_OUTPUT_FLAG_DIRECT) {
- if (mOutput->stream->pause != NULL) {
- if (mOutput->stream->resume != NULL) {
+ bool supportsPause = false, supportsResume = false;
+ if (mOutput->stream->supportsPauseAndResume(&supportsPause, &supportsResume) == OK) {
+ if (supportsPause && supportsResume) {
mHwSupportsPause = true;
- } else {
+ } else if (supportsPause) {
ALOGW("direct output implements pause but not resume");
+ } else if (supportsResume) {
+ ALOGW("direct output implements resume but not pause");
}
- } else if (mOutput->stream->resume != NULL) {
- ALOGW("direct output implements resume but not pause");
}
}
if (!mHwSupportsPause && mOutput->flags & AUDIO_OUTPUT_FLAG_HW_AV_SYNC) {
@@ -2599,12 +2442,12 @@
}
// this method must always be called either with ThreadBase mLock held or inside the thread loop
-audio_stream_t* AudioFlinger::PlaybackThread::stream() const
+sp<StreamHalInterface> AudioFlinger::PlaybackThread::stream() const
{
if (mOutput == NULL) {
return NULL;
}
- return &mOutput->stream->common;
+ return mOutput->stream;
}
uint32_t AudioFlinger::PlaybackThread::activeSleepTimeUs() const
@@ -2742,7 +2585,8 @@
void AudioFlinger::PlaybackThread::threadLoop_drain()
{
- if (mOutput->stream->drain) {
+ bool supportsDrain = false;
+ if (mOutput->stream->supportsDrain(&supportsDrain) == OK && supportsDrain) {
ALOGV("draining %s", (mMixerStatus == MIXER_DRAIN_TRACK) ? "early" : "full");
if (mUseAsyncWrite) {
ALOGW_IF(mDrainSequence & 1, "threadLoop_drain(): out of sequence drain request");
@@ -2750,9 +2594,8 @@
ALOG_ASSERT(mCallbackThread != 0);
mCallbackThread->setDraining(mDrainSequence);
}
- mOutput->stream->drain(mOutput->stream,
- (mMixerStatus == MIXER_DRAIN_TRACK) ? AUDIO_DRAIN_EARLY_NOTIFY
- : AUDIO_DRAIN_ALL);
+ status_t result = mOutput->stream->drain(mMixerStatus == MIXER_DRAIN_TRACK);
+ ALOGE_IF(result != OK, "Error when draining stream: %d", result);
}
}
@@ -3437,11 +3280,9 @@
}
return status;
}
- if ((mType == OFFLOAD || mType == DIRECT)
- && mOutput != NULL && mOutput->stream->get_presentation_position) {
+ if ((mType == OFFLOAD || mType == DIRECT) && mOutput != NULL) {
uint64_t position64;
- int ret = mOutput->getPresentationPosition(&position64, &timestamp.mTime);
- if (ret == 0) {
+ if (mOutput->getPresentationPosition(&position64, &timestamp.mTime) == OK) {
timestamp.mPosition = (uint32_t)position64;
return NO_ERROR;
}
@@ -3508,14 +3349,13 @@
mOutDevice = type;
mPatch = *patch;
- if (mOutput->audioHwDev->version() >= AUDIO_DEVICE_API_VERSION_3_0) {
- audio_hw_device_t *hwDevice = mOutput->audioHwDev->hwDevice();
- status = hwDevice->create_audio_patch(hwDevice,
- patch->num_sources,
- patch->sources,
- patch->num_sinks,
- patch->sinks,
- handle);
+ if (mOutput->audioHwDev->supportsAudioPatches()) {
+ sp<DeviceHalInterface> hwDevice = mOutput->audioHwDev->hwDevice();
+ status = hwDevice->createAudioPatch(patch->num_sources,
+ patch->sources,
+ patch->num_sinks,
+ patch->sinks,
+ handle);
} else {
char *address;
if (strcmp(patch->sinks[0].ext.device.address, "") != 0) {
@@ -3528,9 +3368,8 @@
}
AudioParameter param = AudioParameter(String8(address));
free(address);
- param.addInt(String8(AUDIO_PARAMETER_STREAM_ROUTING), (int)type);
- status = mOutput->stream->common.set_parameters(&mOutput->stream->common,
- param.toString().string());
+ param.addInt(String8(AudioParameter::keyRouting), (int)type);
+ status = mOutput->stream->setParameters(param.toString());
*handle = AUDIO_PATCH_HANDLE_NONE;
}
if (configChanged) {
@@ -3560,14 +3399,13 @@
mOutDevice = AUDIO_DEVICE_NONE;
- if (mOutput->audioHwDev->version() >= AUDIO_DEVICE_API_VERSION_3_0) {
- audio_hw_device_t *hwDevice = mOutput->audioHwDev->hwDevice();
- status = hwDevice->release_audio_patch(hwDevice, handle);
+ if (mOutput->audioHwDev->supportsAudioPatches()) {
+ sp<DeviceHalInterface> hwDevice = mOutput->audioHwDev->hwDevice();
+ status = hwDevice->releaseAudioPatch(handle);
} else {
AudioParameter param;
- param.addInt(String8(AUDIO_PARAMETER_STREAM_ROUTING), 0);
- status = mOutput->stream->common.set_parameters(&mOutput->stream->common,
- param.toString().string());
+ param.addInt(String8(AudioParameter::keyRouting), 0);
+ status = mOutput->stream->setParameters(param.toString());
}
return status;
}
@@ -4133,8 +3971,11 @@
// We have consumed all the buffers of this track.
// This would be incomplete if we auto-paused on underrun
{
- size_t audioHALFrames =
- (mOutput->stream->get_latency(mOutput->stream)*mSampleRate) / 1000;
+ uint32_t latency = 0;
+ status_t result = mOutput->stream->getLatency(&latency);
+ ALOGE_IF(result != OK,
+ "Error when retrieving output stream latency: %d", result);
+ size_t audioHALFrames = (latency * mSampleRate) / 1000;
int64_t framesWritten = mBytesWritten / mFrameSize;
if (!(mStandby || track->presentationComplete(framesWritten, audioHALFrames))) {
// track stays in active list until presentation is complete
@@ -4302,7 +4143,7 @@
// read original volumes with volume control
float typeVolume = mStreamTypes[track->streamType()].volume;
float v = masterVolume * typeVolume;
- AudioTrackServerProxy *proxy = track->mAudioTrackServerProxy;
+ sp<AudioTrackServerProxy> proxy = track->mAudioTrackServerProxy;
gain_minifloat_packed_t vlr = proxy->getVolumeLR();
vlf = float_from_gain(gain_minifloat_unpack_left(vlr));
vrf = float_from_gain(gain_minifloat_unpack_right(vlr));
@@ -4587,7 +4428,7 @@
{
uint32_t trackCount = 0;
for (size_t i = 0; i < mTracks.size() ; i++) {
- if (mTracks[i]->uid() == (int)uid) {
+ if (mTracks[i]->uid() == uid) {
trackCount++;
}
}
@@ -4690,14 +4531,12 @@
}
if (status == NO_ERROR) {
- status = mOutput->stream->common.set_parameters(&mOutput->stream->common,
- keyValuePair.string());
+ status = mOutput->stream->setParameters(keyValuePair);
if (!mStandby && status == INVALID_OPERATION) {
mOutput->standby();
mStandby = true;
mBytesWritten = 0;
- status = mOutput->stream->common.set_parameters(&mOutput->stream->common,
- keyValuePair.string());
+ status = mOutput->stream->setParameters(keyValuePair);
}
if (status == NO_ERROR && reconfig) {
readOutputParameters_l();
@@ -4807,7 +4646,7 @@
} else {
float typeVolume = mStreamTypes[track->streamType()].volume;
float v = mMasterVolume * typeVolume;
- AudioTrackServerProxy *proxy = track->mAudioTrackServerProxy;
+ sp<AudioTrackServerProxy> proxy = track->mAudioTrackServerProxy;
gain_minifloat_packed_t vlr = proxy->getVolumeLR();
left = float_from_gain(gain_minifloat_unpack_left(vlr));
if (left > GAIN_FLOAT_UNITY) {
@@ -4838,9 +4677,8 @@
left = (float)vl / (1 << 24);
right = (float)vr / (1 << 24);
}
- if (mOutput->stream->set_volume) {
- mOutput->stream->set_volume(mOutput->stream, left, right);
- }
+ status_t result = mOutput->stream->setVolume(left, right);
+ ALOGE_IF(result != OK, "Error when setting output stream volume: %d", result);
}
}
}
@@ -5050,13 +4888,15 @@
// if resume is received before pause is executed.
if (mHwSupportsPause && !mStandby &&
(doHwPause || (mFlushPending && !mHwPaused && (count != 0)))) {
- mOutput->stream->pause(mOutput->stream);
+ status_t result = mOutput->stream->pause();
+ ALOGE_IF(result != OK, "Error when pausing output stream: %d", result);
}
if (mFlushPending) {
flushHw_l();
}
if (mHwSupportsPause && !mStandby && doHwResume) {
- mOutput->stream->resume(mOutput->stream);
+ status_t result = mOutput->stream->resume();
+ ALOGE_IF(result != OK, "Error when resuming output stream: %d", result);
}
// remove all the tracks that need to be...
removeTracks_l(*tracksToRemove);
@@ -5197,14 +5037,12 @@
}
}
if (status == NO_ERROR) {
- status = mOutput->stream->common.set_parameters(&mOutput->stream->common,
- keyValuePair.string());
+ status = mOutput->stream->setParameters(keyValuePair);
if (!mStandby && status == INVALID_OPERATION) {
mOutput->standby();
mStandby = true;
mBytesWritten = 0;
- status = mOutput->stream->common.set_parameters(&mOutput->stream->common,
- keyValuePair.string());
+ status = mOutput->stream->setParameters(keyValuePair);
}
if (status == NO_ERROR && reconfig) {
readOutputParameters_l();
@@ -5591,8 +5429,11 @@
// Drain has completed or we are in standby, signal presentation complete
if (!(mDrainSequence & 1) || !last || mStandby) {
track->mState = TrackBase::STOPPED;
- size_t audioHALFrames =
- (mOutput->stream->get_latency(mOutput->stream)*mSampleRate) / 1000;
+ uint32_t latency = 0;
+ status_t result = mOutput->stream->getLatency(&latency);
+ ALOGE_IF(result != OK,
+ "Error when retrieving output stream latency: %d", result);
+ size_t audioHALFrames = (latency * mSampleRate) / 1000;
int64_t framesWritten =
mBytesWritten / mOutput->getFrameSize();
track->presentationComplete(framesWritten, audioHALFrames);
@@ -5604,16 +5445,15 @@
// fill a buffer, then remove it from active list.
if (--(track->mRetryCount) <= 0) {
bool running = false;
- if (mOutput->stream->get_presentation_position != nullptr) {
- uint64_t position = 0;
- struct timespec unused;
- // The running check restarts the retry counter at least once.
- int ret = mOutput->stream->get_presentation_position(
- mOutput->stream, &position, &unused);
- if (ret == NO_ERROR && position != mOffloadUnderrunPosition) {
- running = true;
- mOffloadUnderrunPosition = position;
- }
+ uint64_t position = 0;
+ struct timespec unused;
+ // The running check restarts the retry counter at least once.
+ status_t ret = mOutput->stream->getPresentationPosition(&position, &unused);
+ if (ret == NO_ERROR && position != mOffloadUnderrunPosition) {
+ running = true;
+ mOffloadUnderrunPosition = position;
+ }
+ if (ret == NO_ERROR) {
ALOGVV("underrun counter, running(%d): %lld vs %lld", running,
(long long)position, (long long)mOffloadUnderrunPosition);
}
@@ -5641,13 +5481,15 @@
// before flush and then resume HW. This can happen in case of pause/flush/resume
// if resume is received before pause is executed.
if (!mStandby && (doHwPause || (mFlushPending && !mHwPaused && (count != 0)))) {
- mOutput->stream->pause(mOutput->stream);
+ status_t result = mOutput->stream->pause();
+ ALOGE_IF(result != OK, "Error when pausing output stream: %d", result);
}
if (mFlushPending) {
flushHw_l();
}
if (!mStandby && doHwResume) {
- mOutput->stream->resume(mOutput->stream);
+ status_t result = mOutput->stream->resume();
+ ALOGE_IF(result != OK, "Error when resuming output stream: %d", result);
}
// remove all the tracks that need to be...
@@ -5899,7 +5741,7 @@
) :
ThreadBase(audioFlinger, id, outDevice, inDevice, RECORD, systemReady),
mInput(input), mActiveTracksGen(0), mRsmpInBuffer(NULL),
- // mRsmpInFrames and mRsmpInFramesP2 are set by readInputParameters_l()
+ // mRsmpInFrames, mRsmpInFramesP2, and mRsmpInFramesOA are set by readInputParameters_l()
mRsmpInRear(0)
#ifdef TEE_SINK
, mTeeSink(teeSink)
@@ -5951,7 +5793,8 @@
if (initFastCapture) {
// create a Pipe for FastCapture to write to, and for us and fast tracks to read from
NBAIO_Format format = mInputSource->format();
- size_t pipeFramesP2 = roundup(mSampleRate / 25); // double-buffering of 20 ms each
+ // quadruple-buffering of 20 ms each; this ensures we can sleep for 20ms in RecordThread
+ size_t pipeFramesP2 = roundup(4 * FMS_20 * mSampleRate / 1000);
size_t pipeSize = pipeFramesP2 * Format_frameSize(format);
void *pipeBuffer;
const sp<MemoryDealer> roHeap(readOnlyHeap());
@@ -6281,20 +6124,41 @@
// If an NBAIO source is present, use it to read the normal capture's data
if (mPipeSource != 0) {
size_t framesToRead = mBufferSize / mFrameSize;
+ framesToRead = min(mRsmpInFramesOA - rear, mRsmpInFramesP2 / 2);
framesRead = mPipeSource->read((uint8_t*)mRsmpInBuffer + rear * mFrameSize,
framesToRead);
- if (framesRead == 0) {
- // since pipe is non-blocking, simulate blocking input
- sleepUs = (framesToRead * 1000000LL) / mSampleRate;
+ // since pipe is non-blocking, simulate blocking input by waiting for 1/2 of
+ // buffer size or at least for 20ms.
+ size_t sleepFrames = max(
+ min(mPipeFramesP2, mRsmpInFramesP2) / 2, FMS_20 * mSampleRate / 1000);
+ if (framesRead <= (ssize_t) sleepFrames) {
+ sleepUs = (sleepFrames * 1000000LL) / mSampleRate;
+ }
+ if (framesRead < 0) {
+ status_t status = (status_t) framesRead;
+ switch (status) {
+ case OVERRUN:
+ ALOGW("overrun on read from pipe");
+ framesRead = 0;
+ break;
+ case NEGOTIATE:
+ ALOGE("re-negotiation is needed");
+ framesRead = -1; // Will cause an attempt to recover.
+ break;
+ default:
+ ALOGE("unknown error %d on read from pipe", status);
+ break;
+ }
}
// otherwise use the HAL / AudioStreamIn directly
} else {
ATRACE_BEGIN("read");
- ssize_t bytesRead = mInput->stream->read(mInput->stream,
- (uint8_t*)mRsmpInBuffer + rear * mFrameSize, mBufferSize);
+ size_t bytesRead;
+ status_t result = mInput->stream->read(
+ (uint8_t*)mRsmpInBuffer + rear * mFrameSize, mBufferSize, &bytesRead);
ATRACE_END();
- if (bytesRead < 0) {
- framesRead = bytesRead;
+ if (result < 0) {
+ framesRead = result;
} else {
framesRead = bytesRead / mFrameSize;
}
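The RecordThread hunk above sizes the FastCapture pipe for quadruple-buffering of 20 ms chunks and, because the pipe is non-blocking, sleeps for roughly 20 ms worth of frames when too little data was read. The sketch below runs the arithmetic with concrete numbers at 48 kHz; it assumes FMS_20 is the 20 ms constant its name suggests, uses a stand-in roundup(), and simplifies the sleepFrames clamp to its 20 ms floor.

// Illustrative only: concrete numbers for the pipe sizing and simulated
// blocking above. Not the real AudioFlinger code.
#include <cstdint>
#include <cstdio>

static size_t roundup(size_t v) {            // next power of two, stand-in
    size_t p = 1;
    while (p < v) p <<= 1;
    return p;
}

int main() {
    const uint32_t FMS_20 = 20;              // assumed: 20 ms, in milliseconds
    const uint32_t sampleRate = 48000;

    // Pipe sized for quadruple-buffering of 20 ms chunks: 3840 -> 4096 frames.
    size_t pipeFramesP2 = roundup(4 * FMS_20 * sampleRate / 1000);

    // Simulated blocking: sleep for at least 20 ms worth of frames.
    size_t sleepFrames = FMS_20 * sampleRate / 1000;                 // 960
    int64_t sleepUs = (int64_t)sleepFrames * 1000000LL / sampleRate; // 20000 us

    std::printf("pipeFramesP2=%zu sleepFrames=%zu sleepUs=%lld\n",
                pipeFramesP2, sleepFrames, (long long)sleepUs);
    return 0;
}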
@@ -6306,10 +6170,9 @@
mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER] = systemTime();
// Update server timestamp with kernel stats
- if (mInput->stream->get_capture_position != nullptr
- && mPipeSource.get() == nullptr /* don't obtain for FastCapture, could block */) {
+ if (mPipeSource.get() == nullptr /* don't obtain for FastCapture, could block */) {
int64_t position, time;
- int ret = mInput->stream->get_capture_position(mInput->stream, &position, &time);
+ int ret = mInput->stream->getCapturePosition(&position, &time);
if (ret == NO_ERROR) {
mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL] = position;
mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] = time;
@@ -6518,7 +6381,18 @@
sq->end(false /*didModify*/);
}
}
- mInput->stream->common.standby(&mInput->stream->common);
+ status_t result = mInput->stream->standby();
+ ALOGE_IF(result != OK, "Error when putting input stream into standby: %d", result);
+
+ // If going into standby, flush the pipe source.
+ if (mPipeSource.get() != nullptr) {
+ const ssize_t flushed = mPipeSource->flush();
+ if (flushed > 0) {
+ ALOGV("Input standby flushed PipeSource %zd frames", flushed);
+ mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER] += flushed;
+ mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER] = systemTime();
+ }
+ }
}
// RecordThread::createRecordTrack_l() must be called with AudioFlinger::mLock held
@@ -6530,7 +6404,7 @@
size_t *pFrameCount,
audio_session_t sessionId,
size_t *notificationFrames,
- int uid,
+ uid_t uid,
audio_input_flags_t *flags,
pid_t tid,
status_t *status)
@@ -6861,6 +6735,10 @@
dumpBase(fd, args);
+ AudioStreamIn *input = mInput;
+ audio_input_flags_t flags = input != NULL ? input->flags : AUDIO_INPUT_FLAG_NONE;
+ dprintf(fd, " AudioStreamIn: %p flags %#x (%s)\n",
+ input, flags, inputFlagsToString(flags).c_str());
if (mActiveTracks.size() == 0) {
dprintf(fd, " No active record clients\n");
}
@@ -7361,22 +7239,22 @@
}
if (status == NO_ERROR) {
- status = mInput->stream->common.set_parameters(&mInput->stream->common,
- keyValuePair.string());
+ status = mInput->stream->setParameters(keyValuePair);
if (status == INVALID_OPERATION) {
inputStandBy();
- status = mInput->stream->common.set_parameters(&mInput->stream->common,
- keyValuePair.string());
+ status = mInput->stream->setParameters(keyValuePair);
}
if (reconfig) {
- if (status == BAD_VALUE &&
- audio_is_linear_pcm(mInput->stream->common.get_format(&mInput->stream->common)) &&
- audio_is_linear_pcm(reqFormat) &&
- (mInput->stream->common.get_sample_rate(&mInput->stream->common)
- <= (AUDIO_RESAMPLER_DOWN_RATIO_MAX * samplingRate)) &&
- audio_channel_count_from_in_mask(
- mInput->stream->common.get_channels(&mInput->stream->common)) <= FCC_8) {
- status = NO_ERROR;
+ if (status == BAD_VALUE) {
+ uint32_t sRate;
+ audio_channel_mask_t channelMask;
+ audio_format_t format;
+ if (mInput->stream->getAudioProperties(&sRate, &channelMask, &format) == OK &&
+ audio_is_linear_pcm(format) && audio_is_linear_pcm(reqFormat) &&
+ sRate <= (AUDIO_RESAMPLER_DOWN_RATIO_MAX * samplingRate) &&
+ audio_channel_count_from_in_mask(channelMask) <= FCC_8) {
+ status = NO_ERROR;
+ }
}
if (status == NO_ERROR) {
readInputParameters_l();
@@ -7391,14 +7269,13 @@
String8 AudioFlinger::RecordThread::getParameters(const String8& keys)
{
Mutex::Autolock _l(mLock);
- if (initCheck() != NO_ERROR) {
- return String8();
+ if (initCheck() == NO_ERROR) {
+ String8 out_s8;
+ if (mInput->stream->getParameters(keys, &out_s8) == OK) {
+ return out_s8;
+ }
}
-
- char *s = mInput->stream->common.get_parameters(&mInput->stream->common, keys.string());
- const String8 out_s8(s);
- free(s);
- return out_s8;
+ return String8();
}
void AudioFlinger::RecordThread::ioConfigChanged(audio_io_config_event event, pid_t pid) {
@@ -7427,19 +7304,16 @@
void AudioFlinger::RecordThread::readInputParameters_l()
{
- mSampleRate = mInput->stream->common.get_sample_rate(&mInput->stream->common);
- mChannelMask = mInput->stream->common.get_channels(&mInput->stream->common);
+ status_t result = mInput->stream->getAudioProperties(&mSampleRate, &mChannelMask, &mHALFormat);
+ LOG_ALWAYS_FATAL_IF(result != OK, "Error retrieving audio properties from HAL: %d", result);
mChannelCount = audio_channel_count_from_in_mask(mChannelMask);
- if (mChannelCount > FCC_8) {
- ALOGE("HAL channel count %d > %d", mChannelCount, FCC_8);
- }
- mHALFormat = mInput->stream->common.get_format(&mInput->stream->common);
+ LOG_ALWAYS_FATAL_IF(mChannelCount > FCC_8, "HAL channel count %d > %d", mChannelCount, FCC_8);
mFormat = mHALFormat;
- if (!audio_is_linear_pcm(mFormat)) {
- ALOGE("HAL format %#x is not linear pcm", mFormat);
- }
- mFrameSize = audio_stream_in_frame_size(mInput->stream);
- mBufferSize = mInput->stream->common.get_buffer_size(&mInput->stream->common);
+ LOG_ALWAYS_FATAL_IF(!audio_is_linear_pcm(mFormat), "HAL format %#x is not linear pcm", mFormat);
+ result = mInput->stream->getFrameSize(&mFrameSize);
+ LOG_ALWAYS_FATAL_IF(result != OK, "Error retrieving frame size from HAL: %d", result);
+ result = mInput->stream->getBufferSize(&mBufferSize);
+ LOG_ALWAYS_FATAL_IF(result != OK, "Error retrieving buffer size from HAL: %d", result);
mFrameCount = mBufferSize / mFrameSize;
// This is the formula for calculating the temporary buffer size.
// With 7 HAL buffers, we can guarantee ability to down-sample the input by ratio of 6:1 to
@@ -7462,9 +7336,9 @@
// The current value is higher than necessary. However it should not add to latency.
// Over-allocate beyond mRsmpInFramesP2 to permit a HAL read past end of buffer
- size_t bufferSize = (mRsmpInFramesP2 + mFrameCount - 1) * mFrameSize;
- (void)posix_memalign(&mRsmpInBuffer, 32, bufferSize);
- memset(mRsmpInBuffer, 0, bufferSize); // if posix_memalign fails, will segv here.
+ mRsmpInFramesOA = mRsmpInFramesP2 + mFrameCount - 1;
+ (void)posix_memalign(&mRsmpInBuffer, 32, mRsmpInFramesOA * mFrameSize);
+ memset(mRsmpInBuffer, 0, mRsmpInFramesOA * mFrameSize); // if posix_memalign fails, will segv here.
// AudioRecord mSampleRate and mChannelCount are constant due to AudioRecord API constraints.
// But if thread's mSampleRate or mChannelCount changes, how will that affect active tracks?
@@ -7473,11 +7347,11 @@
uint32_t AudioFlinger::RecordThread::getInputFramesLost()
{
Mutex::Autolock _l(mLock);
- if (initCheck() != NO_ERROR) {
- return 0;
+ uint32_t result;
+ if (initCheck() == NO_ERROR && mInput->stream->getInputFramesLost(&result) == OK) {
+ return result;
}
-
- return mInput->stream->get_input_frames_lost(mInput->stream);
+ return 0;
}
// hasAudioSession_l() must be called with ThreadBase::mLock held
@@ -7524,12 +7398,12 @@
}
// this method must always be called either with ThreadBase mLock held or inside the thread loop
-audio_stream_t* AudioFlinger::RecordThread::stream() const
+sp<StreamHalInterface> AudioFlinger::RecordThread::stream() const
{
if (mInput == NULL) {
return NULL;
}
- return &mInput->stream->common;
+ return mInput->stream;
}
status_t AudioFlinger::RecordThread::addEffectChain_l(const sp<EffectChain>& chain)
@@ -7599,14 +7473,13 @@
}
}
- if (mInput->audioHwDev->version() >= AUDIO_DEVICE_API_VERSION_3_0) {
- audio_hw_device_t *hwDevice = mInput->audioHwDev->hwDevice();
- status = hwDevice->create_audio_patch(hwDevice,
- patch->num_sources,
- patch->sources,
- patch->num_sinks,
- patch->sinks,
- handle);
+ if (mInput->audioHwDev->supportsAudioPatches()) {
+ sp<DeviceHalInterface> hwDevice = mInput->audioHwDev->hwDevice();
+ status = hwDevice->createAudioPatch(patch->num_sources,
+ patch->sources,
+ patch->num_sinks,
+ patch->sinks,
+ handle);
} else {
char *address;
if (strcmp(patch->sources[0].ext.device.address, "") != 0) {
@@ -7618,12 +7491,11 @@
}
AudioParameter param = AudioParameter(String8(address));
free(address);
- param.addInt(String8(AUDIO_PARAMETER_STREAM_ROUTING),
+ param.addInt(String8(AudioParameter::keyRouting),
(int)patch->sources[0].ext.device.type);
- param.addInt(String8(AUDIO_PARAMETER_STREAM_INPUT_SOURCE),
+ param.addInt(String8(AudioParameter::keyInputSource),
(int)patch->sinks[0].ext.mix.usecase.source);
- status = mInput->stream->common.set_parameters(&mInput->stream->common,
- param.toString().string());
+ status = mInput->stream->setParameters(param.toString());
*handle = AUDIO_PATCH_HANDLE_NONE;
}
@@ -7641,14 +7513,13 @@
mInDevice = AUDIO_DEVICE_NONE;
- if (mInput->audioHwDev->version() >= AUDIO_DEVICE_API_VERSION_3_0) {
- audio_hw_device_t *hwDevice = mInput->audioHwDev->hwDevice();
- status = hwDevice->release_audio_patch(hwDevice, handle);
+ if (mInput->audioHwDev->supportsAudioPatches()) {
+ sp<DeviceHalInterface> hwDevice = mInput->audioHwDev->hwDevice();
+ status = hwDevice->releaseAudioPatch(handle);
} else {
AudioParameter param;
- param.addInt(String8(AUDIO_PARAMETER_STREAM_ROUTING), 0);
- status = mInput->stream->common.set_parameters(&mInput->stream->common,
- param.toString().string());
+ param.addInt(String8(AudioParameter::keyRouting), 0);
+ status = mInput->stream->setParameters(param.toString());
}
return status;
}
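The Threads.cpp hunks above replace direct calls through the legacy audio_stream_t function-pointer table with wrapper methods that return a status_t and deliver results through out-parameters (getFormat, getBufferSize, getLatency, setVolume, pause/resume). The sketch below models only that call pattern; StreamModel, its placeholder values, and the error code are hypothetical stand-ins, not the real StreamHalInterface from libaudiohal.

// Illustrative only: status_t/out-parameter call pattern, modeled.
#include <cstddef>
#include <cstdint>
#include <cstdio>

using status_t = int32_t;
constexpr status_t OK = 0;
constexpr status_t UNKNOWN_ERROR = -1;   // placeholder error code

struct StreamModel {
    // Each query reports success/failure separately from the value it returns.
    status_t getLatency(uint32_t* latencyMs) const {
        if (latencyMs == nullptr) return UNKNOWN_ERROR;
        *latencyMs = 40;                 // placeholder value
        return OK;
    }
    status_t getBufferSize(size_t* bytes) const {
        if (bytes == nullptr) return UNKNOWN_ERROR;
        *bytes = 3840;                   // placeholder value
        return OK;
    }
};

int main() {
    StreamModel stream;
    uint32_t latency = 0;
    size_t bufferSize = 0;
    // Caller-side pattern mirrored from the hunks: check the status, then use the value.
    if (stream.getLatency(&latency) != OK) {
        std::fprintf(stderr, "Error when retrieving output stream latency\n");
    }
    if (stream.getBufferSize(&bufferSize) != OK) {
        std::fprintf(stderr, "Error when retrieving output stream buffer size\n");
    }
    std::printf("latency=%ums bufferSize=%zu\n", latency, bufferSize);
    return 0;
}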
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index a55655f..5235cde 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -286,7 +286,7 @@
audio_devices_t outDevice() const { return mOutDevice; }
audio_devices_t inDevice() const { return mInDevice; }
- virtual audio_stream_t* stream() const = 0;
+ virtual sp<StreamHalInterface> stream() const = 0;
sp<EffectHandle> createEffect_l(
const sp<AudioFlinger::Client>& client,
@@ -404,7 +404,6 @@
virtual void acquireWakeLock_l(int uid = -1);
void releaseWakeLock();
void releaseWakeLock_l();
- void updateWakeLockUids(const SortedVector<int> &uids);
void updateWakeLockUids_l(const SortedVector<int> &uids);
void getPowerManager_l();
void setEffectSuspended_l(const effect_uuid_t *type,
@@ -484,7 +483,7 @@
};
// --- PlaybackThread ---
-class PlaybackThread : public ThreadBase {
+class PlaybackThread : public ThreadBase, public StreamOutHalInterfaceCallback {
public:
#include "PlaybackTracks.h"
@@ -541,13 +540,13 @@
virtual mixer_state prepareTracks_l(Vector< sp<Track> > *tracksToRemove) = 0;
void removeTracks_l(const Vector< sp<Track> >& tracksToRemove);
- void writeCallback();
- void resetWriteBlocked(uint32_t sequence);
- void drainCallback();
- void resetDraining(uint32_t sequence);
- void errorCallback();
+ // StreamOutHalInterfaceCallback implementation
+ virtual void onWriteReady();
+ virtual void onDrainReady();
+ virtual void onError();
- static int asyncCallback(stream_callback_event_t event, void *param, void *cookie);
+ void resetWriteBlocked(uint32_t sequence);
+ void resetDraining(uint32_t sequence);
virtual bool waitingAsyncCallback();
virtual bool waitingAsyncCallback_l();
@@ -588,12 +587,12 @@
audio_session_t sessionId,
audio_output_flags_t *flags,
pid_t tid,
- int uid,
+ uid_t uid,
status_t *status /*non-NULL*/);
AudioStreamOut* getOutput() const;
AudioStreamOut* clearOutput();
- virtual audio_stream_t* stream() const;
+ virtual sp<StreamHalInterface> stream() const;
// a very large number of suspend() will eventually wraparound, but unlikely
void suspend() { (void) android_atomic_inc(&mSuspended); }
@@ -1286,7 +1285,7 @@
size_t *pFrameCount,
audio_session_t sessionId,
size_t *notificationFrames,
- int uid,
+ uid_t uid,
audio_input_flags_t *flags,
pid_t tid,
status_t *status /*non-NULL*/);
@@ -1301,7 +1300,7 @@
void dump(int fd, const Vector<String16>& args);
AudioStreamIn* clearInput();
- virtual audio_stream_t* stream() const;
+ virtual sp<StreamHalInterface> stream() const;
virtual bool checkForNewParameter_l(const String8& keyValuePair,
@@ -1357,9 +1356,10 @@
Condition mStartStopCond;
// resampler converts input at HAL Hz to output at AudioRecord client Hz
- void *mRsmpInBuffer; //
+ void *mRsmpInBuffer; // size = mRsmpInFramesOA
size_t mRsmpInFrames; // size of resampler input in frames
size_t mRsmpInFramesP2;// size rounded up to a power-of-2
+ size_t mRsmpInFramesOA;// mRsmpInFramesP2 + over-allocation
// rolling index that is never cleared
int32_t mRsmpInRear; // last filled frame + 1
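The Threads.h hunk above swaps the static asyncCallback trampoline (with its void* cookie) for virtual methods inherited from StreamOutHalInterfaceCallback. A minimal sketch of that shape, using simplified stand-in names (StreamCallback, PlaybackThreadModel) rather than the real libaudiohal declarations:

// Illustrative only: callback-interface pattern replacing a static trampoline.
#include <cstdio>

class StreamCallback {                   // stands in for StreamOutHalInterfaceCallback
public:
    virtual ~StreamCallback() = default;
    virtual void onWriteReady() {}
    virtual void onDrainReady() {}
    virtual void onError() {}
};

class PlaybackThreadModel : public StreamCallback {
public:
    // The HAL wrapper calls these directly; no void* cookie cast is needed.
    void onWriteReady() override { std::puts("write ready"); }
    void onDrainReady() override { std::puts("drain ready"); }
    void onError() override      { std::puts("stream error"); }
};

int main() {
    PlaybackThreadModel thread;
    StreamCallback* cb = &thread;        // conceptually what setCallback(this) registers
    cb->onWriteReady();
    cb->onDrainReady();
    return 0;
}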
diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h
index 6b97246..4fcb596 100644
--- a/services/audioflinger/TrackBase.h
+++ b/services/audioflinger/TrackBase.h
@@ -62,7 +62,7 @@
size_t frameCount,
void *buffer,
audio_session_t sessionId,
- int uid,
+ uid_t uid,
bool isOut,
alloc_type alloc = ALLOC_CBLK,
track_type type = TYPE_DEFAULT);
@@ -75,7 +75,7 @@
sp<IMemory> getCblk() const { return mCblkMemory; }
audio_track_cblk_t* cblk() const { return mCblk; }
audio_session_t sessionId() const { return mSessionId; }
- int uid() const { return mUid; }
+ uid_t uid() const { return mUid; }
virtual status_t setSyncEvent(const sp<SyncEvent>& event);
sp<IMemory> getBuffers() const { return mBufferMemory; }
@@ -153,10 +153,10 @@
// openRecord(), and then adjusted as needed
const audio_session_t mSessionId;
- int mUid;
+ uid_t mUid;
Vector < sp<SyncEvent> >mSyncEvents;
const bool mIsOut;
- ServerProxy* mServerProxy;
+ sp<ServerProxy> mServerProxy;
const int mId;
sp<NBAIO_Sink> mTeeSink;
sp<NBAIO_Source> mTeeSource;
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index ba6e6e5..8f134c1 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -52,7 +52,7 @@
// TODO move to a common header (Also shared with AudioTrack.cpp)
#define NANOS_PER_SECOND 1000000000
-#define TIME_TO_NANOS(time) ((uint64_t)time.tv_sec * NANOS_PER_SECOND + time.tv_nsec)
+#define TIME_TO_NANOS(time) ((uint64_t)(time).tv_sec * NANOS_PER_SECOND + (time).tv_nsec)
namespace android {
@@ -72,7 +72,7 @@
size_t frameCount,
void *buffer,
audio_session_t sessionId,
- int clientUid,
+ uid_t clientUid,
bool isOut,
alloc_type alloc,
track_type type)
@@ -93,17 +93,16 @@
mFrameCount(frameCount),
mSessionId(sessionId),
mIsOut(isOut),
- mServerProxy(NULL),
mId(android_atomic_inc(&nextTrackId)),
mTerminated(false),
mType(type),
mThreadIoHandle(thread->id())
{
const uid_t callingUid = IPCThreadState::self()->getCallingUid();
- if (!isTrustedCallingUid(callingUid) || clientUid == -1) {
- ALOGW_IF(clientUid != -1 && clientUid != (int)callingUid,
+ if (!isTrustedCallingUid(callingUid) || clientUid == AUDIO_UID_INVALID) {
+ ALOGW_IF(clientUid != AUDIO_UID_INVALID && clientUid != callingUid,
"%s uid %d tried to pass itself off as %d", __FUNCTION__, callingUid, clientUid);
- clientUid = (int)callingUid;
+ clientUid = callingUid;
}
// clientUid contains the uid of the app that is responsible for this track, so we can blame
// battery usage on it.
@@ -218,7 +217,7 @@
dumpTee(-1, mTeeSource, mId);
#endif
// delete the proxy before deleting the shared memory it refers to, to avoid dangling reference
- delete mServerProxy;
+ mServerProxy.clear();
if (mCblk != NULL) {
if (mClient == 0) {
delete mCblk;
@@ -342,7 +341,7 @@
void *buffer,
const sp<IMemory>& sharedBuffer,
audio_session_t sessionId,
- int uid,
+ uid_t uid,
audio_output_flags_t flags,
track_type type)
: TrackBase(thread, client, sampleRate, format, channelMask, frameCount,
@@ -364,7 +363,6 @@
mFastIndex(-1),
mCachedVolume(1.0),
mIsInvalid(false),
- mAudioTrackServerProxy(NULL),
mResumeToStopping(false),
mFlushHwPending(false),
mFlags(flags)
@@ -1140,12 +1138,12 @@
audio_format_t format,
audio_channel_mask_t channelMask,
size_t frameCount,
- int uid)
+ uid_t uid)
: Track(playbackThread, NULL, AUDIO_STREAM_PATCH,
sampleRate, format, channelMask, frameCount,
NULL, 0, AUDIO_SESSION_NONE, uid, AUDIO_OUTPUT_FLAG_NONE,
TYPE_OUTPUT),
- mActive(false), mSourceThread(sourceThread), mClientProxy(NULL)
+ mActive(false), mSourceThread(sourceThread)
{
if (mCblk != NULL) {
@@ -1170,7 +1168,6 @@
AudioFlinger::PlaybackThread::OutputTrack::~OutputTrack()
{
clearBufferQueue();
- delete mClientProxy;
// superclass destructor will now delete the server proxy and shared memory both refer to
}
@@ -1477,7 +1474,7 @@
size_t frameCount,
void *buffer,
audio_session_t sessionId,
- int uid,
+ uid_t uid,
audio_input_flags_t flags,
track_type type)
: TrackBase(thread, client, sampleRate, format,
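The TrackBase.h and Tracks.cpp hunks above move mServerProxy (and the OutputTrack client proxy) from raw pointers with manual delete to sp<> references that are simply cleared in the destructor. A sketch of that ownership change, using std::shared_ptr as a stand-in for android::sp and a hypothetical ServerProxyModel type:

// Illustrative only: ref-counted ownership replacing manual delete.
#include <cstdio>
#include <memory>

struct ServerProxyModel {
    ~ServerProxyModel() { std::puts("proxy destroyed"); }
};

int main() {
    std::shared_ptr<ServerProxyModel> serverProxy = std::make_shared<ServerProxyModel>();
    // Old pattern: `delete mServerProxy;` plus NULL bookkeeping in the destructor.
    // New pattern: dropping the last reference frees the object.
    serverProxy.reset();   // equivalent in spirit to mServerProxy.clear()
    std::puts("shared memory can now be released safely");
    return 0;
}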
diff --git a/services/audioflinger/tests/Android.mk b/services/audioflinger/tests/Android.mk
index 3505e0f..a741079 100644
--- a/services/audioflinger/tests/Android.mk
+++ b/services/audioflinger/tests/Android.mk
@@ -34,11 +34,10 @@
LOCAL_SRC_FILES:= \
test-mixer.cpp \
- ../AudioMixer.cpp.arm \
+ ../AudioMixer.cpp.arm \
../BufferProviders.cpp
LOCAL_C_INCLUDES := \
- $(call include-path-for, audio-effects) \
$(call include-path-for, audio-utils) \
frameworks/av/services/audioflinger \
external/sonic
@@ -47,6 +46,7 @@
libsndfile
LOCAL_SHARED_LIBRARIES := \
+ libaudiohal \
libeffects \
libnbaio \
libaudioresampler \
diff --git a/services/audiopolicy/Android.mk b/services/audiopolicy/Android.mk
index 69d22c0..3fb545d 100644
--- a/services/audiopolicy/Android.mk
+++ b/services/audiopolicy/Android.mk
@@ -20,7 +20,6 @@
LOCAL_C_INCLUDES := \
$(TOPDIR)frameworks/av/services/audioflinger \
- $(call include-path-for, audio-effects) \
$(call include-path-for, audio-utils) \
$(TOPDIR)frameworks/av/services/audiopolicy/common/include \
$(TOPDIR)frameworks/av/services/audiopolicy/engine/interface \
@@ -91,9 +90,10 @@
$(TOPDIR)frameworks/av/services/audiopolicy/utilities
LOCAL_STATIC_LIBRARIES := \
- libmedia_helper \
libaudiopolicycomponents
+LOCAL_WHOLE_STATIC_LIBRARIES := libmedia_helper
+
ifeq ($(USE_XML_AUDIO_POLICY_CONF), 1)
LOCAL_STATIC_LIBRARIES += libxml2
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index bb8a8fa..c60b49a 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -21,8 +21,6 @@
#include <media/AudioPolicy.h>
#include <utils/String8.h>
-#include <hardware/audio_policy.h>
-
namespace android {
// ----------------------------------------------------------------------------
@@ -67,6 +65,16 @@
API_INPUT_TELEPHONY_RX, // used for capture from telephony RX path
} input_type_t;
+ enum {
+ API_INPUT_CONCURRENCY_NONE = 0,
+ API_INPUT_CONCURRENCY_CALL = (1 << 0), // Concurrency with a call
+ API_INPUT_CONCURRENCY_CAPTURE = (1 << 1), // Concurrency with another capture
+
+ API_INPUT_CONCURRENCY_ALL = (API_INPUT_CONCURRENCY_CALL | API_INPUT_CONCURRENCY_CAPTURE),
+ };
+
+ typedef uint32_t concurrency_type__mask_t;
+
public:
virtual ~AudioPolicyInterface() {}
//
@@ -140,7 +148,8 @@
input_type_t *inputType) = 0;
// indicates to the audio policy manager that the input starts being used.
virtual status_t startInput(audio_io_handle_t input,
- audio_session_t session) = 0;
+ audio_session_t session,
+ concurrency_type__mask_t *concurrency) = 0;
// indicates to the audio policy manager that the input stops being used.
virtual status_t stopInput(audio_io_handle_t input,
audio_session_t session) = 0;
@@ -225,9 +234,9 @@
virtual status_t startAudioSource(const struct audio_port_config *source,
const audio_attributes_t *attributes,
- audio_io_handle_t *handle,
+ audio_patch_handle_t *handle,
uid_t uid) = 0;
- virtual status_t stopAudioSource(audio_io_handle_t handle) = 0;
+ virtual status_t stopAudioSource(audio_patch_handle_t handle) = 0;
virtual status_t setMasterMono(bool mono) = 0;
virtual status_t getMasterMono(bool *mono) = 0;
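With the new signature above, startInput() reports through an out-parameter whether the capture starts concurrently with a call or with another capture. A sketch of how a caller might decode that mask; the enum values are copied from the hunk, while checkInput() is a hypothetical helper:

// Illustrative only: decoding the concurrency mask from startInput().
#include <cstdint>
#include <cstdio>

using concurrency_type__mask_t = uint32_t;

enum {
    API_INPUT_CONCURRENCY_NONE    = 0,
    API_INPUT_CONCURRENCY_CALL    = (1 << 0),  // concurrency with a call
    API_INPUT_CONCURRENCY_CAPTURE = (1 << 1),  // concurrency with another capture
};

static void checkInput(concurrency_type__mask_t concurrency) {
    if (concurrency & API_INPUT_CONCURRENCY_CALL) {
        std::puts("capture starts while a call is active");
    }
    if (concurrency & API_INPUT_CONCURRENCY_CAPTURE) {
        std::puts("capture starts while another capture is active");
    }
    if (concurrency == API_INPUT_CONCURRENCY_NONE) {
        std::puts("no concurrent use of the input");
    }
}

int main() {
    checkInput(API_INPUT_CONCURRENCY_CALL | API_INPUT_CONCURRENCY_CAPTURE);
    return 0;
}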
diff --git a/services/audiopolicy/common/include/Volume.h b/services/audiopolicy/common/include/Volume.h
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/common/include/policy.h b/services/audiopolicy/common/include/policy.h
old mode 100755
new mode 100644
index 55ee91f..31f0550
--- a/services/audiopolicy/common/include/policy.h
+++ b/services/audiopolicy/common/include/policy.h
@@ -33,9 +33,9 @@
/**
* A device mask for all audio input devices that are considered "virtual" when evaluating
- * active inputs in getActiveInput()
+ * active inputs in getActiveInputs()
*/
-#define APM_AUDIO_IN_DEVICE_VIRTUAL_ALL (AUDIO_DEVICE_IN_REMOTE_SUBMIX|AUDIO_DEVICE_IN_FM_TUNER)
+#define APM_AUDIO_IN_DEVICE_VIRTUAL_ALL (AUDIO_DEVICE_IN_REMOTE_SUBMIX)
/**
@@ -47,16 +47,6 @@
#define APM_AUDIO_DEVICE_IN_MATCH_ADDRESS_ALL (AUDIO_DEVICE_IN_REMOTE_SUBMIX|AUDIO_DEVICE_IN_BUS)
/**
- * Stub audio output device. Used in policy configuration file on platforms without audio outputs.
- * This alias value to AUDIO_DEVICE_OUT_DEFAULT is only used in the audio policy context.
- */
-#define AUDIO_DEVICE_OUT_STUB AUDIO_DEVICE_OUT_DEFAULT
-/**
- * Stub audio input device. Used in policy configuration file on platforms without audio inputs.
- * This alias value to AUDIO_DEVICE_IN_DEFAULT is only used in the audio policy context.
- */
-#define AUDIO_DEVICE_IN_STUB AUDIO_DEVICE_IN_DEFAULT
-/**
* Alias to AUDIO_DEVICE_OUT_DEFAULT defined for clarification when this value is used by volume
* control APIs (e.g. setStreamVolumeIndex()).

*/
@@ -109,6 +99,44 @@
((device & APM_AUDIO_DEVICE_OUT_MATCH_ADDRESS_ALL) != 0));
}
+/**
+ * Returns the priority of a given audio source for capture. The priority is used when more than one
+ * capture session is active on a given input stream to determine which session drives routing and
+ * effect configuration.
+ *
+ * @param[in] inputSource to consider. Valid sources are:
+ * - AUDIO_SOURCE_VOICE_COMMUNICATION
+ * - AUDIO_SOURCE_CAMCORDER
+ * - AUDIO_SOURCE_MIC
+ * - AUDIO_SOURCE_FM_TUNER
+ * - AUDIO_SOURCE_VOICE_RECOGNITION
+ * - AUDIO_SOURCE_HOTWORD
+ *
+ * @return the corresponding input source priority or 0 if priority is irrelevant for this source.
+ * This happens when the specified source cannot share a given input stream (e.g. remote submix).
+ * The higher the value, the higher the priority.
+ */
+static inline int32_t source_priority(audio_source_t inputSource)
+{
+ switch (inputSource) {
+ case AUDIO_SOURCE_VOICE_COMMUNICATION:
+ return 6;
+ case AUDIO_SOURCE_CAMCORDER:
+ return 5;
+ case AUDIO_SOURCE_MIC:
+ return 4;
+ case AUDIO_SOURCE_FM_TUNER:
+ return 3;
+ case AUDIO_SOURCE_VOICE_RECOGNITION:
+ return 2;
+ case AUDIO_SOURCE_HOTWORD:
+ return 1;
+ default:
+ break;
+ }
+ return 0;
+}
+
/* Indicates if audio formats are equivalent when considering a match between
* audio HAL supported formats and client requested formats
*/
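source_priority() above is consumed further down by AudioSessionCollection::getHighestPrioritySource(), which scans the (optionally active-only) sessions and keeps the source with the largest priority. A self-contained sketch of that selection; the priority table is copied from the hunk, while the audio_source_t enum values here are simplified stand-ins for the real constants:

// Illustrative only: picking the highest-priority capture source.
#include <cstdint>
#include <cstdio>
#include <vector>

enum audio_source_t {
    AUDIO_SOURCE_DEFAULT = 0,
    AUDIO_SOURCE_MIC,
    AUDIO_SOURCE_VOICE_COMMUNICATION,
    AUDIO_SOURCE_CAMCORDER,
    AUDIO_SOURCE_VOICE_RECOGNITION,
    AUDIO_SOURCE_HOTWORD,
    AUDIO_SOURCE_FM_TUNER,
};

static int32_t source_priority(audio_source_t s) {
    switch (s) {
    case AUDIO_SOURCE_VOICE_COMMUNICATION: return 6;
    case AUDIO_SOURCE_CAMCORDER:           return 5;
    case AUDIO_SOURCE_MIC:                 return 4;
    case AUDIO_SOURCE_FM_TUNER:            return 3;
    case AUDIO_SOURCE_VOICE_RECOGNITION:   return 2;
    case AUDIO_SOURCE_HOTWORD:             return 1;
    default:                               return 0;
    }
}

int main() {
    std::vector<audio_source_t> activeSources = {
        AUDIO_SOURCE_HOTWORD, AUDIO_SOURCE_CAMCORDER, AUDIO_SOURCE_MIC };
    audio_source_t winner = AUDIO_SOURCE_DEFAULT;
    int32_t best = -1;
    for (audio_source_t s : activeSources) {
        if (source_priority(s) > best) {   // CAMCORDER (priority 5) wins here
            best = source_priority(s);
            winner = s;
        }
    }
    std::printf("highest priority source = %d\n", (int)winner);
    return 0;
}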
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
index e689320..5445413 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
@@ -63,7 +63,9 @@
const sp<AudioSession>& audioSession);
status_t removeAudioSession(audio_session_t session);
sp<AudioSession> getAudioSession(audio_session_t session) const;
- AudioSessionCollection getActiveAudioSessions() const;
+ AudioSessionCollection getAudioSessions(bool activeOnly) const;
+ size_t getAudioSessionCount(bool activeOnly) const;
+ audio_source_t getHighestPrioritySource(bool activeOnly) const;
// implementation of AudioSessionInfoProvider
virtual audio_config_base_t getConfig() const;
@@ -102,7 +104,7 @@
* Only considers inputs from physical devices (e.g. main mic, headset mic) when
* ignoreVirtualInputs is true.
*/
- audio_io_handle_t getActiveInput(bool ignoreVirtualInputs = true);
+ Vector<sp <AudioInputDescriptor> > getActiveInputs(bool ignoreVirtualInputs = true);
audio_devices_t getSupportedDevices(audio_io_handle_t handle) const;
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
index c9652de..97a9c94 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
@@ -19,7 +19,7 @@
#include <utils/RefBase.h>
#include <media/AudioPolicy.h>
#include <utils/KeyedVector.h>
-#include <hardware/audio.h>
+#include <system/audio.h>
#include <utils/String8.h>
namespace android {
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPort.h b/services/audiopolicy/common/managerdefinitions/include/AudioPort.h
index d00d49f..ded2285 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPort.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPort.h
@@ -118,7 +118,7 @@
audio_format_t targetFormat);
audio_module_handle_t getModuleHandle() const;
- uint32_t getModuleVersion() const;
+ uint32_t getModuleVersionMajor() const;
const char *getModuleName() const;
bool useInputChannelMask() const
@@ -166,6 +166,10 @@
virtual void toAudioPortConfig(struct audio_port_config *dstConfig,
const struct audio_port_config *srcConfig = NULL) const = 0;
virtual sp<AudioPort> getAudioPort() const = 0;
+ virtual bool hasSameHwModuleAs(const sp<AudioPortConfig>& other) const {
+ return (other != 0) &&
+ (other->getAudioPort()->getModuleHandle() == getAudioPort()->getModuleHandle());
+ }
uint32_t mSamplingRate;
audio_format_t mFormat;
audio_channel_mask_t mChannelMask;
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioSession.h b/services/audiopolicy/common/managerdefinitions/include/AudioSession.h
index 388c25d..18fba25 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioSession.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioSession.h
@@ -91,8 +91,10 @@
uint32_t getOpenCount() const;
AudioSessionCollection getActiveSessions() const;
+ size_t getActiveSessionCount() const;
bool hasActiveSession() const;
bool isSourceActive(audio_source_t source) const;
+ audio_source_t getHighestPrioritySource(bool activeOnly) const;
// implementation of AudioSessionInfoUpdateListener
virtual void onSessionInfoUpdate() const;
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioSourceDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioSourceDescriptor.h
index 4ab7cf0..7e1e24d 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioSourceDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioSourceDescriptor.h
@@ -50,7 +50,7 @@
};
class AudioSourceCollection :
- public DefaultKeyedVector< audio_io_handle_t, sp<AudioSourceDescriptor> >
+ public DefaultKeyedVector< audio_patch_handle_t, sp<AudioSourceDescriptor> >
{
public:
status_t dump(int fd) const;
diff --git a/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
index ab650c0..9ea0aea 100644
--- a/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
@@ -17,7 +17,7 @@
#pragma once
#include <RoutingStrategy.h>
-#include <hardware/audio_effect.h>
+#include <system/audio_effect.h>
#include <utils/KeyedVector.h>
#include <utils/RefBase.h>
#include <utils/Errors.h>
diff --git a/services/audiopolicy/common/managerdefinitions/include/HwModule.h b/services/audiopolicy/common/managerdefinitions/include/HwModule.h
index 3a31672..29b6b9c 100644
--- a/services/audiopolicy/common/managerdefinitions/include/HwModule.h
+++ b/services/audiopolicy/common/managerdefinitions/include/HwModule.h
@@ -18,7 +18,6 @@
#include "DeviceDescriptor.h"
#include "AudioRoute.h"
-#include <hardware/audio.h>
#include <utils/RefBase.h>
#include <utils/String8.h>
#include <utils/Errors.h>
@@ -40,7 +39,7 @@
class HwModule : public RefBase
{
public:
- explicit HwModule(const char *name, uint32_t halVersion = AUDIO_DEVICE_API_VERSION_MIN);
+ explicit HwModule(const char *name, uint32_t halVersionMajor = 0, uint32_t halVersionMinor = 0);
~HwModule();
const char *getName() const { return mName.string(); }
@@ -55,8 +54,11 @@
void setProfiles(const IOProfileCollection &profiles);
- void setHalVersion(uint32_t halVersion) { mHalVersion = halVersion; }
- uint32_t getHalVersion() const { return mHalVersion; }
+ void setHalVersion(uint32_t major, uint32_t minor) {
+ mHalVersion = (major << 8) | (minor & 0xff);
+ }
+ uint32_t getHalVersionMajor() const { return mHalVersion >> 8; }
+ uint32_t getHalVersionMinor() const { return mHalVersion & 0xff; }
sp<DeviceDescriptor> getRouteSinkDevice(const sp<AudioRoute> &route) const;
DeviceVector getRouteSourceDevices(const sp<AudioRoute> &route) const;
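setHalVersion() above packs the major version into the upper bits and the minor version into the low byte, so a config value like "3.0" round-trips through getHalVersionMajor()/getHalVersionMinor(). A small worked example of that packing, with HwModuleModel as a stand-in type; the shift/mask arithmetic matches the hunk:

// Illustrative only: major/minor packing used by HwModule::setHalVersion().
#include <cassert>
#include <cstdint>
#include <cstdio>

struct HwModuleModel {
    uint32_t mHalVersion = 0;
    void setHalVersion(uint32_t major, uint32_t minor) {
        mHalVersion = (major << 8) | (minor & 0xff);
    }
    uint32_t getHalVersionMajor() const { return mHalVersion >> 8; }
    uint32_t getHalVersionMinor() const { return mHalVersion & 0xff; }
};

int main() {
    HwModuleModel module;
    module.setHalVersion(3, 0);                 // "3.0" parsed from the config file
    assert(module.mHalVersion == 0x0300);
    assert(module.getHalVersionMajor() == 3);
    assert(module.getHalVersionMinor() == 0);
    std::printf("version %u.%u\n",
                module.getHalVersionMajor(), module.getHalVersionMinor());
    return 0;
}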
diff --git a/services/audiopolicy/common/managerdefinitions/include/StreamDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/StreamDescriptor.h
index 424df84..8822927 100644
--- a/services/audiopolicy/common/managerdefinitions/include/StreamDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/StreamDescriptor.h
@@ -20,7 +20,7 @@
#include <utils/KeyedVector.h>
#include <utils/StrongPointer.h>
#include <utils/SortedVector.h>
-#include <hardware/audio.h>
+#include <system/audio.h>
namespace android {
diff --git a/services/audiopolicy/common/managerdefinitions/include/TypeConverter.h b/services/audiopolicy/common/managerdefinitions/include/TypeConverter.h
index 1612714..84e3a36 100644
--- a/services/audiopolicy/common/managerdefinitions/include/TypeConverter.h
+++ b/services/audiopolicy/common/managerdefinitions/include/TypeConverter.h
@@ -16,190 +16,19 @@
#pragma once
+#include <media/TypeConverter.h>
+
#include "policy.h"
#include <Volume.h>
-#include <system/audio.h>
-#include <convert/convert.h>
-#include <utils/Log.h>
-#include <string>
-#include <utils/Vector.h>
-#include <utils/SortedVector.h>
namespace android {
-struct SampleRateTraits
-{
- typedef uint32_t Type;
- typedef SortedVector<Type> Collection;
-};
-struct DeviceTraits
-{
- typedef audio_devices_t Type;
- typedef Vector<Type> Collection;
-};
-struct OutputFlagTraits
-{
- typedef audio_output_flags_t Type;
- typedef Vector<Type> Collection;
-};
-struct InputFlagTraits
-{
- typedef audio_input_flags_t Type;
- typedef Vector<Type> Collection;
-};
-struct FormatTraits
-{
- typedef audio_format_t Type;
- typedef Vector<Type> Collection;
-};
-struct ChannelTraits
-{
- typedef audio_channel_mask_t Type;
- typedef SortedVector<Type> Collection;
-};
-struct OutputChannelTraits : public ChannelTraits {};
-struct InputChannelTraits : public ChannelTraits {};
-struct ChannelIndexTraits : public ChannelTraits {};
-struct GainModeTraits
-{
- typedef audio_gain_mode_t Type;
- typedef Vector<Type> Collection;
-};
-struct StreamTraits
-{
- typedef audio_stream_type_t Type;
- typedef Vector<Type> Collection;
-};
struct DeviceCategoryTraits
{
- typedef device_category Type;
- typedef Vector<Type> Collection;
-};
-template <typename T>
-struct DefaultTraits
-{
- typedef T Type;
- typedef Vector<Type> Collection;
+ typedef device_category Type;
+ typedef Vector<Type> Collection;
};
-template <class Traits>
-static void collectionFromString(const std::string &str, typename Traits::Collection &collection,
- const char *del = "|")
-{
- char *literal = strdup(str.c_str());
- for (const char *cstr = strtok(literal, del); cstr != NULL; cstr = strtok(NULL, del)) {
- typename Traits::Type value;
- if (utilities::convertTo<std::string, typename Traits::Type >(cstr, value)) {
- collection.add(value);
- }
- }
- free(literal);
-}
-
-template <class Traits>
-class TypeConverter
-{
-public:
- static bool toString(const typename Traits::Type &value, std::string &str);
-
- static bool fromString(const std::string &str, typename Traits::Type &result);
-
- static void collectionFromString(const std::string &str,
- typename Traits::Collection &collection,
- const char *del = "|");
-
- static uint32_t maskFromString(const std::string &str, const char *del = "|");
-
-protected:
- struct Table {
- const char *literal;
- typename Traits::Type value;
- };
-
- static const Table mTable[];
- static const size_t mSize;
-};
-
-typedef TypeConverter<DeviceTraits> DeviceConverter;
-typedef TypeConverter<OutputFlagTraits> OutputFlagConverter;
-typedef TypeConverter<InputFlagTraits> InputFlagConverter;
-typedef TypeConverter<FormatTraits> FormatConverter;
-typedef TypeConverter<OutputChannelTraits> OutputChannelConverter;
-typedef TypeConverter<InputChannelTraits> InputChannelConverter;
-typedef TypeConverter<ChannelIndexTraits> ChannelIndexConverter;
-typedef TypeConverter<GainModeTraits> GainModeConverter;
-typedef TypeConverter<StreamTraits> StreamTypeConverter;
typedef TypeConverter<DeviceCategoryTraits> DeviceCategoryConverter;
-inline
-static SampleRateTraits::Collection samplingRatesFromString(const std::string &samplingRates,
- const char *del = "|")
-{
- SampleRateTraits::Collection samplingRateCollection;
- collectionFromString<SampleRateTraits>(samplingRates, samplingRateCollection, del);
- return samplingRateCollection;
-}
-
-inline
-static FormatTraits::Collection formatsFromString(const std::string &formats, const char *del = "|")
-{
- FormatTraits::Collection formatCollection;
- FormatConverter::collectionFromString(formats, formatCollection, del);
- return formatCollection;
-}
-
-inline
-static audio_format_t formatFromString(const std::string &literalFormat)
-{
- audio_format_t format;
- if (literalFormat.empty()) {
- return gDynamicFormat;
- }
- FormatConverter::fromString(literalFormat, format);
- return format;
-}
-
-inline
-static audio_channel_mask_t channelMaskFromString(const std::string &literalChannels)
-{
- audio_channel_mask_t channels;
- if (!OutputChannelConverter::fromString(literalChannels, channels) ||
- !InputChannelConverter::fromString(literalChannels, channels)) {
- return AUDIO_CHANNEL_INVALID;
- }
- return channels;
-}
-
-inline
-static ChannelTraits::Collection channelMasksFromString(const std::string &channels,
- const char *del = "|")
-{
- ChannelTraits::Collection channelMaskCollection;
- OutputChannelConverter::collectionFromString(channels, channelMaskCollection, del);
- InputChannelConverter::collectionFromString(channels, channelMaskCollection, del);
- ChannelIndexConverter::collectionFromString(channels, channelMaskCollection, del);
- return channelMaskCollection;
-}
-
-inline
-static InputChannelTraits::Collection inputChannelMasksFromString(const std::string &inChannels,
- const char *del = "|")
-{
- InputChannelTraits::Collection inputChannelMaskCollection;
- InputChannelConverter::collectionFromString(inChannels, inputChannelMaskCollection, del);
- ChannelIndexConverter::collectionFromString(inChannels, inputChannelMaskCollection, del);
- return inputChannelMaskCollection;
-}
-
-inline
-static OutputChannelTraits::Collection outputChannelMasksFromString(const std::string &outChannels,
- const char *del = "|")
-{
- OutputChannelTraits::Collection outputChannelMaskCollection;
- OutputChannelConverter::collectionFromString(outChannels, outputChannelMaskCollection, del);
- ChannelIndexConverter::collectionFromString(outChannels, outputChannelMaskCollection, del);
- return outputChannelMaskCollection;
-}
-
}; // namespace android
-
diff --git a/services/audiopolicy/common/managerdefinitions/include/VolumeCurve.h b/services/audiopolicy/common/managerdefinitions/include/VolumeCurve.h
index 10f0766..e7fcefc 100644
--- a/services/audiopolicy/common/managerdefinitions/include/VolumeCurve.h
+++ b/services/audiopolicy/common/managerdefinitions/include/VolumeCurve.h
@@ -18,7 +18,6 @@
#include "IVolumeCurvesCollection.h"
#include <policy.h>
-#include <hardware/audio.h>
#include <utils/RefBase.h>
#include <utils/String8.h>
#include <utils/SortedVector.h>
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
index c7d2ee4..44f9637 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
@@ -132,6 +132,12 @@
return mSessions.isSourceActive(source);
}
+audio_source_t AudioInputDescriptor::getHighestPrioritySource(bool activeOnly) const
+{
+
+ return mSessions.getHighestPrioritySource(activeOnly);
+}
+
bool AudioInputDescriptor::isSoundTrigger() const {
// sound trigger and non sound trigger sessions are not mixed
// on a given input
@@ -143,9 +149,22 @@
return mSessions.valueFor(session);
}
-AudioSessionCollection AudioInputDescriptor::getActiveAudioSessions() const
+AudioSessionCollection AudioInputDescriptor::getAudioSessions(bool activeOnly) const
{
- return mSessions.getActiveSessions();
+ if (activeOnly) {
+ return mSessions.getActiveSessions();
+ } else {
+ return mSessions;
+ }
+}
+
+size_t AudioInputDescriptor::getAudioSessionCount(bool activeOnly) const
+{
+ if (activeOnly) {
+ return mSessions.getActiveSessionCount();
+ } else {
+ return mSessions.size();
+ }
}
status_t AudioInputDescriptor::addAudioSession(audio_session_t session,
@@ -236,17 +255,19 @@
return count;
}
-audio_io_handle_t AudioInputCollection::getActiveInput(bool ignoreVirtualInputs)
+Vector<sp <AudioInputDescriptor> > AudioInputCollection::getActiveInputs(bool ignoreVirtualInputs)
{
+ Vector<sp <AudioInputDescriptor> > activeInputs;
+
for (size_t i = 0; i < size(); i++) {
const sp<AudioInputDescriptor> inputDescriptor = valueAt(i);
if ((inputDescriptor->isActive())
&& (!ignoreVirtualInputs ||
!is_virtual_input_device(inputDescriptor->mDevice))) {
- return keyAt(i);
+ activeInputs.add(inputDescriptor);
}
}
- return 0;
+ return activeInputs;
}
audio_devices_t AudioInputCollection::getSupportedDevices(audio_io_handle_t handle) const
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 1dbc3d0..93b7f47 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -81,7 +81,7 @@
return sharesHwModuleWith(outputDesc->subOutput1()) ||
sharesHwModuleWith(outputDesc->subOutput2());
} else {
- return (getModuleHandle() == outputDesc->getModuleHandle());
+ return hasSameHwModuleAs(outputDesc);
}
}
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPatch.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPatch.cpp
index f382dec..0daae6c 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPatch.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPatch.cpp
@@ -54,7 +54,7 @@
for (size_t i = 0; i < mPatch.num_sources; i++) {
if (mPatch.sources[i].type == AUDIO_PORT_TYPE_DEVICE) {
std::string device;
- DeviceConverter::toString(mPatch.sources[i].ext.device.type, device);
+ deviceToString(mPatch.sources[i].ext.device.type, device);
snprintf(buffer, SIZE, "%*s- Device ID %d %s\n", spaces + 2, "",
mPatch.sources[i].id,
device.c_str());
@@ -69,7 +69,7 @@
for (size_t i = 0; i < mPatch.num_sinks; i++) {
if (mPatch.sinks[i].type == AUDIO_PORT_TYPE_DEVICE) {
std::string device;
- DeviceConverter::toString(mPatch.sinks[i].ext.device.type, device);
+ deviceToString(mPatch.sinks[i].ext.device.type, device);
snprintf(buffer, SIZE, "%*s- Device ID %d %s\n", spaces + 2, "",
mPatch.sinks[i].id,
device.c_str());
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPort.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPort.cpp
index 31bf95c..aac23b4 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPort.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPort.cpp
@@ -50,12 +50,12 @@
return mModule->mHandle;
}
-uint32_t AudioPort::getModuleVersion() const
+uint32_t AudioPort::getModuleVersionMajor() const
{
if (mModule == 0) {
return 0;
}
- return mModule->getHalVersion();
+ return mModule->getHalVersionMajor();
}
const char *AudioPort::getModuleName() const
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioSession.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioSession.cpp
index da983c5..3b63239 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioSession.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioSession.cpp
@@ -18,6 +18,7 @@
//#define LOG_NDEBUG 0
#include <AudioPolicyInterface.h>
+#include "policy.h"
#include "AudioSession.h"
#include "AudioGain.h"
#include "TypeConverter.h"
@@ -214,9 +215,20 @@
return activeSessions;
}
+size_t AudioSessionCollection::getActiveSessionCount() const
+{
+ size_t activeCount = 0;
+ for (size_t i = 0; i < size(); i++) {
+ if (valueAt(i)->activeCount() != 0) {
+ activeCount++;
+ }
+ }
+ return activeCount;
+}
+
bool AudioSessionCollection::hasActiveSession() const
{
- return getActiveSessions().size() != 0;
+ return getActiveSessionCount() != 0;
}
bool AudioSessionCollection::isSourceActive(audio_source_t source) const
@@ -236,6 +248,25 @@
return false;
}
+audio_source_t AudioSessionCollection::getHighestPrioritySource(bool activeOnly) const
+{
+ audio_source_t source = AUDIO_SOURCE_DEFAULT;
+ int32_t priority = -1;
+
+ for (size_t i = 0; i < size(); i++) {
+ const sp<AudioSession> audioSession = valueAt(i);
+ if (activeOnly && audioSession->activeCount() == 0) {
+ continue;
+ }
+ int32_t curPriority = source_priority(audioSession->inputSource());
+ if (curPriority > priority) {
+ priority = curPriority;
+ source = audioSession->inputSource();
+ }
+ }
+ return source;
+}
+
void AudioSessionCollection::onSessionInfoUpdate() const
{
for (size_t i = 0; i < size(); i++) {
@@ -243,7 +274,6 @@
}
}
-
status_t AudioSessionCollection::dump(int fd, int spaces) const
{
const size_t SIZE = 256;
diff --git a/services/audiopolicy/common/managerdefinitions/src/ConfigParsingUtils.cpp b/services/audiopolicy/common/managerdefinitions/src/ConfigParsingUtils.cpp
index a3536e5..e5888e2 100644
--- a/services/audiopolicy/common/managerdefinitions/src/ConfigParsingUtils.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/ConfigParsingUtils.cpp
@@ -18,11 +18,11 @@
//#define LOG_NDEBUG 0
#include "ConfigParsingUtils.h"
-#include <convert/convert.h>
#include "AudioGain.h"
#include "IOProfile.h"
-#include "TypeConverter.h"
-#include <hardware/audio.h>
+#include <system/audio.h>
+#include <media/AudioParameter.h>
+#include <media/TypeConverter.h>
#include <utils/Log.h>
#include <cutils/misc.h>
@@ -105,7 +105,7 @@
audio_devices_t type = AUDIO_DEVICE_NONE;
while (node) {
if (strcmp(node->name, APM_DEVICE_TYPE) == 0) {
- DeviceConverter::fromString(node->value, type);
+ deviceFromString(node->value, type);
break;
}
node = node->next;
@@ -289,11 +289,11 @@
const DeviceVector &declaredDevices)
{
char *tagLiteral = strndup(tag, strlen(tag));
- char *devTag = strtok(tagLiteral, "|");
+ char *devTag = strtok(tagLiteral, AudioParameter::valueListSeparator);
while (devTag != NULL) {
if (strlen(devTag) != 0) {
audio_devices_t type;
- if (DeviceConverter::fromString(devTag, type)) {
+ if (deviceFromString(devTag, type)) {
uint32_t inBit = type & AUDIO_DEVICE_BIT_IN;
type &= ~AUDIO_DEVICE_BIT_IN;
while (type) {
@@ -311,7 +311,7 @@
}
}
}
- devTag = strtok(NULL, "|");
+ devTag = strtok(NULL, AudioParameter::valueListSeparator);
}
free(tagLiteral);
}
@@ -340,7 +340,7 @@
config.addAvailableOutputDevices(availableOutputDevices);
} else if (strcmp(DEFAULT_OUTPUT_DEVICE_TAG, node->name) == 0) {
audio_devices_t device = AUDIO_DEVICE_NONE;
- DeviceConverter::fromString(node->value, device);
+ deviceFromString(node->value, device);
if (device != AUDIO_DEVICE_NONE) {
sp<DeviceDescriptor> defaultOutputDevice = new DeviceDescriptor(device);
config.setDefaultOutputDevice(defaultOutputDevice);
@@ -356,9 +356,8 @@
} else if (strcmp(AUDIO_HAL_VERSION_TAG, node->name) == 0) {
uint32_t major, minor;
sscanf((char *)node->value, "%u.%u", &major, &minor);
- module->setHalVersion(HARDWARE_DEVICE_API_VERSION(major, minor));
- ALOGV("loadGlobalConfig() mHalVersion = %04x major %u minor %u",
- module->getHalVersion(), major, minor);
+ module->setHalVersion(major, minor);
+ ALOGV("loadGlobalConfig() mHalVersion = major %u minor %u", major, minor);
}
node = node->next;
}
diff --git a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
index ba2b9e3..f0e48b6 100644
--- a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
@@ -285,7 +285,7 @@
result.append(buffer);
}
std::string deviceLiteral;
- if (DeviceConverter::toString(mDeviceType, deviceLiteral)) {
+ if (deviceToString(mDeviceType, deviceLiteral)) {
snprintf(buffer, SIZE, "%*s- type: %-48s\n", spaces, "", deviceLiteral.c_str());
result.append(buffer);
}
@@ -302,7 +302,7 @@
void DeviceDescriptor::log() const
{
std::string device;
- DeviceConverter::toString(mDeviceType, device);
+ deviceToString(mDeviceType, device);
ALOGI("Device id:%d type:0x%X:%s, addr:%s", mId, mDeviceType, device.c_str(),
mAddress.string());
diff --git a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
index 7a942cd..cc56fb8 100644
--- a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
@@ -20,16 +20,16 @@
#include "HwModule.h"
#include "IOProfile.h"
#include "AudioGain.h"
-#include <hardware/audio.h>
#include <policy.h>
+#include <system/audio.h>
namespace android {
-HwModule::HwModule(const char *name, uint32_t halVersion)
+HwModule::HwModule(const char *name, uint32_t halVersionMajor, uint32_t halVersionMinor)
: mName(String8(name)),
- mHandle(AUDIO_MODULE_HANDLE_NONE),
- mHalVersion(halVersion)
+ mHandle(AUDIO_MODULE_HANDLE_NONE)
{
+ setHalVersion(halVersionMajor, halVersionMinor);
}
HwModule::~HwModule()
@@ -42,8 +42,8 @@
}
}
-status_t HwModule::addOutputProfile(const String8 &name, const audio_config_t *config,
- audio_devices_t device, const String8 &address)
+status_t HwModule::addOutputProfile(const String8& name, const audio_config_t *config,
+ audio_devices_t device, const String8& address)
{
sp<IOProfile> profile = new OutputProfile(name);
@@ -227,7 +227,7 @@
result.append(buffer);
snprintf(buffer, SIZE, " - handle: %d\n", mHandle);
result.append(buffer);
- snprintf(buffer, SIZE, " - version: %u.%u\n", mHalVersion >> 8, mHalVersion & 0xFF);
+ snprintf(buffer, SIZE, " - version: %u.%u\n", getHalVersionMajor(), getHalVersionMinor());
result.append(buffer);
write(fd, result.string(), result.size());
if (mOutputProfiles.size()) {
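
The HwModule changes above replace the packed HARDWARE_DEVICE_API_VERSION value with separate major and minor numbers exposed through getHalVersionMajor()/getHalVersionMinor(). The sketch below shows one way such accessors could stay consistent with the old dump() decoding (major in the high byte, minor in the low byte); the packing is only an assumption made for illustration, the real class may simply keep two fields.

    #include <cstdint>
    #include <cstdio>

    // Sketch only: keeps the version in the same 0xMMmm layout the old dump() decoded
    // (mHalVersion >> 8, mHalVersion & 0xFF). This packing is an assumption, not part
    // of the patch.
    class HwModuleVersionSketch {
    public:
        HwModuleVersionSketch(uint32_t major, uint32_t minor) { setHalVersion(major, minor); }
        void setHalVersion(uint32_t major, uint32_t minor) {
            mHalVersion = (major << 8) | (minor & 0xFF);
        }
        uint32_t getHalVersionMajor() const { return mHalVersion >> 8; }
        uint32_t getHalVersionMinor() const { return mHalVersion & 0xFF; }
    private:
        uint32_t mHalVersion;
    };

    int main() {
        HwModuleVersionSketch module(3, 0);
        // Matches the new dump() format string: " - version: 3.0"
        printf(" - version: %u.%u\n", module.getHalVersionMajor(), module.getHalVersionMinor());
        return 0;
    }
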
diff --git a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
index 57f2534..74ef4ec 100644
--- a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
@@ -108,8 +108,18 @@
AudioPort::dump(fd, 4);
- snprintf(buffer, SIZE, " - flags: 0x%04x\n", getFlags());
+ snprintf(buffer, SIZE, " - flags: 0x%04x", getFlags());
result.append(buffer);
+ std::string flagsLiteral;
+ if (getRole() == AUDIO_PORT_ROLE_SINK) {
+ InputFlagConverter::maskToString(getFlags(), flagsLiteral);
+ } else if (getRole() == AUDIO_PORT_ROLE_SOURCE) {
+ OutputFlagConverter::maskToString(getFlags(), flagsLiteral);
+ }
+ if (!flagsLiteral.empty()) {
+ result.appendFormat(" (%s)", flagsLiteral.c_str());
+ }
+ result.append("\n");
write(fd, result.string(), result.size());
mSupportedDevices.dump(fd, String8("Supported"), 4, false);
}
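
The IOProfile::dump() hunk above prints a readable flag list next to the hex mask by calling InputFlagConverter or OutputFlagConverter::maskToString() depending on the port role. A self-contained sketch of that mask-to-literal expansion follows; the flag table here is a small illustrative subset, not the real converter table.

    #include <cstdint>
    #include <cstdio>
    #include <string>

    struct FlagEntry { const char *literal; uint32_t value; };

    // Illustrative subset of AUDIO_OUTPUT_FLAG_* bits; not the real converter table.
    static const FlagEntry kOutputFlags[] = {
        { "AUDIO_OUTPUT_FLAG_DIRECT",      0x1 },
        { "AUDIO_OUTPUT_FLAG_PRIMARY",     0x2 },
        { "AUDIO_OUTPUT_FLAG_FAST",        0x4 },
        { "AUDIO_OUTPUT_FLAG_DEEP_BUFFER", 0x8 },
    };

    // Same idea as maskToString(): append every literal whose bit is set in the mask.
    static std::string maskToStringSketch(uint32_t mask, const char *sep = "|") {
        std::string out;
        for (const FlagEntry &entry : kOutputFlags) {
            if (mask & entry.value) {
                if (!out.empty()) out += sep;
                out += entry.literal;
            }
        }
        return out;
    }

    int main() {
        uint32_t flags = 0x1 | 0x4;  // DIRECT | FAST
        printf(" - flags: 0x%04x (%s)\n", flags, maskToStringSketch(flags).c_str());
        return 0;
    }
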
diff --git a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
index 3e5bb7d..818da72 100644
--- a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
@@ -18,7 +18,7 @@
//#define LOG_NDEBUG 0
#include "Serializer.h"
-#include <convert/convert.h>
+#include <media/convert.h>
#include "TypeConverter.h"
#include <libxml/parser.h>
#include <libxml/xinclude.h>
@@ -199,7 +199,8 @@
string format = getXmlAttribute(root, Attributes::format);
string channels = getXmlAttribute(root, Attributes::channelMasks);
- profile = new Element(formatFromString(format), channelMasksFromString(channels, ","),
+ profile = new Element(formatFromString(format, gDynamicFormat),
+ channelMasksFromString(channels, ","),
samplingRatesFromString(samplingRates, ","));
profile->setDynamicFormat(profile->getFormat() == gDynamicFormat);
@@ -300,7 +301,7 @@
AUDIO_PORT_ROLE_SOURCE : AUDIO_PORT_ROLE_SINK;
audio_devices_t type = AUDIO_DEVICE_NONE;
- if (!DeviceConverter::fromString(typeName, type) ||
+ if (!deviceFromString(typeName, type) ||
(!audio_is_input_device(type) && portRole == AUDIO_PORT_ROLE_SOURCE) ||
(!audio_is_output_devices(type) && portRole == AUDIO_PORT_ROLE_SINK)) {
ALOGW("%s: bad type %08x", __FUNCTION__, type);
@@ -418,19 +419,17 @@
ALOGE("%s: No %s found", __FUNCTION__, Attributes::name);
return BAD_VALUE;
}
- uint32_t version = AUDIO_DEVICE_API_VERSION_MIN;
+ uint32_t versionMajor = 0, versionMinor = 0;
string versionLiteral = getXmlAttribute(root, Attributes::version);
if (!versionLiteral.empty()) {
- uint32_t major, minor;
- sscanf(versionLiteral.c_str(), "%u.%u", &major, &minor);
- version = HARDWARE_DEVICE_API_VERSION(major, minor);
- ALOGV("%s: mHalVersion = %04x major %u minor %u", __FUNCTION__,
- version, major, minor);
+ sscanf(versionLiteral.c_str(), "%u.%u", &versionMajor, &versionMinor);
+ ALOGV("%s: mHalVersion = major %u minor %u", __FUNCTION__,
+ versionMajor, versionMinor);
}
ALOGV("%s: %s %s=%s", __FUNCTION__, tag, Attributes::name, name.c_str());
- module = new Element(name.c_str(), version);
+ module = new Element(name.c_str(), versionMajor, versionMinor);
// Deserialize children: Audio Mix Port, Audio Device Ports (Source/Sink), Audio Routes
MixPortTraits::Collection mixPorts;
diff --git a/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp b/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
index 48bfd79..4839683 100644
--- a/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
@@ -19,285 +19,17 @@
namespace android {
#define MAKE_STRING_FROM_ENUM(string) { #string, string }
-
-template <>
-const DeviceConverter::Table DeviceConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_EARPIECE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_SPEAKER),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_SPEAKER_SAFE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_WIRED_HEADSET),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_WIRED_HEADPHONE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_SCO),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_ALL_SCO),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_ALL_A2DP),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_AUX_DIGITAL),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_HDMI),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_USB_ACCESSORY),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_USB_DEVICE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_ALL_USB),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_REMOTE_SUBMIX),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_TELEPHONY_TX),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_LINE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_HDMI_ARC),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_SPDIF),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_FM),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_AUX_LINE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_IP),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BUS),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_STUB),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_AMBIENT),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BUILTIN_MIC),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_ALL_SCO),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_WIRED_HEADSET),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_AUX_DIGITAL),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_HDMI),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_TELEPHONY_RX),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_VOICE_CALL),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BACK_MIC),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_REMOTE_SUBMIX),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_USB_ACCESSORY),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_USB_DEVICE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_FM_TUNER),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_TV_TUNER),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_LINE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_SPDIF),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BLUETOOTH_A2DP),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_LOOPBACK),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_IP),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BUS),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_STUB),
-};
-
-template<>
-const size_t DeviceConverter::mSize = sizeof(DeviceConverter::mTable) /
- sizeof(DeviceConverter::mTable[0]);
-
-
-template <>
-const OutputFlagConverter::Table OutputFlagConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_DIRECT),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_PRIMARY),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_FAST),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_DEEP_BUFFER),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_NON_BLOCKING),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_HW_AV_SYNC),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_TTS),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_RAW),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_SYNC),
-};
-template<>
-const size_t OutputFlagConverter::mSize = sizeof(OutputFlagConverter::mTable) /
- sizeof(OutputFlagConverter::mTable[0]);
-
-
-template <>
-const InputFlagConverter::Table InputFlagConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_FAST),
- MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_HW_HOTWORD),
- MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_RAW),
- MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_SYNC),
-};
-template<>
-const size_t InputFlagConverter::mSize = sizeof(InputFlagConverter::mTable) /
- sizeof(InputFlagConverter::mTable[0]);
-
-
-template <>
-const FormatConverter::Table FormatConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_16_BIT),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_8_BIT),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_32_BIT),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_8_24_BIT),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_FLOAT),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_24_BIT_PACKED),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_MP3),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_MAIN),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LC),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_SSR),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LTP),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_HE_V1),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_SCALABLE),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ERLC),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LD),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_HE_V2),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ELD),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_VORBIS),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_HE_AAC_V1),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_HE_AAC_V2),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_OPUS),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AC3),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_E_AC3),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_DTS),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_DTS_HD),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_IEC61937),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_DOLBY_TRUEHD),
-};
-template<>
-const size_t FormatConverter::mSize = sizeof(FormatConverter::mTable) /
- sizeof(FormatConverter::mTable[0]);
-
-
-template <>
-const OutputChannelConverter::Table OutputChannelConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_MONO),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_STEREO),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_QUAD),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_5POINT1),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_7POINT1),
-};
-template<>
-const size_t OutputChannelConverter::mSize = sizeof(OutputChannelConverter::mTable) /
- sizeof(OutputChannelConverter::mTable[0]);
-
-
-template <>
-const InputChannelConverter::Table InputChannelConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_MONO),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_STEREO),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_FRONT_BACK),
-};
-template<>
-const size_t InputChannelConverter::mSize = sizeof(InputChannelConverter::mTable) /
- sizeof(InputChannelConverter::mTable[0]);
-
-template <>
-const ChannelIndexConverter::Table ChannelIndexConverter::mTable[] = {
- {"AUDIO_CHANNEL_INDEX_MASK_1", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_1)},
- {"AUDIO_CHANNEL_INDEX_MASK_2", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_2)},
- {"AUDIO_CHANNEL_INDEX_MASK_3", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_3)},
- {"AUDIO_CHANNEL_INDEX_MASK_4", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_4)},
- {"AUDIO_CHANNEL_INDEX_MASK_5", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_5)},
- {"AUDIO_CHANNEL_INDEX_MASK_6", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_6)},
- {"AUDIO_CHANNEL_INDEX_MASK_7", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_7)},
- {"AUDIO_CHANNEL_INDEX_MASK_8", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_8)},
-};
-template<>
-const size_t ChannelIndexConverter::mSize = sizeof(ChannelIndexConverter::mTable) /
- sizeof(ChannelIndexConverter::mTable[0]);
-
-
-template <>
-const GainModeConverter::Table GainModeConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_GAIN_MODE_JOINT),
- MAKE_STRING_FROM_ENUM(AUDIO_GAIN_MODE_CHANNELS),
- MAKE_STRING_FROM_ENUM(AUDIO_GAIN_MODE_RAMP),
-};
-
-template<>
-const size_t GainModeConverter::mSize = sizeof(GainModeConverter::mTable) /
- sizeof(GainModeConverter::mTable[0]);
+#define TERMINATOR { .literal = nullptr }
template <>
const DeviceCategoryConverter::Table DeviceCategoryConverter::mTable[] = {
MAKE_STRING_FROM_ENUM(DEVICE_CATEGORY_HEADSET),
MAKE_STRING_FROM_ENUM(DEVICE_CATEGORY_SPEAKER),
MAKE_STRING_FROM_ENUM(DEVICE_CATEGORY_EARPIECE),
- MAKE_STRING_FROM_ENUM(DEVICE_CATEGORY_EXT_MEDIA)
+ MAKE_STRING_FROM_ENUM(DEVICE_CATEGORY_EXT_MEDIA),
+ TERMINATOR
};
-template<>
-const size_t DeviceCategoryConverter::mSize = sizeof(DeviceCategoryConverter::mTable) /
- sizeof(DeviceCategoryConverter::mTable[0]);
-
-template <>
-const StreamTypeConverter::Table StreamTypeConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_VOICE_CALL),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_SYSTEM),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_RING),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_MUSIC),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_ALARM),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_NOTIFICATION),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_BLUETOOTH_SCO ),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_ENFORCED_AUDIBLE),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_DTMF),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_TTS),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_ACCESSIBILITY),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_REROUTING),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_PATCH),
-};
-
-template<>
-const size_t StreamTypeConverter::mSize = sizeof(StreamTypeConverter::mTable) /
- sizeof(StreamTypeConverter::mTable[0]);
-
-template <class Traits>
-bool TypeConverter<Traits>::toString(const typename Traits::Type &value, std::string &str)
-{
- for (size_t i = 0; i < mSize; i++) {
- if (mTable[i].value == value) {
- str = mTable[i].literal;
- return true;
- }
- }
- return false;
-}
-
-template <class Traits>
-bool TypeConverter<Traits>::fromString(const std::string &str, typename Traits::Type &result)
-{
- for (size_t i = 0; i < mSize; i++) {
- if (strcmp(mTable[i].literal, str.c_str()) == 0) {
- ALOGV("stringToEnum() found %s", mTable[i].literal);
- result = mTable[i].value;
- return true;
- }
- }
- return false;
-}
-
-template <class Traits>
-void TypeConverter<Traits>::collectionFromString(const std::string &str,
- typename Traits::Collection &collection,
- const char *del)
-{
- char *literal = strdup(str.c_str());
-
- for (const char *cstr = strtok(literal, del); cstr != NULL; cstr = strtok(NULL, del)) {
- typename Traits::Type value;
- if (fromString(cstr, value)) {
- collection.add(value);
- }
- }
- free(literal);
-}
-
-template <class Traits>
-uint32_t TypeConverter<Traits>::maskFromString(const std::string &str, const char *del)
-{
- char *literal = strdup(str.c_str());
- uint32_t value = 0;
- for (const char *cstr = strtok(literal, del); cstr != NULL; cstr = strtok(NULL, del)) {
- typename Traits::Type type;
- if (fromString(cstr, type)) {
- value |= static_cast<uint32_t>(type);
- }
- }
- free(literal);
- return value;
-}
-
-template class TypeConverter<DeviceTraits>;
-template class TypeConverter<OutputFlagTraits>;
-template class TypeConverter<InputFlagTraits>;
-template class TypeConverter<FormatTraits>;
-template class TypeConverter<OutputChannelTraits>;
-template class TypeConverter<InputChannelTraits>;
-template class TypeConverter<ChannelIndexTraits>;
-template class TypeConverter<GainModeTraits>;
-template class TypeConverter<StreamTraits>;
template class TypeConverter<DeviceCategoryTraits>;
}; // namespace android
-
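
Most of TypeConverter.cpp moves to libmedia in this change; the one table kept here, DeviceCategoryConverter, is now ended by a TERMINATOR entry with a null literal instead of a separate mSize constant. The sketch below shows how a lookup can walk such a terminator-ended table; the enum, struct and macro spellings are simplified stand-ins for the real TypeConverter traits.

    #include <cstdio>
    #include <cstring>
    #include <string>

    enum device_category {
        DEVICE_CATEGORY_HEADSET,
        DEVICE_CATEGORY_SPEAKER,
        DEVICE_CATEGORY_EARPIECE,
        DEVICE_CATEGORY_EXT_MEDIA,
    };

    struct Table { const char *literal; device_category value; };

    #define MAKE_STRING_FROM_ENUM(e) { #e, e }
    #define TERMINATOR { nullptr, DEVICE_CATEGORY_HEADSET }  // literal == nullptr marks the end

    static const Table kDeviceCategoryTable[] = {
        MAKE_STRING_FROM_ENUM(DEVICE_CATEGORY_HEADSET),
        MAKE_STRING_FROM_ENUM(DEVICE_CATEGORY_SPEAKER),
        MAKE_STRING_FROM_ENUM(DEVICE_CATEGORY_EARPIECE),
        MAKE_STRING_FROM_ENUM(DEVICE_CATEGORY_EXT_MEDIA),
        TERMINATOR
    };

    // Walk until the null literal instead of carrying a separate mSize constant.
    static bool categoryFromString(const std::string &str, device_category &result) {
        for (const Table *entry = kDeviceCategoryTable; entry->literal != nullptr; ++entry) {
            if (strcmp(entry->literal, str.c_str()) == 0) {
                result = entry->value;
                return true;
            }
        }
        return false;
    }

    int main() {
        device_category category;
        if (categoryFromString("DEVICE_CATEGORY_SPEAKER", category)) {
            printf("found category %d\n", category);
        }
        return 0;
    }
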
diff --git a/services/audiopolicy/engine/interface/AudioPolicyManagerInterface.h b/services/audiopolicy/engine/interface/AudioPolicyManagerInterface.h
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engine/interface/AudioPolicyManagerObserver.h b/services/audiopolicy/engine/interface/AudioPolicyManagerObserver.h
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/Android.mk b/services/audiopolicy/engineconfigurable/Android.mk
old mode 100755
new mode 100644
index 6dba75b..6b18921
--- a/services/audiopolicy/engineconfigurable/Android.mk
+++ b/services/audiopolicy/engineconfigurable/Android.mk
@@ -39,8 +39,10 @@
LOCAL_MODULE := libaudiopolicyengineconfigurable
LOCAL_MODULE_TAGS := optional
+
+LOCAL_WHOLE_STATIC_LIBRARIES := libmedia_helper
+
LOCAL_STATIC_LIBRARIES := \
- libmedia_helper \
libaudiopolicypfwwrapper \
libaudiopolicycomponents \
libxml2
@@ -48,6 +50,7 @@
LOCAL_SHARED_LIBRARIES := \
libcutils \
libutils \
+ liblog \
libaudioutils \
libparameter
diff --git a/services/audiopolicy/engineconfigurable/include/AudioPolicyEngineInstance.h b/services/audiopolicy/engineconfigurable/include/AudioPolicyEngineInstance.h
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/include/EngineDefinition.h b/services/audiopolicy/engineconfigurable/include/EngineDefinition.h
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/interface/AudioPolicyPluginInterface.h b/services/audiopolicy/engineconfigurable/interface/AudioPolicyPluginInterface.h
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/ParameterFrameworkConfigurationPolicy.xml.in b/services/audiopolicy/engineconfigurable/parameter-framework/examples/ParameterFrameworkConfigurationPolicy.xml.in
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/strategy_for_stream.pfw b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/strategy_for_stream.pfw
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Structure/PolicyClass.xml b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Structure/PolicyClass.xml
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Structure/PolicySubsystem-CommonTypes.xml b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Structure/PolicySubsystem-CommonTypes.xml
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Structure/PolicySubsystem.xml b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Structure/PolicySubsystem.xml
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/policy_criteria.txt b/services/audiopolicy/engineconfigurable/parameter-framework/examples/policy_criteria.txt
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.mk b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.mk
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/InputSource.cpp b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/InputSource.cpp
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/InputSource.h b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/InputSource.h
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/PolicyMappingKeys.h b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/PolicyMappingKeys.h
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/PolicySubsystem.cpp b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/PolicySubsystem.cpp
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/PolicySubsystem.h b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/PolicySubsystem.h
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/PolicySubsystemBuilder.cpp b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/PolicySubsystemBuilder.cpp
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Strategy.cpp b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Strategy.cpp
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Strategy.h b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Strategy.h
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Stream.cpp b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Stream.cpp
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Stream.h b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Stream.h
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Usage.cpp b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Usage.cpp
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Usage.h b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Usage.h
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/src/Collection.h b/services/audiopolicy/engineconfigurable/src/Collection.h
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/src/Element.h b/services/audiopolicy/engineconfigurable/src/Element.h
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.cpp b/services/audiopolicy/engineconfigurable/src/Engine.cpp
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.h b/services/audiopolicy/engineconfigurable/src/Engine.h
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/src/EngineInstance.cpp b/services/audiopolicy/engineconfigurable/src/EngineInstance.cpp
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/src/InputSource.cpp b/services/audiopolicy/engineconfigurable/src/InputSource.cpp
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/src/InputSource.h b/services/audiopolicy/engineconfigurable/src/InputSource.h
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/src/Strategy.cpp b/services/audiopolicy/engineconfigurable/src/Strategy.cpp
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/src/Strategy.h b/services/audiopolicy/engineconfigurable/src/Strategy.h
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/src/Stream.cpp b/services/audiopolicy/engineconfigurable/src/Stream.cpp
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/src/Stream.h b/services/audiopolicy/engineconfigurable/src/Stream.h
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/src/Usage.cpp b/services/audiopolicy/engineconfigurable/src/Usage.cpp
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/src/Usage.h b/services/audiopolicy/engineconfigurable/src/Usage.h
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/wrapper/Android.mk b/services/audiopolicy/engineconfigurable/wrapper/Android.mk
index f4283a8..3cc112f 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/Android.mk
+++ b/services/audiopolicy/engineconfigurable/wrapper/Android.mk
@@ -15,8 +15,7 @@
LOCAL_SRC_FILES:= ParameterManagerWrapper.cpp
-LOCAL_STATIC_LIBRARIES := \
- libmedia_helper \
+LOCAL_WHOLE_STATIC_LIBRARIES := libmedia_helper
LOCAL_MULTILIB := $(AUDIOSERVER_MULTILIB)
diff --git a/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp b/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp
old mode 100755
new mode 100644
index 6872e52..8d51293
--- a/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp
+++ b/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp
@@ -22,7 +22,7 @@
#include <ParameterMgrPlatformConnector.h>
#include <SelectionCriterionTypeInterface.h>
#include <SelectionCriterionInterface.h>
-#include <convert.h>
+#include <media/convert.h>
#include <algorithm>
#include <cutils/config_utils.h>
#include <cutils/misc.h>
diff --git a/services/audiopolicy/engineconfigurable/wrapper/audio_policy_criteria_conf.h b/services/audiopolicy/engineconfigurable/wrapper/audio_policy_criteria_conf.h
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/wrapper/config/audio_policy_criteria.conf b/services/audiopolicy/engineconfigurable/wrapper/config/audio_policy_criteria.conf
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h b/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/enginedefault/Android.mk b/services/audiopolicy/enginedefault/Android.mk
old mode 100755
new mode 100644
index e6de8ae..c1bb3fb
--- a/services/audiopolicy/enginedefault/Android.mk
+++ b/services/audiopolicy/enginedefault/Android.mk
@@ -34,8 +34,9 @@
LOCAL_MODULE := libaudiopolicyenginedefault
LOCAL_MODULE_TAGS := optional
+LOCAL_WHOLE_STATIC_LIBRARIES := libmedia_helper
+
LOCAL_STATIC_LIBRARIES := \
- libmedia_helper \
libaudiopolicycomponents \
libxml2
diff --git a/services/audiopolicy/enginedefault/include/AudioPolicyEngineInstance.h b/services/audiopolicy/enginedefault/include/AudioPolicyEngineInstance.h
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
old mode 100755
new mode 100644
index d31429c..5f0557c
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -320,8 +320,7 @@
if (((availableInputDevices.types() &
AUDIO_DEVICE_IN_TELEPHONY_RX & ~AUDIO_DEVICE_BIT_IN) == 0) ||
(((txDevice & availPrimaryInputDevices & ~AUDIO_DEVICE_BIT_IN) != 0) &&
- (primaryOutput->getAudioPort()->getModuleVersion() <
- AUDIO_DEVICE_API_VERSION_3_0))) {
+ (primaryOutput->getAudioPort()->getModuleVersionMajor() < 3))) {
availableOutputDevicesType = availPrimaryOutputDevices;
}
}
diff --git a/services/audiopolicy/enginedefault/src/Engine.h b/services/audiopolicy/enginedefault/src/Engine.h
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/enginedefault/src/EngineInstance.cpp b/services/audiopolicy/enginedefault/src/EngineInstance.cpp
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index a8bdf86..2f01b02 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -33,11 +33,11 @@
#include <AudioPolicyEngineInstance.h>
#include <cutils/properties.h>
#include <utils/Log.h>
-#include <hardware/audio.h>
-#include <hardware/audio_effect.h>
#include <media/AudioParameter.h>
#include <media/AudioPolicyHelper.h>
#include <soundtrigger/SoundTrigger.h>
+#include <system/audio.h>
+#include <audio_policy_conf.h>
#include "AudioPolicyManager.h"
#ifndef USE_XML_AUDIO_POLICY_CONF
#include <ConfigParsingUtils.h>
@@ -70,7 +70,7 @@
{
AudioParameter param(device_address);
const String8 key(state == AUDIO_POLICY_DEVICE_STATE_AVAILABLE ?
- AUDIO_PARAMETER_DEVICE_CONNECT : AUDIO_PARAMETER_DEVICE_DISCONNECT);
+ AudioParameter::keyStreamConnect : AudioParameter::keyStreamDisconnect);
param.addInt(key, device);
mpClientInterface->setParameters(AUDIO_IO_HANDLE_NONE, param.toString());
}
@@ -448,15 +448,17 @@
// FIXME: would be better to refine to only inputs whose profile connects to the
// call TX device but this information is not in the audio patch and logic here must be
// symmetric to the one in startInput()
- audio_io_handle_t activeInput = mInputs.getActiveInput();
- if (activeInput != 0) {
- sp<AudioInputDescriptor> activeDesc = mInputs.valueFor(activeInput);
- if (activeDesc->getModuleHandle() == txSourceDeviceDesc->getModuleHandle()) {
- //FIXME: consider all active sessions
- AudioSessionCollection activeSessions = activeDesc->getActiveAudioSessions();
- audio_session_t activeSession = activeSessions.keyAt(0);
- stopInput(activeInput, activeSession);
- releaseInput(activeInput, activeSession);
+ Vector<sp <AudioInputDescriptor> > activeInputs = mInputs.getActiveInputs();
+ for (size_t i = 0; i < activeInputs.size(); i++) {
+ sp<AudioInputDescriptor> activeDesc = activeInputs[i];
+ if (activeDesc->hasSameHwModuleAs(txSourceDeviceDesc)) {
+ AudioSessionCollection activeSessions =
+ activeDesc->getAudioSessions(true /*activeOnly*/);
+ for (size_t j = 0; j < activeSessions.size(); j++) {
+ audio_session_t activeSession = activeSessions.keyAt(j);
+ stopInput(activeDesc->mIoHandle, activeSession);
+ releaseInput(activeDesc->mIoHandle, activeSession);
+ }
}
}
@@ -627,15 +629,16 @@
}
}
- audio_io_handle_t activeInput = mInputs.getActiveInput();
- if (activeInput != 0) {
- sp<AudioInputDescriptor> activeDesc = mInputs.valueFor(activeInput);
- audio_devices_t newDevice = getNewInputDevice(activeInput);
+ Vector<sp <AudioInputDescriptor> > activeInputs = mInputs.getActiveInputs();
+ for (size_t i = 0; i < activeInputs.size(); i++) {
+ sp<AudioInputDescriptor> activeDesc = activeInputs[i];
+ audio_devices_t newDevice = getNewInputDevice(activeDesc);
// Force new input selection if the new device can not be reached via current input
- if (activeDesc->mProfile->getSupportedDevices().types() & (newDevice & ~AUDIO_DEVICE_BIT_IN)) {
- setInputDevice(activeInput, newDevice);
+ if (activeDesc->mProfile->getSupportedDevices().types() &
+ (newDevice & ~AUDIO_DEVICE_BIT_IN)) {
+ setInputDevice(activeDesc->mIoHandle, newDevice);
} else {
- closeInput(activeInput);
+ closeInput(activeDesc->mIoHandle);
}
}
}
@@ -1421,6 +1424,7 @@
*input = AUDIO_IO_HANDLE_NONE;
*inputType = API_INPUT_INVALID;
+
audio_devices_t device;
// handle legacy remote submix case where the address was not always specified
String8 address = String8("");
@@ -1564,14 +1568,22 @@
isSoundTrigger,
policyMix, mpClientInterface);
-// TODO enable input reuse
-#if 0
+
// reuse an open input if possible
for (size_t i = 0; i < mInputs.size(); i++) {
sp<AudioInputDescriptor> desc = mInputs.valueAt(i);
- // reuse input if it shares the same profile and same sound trigger attribute
- if (profile == desc->mProfile &&
- isSoundTrigger == desc->isSoundTrigger()) {
+ // reuse input if:
+ // - it shares the same profile
+ // AND
+ // - it is not a reroute submix input
+ // AND
+ // - it is: not used for sound trigger
+ // OR
+ // used for sound trigger and all clients use the same session ID
+ //
+ if ((profile == desc->mProfile) &&
+ (isSoundTrigger == desc->isSoundTrigger()) &&
+ !is_virtual_input_device(device)) {
sp<AudioSession> as = desc->getAudioSession(session);
if (as != 0) {
@@ -1581,16 +1593,33 @@
} else {
ALOGW("getInputForDevice() record with different attributes"
" exists for session %d", session);
- return input;
+ break;
}
+ } else if (isSoundTrigger) {
+ break;
+ }
+ // force close input if current source is now the highest priority request on this input
+ // and current input properties are not exactly as requested.
+ if ((desc->mSamplingRate != samplingRate ||
+ desc->mChannelMask != channelMask ||
+ !audio_formats_match(desc->mFormat, format)) &&
+ (source_priority(desc->getHighestPrioritySource(false /*activeOnly*/)) <
+ source_priority(inputSource))) {
+ ALOGV("%s: ", __FUNCTION__);
+ AudioSessionCollection sessions = desc->getAudioSessions(false /*activeOnly*/);
+ for (size_t j = 0; j < sessions.size(); j++) {
+ audio_session_t currentSession = sessions.keyAt(j);
+ stopInput(desc->mIoHandle, currentSession);
+ releaseInput(desc->mIoHandle, currentSession);
+ }
+ break;
} else {
desc->addAudioSession(session, audioSession);
+ ALOGV("%s: reusing input %d", __FUNCTION__, mInputs.keyAt(i));
+ return mInputs.keyAt(i);
}
- ALOGV("getInputForDevice() reusing input %d", mInputs.keyAt(i));
- return mInputs.keyAt(i);
}
}
-#endif
audio_config_t config = AUDIO_CONFIG_INITIALIZER;
config.sample_rate = profileSamplingRate;
@@ -1633,10 +1662,50 @@
return input;
}
+bool AudioPolicyManager::isConcurentCaptureAllowed(const sp<AudioInputDescriptor>& inputDesc,
+ const sp<AudioSession>& audioSession)
+{
+ // Do not allow capture if an active voice call is using a software patch and
+ // the call TX source device is on the same HW module.
+ // FIXME: would be better to refine to only inputs whose profile connects to the
+ // call TX device but this information is not in the audio patch
+ if (mCallTxPatch != 0 &&
+ inputDesc->getModuleHandle() == mCallTxPatch->mPatch.sources[0].ext.device.hw_module) {
+ return false;
+ }
+
+ // starting concurrent capture is enabled if:
+ // 1) capturing for re-routing
+ // 2) capturing for HOTWORD source
+ // 3) capturing for FM TUNER source
+ // 4) All other active captures are either for re-routing or HOTWORD
+
+ if (is_virtual_input_device(inputDesc->mDevice) ||
+ audioSession->inputSource() == AUDIO_SOURCE_HOTWORD ||
+ audioSession->inputSource() == AUDIO_SOURCE_FM_TUNER) {
+ return true;
+ }
+
+ Vector< sp<AudioInputDescriptor> > activeInputs = mInputs.getActiveInputs();
+ for (size_t i = 0; i < activeInputs.size(); i++) {
+ sp<AudioInputDescriptor> activeInput = activeInputs[i];
+ if ((activeInput->inputSource() != AUDIO_SOURCE_HOTWORD) &&
+ (activeInput->inputSource() != AUDIO_SOURCE_FM_TUNER) &&
+ !is_virtual_input_device(activeInput->mDevice)) {
+ return false;
+ }
+ }
+
+ return true;
+}
+
+
status_t AudioPolicyManager::startInput(audio_io_handle_t input,
- audio_session_t session)
+ audio_session_t session,
+ concurrency_type__mask_t *concurrency)
{
ALOGV("startInput() input %d", input);
+ *concurrency = API_INPUT_CONCURRENCY_NONE;
ssize_t index = mInputs.indexOfKey(input);
if (index < 0) {
ALOGW("startInput() unknown input %d", input);
@@ -1650,86 +1719,66 @@
return BAD_VALUE;
}
- // virtual input devices are compatible with other input devices
- if (!is_virtual_input_device(inputDesc->mDevice)) {
-
- // for a non-virtual input device, check if there is another (non-virtual) active input
- audio_io_handle_t activeInput = mInputs.getActiveInput();
- if (activeInput != 0 && activeInput != input) {
-
- // If the already active input uses AUDIO_SOURCE_HOTWORD then it is closed,
- // otherwise the active input continues and the new input cannot be started.
- sp<AudioInputDescriptor> activeDesc = mInputs.valueFor(activeInput);
- if ((activeDesc->inputSource() == AUDIO_SOURCE_HOTWORD) &&
- !activeDesc->hasPreemptedSession(session)) {
- ALOGW("startInput(%d) preempting low-priority input %d", input, activeInput);
- //FIXME: consider all active sessions
- AudioSessionCollection activeSessions = activeDesc->getActiveAudioSessions();
- audio_session_t activeSession = activeSessions.keyAt(0);
- SortedVector<audio_session_t> sessions =
- activeDesc->getPreemptedSessions();
- sessions.add(activeSession);
- inputDesc->setPreemptedSessions(sessions);
- stopInput(activeInput, activeSession);
- releaseInput(activeInput, activeSession);
- } else {
- ALOGE("startInput(%d) failed: other input %d already started", input, activeInput);
- return INVALID_OPERATION;
- }
- }
-
- // Do not allow capture if an active voice call is using a software patch and
- // the call TX source device is on the same HW module.
- // FIXME: would be better to refine to only inputs whose profile connects to the
- // call TX device but this information is not in the audio patch
- if (mCallTxPatch != 0 &&
- inputDesc->getModuleHandle() == mCallTxPatch->mPatch.sources[0].ext.device.hw_module) {
- return INVALID_OPERATION;
- }
+ if (!isConcurentCaptureAllowed(inputDesc, audioSession)) {
+ ALOGW("startInput(%d) failed: other input already started", input);
+ return INVALID_OPERATION;
}
+ if (isInCall()) {
+ *concurrency |= API_INPUT_CONCURRENCY_CALL;
+ }
+ if (mInputs.activeInputsCountOnDevices() != 0) {
+ *concurrency |= API_INPUT_CONCURRENCY_CAPTURE;
+ }
+
+ // increment activity count before calling getNewInputDevice() below as only active sessions
+ // are considered for device selection
+ audioSession->changeActiveCount(1);
+
// Routing?
mInputRoutes.incRouteActivity(session);
- if (!inputDesc->isActive() || mInputRoutes.hasRouteChanged(session)) {
- // if input maps to a dynamic policy with an activity listener, notify of state change
- if ((inputDesc->mPolicyMix != NULL)
- && ((inputDesc->mPolicyMix->mCbFlags & AudioMix::kCbFlagNotifyActivity) != 0)) {
- mpClientInterface->onDynamicPolicyMixStateUpdate(inputDesc->mPolicyMix->mDeviceAddress,
- MIX_STATE_MIXING);
- }
-
+ if (audioSession->activeCount() == 1 || mInputRoutes.hasRouteChanged(session)) {
// indicate active capture to sound trigger service if starting capture from a mic on
// primary HW module
- audio_devices_t device = getNewInputDevice(input);
- audio_devices_t primaryInputDevices = availablePrimaryInputDevices();
- if (((device & primaryInputDevices & ~AUDIO_DEVICE_BIT_IN) != 0) &&
- mInputs.activeInputsCountOnDevices(primaryInputDevices) == 0) {
- SoundTrigger::setCaptureState(true);
- }
+ audio_devices_t device = getNewInputDevice(inputDesc);
setInputDevice(input, device, true /* force */);
- // automatically enable the remote submix output when input is started if not
- // used by a policy mix of type MIX_TYPE_RECORDERS
- // For remote submix (a virtual device), we open only one input per capture request.
- if (audio_is_remote_submix_device(inputDesc->mDevice)) {
- String8 address = String8("");
- if (inputDesc->mPolicyMix == NULL) {
- address = String8("0");
- } else if (inputDesc->mPolicyMix->mMixType == MIX_TYPE_PLAYERS) {
- address = inputDesc->mPolicyMix->mDeviceAddress;
+ if (inputDesc->getAudioSessionCount(true/*activeOnly*/) == 1) {
+ // if input maps to a dynamic policy with an activity listener, notify of state change
+ if ((inputDesc->mPolicyMix != NULL)
+ && ((inputDesc->mPolicyMix->mCbFlags & AudioMix::kCbFlagNotifyActivity) != 0)) {
+ mpClientInterface->onDynamicPolicyMixStateUpdate(inputDesc->mPolicyMix->mDeviceAddress,
+ MIX_STATE_MIXING);
}
- if (address != "") {
- setDeviceConnectionStateInt(AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
- AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
- address, "remote-submix");
+
+ audio_devices_t primaryInputDevices = availablePrimaryInputDevices();
+ if (((device & primaryInputDevices & ~AUDIO_DEVICE_BIT_IN) != 0) &&
+ mInputs.activeInputsCountOnDevices(primaryInputDevices) == 0) {
+ SoundTrigger::setCaptureState(true);
+ }
+
+ // automatically enable the remote submix output when input is started if not
+ // used by a policy mix of type MIX_TYPE_RECORDERS
+ // For remote submix (a virtual device), we open only one input per capture request.
+ if (audio_is_remote_submix_device(inputDesc->mDevice)) {
+ String8 address = String8("");
+ if (inputDesc->mPolicyMix == NULL) {
+ address = String8("0");
+ } else if (inputDesc->mPolicyMix->mMixType == MIX_TYPE_PLAYERS) {
+ address = inputDesc->mPolicyMix->mDeviceAddress;
+ }
+ if (address != "") {
+ setDeviceConnectionStateInt(AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
+ AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+ address, "remote-submix");
+ }
}
}
}
ALOGV("AudioPolicyManager::startInput() input source = %d", audioSession->inputSource());
- audioSession->changeActiveCount(1);
return NO_ERROR;
}
@@ -1760,41 +1809,46 @@
// Routing?
mInputRoutes.decRouteActivity(session);
- if (!inputDesc->isActive()) {
- // if input maps to a dynamic policy with an activity listener, notify of state change
- if ((inputDesc->mPolicyMix != NULL)
- && ((inputDesc->mPolicyMix->mCbFlags & AudioMix::kCbFlagNotifyActivity) != 0)) {
- mpClientInterface->onDynamicPolicyMixStateUpdate(inputDesc->mPolicyMix->mDeviceAddress,
- MIX_STATE_IDLE);
- }
+ if (audioSession->activeCount() == 0) {
- // automatically disable the remote submix output when input is stopped if not
- // used by a policy mix of type MIX_TYPE_RECORDERS
- if (audio_is_remote_submix_device(inputDesc->mDevice)) {
- String8 address = String8("");
- if (inputDesc->mPolicyMix == NULL) {
- address = String8("0");
- } else if (inputDesc->mPolicyMix->mMixType == MIX_TYPE_PLAYERS) {
- address = inputDesc->mPolicyMix->mDeviceAddress;
+ if (inputDesc->isActive()) {
+ setInputDevice(input, getNewInputDevice(inputDesc), false /* force */);
+ } else {
+ // if input maps to a dynamic policy with an activity listener, notify of state change
+ if ((inputDesc->mPolicyMix != NULL)
+ && ((inputDesc->mPolicyMix->mCbFlags & AudioMix::kCbFlagNotifyActivity) != 0)) {
+ mpClientInterface->onDynamicPolicyMixStateUpdate(inputDesc->mPolicyMix->mDeviceAddress,
+ MIX_STATE_IDLE);
}
- if (address != "") {
- setDeviceConnectionStateInt(AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
- AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
- address, "remote-submix");
+
+ // automatically disable the remote submix output when input is stopped if not
+ // used by a policy mix of type MIX_TYPE_RECORDERS
+ if (audio_is_remote_submix_device(inputDesc->mDevice)) {
+ String8 address = String8("");
+ if (inputDesc->mPolicyMix == NULL) {
+ address = String8("0");
+ } else if (inputDesc->mPolicyMix->mMixType == MIX_TYPE_PLAYERS) {
+ address = inputDesc->mPolicyMix->mDeviceAddress;
+ }
+ if (address != "") {
+ setDeviceConnectionStateInt(AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
+ AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+ address, "remote-submix");
+ }
}
- }
- audio_devices_t device = inputDesc->mDevice;
- resetInputDevice(input);
+ audio_devices_t device = inputDesc->mDevice;
+ resetInputDevice(input);
- // indicate inactive capture to sound trigger service if stopping capture from a mic on
- // primary HW module
- audio_devices_t primaryInputDevices = availablePrimaryInputDevices();
- if (((device & primaryInputDevices & ~AUDIO_DEVICE_BIT_IN) != 0) &&
- mInputs.activeInputsCountOnDevices(primaryInputDevices) == 0) {
- SoundTrigger::setCaptureState(false);
+ // indicate inactive capture to sound trigger service if stopping capture from a mic on
+ // primary HW module
+ audio_devices_t primaryInputDevices = availablePrimaryInputDevices();
+ if (((device & primaryInputDevices & ~AUDIO_DEVICE_BIT_IN) != 0) &&
+ mInputs.activeInputsCountOnDevices(primaryInputDevices) == 0) {
+ SoundTrigger::setCaptureState(false);
+ }
+ inputDesc->clearPreemptedSessions();
}
- inputDesc->clearPreemptedSessions();
}
return NO_ERROR;
}
@@ -2276,7 +2330,9 @@
snprintf(buffer, SIZE, " Primary Output: %d\n",
hasPrimaryOutput() ? mPrimaryOutput->mIoHandle : AUDIO_IO_HANDLE_NONE);
result.append(buffer);
- snprintf(buffer, SIZE, " Phone state: %d\n", mEngine->getPhoneState());
+ std::string stateLiteral;
+ AudioModeConverter::toString(mEngine->getPhoneState(), stateLiteral);
+ snprintf(buffer, SIZE, " Phone state: %s\n", stateLiteral.c_str());
result.append(buffer);
snprintf(buffer, SIZE, " Force use for communications %d\n",
mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_COMMUNICATION));
@@ -2665,8 +2721,8 @@
// create a software bridge in PatchPanel if:
// - source and sink devices are on different HW modules OR
// - audio HAL version is < 3.0
- if ((srcDeviceDesc->getModuleHandle() != sinkDeviceDesc->getModuleHandle()) ||
- (srcDeviceDesc->mModule->getHalVersion() < AUDIO_DEVICE_API_VERSION_3_0)) {
+ if (!srcDeviceDesc->hasSameHwModuleAs(sinkDeviceDesc) ||
+ (srcDeviceDesc->mModule->getHalVersionMajor() < 3)) {
// support only one sink device for now to simplify output selection logic
if (patch->num_sinks > 1) {
return INVALID_OPERATION;
@@ -2765,7 +2821,7 @@
return BAD_VALUE;
}
setInputDevice(inputDesc->mIoHandle,
- getNewInputDevice(inputDesc->mIoHandle),
+ getNewInputDevice(inputDesc),
true,
NULL);
} else if (patch->sinks[0].type == AUDIO_PORT_TYPE_DEVICE) {
@@ -2970,7 +3026,7 @@
status_t AudioPolicyManager::startAudioSource(const struct audio_port_config *source,
const audio_attributes_t *attributes,
- audio_io_handle_t *handle,
+ audio_patch_handle_t *handle,
uid_t uid)
{
ALOGV("%s source %p attributes %p handle %p", __FUNCTION__, source, attributes, handle);
@@ -2978,7 +3034,7 @@
return BAD_VALUE;
}
- *handle = AUDIO_IO_HANDLE_NONE;
+ *handle = AUDIO_PATCH_HANDLE_NONE;
if (source->role != AUDIO_PORT_ROLE_SOURCE ||
source->type != AUDIO_PORT_TYPE_DEVICE) {
@@ -3028,7 +3084,7 @@
if (srcDeviceDesc->getAudioPort()->mModule->getHandle() ==
sinkDeviceDesc->getAudioPort()->mModule->getHandle() &&
- srcDeviceDesc->getAudioPort()->mModule->getHalVersion() >= AUDIO_DEVICE_API_VERSION_3_0 &&
+ srcDeviceDesc->getAudioPort()->mModule->getHalVersionMajor() >= 3 &&
srcDeviceDesc->getAudioPort()->mGains.size() > 0) {
ALOGV("%s AUDIO_DEVICE_API_VERSION_3_0", __FUNCTION__);
// create patch between src device and output device
@@ -3085,7 +3141,7 @@
return NO_ERROR;
}
-status_t AudioPolicyManager::stopAudioSource(audio_io_handle_t handle __unused)
+status_t AudioPolicyManager::stopAudioSource(audio_patch_handle_t handle __unused)
{
sp<AudioSourceDescriptor> sourceDesc = mAudioSources.valueFor(handle);
ALOGV("%s handle %d", __FUNCTION__, handle);
@@ -3539,7 +3595,7 @@
mTestFormat = format;
} else if (mTestOutputs[mCurOutput] != 0) {
AudioParameter outputParam = AudioParameter();
- outputParam.addInt(String8("format"), format);
+ outputParam.addInt(String8(AudioParameter::keyStreamSupportedFormats), format);
mpClientInterface->setParameters(mTestOutputs[mCurOutput], outputParam.toString());
}
}
@@ -3558,7 +3614,7 @@
mTestChannels = channels;
} else if (mTestOutputs[mCurOutput] != 0) {
AudioParameter outputParam = AudioParameter();
- outputParam.addInt(String8("channels"), channels);
+ outputParam.addInt(String8(AudioParameter::keyStreamSupportedChannels), channels);
mpClientInterface->setParameters(mTestOutputs[mCurOutput], outputParam.toString());
}
}
@@ -3571,7 +3627,7 @@
mTestSamplingRate = samplingRate;
} else if (mTestOutputs[mCurOutput] != 0) {
AudioParameter outputParam = AudioParameter();
- outputParam.addInt(String8("sampling_rate"), samplingRate);
+ outputParam.addInt(String8(AudioParameter::keyStreamSupportedSamplingRates), samplingRate);
mpClientInterface->setParameters(mTestOutputs[mCurOutput], outputParam.toString());
}
}
@@ -4173,7 +4229,7 @@
ALOGVV("getOutputsForDevice() device %04x", device);
for (size_t i = 0; i < openOutputs.size(); i++) {
- ALOGVV("output %d isDuplicated=%d device=%04x",
+ ALOGVV("output %zu isDuplicated=%d device=%04x",
i, openOutputs.valueAt(i)->isDuplicated(),
openOutputs.valueAt(i)->supportedDevices());
if ((device & openOutputs.valueAt(i)->supportedDevices()) == device) {
@@ -4295,33 +4351,36 @@
((mAvailableInputDevices.types() & AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET &
~AUDIO_DEVICE_BIT_IN) != 0) ||
((mAvailableOutputDevices.types() & AUDIO_DEVICE_OUT_ALL_SCO) != 0);
- // suspend A2DP output if:
- // (NOT already suspended) &&
- // ((SCO device is connected &&
- // (forced usage for communication || for record is SCO))) ||
- // (phone state is ringing || in call)
+
+ // if suspended, restore A2DP output if:
+ // ((SCO device is NOT connected) ||
+ // ((forced usage communication is NOT SCO) && (forced usage for record is NOT SCO) &&
+ // (phone state is NOT in call) && (phone state is NOT ringing)))
//
- // restore A2DP output if:
- // (Already suspended) &&
- // ((SCO device is NOT connected ||
- // (forced usage NOT for communication && NOT for record is SCO))) &&
- // (phone state is NOT ringing && NOT in call)
+ // if not suspended, suspend A2DP output if:
+ // (SCO device is connected) &&
+ // ((forced usage for communication is SCO) || (forced usage for record is SCO) ||
+ // ((phone state is in call) || (phone state is ringing)))
//
if (mA2dpSuspended) {
- if ((!isScoConnected ||
- ((mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_COMMUNICATION) != AUDIO_POLICY_FORCE_BT_SCO) &&
- (mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_RECORD) != AUDIO_POLICY_FORCE_BT_SCO))) &&
- ((mEngine->getPhoneState() != AUDIO_MODE_IN_CALL) &&
+ if (!isScoConnected ||
+ ((mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_COMMUNICATION) !=
+ AUDIO_POLICY_FORCE_BT_SCO) &&
+ (mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_RECORD) !=
+ AUDIO_POLICY_FORCE_BT_SCO) &&
+ (mEngine->getPhoneState() != AUDIO_MODE_IN_CALL) &&
(mEngine->getPhoneState() != AUDIO_MODE_RINGTONE))) {
mpClientInterface->restoreOutput(a2dpOutput);
mA2dpSuspended = false;
}
} else {
- if ((isScoConnected &&
- ((mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_COMMUNICATION) == AUDIO_POLICY_FORCE_BT_SCO) ||
- (mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_RECORD) == AUDIO_POLICY_FORCE_BT_SCO))) ||
- ((mEngine->getPhoneState() == AUDIO_MODE_IN_CALL) ||
+ if (isScoConnected &&
+ ((mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_COMMUNICATION) ==
+ AUDIO_POLICY_FORCE_BT_SCO) ||
+ (mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_RECORD) ==
+ AUDIO_POLICY_FORCE_BT_SCO) ||
+ (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL) ||
(mEngine->getPhoneState() == AUDIO_MODE_RINGTONE))) {
mpClientInterface->suspendOutput(a2dpOutput);
@@ -4392,9 +4451,9 @@
return device;
}
-audio_devices_t AudioPolicyManager::getNewInputDevice(audio_io_handle_t input)
+audio_devices_t AudioPolicyManager::getNewInputDevice(const sp<AudioInputDescriptor>& inputDesc)
{
- sp<AudioInputDescriptor> inputDesc = mInputs.valueFor(input);
+ audio_devices_t device = AUDIO_DEVICE_NONE;
ssize_t index = mAudioPatches.indexOfKey(inputDesc->getPatchHandle());
if (index >= 0) {
@@ -4406,7 +4465,12 @@
}
}
- audio_devices_t device = getDeviceAndMixForInputSource(inputDesc->inputSource());
+ audio_source_t source = inputDesc->getHighestPrioritySource(true /*activeOnly*/);
+ if (isInCall()) {
+ device = getDeviceAndMixForInputSource(AUDIO_SOURCE_VOICE_COMMUNICATION);
+ } else if (source != AUDIO_SOURCE_DEFAULT) {
+ device = getDeviceAndMixForInputSource(source);
+ }
return device;
}
@@ -4614,7 +4678,7 @@
== AUDIO_DEVICE_NONE) {
continue;
}
- ALOGVV("checkDeviceMuteStrategies() %s strategy %d (curDevice %04x)",
+ ALOGVV("checkDeviceMuteStrategies() %s strategy %zu (curDevice %04x)",
mute ? "muting" : "unmuting", i, curDevice);
setStrategyMute((routing_strategy)i, mute, desc, mute ? 0 : delayMs);
if (isStrategyActive(desc, (routing_strategy)i)) {
@@ -5481,12 +5545,12 @@
// Format MUST be checked first to update the list of AudioProfile
if (profiles.hasDynamicFormat()) {
- reply = mpClientInterface->getParameters(ioHandle,
- String8(AUDIO_PARAMETER_STREAM_SUP_FORMATS));
+ reply = mpClientInterface->getParameters(
+ ioHandle, String8(AudioParameter::keyStreamSupportedFormats));
ALOGV("%s: supported formats %s", __FUNCTION__, reply.string());
AudioParameter repliedParameters(reply);
if (repliedParameters.get(
- String8(AUDIO_PARAMETER_STREAM_SUP_FORMATS), reply) != NO_ERROR) {
+ String8(AudioParameter::keyStreamSupportedFormats), reply) != NO_ERROR) {
ALOGE("%s: failed to retrieve format, bailing out", __FUNCTION__);
return;
}
@@ -5503,27 +5567,28 @@
ChannelsVector channelMasks;
SampleRateVector samplingRates;
AudioParameter requestedParameters;
- requestedParameters.addInt(String8(AUDIO_PARAMETER_STREAM_FORMAT), format);
+ requestedParameters.addInt(String8(AudioParameter::keyFormat), format);
if (profiles.hasDynamicRateFor(format)) {
- reply = mpClientInterface->getParameters(ioHandle,
- requestedParameters.toString() + ";" +
- AUDIO_PARAMETER_STREAM_SUP_SAMPLING_RATES);
+ reply = mpClientInterface->getParameters(
+ ioHandle,
+ requestedParameters.toString() + ";" +
+ AudioParameter::keyStreamSupportedSamplingRates);
ALOGV("%s: supported sampling rates %s", __FUNCTION__, reply.string());
AudioParameter repliedParameters(reply);
if (repliedParameters.get(
- String8(AUDIO_PARAMETER_STREAM_SUP_SAMPLING_RATES), reply) == NO_ERROR) {
+ String8(AudioParameter::keyStreamSupportedSamplingRates), reply) == NO_ERROR) {
samplingRates = samplingRatesFromString(reply.string());
}
}
if (profiles.hasDynamicChannelsFor(format)) {
reply = mpClientInterface->getParameters(ioHandle,
requestedParameters.toString() + ";" +
- AUDIO_PARAMETER_STREAM_SUP_CHANNELS);
+ AudioParameter::keyStreamSupportedChannels);
ALOGV("%s: supported channel masks %s", __FUNCTION__, reply.string());
AudioParameter repliedParameters(reply);
if (repliedParameters.get(
- String8(AUDIO_PARAMETER_STREAM_SUP_CHANNELS), reply) == NO_ERROR) {
+ String8(AudioParameter::keyStreamSupportedChannels), reply) == NO_ERROR) {
channelMasks = channelMasksFromString(reply.string());
if (device == AUDIO_DEVICE_OUT_HDMI) {
filterSurroundChannelMasks(&channelMasks);
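
The final AudioPolicyManager.cpp hunks switch the dynamic-profile queries from the raw AUDIO_PARAMETER_STREAM_SUP_* macros to the AudioParameter::keyStreamSupported* members. The sketch below illustrates the reply shape those queries parse, a '|'-separated value list that is then handed to samplingRatesFromString(); the key literal and the hand-rolled parser are assumptions for illustration, not the real AudioParameter implementation.

    #include <cstdint>
    #include <cstdio>
    #include <cstdlib>
    #include <string>
    #include <vector>

    // Conventional HAL key behind AudioParameter::keyStreamSupportedSamplingRates;
    // treat the literal as illustrative rather than authoritative.
    static const char kKeySupSamplingRates[] = "sup_sampling_rates";

    // Parse a "44100|48000|96000" style reply value, the list shape the manager
    // receives from getParameters() on the stream.
    static std::vector<uint32_t> ratesFromReply(const std::string &reply) {
        std::vector<uint32_t> rates;
        size_t start = 0;
        while (start <= reply.size()) {
            size_t end = reply.find('|', start);
            if (end == std::string::npos) end = reply.size();
            std::string token = reply.substr(start, end - start);
            if (!token.empty()) {
                rates.push_back(static_cast<uint32_t>(strtoul(token.c_str(), nullptr, 10)));
            }
            start = end + 1;
        }
        return rates;
    }

    int main() {
        printf("query key: %s\n", kKeySupSamplingRates);
        for (uint32_t rate : ratesFromReply("44100|48000|96000")) {
            printf("supported rate: %u\n", rate);
        }
        return 0;
    }
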
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 5c2b673..52fa082 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -139,7 +139,8 @@
// indicates to the audio policy manager that the input starts being used.
virtual status_t startInput(audio_io_handle_t input,
- audio_session_t session);
+ audio_session_t session,
+ concurrency_type__mask_t *concurrency);
// indicates to the audio policy manager that the input stops being used.
virtual status_t stopInput(audio_io_handle_t input,
@@ -227,9 +228,9 @@
virtual status_t startAudioSource(const struct audio_port_config *source,
const audio_attributes_t *attributes,
- audio_io_handle_t *handle,
+ audio_patch_handle_t *handle,
uid_t uid);
- virtual status_t stopAudioSource(audio_io_handle_t handle);
+ virtual status_t stopAudioSource(audio_patch_handle_t handle);
virtual status_t setMasterMono(bool mono);
virtual status_t getMasterMono(bool *mono);
@@ -405,7 +406,7 @@
void updateDevicesAndOutputs();
// selects the most appropriate device on input for current state
- audio_devices_t getNewInputDevice(audio_io_handle_t input);
+ audio_devices_t getNewInputDevice(const sp<AudioInputDescriptor>& inputDesc);
virtual uint32_t getMaxEffectsCpuLoad()
{
@@ -506,6 +507,8 @@
void clearAudioSources(uid_t uid);
+ bool isConcurentCaptureAllowed(const sp<AudioInputDescriptor>& inputDesc,
+ const sp<AudioSession>& audioSession);
static bool streamsMatchForvolume(audio_stream_type_t stream1,
audio_stream_type_t stream2);
@@ -659,7 +662,7 @@
const char *device_name);
void updateMono(audio_io_handle_t output) {
AudioParameter param;
- param.addInt(String8(AUDIO_PARAMETER_MONO_OUTPUT), (int)mMasterMono);
+ param.addInt(String8(AudioParameter::keyMonoOutput), (int)mMasterMono);
mpClientInterface->setParameters(output, param.toString());
}
};
diff --git a/services/audiopolicy/service/AudioPolicyClientImplLegacy.cpp b/services/audiopolicy/service/AudioPolicyClientImplLegacy.cpp
index dabffe6..aa228aa 100644
--- a/services/audiopolicy/service/AudioPolicyClientImplLegacy.cpp
+++ b/services/audiopolicy/service/AudioPolicyClientImplLegacy.cpp
@@ -34,7 +34,6 @@
#include "ServiceUtilities.h"
#include <hardware_legacy/power.h>
#include <media/AudioEffect.h>
-#include <media/EffectsFactoryApi.h>
//#include <media/IAudioFlinger.h>
#include <hardware/hardware.h>
diff --git a/services/audiopolicy/service/AudioPolicyEffects.cpp b/services/audiopolicy/service/AudioPolicyEffects.cpp
index b732b20..6586bea 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.cpp
+++ b/services/audiopolicy/service/AudioPolicyEffects.cpp
@@ -23,8 +23,7 @@
#include <cutils/misc.h>
#include <media/AudioEffect.h>
#include <system/audio.h>
-#include <hardware/audio_effect.h>
-#include <audio_effects/audio_effects_conf.h>
+#include <system/audio_effects/audio_effects_conf.h>
#include <utils/Vector.h>
#include <utils/SortedVector.h>
#include <cutils/config_utils.h>
@@ -57,11 +56,11 @@
}
mInputSources.clear();
- for (i = 0; i < mInputs.size(); i++) {
- mInputs.valueAt(i)->mEffects.clear();
- delete mInputs.valueAt(i);
+ for (i = 0; i < mInputSessions.size(); i++) {
+ mInputSessions.valueAt(i)->mEffects.clear();
+ delete mInputSessions.valueAt(i);
}
- mInputs.clear();
+ mInputSessions.clear();
// release audio output processing resources
for (i = 0; i < mOutputStreams.size(); i++) {
@@ -93,19 +92,19 @@
ALOGV("addInputEffects(): no processing needs to be attached to this source");
return status;
}
- ssize_t idx = mInputs.indexOfKey(input);
- EffectVector *inputDesc;
+ ssize_t idx = mInputSessions.indexOfKey(audioSession);
+ EffectVector *sessionDesc;
if (idx < 0) {
- inputDesc = new EffectVector(audioSession);
- mInputs.add(input, inputDesc);
+ sessionDesc = new EffectVector(audioSession);
+ mInputSessions.add(audioSession, sessionDesc);
} else {
// EffectVector is existing and we just need to increase ref count
- inputDesc = mInputs.valueAt(idx);
+ sessionDesc = mInputSessions.valueAt(idx);
}
- inputDesc->mRefCount++;
+ sessionDesc->mRefCount++;
- ALOGV("addInputEffects(): input: %d, refCount: %d", input, inputDesc->mRefCount);
- if (inputDesc->mRefCount == 1) {
+ ALOGV("addInputEffects(): input: %d, refCount: %d", input, sessionDesc->mRefCount);
+ if (sessionDesc->mRefCount == 1) {
Vector <EffectDesc *> effects = mInputSources.valueAt(index)->mEffects;
for (size_t i = 0; i < effects.size(); i++) {
EffectDesc *effect = effects[i];
@@ -123,30 +122,31 @@
}
ALOGV("addInputEffects(): added Fx %s on source: %d",
effect->mName, (int32_t)aliasSource);
- inputDesc->mEffects.add(fx);
+ sessionDesc->mEffects.add(fx);
}
- inputDesc->setProcessorEnabled(true);
+ sessionDesc->setProcessorEnabled(true);
}
return status;
}
-status_t AudioPolicyEffects::releaseInputEffects(audio_io_handle_t input)
+status_t AudioPolicyEffects::releaseInputEffects(audio_io_handle_t input,
+ audio_session_t audioSession)
{
status_t status = NO_ERROR;
Mutex::Autolock _l(mLock);
- ssize_t index = mInputs.indexOfKey(input);
+ ssize_t index = mInputSessions.indexOfKey(audioSession);
if (index < 0) {
return status;
}
- EffectVector *inputDesc = mInputs.valueAt(index);
- inputDesc->mRefCount--;
- ALOGV("releaseInputEffects(): input: %d, refCount: %d", input, inputDesc->mRefCount);
- if (inputDesc->mRefCount == 0) {
- inputDesc->setProcessorEnabled(false);
- delete inputDesc;
- mInputs.removeItemsAt(index);
+ EffectVector *sessionDesc = mInputSessions.valueAt(index);
+ sessionDesc->mRefCount--;
+ ALOGV("releaseInputEffects(): input: %d, refCount: %d", input, sessionDesc->mRefCount);
+ if (sessionDesc->mRefCount == 0) {
+ sessionDesc->setProcessorEnabled(false);
+ delete sessionDesc;
+ mInputSessions.removeItemsAt(index);
ALOGV("releaseInputEffects(): all effects released");
}
return status;
@@ -160,16 +160,16 @@
Mutex::Autolock _l(mLock);
size_t index;
- for (index = 0; index < mInputs.size(); index++) {
- if (mInputs.valueAt(index)->mSessionId == audioSession) {
+ for (index = 0; index < mInputSessions.size(); index++) {
+ if (mInputSessions.valueAt(index)->mSessionId == audioSession) {
break;
}
}
- if (index == mInputs.size()) {
+ if (index == mInputSessions.size()) {
*count = 0;
return BAD_VALUE;
}
- Vector< sp<AudioEffect> > effects = mInputs.valueAt(index)->mEffects;
+ Vector< sp<AudioEffect> > effects = mInputSessions.valueAt(index)->mEffects;
for (size_t i = 0; i < effects.size(); i++) {
effect_descriptor_t desc = effects[i]->descriptor();
diff --git a/services/audiopolicy/service/AudioPolicyEffects.h b/services/audiopolicy/service/AudioPolicyEffects.h
index afdaf98..0c74d87 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.h
+++ b/services/audiopolicy/service/AudioPolicyEffects.h
@@ -23,7 +23,6 @@
#include <cutils/misc.h>
#include <media/AudioEffect.h>
#include <system/audio.h>
-#include <hardware/audio_effect.h>
#include <utils/Vector.h>
#include <utils/SortedVector.h>
@@ -62,7 +61,8 @@
audio_session_t audioSession);
// Add all input effects associated to this input
- status_t releaseInputEffects(audio_io_handle_t input);
+ status_t releaseInputEffects(audio_io_handle_t input,
+ audio_session_t audioSession);
// Return a list of effect descriptors for default output effects
@@ -178,12 +178,12 @@
size_t *curSize,
size_t *totSize);
- // protects access to mInputSources, mInputs, mOutputStreams, mOutputSessions
+ // protects access to mInputSources, mInputSessions, mOutputStreams, mOutputSessions
Mutex mLock;
// Automatic input effects are configured per audio_source_t
KeyedVector< audio_source_t, EffectDescVector* > mInputSources;
// Automatic input effects are unique for audio_io_handle_t
- KeyedVector< audio_io_handle_t, EffectVector* > mInputs;
+ KeyedVector< audio_session_t, EffectVector* > mInputSessions;
// Automatic output effects are organized per audio_stream_type_t
KeyedVector< audio_stream_type_t, EffectDescVector* > mOutputStreams;
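The AudioPolicyEffects change above re-keys the automatic input effects from the io handle to the audio session and reference-counts them per session, so the effect chain is created on the first addInputEffects() for a session and torn down on the last releaseInputEffects(). A minimal sketch of that bookkeeping pattern, using std::map in place of the framework's KeyedVector (types and names below are illustrative, not the framework API):

#include <cstdio>
#include <map>
#include <vector>

struct EffectVector {
    explicit EffectVector(int session) : sessionId(session) {}
    int sessionId;
    int refCount = 0;
    std::vector<int> effects;   // stands in for the attached effect objects
};

static std::map<int, EffectVector*> gInputSessions;  // keyed by audio session

void addInputEffects(int session) {
    auto it = gInputSessions.find(session);
    EffectVector* desc = (it == gInputSessions.end())
            ? gInputSessions.emplace(session, new EffectVector(session)).first->second
            : it->second;
    if (++desc->refCount == 1) {
        // first user of this session: actually instantiate and enable the effects
        desc->effects.push_back(/* effect id */ 1);
        std::printf("session %d: effects created\n", session);
    }
}

void releaseInputEffects(int session) {
    auto it = gInputSessions.find(session);
    if (it == gInputSessions.end()) return;
    if (--it->second->refCount == 0) {
        // last user gone: disable, delete and forget the whole vector
        delete it->second;
        gInputSessions.erase(it);
        std::printf("session %d: effects released\n", session);
    }
}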
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 451ce84..53c54bb 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -361,8 +361,23 @@
return NO_INIT;
}
Mutex::Autolock _l(mLock);
- return mAudioPolicyManager->startInput(input, session);
+ AudioPolicyInterface::concurrency_type__mask_t concurrency;
+ status_t status = mAudioPolicyManager->startInput(input, session, &concurrency);
+ if (status == NO_ERROR) {
+ LOG_ALWAYS_FATAL_IF(concurrency & ~AudioPolicyInterface::API_INPUT_CONCURRENCY_ALL,
+ "startInput(): invalid concurrency type %d", (int)concurrency);
+
+ // enforce permission (if any) required for each type of concurrency
+ if (concurrency & AudioPolicyInterface::API_INPUT_CONCURRENCY_CALL) {
+ //TODO: check incall capture permission
+ }
+ if (concurrency & AudioPolicyInterface::API_INPUT_CONCURRENCY_CAPTURE) {
+ //TODO: check concurrent capture permission
+ }
+ }
+
+ return status;
}
status_t AudioPolicyService::stopInput(audio_io_handle_t input,
@@ -390,7 +405,7 @@
}
if (audioPolicyEffects != 0) {
// release audio processors from the input
- status_t status = audioPolicyEffects->releaseInputEffects(input);
+ status_t status = audioPolicyEffects->releaseInputEffects(input, session);
if(status != NO_ERROR) {
ALOGW("Failed to release effects on input %d", input);
}
@@ -566,7 +581,8 @@
*count = 0;
return NO_INIT;
}
- return audioPolicyEffects->queryDefaultInputEffects(audioSession, descriptors, count);
+ return audioPolicyEffects->queryDefaultInputEffects(
+ (audio_session_t)audioSession, descriptors, count);
}
bool AudioPolicyService::isOffloadSupported(const audio_offload_info_t& info)
@@ -696,7 +712,7 @@
status_t AudioPolicyService::startAudioSource(const struct audio_port_config *source,
const audio_attributes_t *attributes,
- audio_io_handle_t *handle)
+ audio_patch_handle_t *handle)
{
Mutex::Autolock _l(mLock);
if (mAudioPolicyManager == NULL) {
@@ -707,7 +723,7 @@
IPCThreadState::self()->getCallingUid());
}
-status_t AudioPolicyService::stopAudioSource(audio_io_handle_t handle)
+status_t AudioPolicyService::stopAudioSource(audio_patch_handle_t handle)
{
Mutex::Autolock _l(mLock);
if (mAudioPolicyManager == NULL) {
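startInput() now also reports a concurrency bitmask, which the service validates against API_INPUT_CONCURRENCY_ALL before the (still TODO) per-type permission checks. A small sketch of consuming such a mask; the constant values below are illustrative stand-ins for AudioPolicyInterface::concurrency_type__mask_t, not copied from the header:

#include <cstdint>
#include <cstdio>

// Illustrative stand-ins for the concurrency mask bits used above.
enum : uint32_t {
    API_INPUT_CONCURRENCY_NONE    = 0,
    API_INPUT_CONCURRENCY_CALL    = 1 << 0,  // capture concurrent with a call
    API_INPUT_CONCURRENCY_CAPTURE = 1 << 1,  // capture concurrent with another capture
    API_INPUT_CONCURRENCY_ALL     = API_INPUT_CONCURRENCY_CALL | API_INPUT_CONCURRENCY_CAPTURE,
};

bool checkConcurrency(uint32_t concurrency) {
    if (concurrency & ~API_INPUT_CONCURRENCY_ALL) {
        std::printf("invalid concurrency type %u\n", (unsigned)concurrency);
        return false;
    }
    if (concurrency & API_INPUT_CONCURRENCY_CALL) {
        // would enforce the in-call capture permission here
    }
    if (concurrency & API_INPUT_CONCURRENCY_CAPTURE) {
        // would enforce the concurrent capture permission here
    }
    return true;
}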
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImplLegacy.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImplLegacy.cpp
index 946c380..a5b96fe 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImplLegacy.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImplLegacy.cpp
@@ -307,7 +307,7 @@
}
void AudioPolicyService::releaseInput(audio_io_handle_t input,
- audio_session_t session __unused)
+ audio_session_t session)
{
if (mpAudioPolicy == NULL) {
return;
@@ -321,7 +321,7 @@
}
if (audioPolicyEffects != 0) {
// release audio processors from the input
- status_t status = audioPolicyEffects->releaseInputEffects(input);
+ status_t status = audioPolicyEffects->releaseInputEffects(input, session);
if(status != NO_ERROR) {
ALOGW("Failed to release effects on input %d", input);
}
@@ -610,12 +610,12 @@
status_t AudioPolicyService::startAudioSource(const struct audio_port_config *source,
const audio_attributes_t *attributes,
- audio_io_handle_t *handle)
+ audio_patch_handle_t *handle)
{
return INVALID_OPERATION;
}
-status_t AudioPolicyService::stopAudioSource(audio_io_handle_t handle)
+status_t AudioPolicyService::stopAudioSource(audio_patch_handle_t handle)
{
return INVALID_OPERATION;
}
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 52ed73e..b6b6116 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -34,13 +34,15 @@
#include "ServiceUtilities.h"
#include <hardware_legacy/power.h>
#include <media/AudioEffect.h>
-#include <media/EffectsFactoryApi.h>
#include <media/AudioParameter.h>
-#include <hardware/hardware.h>
#include <system/audio.h>
#include <system/audio_policy.h>
+
+#ifdef USE_LEGACY_AUDIO_POLICY
+#include <hardware/hardware.h>
#include <hardware/audio_policy.h>
+#endif
namespace android {
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index def6405..a310735 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -26,12 +26,12 @@
#include <binder/BinderService.h>
#include <system/audio.h>
#include <system/audio_policy.h>
-#include <hardware/audio_policy.h>
#include <media/IAudioPolicyService.h>
#include <media/ToneGenerator.h>
#include <media/AudioEffect.h>
#include <media/AudioPolicy.h>
#ifdef USE_LEGACY_AUDIO_POLICY
+#include <hardware/audio_policy.h>
#include <hardware_legacy/AudioPolicyInterface.h>
#endif
#include "AudioPolicyEffects.h"
@@ -200,8 +200,8 @@
virtual status_t startAudioSource(const struct audio_port_config *source,
const audio_attributes_t *attributes,
- audio_io_handle_t *handle);
- virtual status_t stopAudioSource(audio_io_handle_t handle);
+ audio_patch_handle_t *handle);
+ virtual status_t stopAudioSource(audio_patch_handle_t handle);
virtual status_t setMasterMono(bool mono);
virtual status_t getMasterMono(bool *mono);
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index b4f8e21..5166eb5 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -1210,6 +1210,14 @@
}
}
+void CameraDeviceClient::notifyRequestQueueEmpty() {
+ // Thread safe. Don't bother locking.
+ sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb = getRemoteCallback();
+ if (remoteCb != 0) {
+ remoteCb->onRequestQueueEmpty();
+ }
+}
+
void CameraDeviceClient::detachDevice() {
if (mDevice == 0) return;
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index de283ea..68e453c 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -164,6 +164,7 @@
const CaptureResultExtras& resultExtras);
virtual void notifyShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp);
virtual void notifyPrepared(int streamId);
+ virtual void notifyRequestQueueEmpty();
virtual void notifyRepeatingRequestError(long lastFrameNumber);
/**
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index ccd1e4d..7e26153 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -307,6 +307,12 @@
}
template <typename TClientBase>
+void Camera2ClientBase<TClientBase>::notifyRequestQueueEmpty() {
+
+ ALOGV("%s: Request queue now empty", __FUNCTION__);
+}
+
+template <typename TClientBase>
void Camera2ClientBase<TClientBase>::notifyRepeatingRequestError(long lastFrameNumber) {
(void)lastFrameNumber;
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index dbbf638..9fd0a78 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -73,6 +73,7 @@
virtual void notifyAutoWhitebalance(uint8_t newState,
int triggerId);
virtual void notifyPrepared(int streamId);
+ virtual void notifyRequestQueueEmpty();
virtual void notifyRepeatingRequestError(long lastFrameNumber);
int getCameraId() const;
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 984d84b..f30afe3 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -204,6 +204,7 @@
virtual void notifyShutter(const CaptureResultExtras &resultExtras,
nsecs_t timestamp) = 0;
virtual void notifyPrepared(int streamId) = 0;
+ virtual void notifyRequestQueueEmpty() = 0;
// Required only for API1
virtual void notifyAutoFocus(uint8_t newState, int triggerId) = 0;
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index bfa04f6..3437e9d 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -686,7 +686,8 @@
}
status_t Camera3Device::convertMetadataListToRequestListLocked(
- const List<const CameraMetadata> &metadataList, RequestList *requestList) {
+ const List<const CameraMetadata> &metadataList, bool repeating,
+ RequestList *requestList) {
if (requestList == NULL) {
CLOGE("requestList cannot be NULL.");
return BAD_VALUE;
@@ -701,6 +702,8 @@
return BAD_VALUE;
}
+ newRequest->mRepeating = repeating;
+
// Setup burst Id and request Id
newRequest->mResultExtras.burstId = burstId++;
if (it->exists(ANDROID_REQUEST_ID)) {
@@ -757,7 +760,8 @@
RequestList requestList;
- res = convertMetadataListToRequestListLocked(requests, /*out*/&requestList);
+ res = convertMetadataListToRequestListLocked(requests, repeating,
+ /*out*/&requestList);
if (res != OK) {
// error logged by previous call
return res;
@@ -3551,6 +3555,12 @@
mRequestQueue.begin();
nextRequest = *firstRequest;
mRequestQueue.erase(firstRequest);
+ if (mRequestQueue.empty() && !nextRequest->mRepeating) {
+ sp<NotificationListener> listener = mListener.promote();
+ if (listener != NULL) {
+ listener->notifyRequestQueueEmpty();
+ }
+ }
}
// In case we've been unpaused by setPaused clearing mDoPause, need to
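The request thread above only signals the listener when the queue drains and the just-dequeued request is not repeating, holding the listener through a weak reference and promoting it just for the call. The same pattern with standard smart pointers, where std::weak_ptr::lock() stands in for the framework's wp<>::promote() (all types below are illustrative):

#include <cstdio>
#include <deque>
#include <memory>

struct NotificationListener {
    void notifyRequestQueueEmpty() { std::printf("request queue now empty\n"); }
};

struct CaptureRequest {
    bool repeating = false;
};

struct RequestThread {
    std::deque<CaptureRequest> requestQueue;
    std::weak_ptr<NotificationListener> listener;   // never keeps the listener alive

    CaptureRequest dequeueNext() {
        CaptureRequest next = requestQueue.front();
        requestQueue.pop_front();
        // A repeating request is re-submitted, so it never drains the queue for good;
        // only a one-shot request that empties the queue triggers the notification.
        if (requestQueue.empty() && !next.repeating) {
            if (auto l = listener.lock()) {          // wp<>::promote() equivalent
                l->notifyRequestQueueEmpty();
            }
        }
        return next;
    }
};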
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 87c43f3..ac9dfc2 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -297,6 +297,8 @@
// requests will be submitted to HAL at a time. The batch size for
// the following 7 requests will be ignored by the request thread.
int mBatchSize;
+ // Whether this request is from a repeating request or a repeating burst.
+ bool mRepeating;
};
typedef List<sp<CaptureRequest> > RequestList;
@@ -304,6 +306,7 @@
status_t convertMetadataListToRequestListLocked(
const List<const CameraMetadata> &metadataList,
+ bool repeating,
/*out*/
RequestList *requestList);
diff --git a/services/mediadrm/Android.mk b/services/mediadrm/Android.mk
index 4ce5c38..38aa472 100644
--- a/services/mediadrm/Android.mk
+++ b/services/mediadrm/Android.mk
@@ -25,7 +25,6 @@
libbinder \
libcutils \
liblog \
- libmedia \
libmediadrm \
libutils \
diff --git a/services/mediadrm/tests/Android.mk b/services/mediadrm/tests/Android.mk
index 8cbf782..e2f7399 100644
--- a/services/mediadrm/tests/Android.mk
+++ b/services/mediadrm/tests/Android.mk
@@ -19,7 +19,6 @@
frameworks/av/media/libmediaplayerservice \
LOCAL_CFLAGS += -Werror -Wall
-LOCAL_CLANG := true
LOCAL_32_BIT_ONLY := true
diff --git a/services/mediaextractor/Android.mk b/services/mediaextractor/Android.mk
index a9a2d3c..4e337a0 100644
--- a/services/mediaextractor/Android.mk
+++ b/services/mediaextractor/Android.mk
@@ -5,19 +5,20 @@
LOCAL_SRC_FILES := MediaExtractorService.cpp
LOCAL_SHARED_LIBRARIES := libmedia libstagefright libbinder libutils liblog
LOCAL_MODULE:= libmediaextractorservice
-LOCAL_32_BIT_ONLY := true
include $(BUILD_SHARED_LIBRARY)
# service executable
include $(CLEAR_VARS)
+# seccomp filters are defined for the following architectures:
LOCAL_REQUIRED_MODULES_arm := mediaextractor-seccomp.policy
+LOCAL_REQUIRED_MODULES_arm64 := mediaextractor-seccomp.policy
LOCAL_REQUIRED_MODULES_x86 := mediaextractor-seccomp.policy
+# TODO add seccomp filter for x86_64.
LOCAL_SRC_FILES := main_extractorservice.cpp minijail/minijail.cpp
LOCAL_SHARED_LIBRARIES := libmedia libmediaextractorservice libbinder libutils liblog libicuuc libminijail
LOCAL_STATIC_LIBRARIES := libicuandroid_utils
LOCAL_MODULE:= mediaextractor
-LOCAL_32_BIT_ONLY := true
LOCAL_INIT_RC := mediaextractor.rc
LOCAL_C_INCLUDES := frameworks/av/media/libmedia
include $(BUILD_EXECUTABLE)
diff --git a/services/mediaextractor/minijail/Android.mk b/services/mediaextractor/minijail/Android.mk
index 0cf8eff..6b01e77 100644
--- a/services/mediaextractor/minijail/Android.mk
+++ b/services/mediaextractor/minijail/Android.mk
@@ -1,18 +1,12 @@
LOCAL_PATH := $(call my-dir)
-ifeq ($(TARGET_ARCH), $(filter $(TARGET_ARCH), arm arm64 x86 x86_64))
+# TODO add filter for x86_64
+ifeq ($(TARGET_ARCH), $(filter $(TARGET_ARCH), arm arm64 x86))
include $(CLEAR_VARS)
LOCAL_MODULE := mediaextractor-seccomp.policy
LOCAL_MODULE_CLASS := ETC
LOCAL_MODULE_PATH := $(TARGET_OUT)/etc/seccomp_policy
-
-# mediaextractor runs in 32-bit combatibility mode. For 64 bit architectures,
-# use the 32 bit policy
-ifdef TARGET_2ND_ARCH
- LOCAL_SRC_FILES := $(LOCAL_PATH)/seccomp_policy/mediaextractor-seccomp-$(TARGET_2ND_ARCH).policy
-else
- LOCAL_SRC_FILES := $(LOCAL_PATH)/seccomp_policy/mediaextractor-seccomp-$(TARGET_ARCH).policy
-endif
+LOCAL_SRC_FILES := $(LOCAL_PATH)/seccomp_policy/mediaextractor-seccomp-$(TARGET_ARCH).policy
# allow device specific additions to the syscall whitelist
LOCAL_SRC_FILES += $(wildcard $(foreach dir, $(BOARD_SECCOMP_POLICY), \
diff --git a/services/mediaextractor/minijail/seccomp_policy/mediaextractor-seccomp-arm64.policy b/services/mediaextractor/minijail/seccomp_policy/mediaextractor-seccomp-arm64.policy
new file mode 100644
index 0000000..ae6ac05
--- /dev/null
+++ b/services/mediaextractor/minijail/seccomp_policy/mediaextractor-seccomp-arm64.policy
@@ -0,0 +1,36 @@
+# Organized by frequency of system call - in descending order for
+# best performance.
+ioctl: 1
+futex: 1
+prctl: 1
+write: 1
+getpriority: 1
+close: 1
+dup: 1
+mmap: 1
+munmap: 1
+openat: 1
+mprotect: 1
+madvise: 1
+getuid: 1
+fstat: 1
+read: 1
+setpriority: 1
+sigaltstack: 1
+clone: 1
+lseek: 1
+newfstatat: 1
+faccessat: 1
+restart_syscall: 1
+exit: 1
+exit_group: 1
+rt_sigreturn: 1
+getrlimit: 1
+
+# for attaching to debuggerd on process crash
+rt_sigaction: 1
+# socket: arg0 == AF_LOCAL
+socket: arg0 == 1
+connect: 1
+rt_tgsigqueueinfo: 1
+writev: 1
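The new arm64 policy lists the allowed syscalls roughly in descending call-frequency order and pins socket() to AF_LOCAL (which is 1 on Linux, hence arg0 == 1). A sketch of how such a policy file is typically installed with libminijail; the path and wiring are assumptions based on the Android.mk above and the public libminijail C API, not an excerpt from the mediaextractor sources:

#include <libminijail.h>

// Assumed install location, following LOCAL_MODULE_PATH in the Android.mk above.
static const char kPolicy[] = "/system/etc/seccomp_policy/mediaextractor-seccomp.policy";

static void applySeccompPolicy() {
    struct minijail *jail = minijail_new();
    minijail_no_new_privs(jail);                 // prerequisite for installing a seccomp filter
    minijail_log_seccomp_filter_failures(jail);  // log the offending syscall when the filter trips
    minijail_use_seccomp_filter(jail);
    minijail_parse_seccomp_filters(jail, kPolicy);
    minijail_enter(jail);                        // from here on, only the listed syscalls pass
    minijail_destroy(jail);
}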
diff --git a/services/mediaresourcemanager/Android.mk b/services/mediaresourcemanager/Android.mk
index b72230f..c9cd8cc 100644
--- a/services/mediaresourcemanager/Android.mk
+++ b/services/mediaresourcemanager/Android.mk
@@ -4,7 +4,7 @@
LOCAL_SRC_FILES := ResourceManagerService.cpp ServiceLog.cpp
-LOCAL_SHARED_LIBRARIES := libmedia libstagefright libbinder libutils liblog
+LOCAL_SHARED_LIBRARIES := libmedia libmediautils libbinder libutils liblog
LOCAL_MODULE:= libresourcemanagerservice
@@ -14,7 +14,6 @@
$(TOPDIR)frameworks/av/include
LOCAL_CFLAGS += -Werror -Wall
-LOCAL_CLANG := true
include $(BUILD_SHARED_LIBRARY)
diff --git a/services/mediaresourcemanager/test/Android.mk b/services/mediaresourcemanager/test/Android.mk
index 3b4ef0d..6abcf92 100644
--- a/services/mediaresourcemanager/test/Android.mk
+++ b/services/mediaresourcemanager/test/Android.mk
@@ -21,7 +21,6 @@
frameworks/av/services/mediaresourcemanager \
LOCAL_CFLAGS += -Werror -Wall
-LOCAL_CLANG := true
LOCAL_32_BIT_ONLY := true
@@ -47,7 +46,6 @@
frameworks/av/services/mediaresourcemanager \
LOCAL_CFLAGS += -Werror -Wall
-LOCAL_CLANG := true
LOCAL_32_BIT_ONLY := true
diff --git a/services/radio/Android.mk b/services/radio/Android.mk
index fc8f00c..b4cda19 100644
--- a/services/radio/Android.mk
+++ b/services/radio/Android.mk
@@ -18,7 +18,8 @@
LOCAL_SRC_FILES:= \
- RadioService.cpp
+ RadioService.cpp \
+ RadioHalLegacy.cpp
LOCAL_SHARED_LIBRARIES:= \
liblog \
diff --git a/services/radio/RadioHalLegacy.cpp b/services/radio/RadioHalLegacy.cpp
new file mode 100644
index 0000000..d50ccd4
--- /dev/null
+++ b/services/radio/RadioHalLegacy.cpp
@@ -0,0 +1,218 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "RadioHalLegacy"
+//#define LOG_NDEBUG 0
+
+#include <utils/Log.h>
+#include <utils/misc.h>
+#include "RadioHalLegacy.h"
+
+namespace android {
+
+const char *RadioHalLegacy::sClassModuleNames[] = {
+ RADIO_HARDWARE_MODULE_ID_FM, /* corresponds to RADIO_CLASS_AM_FM */
+ RADIO_HARDWARE_MODULE_ID_SAT, /* corresponds to RADIO_CLASS_SAT */
+ RADIO_HARDWARE_MODULE_ID_DT, /* corresponds to RADIO_CLASS_DT */
+};
+
+/* static */
+sp<RadioInterface> RadioInterface::connectModule(radio_class_t classId)
+{
+ return new RadioHalLegacy(classId);
+}
+
+RadioHalLegacy::RadioHalLegacy(radio_class_t classId)
+ : RadioInterface(), mClassId(classId), mHwDevice(NULL)
+{
+}
+
+void RadioHalLegacy::onFirstRef()
+{
+ const hw_module_t *mod;
+ int rc;
+ ALOGI("%s mClassId %d", __FUNCTION__, mClassId);
+
+ mHwDevice = NULL;
+
+ if ((mClassId < 0) ||
+ (mClassId >= NELEM(sClassModuleNames))) {
+ ALOGE("invalid class ID %d", mClassId);
+ return;
+ }
+
+ ALOGI("%s RADIO_HARDWARE_MODULE_ID %s %s",
+ __FUNCTION__, RADIO_HARDWARE_MODULE_ID, sClassModuleNames[mClassId]);
+
+ rc = hw_get_module_by_class(RADIO_HARDWARE_MODULE_ID, sClassModuleNames[mClassId], &mod);
+ if (rc != 0) {
+ ALOGE("couldn't load radio module %s.%s (%s)",
+ RADIO_HARDWARE_MODULE_ID, sClassModuleNames[mClassId], strerror(-rc));
+ return;
+ }
+ rc = radio_hw_device_open(mod, &mHwDevice);
+ if (rc != 0) {
+ ALOGE("couldn't open radio hw device in %s.%s (%s)",
+ RADIO_HARDWARE_MODULE_ID, "primary", strerror(-rc));
+ mHwDevice = NULL;
+ return;
+ }
+ if (mHwDevice->common.version != RADIO_DEVICE_API_VERSION_CURRENT) {
+ ALOGE("wrong radio hw device version %04x", mHwDevice->common.version);
+ radio_hw_device_close(mHwDevice);
+ mHwDevice = NULL;
+ }
+}
+
+RadioHalLegacy::~RadioHalLegacy()
+{
+ if (mHwDevice != NULL) {
+ radio_hw_device_close(mHwDevice);
+ }
+}
+
+int RadioHalLegacy::getProperties(radio_hal_properties_t *properties)
+{
+ if (mHwDevice == NULL) {
+ return -ENODEV;
+ }
+
+ int rc = mHwDevice->get_properties(mHwDevice, properties);
+ if (rc != 0) {
+ ALOGE("could not read implementation properties");
+ }
+
+ return rc;
+}
+
+int RadioHalLegacy::openTuner(const radio_hal_band_config_t *config,
+ bool audio,
+ sp<TunerCallbackInterface> callback,
+ sp<TunerInterface>& tuner)
+{
+ if (mHwDevice == NULL) {
+ return -ENODEV;
+ }
+ sp<Tuner> tunerImpl = new Tuner(callback);
+
+ const struct radio_tuner *halTuner;
+ int rc = mHwDevice->open_tuner(mHwDevice, config, audio,
+ RadioHalLegacy::Tuner::callback, tunerImpl.get(),
+ &halTuner);
+ if (rc == 0) {
+ tunerImpl->setHalTuner(halTuner);
+ tuner = tunerImpl;
+ }
+ return rc;
+}
+
+int RadioHalLegacy::closeTuner(sp<TunerInterface>& tuner)
+{
+ if (mHwDevice == NULL) {
+ return -ENODEV;
+ }
+ if (tuner == 0) {
+ return -EINVAL;
+ }
+ sp<Tuner> tunerImpl = (Tuner *)tuner.get();
+ return mHwDevice->close_tuner(mHwDevice, tunerImpl->getHalTuner());
+}
+
+int RadioHalLegacy::Tuner::setConfiguration(const radio_hal_band_config_t *config)
+{
+ if (mHalTuner == NULL) {
+ return -ENODEV;
+ }
+ return mHalTuner->set_configuration(mHalTuner, config);
+}
+
+int RadioHalLegacy::Tuner::getConfiguration(radio_hal_band_config_t *config)
+{
+ if (mHalTuner == NULL) {
+ return -ENODEV;
+ }
+ return mHalTuner->get_configuration(mHalTuner, config);
+}
+
+int RadioHalLegacy::Tuner::scan(radio_direction_t direction, bool skip_sub_channel)
+{
+ if (mHalTuner == NULL) {
+ return -ENODEV;
+ }
+ return mHalTuner->scan(mHalTuner, direction, skip_sub_channel);
+}
+
+int RadioHalLegacy::Tuner::step(radio_direction_t direction, bool skip_sub_channel)
+{
+ if (mHalTuner == NULL) {
+ return -ENODEV;
+ }
+ return mHalTuner->step(mHalTuner, direction, skip_sub_channel);
+}
+
+int RadioHalLegacy::Tuner::tune(unsigned int channel, unsigned int sub_channel)
+{
+ if (mHalTuner == NULL) {
+ return -ENODEV;
+ }
+ return mHalTuner->tune(mHalTuner, channel, sub_channel);
+}
+
+int RadioHalLegacy::Tuner::cancel()
+{
+ if (mHalTuner == NULL) {
+ return -ENODEV;
+ }
+ return mHalTuner->cancel(mHalTuner);
+}
+
+int RadioHalLegacy::Tuner::getProgramInformation(radio_program_info_t *info)
+{
+ if (mHalTuner == NULL) {
+ return -ENODEV;
+ }
+ return mHalTuner->get_program_information(mHalTuner, info);
+}
+
+void RadioHalLegacy::Tuner::onCallback(radio_hal_event_t *halEvent)
+{
+ if (mCallback != 0) {
+ mCallback->onEvent(halEvent);
+ }
+}
+
+//static
+void RadioHalLegacy::Tuner::callback(radio_hal_event_t *halEvent, void *cookie)
+{
+ wp<RadioHalLegacy::Tuner> weak = wp<RadioHalLegacy::Tuner>((RadioHalLegacy::Tuner *)cookie);
+ sp<RadioHalLegacy::Tuner> tuner = weak.promote();
+ if (tuner != 0) {
+ tuner->onCallback(halEvent);
+ }
+}
+
+RadioHalLegacy::Tuner::Tuner(sp<TunerCallbackInterface> callback)
+ : TunerInterface(), mHalTuner(NULL), mCallback(callback)
+{
+}
+
+
+RadioHalLegacy::Tuner::~Tuner()
+{
+}
+
+
+} // namespace android
diff --git a/services/radio/RadioHalLegacy.h b/services/radio/RadioHalLegacy.h
new file mode 100644
index 0000000..7d4831b
--- /dev/null
+++ b/services/radio/RadioHalLegacy.h
@@ -0,0 +1,82 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_RADIO_HAL_LEGACY_H
+#define ANDROID_HARDWARE_RADIO_HAL_LEGACY_H
+
+#include <utils/RefBase.h>
+#include <hardware/radio.h>
+#include "RadioInterface.h"
+#include "TunerInterface.h"
+#include "TunerCallbackInterface.h"
+
+namespace android {
+
+class RadioHalLegacy : public RadioInterface
+{
+public:
+ RadioHalLegacy(radio_class_t classId);
+
+ // RadioInterface
+ virtual int getProperties(radio_hal_properties_t *properties);
+ virtual int openTuner(const radio_hal_band_config_t *config,
+ bool audio,
+ sp<TunerCallbackInterface> callback,
+ sp<TunerInterface>& tuner);
+ virtual int closeTuner(sp<TunerInterface>& tuner);
+
+ // RefBase
+ virtual void onFirstRef();
+
+ class Tuner : public TunerInterface
+ {
+ public:
+ Tuner(sp<TunerCallbackInterface> callback);
+
+ virtual int setConfiguration(const radio_hal_band_config_t *config);
+ virtual int getConfiguration(radio_hal_band_config_t *config);
+ virtual int scan(radio_direction_t direction, bool skip_sub_channel);
+ virtual int step(radio_direction_t direction, bool skip_sub_channel);
+ virtual int tune(unsigned int channel, unsigned int sub_channel);
+ virtual int cancel();
+ virtual int getProgramInformation(radio_program_info_t *info);
+
+ static void callback(radio_hal_event_t *halEvent, void *cookie);
+ void onCallback(radio_hal_event_t *halEvent);
+
+ void setHalTuner(const struct radio_tuner *halTuner) { mHalTuner = halTuner; }
+ const struct radio_tuner *getHalTuner() { return mHalTuner; }
+
+ private:
+ virtual ~Tuner();
+
+ const struct radio_tuner *mHalTuner;
+ sp<TunerCallbackInterface> mCallback;
+ };
+
+protected:
+ virtual ~RadioHalLegacy();
+
+private:
+ static const char * sClassModuleNames[];
+
+ radio_class_t mClassId;
+ struct radio_hw_device *mHwDevice;
+};
+
+} // namespace android
+
+#endif // ANDROID_HARDWARE_RADIO_HAL_LEGACY_H
diff --git a/services/radio/RadioInterface.h b/services/radio/RadioInterface.h
new file mode 100644
index 0000000..fcfb4d5
--- /dev/null
+++ b/services/radio/RadioInterface.h
@@ -0,0 +1,91 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_RADIO_INTERFACE_H
+#define ANDROID_HARDWARE_RADIO_INTERFACE_H
+
+#include <utils/RefBase.h>
+#include <system/radio.h>
+#include "TunerInterface.h"
+#include "TunerCallbackInterface.h"
+
+namespace android {
+
+class RadioInterface : public virtual RefBase
+{
+public:
+ /* get a radio HAL instance */
+ static sp<RadioInterface> connectModule(radio_class_t classId);
+
+ /*
+ * Retrieve implementation properties.
+ *
+ * arguments:
+ * - properties: where to return the module properties
+ *
+ * returns:
+ * 0 if no error
+ * -EINVAL if invalid arguments are passed
+ */
+ virtual int getProperties(radio_hal_properties_t *properties) = 0;
+
+ /*
+ * Open a tuner interface for the requested configuration.
+ * If no other tuner is opened, this will activate the radio module.
+ *
+ * arguments:
+ * - config: the band configuration to apply
+ * - audio: this tuner will be used for live radio listening and should be connected to
+ * the radio audio source.
+ * - callback: the event callback
+ * - cookie: the cookie to pass when calling the callback
+ * - tuner: where to return the tuner interface
+ *
+ * returns:
+ * 0 if HW was powered up and configuration could be applied
+ * -EINVAL if configuration requested is invalid
+ * -ENOSYS if called out of sequence
+ *
+ * Callback function with event RADIO_EVENT_CONFIG MUST be called once the
+ * configuration is applied or a failure occurs or after a time out.
+ */
+ virtual int openTuner(const radio_hal_band_config_t *config,
+ bool audio,
+ sp<TunerCallbackInterface> callback,
+ sp<TunerInterface>& tuner) = 0;
+
+ /*
+ * Close a tuner interface.
+ * If the last tuner is closed, the radio module is deactivated.
+ *
+ * arguments:
+ * - tuner: the tuner interface to close
+ *
+ * returns:
+ * 0 if powered down successfully.
+ * -EINVAL if an invalid argument is passed
+ * -ENOSYS if called out of sequence
+ */
+ virtual int closeTuner(sp<TunerInterface>& tuner) = 0;
+
+protected:
+ RadioInterface() {}
+ virtual ~RadioInterface() {}
+};
+
+} // namespace android
+
+#endif // ANDROID_HARDWARE_RADIO_INTERFACE_H
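RadioInterface hides the legacy libhardware radio HAL behind a RefBase interface, so RadioService only deals with connectModule(), getProperties() and openTuner()/closeTuner(). A short usage sketch against this interface, assuming the in-tree headers added by this change; the callback class, band selection and error handling below are illustrative only:

#define LOG_TAG "RadioHalSketch"
#include <errno.h>
#include <utils/Log.h>
#include <system/radio.h>
#include "RadioInterface.h"
#include "TunerInterface.h"
#include "TunerCallbackInterface.h"

namespace android {

// Minimal event sink; a real client would forward events to its own listeners.
class LoggingCallback : public TunerCallbackInterface {
public:
    void onEvent(radio_hal_event_t *event) override {
        ALOGI("radio event type %d status %d", (int)event->type, (int)event->status);
    }
};

int openAmFmTuner(sp<TunerInterface>& tuner) {
    sp<RadioInterface> dev = RadioInterface::connectModule(RADIO_CLASS_AM_FM);
    if (dev == 0) return -ENODEV;

    radio_hal_properties_t props;
    int rc = dev->getProperties(&props);
    if (rc != 0 || props.num_bands == 0) return (rc != 0) ? rc : -EINVAL;

    sp<TunerCallbackInterface> cb = new LoggingCallback();
    // Tune on the first advertised band, routed to the radio audio source.
    return dev->openTuner(&props.bands[0], true /*audio*/, cb, tuner);
}

} // namespace android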
diff --git a/services/radio/RadioService.cpp b/services/radio/RadioService.cpp
index 5a3f750..a73ed8f 100644
--- a/services/radio/RadioService.cpp
+++ b/services/radio/RadioService.cpp
@@ -51,31 +51,15 @@
void RadioService::onFirstRef()
{
- const hw_module_t *mod;
- int rc;
- struct radio_hw_device *dev;
-
ALOGI("%s", __FUNCTION__);
- rc = hw_get_module_by_class(RADIO_HARDWARE_MODULE_ID, RADIO_HARDWARE_MODULE_ID_FM, &mod);
- if (rc != 0) {
- ALOGE("couldn't load radio module %s.%s (%s)",
- RADIO_HARDWARE_MODULE_ID, "primary", strerror(-rc));
- return;
- }
- rc = radio_hw_device_open(mod, &dev);
- if (rc != 0) {
- ALOGE("couldn't open radio hw device in %s.%s (%s)",
- RADIO_HARDWARE_MODULE_ID, "primary", strerror(-rc));
- return;
- }
- if (dev->common.version != RADIO_DEVICE_API_VERSION_CURRENT) {
- ALOGE("wrong radio hw device version %04x", dev->common.version);
- return;
- }
+ sp<RadioInterface> dev = RadioInterface::connectModule(RADIO_CLASS_AM_FM);
+ if (dev == 0) {
+ return;
+ }
struct radio_hal_properties halProperties;
- rc = dev->get_properties(dev, &halProperties);
+ int rc = dev->getProperties(&halProperties);
if (rc != 0) {
ALOGE("could not read implementation properties");
return;
@@ -94,9 +78,6 @@
RadioService::~RadioService()
{
- for (size_t i = 0; i < mModules.size(); i++) {
- radio_hw_device_close(mModules.valueAt(i)->hwDevice());
- }
}
status_t RadioService::listModules(struct radio_properties *properties,
@@ -108,7 +89,7 @@
if (numModules == NULL || (*numModules != 0 && properties == NULL)) {
return BAD_VALUE;
}
- size_t maxModules = *numModules;
+ uint32_t maxModules = *numModules;
*numModules = mModules.size();
for (size_t i = 0; i < mModules.size() && i < maxModules; i++) {
properties[i] = mModules.valueAt(i)->properties();
@@ -192,16 +173,6 @@
}
-// static
-void RadioService::callback(radio_hal_event_t *halEvent, void *cookie)
-{
- CallbackThread *callbackThread = (CallbackThread *)cookie;
- if (callbackThread == NULL) {
- return;
- }
- callbackThread->sendEvent(halEvent);
-}
-
/* static */
void RadioService::convertProperties(radio_properties_t *properties,
const radio_hal_properties_t *halProperties)
@@ -305,32 +276,40 @@
{
sp<IMemory> eventMemory;
- size_t headerSize =
- (sizeof(struct radio_event) + sizeof(unsigned int) - 1) /sizeof(unsigned int);
- size_t metadataSize = 0;
+ // The event layout in shared memory is:
+ // sizeof(struct radio_event) bytes : the event itself
+ // 4 bytes : metadata size or 0
+ // N bytes : metadata if present
+ uint32_t metadataOffset = sizeof(struct radio_event) + sizeof(uint32_t);
+ uint32_t metadataSize = 0;
+
switch (halEvent->type) {
case RADIO_EVENT_TUNED:
case RADIO_EVENT_AF_SWITCH:
if (radio_metadata_check(halEvent->info.metadata) == 0) {
- metadataSize = radio_metadata_get_size(halEvent->info.metadata);
+ metadataSize = (uint32_t)radio_metadata_get_size(halEvent->info.metadata);
}
break;
case RADIO_EVENT_METADATA:
if (radio_metadata_check(halEvent->metadata) != 0) {
return eventMemory;
}
- metadataSize = radio_metadata_get_size(halEvent->metadata);
+ metadataSize = (uint32_t)radio_metadata_get_size(halEvent->metadata);
break;
default:
break;
}
- size_t size = headerSize + metadataSize;
- eventMemory = mMemoryDealer->allocate(size);
+
+ eventMemory = mMemoryDealer->allocate(metadataOffset + metadataSize);
if (eventMemory == 0 || eventMemory->pointer() == NULL) {
eventMemory.clear();
return eventMemory;
}
+
struct radio_event *event = (struct radio_event *)eventMemory->pointer();
+
+ *(uint32_t *)((uint8_t *)event + metadataOffset - sizeof(uint32_t)) = metadataSize;
+
event->type = halEvent->type;
event->status = halEvent->status;
@@ -342,10 +321,7 @@
case RADIO_EVENT_AF_SWITCH:
event->info = halEvent->info;
if (metadataSize != 0) {
- memcpy((char *)event + headerSize, halEvent->info.metadata, metadataSize);
- // replace meta data pointer by offset while in shared memory so that receiving side
- // can restore the pointer in destination process.
- event->info.metadata = (radio_metadata_t *)headerSize;
+ memcpy((uint8_t *)event + metadataOffset, halEvent->info.metadata, metadataSize);
}
break;
case RADIO_EVENT_TA:
@@ -355,10 +331,9 @@
event->on = halEvent->on;
break;
case RADIO_EVENT_METADATA:
- memcpy((char *)event + headerSize, halEvent->metadata, metadataSize);
- // replace meta data pointer by offset while in shared memory so that receiving side
- // can restore the pointer in destination process.
- event->metadata = (radio_metadata_t *)headerSize;
+ if (metadataSize != 0) {
+ memcpy((uint8_t *)event + metadataOffset, halEvent->metadata, metadataSize);
+ }
break;
case RADIO_EVENT_HW_FAILURE:
default:
@@ -385,12 +360,13 @@
#undef LOG_TAG
#define LOG_TAG "RadioService::Module"
-RadioService::Module::Module(radio_hw_device* hwDevice, radio_properties properties)
+RadioService::Module::Module(sp<RadioInterface> hwDevice, radio_properties properties)
: mHwDevice(hwDevice), mProperties(properties), mMute(true)
{
}
RadioService::Module::~Module() {
+ mHwDevice.clear();
mModuleClients.clear();
}
@@ -404,10 +380,15 @@
bool audio)
{
ALOGV("addClient() %p config %p product %s", this, config, mProperties.product);
+
AutoMutex lock(mLock);
sp<ModuleClient> moduleClient;
int ret;
+ if (mHwDevice == 0) {
+ return moduleClient;
+ }
+
for (size_t i = 0; i < mModuleClients.size(); i++) {
if (mModuleClients[i]->client() == client) {
// client already connected: reject
@@ -464,7 +445,7 @@
}
}
- const struct radio_tuner *halTuner;
+ sp<TunerInterface> halTuner;
sp<ModuleClient> preemtedClient;
if (audio) {
if (allocatedAudio >= mProperties.num_audio_sources) {
@@ -484,18 +465,19 @@
}
if (preemtedClient != 0) {
halTuner = preemtedClient->getTuner();
- preemtedClient->setTuner(NULL);
- mHwDevice->close_tuner(mHwDevice, halTuner);
+ sp<TunerInterface> clear;
+ preemtedClient->setTuner(clear);
+ mHwDevice->closeTuner(halTuner);
if (preemtedClient->audio()) {
notifyDeviceConnection(false, "");
}
}
- ret = mHwDevice->open_tuner(mHwDevice, &halConfig, audio,
- RadioService::callback, moduleClient->callbackThread().get(),
- &halTuner);
+ ret = mHwDevice->openTuner(&halConfig, audio,
+ moduleClient,
+ halTuner);
if (ret == 0) {
- ALOGV("addClient() setTuner %p", halTuner);
+ ALOGV("addClient() setTuner %p", halTuner.get());
moduleClient->setTuner(halTuner);
mModuleClients.add(moduleClient);
if (audio) {
@@ -527,12 +509,15 @@
}
mModuleClients.removeAt(index);
- const struct radio_tuner *halTuner = moduleClient->getTuner();
+ sp<TunerInterface> halTuner = moduleClient->getTuner();
if (halTuner == NULL) {
return;
}
- mHwDevice->close_tuner(mHwDevice, halTuner);
+ if (mHwDevice != 0) {
+ mHwDevice->closeTuner(halTuner);
+ }
+
if (moduleClient->audio()) {
notifyDeviceConnection(false, "");
}
@@ -543,6 +528,10 @@
return;
}
+ if (mHwDevice == 0) {
+ return;
+ }
+
// Tuner reallocation logic:
// When a client is removed and was controlling a tuner, this tuner will be allocated to a
// previously preempted client. This client will be notified by a callback with
@@ -591,9 +580,9 @@
ALOG_ASSERT(youngestClient != 0, "removeClient() removed client no candidate found for tuner");
struct radio_hal_band_config halConfig = youngestClient->halConfig();
- ret = mHwDevice->open_tuner(mHwDevice, &halConfig, youngestClient->audio(),
- RadioService::callback, moduleClient->callbackThread().get(),
- &halTuner);
+ ret = mHwDevice->openTuner(&halConfig, youngestClient->audio(),
+ moduleClient,
+ halTuner);
if (ret == 0) {
youngestClient->setTuner(halTuner);
@@ -646,7 +635,7 @@
const sp<IRadioClient>& client,
const struct radio_band_config *config,
bool audio)
- : mModule(module), mClient(client), mConfig(*config), mAudio(audio), mTuner(NULL)
+ : mModule(module), mClient(client), mConfig(*config), mAudio(audio), mTuner(0)
{
}
@@ -666,6 +655,11 @@
}
}
+void RadioService::ModuleClient::onEvent(radio_hal_event_t *halEvent)
+{
+ mCallbackThread->sendEvent(halEvent);
+}
+
status_t RadioService::ModuleClient::dump(int fd __unused,
const Vector<String16>& args __unused) {
String8 result;
@@ -696,14 +690,14 @@
return mConfig.band;
}
-const struct radio_tuner *RadioService::ModuleClient::getTuner() const
+sp<TunerInterface>& RadioService::ModuleClient::getTuner()
{
AutoMutex lock(mLock);
ALOGV("%s locked", __FUNCTION__);
return mTuner;
}
-void RadioService::ModuleClient::setTuner(const struct radio_tuner *tuner)
+void RadioService::ModuleClient::setTuner(sp<TunerInterface>& tuner)
{
ALOGV("%s %p", __FUNCTION__, this);
@@ -714,7 +708,7 @@
radio_hal_event_t event;
event.type = RADIO_EVENT_CONTROL;
event.status = 0;
- event.on = mTuner != NULL;
+ event.on = mTuner != 0;
mCallbackThread->sendEvent(&event);
ALOGV("%s DONE", __FUNCTION__);
@@ -726,10 +720,10 @@
status_t status = NO_ERROR;
ALOGV("%s locked", __FUNCTION__);
- if (mTuner != NULL) {
+ if (mTuner != 0) {
struct radio_hal_band_config halConfig;
halConfig = config->band;
- status = (status_t)mTuner->set_configuration(mTuner, &halConfig);
+ status = (status_t)mTuner->setConfiguration(&halConfig);
if (status == NO_ERROR) {
mConfig = *config;
}
@@ -747,9 +741,9 @@
status_t status = NO_ERROR;
ALOGV("%s locked", __FUNCTION__);
- if (mTuner != NULL) {
+ if (mTuner != 0) {
struct radio_hal_band_config halConfig;
- status = (status_t)mTuner->get_configuration(mTuner, &halConfig);
+ status = (status_t)mTuner->getConfiguration(&halConfig);
if (status == NO_ERROR) {
mConfig.band = halConfig;
}
@@ -765,7 +759,7 @@
{
Mutex::Autolock _l(mLock);
ALOGV("%s locked", __FUNCTION__);
- if (mTuner == NULL || !mAudio) {
+ if (mTuner == 0 || !mAudio) {
return INVALID_OPERATION;
}
module = mModule.promote();
@@ -796,8 +790,8 @@
AutoMutex lock(mLock);
ALOGV("%s locked", __FUNCTION__);
status_t status;
- if (mTuner != NULL) {
- status = (status_t)mTuner->scan(mTuner, direction, skipSubChannel);
+ if (mTuner != 0) {
+ status = (status_t)mTuner->scan(direction, skipSubChannel);
} else {
status = INVALID_OPERATION;
}
@@ -809,21 +803,21 @@
AutoMutex lock(mLock);
ALOGV("%s locked", __FUNCTION__);
status_t status;
- if (mTuner != NULL) {
- status = (status_t)mTuner->step(mTuner, direction, skipSubChannel);
+ if (mTuner != 0) {
+ status = (status_t)mTuner->step(direction, skipSubChannel);
} else {
status = INVALID_OPERATION;
}
return status;
}
-status_t RadioService::ModuleClient::tune(unsigned int channel, unsigned int subChannel)
+status_t RadioService::ModuleClient::tune(uint32_t channel, uint32_t subChannel)
{
AutoMutex lock(mLock);
ALOGV("%s locked", __FUNCTION__);
status_t status;
- if (mTuner != NULL) {
- status = (status_t)mTuner->tune(mTuner, channel, subChannel);
+ if (mTuner != 0) {
+ status = (status_t)mTuner->tune(channel, subChannel);
} else {
status = INVALID_OPERATION;
}
@@ -835,8 +829,8 @@
AutoMutex lock(mLock);
ALOGV("%s locked", __FUNCTION__);
status_t status;
- if (mTuner != NULL) {
- status = (status_t)mTuner->cancel(mTuner);
+ if (mTuner != 0) {
+ status = (status_t)mTuner->cancel();
} else {
status = INVALID_OPERATION;
}
@@ -849,10 +843,11 @@
ALOGV("%s locked", __FUNCTION__);
status_t status;
if (mTuner != NULL) {
- status = (status_t)mTuner->get_program_information(mTuner, info);
+ status = (status_t)mTuner->getProgramInformation(info);
} else {
status = INVALID_OPERATION;
}
+
return status;
}
@@ -860,7 +855,7 @@
{
Mutex::Autolock lock(mLock);
ALOGV("%s locked", __FUNCTION__);
- *hasControl = mTuner != NULL;
+ *hasControl = mTuner != 0;
return NO_ERROR;
}
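prepareEvent() above now lays each HAL event out in shared memory as the radio_event struct, then a 4-byte metadata size word, then the metadata blob itself, instead of smuggling an offset through the metadata pointer. A sketch of walking that layout back on the receiving side (illustrative only; the real deserialization lives in the radio client library):

#include <cstdint>
#include <cstring>
#include <system/radio.h>

// Layout written by prepareEvent():
//   [struct radio_event][uint32_t metadataSize][metadataSize bytes of metadata]
void readSharedEvent(const void *shmem, struct radio_event *outEvent,
                     const radio_metadata_t **outMetadata, uint32_t *outMetadataSize) {
    const uint8_t *base = static_cast<const uint8_t *>(shmem);

    std::memcpy(outEvent, base, sizeof(struct radio_event));

    uint32_t metadataSize;
    std::memcpy(&metadataSize, base + sizeof(struct radio_event), sizeof(uint32_t));
    *outMetadataSize = metadataSize;

    *outMetadata = (metadataSize != 0)
            ? reinterpret_cast<const radio_metadata_t *>(
                      base + sizeof(struct radio_event) + sizeof(uint32_t))
            : nullptr;
}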
diff --git a/services/radio/RadioService.h b/services/radio/RadioService.h
index ac3481e..444eb7a 100644
--- a/services/radio/RadioService.h
+++ b/services/radio/RadioService.h
@@ -27,6 +27,9 @@
#include <radio/IRadioClient.h>
#include <system/radio.h>
#include <hardware/radio.h>
+#include "RadioInterface.h"
+#include "TunerInterface.h"
+#include "TunerCallbackInterface.h"
namespace android {
@@ -66,7 +69,7 @@
class Module : public virtual RefBase {
public:
- Module(radio_hw_device* hwDevice,
+ Module(sp<RadioInterface> hwDevice,
struct radio_properties properties);
virtual ~Module();
@@ -83,7 +86,7 @@
virtual status_t dump(int fd, const Vector<String16>& args);
- const struct radio_hw_device *hwDevice() const { return mHwDevice; }
+ sp<RadioInterface> hwDevice() const { return mHwDevice; }
const struct radio_properties properties() const { return mProperties; }
const struct radio_band_config *getDefaultConfig() const ;
@@ -92,7 +95,7 @@
void notifyDeviceConnection(bool connected, const char *address);
Mutex mLock; // protects mModuleClients
- const struct radio_hw_device *mHwDevice; // HAL hardware device
+ sp<RadioInterface> mHwDevice; // HAL hardware device
const struct radio_properties mProperties; // cached hardware module properties
Vector< sp<ModuleClient> > mModuleClients; // list of attached clients
bool mMute; // radio audio source state
@@ -128,7 +131,8 @@
}; // class CallbackThread
class ModuleClient : public BnRadio,
- public IBinder::DeathRecipient {
+ public IBinder::DeathRecipient,
+ public TunerCallbackInterface {
public:
ModuleClient(const sp<Module>& module,
@@ -167,8 +171,8 @@
wp<Module> module() const { return mModule; }
radio_hal_band_config_t halConfig() const;
sp<CallbackThread> callbackThread() const { return mCallbackThread; }
- void setTuner(const struct radio_tuner *tuner);
- const struct radio_tuner *getTuner() const;
+ void setTuner(sp<TunerInterface>& tuner);
+ sp<TunerInterface>& getTuner();
bool audio() const { return mAudio; }
void onCallbackEvent(const sp<IMemory>& event);
@@ -179,6 +183,9 @@
// IBinder::DeathRecipient implementation
virtual void binderDied(const wp<IBinder> &who);
+ // TunerCallbackInterface
+ virtual void onEvent(radio_hal_event_t *event);
+
private:
mutable Mutex mLock; // protects mClient, mConfig and mTuner
@@ -187,7 +194,7 @@
radio_band_config_t mConfig; // current band configuration
sp<CallbackThread> mCallbackThread; // event callback thread
const bool mAudio;
- const struct radio_tuner *mTuner; // HAL tuner interface. NULL indicates that
+ sp<TunerInterface> mTuner; // HAL tuner interface. NULL indicates that
// this client does not have control on any
// tuner
}; // class ModuleClient
diff --git a/services/radio/TunerCallbackInterface.h b/services/radio/TunerCallbackInterface.h
new file mode 100644
index 0000000..4973cce
--- /dev/null
+++ b/services/radio/TunerCallbackInterface.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_TUNER_CALLBACK_INTERFACE_H
+#define ANDROID_HARDWARE_TUNER_CALLBACK_INTERFACE_H
+
+#include <utils/RefBase.h>
+#include <system/radio.h>
+
+namespace android {
+
+class TunerCallbackInterface : public virtual RefBase
+{
+public:
+ virtual void onEvent(radio_hal_event_t *event) = 0;
+
+protected:
+ TunerCallbackInterface() {}
+ virtual ~TunerCallbackInterface() {}
+
+};
+
+} // namespace android
+
+#endif // ANDROID_HARDWARE_TUNER_CALLBACK_INTERFACE_H
diff --git a/services/radio/TunerInterface.h b/services/radio/TunerInterface.h
new file mode 100644
index 0000000..4e657d3
--- /dev/null
+++ b/services/radio/TunerInterface.h
@@ -0,0 +1,162 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_TUNER_INTERFACE_H
+#define ANDROID_HARDWARE_TUNER_INTERFACE_H
+
+#include <utils/RefBase.h>
+#include <system/radio.h>
+
+namespace android {
+
+class TunerInterface : public virtual RefBase
+{
+public:
+ /*
+ * Apply current radio band configuration (band, range, channel spacing ...).
+ *
+ * arguments:
+ * - config: the band configuration to apply
+ *
+ * returns:
+ * 0 if configuration could be applied
+ * -EINVAL if configuration requested is invalid
+ *
+ * Automatically cancels pending scan, step or tune.
+ *
+ * Callback function with event RADIO_EVENT_CONFIG MUST be called once the
+ * configuration is applied or a failure occurs or after a time out.
+ */
+ virtual int setConfiguration(const radio_hal_band_config_t *config) = 0;
+
+ /*
+ * Retrieve current radio band configuration.
+ *
+ * arguments:
+ * - config: where to return the band configuration
+ *
+ * returns:
+ * 0 if valid configuration is returned
+ * -EINVAL if invalid arguments are passed
+ */
+ virtual int getConfiguration(radio_hal_band_config_t *config) = 0;
+
+ /*
+ * Start scanning up to next valid station.
+ * Must be called when a valid configuration has been applied.
+ *
+ * arguments:
+ * - direction: RADIO_DIRECTION_UP or RADIO_DIRECTION_DOWN
+ * - skip_sub_channel: valid for HD radio or digital radios only: ignore sub channels
+ * (e.g SPS for HD radio).
+ *
+ * returns:
+ * 0 if scan successfully started
+ * -ENOSYS if called out of sequence
+ * -ENODEV if another error occurs
+ *
+ * Automatically cancels pending scan, step or tune.
+ *
+ * Callback function with event RADIO_EVENT_TUNED MUST be called once
+ * locked on a station or after a time out or full frequency scan if
+ * no station found. The event status should indicate if a valid station
+ * is tuned or not.
+ */
+ virtual int scan(radio_direction_t direction, bool skip_sub_channel) = 0;
+
+ /*
+ * Move one channel spacing up or down.
+ * Must be called when a valid configuration has been applied.
+ *
+ * arguments:
+ * - direction: RADIO_DIRECTION_UP or RADIO_DIRECTION_DOWN
+ * - skip_sub_channel: valid for HD radio or digital radios only: ignore sub channels
+ * (e.g SPS for HD radio).
+ *
+ * returns:
+ * 0 if step successfully started
+ * -ENOSYS if called out of sequence
+ * -ENODEV if another error occurs
+ *
+ * Automatically cancels pending scan, step or tune.
+ *
+ * Callback function with event RADIO_EVENT_TUNED MUST be called once
+ * step completed or after a time out. The event status should indicate
+ * if a valid station is tuned or not.
+ */
+ virtual int step(radio_direction_t direction, bool skip_sub_channel) = 0;
+
+ /*
+ * Tune to specified frequency.
+ * Must be called when a valid configuration has been applied.
+ *
+ * arguments:
+ * - channel: channel to tune to. A frequency in kHz for AM/FM/HD Radio bands.
+ * - sub_channel: valid for HD radio or digital radios only: (e.g SPS number for HD radio).
+ *
+ * returns:
+ * 0 if tune successfully started
+ * -ENOSYS if called out of sequence
+ * -EINVAL if invalid arguments are passed
+ * -ENODEV if another error occurs
+ *
+ * Automatically cancels pending scan, step or tune.
+ *
+ * Callback function with event RADIO_EVENT_TUNED MUST be called once
+ * tuned or after a time out. The event status should indicate
+ * if a valid station is tuned or not.
+ */
+ virtual int tune(unsigned int channel, unsigned int sub_channel) = 0;
+
+ /*
+ * Cancel a scan, step or tune operation.
+ * Must be called while a scan, step or tune operation is pending
+ * (callback not yet sent).
+ *
+ * returns:
+ * 0 if successful
+ * -ENOSYS if called out of sequence
+ * -ENODEV if another error occurs
+ *
+ * The callback is not sent.
+ */
+ virtual int cancel() = 0;
+
+ /*
+ * Retrieve current station information.
+ *
+ * arguments:
+ * - info: where to return the program info.
+ If info->metadata is NULL, no meta data should be returned.
+ * If meta data must be returned, they should be added to or cloned to
+ * info->metadata, not passed from a newly created meta data buffer.
+ *
+ * returns:
+ * 0 if tuned and information available
+ * -EINVAL if invalid arguments are passed
+ * -ENODEV if another error occurs
+ */
+ virtual int getProgramInformation(radio_program_info_t *info) = 0;
+
+protected:
+ TunerInterface() {}
+ virtual ~TunerInterface() {}
+
+};
+
+} // namespace android
+
+#endif // ANDROID_HARDWARE_TUNER_INTERFACE_H
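TunerInterface documents scan(), step() and tune() as asynchronous: the call only starts the operation, and completion is reported through a RADIO_EVENT_TUNED callback. A sketch of a blocking helper built on that contract, using a condition variable to wait for the event (entirely illustrative, not part of the radio service):

#include <cerrno>
#include <chrono>
#include <condition_variable>
#include <mutex>
#include <system/radio.h>
#include "TunerInterface.h"
#include "TunerCallbackInterface.h"

namespace android {

// Waits for the RADIO_EVENT_TUNED that must follow a tune() request.
class SyncTuneCallback : public TunerCallbackInterface {
public:
    void onEvent(radio_hal_event_t *event) override {
        if (event->type == RADIO_EVENT_TUNED) {
            std::lock_guard<std::mutex> lock(mMutex);
            mStatus = event->status;
            mTuned = true;
            mCond.notify_all();
        }
    }

    // Returns the event status, or -ETIMEDOUT if no RADIO_EVENT_TUNED arrived in time.
    int waitForTuned(int timeoutMs) {
        std::unique_lock<std::mutex> lock(mMutex);
        if (!mCond.wait_for(lock, std::chrono::milliseconds(timeoutMs),
                            [this] { return mTuned; })) {
            return -ETIMEDOUT;
        }
        return mStatus;
    }

private:
    std::mutex mMutex;
    std::condition_variable mCond;
    bool mTuned = false;
    int mStatus = 0;
};

int tuneAndWait(const sp<TunerInterface>& tuner, SyncTuneCallback& cb,
                unsigned int channel, unsigned int subChannel) {
    int rc = tuner->tune(channel, subChannel);
    if (rc != 0) return rc;            // tune() only starts the operation
    return cb.waitForTuned(5000);      // completion arrives as RADIO_EVENT_TUNED
}

} // namespace android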
diff --git a/services/soundtrigger/Android.mk b/services/soundtrigger/Android.mk
index 0f5bbba..5d01999 100644
--- a/services/soundtrigger/Android.mk
+++ b/services/soundtrigger/Android.mk
@@ -16,7 +16,6 @@
include $(CLEAR_VARS)
-
ifeq ($(SOUND_TRIGGER_USE_STUB_MODULE), 1)
LOCAL_CFLAGS += -DSOUND_TRIGGER_USE_STUB_MODULE
endif
@@ -34,12 +33,32 @@
libaudioclient \
libserviceutility
+
+ifeq ($(ENABLE_TREBLE),true)
+# Treble configuration
+LOCAL_CFLAGS += -DENABLE_TREBLE
+LOCAL_SRC_FILES += \
+ SoundTriggerHalHidl.cpp
+
+LOCAL_SHARED_LIBRARIES += \
+ libhwbinder \
+ libhidl \
+ libbase \
+ android.hardware.soundtrigger@2.0 \
+ android.hardware.audio.common@2.0
+else
+# libhardware configuration
+LOCAL_SRC_FILES += \
+ SoundTriggerHalLegacy.cpp
+endif
+
+
LOCAL_C_INCLUDES += \
$(TOPDIR)frameworks/av/services/audioflinger
LOCAL_MULTILIB := $(AUDIOSERVER_MULTILIB)
-LOCAL_CFLAGS := -Wall -Werror
+LOCAL_CFLAGS += -Wall -Werror
LOCAL_MODULE:= libsoundtriggerservice
diff --git a/services/soundtrigger/SoundTriggerHalHidl.cpp b/services/soundtrigger/SoundTriggerHalHidl.cpp
new file mode 100644
index 0000000..ecbdec4
--- /dev/null
+++ b/services/soundtrigger/SoundTriggerHalHidl.cpp
@@ -0,0 +1,626 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "SoundTriggerHalHidl"
+//#define LOG_NDEBUG 0
+
+#include <utils/Log.h>
+#include "SoundTriggerHalHidl.h"
+#include <hwbinder/IPCThreadState.h>
+#include <hwbinder/ProcessState.h>
+
+namespace android {
+
+using android::hardware::Return;
+using android::hardware::ProcessState;
+using android::hardware::audio::common::V2_0::AudioDevice;
+
+/* static */
+sp<SoundTriggerHalInterface> SoundTriggerHalInterface::connectModule(const char *moduleName)
+{
+ return new SoundTriggerHalHidl(moduleName);
+}
+
+int SoundTriggerHalHidl::getProperties(struct sound_trigger_properties *properties)
+{
+ sp<ISoundTriggerHw> soundtrigger = getService();
+ if (soundtrigger == 0) {
+ return -ENODEV;
+ }
+
+ ISoundTriggerHw::Properties halProperties;
+ Return<void> hidlReturn;
+ int ret;
+ {
+ AutoMutex lock(mHalLock);
+ hidlReturn = soundtrigger->getProperties([&](int rc, auto res) {
+ ret = rc;
+ halProperties = res;
+ ALOGI("getProperties res implementor %s", res.implementor.c_str());
+ });
+ }
+
+ if (hidlReturn.getStatus().isOk()) {
+ if (ret == 0) {
+ convertPropertiesFromHal(properties, &halProperties);
+ }
+ } else {
+ ret = (int)hidlReturn.getStatus().transactionError();
+ if (ret == -EPIPE) {
+ clearService();
+ }
+ }
+ ALOGI("getProperties ret %d", ret);
+ return ret;
+}
+
+int SoundTriggerHalHidl::loadSoundModel(struct sound_trigger_sound_model *sound_model,
+ sound_model_callback_t callback,
+ void *cookie,
+ sound_model_handle_t *handle)
+{
+ if (handle == NULL) {
+ return -EINVAL;
+ }
+
+ sp<ISoundTriggerHw> soundtrigger = getService();
+ if (soundtrigger == 0) {
+ return -ENODEV;
+ }
+
+ uint32_t modelId;
+ {
+ AutoMutex lock(mLock);
+ do {
+ modelId = nextUniqueId();
+ ALOGI("loadSoundModel modelId %u", modelId);
+ sp<SoundModel> model = mSoundModels.valueFor(modelId);
+ ALOGI("loadSoundModel model %p", model.get());
+ } while (mSoundModels.valueFor(modelId) != 0 && modelId != 0);
+ }
+ LOG_ALWAYS_FATAL_IF(modelId == 0,
+ "loadSoundModel(): wrap around in sound model IDs, num loaded models %zd",
+ mSoundModels.size());
+
+ ISoundTriggerHw::SoundModel *halSoundModel =
+ convertSoundModelToHal(sound_model);
+ if (halSoundModel == NULL) {
+ return -EINVAL;
+ }
+
+ Return<void> hidlReturn;
+ int ret;
+ SoundModelHandle halHandle;
+ {
+ AutoMutex lock(mHalLock);
+ if (sound_model->type == SOUND_MODEL_TYPE_KEYPHRASE) {
+ hidlReturn = soundtrigger->loadPhraseSoundModel(
+ *(const ISoundTriggerHw::PhraseSoundModel *)halSoundModel,
+ this, modelId, [&](int32_t retval, auto res) {
+ ret = retval;
+ halHandle = res;
+ });
+
+ } else {
+ hidlReturn = soundtrigger->loadSoundModel(*halSoundModel,
+ this, modelId, [&](int32_t retval, auto res) {
+ ret = retval;
+ halHandle = res;
+ });
+ }
+ }
+
+ delete halSoundModel;
+
+ if (hidlReturn.getStatus().isOk()) {
+ if (ret == 0) {
+ AutoMutex lock(mLock);
+ *handle = (sound_model_handle_t)modelId;
+ sp<SoundModel> model = new SoundModel(*handle, callback, cookie, halHandle);
+ mSoundModels.add(*handle, model);
+ }
+ } else {
+ ret = (int)hidlReturn.getStatus().transactionError();
+ ALOGE("loadSoundModel error %d", ret);
+ if (ret == -EPIPE) {
+ clearService();
+ }
+ }
+
+ return ret;
+}
+
+int SoundTriggerHalHidl::unloadSoundModel(sound_model_handle_t handle)
+{
+ sp<ISoundTriggerHw> soundtrigger = getService();
+ if (soundtrigger == 0) {
+ return -ENODEV;
+ }
+
+ sp<SoundModel> model = removeModel(handle);
+ if (model == 0) {
+ ALOGE("unloadSoundModel model not found for handle %u", handle);
+ return -EINVAL;
+ }
+
+ Return<int32_t> hidlReturn(0);
+ {
+ AutoMutex lock(mHalLock);
+ hidlReturn = soundtrigger->unloadSoundModel(model->mHalHandle);
+ }
+ int ret = (int)hidlReturn.getStatus().transactionError();
+ ALOGE_IF(ret != 0, "unloadSoundModel error %d", ret);
+ if (ret == -EPIPE) {
+ clearService();
+ }
+ if (ret == 0) {
+ ret = hidlReturn;
+ }
+ return ret;
+}
+
+int SoundTriggerHalHidl::startRecognition(sound_model_handle_t handle,
+ const struct sound_trigger_recognition_config *config,
+ recognition_callback_t callback,
+ void *cookie)
+{
+ sp<ISoundTriggerHw> soundtrigger = getService();
+ if (soundtrigger == 0) {
+ return -ENODEV;
+ }
+
+ sp<SoundModel> model = getModel(handle);
+ if (model == 0) {
+ ALOGE("startRecognition model not found for handle %u", handle);
+ return -EINVAL;
+ }
+
+ model->mRecognitionCallback = callback;
+ model->mRecognitionCookie = cookie;
+
+ ISoundTriggerHw::RecognitionConfig *halConfig =
+ convertRecognitionConfigToHal(config);
+
+ Return<int32_t> hidlReturn(0);
+ {
+ AutoMutex lock(mHalLock);
+ hidlReturn = soundtrigger->startRecognition(model->mHalHandle, *halConfig, this, handle);
+ }
+
+ delete halConfig;
+
+ int ret = (int)hidlReturn.getStatus().transactionError();
+ ALOGE_IF(ret != 0, "startRecognition error %d", ret);
+ if (ret == -EPIPE) {
+ clearService();
+ }
+ if (ret == 0) {
+ ret = hidlReturn;
+ }
+ return ret;
+}
+
+int SoundTriggerHalHidl::stopRecognition(sound_model_handle_t handle)
+{
+ sp<ISoundTriggerHw> soundtrigger = getService();
+ if (soundtrigger == 0) {
+ return -ENODEV;
+ }
+
+ sp<SoundModel> model = getModel(handle);
+ if (model == 0) {
+ ALOGE("stopRecognition model not found for handle %u", handle);
+ return -EINVAL;
+ }
+
+ Return<int32_t> hidlReturn(0);
+ {
+ AutoMutex lock(mHalLock);
+ hidlReturn = soundtrigger->stopRecognition(model->mHalHandle);
+ }
+
+ int ret = (int)hidlReturn.getStatus().transactionError();
+ ALOGE_IF(ret != 0, "stopRecognition error %d", ret);
+ if (ret == -EPIPE) {
+ clearService();
+ }
+ if (ret == 0) {
+ ret = hidlReturn;
+ }
+ return ret;
+}
+
+int SoundTriggerHalHidl::stopAllRecognitions()
+{
+ sp<ISoundTriggerHw> soundtrigger = getService();
+ if (soundtrigger == 0) {
+ return -ENODEV;
+ }
+
+ Return<int32_t> hidlReturn(0);
+ {
+ AutoMutex lock(mHalLock);
+ hidlReturn = soundtrigger->stopAllRecognitions();
+ }
+
+ int ret = (int)hidlReturn.getStatus().transactionError();
+ ALOGE_IF(ret != 0, "stopAllRecognitions error %d", ret);
+ if (ret == -EPIPE) {
+ clearService();
+ }
+ if (ret == 0) {
+ ret = hidlReturn;
+ }
+ return ret;
+}
+
+SoundTriggerHalHidl::SoundTriggerHalHidl(const char *moduleName)
+ : mModuleName(moduleName), mNextUniqueId(1)
+{
+}
+
+SoundTriggerHalHidl::~SoundTriggerHalHidl()
+{
+}
+
+sp<ISoundTriggerHw> SoundTriggerHalHidl::getService()
+{
+ AutoMutex lock(mLock);
+ if (mISoundTrigger == 0) {
+ if (mModuleName == NULL) {
+ mModuleName = "primary";
+ }
+ std::string serviceName = "sound_trigger.";
+ serviceName.append(mModuleName);
+ mISoundTrigger = ISoundTriggerHw::getService(serviceName);
+ }
+ return mISoundTrigger;
+}
+
+void SoundTriggerHalHidl::clearService()
+{
+ AutoMutex lock(mLock);
+ mISoundTrigger = 0;
+}
+
+sp<SoundTriggerHalHidl::SoundModel> SoundTriggerHalHidl::getModel(sound_model_handle_t handle)
+{
+ AutoMutex lock(mLock);
+ return mSoundModels.valueFor(handle);
+}
+
+sp<SoundTriggerHalHidl::SoundModel> SoundTriggerHalHidl::removeModel(sound_model_handle_t handle)
+{
+ AutoMutex lock(mLock);
+ sp<SoundModel> model = mSoundModels.valueFor(handle);
+ mSoundModels.removeItem(handle);
+ return model;
+}
+
+uint32_t SoundTriggerHalHidl::nextUniqueId()
+{
+ return (uint32_t) atomic_fetch_add_explicit(&mNextUniqueId,
+ (uint_fast32_t) 1, memory_order_acq_rel);
+}
+
+void SoundTriggerHalHidl::convertUuidToHal(Uuid *halUuid,
+ const sound_trigger_uuid_t *uuid)
+{
+ halUuid->timeLow = uuid->timeLow;
+ halUuid->timeMid = uuid->timeMid;
+ halUuid->versionAndTimeHigh = uuid->timeHiAndVersion;
+ halUuid->variantAndClockSeqHigh = uuid->clockSeq;
+ memcpy(halUuid->node.data(), &uuid->node[0], sizeof(uuid->node));
+}
+
+void SoundTriggerHalHidl::convertUuidFromHal(sound_trigger_uuid_t *uuid,
+ const Uuid *halUuid)
+{
+ uuid->timeLow = halUuid->timeLow;
+ uuid->timeMid = halUuid->timeMid;
+ uuid->timeHiAndVersion = halUuid->versionAndTimeHigh;
+ uuid->clockSeq = halUuid->variantAndClockSeqHigh;
+ memcpy(&uuid->node[0], halUuid->node.data(), sizeof(uuid->node));
+}
+
+void SoundTriggerHalHidl::convertPropertiesFromHal(
+ struct sound_trigger_properties *properties,
+ const ISoundTriggerHw::Properties *halProperties)
+{
+ strlcpy(properties->implementor,
+ halProperties->implementor.c_str(), SOUND_TRIGGER_MAX_STRING_LEN);
+ strlcpy(properties->description,
+ halProperties->description.c_str(), SOUND_TRIGGER_MAX_STRING_LEN);
+ properties->version = halProperties->version;
+ convertUuidFromHal(&properties->uuid, &halProperties->uuid);
+ properties->max_sound_models = halProperties->maxSoundModels;
+ properties->max_key_phrases = halProperties->maxKeyPhrases;
+ properties->max_users = halProperties->maxUsers;
+ properties->recognition_modes = halProperties->recognitionModes;
+ properties->capture_transition = (bool)halProperties->captureTransition;
+ properties->max_buffer_ms = halProperties->maxBufferMs;
+ properties->concurrent_capture = (bool)halProperties->concurrentCapture;
+ properties->trigger_in_event = (bool)halProperties->triggerInEvent;
+ properties->power_consumption_mw = halProperties->powerConsumptionMw;
+}
+
+void SoundTriggerHalHidl::convertTriggerPhraseToHal(
+ ISoundTriggerHw::Phrase *halTriggerPhrase,
+ const struct sound_trigger_phrase *triggerPhrase)
+{
+ halTriggerPhrase->id = triggerPhrase->id;
+ halTriggerPhrase->recognitionModes = triggerPhrase->recognition_mode;
+ halTriggerPhrase->users.setToExternal((uint32_t *)&triggerPhrase->users[0], triggerPhrase->num_users);
+ halTriggerPhrase->locale = triggerPhrase->locale;
+ halTriggerPhrase->text = triggerPhrase->text;
+}
+
+ISoundTriggerHw::SoundModel *SoundTriggerHalHidl::convertSoundModelToHal(
+ const struct sound_trigger_sound_model *soundModel)
+{
+ ISoundTriggerHw::SoundModel *halModel = NULL;
+ if (soundModel->type == SOUND_MODEL_TYPE_KEYPHRASE) {
+ ISoundTriggerHw::PhraseSoundModel *halKeyPhraseModel =
+ new ISoundTriggerHw::PhraseSoundModel();
+ struct sound_trigger_phrase_sound_model *keyPhraseModel =
+ (struct sound_trigger_phrase_sound_model *)soundModel;
+ ISoundTriggerHw::Phrase *halPhrases =
+ new ISoundTriggerHw::Phrase[keyPhraseModel->num_phrases];
+
+ for (unsigned int i = 0; i < keyPhraseModel->num_phrases; i++) {
+ convertTriggerPhraseToHal(&halPhrases[i],
+ &keyPhraseModel->phrases[i]);
+ }
+ halKeyPhraseModel->phrases.setToExternal(halPhrases, keyPhraseModel->num_phrases);
+ // FIXME: transfer buffer ownership. should have a method for that in hidl_vec
+ halKeyPhraseModel->phrases.resize(keyPhraseModel->num_phrases);
+
+ delete[] halPhrases;
+
+ halModel = (ISoundTriggerHw::SoundModel *)halKeyPhraseModel;
+ } else {
+ halModel = new ISoundTriggerHw::SoundModel();
+ }
+ halModel->type = (SoundModelType)soundModel->type;
+ convertUuidToHal(&halModel->uuid, &soundModel->uuid);
+ convertUuidToHal(&halModel->vendorUuid, &soundModel->vendor_uuid);
+ halModel->data.setToExternal((uint8_t *)soundModel + soundModel->data_offset, soundModel->data_size);
+ halModel->data.resize(soundModel->data_size);
+
+ return halModel;
+}
+
+void SoundTriggerHalHidl::convertPhraseRecognitionExtraToHal(
+ PhraseRecognitionExtra *halExtra,
+ const struct sound_trigger_phrase_recognition_extra *extra)
+{
+ halExtra->id = extra->id;
+ halExtra->recognitionModes = extra->recognition_modes;
+ halExtra->confidenceLevel = extra->confidence_level;
+ ConfidenceLevel *halLevels =
+ new ConfidenceLevel[extra->num_levels];
+ for (unsigned int i = 0; i < extra->num_levels; i++) {
+ halLevels[i].userId = extra->levels[i].user_id;
+ halLevels[i].levelPercent = extra->levels[i].level;
+ }
+ halExtra->levels.setToExternal(halLevels, extra->num_levels);
+ // FIXME: transfer buffer ownership. should have a method for that in hidl_vec
+ halExtra->levels.resize(extra->num_levels);
+
+ delete[] halLevels;
+}
+
+
+ISoundTriggerHw::RecognitionConfig *SoundTriggerHalHidl::convertRecognitionConfigToHal(
+ const struct sound_trigger_recognition_config *config)
+{
+ ISoundTriggerHw::RecognitionConfig *halConfig =
+ new ISoundTriggerHw::RecognitionConfig();
+
+ halConfig->captureHandle = config->capture_handle;
+ halConfig->captureDevice = (AudioDevice)config->capture_device;
+ halConfig->captureRequested = (uint32_t)config->capture_requested;
+
+ PhraseRecognitionExtra *halExtras =
+ new PhraseRecognitionExtra[config->num_phrases];
+
+ for (unsigned int i = 0; i < config->num_phrases; i++) {
+ convertPhraseRecognitionExtraToHal(&halExtras[i],
+ &config->phrases[i]);
+ }
+ halConfig->phrases.setToExternal(halExtras, config->num_phrases);
+ // FIXME: transfer buffer ownership. should have a method for that in hidl_vec
+ halConfig->phrases.resize(config->num_phrases);
+
+ delete[] halExtras;
+
+ halConfig->data.setToExternal((uint8_t *)config + config->data_offset, config->data_size);
+
+ return halConfig;
+}
+
+
+// ISoundTriggerHwCallback
+::android::hardware::Return<void> SoundTriggerHalHidl::recognitionCallback(
+ const ISoundTriggerHwCallback::RecognitionEvent& halEvent,
+ CallbackCookie cookie)
+{
+ sp<SoundModel> model;
+ {
+ AutoMutex lock(mLock);
+ model = mSoundModels.valueFor((SoundModelHandle)cookie);
+ if (model == 0) {
+ return Return<void>();
+ }
+ }
+ struct sound_trigger_recognition_event *event = convertRecognitionEventFromHal(&halEvent);
+ if (event == NULL) {
+ return Return<void>();
+ }
+ event->model = model->mHandle;
+ model->mRecognitionCallback(event, model->mRecognitionCookie);
+
+ free(event);
+
+ return Return<void>();
+}
+
+::android::hardware::Return<void> SoundTriggerHalHidl::phraseRecognitionCallback(
+ const ISoundTriggerHwCallback::PhraseRecognitionEvent& halEvent,
+ CallbackCookie cookie)
+{
+ sp<SoundModel> model;
+ {
+ AutoMutex lock(mLock);
+ model = mSoundModels.valueFor((SoundModelHandle)cookie);
+ if (model == 0) {
+ return Return<void>();
+ }
+ }
+
+ struct sound_trigger_recognition_event *event = convertRecognitionEventFromHal(
+ (const ISoundTriggerHwCallback::RecognitionEvent *)&halEvent);
+ if (event == NULL) {
+ return Return<void>();
+ }
+
+ event->model = model->mHandle;
+ model->mRecognitionCallback(event, model->mRecognitionCookie);
+
+ free(event);
+
+ return Return<void>();
+}
+
+::android::hardware::Return<void> SoundTriggerHalHidl::soundModelCallback(
+ const ISoundTriggerHwCallback::ModelEvent& halEvent,
+ CallbackCookie cookie)
+{
+ sp<SoundModel> model;
+ {
+ AutoMutex lock(mLock);
+ model = mSoundModels.valueFor((SoundModelHandle)cookie);
+ if (model == 0) {
+ return Return<void>();
+ }
+ }
+
+ struct sound_trigger_model_event *event = convertSoundModelEventFromHal(&halEvent);
+ if (event == NULL) {
+ return Return<void>();
+ }
+
+ event->model = model->mHandle;
+ model->mSoundModelCallback(event, model->mSoundModelCookie);
+
+ free(event);
+
+ return Return<void>();
+}
+
+
+struct sound_trigger_model_event *SoundTriggerHalHidl::convertSoundModelEventFromHal(
+ const ISoundTriggerHwCallback::ModelEvent *halEvent)
+{
+ struct sound_trigger_model_event *event = (struct sound_trigger_model_event *)malloc(
+ sizeof(struct sound_trigger_model_event) +
+ halEvent->data.size());
+ if (event == NULL) {
+ return NULL;
+ }
+
+ event->status = (int)halEvent->status;
+ // event->model to be set by caller
+ event->data_offset = sizeof(struct sound_trigger_model_event);
+ event->data_size = halEvent->data.size();
+ uint8_t *dst = (uint8_t *)event + event->data_offset;
+ uint8_t *src = (uint8_t *)&halEvent->data[0];
+ memcpy(dst, src, halEvent->data.size());
+
+ return event;
+}
+
+void SoundTriggerHalHidl::convertPhraseRecognitionExtraFromHal(
+ struct sound_trigger_phrase_recognition_extra *extra,
+ const PhraseRecognitionExtra *halExtra)
+{
+ extra->id = halExtra->id;
+ extra->recognition_modes = halExtra->recognitionModes;
+ extra->confidence_level = halExtra->confidenceLevel;
+
+ size_t i;
+ for (i = 0; i < halExtra->levels.size() && i < SOUND_TRIGGER_MAX_USERS; i++) {
+ extra->levels[i].user_id = halExtra->levels[i].userId;
+ extra->levels[i].level = halExtra->levels[i].levelPercent;
+ }
+ extra->num_levels = (unsigned int)i;
+}
+
+
+struct sound_trigger_recognition_event *SoundTriggerHalHidl::convertRecognitionEventFromHal(
+ const ISoundTriggerHwCallback::RecognitionEvent *halEvent)
+{
+ struct sound_trigger_recognition_event *event;
+
+ if (halEvent->type == SoundModelType::KEYPHRASE) {
+ struct sound_trigger_phrase_recognition_event *phraseEvent =
+ (struct sound_trigger_phrase_recognition_event *)malloc(
+ sizeof(struct sound_trigger_phrase_recognition_event) +
+ halEvent->data.size());
+ if (phraseEvent == NULL) {
+ return NULL;
+ }
+ const ISoundTriggerHwCallback::PhraseRecognitionEvent *halPhraseEvent =
+ (const ISoundTriggerHwCallback::PhraseRecognitionEvent *)halEvent;
+
+ for (unsigned int i = 0; i < halPhraseEvent->phraseExtras.size(); i++) {
+ convertPhraseRecognitionExtraFromHal(&phraseEvent->phrase_extras[i],
+ &halPhraseEvent->phraseExtras[i]);
+ }
+ phraseEvent->num_phrases = halPhraseEvent->phraseExtras.size();
+ event = (struct sound_trigger_recognition_event *)phraseEvent;
+ event->data_offset = sizeof(sound_trigger_phrase_recognition_event);
+ } else {
+ event = (struct sound_trigger_recognition_event *)malloc(
+ sizeof(struct sound_trigger_recognition_event) + halEvent->data.size());
+ if (event == NULL) {
+ return NULL;
+ }
+ event->data_offset = sizeof(sound_trigger_recognition_event);
+ }
+ event->status = (int)halEvent->status;
+ event->type = (sound_trigger_sound_model_type_t)halEvent->type;
+ // event->model to be set by caller
+ event->capture_available = (bool)halEvent->captureAvailable;
+ event->capture_session = halEvent->captureSession;
+ event->capture_delay_ms = halEvent->captureDelayMs;
+ event->capture_preamble_ms = halEvent->capturePreambleMs;
+ event->trigger_in_data = (bool)halEvent->triggerInData;
+ event->audio_config.sample_rate = halEvent->audioConfig.sampleRateHz;
+ event->audio_config.channel_mask = (audio_channel_mask_t)halEvent->audioConfig.channelMask;
+ event->audio_config.format = (audio_format_t)halEvent->audioConfig.format;
+
+ event->data_size = halEvent->data.size();
+ uint8_t *dst = (uint8_t *)event + event->data_offset;
+ uint8_t *src = (uint8_t *)&halEvent->data[0];
+ memcpy(dst, src, halEvent->data.size());
+
+ return event;
+}
+
+} // namespace android
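For orientation, the flow introduced by this file is: SoundTriggerHalInterface::connectModule() returns the HIDL-backed wrapper, and the actual ISoundTriggerHw proxy is only resolved lazily in getService() under the instance name "sound_trigger.<module>". A minimal caller-side sketch of that flow (the helper name probeSoundTriggerModule is illustrative, not part of the change):

#define LOG_TAG "SoundTriggerHalExample"
#include <utils/Log.h>
#include <utils/StrongPointer.h>
#include "SoundTriggerHalInterface.h"

// Connects to the "primary" sound trigger module and reads its properties.
// getProperties() returns -ENODEV when the HIDL service cannot be reached.
int probeSoundTriggerModule() {
    android::sp<android::SoundTriggerHalInterface> hal =
            android::SoundTriggerHalInterface::connectModule("primary");
    struct sound_trigger_properties props;
    int rc = hal->getProperties(&props);
    ALOGI("sound trigger implementor: %s (rc %d)",
          rc == 0 ? props.implementor : "n/a", rc);
    return rc;
}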
diff --git a/services/soundtrigger/SoundTriggerHalHidl.h b/services/soundtrigger/SoundTriggerHalHidl.h
new file mode 100644
index 0000000..e578dda
--- /dev/null
+++ b/services/soundtrigger/SoundTriggerHalHidl.h
@@ -0,0 +1,164 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_SOUNDTRIGGER_HAL_HIDL_H
+#define ANDROID_HARDWARE_SOUNDTRIGGER_HAL_HIDL_H
+
+#include <utils/RefBase.h>
+#include <utils/KeyedVector.h>
+#include <utils/Vector.h>
+#include <utils/threads.h>
+#include "SoundTriggerHalInterface.h"
+#include <android/hardware/soundtrigger/2.0/types.h>
+#include <android/hardware/soundtrigger/2.0/ISoundTriggerHw.h>
+#include <android/hardware/soundtrigger/2.0/ISoundTriggerHwCallback.h>
+#include <android/hardware/soundtrigger/2.0/BnSoundTriggerHwCallback.h>
+
+namespace android {
+
+using android::hardware::audio::common::V2_0::Uuid;
+using android::hardware::soundtrigger::V2_0::ConfidenceLevel;
+using android::hardware::soundtrigger::V2_0::PhraseRecognitionExtra;
+using android::hardware::soundtrigger::V2_0::SoundModelType;
+using android::hardware::soundtrigger::V2_0::SoundModelHandle;
+using android::hardware::soundtrigger::V2_0::ISoundTriggerHw;
+using android::hardware::soundtrigger::V2_0::ISoundTriggerHwCallback;
+
+class SoundTriggerHalHidl : public SoundTriggerHalInterface,
+ public virtual ISoundTriggerHwCallback
+{
+public:
+ virtual int getProperties(struct sound_trigger_properties *properties);
+
+ /*
+ * Load a sound model. Once loaded, recognition of this model can be started and stopped.
+ * Only one active recognition per model at a time. The SoundTrigger service will handle
+ * concurrent recognition requests by different users/applications on the same model.
+ * The implementation returns a unique handle used by other functions (unload_sound_model(),
+ * start_recognition(), etc...)
+ */
+ virtual int loadSoundModel(struct sound_trigger_sound_model *sound_model,
+ sound_model_callback_t callback,
+ void *cookie,
+ sound_model_handle_t *handle);
+
+ /*
+ * Unload a sound model. A sound model can be unloaded to make room for a new one to overcome
+ * implementation limitations.
+ */
+ virtual int unloadSoundModel(sound_model_handle_t handle);
+
+ /* Start recognition on a given model. Only one recognition active at a time per model.
+ * Once recognition succeeds or fails, the callback is called.
+ * TODO: group recognition configuration parameters into one struct and add key phrase options.
+ */
+ virtual int startRecognition(sound_model_handle_t handle,
+ const struct sound_trigger_recognition_config *config,
+ recognition_callback_t callback,
+ void *cookie);
+
+ /* Stop recognition on a given model.
+ * The implementation does not have to call the callback when stopped via this method.
+ */
+ virtual int stopRecognition(sound_model_handle_t handle);
+
+ /* Stop recognition on all models.
+ * Only supported for device api versions SOUND_TRIGGER_DEVICE_API_VERSION_1_1 or above.
+ * If no implementation is provided, stop_recognition will be called for each running model.
+ */
+ virtual int stopAllRecognitions();
+
+ // ISoundTriggerHwCallback
+ virtual ::android::hardware::Return<void> recognitionCallback(
+ const ISoundTriggerHwCallback::RecognitionEvent& event, CallbackCookie cookie);
+ virtual ::android::hardware::Return<void> phraseRecognitionCallback(
+ const ISoundTriggerHwCallback::PhraseRecognitionEvent& event, int32_t cookie);
+ virtual ::android::hardware::Return<void> soundModelCallback(
+ const ISoundTriggerHwCallback::ModelEvent& event, CallbackCookie cookie);
+private:
+ class SoundModel : public RefBase {
+ public:
+ SoundModel(sound_model_handle_t handle, sound_model_callback_t callback,
+ void *cookie, android::hardware::soundtrigger::V2_0::SoundModelHandle halHandle)
+ : mHandle(handle), mHalHandle(halHandle),
+ mSoundModelCallback(callback), mSoundModelCookie(cookie),
+ mRecognitionCallback(NULL), mRecognitionCookie(NULL) {}
+ ~SoundModel() {}
+
+ sound_model_handle_t mHandle;
+ android::hardware::soundtrigger::V2_0::SoundModelHandle mHalHandle;
+ sound_model_callback_t mSoundModelCallback;
+ void * mSoundModelCookie;
+ recognition_callback_t mRecognitionCallback;
+ void * mRecognitionCookie;
+ };
+
+ friend class SoundTriggerHalInterface;
+
+ explicit SoundTriggerHalHidl(const char *moduleName = NULL);
+ virtual ~SoundTriggerHalHidl();
+
+ void convertUuidToHal(Uuid *halUuid,
+ const sound_trigger_uuid_t *uuid);
+ void convertUuidFromHal(sound_trigger_uuid_t *uuid,
+ const Uuid *halUuid);
+
+ void convertPropertiesFromHal(
+ struct sound_trigger_properties *properties,
+ const ISoundTriggerHw::Properties *halProperties);
+
+ void convertTriggerPhraseToHal(
+ ISoundTriggerHw::Phrase *halTriggerPhrase,
+ const struct sound_trigger_phrase *triggerPhrase);
+ ISoundTriggerHw::SoundModel *convertSoundModelToHal(
+ const struct sound_trigger_sound_model *soundModel);
+
+ void convertPhraseRecognitionExtraToHal(
+ PhraseRecognitionExtra *halExtra,
+ const struct sound_trigger_phrase_recognition_extra *extra);
+ ISoundTriggerHw::RecognitionConfig *convertRecognitionConfigToHal(
+ const struct sound_trigger_recognition_config *config);
+
+ struct sound_trigger_model_event *convertSoundModelEventFromHal(
+ const ISoundTriggerHwCallback::ModelEvent *halEvent);
+ void convertPhraseRecognitionExtraFromHal(
+ struct sound_trigger_phrase_recognition_extra *extra,
+ const PhraseRecognitionExtra *halExtra);
+ struct sound_trigger_recognition_event *convertRecognitionEventFromHal(
+ const ISoundTriggerHwCallback::RecognitionEvent *halEvent);
+
+ uint32_t nextUniqueId();
+ sp<ISoundTriggerHw> getService();
+ void clearService();
+ sp<SoundModel> getModel(sound_model_handle_t handle);
+ sp<SoundModel> removeModel(sound_model_handle_t handle);
+
+ static pthread_once_t sOnceControl;
+ static void sOnceInit();
+
+ Mutex mLock;
+ Mutex mHalLock;
+ const char *mModuleName;
+ volatile atomic_uint_fast32_t mNextUniqueId;
+ // Loaded sound models, keyed by sound model handle
+ DefaultKeyedVector< sound_model_handle_t , sp<SoundModel> > mSoundModels;
+ sp<::android::hardware::soundtrigger::V2_0::ISoundTriggerHw> mISoundTrigger;
+};
+
+} // namespace android
+
+#endif // ANDROID_HARDWARE_SOUNDTRIGGER_HAL_HIDL_H
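The implementation behind this header relies on the HIDL synchronous-callback convention: methods that return values deliver them through a lambda, while the Return<> object only carries transport status, and a broken transport (transactionError() == -EPIPE) is what triggers clearService() in the .cpp above. A condensed sketch of that pattern, assuming the same includes and using-declarations as SoundTriggerHalHidl.cpp (the free function fetchHalProperties is illustrative only):

// Returns the status reported by the HAL, or the binder transaction error
// if the call never reached the HAL implementation.
int fetchHalProperties(const sp<ISoundTriggerHw>& soundtrigger,
                       ISoundTriggerHw::Properties* out) {
    int ret = -EINVAL;
    ::android::hardware::Return<void> hidlReturn = soundtrigger->getProperties(
            [&](int rc, auto res) {
                ret = rc;    // HAL-level status
                *out = res;  // copy the result out of the callback
            });
    if (!hidlReturn.getStatus().isOk()) {
        ret = (int)hidlReturn.getStatus().transactionError();
    }
    return ret;
}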
diff --git a/services/soundtrigger/SoundTriggerHalInterface.h b/services/soundtrigger/SoundTriggerHalInterface.h
new file mode 100644
index 0000000..c083195
--- /dev/null
+++ b/services/soundtrigger/SoundTriggerHalInterface.h
@@ -0,0 +1,80 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_SOUNDTRIGGER_HAL_INTERFACE_H
+#define ANDROID_HARDWARE_SOUNDTRIGGER_HAL_INTERFACE_H
+
+#include <utils/RefBase.h>
+#include <system/sound_trigger.h>
+#include <hardware/sound_trigger.h>
+
+namespace android {
+
+class SoundTriggerHalInterface : public virtual RefBase
+{
+public:
+ /* get a sound trigger HAL instance */
+ static sp<SoundTriggerHalInterface> connectModule(const char *moduleName);
+
+ virtual ~SoundTriggerHalInterface() {}
+
+ virtual int getProperties(struct sound_trigger_properties *properties) = 0;
+
+ /*
+ * Load a sound model. Once loaded, recognition of this model can be started and stopped.
+ * Only one active recognition per model at a time. The SoundTrigger service will handle
+ * concurrent recognition requests by different users/applications on the same model.
+ * The implementation returns a unique handle used by other functions (unload_sound_model(),
+ * start_recognition(), etc...)
+ */
+ virtual int loadSoundModel(struct sound_trigger_sound_model *sound_model,
+ sound_model_callback_t callback,
+ void *cookie,
+ sound_model_handle_t *handle) = 0;
+
+ /*
+ * Unload a sound model. A sound model can be unloaded to make room for a new one to overcome
+ * implementation limitations.
+ */
+ virtual int unloadSoundModel(sound_model_handle_t handle) = 0;
+
+ /* Start recognition on a given model. Only one recognition active at a time per model.
+ * Once recognition succeeds or fails, the callback is called.
+ * TODO: group recognition configuration parameters into one struct and add key phrase options.
+ */
+ virtual int startRecognition(sound_model_handle_t handle,
+ const struct sound_trigger_recognition_config *config,
+ recognition_callback_t callback,
+ void *cookie) = 0;
+
+ /* Stop recognition on a given model.
+ * The implementation does not have to call the callback when stopped via this method.
+ */
+ virtual int stopRecognition(sound_model_handle_t handle) = 0;
+
+ /* Stop recognition on all models.
+ * Only supported for device api versions SOUND_TRIGGER_DEVICE_API_VERSION_1_1 or above.
+ * If no implementation is provided, stop_recognition will be called for each running model.
+ */
+ virtual int stopAllRecognitions() = 0;
+
+protected:
+ SoundTriggerHalInterface() {}
+};
+
+} // namespace android
+
+#endif // ANDROID_HARDWARE_SOUNDTRIGGER_HAL_INTERFACE_H
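Taken together, the interface is meant to be driven in the usual load/start/stop/unload order, with events delivered on the C callback types from system/sound_trigger.h. A rough usage sketch (callback and helper names are examples only, not part of this change):

#define LOG_TAG "SoundTriggerHalUsage"
#include <utils/Log.h>
#include <utils/StrongPointer.h>
#include "SoundTriggerHalInterface.h"

static void onModelEvent(struct sound_trigger_model_event* /*event*/, void* /*cookie*/) {}

static void onRecognition(struct sound_trigger_recognition_event* event, void* /*cookie*/) {
    ALOGI("recognition status %d for model %d", event->status, event->model);
}

int runOnce(const android::sp<android::SoundTriggerHalInterface>& hal,
            struct sound_trigger_sound_model* model,
            const struct sound_trigger_recognition_config* config) {
    sound_model_handle_t handle;
    int rc = hal->loadSoundModel(model, onModelEvent, nullptr, &handle);
    if (rc != 0) return rc;
    rc = hal->startRecognition(handle, config, onRecognition, nullptr);
    // ... recognition events arrive on onRecognition() ...
    hal->stopRecognition(handle);
    hal->unloadSoundModel(handle);
    return rc;
}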
diff --git a/services/soundtrigger/SoundTriggerHalLegacy.cpp b/services/soundtrigger/SoundTriggerHalLegacy.cpp
new file mode 100644
index 0000000..2b78818
--- /dev/null
+++ b/services/soundtrigger/SoundTriggerHalLegacy.cpp
@@ -0,0 +1,127 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <utils/Log.h>
+#include "SoundTriggerHalLegacy.h"
+
+namespace android {
+
+/* static */
+sp<SoundTriggerHalInterface> SoundTriggerHalInterface::connectModule(const char *moduleName)
+{
+ return new SoundTriggerHalLegacy(moduleName);
+}
+
+SoundTriggerHalLegacy::SoundTriggerHalLegacy(const char *moduleName)
+ : mModuleName(moduleName), mHwDevice(NULL)
+{
+}
+
+void SoundTriggerHalLegacy::onFirstRef()
+{
+ const hw_module_t *mod;
+ int rc;
+
+ if (mModuleName == NULL) {
+ mModuleName = "primary";
+ }
+
+ rc = hw_get_module_by_class(SOUND_TRIGGER_HARDWARE_MODULE_ID, mModuleName, &mod);
+ if (rc != 0) {
+ ALOGE("couldn't load sound trigger module %s.%s (%s)",
+ SOUND_TRIGGER_HARDWARE_MODULE_ID, mModuleName, strerror(-rc));
+ return;
+ }
+ rc = sound_trigger_hw_device_open(mod, &mHwDevice);
+ if (rc != 0) {
+ ALOGE("couldn't open sound trigger hw device in %s.%s (%s)",
+ SOUND_TRIGGER_HARDWARE_MODULE_ID, mModuleName, strerror(-rc));
+ mHwDevice = NULL;
+ return;
+ }
+ if (mHwDevice->common.version < SOUND_TRIGGER_DEVICE_API_VERSION_1_0 ||
+ mHwDevice->common.version > SOUND_TRIGGER_DEVICE_API_VERSION_CURRENT) {
+ ALOGE("wrong sound trigger hw device version %04x", mHwDevice->common.version);
+ return;
+ }
+}
+
+SoundTriggerHalLegacy::~SoundTriggerHalLegacy()
+{
+ if (mHwDevice != NULL) {
+ sound_trigger_hw_device_close(mHwDevice);
+ }
+}
+
+int SoundTriggerHalLegacy::getProperties(struct sound_trigger_properties *properties)
+{
+ if (mHwDevice == NULL) {
+ return -ENODEV;
+ }
+ return mHwDevice->get_properties(mHwDevice, properties);
+}
+
+int SoundTriggerHalLegacy::loadSoundModel(struct sound_trigger_sound_model *sound_model,
+ sound_model_callback_t callback,
+ void *cookie,
+ sound_model_handle_t *handle)
+{
+ if (mHwDevice == NULL) {
+ return -ENODEV;
+ }
+ return mHwDevice->load_sound_model(mHwDevice, sound_model, callback, cookie, handle);
+}
+
+int SoundTriggerHalLegacy::unloadSoundModel(sound_model_handle_t handle)
+{
+ if (mHwDevice == NULL) {
+ return -ENODEV;
+ }
+ return mHwDevice->unload_sound_model(mHwDevice, handle);
+}
+
+int SoundTriggerHalLegacy::startRecognition(sound_model_handle_t handle,
+ const struct sound_trigger_recognition_config *config,
+ recognition_callback_t callback,
+ void *cookie)
+{
+ if (mHwDevice == NULL) {
+ return -ENODEV;
+ }
+ return mHwDevice->start_recognition(mHwDevice, handle, config, callback, cookie);
+}
+
+int SoundTriggerHalLegacy::stopRecognition(sound_model_handle_t handle)
+{
+ if (mHwDevice == NULL) {
+ return -ENODEV;
+ }
+ return mHwDevice->stop_recognition(mHwDevice, handle);
+}
+
+int SoundTriggerHalLegacy::stopAllRecognitions()
+{
+ if (mHwDevice == NULL) {
+ return -ENODEV;
+ }
+ if (mHwDevice->common.version >= SOUND_TRIGGER_DEVICE_API_VERSION_1_1 &&
+ mHwDevice->stop_all_recognitions) {
+ return mHwDevice->stop_all_recognitions(mHwDevice);
+ }
+ return -ENOSYS;
+}
+
+} // namespace android
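The one behavioral subtlety in this wrapper is stopAllRecognitions(): devices below SOUND_TRIGGER_DEVICE_API_VERSION_1_1, or without the stop_all_recognitions hook, report -ENOSYS, and callers are expected to fall back to stopping each model individually, which is what SoundTriggerHwService does. A caller-side sketch of that contract (hypothetical helper, assuming <vector>):

#include <vector>
#include <utils/StrongPointer.h>
#include "SoundTriggerHalInterface.h"

void stopEverything(const android::sp<android::SoundTriggerHalInterface>& hal,
                    const std::vector<sound_model_handle_t>& activeModels) {
    if (hal->stopAllRecognitions() != 0) {
        // Bulk stop unsupported (-ENOSYS) or failed; stop models one at a time.
        for (sound_model_handle_t h : activeModels) {
            hal->stopRecognition(h);
        }
    }
}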
diff --git a/services/soundtrigger/SoundTriggerHalLegacy.h b/services/soundtrigger/SoundTriggerHalLegacy.h
new file mode 100644
index 0000000..52488de
--- /dev/null
+++ b/services/soundtrigger/SoundTriggerHalLegacy.h
@@ -0,0 +1,85 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_SOUNDTRIGGER_HAL_LEGACY_H
+#define ANDROID_HARDWARE_SOUNDTRIGGER_HAL_LEGACY_H
+
+#include "SoundTriggerHalInterface.h"
+
+namespace android {
+
+class SoundTriggerHalLegacy : public SoundTriggerHalInterface
+{
+public:
+ virtual ~SoundTriggerHalLegacy();
+
+ virtual int getProperties(struct sound_trigger_properties *properties);
+
+ /*
+ * Load a sound model. Once loaded, recognition of this model can be started and stopped.
+ * Only one active recognition per model at a time. The SoundTrigger service will handle
+ * concurrent recognition requests by different users/applications on the same model.
+ * The implementation returns a unique handle used by other functions (unload_sound_model(),
+ * start_recognition(), etc...)
+ */
+ virtual int loadSoundModel(struct sound_trigger_sound_model *sound_model,
+ sound_model_callback_t callback,
+ void *cookie,
+ sound_model_handle_t *handle);
+
+ /*
+ * Unload a sound model. A sound model can be unloaded to make room for a new one to overcome
+ * implementation limitations.
+ */
+ virtual int unloadSoundModel(sound_model_handle_t handle);
+
+ /* Start recognition on a given model. Only one recognition active at a time per model.
+ * Once recognition succeeds or fails, the callback is called.
+ * TODO: group recognition configuration parameters into one struct and add key phrase options.
+ */
+ virtual int startRecognition(sound_model_handle_t handle,
+ const struct sound_trigger_recognition_config *config,
+ recognition_callback_t callback,
+ void *cookie);
+
+ /* Stop recognition on a given model.
+ * The implementation does not have to call the callback when stopped via this method.
+ */
+ virtual int stopRecognition(sound_model_handle_t handle);
+
+ /* Stop recognition on all models.
+ * Only supported for device api versions SOUND_TRIGGER_DEVICE_API_VERSION_1_1 or above.
+ * If no implementation is provided, stop_recognition will be called for each running model.
+ */
+ int stopAllRecognitions();
+
+ // RefBase
+ virtual void onFirstRef();
+
+private:
+
+ friend class SoundTriggerHalInterface;
+
+ explicit SoundTriggerHalLegacy(const char *moduleName = NULL);
+
+ const char *mModuleName;
+ struct sound_trigger_hw_device* mHwDevice;
+};
+
+} // namespace android
+
+#endif // ANDROID_HARDWARE_SOUNDTRIGGER_HAL_LEGACY_H
diff --git a/services/soundtrigger/SoundTriggerHwService.cpp b/services/soundtrigger/SoundTriggerHwService.cpp
index 6a52b9c..3ba7f62 100644
--- a/services/soundtrigger/SoundTriggerHwService.cpp
+++ b/services/soundtrigger/SoundTriggerHwService.cpp
@@ -32,17 +32,16 @@
#include <binder/IServiceManager.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
-#include <hardware/sound_trigger.h>
+#include <system/sound_trigger.h>
#include <ServiceUtilities.h>
#include "SoundTriggerHwService.h"
-namespace android {
-
#ifdef SOUND_TRIGGER_USE_STUB_MODULE
#define HW_MODULE_PREFIX "stub"
#else
#define HW_MODULE_PREFIX "primary"
#endif
+namespace android {
SoundTriggerHwService::SoundTriggerHwService()
: BnSoundTriggerHwService(),
@@ -54,30 +53,17 @@
void SoundTriggerHwService::onFirstRef()
{
- const hw_module_t *mod;
int rc;
- sound_trigger_hw_device *dev;
- rc = hw_get_module_by_class(SOUND_TRIGGER_HARDWARE_MODULE_ID, HW_MODULE_PREFIX, &mod);
- if (rc != 0) {
- ALOGE("couldn't load sound trigger module %s.%s (%s)",
- SOUND_TRIGGER_HARDWARE_MODULE_ID, HW_MODULE_PREFIX, strerror(-rc));
- return;
- }
- rc = sound_trigger_hw_device_open(mod, &dev);
- if (rc != 0) {
- ALOGE("couldn't open sound trigger hw device in %s.%s (%s)",
- SOUND_TRIGGER_HARDWARE_MODULE_ID, HW_MODULE_PREFIX, strerror(-rc));
- return;
- }
- if (dev->common.version < SOUND_TRIGGER_DEVICE_API_VERSION_1_0 ||
- dev->common.version > SOUND_TRIGGER_DEVICE_API_VERSION_CURRENT) {
- ALOGE("wrong sound trigger hw device version %04x", dev->common.version);
- return;
- }
+ sp<SoundTriggerHalInterface> halInterface =
+ SoundTriggerHalInterface::connectModule(HW_MODULE_PREFIX);
+ if (halInterface == 0) {
+ ALOGW("could not connect to HAL");
+ return;
+ }
sound_trigger_module_descriptor descriptor;
- rc = dev->get_properties(dev, &descriptor.properties);
+ rc = halInterface->getProperties(&descriptor.properties);
if (rc != 0) {
ALOGE("could not read implementation properties");
return;
@@ -88,7 +74,7 @@
descriptor.handle);
sp<ISoundTriggerClient> client;
- sp<Module> module = new Module(this, dev, descriptor, client);
+ sp<Module> module = new Module(this, halInterface, descriptor, client);
mModules.add(descriptor.handle, module);
mCallbackThread = new CallbackThread(this);
}
@@ -98,9 +84,6 @@
if (mCallbackThread != 0) {
mCallbackThread->exit();
}
- for (size_t i = 0; i < mModules.size(); i++) {
- sound_trigger_hw_device_close(mModules.valueAt(i)->hwDevice());
- }
}
status_t SoundTriggerHwService::listModules(struct sound_trigger_module_descriptor *modules,
@@ -489,10 +472,10 @@
#define LOG_TAG "SoundTriggerHwService::Module"
SoundTriggerHwService::Module::Module(const sp<SoundTriggerHwService>& service,
- sound_trigger_hw_device* hwDevice,
+ const sp<SoundTriggerHalInterface>& halInterface,
sound_trigger_module_descriptor descriptor,
const sp<ISoundTriggerClient>& client)
- : mService(service), mHwDevice(hwDevice), mDescriptor(descriptor),
+ : mService(service), mHalInterface(halInterface), mDescriptor(descriptor),
mClient(client), mServiceState(SOUND_TRIGGER_STATE_NO_INIT)
{
}
@@ -510,10 +493,12 @@
for (size_t i = 0; i < mModels.size(); i++) {
sp<Model> model = mModels.valueAt(i);
ALOGV("detach() unloading model %d", model->mHandle);
- if (model->mState == Model::STATE_ACTIVE) {
- mHwDevice->stop_recognition(mHwDevice, model->mHandle);
+ if (mHalInterface != 0) {
+ if (model->mState == Model::STATE_ACTIVE) {
+ mHalInterface->stopRecognition(model->mHandle);
+ }
+ mHalInterface->unloadSoundModel(model->mHandle);
}
- mHwDevice->unload_sound_model(mHwDevice, model->mHandle);
}
mModels.clear();
}
@@ -531,10 +516,12 @@
sound_model_handle_t *handle)
{
ALOGV("loadSoundModel() handle");
+ if (mHalInterface == 0) {
+ return NO_INIT;
+ }
if (!captureHotwordAllowed()) {
return PERMISSION_DENIED;
}
-
if (modelMemory == 0 || modelMemory->pointer() == NULL) {
ALOGE("loadSoundModel() modelMemory is 0 or has NULL pointer()");
return BAD_VALUE;
@@ -566,7 +553,7 @@
return INVALID_OPERATION;
}
- status_t status = mHwDevice->load_sound_model(mHwDevice, sound_model,
+ status_t status = mHalInterface->loadSoundModel(sound_model,
SoundTriggerHwService::soundModelCallback,
this, handle);
@@ -601,6 +588,9 @@
status_t SoundTriggerHwService::Module::unloadSoundModel_l(sound_model_handle_t handle)
{
+ if (mHalInterface == 0) {
+ return NO_INIT;
+ }
ssize_t index = mModels.indexOfKey(handle);
if (index < 0) {
return BAD_VALUE;
@@ -608,17 +598,20 @@
sp<Model> model = mModels.valueAt(index);
mModels.removeItem(handle);
if (model->mState == Model::STATE_ACTIVE) {
- mHwDevice->stop_recognition(mHwDevice, model->mHandle);
+ mHalInterface->stopRecognition(model->mHandle);
model->mState = Model::STATE_IDLE;
}
AudioSystem::releaseSoundTriggerSession(model->mCaptureSession);
- return mHwDevice->unload_sound_model(mHwDevice, handle);
+ return mHalInterface->unloadSoundModel(handle);
}
status_t SoundTriggerHwService::Module::startRecognition(sound_model_handle_t handle,
const sp<IMemory>& dataMemory)
{
ALOGV("startRecognition() model handle %d", handle);
+ if (mHalInterface == 0) {
+ return NO_INIT;
+ }
if (!captureHotwordAllowed()) {
return PERMISSION_DENIED;
}
@@ -657,7 +650,7 @@
//TODO: get capture handle and device from audio policy service
config->capture_handle = model->mCaptureIOHandle;
config->capture_device = model->mCaptureDevice;
- status_t status = mHwDevice->start_recognition(mHwDevice, handle, config,
+ status_t status = mHalInterface->startRecognition(handle, config,
SoundTriggerHwService::recognitionCallback,
this);
@@ -672,6 +665,9 @@
status_t SoundTriggerHwService::Module::stopRecognition(sound_model_handle_t handle)
{
ALOGV("stopRecognition() model handle %d", handle);
+ if (mHalInterface == 0) {
+ return NO_INIT;
+ }
if (!captureHotwordAllowed()) {
return PERMISSION_DENIED;
}
@@ -685,7 +681,7 @@
if (model->mState != Model::STATE_ACTIVE) {
return INVALID_OPERATION;
}
- mHwDevice->stop_recognition(mHwDevice, handle);
+ mHalInterface->stopRecognition(handle);
model->mState = Model::STATE_IDLE;
return NO_ERROR;
}
@@ -808,18 +804,13 @@
}
const bool supports_stop_all =
- (mHwDevice->common.version >= SOUND_TRIGGER_DEVICE_API_VERSION_1_1 &&
- mHwDevice->stop_all_recognitions);
-
- if (supports_stop_all) {
- mHwDevice->stop_all_recognitions(mHwDevice);
- }
+ (mHalInterface != 0) && (mHalInterface->stopAllRecognitions() == 0);
for (size_t i = 0; i < mModels.size(); i++) {
sp<Model> model = mModels.valueAt(i);
if (model->mState == Model::STATE_ACTIVE) {
- if (!supports_stop_all) {
- mHwDevice->stop_recognition(mHwDevice, model->mHandle);
+ if (mHalInterface != 0 && !supports_stop_all) {
+ mHalInterface->stopRecognition(model->mHandle);
}
// keep model in ACTIVE state so that event is processed by onCallbackEvent()
if (model->mType == SOUND_MODEL_TYPE_KEYPHRASE) {
diff --git a/services/soundtrigger/SoundTriggerHwService.h b/services/soundtrigger/SoundTriggerHwService.h
index 13a577a..7f7d0cc 100644
--- a/services/soundtrigger/SoundTriggerHwService.h
+++ b/services/soundtrigger/SoundTriggerHwService.h
@@ -26,7 +26,7 @@
#include <soundtrigger/ISoundTrigger.h>
#include <soundtrigger/ISoundTriggerClient.h>
#include <system/sound_trigger.h>
-#include <hardware/sound_trigger.h>
+#include "SoundTriggerHalInterface.h"
namespace android {
@@ -103,7 +103,7 @@
public:
Module(const sp<SoundTriggerHwService>& service,
- sound_trigger_hw_device* hwDevice,
+ const sp<SoundTriggerHalInterface>& halInterface,
sound_trigger_module_descriptor descriptor,
const sp<ISoundTriggerClient>& client);
@@ -123,7 +123,6 @@
virtual status_t dump(int fd, const Vector<String16>& args);
- sound_trigger_hw_device *hwDevice() const { return mHwDevice; }
struct sound_trigger_module_descriptor descriptor() { return mDescriptor; }
void setClient(const sp<ISoundTriggerClient>& client) { mClient = client; }
void clearClient() { mClient.clear(); }
@@ -146,7 +145,7 @@
Mutex mLock;
wp<SoundTriggerHwService> mService;
- struct sound_trigger_hw_device* mHwDevice;
+ sp<SoundTriggerHalInterface> mHalInterface;
struct sound_trigger_module_descriptor mDescriptor;
sp<ISoundTriggerClient> mClient;
DefaultKeyedVector< sound_model_handle_t, sp<Model> > mModels;
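With the header switched from the raw sound_trigger_hw_device pointer to the refcounted SoundTriggerHalInterface, every Module method follows the same guard-then-forward shape seen in the .cpp hunks above. Sketched here outside the class with a hypothetical helper name:

#include <utils/Errors.h>
#include <utils/StrongPointer.h>
#include "SoundTriggerHalInterface.h"

// Mirrors the pattern of Module::stopRecognition(): NO_INIT if the HAL was
// never connected, otherwise forward the call to the interface.
android::status_t stopIfConnected(
        const android::sp<android::SoundTriggerHalInterface>& halInterface,
        sound_model_handle_t handle) {
    if (halInterface == 0) {
        return android::NO_INIT;
    }
    return halInterface->stopRecognition(handle);
}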