Merge "Fix typos and line length in AudioRecord comments"
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index ee458f1..d43cb0b 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -27,7 +27,8 @@
#include <camera/ICameraRecordingProxyListener.h>
#include <camera/ICameraService.h>
-#include <surfaceflinger/Surface.h>
+#include <gui/ISurfaceTexture.h>
+#include <gui/Surface.h>
namespace android {
diff --git a/camera/ICamera.cpp b/camera/ICamera.cpp
index 70f5dbc..8d8408c 100644
--- a/camera/ICamera.cpp
+++ b/camera/ICamera.cpp
@@ -22,6 +22,8 @@
#include <sys/types.h>
#include <binder/Parcel.h>
#include <camera/ICamera.h>
+#include <gui/ISurfaceTexture.h>
+#include <gui/Surface.h>
namespace android {
diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk
index 11e94e8..f26747b 100644
--- a/cmds/stagefright/Android.mk
+++ b/cmds/stagefright/Android.mk
@@ -147,4 +147,28 @@
include $(BUILD_EXECUTABLE)
+################################################################################
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ codec.cpp \
+ SimplePlayer.cpp \
+
+LOCAL_SHARED_LIBRARIES := \
+ libstagefright liblog libutils libbinder libstagefright_foundation \
+ libmedia libgui libcutils libui
+
+LOCAL_C_INCLUDES:= \
+ $(JNI_H_INCLUDE) \
+ frameworks/base/media/libstagefright \
+ $(TOP)/frameworks/base/include/media/stagefright/openmax
+
+LOCAL_CFLAGS += -Wno-multichar
+
+LOCAL_MODULE_TAGS := debug
+
+LOCAL_MODULE:= codec
+
+include $(BUILD_EXECUTABLE)
diff --git a/cmds/stagefright/SimplePlayer.cpp b/cmds/stagefright/SimplePlayer.cpp
new file mode 100644
index 0000000..fac3a8c
--- /dev/null
+++ b/cmds/stagefright/SimplePlayer.cpp
@@ -0,0 +1,646 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SimplePlayer"
+#include <utils/Log.h>
+
+#include "SimplePlayer.h"
+
+#include <gui/SurfaceTextureClient.h>
+#include <media/AudioTrack.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/NativeWindowWrapper.h>
+#include <media/stagefright/NuMediaExtractor.h>
+
+namespace android {
+
+SimplePlayer::SimplePlayer()
+ : mState(UNINITIALIZED),
+ mDoMoreStuffGeneration(0),
+ mStartTimeRealUs(-1ll) {
+}
+
+SimplePlayer::~SimplePlayer() {
+}
+
+// static
+status_t PostAndAwaitResponse(
+ const sp<AMessage> &msg, sp<AMessage> *response) {
+ status_t err = msg->postAndAwaitResponse(response);
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (!(*response)->findInt32("err", &err)) {
+ err = OK;
+ }
+
+ return err;
+}
+status_t SimplePlayer::setDataSource(const char *path) {
+ sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());
+ msg->setString("path", path);
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
+status_t SimplePlayer::setSurface(const sp<ISurfaceTexture> &surfaceTexture) {
+ sp<AMessage> msg = new AMessage(kWhatSetSurface, id());
+
+ sp<SurfaceTextureClient> surfaceTextureClient;
+ if (surfaceTexture != NULL) {
+ surfaceTextureClient = new SurfaceTextureClient(surfaceTexture);
+ }
+
+ msg->setObject(
+ "native-window", new NativeWindowWrapper(surfaceTextureClient));
+
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
+status_t SimplePlayer::prepare() {
+ sp<AMessage> msg = new AMessage(kWhatPrepare, id());
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
+status_t SimplePlayer::start() {
+ sp<AMessage> msg = new AMessage(kWhatStart, id());
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
+status_t SimplePlayer::stop() {
+ sp<AMessage> msg = new AMessage(kWhatStop, id());
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
+status_t SimplePlayer::reset() {
+ sp<AMessage> msg = new AMessage(kWhatReset, id());
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
+void SimplePlayer::onMessageReceived(const sp<AMessage> &msg) {
+ switch (msg->what()) {
+ case kWhatSetDataSource:
+ {
+            status_t err = OK;
+ if (mState != UNINITIALIZED) {
+ err = INVALID_OPERATION;
+ } else {
+ CHECK(msg->findString("path", &mPath));
+ mState = UNPREPARED;
+ }
+
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+ response->postReply(replyID);
+ break;
+ }
+
+ case kWhatSetSurface:
+ {
+ status_t err;
+ if (mState != UNPREPARED) {
+ err = INVALID_OPERATION;
+ } else {
+ sp<RefBase> obj;
+ CHECK(msg->findObject("native-window", &obj));
+
+ mNativeWindow = static_cast<NativeWindowWrapper *>(obj.get());
+
+ err = OK;
+ }
+
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+ response->postReply(replyID);
+ break;
+ }
+
+ case kWhatPrepare:
+ {
+ status_t err;
+ if (mState != UNPREPARED) {
+ err = INVALID_OPERATION;
+ } else {
+ err = onPrepare();
+
+ if (err == OK) {
+ mState = STOPPED;
+ }
+ }
+
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+ response->postReply(replyID);
+ break;
+ }
+
+ case kWhatStart:
+ {
+ status_t err = OK;
+
+ if (mState == UNPREPARED) {
+ err = onPrepare();
+
+ if (err == OK) {
+ mState = STOPPED;
+ }
+ }
+
+ if (err == OK) {
+ if (mState != STOPPED) {
+ err = INVALID_OPERATION;
+ } else {
+ err = onStart();
+
+ if (err == OK) {
+ mState = STARTED;
+ }
+ }
+ }
+
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+ response->postReply(replyID);
+ break;
+ }
+
+ case kWhatStop:
+ {
+ status_t err;
+
+ if (mState != STARTED) {
+ err = INVALID_OPERATION;
+ } else {
+ err = onStop();
+
+ if (err == OK) {
+ mState = STOPPED;
+ }
+ }
+
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+ response->postReply(replyID);
+ break;
+ }
+
+ case kWhatReset:
+ {
+ status_t err = OK;
+
+ if (mState == STARTED) {
+ CHECK_EQ(onStop(), (status_t)OK);
+ mState = STOPPED;
+ }
+
+ if (mState == STOPPED) {
+ err = onReset();
+ mState = UNINITIALIZED;
+ }
+
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+ response->postReply(replyID);
+ break;
+ }
+
+ case kWhatDoMoreStuff:
+ {
+ int32_t generation;
+ CHECK(msg->findInt32("generation", &generation));
+
+ if (generation != mDoMoreStuffGeneration) {
+ break;
+ }
+
+ status_t err = onDoMoreStuff();
+
+ if (err == OK) {
+ msg->post(10000ll);
+ }
+ break;
+ }
+
+ default:
+ TRESPASS();
+ }
+}
+
+status_t SimplePlayer::onPrepare() {
+ CHECK_EQ(mState, UNPREPARED);
+
+ mExtractor = new NuMediaExtractor;
+
+ status_t err = mExtractor->setDataSource(mPath.c_str());
+
+ if (err != OK) {
+ mExtractor.clear();
+ return err;
+ }
+
+ if (mCodecLooper == NULL) {
+ mCodecLooper = new ALooper;
+ mCodecLooper->start();
+ }
+
+ bool haveAudio = false;
+ bool haveVideo = false;
+ for (size_t i = 0; i < mExtractor->countTracks(); ++i) {
+ sp<AMessage> format;
+ status_t err = mExtractor->getTrackFormat(i, &format);
+ CHECK_EQ(err, (status_t)OK);
+
+ AString mime;
+ CHECK(format->findString("mime", &mime));
+
+ if (!haveAudio && !strncasecmp(mime.c_str(), "audio/", 6)) {
+ haveAudio = true;
+ } else if (!haveVideo && !strncasecmp(mime.c_str(), "video/", 6)) {
+ haveVideo = true;
+ } else {
+ continue;
+ }
+
+ err = mExtractor->selectTrack(i);
+ CHECK_EQ(err, (status_t)OK);
+
+ CodecState *state =
+ &mStateByTrackIndex.editValueAt(
+ mStateByTrackIndex.add(i, CodecState()));
+
+ state->mNumFramesWritten = 0;
+ state->mCodec = MediaCodec::CreateByType(
+ mCodecLooper, mime.c_str(), false /* encoder */);
+
+ CHECK(state->mCodec != NULL);
+
+ err = state->mCodec->configure(
+ format, mNativeWindow->getSurfaceTextureClient(),
+ 0 /* flags */);
+
+ CHECK_EQ(err, (status_t)OK);
+
+ size_t j = 0;
+ sp<ABuffer> buffer;
+ while (format->findBuffer(StringPrintf("csd-%d", j).c_str(), &buffer)) {
+ state->mCSD.push_back(buffer);
+
+ ++j;
+ }
+ }
+
+ for (size_t i = 0; i < mStateByTrackIndex.size(); ++i) {
+ CodecState *state = &mStateByTrackIndex.editValueAt(i);
+
+ status_t err = state->mCodec->start();
+ CHECK_EQ(err, (status_t)OK);
+
+ err = state->mCodec->getInputBuffers(&state->mBuffers[0]);
+ CHECK_EQ(err, (status_t)OK);
+
+ err = state->mCodec->getOutputBuffers(&state->mBuffers[1]);
+ CHECK_EQ(err, (status_t)OK);
+
+ for (size_t j = 0; j < state->mCSD.size(); ++j) {
+ const sp<ABuffer> &srcBuffer = state->mCSD.itemAt(j);
+
+ size_t index;
+ err = state->mCodec->dequeueInputBuffer(&index, -1ll);
+ CHECK_EQ(err, (status_t)OK);
+
+ const sp<ABuffer> &dstBuffer = state->mBuffers[0].itemAt(index);
+
+ CHECK_LE(srcBuffer->size(), dstBuffer->capacity());
+ dstBuffer->setRange(0, srcBuffer->size());
+ memcpy(dstBuffer->data(), srcBuffer->data(), srcBuffer->size());
+
+ err = state->mCodec->queueInputBuffer(
+ index,
+ 0,
+ dstBuffer->size(),
+ 0ll,
+ MediaCodec::BUFFER_FLAG_CODECCONFIG);
+ CHECK_EQ(err, (status_t)OK);
+ }
+ }
+
+ return OK;
+}
+
+status_t SimplePlayer::onStart() {
+ CHECK_EQ(mState, STOPPED);
+
+ mStartTimeRealUs = -1ll;
+
+ sp<AMessage> msg = new AMessage(kWhatDoMoreStuff, id());
+ msg->setInt32("generation", ++mDoMoreStuffGeneration);
+ msg->post();
+
+ return OK;
+}
+
+status_t SimplePlayer::onStop() {
+ CHECK_EQ(mState, STARTED);
+
+ ++mDoMoreStuffGeneration;
+
+ return OK;
+}
+
+status_t SimplePlayer::onReset() {
+ CHECK_EQ(mState, STOPPED);
+
+ for (size_t i = 0; i < mStateByTrackIndex.size(); ++i) {
+ CodecState *state = &mStateByTrackIndex.editValueAt(i);
+
+ CHECK_EQ(state->mCodec->release(), (status_t)OK);
+ }
+
+ mStartTimeRealUs = -1ll;
+
+ mStateByTrackIndex.clear();
+ mCodecLooper.clear();
+ mExtractor.clear();
+ mNativeWindow.clear();
+ mPath.clear();
+
+ return OK;
+}
+
+status_t SimplePlayer::onDoMoreStuff() {
+ ALOGV("onDoMoreStuff");
+ for (size_t i = 0; i < mStateByTrackIndex.size(); ++i) {
+ CodecState *state = &mStateByTrackIndex.editValueAt(i);
+
+ status_t err;
+ do {
+ size_t index;
+ err = state->mCodec->dequeueInputBuffer(&index);
+
+ if (err == OK) {
+ ALOGV("dequeued input buffer on track %d",
+ mStateByTrackIndex.keyAt(i));
+
+ state->mAvailInputBufferIndices.push_back(index);
+ } else {
+ ALOGV("dequeueInputBuffer on track %d returned %d",
+ mStateByTrackIndex.keyAt(i), err);
+ }
+ } while (err == OK);
+
+ do {
+ BufferInfo info;
+ err = state->mCodec->dequeueOutputBuffer(
+ &info.mIndex,
+ &info.mOffset,
+ &info.mSize,
+ &info.mPresentationTimeUs,
+ &info.mFlags);
+
+ if (err == OK) {
+ ALOGV("dequeued output buffer on track %d",
+ mStateByTrackIndex.keyAt(i));
+
+ state->mAvailOutputBufferInfos.push_back(info);
+ } else if (err == INFO_FORMAT_CHANGED) {
+ err = onOutputFormatChanged(mStateByTrackIndex.keyAt(i), state);
+ CHECK_EQ(err, (status_t)OK);
+ } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
+ err = state->mCodec->getOutputBuffers(&state->mBuffers[1]);
+ CHECK_EQ(err, (status_t)OK);
+ } else {
+ ALOGV("dequeueOutputBuffer on track %d returned %d",
+ mStateByTrackIndex.keyAt(i), err);
+ }
+ } while (err == OK
+ || err == INFO_FORMAT_CHANGED
+ || err == INFO_OUTPUT_BUFFERS_CHANGED);
+ }
+
+ for (;;) {
+ size_t trackIndex;
+ status_t err = mExtractor->getSampleTrackIndex(&trackIndex);
+
+ if (err != OK) {
+ ALOGI("encountered input EOS.");
+ break;
+ } else {
+ CodecState *state = &mStateByTrackIndex.editValueFor(trackIndex);
+
+ if (state->mAvailInputBufferIndices.empty()) {
+ break;
+ }
+
+ size_t index = *state->mAvailInputBufferIndices.begin();
+ state->mAvailInputBufferIndices.erase(
+ state->mAvailInputBufferIndices.begin());
+
+ const sp<ABuffer> &dstBuffer =
+ state->mBuffers[0].itemAt(index);
+
+ err = mExtractor->readSampleData(dstBuffer);
+ CHECK_EQ(err, (status_t)OK);
+
+ int64_t timeUs;
+ CHECK_EQ(mExtractor->getSampleTime(&timeUs), (status_t)OK);
+
+ err = state->mCodec->queueInputBuffer(
+ index,
+ dstBuffer->offset(),
+ dstBuffer->size(),
+ timeUs,
+ 0);
+ CHECK_EQ(err, (status_t)OK);
+
+ ALOGV("enqueued input data on track %d", trackIndex);
+
+ err = mExtractor->advance();
+ CHECK_EQ(err, (status_t)OK);
+ }
+ }
+
+ int64_t nowUs = ALooper::GetNowUs();
+
+ if (mStartTimeRealUs < 0ll) {
+ mStartTimeRealUs = nowUs + 1000000ll;
+ }
+
+ for (size_t i = 0; i < mStateByTrackIndex.size(); ++i) {
+ CodecState *state = &mStateByTrackIndex.editValueAt(i);
+
+ while (!state->mAvailOutputBufferInfos.empty()) {
+ BufferInfo *info = &*state->mAvailOutputBufferInfos.begin();
+
+ int64_t whenRealUs = info->mPresentationTimeUs + mStartTimeRealUs;
+ int64_t lateByUs = nowUs - whenRealUs;
+
+ if (lateByUs > -10000ll) {
+ bool release = true;
+
+ if (lateByUs > 30000ll) {
+ ALOGI("track %d buffer late by %lld us, dropping.",
+ mStateByTrackIndex.keyAt(i), lateByUs);
+ state->mCodec->releaseOutputBuffer(info->mIndex);
+ } else {
+ if (state->mAudioTrack != NULL) {
+ const sp<ABuffer> &srcBuffer =
+ state->mBuffers[1].itemAt(info->mIndex);
+
+ renderAudio(state, info, srcBuffer);
+
+ if (info->mSize > 0) {
+ release = false;
+ }
+ }
+
+ if (release) {
+ state->mCodec->renderOutputBufferAndRelease(
+ info->mIndex);
+ }
+ }
+
+ if (release) {
+ state->mAvailOutputBufferInfos.erase(
+ state->mAvailOutputBufferInfos.begin());
+
+ info = NULL;
+ } else {
+ break;
+ }
+ } else {
+ ALOGV("track %d buffer early by %lld us.",
+ mStateByTrackIndex.keyAt(i), -lateByUs);
+ break;
+ }
+ }
+ }
+
+ return OK;
+}
+
+status_t SimplePlayer::onOutputFormatChanged(
+ size_t trackIndex, CodecState *state) {
+ sp<AMessage> format;
+ status_t err = state->mCodec->getOutputFormat(&format);
+
+ if (err != OK) {
+ return err;
+ }
+
+ AString mime;
+ CHECK(format->findString("mime", &mime));
+
+ if (!strncasecmp(mime.c_str(), "audio/", 6)) {
+ int32_t channelCount;
+ int32_t sampleRate;
+ CHECK(format->findInt32("channel-count", &channelCount));
+ CHECK(format->findInt32("sample-rate", &sampleRate));
+
+ state->mAudioTrack = new AudioTrack(
+ AUDIO_STREAM_MUSIC,
+ sampleRate,
+ AUDIO_FORMAT_PCM_16_BIT,
+ (channelCount == 1)
+ ? AUDIO_CHANNEL_OUT_MONO : AUDIO_CHANNEL_OUT_STEREO,
+ 0);
+
+ state->mNumFramesWritten = 0;
+ }
+
+ return OK;
+}
+
+void SimplePlayer::renderAudio(
+ CodecState *state, BufferInfo *info, const sp<ABuffer> &buffer) {
+ CHECK(state->mAudioTrack != NULL);
+
+ if (state->mAudioTrack->stopped()) {
+ state->mAudioTrack->start();
+ }
+
+ uint32_t numFramesPlayed;
+ CHECK_EQ(state->mAudioTrack->getPosition(&numFramesPlayed), (status_t)OK);
+
+ uint32_t numFramesAvailableToWrite =
+ state->mAudioTrack->frameCount()
+ - (state->mNumFramesWritten - numFramesPlayed);
+
+ size_t numBytesAvailableToWrite =
+ numFramesAvailableToWrite * state->mAudioTrack->frameSize();
+
+ size_t copy = info->mSize;
+ if (copy > numBytesAvailableToWrite) {
+ copy = numBytesAvailableToWrite;
+ }
+
+ if (copy == 0) {
+ return;
+ }
+
+ int64_t startTimeUs = ALooper::GetNowUs();
+
+ ssize_t nbytes = state->mAudioTrack->write(
+ buffer->base() + info->mOffset, copy);
+
+ CHECK_EQ(nbytes, (ssize_t)copy);
+
+ int64_t delayUs = ALooper::GetNowUs() - startTimeUs;
+
+ uint32_t numFramesWritten = nbytes / state->mAudioTrack->frameSize();
+
+ if (delayUs > 2000ll) {
+ ALOGW("AudioTrack::write took %lld us, numFramesAvailableToWrite=%u, "
+ "numFramesWritten=%u",
+ delayUs, numFramesAvailableToWrite, numFramesWritten);
+ }
+
+ info->mOffset += nbytes;
+ info->mSize -= nbytes;
+
+ state->mNumFramesWritten += numFramesWritten;
+}
+
+} // namespace android
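Every public call on SimplePlayer above funnels into the handler thread through the same AMessage request/reply idiom: post a message carrying the handler's id(), block in postAndAwaitResponse(), and answer from onMessageReceived() via postReply(). A minimal, hypothetical handler showing only that idiom (EchoHandler and kWhatPing are illustrative names, not part of this change):

    // Sketch only; assumes the stagefright foundation classes used above.
    #include <media/stagefright/foundation/ADebug.h>
    #include <media/stagefright/foundation/AHandler.h>
    #include <media/stagefright/foundation/ALooper.h>
    #include <media/stagefright/foundation/AMessage.h>

    namespace android {

    struct EchoHandler : public AHandler {
        enum { kWhatPing = 'ping' };

        // Call from any thread other than the handler's looper thread;
        // postAndAwaitResponse() blocks until the handler calls postReply().
        status_t ping() {
            sp<AMessage> msg = new AMessage(kWhatPing, id());

            sp<AMessage> response;
            status_t err = msg->postAndAwaitResponse(&response);
            if (err != OK) {
                return err;
            }
            if (!response->findInt32("err", &err)) {
                err = OK;
            }
            return err;
        }

    protected:
        virtual void onMessageReceived(const sp<AMessage> &msg) {
            CHECK_EQ(msg->what(), (uint32_t)kWhatPing);

            uint32_t replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));  // token for the waiting caller

            sp<AMessage> response = new AMessage;
            response->setInt32("err", OK);
            response->postReply(replyID);                // unblocks ping()
        }
    };

    }  // namespace android

Wiring mirrors what codec.cpp does for SimplePlayer below: create an ALooper, start() it, then registerHandler() before posting.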
diff --git a/cmds/stagefright/SimplePlayer.h b/cmds/stagefright/SimplePlayer.h
new file mode 100644
index 0000000..2548252
--- /dev/null
+++ b/cmds/stagefright/SimplePlayer.h
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/foundation/AString.h>
+#include <utils/KeyedVector.h>
+
+namespace android {
+
+struct ABuffer;
+struct ALooper;
+struct AudioTrack;
+struct ISurfaceTexture;
+struct MediaCodec;
+struct NativeWindowWrapper;
+struct NuMediaExtractor;
+
+struct SimplePlayer : public AHandler {
+ SimplePlayer();
+
+ status_t setDataSource(const char *path);
+ status_t setSurface(const sp<ISurfaceTexture> &surfaceTexture);
+ status_t prepare();
+ status_t start();
+ status_t stop();
+ status_t reset();
+
+protected:
+ virtual ~SimplePlayer();
+
+ virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+ enum State {
+ UNINITIALIZED,
+ UNPREPARED,
+ STOPPED,
+ STARTED
+ };
+
+ enum {
+ kWhatSetDataSource,
+ kWhatSetSurface,
+ kWhatPrepare,
+ kWhatStart,
+ kWhatStop,
+ kWhatReset,
+ kWhatDoMoreStuff,
+ };
+
+ struct BufferInfo {
+ size_t mIndex;
+ size_t mOffset;
+ size_t mSize;
+ int64_t mPresentationTimeUs;
+ uint32_t mFlags;
+ };
+
+ struct CodecState
+ {
+ sp<MediaCodec> mCodec;
+ Vector<sp<ABuffer> > mCSD;
+ Vector<sp<ABuffer> > mBuffers[2];
+
+ List<size_t> mAvailInputBufferIndices;
+ List<BufferInfo> mAvailOutputBufferInfos;
+
+ sp<AudioTrack> mAudioTrack;
+ uint32_t mNumFramesWritten;
+ };
+
+ State mState;
+ AString mPath;
+ sp<NativeWindowWrapper> mNativeWindow;
+
+ sp<NuMediaExtractor> mExtractor;
+ sp<ALooper> mCodecLooper;
+ KeyedVector<size_t, CodecState> mStateByTrackIndex;
+ int32_t mDoMoreStuffGeneration;
+
+ int64_t mStartTimeRealUs;
+
+ status_t onPrepare();
+ status_t onStart();
+ status_t onStop();
+ status_t onReset();
+ status_t onDoMoreStuff();
+ status_t onOutputFormatChanged(size_t trackIndex, CodecState *state);
+
+ void renderAudio(
+ CodecState *state, BufferInfo *info, const sp<ABuffer> &buffer);
+
+ DISALLOW_EVIL_CONSTRUCTORS(SimplePlayer);
+};
+
+} // namespace android
diff --git a/cmds/stagefright/codec.cpp b/cmds/stagefright/codec.cpp
new file mode 100644
index 0000000..fbf800c
--- /dev/null
+++ b/cmds/stagefright/codec.cpp
@@ -0,0 +1,408 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "codec"
+#include <utils/Log.h>
+
+#include "SimplePlayer.h"
+
+#include <binder/ProcessState.h>
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/NuMediaExtractor.h>
+#include <gui/SurfaceComposerClient.h>
+
+static void usage(const char *me) {
+ fprintf(stderr, "usage: %s [-a] use audio\n"
+ "\t\t[-v] use video\n"
+ "\t\t[-p] playback\n", me);
+
+ exit(1);
+}
+
+namespace android {
+
+struct CodecState {
+ sp<MediaCodec> mCodec;
+ Vector<sp<ABuffer> > mCSD;
+ size_t mCSDIndex;
+ Vector<sp<ABuffer> > mInBuffers;
+ Vector<sp<ABuffer> > mOutBuffers;
+ bool mSawOutputEOS;
+};
+
+} // namespace android
+
+static int decode(
+ const android::sp<android::ALooper> &looper,
+ const char *path,
+ bool useAudio,
+ bool useVideo) {
+ using namespace android;
+
+ sp<NuMediaExtractor> extractor = new NuMediaExtractor;
+ if (extractor->setDataSource(path) != OK) {
+ fprintf(stderr, "unable to instantiate extractor.\n");
+ return 1;
+ }
+
+ KeyedVector<size_t, CodecState> stateByTrack;
+
+ bool haveAudio = false;
+ bool haveVideo = false;
+ for (size_t i = 0; i < extractor->countTracks(); ++i) {
+ sp<AMessage> format;
+ status_t err = extractor->getTrackFormat(i, &format);
+ CHECK_EQ(err, (status_t)OK);
+
+ AString mime;
+ CHECK(format->findString("mime", &mime));
+
+ if (useAudio && !haveAudio
+ && !strncasecmp(mime.c_str(), "audio/", 6)) {
+ haveAudio = true;
+ } else if (useVideo && !haveVideo
+ && !strncasecmp(mime.c_str(), "video/", 6)) {
+ haveVideo = true;
+ } else {
+ continue;
+ }
+
+ ALOGV("selecting track %d", i);
+
+ err = extractor->selectTrack(i);
+ CHECK_EQ(err, (status_t)OK);
+
+ CodecState *state =
+ &stateByTrack.editValueAt(stateByTrack.add(i, CodecState()));
+
+ state->mCodec = MediaCodec::CreateByType(
+ looper, mime.c_str(), false /* encoder */);
+
+ CHECK(state->mCodec != NULL);
+
+ err = state->mCodec->configure(
+ format, NULL /* surfaceTexture */, 0 /* flags */);
+
+ CHECK_EQ(err, (status_t)OK);
+
+ size_t j = 0;
+ sp<ABuffer> buffer;
+ while (format->findBuffer(StringPrintf("csd-%d", j).c_str(), &buffer)) {
+ state->mCSD.push_back(buffer);
+
+ ++j;
+ }
+
+ state->mCSDIndex = 0;
+ state->mSawOutputEOS = false;
+
+ ALOGV("got %d pieces of codec specific data.", state->mCSD.size());
+ }
+
+ CHECK(!stateByTrack.isEmpty());
+
+ for (size_t i = 0; i < stateByTrack.size(); ++i) {
+ CodecState *state = &stateByTrack.editValueAt(i);
+
+ sp<MediaCodec> codec = state->mCodec;
+
+ CHECK_EQ((status_t)OK, codec->start());
+
+ CHECK_EQ((status_t)OK, codec->getInputBuffers(&state->mInBuffers));
+ CHECK_EQ((status_t)OK, codec->getOutputBuffers(&state->mOutBuffers));
+
+ ALOGV("got %d input and %d output buffers",
+ state->mInBuffers.size(), state->mOutBuffers.size());
+
+ while (state->mCSDIndex < state->mCSD.size()) {
+ size_t index;
+ status_t err = codec->dequeueInputBuffer(&index);
+
+ if (err == -EAGAIN) {
+ usleep(10000);
+ continue;
+ }
+
+ CHECK_EQ(err, (status_t)OK);
+
+ const sp<ABuffer> &srcBuffer =
+ state->mCSD.itemAt(state->mCSDIndex++);
+
+ const sp<ABuffer> &buffer = state->mInBuffers.itemAt(index);
+
+ memcpy(buffer->data(), srcBuffer->data(), srcBuffer->size());
+
+ err = codec->queueInputBuffer(
+ index,
+ 0 /* offset */,
+ srcBuffer->size(),
+ 0ll /* timeUs */,
+ MediaCodec::BUFFER_FLAG_CODECCONFIG);
+
+ CHECK_EQ(err, (status_t)OK);
+ }
+ }
+
+ bool sawInputEOS = false;
+
+ for (;;) {
+ if (!sawInputEOS) {
+ size_t trackIndex;
+ status_t err = extractor->getSampleTrackIndex(&trackIndex);
+
+ if (err != OK) {
+ ALOGV("signalling EOS.");
+
+ for (size_t i = 0; i < stateByTrack.size(); ++i) {
+ CodecState *state = &stateByTrack.editValueAt(i);
+
+ for (;;) {
+ size_t index;
+ err = state->mCodec->dequeueInputBuffer(&index);
+
+ if (err == -EAGAIN) {
+ continue;
+ }
+
+ CHECK_EQ(err, (status_t)OK);
+
+ err = state->mCodec->queueInputBuffer(
+ index,
+ 0 /* offset */,
+ 0 /* size */,
+ 0ll /* timeUs */,
+ MediaCodec::BUFFER_FLAG_EOS);
+
+ CHECK_EQ(err, (status_t)OK);
+ break;
+ }
+ }
+
+ sawInputEOS = true;
+ } else {
+ CodecState *state = &stateByTrack.editValueFor(trackIndex);
+
+ size_t index;
+ err = state->mCodec->dequeueInputBuffer(&index);
+
+ if (err == OK) {
+ ALOGV("filling input buffer %d", index);
+
+ const sp<ABuffer> &buffer = state->mInBuffers.itemAt(index);
+
+ err = extractor->readSampleData(buffer);
+ CHECK_EQ(err, (status_t)OK);
+
+ int64_t timeUs;
+ err = extractor->getSampleTime(&timeUs);
+ CHECK_EQ(err, (status_t)OK);
+
+ err = state->mCodec->queueInputBuffer(
+ index,
+ 0 /* offset */,
+ buffer->size(),
+ timeUs,
+ 0 /* flags */);
+
+ CHECK_EQ(err, (status_t)OK);
+
+ extractor->advance();
+ } else {
+ CHECK_EQ(err, -EAGAIN);
+ }
+ }
+ }
+
+ bool sawOutputEOSOnAllTracks = true;
+ for (size_t i = 0; i < stateByTrack.size(); ++i) {
+ CodecState *state = &stateByTrack.editValueAt(i);
+ if (!state->mSawOutputEOS) {
+ sawOutputEOSOnAllTracks = false;
+ break;
+ }
+ }
+
+ if (sawOutputEOSOnAllTracks) {
+ break;
+ }
+
+ for (size_t i = 0; i < stateByTrack.size(); ++i) {
+ CodecState *state = &stateByTrack.editValueAt(i);
+
+ if (state->mSawOutputEOS) {
+ continue;
+ }
+
+ size_t index;
+ size_t offset;
+ size_t size;
+ int64_t presentationTimeUs;
+ uint32_t flags;
+ status_t err = state->mCodec->dequeueOutputBuffer(
+ &index, &offset, &size, &presentationTimeUs, &flags,
+ 10000ll);
+
+ if (err == OK) {
+ ALOGV("draining output buffer %d, time = %lld us",
+ index, presentationTimeUs);
+
+ err = state->mCodec->releaseOutputBuffer(index);
+ CHECK_EQ(err, (status_t)OK);
+
+ if (flags & MediaCodec::BUFFER_FLAG_EOS) {
+ ALOGV("reached EOS on output.");
+
+ state->mSawOutputEOS = true;
+ }
+ } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
+ ALOGV("INFO_OUTPUT_BUFFERS_CHANGED");
+ CHECK_EQ((status_t)OK,
+ state->mCodec->getOutputBuffers(&state->mOutBuffers));
+
+ ALOGV("got %d output buffers", state->mOutBuffers.size());
+ } else if (err == INFO_FORMAT_CHANGED) {
+ sp<AMessage> format;
+ CHECK_EQ((status_t)OK, state->mCodec->getOutputFormat(&format));
+
+ ALOGV("INFO_FORMAT_CHANGED: %s", format->debugString().c_str());
+ } else {
+ CHECK_EQ(err, -EAGAIN);
+ }
+ }
+ }
+
+ for (size_t i = 0; i < stateByTrack.size(); ++i) {
+ CodecState *state = &stateByTrack.editValueAt(i);
+
+ CHECK_EQ((status_t)OK, state->mCodec->release());
+ }
+
+ return 0;
+}
+
+int main(int argc, char **argv) {
+ using namespace android;
+
+ const char *me = argv[0];
+
+ bool useAudio = false;
+ bool useVideo = false;
+ bool playback = false;
+
+ int res;
+ while ((res = getopt(argc, argv, "havp")) >= 0) {
+ switch (res) {
+ case 'a':
+ {
+ useAudio = true;
+ break;
+ }
+
+ case 'v':
+ {
+ useVideo = true;
+ break;
+ }
+
+ case 'p':
+ {
+ playback = true;
+ break;
+ }
+
+ case '?':
+ case 'h':
+ default:
+ {
+ usage(me);
+ }
+ }
+ }
+
+ argc -= optind;
+ argv += optind;
+
+ if (argc != 1) {
+ usage(me);
+ }
+
+ if (!useAudio && !useVideo) {
+ useAudio = useVideo = true;
+ }
+
+ ProcessState::self()->startThreadPool();
+
+ DataSource::RegisterDefaultSniffers();
+
+ sp<ALooper> looper = new ALooper;
+ looper->start();
+
+ if (playback) {
+ sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
+ CHECK_EQ(composerClient->initCheck(), (status_t)OK);
+
+ ssize_t displayWidth = composerClient->getDisplayWidth(0);
+ ssize_t displayHeight = composerClient->getDisplayHeight(0);
+
+ ALOGV("display is %ld x %ld\n", displayWidth, displayHeight);
+
+ sp<SurfaceControl> control =
+ composerClient->createSurface(
+ String8("A Surface"),
+ 0,
+ displayWidth,
+ displayHeight,
+ PIXEL_FORMAT_RGB_565,
+ 0);
+
+ CHECK(control != NULL);
+ CHECK(control->isValid());
+
+ SurfaceComposerClient::openGlobalTransaction();
+ CHECK_EQ(control->setLayer(INT_MAX), (status_t)OK);
+ CHECK_EQ(control->show(), (status_t)OK);
+ SurfaceComposerClient::closeGlobalTransaction();
+
+ sp<Surface> surface = control->getSurface();
+ CHECK(surface != NULL);
+
+ sp<SimplePlayer> player = new SimplePlayer;
+ looper->registerHandler(player);
+
+ player->setDataSource(argv[0]);
+ player->setSurface(surface->getSurfaceTexture());
+ player->start();
+ sleep(60);
+ player->stop();
+ player->reset();
+
+ composerClient->dispose();
+ } else {
+ decode(looper, argv[0], useAudio, useVideo);
+ }
+
+ looper->stop();
+
+ return 0;
+}
diff --git a/cmds/stagefright/sf2.cpp b/cmds/stagefright/sf2.cpp
index ae80f88..1d28793 100644
--- a/cmds/stagefright/sf2.cpp
+++ b/cmds/stagefright/sf2.cpp
@@ -32,8 +32,7 @@
#include <media/stagefright/NativeWindowWrapper.h>
#include <media/stagefright/Utils.h>
-#include <surfaceflinger/ISurfaceComposer.h>
-#include <surfaceflinger/SurfaceComposerClient.h>
+#include <gui/SurfaceComposerClient.h>
#include "include/ESDS.h"
@@ -198,9 +197,7 @@
(new AMessage(kWhatSeek, id()))->post(5000000ll);
} else if (what == ACodec::kWhatOutputFormatChanged) {
- } else {
- CHECK_EQ(what, (int32_t)ACodec::kWhatShutdownCompleted);
-
+ } else if (what == ACodec::kWhatShutdownCompleted) {
mDecodeLooper->unregisterHandler(mCodec->id());
if (mDecodeLooper != looper()) {
@@ -360,7 +357,7 @@
buffer->meta()->setInt32("csd", true);
mCSD.push(buffer);
- msg->setObject("csd", buffer);
+ msg->setBuffer("csd", buffer);
} else if (meta->findData(kKeyESDS, &type, &data, &size)) {
ESDS esds((const char *)data, size);
CHECK_EQ(esds.InitCheck(), (status_t)OK);
@@ -410,9 +407,8 @@
return;
}
- sp<RefBase> obj;
- CHECK(msg->findObject("buffer", &obj));
- sp<ABuffer> outBuffer = static_cast<ABuffer *>(obj.get());
+ sp<ABuffer> outBuffer;
+ CHECK(msg->findBuffer("buffer", &outBuffer));
if (mCSDIndex < mCSD.size()) {
outBuffer = mCSD.editItemAt(mCSDIndex++);
@@ -511,15 +507,14 @@
}
}
- reply->setObject("buffer", outBuffer);
+ reply->setBuffer("buffer", outBuffer);
reply->post();
}
void onDrainThisBuffer(const sp<AMessage> &msg) {
- sp<RefBase> obj;
- CHECK(msg->findObject("buffer", &obj));
+ sp<ABuffer> buffer;
+ CHECK(msg->findBuffer("buffer", &buffer));
- sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());
mTotalBytesReceived += buffer->size();
sp<AMessage> reply;
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index 7cb8f62..dab2e0f 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -55,9 +55,7 @@
#include <fcntl.h>
#include <gui/SurfaceTextureClient.h>
-
-#include <surfaceflinger/ISurfaceComposer.h>
-#include <surfaceflinger/SurfaceComposerClient.h>
+#include <gui/SurfaceComposerClient.h>
using namespace android;
diff --git a/cmds/stagefright/stream.cpp b/cmds/stagefright/stream.cpp
index 0d6c738..efa1445 100644
--- a/cmds/stagefright/stream.cpp
+++ b/cmds/stagefright/stream.cpp
@@ -32,8 +32,7 @@
#include <binder/IServiceManager.h>
#include <media/IMediaPlayerService.h>
-#include <surfaceflinger/ISurfaceComposer.h>
-#include <surfaceflinger/SurfaceComposerClient.h>
+#include <gui/SurfaceComposerClient.h>
#include <fcntl.h>
diff --git a/drm/drmserver/DrmManagerService.cpp b/drm/drmserver/DrmManagerService.cpp
index caeb026..8ba0203 100644
--- a/drm/drmserver/DrmManagerService.cpp
+++ b/drm/drmserver/DrmManagerService.cpp
@@ -159,12 +159,18 @@
status_t DrmManagerService::consumeRights(
int uniqueId, DecryptHandle* decryptHandle, int action, bool reserve) {
ALOGV("Entering consumeRights");
+ if (!isProtectedCallAllowed()) {
+ return DRM_ERROR_NO_PERMISSION;
+ }
return mDrmManager->consumeRights(uniqueId, decryptHandle, action, reserve);
}
status_t DrmManagerService::setPlaybackStatus(
int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position) {
ALOGV("Entering setPlaybackStatus");
+ if (!isProtectedCallAllowed()) {
+ return DRM_ERROR_NO_PERMISSION;
+ }
return mDrmManager->setPlaybackStatus(uniqueId, decryptHandle, playbackStatus, position);
}
@@ -229,12 +235,18 @@
status_t DrmManagerService::closeDecryptSession(int uniqueId, DecryptHandle* decryptHandle) {
ALOGV("Entering closeDecryptSession");
+ if (!isProtectedCallAllowed()) {
+ return DRM_ERROR_NO_PERMISSION;
+ }
return mDrmManager->closeDecryptSession(uniqueId, decryptHandle);
}
status_t DrmManagerService::initializeDecryptUnit(int uniqueId, DecryptHandle* decryptHandle,
int decryptUnitId, const DrmBuffer* headerInfo) {
ALOGV("Entering initializeDecryptUnit");
+ if (!isProtectedCallAllowed()) {
+ return DRM_ERROR_NO_PERMISSION;
+ }
return mDrmManager->initializeDecryptUnit(uniqueId,decryptHandle, decryptUnitId, headerInfo);
}
@@ -242,18 +254,27 @@
int uniqueId, DecryptHandle* decryptHandle, int decryptUnitId,
const DrmBuffer* encBuffer, DrmBuffer** decBuffer, DrmBuffer* IV) {
ALOGV("Entering decrypt");
+ if (!isProtectedCallAllowed()) {
+ return DRM_ERROR_NO_PERMISSION;
+ }
return mDrmManager->decrypt(uniqueId, decryptHandle, decryptUnitId, encBuffer, decBuffer, IV);
}
status_t DrmManagerService::finalizeDecryptUnit(
int uniqueId, DecryptHandle* decryptHandle, int decryptUnitId) {
ALOGV("Entering finalizeDecryptUnit");
+ if (!isProtectedCallAllowed()) {
+ return DRM_ERROR_NO_PERMISSION;
+ }
return mDrmManager->finalizeDecryptUnit(uniqueId, decryptHandle, decryptUnitId);
}
ssize_t DrmManagerService::pread(int uniqueId, DecryptHandle* decryptHandle,
void* buffer, ssize_t numBytes, off64_t offset) {
ALOGV("Entering pread");
+ if (!isProtectedCallAllowed()) {
+ return DRM_ERROR_NO_PERMISSION;
+ }
return mDrmManager->pread(uniqueId, decryptHandle, buffer, numBytes, offset);
}
diff --git a/drm/drmserver/main_drmserver.cpp b/drm/drmserver/main_drmserver.cpp
index e61b269..434d561 100644
--- a/drm/drmserver/main_drmserver.cpp
+++ b/drm/drmserver/main_drmserver.cpp
@@ -17,15 +17,10 @@
#define LOG_TAG "drmserver"
//#define LOG_NDEBUG 0
-#include <sys/types.h>
-#include <unistd.h>
-#include <grp.h>
-
#include <binder/IPCThreadState.h>
#include <binder/ProcessState.h>
#include <binder/IServiceManager.h>
#include <utils/Log.h>
-#include <private/android_filesystem_config.h>
#include <DrmManagerService.h>
diff --git a/include/camera/ICamera.h b/include/camera/ICamera.h
index 400d7f4..3d18837 100644
--- a/include/camera/ICamera.h
+++ b/include/camera/ICamera.h
@@ -20,15 +20,15 @@
#include <utils/RefBase.h>
#include <binder/IInterface.h>
#include <binder/Parcel.h>
-#include <surfaceflinger/Surface.h>
#include <binder/IMemory.h>
#include <utils/String8.h>
#include <camera/Camera.h>
-#include <gui/ISurfaceTexture.h>
namespace android {
class ICameraClient;
+class ISurfaceTexture;
+class Surface;
class ICamera: public IInterface
{
diff --git a/include/drm/drm_framework_common.h b/include/drm/drm_framework_common.h
index 2632cbd..637409c 100644
--- a/include/drm/drm_framework_common.h
+++ b/include/drm/drm_framework_common.h
@@ -43,6 +43,7 @@
DRM_ERROR_DECRYPT = ERROR_BASE - 5,
DRM_ERROR_CANNOT_HANDLE = ERROR_BASE - 6,
DRM_ERROR_TAMPER_DETECTED = ERROR_BASE - 7,
+ DRM_ERROR_NO_PERMISSION = ERROR_BASE - 8,
DRM_NO_ERROR = NO_ERROR
};
diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h
index 5e61ffd..4fbeb38 100644
--- a/include/media/AudioRecord.h
+++ b/include/media/AudioRecord.h
@@ -225,7 +225,7 @@
/* get sample rate for this record track
*/
- uint32_t getSampleRate();
+ uint32_t getSampleRate() const;
/* Sets marker position. When record reaches the number of frames specified,
* a callback with event type EVENT_MARKER is called. Calling setMarkerPosition
@@ -242,7 +242,7 @@
* - INVALID_OPERATION: the AudioRecord has no callback installed.
*/
status_t setMarkerPosition(uint32_t marker);
- status_t getMarkerPosition(uint32_t *marker);
+ status_t getMarkerPosition(uint32_t *marker) const;
/* Sets position update period. Every time the number of frames specified has been recorded,
@@ -261,7 +261,7 @@
* - INVALID_OPERATION: the AudioRecord has no callback installed.
*/
status_t setPositionUpdatePeriod(uint32_t updatePeriod);
- status_t getPositionUpdatePeriod(uint32_t *updatePeriod);
+ status_t getPositionUpdatePeriod(uint32_t *updatePeriod) const;
/* Gets record head position. The position is the total number of frames
@@ -275,7 +275,7 @@
* - NO_ERROR: successful operation
* - BAD_VALUE: position is NULL
*/
- status_t getPosition(uint32_t *position);
+ status_t getPosition(uint32_t *position) const;
/* returns a handle on the audio input used by this AudioRecord.
*
@@ -285,7 +285,7 @@
* Returned value:
* handle on audio hardware input
*/
- audio_io_handle_t getInput();
+ audio_io_handle_t getInput() const;
/* returns the audio session ID associated to this AudioRecord.
*
@@ -295,7 +295,7 @@
* Returned value:
* AudioRecord session ID.
*/
- int getSessionId();
+ int getSessionId() const;
/* obtains a buffer of "frameCount" frames. The buffer must be
* filled entirely. If the track is stopped, obtainBuffer() returns
@@ -327,7 +327,7 @@
* user space process is blocked longer than the capacity of audio driver buffers.
* Unit: the number of input audio frames
*/
- unsigned int getInputFramesLost();
+ unsigned int getInputFramesLost() const;
private:
/* copying audio tracks is not allowed */
@@ -361,7 +361,7 @@
sp<IMemory> mCblkMemory;
sp<ClientRecordThread> mClientRecordThread;
status_t mReadyToRun;
- Mutex mLock;
+ mutable Mutex mLock;
Condition mCondition;
uint32_t mFrameCount;
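The getters above become const while still taking the object's internal lock, which is why mLock changes to mutable in the same hunk. A minimal illustration of the idiom outside AudioRecord (the class below is hypothetical, not part of this change):

    #include <utils/threads.h>  // Mutex, AutoMutex

    namespace android {

    class ConstLockedGetter {
    public:
        uint32_t sampleRate() const {
            // Locking inside a const method compiles only because mLock is
            // mutable; the lock is an implementation detail, not observable state.
            AutoMutex lock(mLock);
            return mSampleRate;
        }

    private:
        mutable Mutex mLock;
        uint32_t mSampleRate;
    };

    }  // namespace android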
diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h
index 1916ac5..89bc95d 100644
--- a/include/media/AudioSystem.h
+++ b/include/media/AudioSystem.h
@@ -157,7 +157,7 @@
uint32_t samplingRate = 0,
audio_format_t format = AUDIO_FORMAT_DEFAULT,
uint32_t channels = AUDIO_CHANNEL_OUT_STEREO,
- audio_policy_output_flags_t flags = AUDIO_POLICY_OUTPUT_FLAG_INDIRECT);
+ audio_policy_output_flags_t flags = AUDIO_POLICY_OUTPUT_FLAG_NONE);
static status_t startOutput(audio_io_handle_t output,
audio_stream_type_t stream,
int session = 0);
diff --git a/include/media/IAudioPolicyService.h b/include/media/IAudioPolicyService.h
index 4d88297..bdd7747 100644
--- a/include/media/IAudioPolicyService.h
+++ b/include/media/IAudioPolicyService.h
@@ -52,7 +52,7 @@
uint32_t samplingRate = 0,
audio_format_t format = AUDIO_FORMAT_DEFAULT,
uint32_t channels = 0,
- audio_policy_output_flags_t flags = AUDIO_POLICY_OUTPUT_FLAG_INDIRECT) = 0;
+ audio_policy_output_flags_t flags = AUDIO_POLICY_OUTPUT_FLAG_NONE) = 0;
virtual status_t startOutput(audio_io_handle_t output,
audio_stream_type_t stream,
int session = 0) = 0;
diff --git a/include/media/IMediaPlayer.h b/include/media/IMediaPlayer.h
index 6425886..39d58ab 100644
--- a/include/media/IMediaPlayer.h
+++ b/include/media/IMediaPlayer.h
@@ -23,6 +23,10 @@
#include <utils/KeyedVector.h>
#include <system/audio.h>
+// Fwd decl to make sure everyone agrees that the scope of struct sockaddr_in is
+// global, and not in android::
+struct sockaddr_in;
+
namespace android {
class Parcel;
@@ -59,6 +63,7 @@
virtual status_t attachAuxEffect(int effectId) = 0;
virtual status_t setParameter(int key, const Parcel& request) = 0;
virtual status_t getParameter(int key, Parcel* reply) = 0;
+ virtual status_t setRetransmitEndpoint(const struct sockaddr_in* endpoint) = 0;
// Invoke a generic method on the player by using opaque parcels
// for the request and reply.
diff --git a/include/media/MediaPlayerInterface.h b/include/media/MediaPlayerInterface.h
index 23a3e49..f7491d8 100644
--- a/include/media/MediaPlayerInterface.h
+++ b/include/media/MediaPlayerInterface.h
@@ -29,6 +29,10 @@
#include <media/AudioSystem.h>
#include <media/Metadata.h>
+// Fwd decl to make sure everyone agrees that the scope of struct sockaddr_in is
+// global, and not in android::
+struct sockaddr_in;
+
namespace android {
class Parcel;
@@ -141,6 +145,14 @@
virtual status_t setParameter(int key, const Parcel &request) = 0;
virtual status_t getParameter(int key, Parcel *reply) = 0;
+    // Right now, only the AAH TX player supports this functionality. For now,
+ // provide a default implementation which indicates a lack of support for
+ // this functionality to make life easier for all of the other media player
+ // maintainers out there.
+ virtual status_t setRetransmitEndpoint(const struct sockaddr_in* endpoint) {
+ return INVALID_OPERATION;
+ }
+
// Invoke a generic method on the player by using opaque parcels
// for the request and reply.
//
diff --git a/include/media/mediaplayer.h b/include/media/mediaplayer.h
index d0b87c8..9cd5f9f 100644
--- a/include/media/mediaplayer.h
+++ b/include/media/mediaplayer.h
@@ -17,6 +17,8 @@
#ifndef ANDROID_MEDIAPLAYER_H
#define ANDROID_MEDIAPLAYER_H
+#include <arpa/inet.h>
+
#include <binder/IMemory.h>
#include <media/IMediaPlayerClient.h>
#include <media/IMediaPlayer.h>
@@ -204,6 +206,7 @@
status_t attachAuxEffect(int effectId);
status_t setParameter(int key, const Parcel& request);
status_t getParameter(int key, Parcel* reply);
+ status_t setRetransmitEndpoint(const char* addrString, uint16_t port);
private:
void clear_l();
@@ -212,6 +215,7 @@
status_t getDuration_l(int *msec);
status_t attachNewPlayer(const sp<IMediaPlayer>& player);
status_t reset_l();
+ status_t doSetRetransmitEndpoint(const sp<IMediaPlayer>& player);
sp<IMediaPlayer> mPlayer;
thread_id_t mLockThreadId;
@@ -234,6 +238,8 @@
int mVideoHeight;
int mAudioSessionId;
float mSendLevel;
+ struct sockaddr_in mRetransmitEndpoint;
+ bool mRetransmitEndpointValid;
};
}; // namespace android
diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h
index 3963d9c..fa1a416 100644
--- a/include/media/stagefright/ACodec.h
+++ b/include/media/stagefright/ACodec.h
@@ -22,6 +22,7 @@
#include <android/native_window.h>
#include <media/IOMX.h>
#include <media/stagefright/foundation/AHierarchicalStateMachine.h>
+#include <OMX_Audio.h>
namespace android {
@@ -37,6 +38,9 @@
kWhatFlushCompleted = 'fcom',
kWhatOutputFormatChanged = 'outC',
kWhatError = 'erro',
+ kWhatComponentAllocated = 'cAll',
+ kWhatComponentConfigured = 'cCon',
+ kWhatBuffersAllocated = 'allc',
};
ACodec();
@@ -45,7 +49,11 @@
void initiateSetup(const sp<AMessage> &msg);
void signalFlush();
void signalResume();
- void initiateShutdown();
+ void initiateShutdown(bool keepComponentAllocated = false);
+
+ void initiateAllocateComponent(const sp<AMessage> &msg);
+ void initiateConfigureComponent(const sp<AMessage> &msg);
+ void initiateStart();
protected:
virtual ~ACodec();
@@ -53,6 +61,7 @@
private:
struct BaseState;
struct UninitializedState;
+ struct LoadedState;
struct LoadedToIdleState;
struct IdleToExecutingState;
struct ExecutingState;
@@ -70,6 +79,9 @@
kWhatFlush = 'flus',
kWhatResume = 'resm',
kWhatDrainDeferredMessages = 'drai',
+ kWhatAllocateComponent = 'allo',
+ kWhatConfigureComponent = 'conf',
+ kWhatStart = 'star',
};
enum {
@@ -96,6 +108,7 @@
sp<AMessage> mNotify;
sp<UninitializedState> mUninitializedState;
+ sp<LoadedState> mLoadedState;
sp<LoadedToIdleState> mLoadedToIdleState;
sp<IdleToExecutingState> mIdleToExecutingState;
sp<ExecutingState> mExecutingState;
@@ -105,6 +118,7 @@
sp<FlushingState> mFlushingState;
AString mComponentName;
+ uint32_t mQuirks;
sp<IOMX> mOMX;
IOMX::node_id mNode;
sp<MemoryDealer> mDealer[2];
@@ -118,6 +132,13 @@
List<sp<AMessage> > mDeferredQueue;
bool mSentFormat;
+ bool mIsEncoder;
+
+ bool mShutdownInProgress;
+
+    // If "mKeepComponentAllocated" is true, we only transition back to the
+    // Loaded state and do not release the component instance.
+ bool mKeepComponentAllocated;
status_t allocateBuffersOnPort(OMX_U32 portIndex);
status_t freeBuffersOnPort(OMX_U32 portIndex);
@@ -132,8 +153,8 @@
uint32_t portIndex, IOMX::buffer_id bufferID,
ssize_t *index = NULL);
- void setComponentRole(bool isEncoder, const char *mime);
- void configureCodec(const char *mime, const sp<AMessage> &msg);
+ status_t setComponentRole(bool isEncoder, const char *mime);
+ status_t configureCodec(const char *mime, const sp<AMessage> &msg);
status_t setVideoPortFormatType(
OMX_U32 portIndex,
@@ -145,20 +166,37 @@
status_t setupVideoDecoder(
const char *mime, int32_t width, int32_t height);
+ status_t setupVideoEncoder(
+ const char *mime, const sp<AMessage> &msg);
+
status_t setVideoFormatOnPort(
OMX_U32 portIndex,
int32_t width, int32_t height,
OMX_VIDEO_CODINGTYPE compressionFormat);
- status_t setupAACDecoder(int32_t numChannels, int32_t sampleRate);
- status_t setupAMRDecoder(bool isWAMR);
- status_t setupG711Decoder(int32_t numChannels);
+ status_t setupAACCodec(
+ bool encoder,
+ int32_t numChannels, int32_t sampleRate, int32_t bitRate);
+
+ status_t selectAudioPortFormat(
+ OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat);
+
+ status_t setupAMRCodec(bool encoder, bool isWAMR, int32_t bitRate);
+ status_t setupG711Codec(bool encoder, int32_t numChannels);
status_t setupRawAudioFormat(
OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels);
status_t setMinBufferSize(OMX_U32 portIndex, size_t size);
+ status_t setupMPEG4EncoderParameters(const sp<AMessage> &msg);
+ status_t setupH263EncoderParameters(const sp<AMessage> &msg);
+ status_t setupAVCEncoderParameters(const sp<AMessage> &msg);
+
+ status_t verifySupportForProfileAndLevel(int32_t profile, int32_t level);
+ status_t configureBitrate(int32_t bitrate);
+ status_t setupErrorCorrectionParameters();
+
status_t initNativeWindow();
// Returns true iff all buffers on the given port have status OWNED_BY_US.
@@ -173,7 +211,9 @@
void sendFormatChange();
- void signalError(OMX_ERRORTYPE error = OMX_ErrorUndefined);
+ void signalError(
+ OMX_ERRORTYPE error = OMX_ErrorUndefined,
+ status_t internalError = UNKNOWN_ERROR);
DISALLOW_EVIL_CONSTRUCTORS(ACodec);
};
diff --git a/include/media/stagefright/HardwareAPI.h b/include/media/stagefright/HardwareAPI.h
index 32eed3f..17efd35 100644
--- a/include/media/stagefright/HardwareAPI.h
+++ b/include/media/stagefright/HardwareAPI.h
@@ -19,7 +19,7 @@
#define HARDWARE_API_H_
#include <media/stagefright/OMXPluginBase.h>
-#include <ui/android_native_buffer.h>
+#include <system/window.h>
#include <utils/RefBase.h>
#include <OMX_Component.h>
diff --git a/include/media/stagefright/MediaCodec.h b/include/media/stagefright/MediaCodec.h
new file mode 100644
index 0000000..72ac56a
--- /dev/null
+++ b/include/media/stagefright/MediaCodec.h
@@ -0,0 +1,192 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_CODEC_H_
+
+#define MEDIA_CODEC_H_
+
+#include <gui/ISurfaceTexture.h>
+#include <media/stagefright/foundation/AHandler.h>
+#include <utils/Vector.h>
+
+namespace android {
+
+struct ABuffer;
+struct ACodec;
+struct AMessage;
+struct SoftwareRenderer;
+struct SurfaceTextureClient;
+
+struct MediaCodec : public AHandler {
+ enum ConfigureFlags {
+ CONFIGURE_FLAG_ENCODE = 1,
+ };
+
+ enum BufferFlags {
+ BUFFER_FLAG_SYNCFRAME = 1,
+ BUFFER_FLAG_CODECCONFIG = 2,
+ BUFFER_FLAG_EOS = 4,
+ };
+
+ static sp<MediaCodec> CreateByType(
+ const sp<ALooper> &looper, const char *mime, bool encoder);
+
+ static sp<MediaCodec> CreateByComponentName(
+ const sp<ALooper> &looper, const char *name);
+
+ status_t configure(
+ const sp<AMessage> &format,
+ const sp<SurfaceTextureClient> &nativeWindow,
+ uint32_t flags);
+
+ status_t start();
+
+ // Returns to a state in which the component remains allocated but
+ // unconfigured.
+ status_t stop();
+
+ // Client MUST call release before releasing final reference to this
+ // object.
+ status_t release();
+
+ status_t flush();
+
+ status_t queueInputBuffer(
+ size_t index,
+ size_t offset,
+ size_t size,
+ int64_t presentationTimeUs,
+ uint32_t flags);
+
+ status_t dequeueInputBuffer(size_t *index, int64_t timeoutUs = 0ll);
+
+ status_t dequeueOutputBuffer(
+ size_t *index,
+ size_t *offset,
+ size_t *size,
+ int64_t *presentationTimeUs,
+ uint32_t *flags,
+ int64_t timeoutUs = 0ll);
+
+ status_t renderOutputBufferAndRelease(size_t index);
+ status_t releaseOutputBuffer(size_t index);
+
+ status_t getOutputFormat(sp<AMessage> *format) const;
+
+ status_t getInputBuffers(Vector<sp<ABuffer> > *buffers) const;
+ status_t getOutputBuffers(Vector<sp<ABuffer> > *buffers) const;
+
+protected:
+ virtual ~MediaCodec();
+ virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+ enum State {
+ UNINITIALIZED,
+ INITIALIZING,
+ INITIALIZED,
+ CONFIGURING,
+ CONFIGURED,
+ STARTING,
+ STARTED,
+ FLUSHING,
+ STOPPING,
+ RELEASING,
+ };
+
+ enum {
+ kPortIndexInput = 0,
+ kPortIndexOutput = 1,
+ };
+
+ enum {
+ kWhatInit = 'init',
+ kWhatConfigure = 'conf',
+ kWhatStart = 'strt',
+ kWhatStop = 'stop',
+ kWhatRelease = 'rele',
+ kWhatDequeueInputBuffer = 'deqI',
+ kWhatQueueInputBuffer = 'queI',
+ kWhatDequeueOutputBuffer = 'deqO',
+ kWhatReleaseOutputBuffer = 'relO',
+ kWhatGetBuffers = 'getB',
+ kWhatFlush = 'flus',
+ kWhatGetOutputFormat = 'getO',
+ kWhatDequeueInputTimedOut = 'dITO',
+ kWhatDequeueOutputTimedOut = 'dOTO',
+ kWhatCodecNotify = 'codc',
+ };
+
+ enum {
+ kFlagIsSoftwareCodec = 1,
+ kFlagOutputFormatChanged = 2,
+ kFlagOutputBuffersChanged = 4,
+ kFlagStickyError = 8,
+ kFlagDequeueInputPending = 16,
+ kFlagDequeueOutputPending = 32,
+ };
+
+ struct BufferInfo {
+ void *mBufferID;
+ sp<ABuffer> mData;
+ sp<AMessage> mNotify;
+ bool mOwnedByClient;
+ };
+
+ State mState;
+ sp<ALooper> mLooper;
+ sp<ALooper> mCodecLooper;
+ sp<ACodec> mCodec;
+ uint32_t mReplyID;
+ uint32_t mFlags;
+ sp<SurfaceTextureClient> mNativeWindow;
+ SoftwareRenderer *mSoftRenderer;
+ sp<AMessage> mOutputFormat;
+
+ List<size_t> mAvailPortBuffers[2];
+ Vector<BufferInfo> mPortBuffers[2];
+
+ int32_t mDequeueInputTimeoutGeneration;
+ uint32_t mDequeueInputReplyID;
+
+ int32_t mDequeueOutputTimeoutGeneration;
+ uint32_t mDequeueOutputReplyID;
+
+ MediaCodec(const sp<ALooper> &looper);
+
+ static status_t PostAndAwaitResponse(
+ const sp<AMessage> &msg, sp<AMessage> *response);
+
+ status_t init(const char *name, bool nameIsType, bool encoder);
+
+ void setState(State newState);
+ void returnBuffersToCodec();
+ void returnBuffersToCodecOnPort(int32_t portIndex);
+ size_t updateBuffers(int32_t portIndex, const sp<AMessage> &msg);
+ status_t onQueueInputBuffer(const sp<AMessage> &msg);
+ status_t onReleaseOutputBuffer(const sp<AMessage> &msg);
+ ssize_t dequeuePortBuffer(int32_t portIndex);
+
+ bool handleDequeueInputBuffer(uint32_t replyID, bool newRequest = false);
+ bool handleDequeueOutputBuffer(uint32_t replyID, bool newRequest = false);
+ void cancelPendingDequeueOperations();
+
+ DISALLOW_EVIL_CONSTRUCTORS(MediaCodec);
+};
+
+} // namespace android
+
+#endif // MEDIA_CODEC_H_
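Condensed, the client contract declared in this header is a queue/dequeue loop around an ALooper-backed codec; codec.cpp and SimplePlayer.cpp above drive it in full. The sketch below is illustrative only: format, surface, accessUnitSize and timeUs are assumed to come from an extractor and a SurfaceTextureClient obtained elsewhere, and error handling and EOS signalling are omitted.

    sp<ALooper> looper = new ALooper;
    looper->start();

    sp<MediaCodec> codec = MediaCodec::CreateByType(
            looper, "video/avc", false /* encoder */);
    CHECK(codec != NULL);

    CHECK_EQ(codec->configure(format, surface, 0 /* flags */), (status_t)OK);
    CHECK_EQ(codec->start(), (status_t)OK);

    Vector<sp<ABuffer> > inBuffers, outBuffers;
    CHECK_EQ(codec->getInputBuffers(&inBuffers), (status_t)OK);
    CHECK_EQ(codec->getOutputBuffers(&outBuffers), (status_t)OK);

    size_t index;
    if (codec->dequeueInputBuffer(&index, -1ll /* block */) == OK) {
        // fill inBuffers.itemAt(index) with one access unit, then:
        codec->queueInputBuffer(
                index, 0 /* offset */, accessUnitSize, timeUs, 0 /* flags */);
    }

    size_t offset, size;
    int64_t presentationTimeUs;
    uint32_t flags;
    status_t err = codec->dequeueOutputBuffer(
            &index, &offset, &size, &presentationTimeUs, &flags, 10000ll);

    if (err == OK) {
        codec->renderOutputBufferAndRelease(index);  // or releaseOutputBuffer(index)
    } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
        CHECK_EQ(codec->getOutputBuffers(&outBuffers), (status_t)OK);
    } else if (err == INFO_FORMAT_CHANGED) {
        sp<AMessage> newFormat;
        CHECK_EQ(codec->getOutputFormat(&newFormat), (status_t)OK);
    }

    // stop() keeps the component allocated but unconfigured; release() must be
    // called before dropping the last reference, per the comments above.
    CHECK_EQ(codec->release(), (status_t)OK);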
diff --git a/include/media/stagefright/MediaCodecList.h b/include/media/stagefright/MediaCodecList.h
new file mode 100644
index 0000000..14dc1b8
--- /dev/null
+++ b/include/media/stagefright/MediaCodecList.h
@@ -0,0 +1,95 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_CODEC_LIST_H_
+
+#define MEDIA_CODEC_LIST_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/AString.h>
+
+#include <sys/types.h>
+#include <utils/Errors.h>
+#include <utils/KeyedVector.h>
+#include <utils/Vector.h>
+
+namespace android {
+
+struct MediaCodecList {
+ static const MediaCodecList *getInstance();
+
+ ssize_t findCodecByType(
+ const char *type, bool encoder, size_t startIndex = 0) const;
+
+ ssize_t findCodecByName(const char *name) const;
+
+ const char *getCodecName(size_t index) const;
+ bool codecHasQuirk(size_t index, const char *quirkName) const;
+
+private:
+ enum Section {
+ SECTION_TOPLEVEL,
+ SECTION_DECODERS,
+ SECTION_DECODER,
+ SECTION_ENCODERS,
+ SECTION_ENCODER,
+ };
+
+ struct CodecInfo {
+ AString mName;
+ bool mIsEncoder;
+ uint32_t mTypes;
+ uint32_t mQuirks;
+ };
+
+ static MediaCodecList *sCodecList;
+
+ status_t mInitCheck;
+ Section mCurrentSection;
+ int32_t mDepth;
+
+ Vector<CodecInfo> mCodecInfos;
+ KeyedVector<AString, size_t> mCodecQuirks;
+ KeyedVector<AString, size_t> mTypes;
+
+ MediaCodecList();
+ ~MediaCodecList();
+
+ status_t initCheck() const;
+ void parseXMLFile(FILE *file);
+
+ static void StartElementHandlerWrapper(
+ void *me, const char *name, const char **attrs);
+
+ static void EndElementHandlerWrapper(void *me, const char *name);
+
+ void startElementHandler(const char *name, const char **attrs);
+ void endElementHandler(const char *name);
+
+ status_t addMediaCodecFromAttributes(bool encoder, const char **attrs);
+ void addMediaCodec(bool encoder, const char *name, const char *type = NULL);
+
+ status_t addQuirk(const char **attrs);
+ status_t addTypeFromAttributes(const char **attrs);
+ void addType(const char *name);
+
+ DISALLOW_EVIL_CONSTRUCTORS(MediaCodecList);
+};
+
+} // namespace android
+
+#endif // MEDIA_CODEC_LIST_H_
+
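The list is a process-wide singleton parsed once from an XML description of the available components; lookups go by MIME type (optionally resuming from a start index) or by component name. A short, hypothetical usage sketch of the API declared above:

    const MediaCodecList *list = MediaCodecList::getInstance();

    // Enumerate every AVC decoder known to the list.
    for (ssize_t index = list->findCodecByType("video/avc", false /* encoder */);
            index >= 0;
            index = list->findCodecByType(
                    "video/avc", false /* encoder */, index + 1)) {
        const char *name = list->getCodecName(index);

        // "some-quirk" is a placeholder; real quirk names come from the XML file.
        bool hasQuirk = list->codecHasQuirk(index, "some-quirk");
    }

    // Lookup by component name; the name here is illustrative.
    ssize_t byName = list->findCodecByName("OMX.some.vendor.avc.decoder");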
diff --git a/include/media/stagefright/MediaErrors.h b/include/media/stagefright/MediaErrors.h
index 21d00b8..dd3bf28 100644
--- a/include/media/stagefright/MediaErrors.h
+++ b/include/media/stagefright/MediaErrors.h
@@ -40,6 +40,7 @@
// Not technically an error.
INFO_FORMAT_CHANGED = MEDIA_ERROR_BASE - 12,
INFO_DISCONTINUITY = MEDIA_ERROR_BASE - 13,
+ INFO_OUTPUT_BUFFERS_CHANGED = MEDIA_ERROR_BASE - 14,
// The following constant values should be in sync with
// drm/drm_framework_common.h
diff --git a/include/media/stagefright/NativeWindowWrapper.h b/include/media/stagefright/NativeWindowWrapper.h
index f323cbc..97cc0ce 100644
--- a/include/media/stagefright/NativeWindowWrapper.h
+++ b/include/media/stagefright/NativeWindowWrapper.h
@@ -18,40 +18,28 @@
#define NATIVE_WINDOW_WRAPPER_H_
-#include <surfaceflinger/Surface.h>
#include <gui/SurfaceTextureClient.h>
namespace android {
-// Both Surface and SurfaceTextureClient are RefBase that implement the
-// ANativeWindow interface, but at different addresses. ANativeWindow is not
-// a RefBase but acts like one for use with sp<>. This wrapper converts a
-// Surface or SurfaceTextureClient into a single reference-counted object
-// that holds an sp reference to the underlying Surface or SurfaceTextureClient,
-// It provides a method to get the ANativeWindow.
+// SurfaceTextureClient derives from ANativeWindow, which in turn derives from
+// multiple base classes. In order to carry it in AMessages, we'll temporarily
+// wrap it into a NativeWindowWrapper.
struct NativeWindowWrapper : RefBase {
NativeWindowWrapper(
- const sp<Surface> &surface) :
- mSurface(surface) { }
-
- NativeWindowWrapper(
const sp<SurfaceTextureClient> &surfaceTextureClient) :
mSurfaceTextureClient(surfaceTextureClient) { }
sp<ANativeWindow> getNativeWindow() const {
- if (mSurface != NULL) {
- return mSurface;
- } else {
- return mSurfaceTextureClient;
- }
+ return mSurfaceTextureClient;
}
- // If needed later we can provide a method to ask what kind of native window
+ sp<SurfaceTextureClient> getSurfaceTextureClient() const {
+ return mSurfaceTextureClient;
+ }
private:
- // At most one of mSurface and mSurfaceTextureClient will be non-NULL
- const sp<Surface> mSurface;
const sp<SurfaceTextureClient> mSurfaceTextureClient;
DISALLOW_EVIL_CONSTRUCTORS(NativeWindowWrapper);
diff --git a/include/media/stagefright/NuMediaExtractor.h b/include/media/stagefright/NuMediaExtractor.h
new file mode 100644
index 0000000..96efdff
--- /dev/null
+++ b/include/media/stagefright/NuMediaExtractor.h
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NU_MEDIA_EXTRACTOR_H_
+#define NU_MEDIA_EXTRACTOR_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+#include <utils/Vector.h>
+
+namespace android {
+
+struct ABuffer;
+struct AMessage;
+struct MediaBuffer;
+struct MediaExtractor;
+struct MediaSource;
+
+struct NuMediaExtractor : public RefBase {
+ NuMediaExtractor();
+
+ status_t setDataSource(const char *path);
+
+ size_t countTracks() const;
+ status_t getTrackFormat(size_t index, sp<AMessage> *format) const;
+
+ status_t selectTrack(size_t index);
+
+ status_t seekTo(int64_t timeUs);
+
+ status_t advance();
+ status_t readSampleData(const sp<ABuffer> &buffer);
+ status_t getSampleTrackIndex(size_t *trackIndex);
+ status_t getSampleTime(int64_t *sampleTimeUs);
+
+protected:
+ virtual ~NuMediaExtractor();
+
+private:
+ enum TrackFlags {
+ kIsVorbis = 1,
+ };
+
+ struct TrackInfo {
+ sp<MediaSource> mSource;
+ size_t mTrackIndex;
+ status_t mFinalResult;
+ MediaBuffer *mSample;
+ int64_t mSampleTimeUs;
+ uint32_t mFlags; // bitmask of "TrackFlags"
+ };
+
+ sp<MediaExtractor> mImpl;
+
+ Vector<TrackInfo> mSelectedTracks;
+
+ ssize_t fetchTrackSamples(int64_t seekTimeUs = -1ll);
+ void releaseTrackSamples();
+
+ DISALLOW_EVIL_CONSTRUCTORS(NuMediaExtractor);
+};
+
+} // namespace android
+
+#endif // NU_MEDIA_EXTRACTOR_H_
+
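
The header above only declares the API; a minimal consumption loop, using just the methods declared in NuMediaExtractor.h, might look like the sketch below. The input path and the 1 MB buffer size are placeholders, and CHECK_EQ/ABuffer come from the foundation headers; this is not part of the patch.

    // Sketch: pull every sample from the first track of a file.
    sp<NuMediaExtractor> extractor = new NuMediaExtractor;
    CHECK_EQ(extractor->setDataSource("/sdcard/input.mp4"), (status_t)OK);

    if (extractor->countTracks() > 0) {
        CHECK_EQ(extractor->selectTrack(0), (status_t)OK);
    }

    sp<ABuffer> buffer = new ABuffer(1024 * 1024);  // assumed large enough
    for (;;) {
        size_t trackIndex;
        if (extractor->getSampleTrackIndex(&trackIndex) != OK) {
            break;  // end of stream or error
        }

        int64_t timeUs;
        CHECK_EQ(extractor->getSampleTime(&timeUs), (status_t)OK);
        CHECK_EQ(extractor->readSampleData(buffer), (status_t)OK);

        // ...feed buffer to a decoder here...

        extractor->advance();
    }
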
diff --git a/include/media/stagefright/OMXCodec.h b/include/media/stagefright/OMXCodec.h
index e541c18..392ea87 100644
--- a/include/media/stagefright/OMXCodec.h
+++ b/include/media/stagefright/OMXCodec.h
@@ -26,6 +26,7 @@
namespace android {
+struct MediaCodecList;
class MemoryDealer;
struct OMXCodecObserver;
struct CodecProfileLevel;
@@ -82,12 +83,35 @@
// from MediaBufferObserver
virtual void signalBufferReturned(MediaBuffer *buffer);
+ enum Quirks {
+ kNeedsFlushBeforeDisable = 1,
+ kWantsNALFragments = 2,
+ kRequiresLoadedToIdleAfterAllocation = 4,
+ kRequiresAllocateBufferOnInputPorts = 8,
+ kRequiresFlushCompleteEmulation = 16,
+ kRequiresAllocateBufferOnOutputPorts = 32,
+ kRequiresFlushBeforeShutdown = 64,
+ kDefersOutputBufferAllocation = 128,
+ kDecoderLiesAboutNumberOfChannels = 256,
+ kInputBufferSizesAreBogus = 512,
+ kSupportsMultipleFramesPerInputBuffer = 1024,
+ kAvoidMemcopyInputRecordingFrames = 2048,
+ kRequiresLargerEncoderOutputBuffer = 4096,
+ kOutputBuffersAreUnreadable = 8192,
+ };
+
// for use by ACodec
static void findMatchingCodecs(
const char *mime,
bool createEncoder, const char *matchComponentName,
uint32_t flags,
- Vector<String8> *matchingCodecs);
+ Vector<String8> *matchingCodecs,
+ Vector<uint32_t> *matchingCodecQuirks = NULL);
+
+ static uint32_t getComponentQuirks(
+ const MediaCodecList *list, size_t index);
+
+ static bool findCodecQuirks(const char *componentName, uint32_t *quirks);
protected:
virtual ~OMXCodec();
@@ -125,23 +149,6 @@
SHUTTING_DOWN,
};
- enum Quirks {
- kNeedsFlushBeforeDisable = 1,
- kWantsNALFragments = 2,
- kRequiresLoadedToIdleAfterAllocation = 4,
- kRequiresAllocateBufferOnInputPorts = 8,
- kRequiresFlushCompleteEmulation = 16,
- kRequiresAllocateBufferOnOutputPorts = 32,
- kRequiresFlushBeforeShutdown = 64,
- kDefersOutputBufferAllocation = 128,
- kDecoderLiesAboutNumberOfChannels = 256,
- kInputBufferSizesAreBogus = 512,
- kSupportsMultipleFramesPerInputBuffer = 1024,
- kAvoidMemcopyInputRecordingFrames = 2048,
- kRequiresLargerEncoderOutputBuffer = 4096,
- kOutputBuffersAreUnreadable = 8192,
- };
-
enum BufferStatus {
OWNED_BY_US,
OWNED_BY_COMPONENT,
@@ -327,9 +334,6 @@
status_t configureCodec(const sp<MetaData> &meta);
- static uint32_t getComponentQuirks(
- const char *componentName, bool isEncoder);
-
void restorePatchedDataPointer(BufferInfo *info);
status_t applyRotation();
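
Because the quirks bitmask now comes back alongside the component names, ACodec-style callers no longer need a second per-component lookup. A sketch of the new call shape, with placeholder mime and flag values:

    // Sketch: enumerate decoders for a mime type together with their quirks.
    Vector<String8> matchingCodecs;
    Vector<uint32_t> matchingCodecQuirks;
    OMXCodec::findMatchingCodecs(
            "audio/mp4a-latm",
            false /* createEncoder */,
            NULL  /* matchComponentName */,
            0     /* flags */,
            &matchingCodecs,
            &matchingCodecQuirks);

    for (size_t i = 0; i < matchingCodecs.size(); ++i) {
        uint32_t quirks = matchingCodecQuirks.itemAt(i);
        if (quirks & OMXCodec::kRequiresAllocateBufferOnOutputPorts) {
            // allocate output buffers through the component instead of useBuffer().
        }
    }
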
diff --git a/include/media/stagefright/foundation/AMessage.h b/include/media/stagefright/foundation/AMessage.h
index 7ec54aa..e5416e4 100644
--- a/include/media/stagefright/foundation/AMessage.h
+++ b/include/media/stagefright/foundation/AMessage.h
@@ -25,6 +25,7 @@
namespace android {
+struct ABuffer;
struct AString;
struct Parcel;
@@ -50,6 +51,7 @@
void setPointer(const char *name, void *value);
void setString(const char *name, const char *s, ssize_t len = -1);
void setObject(const char *name, const sp<RefBase> &obj);
+ void setBuffer(const char *name, const sp<ABuffer> &buffer);
void setMessage(const char *name, const sp<AMessage> &obj);
void setRect(
@@ -64,6 +66,7 @@
bool findPointer(const char *name, void **value) const;
bool findString(const char *name, AString *value) const;
bool findObject(const char *name, sp<RefBase> *obj) const;
+ bool findBuffer(const char *name, sp<ABuffer> *buffer) const;
bool findMessage(const char *name, sp<AMessage> *obj) const;
bool findRect(
@@ -90,10 +93,6 @@
AString debugString(int32_t indent = 0) const;
-protected:
- virtual ~AMessage();
-
-private:
enum Type {
kTypeInt32,
kTypeInt64,
@@ -105,8 +104,16 @@
kTypeObject,
kTypeMessage,
kTypeRect,
+ kTypeBuffer,
};
+ size_t countEntries() const;
+ const char *getEntryNameAt(size_t index, Type *type) const;
+
+protected:
+ virtual ~AMessage();
+
+private:
uint32_t mWhat;
ALooper::handler_id mTarget;
@@ -131,7 +138,7 @@
};
enum {
- kMaxNumItems = 16
+ kMaxNumItems = 32
};
Item mItems[kMaxNumItems];
size_t mNumItems;
@@ -140,6 +147,9 @@
void freeItem(Item *item);
const Item *findItem(const char *name, Type type) const;
+ void setObjectInternal(
+ const char *name, const sp<RefBase> &obj, Type type);
+
DISALLOW_EVIL_CONSTRUCTORS(AMessage);
};
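
setBuffer()/findBuffer() save callers from manually wrapping ABuffers as generic RefBase objects. A hedged usage sketch; the kWhatAccessUnit constant and the handler context are assumptions:

    // Sketch: carry an access unit inside an AMessage using the new helpers.
    sp<ABuffer> accessUnit = new ABuffer(4096);
    sp<AMessage> notify = new AMessage(kWhatAccessUnit, id());
    notify->setBuffer("access-unit", accessUnit);
    notify->post();

    // ...later, in onMessageReceived()...
    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("access-unit", &buffer));
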
diff --git a/media/libstagefright/timedtext/TimedTextDriver.h b/include/media/stagefright/timedtext/TimedTextDriver.h
similarity index 100%
rename from media/libstagefright/timedtext/TimedTextDriver.h
rename to include/media/stagefright/timedtext/TimedTextDriver.h
diff --git a/include/private/surfaceflinger/LayerState.h b/include/private/gui/LayerState.h
similarity index 97%
rename from include/private/surfaceflinger/LayerState.h
rename to include/private/gui/LayerState.h
index 3eb5c99..ca277e0 100644
--- a/include/private/surfaceflinger/LayerState.h
+++ b/include/private/gui/LayerState.h
@@ -23,8 +23,7 @@
#include <utils/Errors.h>
#include <ui/Region.h>
-
-#include <surfaceflinger/ISurface.h>
+#include <gui/ISurface.h>
namespace android {
diff --git a/include/private/surfaceflinger/SharedBufferStack.h b/include/private/gui/SharedBufferStack.h
similarity index 100%
rename from include/private/surfaceflinger/SharedBufferStack.h
rename to include/private/gui/SharedBufferStack.h
diff --git a/include/private/opengles/gl_context.h b/include/private/opengles/gl_context.h
deleted file mode 100644
index 6b1fa77..0000000
--- a/include/private/opengles/gl_context.h
+++ /dev/null
@@ -1,640 +0,0 @@
-/*
- * Copyright (C) 2006 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_OPENGLES_CONTEXT_H
-#define ANDROID_OPENGLES_CONTEXT_H
-
-#include <stdint.h>
-#include <stddef.h>
-#include <sys/types.h>
-#include <pthread.h>
-#ifdef HAVE_ANDROID_OS
-#include <bionic_tls.h>
-#endif
-
-#include <private/pixelflinger/ggl_context.h>
-#include <hardware/gralloc.h>
-
-#include <GLES/gl.h>
-#include <GLES/glext.h>
-
-namespace android {
-
-
-const unsigned int OGLES_NUM_COMPRESSED_TEXTURE_FORMATS = 10
-#ifdef GL_OES_compressed_ETC1_RGB8_texture
- + 1
-#endif
- ;
-
-class EGLTextureObject;
-class EGLSurfaceManager;
-class EGLBufferObjectManager;
-
-namespace gl {
-
-struct ogles_context_t;
-struct matrixx_t;
-struct transform_t;
-struct buffer_t;
-
-ogles_context_t* getGlContext();
-
-template<typename T>
-static inline void swap(T& a, T& b) {
- T t(a); a = b; b = t;
-}
-template<typename T>
-inline T max(T a, T b) {
- return a<b ? b : a;
-}
-template<typename T>
-inline T max(T a, T b, T c) {
- return max(a, max(b, c));
-}
-template<typename T>
-inline T min(T a, T b) {
- return a<b ? a : b;
-}
-template<typename T>
-inline T min(T a, T b, T c) {
- return min(a, min(b, c));
-}
-template<typename T>
-inline T min(T a, T b, T c, T d) {
- return min(min(a,b), min(c,d));
-}
-
-// ----------------------------------------------------------------------------
-// vertices
-// ----------------------------------------------------------------------------
-
-struct vec3_t {
- union {
- struct { GLfixed x, y, z; };
- struct { GLfixed r, g, b; };
- struct { GLfixed S, T, R; };
- GLfixed v[3];
- };
-};
-
-struct vec4_t {
- union {
- struct { GLfixed x, y, z, w; };
- struct { GLfixed r, g, b, a; };
- struct { GLfixed S, T, R, Q; };
- GLfixed v[4];
- };
-};
-
-struct vertex_t {
- enum {
- // these constant matter for our clipping
- CLIP_L = 0x0001, // clipping flags
- CLIP_R = 0x0002,
- CLIP_B = 0x0004,
- CLIP_T = 0x0008,
- CLIP_N = 0x0010,
- CLIP_F = 0x0020,
-
- EYE = 0x0040,
- RESERVED = 0x0080,
-
- USER_CLIP_0 = 0x0100, // user clipping flags
- USER_CLIP_1 = 0x0200,
- USER_CLIP_2 = 0x0400,
- USER_CLIP_3 = 0x0800,
- USER_CLIP_4 = 0x1000,
- USER_CLIP_5 = 0x2000,
-
- LIT = 0x4000, // lighting has been applied
- TT = 0x8000, // texture coords transformed
-
- FRUSTUM_CLIP_ALL= 0x003F,
- USER_CLIP_ALL = 0x3F00,
- CLIP_ALL = 0x3F3F,
- };
-
- // the fields below are arranged to minimize d-cache usage
- // we group together, by cache-line, the fields most likely to be used
-
- union {
- vec4_t obj;
- vec4_t eye;
- };
- vec4_t clip;
-
- uint32_t flags;
- size_t index; // cache tag, and vertex index
- GLfixed fog;
- uint8_t locked;
- uint8_t mru;
- uint8_t reserved[2];
- vec4_t window;
-
- vec4_t color;
- vec4_t texture[GGL_TEXTURE_UNIT_COUNT];
- uint32_t reserved1[4];
-
- inline void clear() {
- flags = index = locked = mru = 0;
- }
-};
-
-struct point_size_t {
- GGLcoord size;
- GLboolean smooth;
-};
-
-struct line_width_t {
- GGLcoord width;
- GLboolean smooth;
-};
-
-struct polygon_offset_t {
- GLfixed factor;
- GLfixed units;
- GLboolean enable;
-};
-
-// ----------------------------------------------------------------------------
-// arrays
-// ----------------------------------------------------------------------------
-
-struct array_t {
- typedef void (*fetcher_t)(ogles_context_t*, GLfixed*, const GLvoid*);
- fetcher_t fetch;
- GLvoid const* physical_pointer;
- GLint size;
- GLsizei stride;
- GLvoid const* pointer;
- buffer_t const* bo;
- uint16_t type;
- GLboolean enable;
- GLboolean pad;
- GLsizei bounds;
- void init(GLint, GLenum, GLsizei, const GLvoid *, const buffer_t*, GLsizei);
- inline void resolve();
- inline const GLubyte* element(GLint i) const {
- return (const GLubyte*)physical_pointer + i * stride;
- }
-};
-
-struct array_machine_t {
- array_t vertex;
- array_t normal;
- array_t color;
- array_t texture[GGL_TEXTURE_UNIT_COUNT];
- uint8_t activeTexture;
- uint8_t tmu;
- uint16_t cull;
- uint32_t flags;
- GLenum indicesType;
- buffer_t const* array_buffer;
- buffer_t const* element_array_buffer;
-
- void (*compileElements)(ogles_context_t*, vertex_t*, GLint, GLsizei);
- void (*compileElement)(ogles_context_t*, vertex_t*, GLint);
-
- void (*mvp_transform)(transform_t const*, vec4_t*, vec4_t const*);
- void (*mv_transform)(transform_t const*, vec4_t*, vec4_t const*);
- void (*tex_transform[2])(transform_t const*, vec4_t*, vec4_t const*);
- void (*perspective)(ogles_context_t*c, vertex_t* v);
- void (*clipVertex)(ogles_context_t* c, vertex_t* nv,
- GGLfixed t, const vertex_t* s, const vertex_t* p);
- void (*clipEye)(ogles_context_t* c, vertex_t* nv,
- GGLfixed t, const vertex_t* s, const vertex_t* p);
-};
-
-struct vertex_cache_t {
- enum {
- // must be at least 4
- // 3 vertice for triangles
- // or 2 + 2 for indexed triangles w/ cache contention
- VERTEX_BUFFER_SIZE = 8,
- // must be a power of two and at least 3
- VERTEX_CACHE_SIZE = 64, // 8 KB
-
- INDEX_BITS = 16,
- INDEX_MASK = ((1LU<<INDEX_BITS)-1),
- INDEX_SEQ = 1LU<<INDEX_BITS,
- };
- vertex_t* vBuffer;
- vertex_t* vCache;
- uint32_t sequence;
- void* base;
- uint32_t total;
- uint32_t misses;
- int64_t startTime;
- void init();
- void uninit();
- void clear();
- void dump_stats(GLenum mode);
-};
-
-// ----------------------------------------------------------------------------
-// fog
-// ----------------------------------------------------------------------------
-
-struct fog_t {
- GLfixed density;
- GLfixed start;
- GLfixed end;
- GLfixed invEndMinusStart;
- GLenum mode;
- GLfixed (*fog)(ogles_context_t* c, GLfixed z);
-};
-
-// ----------------------------------------------------------------------------
-// user clip planes
-// ----------------------------------------------------------------------------
-
-const unsigned int OGLES_MAX_CLIP_PLANES = 6;
-
-struct clip_plane_t {
- vec4_t equation;
-};
-
-struct user_clip_planes_t {
- clip_plane_t plane[OGLES_MAX_CLIP_PLANES];
- uint32_t enable;
-};
-
-// ----------------------------------------------------------------------------
-// lighting
-// ----------------------------------------------------------------------------
-
-const unsigned int OGLES_MAX_LIGHTS = 8;
-
-struct light_t {
- vec4_t ambient;
- vec4_t diffuse;
- vec4_t specular;
- vec4_t implicitAmbient;
- vec4_t implicitDiffuse;
- vec4_t implicitSpecular;
- vec4_t position; // position in eye space
- vec4_t objPosition;
- vec4_t normalizedObjPosition;
- vec4_t spotDir;
- vec4_t normalizedSpotDir;
- GLfixed spotExp;
- GLfixed spotCutoff;
- GLfixed spotCutoffCosine;
- GLfixed attenuation[3];
- GLfixed rConstAttenuation;
- GLboolean enable;
-};
-
-struct material_t {
- vec4_t ambient;
- vec4_t diffuse;
- vec4_t specular;
- vec4_t emission;
- GLfixed shininess;
-};
-
-struct light_model_t {
- vec4_t ambient;
- GLboolean twoSide;
-};
-
-struct color_material_t {
- GLenum face;
- GLenum mode;
- GLboolean enable;
-};
-
-struct lighting_t {
- light_t lights[OGLES_MAX_LIGHTS];
- material_t front;
- light_model_t lightModel;
- color_material_t colorMaterial;
- vec4_t implicitSceneEmissionAndAmbient;
- vec4_t objViewer;
- uint32_t enabledLights;
- GLboolean enable;
- GLenum shadeModel;
- typedef void (*light_fct_t)(ogles_context_t*, vertex_t*);
- void (*lightVertex)(ogles_context_t* c, vertex_t* v);
- void (*lightTriangle)(ogles_context_t* c,
- vertex_t* v0, vertex_t* v1, vertex_t* v2);
-};
-
-struct culling_t {
- GLenum cullFace;
- GLenum frontFace;
- GLboolean enable;
-};
-
-// ----------------------------------------------------------------------------
-// textures
-// ----------------------------------------------------------------------------
-
-struct texture_unit_t {
- GLuint name;
- EGLTextureObject* texture;
- uint8_t dirty;
-};
-
-struct texture_state_t
-{
- texture_unit_t tmu[GGL_TEXTURE_UNIT_COUNT];
- int active; // active tmu
- EGLTextureObject* defaultTexture;
- GGLContext* ggl;
- uint8_t packAlignment;
- uint8_t unpackAlignment;
-};
-
-// ----------------------------------------------------------------------------
-// transformation and matrices
-// ----------------------------------------------------------------------------
-
-struct matrixf_t;
-
-struct matrixx_t {
- GLfixed m[16];
- void load(const matrixf_t& rhs);
-};
-
-struct matrix_stack_t;
-
-
-struct matrixf_t {
- void loadIdentity();
- void load(const matrixf_t& rhs);
-
- inline GLfloat* editElements() { return m; }
- inline GLfloat const* elements() const { return m; }
-
- void set(const GLfixed* rhs);
- void set(const GLfloat* rhs);
-
- static void multiply(matrixf_t& r,
- const matrixf_t& lhs, const matrixf_t& rhs);
-
- void dump(const char* what);
-
-private:
- friend struct matrix_stack_t;
- GLfloat m[16];
- void load(const GLfixed* rhs);
- void load(const GLfloat* rhs);
- void multiply(const matrixf_t& rhs);
- void translate(GLfloat x, GLfloat y, GLfloat z);
- void scale(GLfloat x, GLfloat y, GLfloat z);
- void rotate(GLfloat a, GLfloat x, GLfloat y, GLfloat z);
-};
-
-enum {
- OP_IDENTITY = 0x00,
- OP_TRANSLATE = 0x01,
- OP_UNIFORM_SCALE = 0x02,
- OP_SCALE = 0x05,
- OP_ROTATE = 0x08,
- OP_SKEW = 0x10,
- OP_ALL = 0x1F
-};
-
-struct transform_t {
- enum {
- FLAGS_2D_PROJECTION = 0x1
- };
- matrixx_t matrix;
- uint32_t flags;
- uint32_t ops;
-
- union {
- struct {
- void (*point2)(transform_t const* t, vec4_t*, vec4_t const*);
- void (*point3)(transform_t const* t, vec4_t*, vec4_t const*);
- void (*point4)(transform_t const* t, vec4_t*, vec4_t const*);
- };
- void (*pointv[3])(transform_t const* t, vec4_t*, vec4_t const*);
- };
-
- void loadIdentity();
- void picker();
- void dump(const char* what);
-};
-
-struct mvui_transform_t : public transform_t
-{
- void picker();
-};
-
-struct matrix_stack_t {
- enum {
- DO_PICKER = 0x1,
- DO_FLOAT_TO_FIXED = 0x2
- };
- transform_t transform;
- uint8_t maxDepth;
- uint8_t depth;
- uint8_t dirty;
- uint8_t reserved;
- matrixf_t *stack;
- uint8_t *ops;
- void init(int depth);
- void uninit();
- void loadIdentity();
- void load(const GLfixed* rhs);
- void load(const GLfloat* rhs);
- void multiply(const matrixf_t& rhs);
- void translate(GLfloat x, GLfloat y, GLfloat z);
- void scale(GLfloat x, GLfloat y, GLfloat z);
- void rotate(GLfloat a, GLfloat x, GLfloat y, GLfloat z);
- GLint push();
- GLint pop();
- void validate();
- matrixf_t& top() { return stack[depth]; }
- const matrixf_t& top() const { return stack[depth]; }
- uint32_t top_ops() const { return ops[depth]; }
- inline bool isRigidBody() const {
- return !(ops[depth] & ~(OP_TRANSLATE|OP_UNIFORM_SCALE|OP_ROTATE));
- }
-};
-
-struct vp_transform_t {
- transform_t transform;
- matrixf_t matrix;
- GLfloat zNear;
- GLfloat zFar;
- void loadIdentity();
-};
-
-struct transform_state_t {
- enum {
- MODELVIEW = 0x01,
- PROJECTION = 0x02,
- VIEWPORT = 0x04,
- TEXTURE = 0x08,
- MVUI = 0x10,
- MVIT = 0x20,
- MVP = 0x40,
- };
- matrix_stack_t *current;
- matrix_stack_t modelview;
- matrix_stack_t projection;
- matrix_stack_t texture[GGL_TEXTURE_UNIT_COUNT];
-
- // modelview * projection
- transform_t mvp __attribute__((aligned(32)));
- // viewport transformation
- vp_transform_t vpt __attribute__((aligned(32)));
- // same for 4-D vertices
- transform_t mvp4;
- // full modelview inverse transpose
- transform_t mvit4;
- // upper 3x3 of mv-inverse-transpose (for normals)
- mvui_transform_t mvui;
-
- GLenum matrixMode;
- GLenum rescaleNormals;
- uint32_t dirty;
- void invalidate();
- void update_mvp();
- void update_mvit();
- void update_mvui();
-};
-
-struct viewport_t {
- GLint x;
- GLint y;
- GLsizei w;
- GLsizei h;
- struct {
- GLint x;
- GLint y;
- } surfaceport;
- struct {
- GLint x;
- GLint y;
- GLsizei w;
- GLsizei h;
- } scissor;
-};
-
-// ----------------------------------------------------------------------------
-// Lerping
-// ----------------------------------------------------------------------------
-
-struct compute_iterators_t
-{
- void initTriangle(
- vertex_t const* v0,
- vertex_t const* v1,
- vertex_t const* v2);
-
- void initLine(
- vertex_t const* v0,
- vertex_t const* v1);
-
- inline void initLerp(vertex_t const* v0, uint32_t enables);
-
- int iteratorsScale(int32_t it[3],
- int32_t c0, int32_t c1, int32_t c2) const;
-
- void iterators1616(GGLfixed it[3],
- GGLfixed c0, GGLfixed c1, GGLfixed c2) const;
-
- void iterators0032(int32_t it[3],
- int32_t c0, int32_t c1, int32_t c2) const;
-
- void iterators0032(int64_t it[3],
- int32_t c0, int32_t c1, int32_t c2) const;
-
- GGLcoord area() const { return m_area; }
-
-private:
- // don't change order of members here -- used by iterators.S
- GGLcoord m_dx01, m_dy10, m_dx20, m_dy02;
- GGLcoord m_x0, m_y0;
- GGLcoord m_area;
- uint8_t m_scale;
- uint8_t m_area_scale;
- uint8_t m_reserved[2];
-
-};
-
-// ----------------------------------------------------------------------------
-// state
-// ----------------------------------------------------------------------------
-
-#ifdef HAVE_ANDROID_OS
- // We have a dedicated TLS slot in bionic
- inline void setGlThreadSpecific(ogles_context_t *value) {
- ((uint32_t *)__get_tls())[TLS_SLOT_OPENGL] = (uint32_t)value;
- }
- inline ogles_context_t* getGlThreadSpecific() {
- return (ogles_context_t *)(((unsigned *)__get_tls())[TLS_SLOT_OPENGL]);
- }
-#else
- extern pthread_key_t gGLKey;
- inline void setGlThreadSpecific(ogles_context_t *value) {
- pthread_setspecific(gGLKey, value);
- }
- inline ogles_context_t* getGlThreadSpecific() {
- return static_cast<ogles_context_t*>(pthread_getspecific(gGLKey));
- }
-#endif
-
-
-struct prims_t {
- typedef ogles_context_t* GL;
- void (*renderPoint)(GL, vertex_t*);
- void (*renderLine)(GL, vertex_t*, vertex_t*);
- void (*renderTriangle)(GL, vertex_t*, vertex_t*, vertex_t*);
-};
-
-struct ogles_context_t {
- context_t rasterizer;
- array_machine_t arrays __attribute__((aligned(32)));
- texture_state_t textures;
- transform_state_t transforms;
- vertex_cache_t vc;
- prims_t prims;
- culling_t cull;
- lighting_t lighting;
- user_clip_planes_t clipPlanes;
- compute_iterators_t lerp; __attribute__((aligned(32)));
- vertex_t current;
- vec4_t currentColorClamped;
- vec3_t currentNormal;
- viewport_t viewport;
- point_size_t point;
- line_width_t line;
- polygon_offset_t polygonOffset;
- fog_t fog;
- uint32_t perspective : 1;
- uint32_t transformTextures : 1;
- EGLSurfaceManager* surfaceManager;
- EGLBufferObjectManager* bufferObjectManager;
-
- GLenum error;
-
- static inline ogles_context_t* get() {
- return getGlThreadSpecific();
- }
-
-};
-
-}; // namespace gl
-}; // namespace android
-
-#endif // ANDROID_OPENGLES_CONTEXT_H
-
diff --git a/include/private/ui/android_natives_priv.h b/include/private/ui/android_natives_priv.h
deleted file mode 100644
index 6b9f524..0000000
--- a/include/private/ui/android_natives_priv.h
+++ /dev/null
@@ -1,17 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <ui/android_native_buffer.h>
diff --git a/media/libaah_rtp/aah_decoder_pump.cpp b/media/libaah_rtp/aah_decoder_pump.cpp
index 72fe43b..28b8c7b 100644
--- a/media/libaah_rtp/aah_decoder_pump.cpp
+++ b/media/libaah_rtp/aah_decoder_pump.cpp
@@ -159,8 +159,8 @@
res = renderer_->queueTimedBuffer(pcm_payload, ts);
if (res != OK) {
- ALOGE("Failed to queue %d byte audio track buffer with media"
- " PTS %lld. (res = %d)", decoded_amt, ts, res);
+ ALOGE("Failed to queue %d byte audio track buffer with"
+ " media PTS %lld. (res = %d)", decoded_amt, ts, res);
} else {
last_queued_pts_valid_ = true;
last_queued_pts_ = ts;
@@ -291,8 +291,8 @@
// thread_status_.
thread_status_ = decoder_->start(format_.get());
if (OK != thread_status_) {
- ALOGE("AAH_DecoderPump's work thread failed to start decoder (res = %d)",
- thread_status_);
+ ALOGE("AAH_DecoderPump's work thread failed to start decoder"
+ " (res = %d)", thread_status_);
return NULL;
}
diff --git a/media/libaah_rtp/aah_rx_player.h b/media/libaah_rtp/aah_rx_player.h
index 7a1b6e3..ba5617e 100644
--- a/media/libaah_rtp/aah_rx_player.h
+++ b/media/libaah_rtp/aah_rx_player.h
@@ -217,14 +217,15 @@
status_t getStatus() const { return status_; }
protected:
- virtual ~Substream() {
- shutdown();
- }
+ virtual ~Substream();
private:
void cleanupDecoder();
bool shouldAbort(const char* log_tag);
void processCompletedBuffer();
+ bool setupSubstreamMeta();
+ bool setupMP3SubstreamMeta();
+ bool setupAACSubstreamMeta();
bool setupSubstreamType(uint8_t substream_type,
uint8_t codec_type);
@@ -235,12 +236,16 @@
bool substream_details_known_;
uint8_t substream_type_;
uint8_t codec_type_;
+ const char* codec_mime_type_;
sp<MetaData> substream_meta_;
MediaBuffer* buffer_in_progress_;
uint32_t expected_buffer_size_;
uint32_t buffer_filled_;
+ Vector<uint8_t> aux_data_in_progress_;
+ uint32_t aux_data_expected_size_;
+
sp<AAH_DecoderPump> decoder_;
static int64_t kAboutToUnderflowThreshold;
diff --git a/media/libaah_rtp/aah_rx_player_core.cpp b/media/libaah_rtp/aah_rx_player_core.cpp
index d2b3386..d6b31fd 100644
--- a/media/libaah_rtp/aah_rx_player_core.cpp
+++ b/media/libaah_rtp/aah_rx_player_core.cpp
@@ -431,8 +431,8 @@
// Looks like a NAK packet; make sure its long enough.
if (amt < static_cast<ssize_t>(sizeof(RetransRequest))) {
- ALOGV("Dropping packet, too short to contain NAK payload (%u bytes)",
- static_cast<uint32_t>(amt));
+ ALOGV("Dropping packet, too short to contain NAK payload"
+ " (%u bytes)", static_cast<uint32_t>(amt));
goto drop_packet;
}
@@ -441,7 +441,8 @@
gap.start_seq_ = ntohs(rtr->start_seq_);
gap.end_seq_ = ntohs(rtr->end_seq_);
- ALOGV("Process NAK for gap at [%hu, %hu]", gap.start_seq_, gap.end_seq_);
+ ALOGV("Process NAK for gap at [%hu, %hu]",
+ gap.start_seq_, gap.end_seq_);
ring_buffer_.processNAK(&gap);
return true;
@@ -770,7 +771,8 @@
ALOGE("Error when sending retransmit request (%d)", errno);
} else {
ALOGV("%s request for range [%hu, %hu] sent",
- (kGS_FastStartGap == gap_status) ? "Fast Start" : "Retransmit",
+ (kGS_FastStartGap == gap_status) ? "Fast Start"
+ : "Retransmit",
gap.start_seq_, gap.end_seq_);
}
diff --git a/media/libaah_rtp/aah_rx_player_ring_buffer.cpp b/media/libaah_rtp/aah_rx_player_ring_buffer.cpp
index 0d8b31f..779405e 100644
--- a/media/libaah_rtp/aah_rx_player_ring_buffer.cpp
+++ b/media/libaah_rtp/aah_rx_player_ring_buffer.cpp
@@ -116,8 +116,8 @@
// Check for overflow first.
if ((!(norm_seq & 0x8000)) && (norm_seq >= (capacity_ - 1))) {
- ALOGW("Ring buffer overflow; cap = %u, [rd, wr] = [%hu, %hu], seq = %hu",
- capacity_, rd_seq_, norm_wr_seq + rd_seq_, seq);
+ ALOGW("Ring buffer overflow; cap = %u, [rd, wr] = [%hu, %hu],"
+ " seq = %hu", capacity_, rd_seq_, norm_wr_seq + rd_seq_, seq);
PacketBuffer::destroy(buf);
return false;
}
diff --git a/media/libaah_rtp/aah_rx_player_substream.cpp b/media/libaah_rtp/aah_rx_player_substream.cpp
index 1e4c784..18b0e2b 100644
--- a/media/libaah_rtp/aah_rx_player_substream.cpp
+++ b/media/libaah_rtp/aah_rx_player_substream.cpp
@@ -27,6 +27,11 @@
#include <media/stagefright/Utils.h>
#include "aah_rx_player.h"
+#include "aah_tx_packet.h"
+
+inline uint32_t min(uint32_t a, uint32_t b) {
+ return (a < b ? a : b);
+}
namespace android {
@@ -38,6 +43,7 @@
substream_details_known_ = false;
buffer_in_progress_ = NULL;
status_ = OK;
+ codec_mime_type_ = "";
decoder_ = new AAH_DecoderPump(omx);
if (decoder_ == NULL) {
@@ -52,6 +58,9 @@
cleanupBufferInProgress();
}
+AAH_RXPlayer::Substream::~Substream() {
+ shutdown();
+}
void AAH_RXPlayer::Substream::shutdown() {
substream_meta_ = NULL;
@@ -69,6 +78,9 @@
expected_buffer_size_ = 0;
buffer_filled_ = 0;
waiting_for_rap_ = true;
+
+ aux_data_in_progress_.clear();
+ aux_data_expected_size_ = 0;
}
void AAH_RXPlayer::Substream::cleanupDecoder() {
@@ -129,16 +141,16 @@
// one that does not conflict with any previously received substream type.
uint8_t header_type = (buf[1] >> 4) & 0xF;
switch (header_type) {
- case 0x01:
+ case TRTPPacket::kHeaderTypeAudio:
// Audio, yay! Just break. We understand audio payloads.
break;
- case 0x02:
+ case TRTPPacket::kHeaderTypeVideo:
ALOGV("RXed packet with unhandled TRTP header type (Video).");
return;
- case 0x03:
+ case TRTPPacket::kHeaderTypeSubpicture:
ALOGV("RXed packet with unhandled TRTP header type (Subpicture).");
return;
- case 0x04:
+ case TRTPPacket::kHeaderTypeControl:
ALOGV("RXed packet with unhandled TRTP header type (Control).");
return;
default:
@@ -148,15 +160,15 @@
}
if (substream_details_known_ && (header_type != substream_type_)) {
- ALOGV("RXed TRTP Payload for SSRC=0x%08x where header type (%u) does not"
- " match previously received header type (%u)",
+ ALOGV("RXed TRTP Payload for SSRC=0x%08x where header type (%u) does"
+ " not match previously received header type (%u)",
ssrc_, header_type, substream_type_);
return;
}
// Check the flags to see if there is another 32 bits of timestamp present.
uint32_t trtp_header_len = 6;
- bool ts_valid = buf[1] & 0x1;
+ bool ts_valid = buf[1] & TRTPPacket::kFlag_TSValid;
if (ts_valid) {
min_length += 4;
trtp_header_len += 4;
@@ -168,11 +180,7 @@
}
// Extract the TRTP length field and sanity check it.
- uint32_t trtp_len;
- trtp_len = (static_cast<uint32_t>(buf[2]) << 24) |
- (static_cast<uint32_t>(buf[3]) << 16) |
- (static_cast<uint32_t>(buf[4]) << 8) |
- static_cast<uint32_t>(buf[5]);
+ uint32_t trtp_len = U32_AT(buf + 2);
if (trtp_len < min_length) {
ALOGV("TRTP length (%u) is too short to be valid. Must be at least %u"
" bytes.", trtp_len, min_length);
@@ -183,17 +191,14 @@
int64_t ts = 0;
uint32_t parse_offset = 6;
if (ts_valid) {
- ts = (static_cast<int64_t>(buf[parse_offset ]) << 56) |
- (static_cast<int64_t>(buf[parse_offset + 1]) << 48) |
- (static_cast<int64_t>(buf[parse_offset + 2]) << 40) |
- (static_cast<int64_t>(buf[parse_offset + 3]) << 32);
- ts |= ts_lower;
+ uint32_t ts_upper = U32_AT(buf + parse_offset);
parse_offset += 4;
+ ts = (static_cast<int64_t>(ts_upper) << 32) | ts_lower;
}
// Check the flags to see if there is another 24 bytes of timestamp
// transformation present.
- if (buf[1] & 0x2) {
+ if (buf[1] & TRTPPacket::kFlag_TSTransformPresent) {
min_length += 24;
parse_offset += 24;
trtp_header_len += 24;
@@ -219,8 +224,8 @@
if (amt < min_length) {
ALOGV("TRTP porttion of RTP payload (%u bytes) too small to contain"
- " entire TRTP header. TRTP does not currently support fragmenting"
- " TRTP headers across RTP payloads", amt);
+ " entire TRTP header. TRTP does not currently support"
+ " fragmenting TRTP headers across RTP payloads", amt);
return;
}
@@ -238,16 +243,42 @@
decoder_->setRenderVolume(volume);
}
- // TODO : move all of the constant flag and offset definitions for TRTP up
- // into some sort of common header file.
- if (waiting_for_rap_ && !(flags & 0x08)) {
+ if (waiting_for_rap_ && !(flags & TRTPAudioPacket::kFlag_RandomAccessPoint)) {
ALOGV("Dropping non-RAP TRTP Audio Payload while waiting for RAP.");
return;
}
- if (flags & 0x10) {
- ALOGV("Dropping TRTP Audio Payload with aux codec data present (only"
- " handle MP3 right now, and it has no aux data)");
+ // Check for the presence of codec aux data.
+ if (flags & TRTPAudioPacket::kFlag_AuxLengthPresent) {
+ min_length += 4;
+ trtp_header_len += 4;
+
+ if (trtp_len < min_length) {
+ ALOGV("TRTP length (%u) is too short to be a valid audio payload. "
+ "Must be at least %u bytes.", trtp_len, min_length);
+ return;
+ }
+
+ if (amt < min_length) {
+ ALOGV("TRTP porttion of RTP payload (%u bytes) too small to contain"
+ " entire TRTP header. TRTP does not currently support"
+ " fragmenting TRTP headers across RTP payloads", amt);
+ return;
+ }
+
+ aux_data_expected_size_ = U32_AT(buf + parse_offset);
+ aux_data_in_progress_.clear();
+ if (aux_data_in_progress_.capacity() < aux_data_expected_size_) {
+ aux_data_in_progress_.setCapacity(aux_data_expected_size_);
+ }
+ } else {
+ aux_data_expected_size_ = 0;
+ }
+
+ if ((aux_data_expected_size_ + trtp_header_len) > trtp_len) {
+ ALOGV("Expected codec aux data length (%u) and TRTP header overhead"
+ " (%u) too large for total TRTP payload length (%u).",
+ aux_data_expected_size_, trtp_header_len, trtp_len);
return;
}
@@ -255,7 +286,9 @@
// the buffer in progress and pack as much payload as we can into it. If
// the payload is finished once we are done, go ahead and send the payload
// to the decoder.
- expected_buffer_size_ = trtp_len - trtp_header_len;
+ expected_buffer_size_ = trtp_len
+ - trtp_header_len
+ - aux_data_expected_size_;
if (!expected_buffer_size_) {
ALOGV("Dropping TRTP Audio Payload with 0 Access Unit length");
return;
@@ -263,9 +296,10 @@
CHECK(amt >= trtp_header_len);
uint32_t todo = amt - trtp_header_len;
- if (expected_buffer_size_ < todo) {
+ if ((expected_buffer_size_ + aux_data_expected_size_) < todo) {
ALOGV("Extra data (%u > %u) present in initial TRTP Audio Payload;"
- " dropping payload.", todo, expected_buffer_size_);
+ " dropping payload.", todo,
+ expected_buffer_size_ + aux_data_expected_size_);
return;
}
@@ -287,18 +321,32 @@
return;
}
- // TODO : set this based on the codec type indicated in the TRTP stream.
- // Right now, we only support MP3, so the choice is obvious.
- meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MPEG);
+ meta->setCString(kKeyMIMEType, codec_mime_type_);
if (ts_valid) {
meta->setInt64(kKeyTime, ts);
}
- if (amt > 0) {
+ // Skip over the header we have already extracted.
+ amt -= trtp_header_len;
+ buf += trtp_header_len;
+
+ // Extract as much of the expected aux data as we can.
+ todo = min(aux_data_expected_size_, amt);
+ if (todo) {
+ aux_data_in_progress_.appendArray(buf, todo);
+ buf += todo;
+ amt -= todo;
+ }
+
+ // Extract as much of the expected payload as we can.
+ todo = min(expected_buffer_size_, amt);
+ if (todo > 0) {
uint8_t* tgt =
reinterpret_cast<uint8_t*>(buffer_in_progress_->data());
- memcpy(tgt + buffer_filled_, buf + trtp_header_len, todo);
- buffer_filled_ += amt;
+ memcpy(tgt, buf, todo);
+ buffer_filled_ = amt;
+ buf += todo;
+ amt -= todo;
}
if (buffer_filled_ >= expected_buffer_size_) {
@@ -318,6 +366,18 @@
return;
}
+ CHECK(aux_data_in_progress_.size() <= aux_data_expected_size_);
+ uint32_t aux_left = aux_data_expected_size_ - aux_data_in_progress_.size();
+ if (aux_left) {
+ uint32_t todo = min(aux_left, amt);
+ aux_data_in_progress_.appendArray(buf, todo);
+ amt -= todo;
+ buf += todo;
+
+ if (!amt)
+ return;
+ }
+
CHECK(buffer_filled_ < expected_buffer_size_);
uint32_t buffer_left = expected_buffer_size_ - buffer_filled_;
if (amt > buffer_left) {
@@ -340,10 +400,6 @@
}
void AAH_RXPlayer::Substream::processCompletedBuffer() {
- const uint8_t* buffer_data = NULL;
- int sample_rate;
- int channel_count;
- size_t frame_size;
status_t res;
CHECK(NULL != buffer_in_progress_);
@@ -353,56 +409,10 @@
goto bailout;
}
- buffer_data = reinterpret_cast<const uint8_t*>(buffer_in_progress_->data());
- if (buffer_in_progress_->size() < 4) {
- ALOGV("MP3 payload too short to contain header, dropping payload.");
+ // Make sure our metadata used to initialize the decoder has been properly
+ // set up.
+ if (!setupSubstreamMeta())
goto bailout;
- }
-
- // Extract the channel count and the sample rate from the MP3 header. The
- // stagefright MP3 requires that these be delivered before decoing can
- // begin.
- if (!GetMPEGAudioFrameSize(U32_AT(buffer_data),
- &frame_size,
- &sample_rate,
- &channel_count,
- NULL,
- NULL)) {
- ALOGV("Failed to parse MP3 header in payload, droping payload.");
- goto bailout;
- }
-
-
- // Make sure that our substream metadata is set up properly. If there has
- // been a format change, be sure to reset the underlying decoder. In
- // stagefright, it seems like the only way to do this is to destroy and
- // recreate the decoder.
- if (substream_meta_ == NULL) {
- substream_meta_ = new MetaData();
-
- if (substream_meta_ == NULL) {
- ALOGE("Failed to allocate MetaData structure for substream");
- goto bailout;
- }
-
- substream_meta_->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MPEG);
- substream_meta_->setInt32 (kKeyChannelCount, channel_count);
- substream_meta_->setInt32 (kKeySampleRate, sample_rate);
- } else {
- int32_t prev_sample_rate;
- int32_t prev_channel_count;
- substream_meta_->findInt32(kKeySampleRate, &prev_sample_rate);
- substream_meta_->findInt32(kKeyChannelCount, &prev_channel_count);
-
- if ((prev_channel_count != channel_count) ||
- (prev_sample_rate != sample_rate)) {
- ALOGW("Format change detected, forcing decoder reset.");
- cleanupDecoder();
-
- substream_meta_->setInt32(kKeyChannelCount, channel_count);
- substream_meta_->setInt32(kKeySampleRate, sample_rate);
- }
- }
// If our decoder has not be set up, do so now.
res = decoder_->init(substream_meta_);
@@ -418,7 +428,7 @@
if (res != OK) {
ALOGD("Failed to queue payload for decode, resetting decoder pump!"
- " (res = %d)", res);
+ " (res = %d)", res);
status_ = res;
cleanupDecoder();
cleanupBufferInProgress();
@@ -454,6 +464,167 @@
cleanupBufferInProgress();
}
+bool AAH_RXPlayer::Substream::setupSubstreamMeta() {
+ switch (codec_type_) {
+ case TRTPAudioPacket::kCodecMPEG1Audio:
+ codec_mime_type_ = MEDIA_MIMETYPE_AUDIO_MPEG;
+ return setupMP3SubstreamMeta();
+
+ case TRTPAudioPacket::kCodecAACAudio:
+ codec_mime_type_ = MEDIA_MIMETYPE_AUDIO_AAC;
+ return setupAACSubstreamMeta();
+
+ default:
+ ALOGV("Failed to setup substream metadata for unsupported codec"
+ " type (%u)", codec_type_);
+ break;
+ }
+
+ return false;
+}
+
+bool AAH_RXPlayer::Substream::setupMP3SubstreamMeta() {
+ const uint8_t* buffer_data = NULL;
+ int sample_rate;
+ int channel_count;
+ size_t frame_size;
+ status_t res;
+
+ buffer_data = reinterpret_cast<const uint8_t*>(buffer_in_progress_->data());
+ if (buffer_in_progress_->size() < 4) {
+ ALOGV("MP3 payload too short to contain header, dropping payload.");
+ return false;
+ }
+
+ // Extract the channel count and the sample rate from the MP3 header. The
+ // stagefright MP3 decoder requires that these be delivered before decoding
+ // can begin.
+ if (!GetMPEGAudioFrameSize(U32_AT(buffer_data),
+ &frame_size,
+ &sample_rate,
+ &channel_count,
+ NULL,
+ NULL)) {
+ ALOGV("Failed to parse MP3 header in payload, droping payload.");
+ return false;
+ }
+
+
+ // Make sure that our substream metadata is set up properly. If there has
+ // been a format change, be sure to reset the underlying decoder. In
+ // stagefright, it seems like the only way to do this is to destroy and
+ // recreate the decoder.
+ if (substream_meta_ == NULL) {
+ substream_meta_ = new MetaData();
+
+ if (substream_meta_ == NULL) {
+ ALOGE("Failed to allocate MetaData structure for MP3 substream");
+ return false;
+ }
+
+ substream_meta_->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MPEG);
+ substream_meta_->setInt32 (kKeyChannelCount, channel_count);
+ substream_meta_->setInt32 (kKeySampleRate, sample_rate);
+ } else {
+ int32_t prev_sample_rate;
+ int32_t prev_channel_count;
+ substream_meta_->findInt32(kKeySampleRate, &prev_sample_rate);
+ substream_meta_->findInt32(kKeyChannelCount, &prev_channel_count);
+
+ if ((prev_channel_count != channel_count) ||
+ (prev_sample_rate != sample_rate)) {
+ ALOGW("MP3 format change detected, forcing decoder reset.");
+ cleanupDecoder();
+
+ substream_meta_->setInt32(kKeyChannelCount, channel_count);
+ substream_meta_->setInt32(kKeySampleRate, sample_rate);
+ }
+ }
+
+ return true;
+}
+
+bool AAH_RXPlayer::Substream::setupAACSubstreamMeta() {
+ int32_t sample_rate, channel_cnt;
+ static const size_t overhead = sizeof(sample_rate)
+ + sizeof(channel_cnt);
+
+ if (aux_data_in_progress_.size() < overhead) {
+ ALOGE("Not enough aux data (%u) to initialize AAC substream decoder",
+ aux_data_in_progress_.size());
+ return false;
+ }
+
+ const uint8_t* aux_data = aux_data_in_progress_.array();
+ size_t aux_data_size = aux_data_in_progress_.size();
+ sample_rate = U32_AT(aux_data);
+ channel_cnt = U32_AT(aux_data + sizeof(sample_rate));
+
+ const uint8_t* esds_data = NULL;
+ size_t esds_data_size = 0;
+ if (aux_data_size > overhead) {
+ esds_data = aux_data + overhead;
+ esds_data_size = aux_data_size - overhead;
+ }
+
+ // Do we already have metadata? If so, has it changed at all? If not, then
+ // there should be nothing else to do. Otherwise, release our old stream
+ // metadata and make new metadata.
+ if (substream_meta_ != NULL) {
+ uint32_t type;
+ const void* data;
+ size_t size;
+ int32_t prev_sample_rate;
+ int32_t prev_channel_count;
+ bool res;
+
+ res = substream_meta_->findInt32(kKeySampleRate, &prev_sample_rate);
+ CHECK(res);
+ res = substream_meta_->findInt32(kKeyChannelCount, &prev_channel_count);
+ CHECK(res);
+
+ // If nothing has changed about the codec aux data (esds, sample rate,
+ // channel count), then we can just do nothing and get out. Otherwise,
+ // we will need to reset the decoder and make a new metadata object to
+ // deal with the format change.
+ bool hasData = (esds_data != NULL);
+ bool hadData = substream_meta_->findData(kKeyESDS, &type, &data, &size);
+ bool esds_change = (hadData != hasData);
+
+ if (!esds_change && hasData)
+ esds_change = ((size != esds_data_size) ||
+ memcmp(data, esds_data, size));
+
+ if (!esds_change &&
+ (prev_sample_rate == sample_rate) &&
+ (prev_channel_count == channel_cnt)) {
+ return true; // no change, just get out.
+ }
+
+ ALOGW("AAC format change detected, forcing decoder reset.");
+ cleanupDecoder();
+ substream_meta_ = NULL;
+ }
+
+ CHECK(substream_meta_ == NULL);
+
+ substream_meta_ = new MetaData();
+ if (substream_meta_ == NULL) {
+ ALOGE("Failed to allocate MetaData structure for AAC substream");
+ return false;
+ }
+
+ substream_meta_->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AAC);
+ substream_meta_->setInt32 (kKeySampleRate, sample_rate);
+ substream_meta_->setInt32 (kKeyChannelCount, channel_cnt);
+
+ if (esds_data) {
+ substream_meta_->setData(kKeyESDS, kTypeESDS,
+ esds_data, esds_data_size);
+ }
+
+ return true;
+}
void AAH_RXPlayer::Substream::processTSTransform(const LinearTransform& trans) {
if (decoder_ != NULL) {
@@ -471,26 +642,34 @@
bool AAH_RXPlayer::Substream::setupSubstreamType(uint8_t substream_type,
uint8_t codec_type) {
- // Sanity check the codec type. Right now we only support MP3. Also check
- // for conflicts with previously delivered codec types.
- if (substream_details_known_ && (codec_type != codec_type_)) {
- ALOGV("RXed TRTP Payload for SSRC=0x%08x where codec type (%u) does not"
- " match previously received codec type (%u)",
- ssrc_, codec_type, codec_type_);
- return false;
+ // Sanity check the codec type. Right now we only support MP3 and AAC.
+ // Also check for conflicts with previously delivered codec types.
+ if (substream_details_known_) {
+ if (codec_type != codec_type_) {
+ ALOGV("RXed TRTP Payload for SSRC=0x%08x where codec type (%u) does"
+ " not match previously received codec type (%u)",
+ ssrc_, codec_type, codec_type_);
+ return false;
+ }
+
+ return true;
}
- if (codec_type != 0x03) {
- ALOGV("RXed TRTP Audio Payload for SSRC=0x%08x with unsupported codec"
- " type (%u)", ssrc_, codec_type);
- return false;
+ switch (codec_type) {
+ // MP3 and AAC are all we support right now.
+ case TRTPAudioPacket::kCodecMPEG1Audio:
+ case TRTPAudioPacket::kCodecAACAudio:
+ break;
+
+ default:
+ ALOGV("RXed TRTP Audio Payload for SSRC=0x%08x with unsupported"
+ " codec type (%u)", ssrc_, codec_type);
+ return false;
}
- if (!substream_details_known_) {
- substream_type_ = substream_type;
- codec_type_ = codec_type;
- substream_details_known_ = true;
- }
+ substream_type_ = substream_type;
+ codec_type_ = codec_type;
+ substream_details_known_ = true;
return true;
}
diff --git a/media/libaah_rtp/aah_tx_packet.cpp b/media/libaah_rtp/aah_tx_packet.cpp
index 3f6e0e9..4cd6e47 100644
--- a/media/libaah_rtp/aah_tx_packet.cpp
+++ b/media/libaah_rtp/aah_tx_packet.cpp
@@ -142,12 +142,18 @@
mVolume = val;
}
-void TRTPAudioPacket::setAccessUnitData(void* data, int len) {
+void TRTPAudioPacket::setAccessUnitData(const void* data, size_t len) {
CHECK(!mIsPacked);
mAccessUnitData = data;
mAccessUnitLen = len;
}
+void TRTPAudioPacket::setAuxData(const void* data, size_t len) {
+ CHECK(!mIsPacked);
+ mAuxData = data;
+ mAuxDataLen = len;
+}
+
/*** TRTP control packet properties ***/
void TRTPControlPacket::setCommandID(TRTPCommandID val) {
@@ -232,6 +238,7 @@
}
int packetLen = kRTPHeaderLen +
+ mAuxDataLen +
mAccessUnitLen +
TRTPHeaderLen();
@@ -249,16 +256,24 @@
mPacketLen = packetLen;
uint8_t* cur = mPacket;
+ bool hasAux = mAuxData && mAuxDataLen;
+ uint8_t flags = (static_cast<int>(hasAux) << 4) |
+ (static_cast<int>(mRandomAccessPoint) << 3) |
+ (static_cast<int>(mDropable) << 2) |
+ (static_cast<int>(mDiscontinuity) << 1) |
+ (static_cast<int>(mEndOfStream));
writeTRTPHeader(cur, true, packetLen);
writeU8(cur, mCodecType);
- writeU8(cur,
- (static_cast<int>(mRandomAccessPoint) << 3) |
- (static_cast<int>(mDropable) << 2) |
- (static_cast<int>(mDiscontinuity) << 1) |
- (static_cast<int>(mEndOfStream)));
+ writeU8(cur, flags);
writeU8(cur, mVolume);
+ if (hasAux) {
+ writeU32(cur, mAuxDataLen);
+ memcpy(cur, mAuxData, mAuxDataLen);
+ cur += mAuxDataLen;
+ }
+
memcpy(cur, mAccessUnitData, mAccessUnitLen);
mIsPacked = true;
@@ -293,12 +308,10 @@
}
- // TODO : properly compute aux data length. Currently, nothing
- // uses aux data, so its length is always 0.
- int auxDataLength = 0;
+ int auxDataLenField = (NULL != mAuxData) ? sizeof(uint32_t) : 0;
return TRTPPacket::TRTPHeaderLen() +
3 +
- auxDataLength +
+ auxDataLenField +
pcmParamLength;
}
diff --git a/media/libaah_rtp/aah_tx_packet.h b/media/libaah_rtp/aah_tx_packet.h
index 833803e..7f78ea0 100644
--- a/media/libaah_rtp/aah_tx_packet.h
+++ b/media/libaah_rtp/aah_tx_packet.h
@@ -25,7 +25,7 @@
namespace android {
class TRTPPacket : public RefBase {
- protected:
+ public:
enum TRTPHeaderType {
kHeaderTypeAudio = 1,
kHeaderTypeVideo = 2,
@@ -33,6 +33,12 @@
kHeaderTypeControl = 4,
};
+ enum TRTPPayloadFlags {
+ kFlag_TSTransformPresent = 0x02,
+ kFlag_TSValid = 0x01,
+ };
+
+ protected:
TRTPPacket(TRTPHeaderType headerType)
: mIsPacked(false)
, mVersion(2)
@@ -121,6 +127,14 @@
class TRTPAudioPacket : public TRTPPacket {
public:
+ enum AudioPayloadFlags {
+ kFlag_AuxLengthPresent = 0x10,
+ kFlag_RandomAccessPoint = 0x08,
+ kFlag_Dropable = 0x04,
+ kFlag_Discontinuity = 0x02,
+ kFlag_EndOfStream = 0x01,
+ };
+
TRTPAudioPacket()
: TRTPPacket(kHeaderTypeAudio)
, mCodecType(kCodecInvalid)
@@ -129,13 +143,17 @@
, mDiscontinuity(false)
, mEndOfStream(false)
, mVolume(0)
- , mAccessUnitData(NULL) { }
+ , mAccessUnitData(NULL)
+ , mAccessUnitLen(0)
+ , mAuxData(NULL)
+ , mAuxDataLen(0) { }
enum TRTPAudioCodecType {
kCodecInvalid = 0,
kCodecPCMBigEndian = 1,
kCodecPCMLittleEndian = 2,
kCodecMPEG1Audio = 3,
+ kCodecAACAudio = 4,
};
void setCodecType(TRTPAudioCodecType val);
@@ -144,7 +162,8 @@
void setDiscontinuity(bool val);
void setEndOfStream(bool val);
void setVolume(uint8_t val);
- void setAccessUnitData(void* data, int len);
+ void setAccessUnitData(const void* data, size_t len);
+ void setAuxData(const void* data, size_t len);
virtual bool pack();
@@ -158,8 +177,11 @@
bool mDiscontinuity;
bool mEndOfStream;
uint8_t mVolume;
- void* mAccessUnitData;
- int mAccessUnitLen;
+
+ const void* mAccessUnitData;
+ size_t mAccessUnitLen;
+ const void* mAuxData;
+ size_t mAuxDataLen;
DISALLOW_EVIL_CONSTRUCTORS(TRTPAudioPacket);
};
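
Publishing the header-type and flag enums lets the RX side replace the magic numbers it previously hard-coded (see the substream changes above). For reference, the audio flag byte built in pack() decodes as in the sketch below; the buf/parse_offset names mirror the RX parser and are otherwise assumptions.

    // Sketch: decode the TRTP audio flag byte using the now-public constants.
    uint8_t flags = buf[parse_offset];
    bool hasAuxLength    = flags & TRTPAudioPacket::kFlag_AuxLengthPresent;   // 0x10
    bool isRandomAccess  = flags & TRTPAudioPacket::kFlag_RandomAccessPoint;  // 0x08
    bool isDropable      = flags & TRTPAudioPacket::kFlag_Dropable;           // 0x04
    bool isDiscontinuity = flags & TRTPAudioPacket::kFlag_Discontinuity;      // 0x02
    bool isEndOfStream   = flags & TRTPAudioPacket::kFlag_EndOfStream;        // 0x01
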
diff --git a/media/libaah_rtp/aah_tx_player.cpp b/media/libaah_rtp/aah_tx_player.cpp
index a79a989..974805b 100644
--- a/media/libaah_rtp/aah_tx_player.cpp
+++ b/media/libaah_rtp/aah_tx_player.cpp
@@ -28,6 +28,7 @@
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
#include <utils/Timers.h>
@@ -98,6 +99,8 @@
mPumpAudioEvent = new AAH_TXEvent(this, &AAH_TXPlayer::onPumpAudio);
mPumpAudioEventPending = false;
+ mAudioCodecData = NULL;
+
reset_l();
}
@@ -146,14 +149,7 @@
const KeyedVector<String8, String8> *headers) {
reset_l();
- // the URL must consist of "aahTX://" followed by the real URL of
- // the data source
- const char *kAAHPrefix = "aahTX://";
- if (strncasecmp(url, kAAHPrefix, strlen(kAAHPrefix))) {
- return INVALID_OPERATION;
- }
-
- mUri.setTo(url + strlen(kAAHPrefix));
+ mUri.setTo(url);
if (headers) {
mUriHeaders = *headers;
@@ -398,7 +394,76 @@
}
}
- mAudioSource->getFormat()->findInt64(kKeyDuration, &mDurationUs);
+ mAudioFormat = mAudioSource->getFormat();
+ if (!mAudioFormat->findInt64(kKeyDuration, &mDurationUs))
+ mDurationUs = 1;
+
+ const char* mime_type = NULL;
+ if (!mAudioFormat->findCString(kKeyMIMEType, &mime_type)) {
+ ALOGE("Failed to find audio substream MIME type during prepare.");
+ abortPrepare(BAD_VALUE);
+ return;
+ }
+
+ if (!strcmp(mime_type, MEDIA_MIMETYPE_AUDIO_MPEG)) {
+ mAudioCodec = TRTPAudioPacket::kCodecMPEG1Audio;
+ } else
+ if (!strcmp(mime_type, MEDIA_MIMETYPE_AUDIO_AAC)) {
+ mAudioCodec = TRTPAudioPacket::kCodecAACAudio;
+
+ uint32_t type;
+ int32_t sample_rate;
+ int32_t channel_count;
+ const void* esds_data;
+ size_t esds_len;
+
+ if (!mAudioFormat->findInt32(kKeySampleRate, &sample_rate)) {
+ ALOGE("Failed to find sample rate for AAC substream.");
+ abortPrepare(BAD_VALUE);
+ return;
+ }
+
+ if (!mAudioFormat->findInt32(kKeyChannelCount, &channel_count)) {
+ ALOGE("Failed to find channel count for AAC substream.");
+ abortPrepare(BAD_VALUE);
+ return;
+ }
+
+ if (!mAudioFormat->findData(kKeyESDS, &type, &esds_data, &esds_len)) {
+ ALOGE("Failed to find codec init data for AAC substream.");
+ abortPrepare(BAD_VALUE);
+ return;
+ }
+
+ CHECK(NULL == mAudioCodecData);
+ mAudioCodecDataSize = esds_len
+ + sizeof(sample_rate)
+ + sizeof(channel_count);
+ mAudioCodecData = new uint8_t[mAudioCodecDataSize];
+ if (NULL == mAudioCodecData) {
+ ALOGE("Failed to allocate %u bytes for AAC substream codec aux"
+ " data.", mAudioCodecDataSize);
+ mAudioCodecDataSize = 0;
+ abortPrepare(BAD_VALUE);
+ return;
+ }
+
+ uint8_t* tmp = mAudioCodecData;
+ tmp[0] = static_cast<uint8_t>((sample_rate >> 24) & 0xFF);
+ tmp[1] = static_cast<uint8_t>((sample_rate >> 16) & 0xFF);
+ tmp[2] = static_cast<uint8_t>((sample_rate >> 8) & 0xFF);
+ tmp[3] = static_cast<uint8_t>((sample_rate ) & 0xFF);
+ tmp[4] = static_cast<uint8_t>((channel_count >> 24) & 0xFF);
+ tmp[5] = static_cast<uint8_t>((channel_count >> 16) & 0xFF);
+ tmp[6] = static_cast<uint8_t>((channel_count >> 8) & 0xFF);
+ tmp[7] = static_cast<uint8_t>((channel_count ) & 0xFF);
+
+ memcpy(tmp + 8, esds_data, esds_len);
+ } else {
+ ALOGE("Unsupported MIME type \"%s\" in audio substream", mime_type);
+ abortPrepare(BAD_VALUE);
+ return;
+ }
status_t err = mAudioSource->start();
if (err != OK) {
@@ -666,6 +731,11 @@
mAudioSource->stop();
}
mAudioSource.clear();
+ mAudioCodec = TRTPAudioPacket::kCodecInvalid;
+ mAudioFormat = NULL;
+ delete[] mAudioCodecData;
+ mAudioCodecData = NULL;
+ mAudioCodecDataSize = 0;
mFlags = 0;
mExtractorFlags = 0;
@@ -717,67 +787,7 @@
}
status_t AAH_TXPlayer::invoke(const Parcel& request, Parcel *reply) {
- if (!reply) {
- return BAD_VALUE;
- }
-
- int32_t methodID;
- status_t err = request.readInt32(&methodID);
- if (err != android::OK) {
- return err;
- }
-
- switch (methodID) {
- case kInvokeSetAAHDstIPPort:
- case kInvokeSetAAHConfigBlob: {
- if (mEndpointValid) {
- return INVALID_OPERATION;
- }
-
- String8 addr;
- uint16_t port;
-
- if (methodID == kInvokeSetAAHDstIPPort) {
- addr = String8(request.readString16());
-
- int32_t port32;
- err = request.readInt32(&port32);
- if (err != android::OK) {
- return err;
- }
- port = static_cast<uint16_t>(port32);
- } else {
- String8 blob(request.readString16());
-
- char addr_buf[101];
- if (sscanf(blob.string(), "V1:%100s %" SCNu16,
- addr_buf, &port) != 2) {
- return BAD_VALUE;
- }
- if (addr.setTo(addr_buf) != OK) {
- return NO_MEMORY;
- }
- }
-
- struct hostent* ent = gethostbyname(addr.string());
- if (ent == NULL) {
- return ERROR_UNKNOWN_HOST;
- }
- if (!(ent->h_addrtype == AF_INET && ent->h_length == 4)) {
- return BAD_VALUE;
- }
-
- Mutex::Autolock lock(mEndpointLock);
- mEndpoint = AAH_TXSender::Endpoint(
- reinterpret_cast<struct in_addr*>(ent->h_addr)->s_addr,
- port);
- mEndpointValid = true;
- return OK;
- };
-
- default:
- return INVALID_OPERATION;
- }
+ return INVALID_OPERATION;
}
status_t AAH_TXPlayer::getMetadata(const media::Metadata::Filter& ids,
@@ -812,6 +822,24 @@
return OK;
}
+status_t AAH_TXPlayer::setRetransmitEndpoint(
+ const struct sockaddr_in* endpoint) {
+ Mutex::Autolock lock(mLock);
+
+ if (NULL == endpoint)
+ return BAD_VALUE;
+
+ // Once the endpoint has been registered, it may not be changed.
+ if (mEndpointRegistered)
+ return INVALID_OPERATION;
+
+ mEndpoint.addr = endpoint->sin_addr.s_addr;
+ mEndpoint.port = endpoint->sin_port;
+ mEndpointValid = true;
+
+ return OK;
+}
+
void AAH_TXPlayer::notifyListener_l(int msg, int ext1, int ext2) {
sendEvent(msg, ext1, ext2);
}
@@ -1078,14 +1106,24 @@
packet->setPTS(mediaTimeUs);
packet->setSubstreamID(1);
- packet->setCodecType(TRTPAudioPacket::kCodecMPEG1Audio);
+ packet->setCodecType(mAudioCodec);
packet->setVolume(mTRTPVolume);
// TODO : introduce a throttle for this so we can control the
// frequency with which transforms get sent.
packet->setClockTransform(mCurrentClockTransform);
packet->setAccessUnitData(data, mediaBuffer->range_length());
+
+ // TODO : while it's pretty much universally true that audio ES payloads
+ // are all RAPs across all codecs, it might be a good idea to throttle
+ // the frequency with which we send codec out-of-band data to the RXers.
+ // If/when we do, we need to flag as RAPs only those payloads which have
+ // the required out-of-band data attached to them.
packet->setRandomAccessPoint(true);
+ if (mAudioCodecData && mAudioCodecDataSize) {
+ packet->setAuxData(mAudioCodecData, mAudioCodecDataSize);
+ }
+
queuePacketToSender_l(packet);
mediaBuffer->release();
diff --git a/media/libaah_rtp/aah_tx_player.h b/media/libaah_rtp/aah_tx_player.h
index 64cf5dc..2e4b1f7 100644
--- a/media/libaah_rtp/aah_tx_player.h
+++ b/media/libaah_rtp/aah_tx_player.h
@@ -63,16 +63,8 @@
Parcel* records);
virtual status_t setVolume(float leftVolume, float rightVolume);
virtual status_t setAudioStreamType(audio_stream_type_t streamType);
-
- // invoke method IDs
- enum {
- // set the IP address and port of the A@H receiver
- kInvokeSetAAHDstIPPort = 1,
-
- // set the destination IP address and port (and perhaps any additional
- // parameters added in the future) packaged in one string
- kInvokeSetAAHConfigBlob,
- };
+ virtual status_t setRetransmitEndpoint(
+ const struct sockaddr_in* endpoint);
static const int64_t kAAHRetryKeepAroundTimeNs;
@@ -153,6 +145,11 @@
sp<NuCachedSource2> mCachedSource;
sp<MediaSource> mAudioSource;
+ TRTPAudioPacket::TRTPAudioCodecType mAudioCodec;
+ sp<MetaData> mAudioFormat;
+ uint8_t* mAudioCodecData;
+ size_t mAudioCodecDataSize;
+
int64_t mDurationUs;
int64_t mBitrate;
diff --git a/media/libaah_rtp/aah_tx_sender.cpp b/media/libaah_rtp/aah_tx_sender.cpp
index d991ea7..08e32d2 100644
--- a/media/libaah_rtp/aah_tx_sender.cpp
+++ b/media/libaah_rtp/aah_tx_sender.cpp
@@ -243,7 +243,7 @@
memset(&addr, 0, sizeof(addr));
addr.sin_family = AF_INET;
addr.sin_addr.s_addr = endpoint.addr;
- addr.sin_port = htons(endpoint.port);
+ addr.sin_port = endpoint.port;
ssize_t result = sendto(mSocket,
packet->getPacket(),
@@ -283,7 +283,8 @@
// remove the state for any endpoints that are no longer in use
for (size_t i = 0; i < endpointsToRemove.size(); i++) {
Endpoint& e = endpointsToRemove.editItemAt(i);
- ALOGD("*** %s removing endpoint addr=%08x", __PRETTY_FUNCTION__, e.addr);
+ ALOGD("*** %s removing endpoint addr=%08x",
+ __PRETTY_FUNCTION__, e.addr);
size_t index = mEndpointMap.indexOfKey(e);
delete mEndpointMap.valueAt(index);
mEndpointMap.removeItemsAt(index);
@@ -411,7 +412,7 @@
return;
}
- Endpoint endpoint(request.endpointIP, ntohs(request.endpointPort));
+ Endpoint endpoint(request.endpointIP, request.endpointPort);
Mutex::Autolock lock(mSender->mEndpointLock);
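
The htons()/ntohs() calls disappear because setRetransmitEndpoint() now stores sin_port exactly as it arrives, i.e. already in network byte order, so the sender can copy it straight into the destination sockaddr. A sketch of the convention; the field names follow the Endpoint struct used above, and the logging line is illustrative only.

    // Sketch: Endpoint carries addr and port in network byte order end to end.
    struct sockaddr_in dest;
    memset(&dest, 0, sizeof(dest));
    dest.sin_family      = AF_INET;
    dest.sin_addr.s_addr = endpoint.addr;  // already network order (from sin_addr)
    dest.sin_port        = endpoint.port;  // already network order (from sin_port)
    // Only convert when a host-order value is needed, e.g. for logging:
    ALOGV("sending to port %u", ntohs(endpoint.port));
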
diff --git a/media/libeffects/data/audio_effects.conf b/media/libeffects/data/audio_effects.conf
index b8fa487..ce25bc8 100644
--- a/media/libeffects/data/audio_effects.conf
+++ b/media/libeffects/data/audio_effects.conf
@@ -50,11 +50,11 @@
}
volume {
library bundle
- uuid 119341a0-8469-11df-81f9- 0002a5d5c51b
+ uuid 119341a0-8469-11df-81f9-0002a5d5c51b
}
reverb_env_aux {
library reverb
- uuid 4a387fc0-8ab3-11df-8bad- 0002a5d5c51b
+ uuid 4a387fc0-8ab3-11df-8bad-0002a5d5c51b
}
reverb_env_ins {
library reverb
diff --git a/media/libeffects/preprocessing/PreProcessing.cpp b/media/libeffects/preprocessing/PreProcessing.cpp
index 098a1a2..dc27d38 100755
--- a/media/libeffects/preprocessing/PreProcessing.cpp
+++ b/media/libeffects/preprocessing/PreProcessing.cpp
@@ -845,6 +845,17 @@
config->inputCfg.samplingRate, config->inputCfg.channels);
int status;
+ // if at least one process is enabled, do not accept configuration changes
+ if (session->enabledMsk) {
+ if (session->samplingRate != config->inputCfg.samplingRate ||
+ session->inChannelCount != inCnl ||
+ session->outChannelCount != outCnl) {
+ return -ENOSYS;
+ } else {
+ return 0;
+ }
+ }
+
// AEC implementation is limited to 16kHz
if (config->inputCfg.samplingRate >= 32000 && !(session->createdMsk & (1 << PREPROC_AEC))) {
session->apmSamplingRate = 32000;
@@ -1287,7 +1298,9 @@
if (*(int *)pReplyData != 0) {
break;
}
- *(int *)pReplyData = Effect_SetState(effect, PREPROC_EFFECT_STATE_CONFIG);
+ if (effect->state != PREPROC_EFFECT_STATE_ACTIVE) {
+ *(int *)pReplyData = Effect_SetState(effect, PREPROC_EFFECT_STATE_CONFIG);
+ }
break;
case EFFECT_CMD_GET_CONFIG:
diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp
index a4068ff..943f3af 100644
--- a/media/libmedia/AudioRecord.cpp
+++ b/media/libmedia/AudioRecord.cpp
@@ -307,7 +307,7 @@
pid_t tid;
if (t != 0) {
mReadyToRun = WOULD_BLOCK;
- t->run("ClientRecordThread", ANDROID_PRIORITY_AUDIO);
+ t->run("AudioRecord", ANDROID_PRIORITY_AUDIO);
tid = t->getTid(); // pid_t is unknown until run()
ALOGV("getTid=%d", tid);
if (tid == -1) {
@@ -386,7 +386,7 @@
return !mActive;
}
-uint32_t AudioRecord::getSampleRate()
+uint32_t AudioRecord::getSampleRate() const
{
AutoMutex lock(mLock);
return mCblk->sampleRate;
@@ -402,7 +402,7 @@
return NO_ERROR;
}
-status_t AudioRecord::getMarkerPosition(uint32_t *marker)
+status_t AudioRecord::getMarkerPosition(uint32_t *marker) const
{
if (marker == NULL) return BAD_VALUE;
@@ -423,7 +423,7 @@
return NO_ERROR;
}
-status_t AudioRecord::getPositionUpdatePeriod(uint32_t *updatePeriod)
+status_t AudioRecord::getPositionUpdatePeriod(uint32_t *updatePeriod) const
{
if (updatePeriod == NULL) return BAD_VALUE;
@@ -432,7 +432,7 @@
return NO_ERROR;
}
-status_t AudioRecord::getPosition(uint32_t *position)
+status_t AudioRecord::getPosition(uint32_t *position) const
{
if (position == NULL) return BAD_VALUE;
@@ -442,7 +442,7 @@
return NO_ERROR;
}
-unsigned int AudioRecord::getInputFramesLost()
+unsigned int AudioRecord::getInputFramesLost() const
{
if (mActive)
return AudioSystem::getInputFramesLost(mInput);
@@ -597,7 +597,7 @@
mCblk->stepUser(audioBuffer->frameCount);
}
-audio_io_handle_t AudioRecord::getInput()
+audio_io_handle_t AudioRecord::getInput() const
{
AutoMutex lock(mLock);
return mInput;
@@ -615,7 +615,7 @@
return mInput;
}
-int AudioRecord::getSessionId()
+int AudioRecord::getSessionId() const
{
return mSessionId;
}
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index 74c97ed..4890f05 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -370,7 +370,7 @@
android_atomic_and(~CBLK_DISABLED_ON, &cblk->flags);
pid_t tid;
if (t != 0) {
- t->run("AudioTrackThread", ANDROID_PRIORITY_AUDIO);
+ t->run("AudioTrack", ANDROID_PRIORITY_AUDIO);
tid = t->getTid(); // pid_t is unknown until run()
ALOGV("getTid=%d", tid);
if (tid == -1) {
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index 64cc919..c47fa41 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -15,6 +15,7 @@
** limitations under the License.
*/
+#include <arpa/inet.h>
#include <stdint.h>
#include <sys/types.h>
@@ -23,8 +24,6 @@
#include <media/IMediaPlayer.h>
#include <media/IStreamSource.h>
-#include <surfaceflinger/ISurface.h>
-#include <surfaceflinger/Surface.h>
#include <gui/ISurfaceTexture.h>
#include <utils/String8.h>
@@ -55,6 +54,7 @@
SET_VIDEO_SURFACETEXTURE,
SET_PARAMETER,
GET_PARAMETER,
+ SET_RETRANSMIT_ENDPOINT,
};
class BpMediaPlayer: public BpInterface<IMediaPlayer>
@@ -291,6 +291,25 @@
return remote()->transact(GET_PARAMETER, data, reply);
}
+ status_t setRetransmitEndpoint(const struct sockaddr_in* endpoint) {
+ Parcel data, reply;
+ status_t err;
+
+ data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
+ if (NULL != endpoint) {
+ data.writeInt32(sizeof(*endpoint));
+ data.write(endpoint, sizeof(*endpoint));
+ } else {
+ data.writeInt32(0);
+ }
+
+ err = remote()->transact(SET_RETRANSMIT_ENDPOINT, data, &reply);
+ if (OK != err) {
+ return err;
+ }
+
+ return reply.readInt32();
+ }
};
IMPLEMENT_META_INTERFACE(MediaPlayer, "android.media.IMediaPlayer");
@@ -459,6 +478,20 @@
CHECK_INTERFACE(IMediaPlayer, data, reply);
return getParameter(data.readInt32(), reply);
} break;
+ case SET_RETRANSMIT_ENDPOINT: {
+ CHECK_INTERFACE(IMediaPlayer, data, reply);
+
+ struct sockaddr_in endpoint;
+ int amt = data.readInt32();
+ if (amt == sizeof(endpoint)) {
+ data.read(&endpoint, sizeof(struct sockaddr_in));
+ reply->writeInt32(setRetransmitEndpoint(&endpoint));
+ } else {
+ reply->writeInt32(setRetransmitEndpoint(NULL));
+ }
+
+ return NO_ERROR;
+ } break;
default:
return BBinder::onTransact(code, data, reply, flags);
}
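
The SET_RETRANSMIT_ENDPOINT transaction above marshals an optional sockaddr_in as a length-prefixed blob: a non-NULL endpoint is sent as sizeof(sockaddr_in) followed by the raw bytes, and NULL is sent as a zero length. A minimal sketch of that convention in isolation, assuming only the android::Parcel calls already used in the patch (the helper names are illustrative):

    #include <arpa/inet.h>

    #include <binder/Parcel.h>

    using android::Parcel;

    // Non-NULL endpoint: length prefix followed by the raw struct; NULL: a zero length.
    static void writeOptionalEndpoint(Parcel *data, const struct sockaddr_in *endpoint) {
        if (endpoint != NULL) {
            data->writeInt32(sizeof(*endpoint));
            data->write(endpoint, sizeof(*endpoint));
        } else {
            data->writeInt32(0);
        }
    }

    // Returns true and fills *endpoint only when a complete struct was marshalled.
    static bool readOptionalEndpoint(const Parcel &data, struct sockaddr_in *endpoint) {
        int32_t amt = data.readInt32();
        if (amt == (int32_t)sizeof(*endpoint)) {
            return data.read(endpoint, sizeof(*endpoint)) == android::NO_ERROR;
        }
        return false;  // zero (or unexpected) length means "no endpoint configured"
    }
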
diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp
index 42f55c2..2f4e31a 100644
--- a/media/libmedia/IMediaRecorder.cpp
+++ b/media/libmedia/IMediaRecorder.cpp
@@ -19,10 +19,10 @@
#define LOG_TAG "IMediaRecorder"
#include <utils/Log.h>
#include <binder/Parcel.h>
-#include <surfaceflinger/Surface.h>
#include <camera/ICamera.h>
#include <media/IMediaRecorderClient.h>
#include <media/IMediaRecorder.h>
+#include <gui/Surface.h>
#include <gui/ISurfaceTexture.h>
#include <unistd.h>
diff --git a/media/libmedia/IOMX.cpp b/media/libmedia/IOMX.cpp
index 27c7e03..48e427a 100644
--- a/media/libmedia/IOMX.cpp
+++ b/media/libmedia/IOMX.cpp
@@ -22,8 +22,6 @@
#include <binder/Parcel.h>
#include <media/IOMX.h>
#include <media/stagefright/foundation/ADebug.h>
-#include <surfaceflinger/ISurface.h>
-#include <surfaceflinger/Surface.h>
namespace android {
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 250425b..4ff1862 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -32,8 +32,6 @@
#include <media/mediaplayer.h>
#include <media/AudioSystem.h>
-#include <surfaceflinger/Surface.h>
-
#include <binder/MemoryBase.h>
#include <utils/KeyedVector.h>
@@ -63,6 +61,7 @@
mAudioSessionId = AudioSystem::newAudioSessionId();
AudioSystem::acquireAudioSessionId(mAudioSessionId);
mSendLevel = 0;
+ mRetransmitEndpointValid = false;
}
MediaPlayer::~MediaPlayer()
@@ -95,6 +94,7 @@
mCurrentPosition = -1;
mSeekPosition = -1;
mVideoWidth = mVideoHeight = 0;
+ mRetransmitEndpointValid = false;
}
status_t MediaPlayer::setListener(const sp<MediaPlayerListener>& listener)
@@ -146,7 +146,8 @@
const sp<IMediaPlayerService>& service(getMediaPlayerService());
if (service != 0) {
sp<IMediaPlayer> player(service->create(getpid(), this, mAudioSessionId));
- if (NO_ERROR != player->setDataSource(url, headers)) {
+ if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
+ (NO_ERROR != player->setDataSource(url, headers))) {
player.clear();
}
err = attachNewPlayer(player);
@@ -162,7 +163,8 @@
const sp<IMediaPlayerService>& service(getMediaPlayerService());
if (service != 0) {
sp<IMediaPlayer> player(service->create(getpid(), this, mAudioSessionId));
- if (NO_ERROR != player->setDataSource(fd, offset, length)) {
+ if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
+ (NO_ERROR != player->setDataSource(fd, offset, length))) {
player.clear();
}
err = attachNewPlayer(player);
@@ -177,7 +179,8 @@
const sp<IMediaPlayerService>& service(getMediaPlayerService());
if (service != 0) {
sp<IMediaPlayer> player(service->create(getpid(), this, mAudioSessionId));
- if (NO_ERROR != player->setDataSource(source)) {
+ if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
+ (NO_ERROR != player->setDataSource(source))) {
player.clear();
}
err = attachNewPlayer(player);
@@ -471,6 +474,20 @@
return NO_ERROR;
}
+status_t MediaPlayer::doSetRetransmitEndpoint(const sp<IMediaPlayer>& player) {
+ Mutex::Autolock _l(mLock);
+
+ if (player == NULL) {
+ return UNKNOWN_ERROR;
+ }
+
+ if (mRetransmitEndpointValid) {
+ return player->setRetransmitEndpoint(&mRetransmitEndpoint);
+ }
+
+ return OK;
+}
+
status_t MediaPlayer::reset()
{
ALOGV("reset");
@@ -599,6 +616,34 @@
return INVALID_OPERATION;
}
+status_t MediaPlayer::setRetransmitEndpoint(const char* addrString,
+ uint16_t port) {
+ ALOGV("MediaPlayer::setRetransmitEndpoint(%s:%hu)",
+ addrString ? addrString : "(null)", port);
+
+ Mutex::Autolock _l(mLock);
+ if ((mPlayer != NULL) || (mCurrentState != MEDIA_PLAYER_IDLE))
+ return INVALID_OPERATION;
+
+ if (NULL == addrString) {
+ mRetransmitEndpointValid = false;
+ return OK;
+ }
+
+ struct in_addr saddr;
+ if (!inet_aton(addrString, &saddr)) {
+ return BAD_VALUE;
+ }
+
+ memset(&mRetransmitEndpoint, 0, sizeof(mRetransmitEndpoint));
+ mRetransmitEndpoint.sin_family = AF_INET;
+ mRetransmitEndpoint.sin_addr = saddr;
+ mRetransmitEndpoint.sin_port = htons(port);
+ mRetransmitEndpointValid = true;
+
+ return OK;
+}
+
void MediaPlayer::notify(int msg, int ext1, int ext2, const Parcel *obj)
{
ALOGV("message received msg=%d, ext1=%d, ext2=%d", msg, ext1, ext2);
diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp
index 8d947d8..cc73014 100644
--- a/media/libmedia/mediarecorder.cpp
+++ b/media/libmedia/mediarecorder.cpp
@@ -18,7 +18,6 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "MediaRecorder"
#include <utils/Log.h>
-#include <surfaceflinger/Surface.h>
#include <media/mediarecorder.h>
#include <binder/IServiceManager.h>
#include <utils/String8.h>
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 764eddc..1e2abf0 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -492,6 +492,7 @@
mStatus = NO_INIT;
mAudioSessionId = audioSessionId;
mUID = uid;
+ mRetransmitEndpointValid = false;
#if CALLBACK_ANTAGONIZER
ALOGD("create Antagonizer");
@@ -602,10 +603,6 @@
return AAH_RX_PLAYER;
}
- if (!strncasecmp("aahTX://", url, 8)) {
- return AAH_TX_PLAYER;
- }
-
// use MidiFile for MIDI extensions
int lenURL = strlen(url);
for (int i = 0; i < NELEM(FILE_EXTS); ++i) {
@@ -621,6 +618,44 @@
return getDefaultPlayerType();
}
+player_type MediaPlayerService::Client::getPlayerType(int fd,
+ int64_t offset,
+ int64_t length)
+{
+ // Until re-transmit functionality is added to the existing core Android
+ // players, we use the special AAH TX player whenever we have been
+ // configured for retransmission.
+ if (mRetransmitEndpointValid) {
+ return AAH_TX_PLAYER;
+ }
+
+ return android::getPlayerType(fd, offset, length);
+}
+
+player_type MediaPlayerService::Client::getPlayerType(const char* url)
+{
+ // Until re-transmit functionality is added to the existing core Android
+ // players, we use the special AAH TX player whenever we have been
+ // configured for retransmission.
+ if (mRetransmitEndpointValid) {
+ return AAH_TX_PLAYER;
+ }
+
+ return android::getPlayerType(url);
+}
+
+player_type MediaPlayerService::Client::getPlayerType(
+ const sp<IStreamSource> &source) {
+ // Until re-transmit functionality is added to the existing core Android
+ // players, we use the special AAH TX player whenever we have been
+ // configured for retransmission.
+ if (mRetransmitEndpointValid) {
+ return AAH_TX_PLAYER;
+ }
+
+ return NU_PLAYER;
+}
+
static sp<MediaPlayerBase> createPlayer(player_type playerType, void* cookie,
notify_callback_f notifyFunc)
{
@@ -686,6 +721,49 @@
return p;
}
+sp<MediaPlayerBase> MediaPlayerService::Client::setDataSource_pre(
+ player_type playerType)
+{
+ ALOGV("player type = %d", playerType);
+
+ // create the right type of player
+ sp<MediaPlayerBase> p = createPlayer(playerType);
+ if (p == NULL) {
+ return p;
+ }
+
+ if (!p->hardwareOutput()) {
+ mAudioOutput = new AudioOutput(mAudioSessionId);
+ static_cast<MediaPlayerInterface*>(p.get())->setAudioSink(mAudioOutput);
+ }
+
+ return p;
+}
+
+void MediaPlayerService::Client::setDataSource_post(
+ const sp<MediaPlayerBase>& p,
+ status_t status)
+{
+ ALOGV(" setDataSource");
+ mStatus = status;
+ if (mStatus != OK) {
+ ALOGE(" error: %d", mStatus);
+ return;
+ }
+
+ // Set the re-transmission endpoint if one was chosen.
+ if (mRetransmitEndpointValid) {
+ mStatus = p->setRetransmitEndpoint(&mRetransmitEndpoint);
+ if (mStatus != NO_ERROR) {
+ ALOGE("setRetransmitEndpoint error: %d", mStatus);
+ }
+ }
+
+ if (mStatus == OK) {
+ mPlayer = p;
+ }
+}
+
status_t MediaPlayerService::Client::setDataSource(
const char *url, const KeyedVector<String8, String8> *headers)
{
@@ -717,25 +795,12 @@
return mStatus;
} else {
player_type playerType = getPlayerType(url);
- ALOGV("player type = %d", playerType);
-
- // create the right type of player
- sp<MediaPlayerBase> p = createPlayer(playerType);
- if (p == NULL) return NO_INIT;
-
- if (!p->hardwareOutput()) {
- mAudioOutput = new AudioOutput(mAudioSessionId);
- static_cast<MediaPlayerInterface*>(p.get())->setAudioSink(mAudioOutput);
+ sp<MediaPlayerBase> p = setDataSource_pre(playerType);
+ if (p == NULL) {
+ return NO_INIT;
}
- // now set data source
- ALOGV(" setDataSource");
- mStatus = p->setDataSource(url, headers);
- if (mStatus == NO_ERROR) {
- mPlayer = p;
- } else {
- ALOGE(" error: %d", mStatus);
- }
+ setDataSource_post(p, p->setDataSource(url, headers));
return mStatus;
}
}
@@ -766,46 +831,34 @@
ALOGV("calculated length = %lld", length);
}
+ // Until re-transmit functionality is added to the existing core Android
+ // players, we use the special AAH TX player whenever we have been configured
+ // for retransmission.
player_type playerType = getPlayerType(fd, offset, length);
- ALOGV("player type = %d", playerType);
-
- // create the right type of player
- sp<MediaPlayerBase> p = createPlayer(playerType);
- if (p == NULL) return NO_INIT;
-
- if (!p->hardwareOutput()) {
- mAudioOutput = new AudioOutput(mAudioSessionId);
- static_cast<MediaPlayerInterface*>(p.get())->setAudioSink(mAudioOutput);
+ sp<MediaPlayerBase> p = setDataSource_pre(playerType);
+ if (p == NULL) {
+ return NO_INIT;
}
// now set data source
- mStatus = p->setDataSource(fd, offset, length);
- if (mStatus == NO_ERROR) mPlayer = p;
-
+ setDataSource_post(p, p->setDataSource(fd, offset, length));
return mStatus;
}
status_t MediaPlayerService::Client::setDataSource(
const sp<IStreamSource> &source) {
// create the right type of player
- sp<MediaPlayerBase> p = createPlayer(NU_PLAYER);
-
+ // Until re-transmit functionality is added to the existing core Android
+ // players, we use the special AAH TX player whenever we have been configured
+ // for retransmission.
+ player_type playerType = getPlayerType(source);
+ sp<MediaPlayerBase> p = setDataSource_pre(playerType);
if (p == NULL) {
return NO_INIT;
}
- if (!p->hardwareOutput()) {
- mAudioOutput = new AudioOutput(mAudioSessionId);
- static_cast<MediaPlayerInterface*>(p.get())->setAudioSink(mAudioOutput);
- }
-
// now set data source
- mStatus = p->setDataSource(source);
-
- if (mStatus == OK) {
- mPlayer = p;
- }
-
+ setDataSource_post(p, p->setDataSource(source));
return mStatus;
}
@@ -1026,6 +1079,7 @@
status_t MediaPlayerService::Client::reset()
{
ALOGV("[%d] reset", mConnId);
+ mRetransmitEndpointValid = false;
sp<MediaPlayerBase> p = getPlayer();
if (p == 0) return UNKNOWN_ERROR;
return p->reset();
@@ -1100,6 +1154,36 @@
return p->getParameter(key, reply);
}
+status_t MediaPlayerService::Client::setRetransmitEndpoint(
+ const struct sockaddr_in* endpoint) {
+
+ if (NULL != endpoint) {
+ uint32_t a = ntohl(endpoint->sin_addr.s_addr);
+ uint16_t p = ntohs(endpoint->sin_port);
+ ALOGV("[%d] setRetransmitEndpoint(%u.%u.%u.%u:%hu)", mConnId,
+ (a >> 24), (a >> 16) & 0xFF, (a >> 8) & 0xFF, (a & 0xFF), p);
+ } else {
+ ALOGV("[%d] setRetransmitEndpoint = <none>", mConnId);
+ }
+
+ sp<MediaPlayerBase> p = getPlayer();
+
+ // Right now, the only valid time to set a retransmit endpoint is before
+ // player selection has been made (since the presence or absence of a
+ // retransmit endpoint is going to determine which player is selected during
+ // setDataSource).
+ if (p != 0) return INVALID_OPERATION;
+
+ if (NULL != endpoint) {
+ mRetransmitEndpoint = *endpoint;
+ mRetransmitEndpointValid = true;
+ } else {
+ mRetransmitEndpointValid = false;
+ }
+
+ return NO_ERROR;
+}
+
void MediaPlayerService::Client::notify(
void* cookie, int msg, int ext1, int ext2, const Parcel *obj)
{
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 52af64d..53847ed 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -18,6 +18,8 @@
#ifndef ANDROID_MEDIAPLAYERSERVICE_H
#define ANDROID_MEDIAPLAYERSERVICE_H
+#include <arpa/inet.h>
+
#include <utils/Log.h>
#include <utils/threads.h>
#include <utils/List.h>
@@ -276,6 +278,7 @@
virtual status_t attachAuxEffect(int effectId);
virtual status_t setParameter(int key, const Parcel &request);
virtual status_t getParameter(int key, Parcel *reply);
+ virtual status_t setRetransmitEndpoint(const struct sockaddr_in* endpoint);
sp<MediaPlayerBase> createPlayer(player_type playerType);
@@ -287,6 +290,14 @@
virtual status_t setDataSource(const sp<IStreamSource> &source);
+ sp<MediaPlayerBase> setDataSource_pre(player_type playerType);
+ void setDataSource_post(const sp<MediaPlayerBase>& p,
+ status_t status);
+
+ player_type getPlayerType(int fd, int64_t offset, int64_t length);
+ player_type getPlayerType(const char* url);
+ player_type getPlayerType(const sp<IStreamSource> &source);
+
static void notify(void* cookie, int msg,
int ext1, int ext2, const Parcel *obj);
@@ -338,6 +349,8 @@
uid_t mUID;
sp<ANativeWindow> mConnectedWindow;
sp<IBinder> mConnectedWindowBinder;
+ struct sockaddr_in mRetransmitEndpoint;
+ bool mRetransmitEndpointValid;
// Metadata filters.
media::Metadata::Filter mMetadataAllow; // protected by mLock
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index c5f4f86..ca79657 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -40,7 +40,7 @@
#include <media/MediaProfiles.h>
#include <camera/ICamera.h>
#include <camera/CameraParameters.h>
-#include <surfaceflinger/Surface.h>
+#include <gui/Surface.h>
#include <utils/Errors.h>
#include <sys/types.h>
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index b731d0f..e618f67 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -38,7 +38,6 @@
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
-#include <surfaceflinger/Surface.h>
#include <gui/ISurfaceTexture.h>
#include "avc_utils.h"
@@ -387,10 +386,10 @@
audio ? "audio" : "video");
mRenderer->queueEOS(audio, UNKNOWN_ERROR);
- } else {
- CHECK_EQ((int)what, (int)ACodec::kWhatDrainThisBuffer);
-
+ } else if (what == ACodec::kWhatDrainThisBuffer) {
renderBuffer(audio, codecRequest);
+ } else {
+ ALOGV("Unhandled codec notification %d.", what);
}
break;
@@ -768,7 +767,7 @@
mediaTimeUs / 1E6);
#endif
- reply->setObject("buffer", accessUnit);
+ reply->setBuffer("buffer", accessUnit);
reply->post();
return OK;
@@ -793,10 +792,8 @@
return;
}
- sp<RefBase> obj;
- CHECK(msg->findObject("buffer", &obj));
-
- sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());
+ sp<ABuffer> buffer;
+ CHECK(msg->findBuffer("buffer", &buffer));
int64_t &skipUntilMediaTimeUs =
audio
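
This hunk, together with the matching changes in NuPlayerDecoder, NuPlayerRenderer, RTSPSource and ACodec further down, replaces the generic setObject()/findObject()-plus-static_cast pattern with AMessage's typed setBuffer()/findBuffer() accessors. A minimal sketch of the two styles, assuming the stagefright foundation headers; the helper functions are illustrative:

    #include <media/stagefright/foundation/ABuffer.h>
    #include <media/stagefright/foundation/ADebug.h>
    #include <media/stagefright/foundation/AMessage.h>

    using namespace android;

    static void stashAccessUnit(const sp<AMessage> &msg, const sp<ABuffer> &accessUnit) {
        // Old pattern: store as a generic RefBase and static_cast on the way out:
        //   msg->setObject("buffer", accessUnit);
        // New pattern: typed accessor, no cast needed at the receiving end.
        msg->setBuffer("buffer", accessUnit);
    }

    static sp<ABuffer> takeAccessUnit(const sp<AMessage> &msg) {
        sp<ABuffer> buffer;
        CHECK(msg->findBuffer("buffer", &buffer));
        return buffer;
    }
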
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index ffc710e..6be14be 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -21,8 +21,6 @@
#include <media/MediaPlayerInterface.h>
#include <media/stagefright/foundation/AHandler.h>
#include <media/stagefright/NativeWindowWrapper.h>
-#include <gui/SurfaceTextureClient.h>
-#include <surfaceflinger/Surface.h>
namespace android {
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index 56c2773..460fc98 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -29,8 +29,6 @@
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/Utils.h>
-#include <surfaceflinger/Surface.h>
-#include <gui/ISurfaceTexture.h>
namespace android {
@@ -214,8 +212,6 @@
buffer->meta()->setInt32("csd", true);
mCSD.push(buffer);
-
- msg->setObject("csd", buffer);
} else if (meta->findData(kKeyESDS, &type, &data, &size)) {
ESDS esds((const char *)data, size);
CHECK_EQ(esds.InitCheck(), (status_t)OK);
@@ -242,9 +238,8 @@
CHECK(msg->findMessage("reply", &reply));
#if 0
- sp<RefBase> obj;
- CHECK(msg->findObject("buffer", &obj));
- sp<ABuffer> outBuffer = static_cast<ABuffer *>(obj.get());
+ sp<ABuffer> outBuffer;
+ CHECK(msg->findBuffer("buffer", &outBuffer));
#else
sp<ABuffer> outBuffer;
#endif
@@ -253,7 +248,7 @@
outBuffer = mCSD.editItemAt(mCSDIndex++);
outBuffer->meta()->setInt64("timeUs", 0);
- reply->setObject("buffer", outBuffer);
+ reply->setBuffer("buffer", outBuffer);
reply->post();
return;
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index 15259cb..5738ecb 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -60,7 +60,7 @@
const sp<AMessage> &notifyConsumed) {
sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
msg->setInt32("audio", static_cast<int32_t>(audio));
- msg->setObject("buffer", buffer);
+ msg->setBuffer("buffer", buffer);
msg->setMessage("notifyConsumed", notifyConsumed);
msg->post();
}
@@ -411,9 +411,8 @@
return;
}
- sp<RefBase> obj;
- CHECK(msg->findObject("buffer", &obj));
- sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());
+ sp<ABuffer> buffer;
+ CHECK(msg->findBuffer("buffer", &buffer));
sp<AMessage> notifyConsumed;
CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
index 6eb0d07..4c65b65 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
@@ -218,10 +218,8 @@
CHECK(msg->findSize("trackIndex", &trackIndex));
CHECK_LT(trackIndex, mTracks.size());
- sp<RefBase> obj;
- CHECK(msg->findObject("accessUnit", &obj));
-
- sp<ABuffer> accessUnit = static_cast<ABuffer *>(obj.get());
+ sp<ABuffer> accessUnit;
+ CHECK(msg->findBuffer("accessUnit", &accessUnit));
int32_t damaged;
if (accessUnit->meta()->findInt32("damaged", &damaged)
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index ca44ea3..09e4e45 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -26,14 +26,12 @@
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/NativeWindowWrapper.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/OMXCodec.h>
-#include <surfaceflinger/Surface.h>
-#include <gui/SurfaceTextureClient.h>
-
#include <OMX_Component.h>
namespace android {
@@ -168,15 +166,36 @@
protected:
virtual bool onMessageReceived(const sp<AMessage> &msg);
+ virtual void stateEntered();
private:
void onSetup(const sp<AMessage> &msg);
+ bool onAllocateComponent(const sp<AMessage> &msg);
DISALLOW_EVIL_CONSTRUCTORS(UninitializedState);
};
////////////////////////////////////////////////////////////////////////////////
+struct ACodec::LoadedState : public ACodec::BaseState {
+ LoadedState(ACodec *codec);
+
+protected:
+ virtual bool onMessageReceived(const sp<AMessage> &msg);
+ virtual void stateEntered();
+
+private:
+ friend struct ACodec::UninitializedState;
+
+ bool onConfigureComponent(const sp<AMessage> &msg);
+ void onStart();
+ void onShutdown(bool keepComponentAllocated);
+
+ DISALLOW_EVIL_CONSTRUCTORS(LoadedState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
struct ACodec::LoadedToIdleState : public ACodec::BaseState {
LoadedToIdleState(ACodec *codec);
@@ -265,6 +284,8 @@
private:
void changeStateIfWeOwnAllBuffers();
+ bool mComponentNowIdle;
+
DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState);
};
@@ -308,9 +329,13 @@
////////////////////////////////////////////////////////////////////////////////
ACodec::ACodec()
- : mNode(NULL),
- mSentFormat(false) {
+ : mQuirks(0),
+ mNode(NULL),
+ mSentFormat(false),
+ mIsEncoder(false),
+ mShutdownInProgress(false) {
mUninitializedState = new UninitializedState(this);
+ mLoadedState = new LoadedState(this);
mLoadedToIdleState = new LoadedToIdleState(this);
mIdleToExecutingState = new IdleToExecutingState(this);
mExecutingState = new ExecutingState(this);
@@ -341,6 +366,22 @@
msg->post();
}
+void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) {
+ msg->setWhat(kWhatAllocateComponent);
+ msg->setTarget(id());
+ msg->post();
+}
+
+void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) {
+ msg->setWhat(kWhatConfigureComponent);
+ msg->setTarget(id());
+ msg->post();
+}
+
+void ACodec::initiateStart() {
+ (new AMessage(kWhatStart, id()))->post();
+}
+
void ACodec::signalFlush() {
ALOGV("[%s] signalFlush", mComponentName.c_str());
(new AMessage(kWhatFlush, id()))->post();
@@ -350,8 +391,10 @@
(new AMessage(kWhatResume, id()))->post();
}
-void ACodec::initiateShutdown() {
- (new AMessage(kWhatShutdown, id()))->post();
+void ACodec::initiateShutdown(bool keepComponentAllocated) {
+ sp<AMessage> msg = new AMessage(kWhatShutdown, id());
+ msg->setInt32("keepComponentAllocated", keepComponentAllocated);
+ msg->post();
}
status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
@@ -360,62 +403,71 @@
CHECK(mDealer[portIndex] == NULL);
CHECK(mBuffers[portIndex].isEmpty());
+ status_t err;
if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
- return allocateOutputBuffersFromNativeWindow();
+ err = allocateOutputBuffersFromNativeWindow();
+ } else {
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = portIndex;
+
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+ if (err == OK) {
+ ALOGV("[%s] Allocating %lu buffers of size %lu on %s port",
+ mComponentName.c_str(),
+ def.nBufferCountActual, def.nBufferSize,
+ portIndex == kPortIndexInput ? "input" : "output");
+
+ size_t totalSize = def.nBufferCountActual * def.nBufferSize;
+ mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec");
+
+ for (OMX_U32 i = 0; i < def.nBufferCountActual; ++i) {
+ sp<IMemory> mem = mDealer[portIndex]->allocate(def.nBufferSize);
+ CHECK(mem.get() != NULL);
+
+ IOMX::buffer_id buffer;
+
+ uint32_t requiresAllocateBufferBit =
+ (portIndex == kPortIndexInput)
+ ? OMXCodec::kRequiresAllocateBufferOnInputPorts
+ : OMXCodec::kRequiresAllocateBufferOnOutputPorts;
+
+ if (mQuirks & requiresAllocateBufferBit) {
+ err = mOMX->allocateBufferWithBackup(
+ mNode, portIndex, mem, &buffer);
+ } else {
+ err = mOMX->useBuffer(mNode, portIndex, mem, &buffer);
+ }
+
+ BufferInfo info;
+ info.mBufferID = buffer;
+ info.mStatus = BufferInfo::OWNED_BY_US;
+ info.mData = new ABuffer(mem->pointer(), def.nBufferSize);
+ mBuffers[portIndex].push(info);
+ }
+ }
}
- OMX_PARAM_PORTDEFINITIONTYPE def;
- InitOMXParams(&def);
- def.nPortIndex = portIndex;
-
- status_t err = mOMX->getParameter(
- mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-
if (err != OK) {
return err;
}
- ALOGV("[%s] Allocating %lu buffers of size %lu on %s port",
- mComponentName.c_str(),
- def.nBufferCountActual, def.nBufferSize,
- portIndex == kPortIndexInput ? "input" : "output");
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", ACodec::kWhatBuffersAllocated);
- size_t totalSize = def.nBufferCountActual * def.nBufferSize;
- mDealer[portIndex] = new MemoryDealer(totalSize, "OMXCodec");
+ notify->setInt32("portIndex", portIndex);
+ for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
+ AString name = StringPrintf("buffer-id_%d", i);
+ notify->setPointer(name.c_str(), mBuffers[portIndex][i].mBufferID);
- for (OMX_U32 i = 0; i < def.nBufferCountActual; ++i) {
- sp<IMemory> mem = mDealer[portIndex]->allocate(def.nBufferSize);
- CHECK(mem.get() != NULL);
-
- IOMX::buffer_id buffer;
-
- if (!strcasecmp(
- mComponentName.c_str(), "OMX.TI.DUCATI1.VIDEO.DECODER")) {
- if (portIndex == kPortIndexInput && i == 0) {
- // Only log this warning once per allocation round.
-
- ALOGW("OMX.TI.DUCATI1.VIDEO.DECODER requires the use of "
- "OMX_AllocateBuffer instead of the preferred "
- "OMX_UseBuffer. Vendor must fix this.");
- }
-
- err = mOMX->allocateBufferWithBackup(
- mNode, portIndex, mem, &buffer);
- } else {
- err = mOMX->useBuffer(mNode, portIndex, mem, &buffer);
- }
-
- if (err != OK) {
- return err;
- }
-
- BufferInfo info;
- info.mBufferID = buffer;
- info.mStatus = BufferInfo::OWNED_BY_US;
- info.mData = new ABuffer(mem->pointer(), def.nBufferSize);
- mBuffers[portIndex].push(info);
+ name = StringPrintf("data_%d", i);
+ notify->setBuffer(name.c_str(), mBuffers[portIndex][i].mData);
}
+ notify->post();
+
return OK;
}
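
With this rewrite allocateBuffersOnPort() handles the native-window and MemoryDealer paths through one exit and, on success, posts a kWhatBuffersAllocated notification whose per-buffer entries are keyed "buffer-id_%d" and "data_%d". A sketch of how a receiver might unpack that message, assuming those key names; the handler itself is illustrative:

    #include <media/stagefright/foundation/ABuffer.h>
    #include <media/stagefright/foundation/ADebug.h>
    #include <media/stagefright/foundation/AMessage.h>
    #include <media/stagefright/foundation/AString.h>

    using namespace android;

    static size_t onBuffersAllocated(const sp<AMessage> &msg) {
        int32_t portIndex;
        CHECK(msg->findInt32("portIndex", &portIndex));

        size_t count = 0;
        // Entries are dense, so walk indices until a "buffer-id_N" key is missing.
        for (size_t i = 0;; ++i) {
            AString name = StringPrintf("buffer-id_%d", i);

            void *bufferID;
            if (!msg->findPointer(name.c_str(), &bufferID)) {
                break;
            }

            name = StringPrintf("data_%d", i);
            sp<ABuffer> data;
            CHECK(msg->findBuffer(name.c_str(), &data));

            // bufferID is the IOMX::buffer_id; data wraps the memory backing it.
            ++count;
        }
        return count;
    }
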
@@ -671,7 +723,7 @@
return NULL;
}
-void ACodec::setComponentRole(
+status_t ACodec::setComponentRole(
bool isEncoder, const char *mime) {
struct MimeToRole {
const char *mime;
@@ -700,6 +752,8 @@
"video_decoder.mpeg4", "video_encoder.mpeg4" },
{ MEDIA_MIMETYPE_VIDEO_H263,
"video_decoder.h263", "video_encoder.h263" },
+ { MEDIA_MIMETYPE_VIDEO_VPX,
+ "video_decoder.vpx", "video_encoder.vpx" },
};
static const size_t kNumMimeToRole =
@@ -713,7 +767,7 @@
}
if (i == kNumMimeToRole) {
- return;
+ return ERROR_UNSUPPORTED;
}
const char *role =
@@ -736,50 +790,83 @@
if (err != OK) {
ALOGW("[%s] Failed to set standard component role '%s'.",
mComponentName.c_str(), role);
+
+ return err;
}
}
+
+ return OK;
}
-void ACodec::configureCodec(
+status_t ACodec::configureCodec(
const char *mime, const sp<AMessage> &msg) {
- setComponentRole(false /* isEncoder */, mime);
+ int32_t encoder;
+ if (!msg->findInt32("encoder", &encoder)) {
+ encoder = false;
+ }
+
+ mIsEncoder = encoder;
+
+ status_t err = setComponentRole(encoder /* isEncoder */, mime);
+
+ if (err != OK) {
+ return err;
+ }
+
+ int32_t bitRate = 0;
+ if (encoder && !msg->findInt32("bitrate", &bitRate)) {
+ return INVALID_OPERATION;
+ }
if (!strncasecmp(mime, "video/", 6)) {
- int32_t width, height;
- CHECK(msg->findInt32("width", &width));
- CHECK(msg->findInt32("height", &height));
-
- CHECK_EQ(setupVideoDecoder(mime, width, height),
- (status_t)OK);
+ if (encoder) {
+ err = setupVideoEncoder(mime, msg);
+ } else {
+ int32_t width, height;
+ if (!msg->findInt32("width", &width)
+ || !msg->findInt32("height", &height)) {
+ err = INVALID_OPERATION;
+ } else {
+ err = setupVideoDecoder(mime, width, height);
+ }
+ }
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
int32_t numChannels, sampleRate;
- CHECK(msg->findInt32("channel-count", &numChannels));
- CHECK(msg->findInt32("sample-rate", &sampleRate));
-
- CHECK_EQ(setupAACDecoder(numChannels, sampleRate), (status_t)OK);
+ if (!msg->findInt32("channel-count", &numChannels)
+ || !msg->findInt32("sample-rate", &sampleRate)) {
+ err = INVALID_OPERATION;
+ } else {
+ err = setupAACCodec(encoder, numChannels, sampleRate, bitRate);
+ }
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) {
- CHECK_EQ(setupAMRDecoder(false /* isWAMR */), (status_t)OK);
+ err = setupAMRCodec(encoder, false /* isWAMR */, bitRate);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) {
- CHECK_EQ(setupAMRDecoder(true /* isWAMR */), (status_t)OK);
+ err = setupAMRCodec(encoder, true /* isWAMR */, bitRate);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW)
|| !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) {
// These are PCM-like formats with a fixed sample rate but
// a variable number of channels.
int32_t numChannels;
- CHECK(msg->findInt32("channel-count", &numChannels));
+ if (!msg->findInt32("channel-count", &numChannels)) {
+ err = INVALID_OPERATION;
+ } else {
+ err = setupG711Codec(encoder, numChannels);
+ }
+ }
- CHECK_EQ(setupG711Decoder(numChannels), (status_t)OK);
+ if (err != OK) {
+ return err;
}
int32_t maxInputSize;
if (msg->findInt32("max-input-size", &maxInputSize)) {
- CHECK_EQ(setMinBufferSize(kPortIndexInput, (size_t)maxInputSize),
- (status_t)OK);
+ err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize);
} else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) {
- CHECK_EQ(setMinBufferSize(kPortIndexInput, 8192), // XXX
- (status_t)OK);
+ err = setMinBufferSize(kPortIndexInput, 8192); // XXX
}
+
+ return err;
}
status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) {
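
configureCodec() now pulls its settings out of the configuration AMessage and reports problems as status codes instead of CHECK-failing. A sketch of the keys a caller would populate for a plain AAC decoder, based on the lookups above; the concrete values are placeholders:

    #include <media/stagefright/foundation/AMessage.h>
    #include <media/stagefright/MediaDefs.h>

    using namespace android;

    static sp<AMessage> makeAacDecoderFormat() {
        sp<AMessage> format = new AMessage;

        format->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);

        // Required for AAC: missing keys now yield INVALID_OPERATION
        // rather than an assertion failure.
        format->setInt32("channel-count", 2);      // placeholder
        format->setInt32("sample-rate", 44100);    // placeholder

        // Optional: caps the input buffer size negotiated with the component.
        format->setInt32("max-input-size", 8192);  // placeholder

        // For an encoder one would also set "encoder" = 1 and "bitrate".
        return format;
    }
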
@@ -819,12 +906,113 @@
return OK;
}
-status_t ACodec::setupAACDecoder(int32_t numChannels, int32_t sampleRate) {
+status_t ACodec::selectAudioPortFormat(
+ OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) {
+ OMX_AUDIO_PARAM_PORTFORMATTYPE format;
+ InitOMXParams(&format);
+
+ format.nPortIndex = portIndex;
+ for (OMX_U32 index = 0;; ++index) {
+ format.nIndex = index;
+
+ status_t err = mOMX->getParameter(
+ mNode, OMX_IndexParamAudioPortFormat,
+ &format, sizeof(format));
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (format.eEncoding == desiredFormat) {
+ break;
+ }
+ }
+
+ return mOMX->setParameter(
+ mNode, OMX_IndexParamAudioPortFormat, &format, sizeof(format));
+}
+
+status_t ACodec::setupAACCodec(
+ bool encoder,
+ int32_t numChannels, int32_t sampleRate, int32_t bitRate) {
+ status_t err = setupRawAudioFormat(
+ encoder ? kPortIndexInput : kPortIndexOutput,
+ sampleRate,
+ numChannels);
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (encoder) {
+ err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC);
+
+ if (err != OK) {
+ return err;
+ }
+
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = kPortIndexOutput;
+
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+ if (err != OK) {
+ return err;
+ }
+
+ def.format.audio.bFlagErrorConcealment = OMX_TRUE;
+ def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;
+
+ err = mOMX->setParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+ if (err != OK) {
+ return err;
+ }
+
+ OMX_AUDIO_PARAM_AACPROFILETYPE profile;
+ InitOMXParams(&profile);
+ profile.nPortIndex = kPortIndexOutput;
+
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
+
+ if (err != OK) {
+ return err;
+ }
+
+ profile.nChannels = numChannels;
+
+ profile.eChannelMode =
+ (numChannels == 1)
+ ? OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo;
+
+ profile.nSampleRate = sampleRate;
+ profile.nBitRate = bitRate;
+ profile.nAudioBandWidth = 0;
+ profile.nFrameLength = 0;
+ profile.nAACtools = OMX_AUDIO_AACToolAll;
+ profile.nAACERtools = OMX_AUDIO_AACERNone;
+ profile.eAACProfile = OMX_AUDIO_AACObjectLC;
+ profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF;
+
+ err = mOMX->setParameter(
+ mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
+
+ if (err != OK) {
+ return err;
+ }
+
+ return err;
+ }
+
OMX_AUDIO_PARAM_AACPROFILETYPE profile;
InitOMXParams(&profile);
profile.nPortIndex = kPortIndexInput;
- status_t err = mOMX->getParameter(
+ err = mOMX->getParameter(
mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
if (err != OK) {
@@ -835,16 +1023,59 @@
profile.nSampleRate = sampleRate;
profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4ADTS;
- err = mOMX->setParameter(
+ return mOMX->setParameter(
mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
-
- return err;
}
-status_t ACodec::setupAMRDecoder(bool isWAMR) {
+static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate(
+ bool isAMRWB, int32_t bps) {
+ if (isAMRWB) {
+ if (bps <= 6600) {
+ return OMX_AUDIO_AMRBandModeWB0;
+ } else if (bps <= 8850) {
+ return OMX_AUDIO_AMRBandModeWB1;
+ } else if (bps <= 12650) {
+ return OMX_AUDIO_AMRBandModeWB2;
+ } else if (bps <= 14250) {
+ return OMX_AUDIO_AMRBandModeWB3;
+ } else if (bps <= 15850) {
+ return OMX_AUDIO_AMRBandModeWB4;
+ } else if (bps <= 18250) {
+ return OMX_AUDIO_AMRBandModeWB5;
+ } else if (bps <= 19850) {
+ return OMX_AUDIO_AMRBandModeWB6;
+ } else if (bps <= 23050) {
+ return OMX_AUDIO_AMRBandModeWB7;
+ }
+
+ // 23850 bps
+ return OMX_AUDIO_AMRBandModeWB8;
+ } else { // AMRNB
+ if (bps <= 4750) {
+ return OMX_AUDIO_AMRBandModeNB0;
+ } else if (bps <= 5150) {
+ return OMX_AUDIO_AMRBandModeNB1;
+ } else if (bps <= 5900) {
+ return OMX_AUDIO_AMRBandModeNB2;
+ } else if (bps <= 6700) {
+ return OMX_AUDIO_AMRBandModeNB3;
+ } else if (bps <= 7400) {
+ return OMX_AUDIO_AMRBandModeNB4;
+ } else if (bps <= 7950) {
+ return OMX_AUDIO_AMRBandModeNB5;
+ } else if (bps <= 10200) {
+ return OMX_AUDIO_AMRBandModeNB6;
+ }
+
+ // 12200 bps
+ return OMX_AUDIO_AMRBandModeNB7;
+ }
+}
+
+status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) {
OMX_AUDIO_PARAM_AMRTYPE def;
InitOMXParams(&def);
- def.nPortIndex = kPortIndexInput;
+ def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput;
status_t err =
mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));
@@ -854,14 +1085,24 @@
}
def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF;
+ def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate);
- def.eAMRBandMode =
- isWAMR ? OMX_AUDIO_AMRBandModeWB0 : OMX_AUDIO_AMRBandModeNB0;
+ err = mOMX->setParameter(
+ mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));
- return mOMX->setParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));
+ if (err != OK) {
+ return err;
+ }
+
+ return setupRawAudioFormat(
+ encoder ? kPortIndexInput : kPortIndexOutput,
+ isWAMR ? 16000 : 8000 /* sampleRate */,
+ 1 /* numChannels */);
}
-status_t ACodec::setupG711Decoder(int32_t numChannels) {
+status_t ACodec::setupG711Codec(bool encoder, int32_t numChannels) {
+ CHECK(!encoder); // XXX TODO
+
return setupRawAudioFormat(
kPortIndexInput, 8000 /* sampleRate */, numChannels);
}
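
pickModeFromBitRate() maps the requested bit rate onto the lowest AMR band mode whose rate is at or above it (WB0 through WB8 for wideband, NB0 through NB7 for narrowband). A small standalone illustration of the wideband thresholds, kept free of the OMX headers so it compiles on its own; the function name and integer mode encoding are illustrative, not the patch's API:

    #include <cassert>
    #include <cstdint>

    // Mirrors the AMR-WB thresholds used in pickModeFromBitRate() above.
    static int wbModeForBitRate(int32_t bps) {
        static const int32_t kThresholds[] = {
            6600, 8850, 12650, 14250, 15850, 18250, 19850, 23050 };
        for (int mode = 0; mode < 8; ++mode) {
            if (bps <= kThresholds[mode]) {
                return mode;            // i.e. OMX_AUDIO_AMRBandModeWB0 + mode
            }
        }
        return 8;                       // 23850 bps -> OMX_AUDIO_AMRBandModeWB8
    }

    int main() {
        assert(wbModeForBitRate(6600) == 0);    // exactly the WB0 rate
        assert(wbModeForBitRate(12000) == 2);   // rounded up to the 12650 bps mode
        assert(wbModeForBitRate(24000) == 8);   // anything above 23050 gets WB8
        return 0;
    }
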
@@ -1001,22 +1242,36 @@
&format, sizeof(format));
}
-status_t ACodec::setupVideoDecoder(
- const char *mime, int32_t width, int32_t height) {
- OMX_VIDEO_CODINGTYPE compressionFormat = OMX_VIDEO_CodingUnused;
+static status_t GetVideoCodingTypeFromMime(
+ const char *mime, OMX_VIDEO_CODINGTYPE *codingType) {
if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime)) {
- compressionFormat = OMX_VIDEO_CodingAVC;
+ *codingType = OMX_VIDEO_CodingAVC;
} else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime)) {
- compressionFormat = OMX_VIDEO_CodingMPEG4;
+ *codingType = OMX_VIDEO_CodingMPEG4;
} else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) {
- compressionFormat = OMX_VIDEO_CodingH263;
+ *codingType = OMX_VIDEO_CodingH263;
} else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG2, mime)) {
- compressionFormat = OMX_VIDEO_CodingMPEG2;
+ *codingType = OMX_VIDEO_CodingMPEG2;
+ } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_VPX, mime)) {
+ *codingType = OMX_VIDEO_CodingVPX;
} else {
- TRESPASS();
+ *codingType = OMX_VIDEO_CodingUnused;
+ return ERROR_UNSUPPORTED;
}
- status_t err = setVideoPortFormatType(
+ return OK;
+}
+
+status_t ACodec::setupVideoDecoder(
+ const char *mime, int32_t width, int32_t height) {
+ OMX_VIDEO_CODINGTYPE compressionFormat;
+ status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat);
+
+ if (err != OK) {
+ return err;
+ }
+
+ err = setVideoPortFormatType(
kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused);
if (err != OK) {
@@ -1046,6 +1301,489 @@
return OK;
}
+status_t ACodec::setupVideoEncoder(const char *mime, const sp<AMessage> &msg) {
+ int32_t tmp;
+ if (!msg->findInt32("color-format", &tmp)) {
+ return INVALID_OPERATION;
+ }
+
+ OMX_COLOR_FORMATTYPE colorFormat =
+ static_cast<OMX_COLOR_FORMATTYPE>(tmp);
+
+ status_t err = setVideoPortFormatType(
+ kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat);
+
+ if (err != OK) {
+ ALOGE("[%s] does not support color format %d",
+ mComponentName.c_str(), colorFormat);
+
+ return err;
+ }
+
+ /* Input port configuration */
+
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+
+ OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
+
+ def.nPortIndex = kPortIndexInput;
+
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+ if (err != OK) {
+ return err;
+ }
+
+ int32_t width, height, bitrate;
+ if (!msg->findInt32("width", &width)
+ || !msg->findInt32("height", &height)
+ || !msg->findInt32("bitrate", &bitrate)) {
+ return INVALID_OPERATION;
+ }
+
+ video_def->nFrameWidth = width;
+ video_def->nFrameHeight = height;
+
+ int32_t stride;
+ if (!msg->findInt32("stride", &stride)) {
+ stride = width;
+ }
+
+ video_def->nStride = stride;
+
+ int32_t sliceHeight;
+ if (!msg->findInt32("slice-height", &sliceHeight)) {
+ sliceHeight = height;
+ }
+
+ video_def->nSliceHeight = sliceHeight;
+
+ def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2;
+
+ float frameRate;
+ if (!msg->findFloat("frame-rate", &frameRate)) {
+ int32_t tmp;
+ if (!msg->findInt32("frame-rate", &tmp)) {
+ return INVALID_OPERATION;
+ }
+ frameRate = (float)tmp;
+ }
+
+ video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
+ video_def->eCompressionFormat = OMX_VIDEO_CodingUnused;
+ video_def->eColorFormat = colorFormat;
+
+ err = mOMX->setParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+ if (err != OK) {
+ ALOGE("[%s] failed to set input port definition parameters.",
+ mComponentName.c_str());
+
+ return err;
+ }
+
+ /* Output port configuration */
+
+ OMX_VIDEO_CODINGTYPE compressionFormat;
+ err = GetVideoCodingTypeFromMime(mime, &compressionFormat);
+
+ if (err != OK) {
+ return err;
+ }
+
+ err = setVideoPortFormatType(
+ kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused);
+
+ if (err != OK) {
+ ALOGE("[%s] does not support compression format %d",
+ mComponentName.c_str(), compressionFormat);
+
+ return err;
+ }
+
+ def.nPortIndex = kPortIndexOutput;
+
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+ if (err != OK) {
+ return err;
+ }
+
+ video_def->nFrameWidth = width;
+ video_def->nFrameHeight = height;
+ video_def->xFramerate = 0;
+ video_def->nBitrate = bitrate;
+ video_def->eCompressionFormat = compressionFormat;
+ video_def->eColorFormat = OMX_COLOR_FormatUnused;
+
+ err = mOMX->setParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+ if (err != OK) {
+ ALOGE("[%s] failed to set output port definition parameters.",
+ mComponentName.c_str());
+
+ return err;
+ }
+
+ switch (compressionFormat) {
+ case OMX_VIDEO_CodingMPEG4:
+ err = setupMPEG4EncoderParameters(msg);
+ break;
+
+ case OMX_VIDEO_CodingH263:
+ err = setupH263EncoderParameters(msg);
+ break;
+
+ case OMX_VIDEO_CodingAVC:
+ err = setupAVCEncoderParameters(msg);
+ break;
+
+ default:
+ break;
+ }
+
+ ALOGI("setupVideoEncoder succeeded");
+
+ return err;
+}
+
+static OMX_U32 setPFramesSpacing(int32_t iFramesInterval, int32_t frameRate) {
+ if (iFramesInterval < 0) {
+ return 0xFFFFFFFF;
+ } else if (iFramesInterval == 0) {
+ return 0;
+ }
+ OMX_U32 ret = frameRate * iFramesInterval;
+ CHECK(ret > 1);
+ return ret;
+}
+
+status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) {
+ int32_t bitrate, iFrameInterval;
+ if (!msg->findInt32("bitrate", &bitrate)
+ || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
+ return INVALID_OPERATION;
+ }
+
+ float frameRate;
+ if (!msg->findFloat("frame-rate", &frameRate)) {
+ int32_t tmp;
+ if (!msg->findInt32("frame-rate", &tmp)) {
+ return INVALID_OPERATION;
+ }
+ frameRate = (float)tmp;
+ }
+
+ OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type;
+ InitOMXParams(&mpeg4type);
+ mpeg4type.nPortIndex = kPortIndexOutput;
+
+ status_t err = mOMX->getParameter(
+ mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));
+
+ if (err != OK) {
+ return err;
+ }
+
+ mpeg4type.nSliceHeaderSpacing = 0;
+ mpeg4type.bSVH = OMX_FALSE;
+ mpeg4type.bGov = OMX_FALSE;
+
+ mpeg4type.nAllowedPictureTypes =
+ OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;
+
+ mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate);
+ if (mpeg4type.nPFrames == 0) {
+ mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
+ }
+ mpeg4type.nBFrames = 0;
+ mpeg4type.nIDCVLCThreshold = 0;
+ mpeg4type.bACPred = OMX_TRUE;
+ mpeg4type.nMaxPacketSize = 256;
+ mpeg4type.nTimeIncRes = 1000;
+ mpeg4type.nHeaderExtension = 0;
+ mpeg4type.bReversibleVLC = OMX_FALSE;
+
+ int32_t profile;
+ if (msg->findInt32("profile", &profile)) {
+ int32_t level;
+ if (!msg->findInt32("level", &level)) {
+ return INVALID_OPERATION;
+ }
+
+ err = verifySupportForProfileAndLevel(profile, level);
+
+ if (err != OK) {
+ return err;
+ }
+
+ mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile);
+ mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level);
+ }
+
+ err = mOMX->setParameter(
+ mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));
+
+ if (err != OK) {
+ return err;
+ }
+
+ err = configureBitrate(bitrate);
+
+ if (err != OK) {
+ return err;
+ }
+
+ return setupErrorCorrectionParameters();
+}
+
+status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) {
+ int32_t bitrate, iFrameInterval;
+ if (!msg->findInt32("bitrate", &bitrate)
+ || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
+ return INVALID_OPERATION;
+ }
+
+ float frameRate;
+ if (!msg->findFloat("frame-rate", &frameRate)) {
+ int32_t tmp;
+ if (!msg->findInt32("frame-rate", &tmp)) {
+ return INVALID_OPERATION;
+ }
+ frameRate = (float)tmp;
+ }
+
+ OMX_VIDEO_PARAM_H263TYPE h263type;
+ InitOMXParams(&h263type);
+ h263type.nPortIndex = kPortIndexOutput;
+
+ status_t err = mOMX->getParameter(
+ mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));
+
+ if (err != OK) {
+ return err;
+ }
+
+ h263type.nAllowedPictureTypes =
+ OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;
+
+ h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate);
+ if (h263type.nPFrames == 0) {
+ h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
+ }
+ h263type.nBFrames = 0;
+
+ int32_t profile;
+ if (msg->findInt32("profile", &profile)) {
+ int32_t level;
+ if (!msg->findInt32("level", &level)) {
+ return INVALID_OPERATION;
+ }
+
+ err = verifySupportForProfileAndLevel(profile, level);
+
+ if (err != OK) {
+ return err;
+ }
+
+ h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile);
+ h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level);
+ }
+
+ h263type.bPLUSPTYPEAllowed = OMX_FALSE;
+ h263type.bForceRoundingTypeToZero = OMX_FALSE;
+ h263type.nPictureHeaderRepetition = 0;
+ h263type.nGOBHeaderInterval = 0;
+
+ err = mOMX->setParameter(
+ mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));
+
+ if (err != OK) {
+ return err;
+ }
+
+ err = configureBitrate(bitrate);
+
+ if (err != OK) {
+ return err;
+ }
+
+ return setupErrorCorrectionParameters();
+}
+
+status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) {
+ int32_t bitrate, iFrameInterval;
+ if (!msg->findInt32("bitrate", &bitrate)
+ || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
+ return INVALID_OPERATION;
+ }
+
+ float frameRate;
+ if (!msg->findFloat("frame-rate", &frameRate)) {
+ int32_t tmp;
+ if (!msg->findInt32("frame-rate", &tmp)) {
+ return INVALID_OPERATION;
+ }
+ frameRate = (float)tmp;
+ }
+
+ OMX_VIDEO_PARAM_AVCTYPE h264type;
+ InitOMXParams(&h264type);
+ h264type.nPortIndex = kPortIndexOutput;
+
+ status_t err = mOMX->getParameter(
+ mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
+
+ if (err != OK) {
+ return err;
+ }
+
+ h264type.nAllowedPictureTypes =
+ OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;
+
+ int32_t profile;
+ if (msg->findInt32("profile", &profile)) {
+ int32_t level;
+ if (!msg->findInt32("level", &level)) {
+ return INVALID_OPERATION;
+ }
+
+ err = verifySupportForProfileAndLevel(profile, level);
+
+ if (err != OK) {
+ return err;
+ }
+
+ h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile);
+ h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level);
+ }
+
+ // XXX
+ if (!strncmp(mComponentName.c_str(), "OMX.TI.DUCATI1", 14)) {
+ h264type.eProfile = OMX_VIDEO_AVCProfileBaseline;
+ }
+
+ if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) {
+ h264type.nSliceHeaderSpacing = 0;
+ h264type.bUseHadamard = OMX_TRUE;
+ h264type.nRefFrames = 1;
+ h264type.nBFrames = 0;
+ h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate);
+ if (h264type.nPFrames == 0) {
+ h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
+ }
+ h264type.nRefIdx10ActiveMinus1 = 0;
+ h264type.nRefIdx11ActiveMinus1 = 0;
+ h264type.bEntropyCodingCABAC = OMX_FALSE;
+ h264type.bWeightedPPrediction = OMX_FALSE;
+ h264type.bconstIpred = OMX_FALSE;
+ h264type.bDirect8x8Inference = OMX_FALSE;
+ h264type.bDirectSpatialTemporal = OMX_FALSE;
+ h264type.nCabacInitIdc = 0;
+ }
+
+ if (h264type.nBFrames != 0) {
+ h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB;
+ }
+
+ h264type.bEnableUEP = OMX_FALSE;
+ h264type.bEnableFMO = OMX_FALSE;
+ h264type.bEnableASO = OMX_FALSE;
+ h264type.bEnableRS = OMX_FALSE;
+ h264type.bFrameMBsOnly = OMX_TRUE;
+ h264type.bMBAFF = OMX_FALSE;
+ h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable;
+
+ if (!strcasecmp("OMX.Nvidia.h264.encoder", mComponentName.c_str())) {
+ h264type.eLevel = OMX_VIDEO_AVCLevelMax;
+ }
+
+ err = mOMX->setParameter(
+ mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
+
+ if (err != OK) {
+ return err;
+ }
+
+ return configureBitrate(bitrate);
+}
+
+status_t ACodec::verifySupportForProfileAndLevel(
+ int32_t profile, int32_t level) {
+ OMX_VIDEO_PARAM_PROFILELEVELTYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = kPortIndexOutput;
+
+ for (params.nProfileIndex = 0;; ++params.nProfileIndex) {
+ status_t err = mOMX->getParameter(
+ mNode,
+ OMX_IndexParamVideoProfileLevelQuerySupported,
+ &params,
+ sizeof(params));
+
+ if (err != OK) {
+ return err;
+ }
+
+ int32_t supportedProfile = static_cast<int32_t>(params.eProfile);
+ int32_t supportedLevel = static_cast<int32_t>(params.eLevel);
+
+ if (profile == supportedProfile && level <= supportedLevel) {
+ return OK;
+ }
+ }
+}
+
+status_t ACodec::configureBitrate(int32_t bitrate) {
+ OMX_VIDEO_PARAM_BITRATETYPE bitrateType;
+ InitOMXParams(&bitrateType);
+ bitrateType.nPortIndex = kPortIndexOutput;
+
+ status_t err = mOMX->getParameter(
+ mNode, OMX_IndexParamVideoBitrate,
+ &bitrateType, sizeof(bitrateType));
+
+ if (err != OK) {
+ return err;
+ }
+
+ bitrateType.eControlRate = OMX_Video_ControlRateVariable;
+ bitrateType.nTargetBitrate = bitrate;
+
+ return mOMX->setParameter(
+ mNode, OMX_IndexParamVideoBitrate,
+ &bitrateType, sizeof(bitrateType));
+}
+
+status_t ACodec::setupErrorCorrectionParameters() {
+ OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType;
+ InitOMXParams(&errorCorrectionType);
+ errorCorrectionType.nPortIndex = kPortIndexOutput;
+
+ status_t err = mOMX->getParameter(
+ mNode, OMX_IndexParamVideoErrorCorrection,
+ &errorCorrectionType, sizeof(errorCorrectionType));
+
+ if (err != OK) {
+ return OK; // Optional feature. Ignore this failure
+ }
+
+ errorCorrectionType.bEnableHEC = OMX_FALSE;
+ errorCorrectionType.bEnableResync = OMX_TRUE;
+ errorCorrectionType.nResynchMarkerSpacing = 256;
+ errorCorrectionType.bEnableDataPartitioning = OMX_FALSE;
+ errorCorrectionType.bEnableRVLC = OMX_FALSE;
+
+ return mOMX->setParameter(
+ mNode, OMX_IndexParamVideoErrorCorrection,
+ &errorCorrectionType, sizeof(errorCorrectionType));
+}
+
status_t ACodec::setVideoFormatOnPort(
OMX_U32 portIndex,
int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat) {
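
Two derived values in the encoder setup above are easy to get wrong: xFramerate is the frame rate in Q16 fixed point, and setPFramesSpacing() turns the "i-frame-interval" (in seconds) into frameRate * interval P-frames between I-frames (0 means I-frames only, a negative interval means effectively a single leading I-frame). A standalone check of that arithmetic with placeholder numbers:

    #include <cassert>
    #include <cstdint>

    int main() {
        // Placeholder encoder settings: 30 fps, one sync frame per second.
        const float frameRate = 30.0f;
        const int32_t iFrameIntervalSecs = 1;

        // OMX expects the frame rate in Q16 fixed point (see xFramerate above).
        const uint32_t xFramerate = (uint32_t)(frameRate * 65536.0f);
        assert(xFramerate == (30u << 16));

        // setPFramesSpacing(): frameRate * interval P-frames separate
        // consecutive I-frames when the interval is positive.
        const uint32_t nPFrames = (uint32_t)(frameRate * iFrameIntervalSecs);
        assert(nPFrames == 30);

        return 0;
    }
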
@@ -1166,6 +1904,9 @@
notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);
notify->setInt32("width", videoDef->nFrameWidth);
notify->setInt32("height", videoDef->nFrameHeight);
+ notify->setInt32("stride", videoDef->nStride);
+ notify->setInt32("slice-height", videoDef->nSliceHeight);
+ notify->setInt32("color-format", videoDef->eColorFormat);
OMX_CONFIG_RECTTYPE rect;
InitOMXParams(&rect);
@@ -1241,10 +1982,11 @@
mSentFormat = true;
}
-void ACodec::signalError(OMX_ERRORTYPE error) {
+void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) {
sp<AMessage> notify = mNotify->dup();
notify->setInt32("what", ACodec::kWhatError);
notify->setInt32("omx-error", error);
+ notify->setInt32("err", internalError);
notify->post();
}
@@ -1417,7 +2159,7 @@
notify->setPointer("buffer-id", info->mBufferID);
info->mData->meta()->clear();
- notify->setObject("buffer", info->mData);
+ notify->setBuffer("buffer", info->mData);
sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec->id());
reply->setPointer("buffer-id", info->mBufferID);
@@ -1433,18 +2175,26 @@
IOMX::buffer_id bufferID;
CHECK(msg->findPointer("buffer-id", &bufferID));
- sp<RefBase> obj;
+ sp<ABuffer> buffer;
int32_t err = OK;
- if (!msg->findObject("buffer", &obj)) {
+ bool eos = false;
+
+ if (!msg->findBuffer("buffer", &buffer)) {
CHECK(msg->findInt32("err", &err));
ALOGV("[%s] saw error %d instead of an input buffer",
mCodec->mComponentName.c_str(), err);
- obj.clear();
+ buffer.clear();
+
+ eos = true;
}
- sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());
+ int32_t tmp;
+ if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) {
+ eos = true;
+ err = ERROR_END_OF_STREAM;
+ }
BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_UPSTREAM);
@@ -1456,7 +2206,7 @@
switch (mode) {
case KEEP_BUFFERS:
{
- if (buffer == NULL) {
+ if (eos) {
if (!mCodec->mPortEOS[kPortIndexInput]) {
mCodec->mPortEOS[kPortIndexInput] = true;
mCodec->mInputEOSResult = err;
@@ -1467,9 +2217,7 @@
case RESUBMIT_BUFFERS:
{
- if (buffer != NULL) {
- CHECK(!mCodec->mPortEOS[kPortIndexInput]);
-
+ if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) {
int64_t timeUs;
CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
@@ -1480,6 +2228,10 @@
flags |= OMX_BUFFERFLAG_CODECCONFIG;
}
+ if (eos) {
+ flags |= OMX_BUFFERFLAG_EOS;
+ }
+
if (buffer != info->mData) {
if (0 && !(flags & OMX_BUFFERFLAG_CODECCONFIG)) {
ALOGV("[%s] Needs to copy input data.",
@@ -1493,6 +2245,9 @@
if (flags & OMX_BUFFERFLAG_CODECCONFIG) {
ALOGV("[%s] calling emptyBuffer %p w/ codec specific data",
mCodec->mComponentName.c_str(), bufferID);
+ } else if (flags & OMX_BUFFERFLAG_EOS) {
+ ALOGV("[%s] calling emptyBuffer %p w/ EOS",
+ mCodec->mComponentName.c_str(), bufferID);
} else {
ALOGV("[%s] calling emptyBuffer %p w/ time %lld us",
mCodec->mComponentName.c_str(), bufferID, timeUs);
@@ -1509,7 +2264,15 @@
info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
- getMoreInputDataIfPossible();
+ if (!eos) {
+ getMoreInputDataIfPossible();
+ } else {
+ ALOGV("[%s] Signalled EOS on the input port",
+ mCodec->mComponentName.c_str());
+
+ mCodec->mPortEOS[kPortIndexInput] = true;
+ mCodec->mInputEOSResult = err;
+ }
} else if (!mCodec->mPortEOS[kPortIndexInput]) {
if (err != ERROR_END_OF_STREAM) {
ALOGV("[%s] Signalling EOS on the input port "
@@ -1582,8 +2345,8 @@
int64_t timeUs,
void *platformPrivate,
void *dataPtr) {
- ALOGV("[%s] onOMXFillBufferDone %p time %lld us",
- mCodec->mComponentName.c_str(), bufferID, timeUs);
+ ALOGV("[%s] onOMXFillBufferDone %p time %lld us, flags = 0x%08lx",
+ mCodec->mComponentName.c_str(), bufferID, timeUs, flags);
ssize_t index;
BufferInfo *info =
@@ -1601,46 +2364,48 @@
case RESUBMIT_BUFFERS:
{
- if (rangeLength == 0) {
- if (!(flags & OMX_BUFFERFLAG_EOS)) {
- ALOGV("[%s] calling fillBuffer %p",
- mCodec->mComponentName.c_str(), info->mBufferID);
+ if (rangeLength == 0 && !(flags & OMX_BUFFERFLAG_EOS)) {
+ ALOGV("[%s] calling fillBuffer %p",
+ mCodec->mComponentName.c_str(), info->mBufferID);
- CHECK_EQ(mCodec->mOMX->fillBuffer(
- mCodec->mNode, info->mBufferID),
- (status_t)OK);
+ CHECK_EQ(mCodec->mOMX->fillBuffer(
+ mCodec->mNode, info->mBufferID),
+ (status_t)OK);
- info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
- }
- } else {
- if (!mCodec->mSentFormat) {
- mCodec->sendFormatChange();
- }
-
- if (mCodec->mNativeWindow == NULL) {
- info->mData->setRange(rangeOffset, rangeLength);
- }
-
- info->mData->meta()->setInt64("timeUs", timeUs);
-
- sp<AMessage> notify = mCodec->mNotify->dup();
- notify->setInt32("what", ACodec::kWhatDrainThisBuffer);
- notify->setPointer("buffer-id", info->mBufferID);
- notify->setObject("buffer", info->mData);
-
- sp<AMessage> reply =
- new AMessage(kWhatOutputBufferDrained, mCodec->id());
-
- reply->setPointer("buffer-id", info->mBufferID);
-
- notify->setMessage("reply", reply);
-
- notify->post();
-
- info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;
+ info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
+ break;
}
+ if (!mCodec->mIsEncoder && !mCodec->mSentFormat) {
+ mCodec->sendFormatChange();
+ }
+
+ if (mCodec->mNativeWindow == NULL) {
+ info->mData->setRange(rangeOffset, rangeLength);
+ }
+
+ info->mData->meta()->setInt64("timeUs", timeUs);
+
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", ACodec::kWhatDrainThisBuffer);
+ notify->setPointer("buffer-id", info->mBufferID);
+ notify->setBuffer("buffer", info->mData);
+ notify->setInt32("flags", flags);
+
+ sp<AMessage> reply =
+ new AMessage(kWhatOutputBufferDrained, mCodec->id());
+
+ reply->setPointer("buffer-id", info->mBufferID);
+
+ notify->setMessage("reply", reply);
+
+ notify->post();
+
+ info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;
+
if (flags & OMX_BUFFERFLAG_EOS) {
+ ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str());
+
sp<AMessage> notify = mCodec->mNotify->dup();
notify->setInt32("what", ACodec::kWhatEOS);
notify->setInt32("err", mCodec->mInputEOSResult);
@@ -1678,12 +2443,13 @@
&& msg->findInt32("render", &render) && render != 0) {
// The client wants this buffer to be rendered.
- if (mCodec->mNativeWindow->queueBuffer(
+ status_t err;
+ if ((err = mCodec->mNativeWindow->queueBuffer(
mCodec->mNativeWindow.get(),
- info->mGraphicBuffer.get()) == OK) {
+ info->mGraphicBuffer.get())) == OK) {
info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
} else {
- mCodec->signalError();
+ mCodec->signalError(OMX_ErrorUndefined, err);
info->mStatus = BufferInfo::OWNED_BY_US;
}
} else {
@@ -1746,6 +2512,10 @@
: BaseState(codec) {
}
+void ACodec::UninitializedState::stateEntered() {
+ ALOGV("Now uninitialized");
+}
+
bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
bool handled = false;
@@ -1758,8 +2528,20 @@
break;
}
+ case ACodec::kWhatAllocateComponent:
+ {
+ onAllocateComponent(msg);
+ handled = true;
+ break;
+ }
+
case ACodec::kWhatShutdown:
{
+ int32_t keepComponentAllocated;
+ CHECK(msg->findInt32(
+ "keepComponentAllocated", &keepComponentAllocated));
+ CHECK(!keepComponentAllocated);
+
sp<AMessage> notify = mCodec->mNotify->dup();
notify->setInt32("what", ACodec::kWhatShutdownCompleted);
notify->post();
@@ -1787,30 +2569,60 @@
void ACodec::UninitializedState::onSetup(
const sp<AMessage> &msg) {
+ if (onAllocateComponent(msg)
+ && mCodec->mLoadedState->onConfigureComponent(msg)) {
+ mCodec->mLoadedState->onStart();
+ }
+}
+
+bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
+ ALOGV("onAllocateComponent");
+
+ CHECK(mCodec->mNode == NULL);
+
OMXClient client;
CHECK_EQ(client.connect(), (status_t)OK);
sp<IOMX> omx = client.interface();
- AString mime;
- CHECK(msg->findString("mime", &mime));
-
Vector<String8> matchingCodecs;
- OMXCodec::findMatchingCodecs(
- mime.c_str(),
- false, // createEncoder
- NULL, // matchComponentName
- 0, // flags
- &matchingCodecs);
+ Vector<uint32_t> matchingCodecQuirks;
+
+ AString mime;
+
+ AString componentName;
+ uint32_t quirks;
+ if (msg->findString("componentName", &componentName)) {
+ matchingCodecs.push_back(String8(componentName.c_str()));
+
+ if (!OMXCodec::findCodecQuirks(componentName.c_str(), &quirks)) {
+ quirks = 0;
+ }
+ matchingCodecQuirks.push_back(quirks);
+ } else {
+ CHECK(msg->findString("mime", &mime));
+
+ int32_t encoder;
+ if (!msg->findInt32("encoder", &encoder)) {
+ encoder = false;
+ }
+
+ OMXCodec::findMatchingCodecs(
+ mime.c_str(),
+ encoder, // createEncoder
+ NULL, // matchComponentName
+ 0, // flags
+ &matchingCodecs,
+ &matchingCodecQuirks);
+ }
sp<CodecObserver> observer = new CodecObserver;
IOMX::node_id node = NULL;
- AString componentName;
-
for (size_t matchIndex = 0; matchIndex < matchingCodecs.size();
++matchIndex) {
componentName = matchingCodecs.itemAt(matchIndex).string();
+ quirks = matchingCodecQuirks.itemAt(matchIndex);
pid_t tid = androidGetTid();
int prevPriority = androidGetThreadPriority(tid);
@@ -1826,16 +2638,22 @@
}
if (node == NULL) {
- ALOGE("Unable to instantiate a decoder for type '%s'.", mime.c_str());
+ if (!mime.empty()) {
+ ALOGE("Unable to instantiate a decoder for type '%s'.",
+ mime.c_str());
+ } else {
+ ALOGE("Unable to instantiate decoder '%s'.", componentName.c_str());
+ }
mCodec->signalError(OMX_ErrorComponentNotFound);
- return;
+ return false;
}
sp<AMessage> notify = new AMessage(kWhatOMXMessage, mCodec->id());
observer->setNotificationMessage(notify);
mCodec->mComponentName = componentName;
+ mCodec->mQuirks = quirks;
mCodec->mOMX = omx;
mCodec->mNode = node;
@@ -1844,20 +2662,142 @@
mCodec->mInputEOSResult = OK;
- mCodec->configureCodec(mime.c_str(), msg);
+ {
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", ACodec::kWhatComponentAllocated);
+ notify->setString("componentName", mCodec->mComponentName.c_str());
+ notify->post();
+ }
+
+ mCodec->changeState(mCodec->mLoadedState);
+
+ return true;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+ACodec::LoadedState::LoadedState(ACodec *codec)
+ : BaseState(codec) {
+}
+
+void ACodec::LoadedState::stateEntered() {
+ ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str());
+
+ if (mCodec->mShutdownInProgress) {
+ bool keepComponentAllocated = mCodec->mKeepComponentAllocated;
+
+ mCodec->mShutdownInProgress = false;
+ mCodec->mKeepComponentAllocated = false;
+
+ onShutdown(keepComponentAllocated);
+ }
+}
+
+void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) {
+ if (!keepComponentAllocated) {
+ CHECK_EQ(mCodec->mOMX->freeNode(mCodec->mNode), (status_t)OK);
+
+ mCodec->mNativeWindow.clear();
+ mCodec->mNode = NULL;
+ mCodec->mOMX.clear();
+ mCodec->mQuirks = 0;
+ mCodec->mComponentName.clear();
+
+ mCodec->changeState(mCodec->mUninitializedState);
+ }
+
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", ACodec::kWhatShutdownCompleted);
+ notify->post();
+}
+
+bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) {
+ bool handled = false;
+
+ switch (msg->what()) {
+ case ACodec::kWhatConfigureComponent:
+ {
+ onConfigureComponent(msg);
+ handled = true;
+ break;
+ }
+
+ case ACodec::kWhatStart:
+ {
+ onStart();
+ handled = true;
+ break;
+ }
+
+ case ACodec::kWhatShutdown:
+ {
+ int32_t keepComponentAllocated;
+ CHECK(msg->findInt32(
+ "keepComponentAllocated", &keepComponentAllocated));
+
+ onShutdown(keepComponentAllocated);
+
+ handled = true;
+ break;
+ }
+
+ case ACodec::kWhatFlush:
+ {
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", ACodec::kWhatFlushCompleted);
+ notify->post();
+
+ handled = true;
+ break;
+ }
+
+ default:
+ return BaseState::onMessageReceived(msg);
+ }
+
+ return handled;
+}
+
+bool ACodec::LoadedState::onConfigureComponent(
+ const sp<AMessage> &msg) {
+ ALOGV("onConfigureComponent");
+
+ CHECK(mCodec->mNode != NULL);
+
+ AString mime;
+ CHECK(msg->findString("mime", &mime));
+
+ status_t err = mCodec->configureCodec(mime.c_str(), msg);
+
+ if (err != OK) {
+ mCodec->signalError(OMX_ErrorUndefined, err);
+ return false;
+ }
sp<RefBase> obj;
if (msg->findObject("native-window", &obj)
- && strncmp("OMX.google.", componentName.c_str(), 11)) {
+ && strncmp("OMX.google.", mCodec->mComponentName.c_str(), 11)) {
sp<NativeWindowWrapper> nativeWindow(
static_cast<NativeWindowWrapper *>(obj.get()));
CHECK(nativeWindow != NULL);
mCodec->mNativeWindow = nativeWindow->getNativeWindow();
}
-
CHECK_EQ((status_t)OK, mCodec->initNativeWindow());
- CHECK_EQ(omx->sendCommand(node, OMX_CommandStateSet, OMX_StateIdle),
+ {
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", ACodec::kWhatComponentConfigured);
+ notify->post();
+ }
+
+ return true;
+}
+
+void ACodec::LoadedState::onStart() {
+ ALOGV("onStart");
+
+ CHECK_EQ(mCodec->mOMX->sendCommand(
+ mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle),
(status_t)OK);
mCodec->changeState(mCodec->mLoadedToIdleState);
@@ -1878,7 +2818,7 @@
"(error 0x%08x)",
err);
- mCodec->signalError();
+ mCodec->signalError(OMX_ErrorUndefined, err);
}
}
@@ -2042,6 +2982,13 @@
switch (msg->what()) {
case kWhatShutdown:
{
+ int32_t keepComponentAllocated;
+ CHECK(msg->findInt32(
+ "keepComponentAllocated", &keepComponentAllocated));
+
+ mCodec->mShutdownInProgress = true;
+ mCodec->mKeepComponentAllocated = keepComponentAllocated;
+
mActive = false;
CHECK_EQ(mCodec->mOMX->sendCommand(
@@ -2202,7 +3149,7 @@
"port reconfiguration (error 0x%08x)",
err);
- mCodec->signalError();
+ mCodec->signalError(OMX_ErrorUndefined, err);
// This is technically not correct, since we were unable
// to allocate output buffers and therefore the output port
@@ -2240,7 +3187,8 @@
////////////////////////////////////////////////////////////////////////////////
ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec)
- : BaseState(codec) {
+ : BaseState(codec),
+ mComponentNowIdle(false) {
}
bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) {
@@ -2274,6 +3222,7 @@
void ACodec::ExecutingToIdleState::stateEntered() {
ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str());
+ mComponentNowIdle = false;
mCodec->mSentFormat = false;
}
@@ -2285,6 +3234,8 @@
CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet);
CHECK_EQ(data2, (OMX_U32)OMX_StateIdle);
+ mComponentNowIdle = true;
+
changeStateIfWeOwnAllBuffers();
return true;
@@ -2303,7 +3254,7 @@
}
void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() {
- if (mCodec->allYourBuffersAreBelongToUs()) {
+ if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) {
CHECK_EQ(mCodec->mOMX->sendCommand(
mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded),
(status_t)OK);
@@ -2375,20 +3326,7 @@
CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet);
CHECK_EQ(data2, (OMX_U32)OMX_StateLoaded);
- ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str());
-
- CHECK_EQ(mCodec->mOMX->freeNode(mCodec->mNode), (status_t)OK);
-
- mCodec->mNativeWindow.clear();
- mCodec->mNode = NULL;
- mCodec->mOMX.clear();
- mCodec->mComponentName.clear();
-
- mCodec->changeState(mCodec->mUninitializedState);
-
- sp<AMessage> notify = mCodec->mNotify->dup();
- notify->setInt32("what", ACodec::kWhatShutdownCompleted);
- notify->post();
+ mCodec->changeState(mCodec->mLoadedState);
return true;
}
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index 3f9ba47..21d6866 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -29,12 +29,15 @@
MPEG4Writer.cpp \
MediaBuffer.cpp \
MediaBufferGroup.cpp \
+ MediaCodec.cpp \
+ MediaCodecList.cpp \
MediaDefs.cpp \
MediaExtractor.cpp \
MediaSource.cpp \
MediaSourceSplitter.cpp \
MetaData.cpp \
NuCachedSource2.cpp \
+ NuMediaExtractor.cpp \
OMXClient.cpp \
OMXCodec.cpp \
OggExtractor.cpp \
@@ -56,25 +59,34 @@
LOCAL_C_INCLUDES:= \
$(JNI_H_INCLUDE) \
$(TOP)/frameworks/base/include/media/stagefright/openmax \
+ $(TOP)/frameworks/base/include/media/stagefright/timedtext \
+ $(TOP)/external/expat/lib \
$(TOP)/external/flac/include \
$(TOP)/external/tremolo \
$(TOP)/external/openssl/include \
LOCAL_SHARED_LIBRARIES := \
- libbinder \
- libmedia \
- libutils \
- libcutils \
- libui \
- libsonivox \
- libvorbisidec \
- libstagefright_yuv \
+ libbinder \
libcamera_client \
- libdrmframework \
- libcrypto \
- libssl \
- libgui \
+ libchromium_net \
+ libcrypto \
+ libcutils \
+ libdl \
+ libdrmframework \
+ libexpat \
+ libgui \
+ libicui18n \
+ libicuuc \
+ liblog \
+ libmedia \
+ libsonivox \
+ libssl \
libstagefright_omx \
+ libstagefright_yuv \
+ libui \
+ libutils \
+ libvorbisidec \
+ libz \
LOCAL_STATIC_LIBRARIES := \
libstagefright_color_conversion \
@@ -88,51 +100,14 @@
libstagefright_httplive \
libstagefright_id3 \
libFLAC \
+ libstagefright_chromium_http \
-################################################################################
-
-# The following was shamelessly copied from external/webkit/Android.mk and
-# currently must follow the same logic to determine how webkit was built and
-# if it's safe to link against libchromium_net
-
-# See if the user has specified a stack they want to use
-HTTP_STACK = $(HTTP)
-# We default to the Chrome HTTP stack.
-DEFAULT_HTTP = chrome
-ALT_HTTP = android
-
-ifneq ($(HTTP_STACK),chrome)
- ifneq ($(HTTP_STACK),android)
- # No HTTP stack is specified, pickup the one we want as default.
- ifeq ($(USE_ALT_HTTP),true)
- HTTP_STACK = $(ALT_HTTP)
- else
- HTTP_STACK = $(DEFAULT_HTTP)
- endif
- endif
-endif
-
-ifeq ($(HTTP_STACK),chrome)
-
-LOCAL_SHARED_LIBRARIES += \
- liblog \
- libicuuc \
- libicui18n \
- libz \
- libdl \
-
-LOCAL_STATIC_LIBRARIES += \
- libstagefright_chromium_http
-
-LOCAL_SHARED_LIBRARIES += libstlport libchromium_net
+LOCAL_SHARED_LIBRARIES += libstlport
include external/stlport/libstlport.mk
+# TODO: Chromium is always available, so this flag can be removed.
LOCAL_CPPFLAGS += -DCHROMIUM_AVAILABLE=1
-endif # ifeq ($(HTTP_STACK),chrome)
-
-################################################################################
-
LOCAL_SHARED_LIBRARIES += \
libstagefright_enc_common \
libstagefright_avc_common \
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp
index 70945e3..b21e86a 100644
--- a/media/libstagefright/AwesomePlayer.cpp
+++ b/media/libstagefright/AwesomePlayer.cpp
@@ -30,13 +30,12 @@
#include "include/MPEG2TSExtractor.h"
#include "include/WVMExtractor.h"
-#include "timedtext/TimedTextDriver.h"
-
#include <binder/IPCThreadState.h>
#include <binder/IServiceManager.h>
#include <media/IMediaPlayerService.h>
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/timedtext/TimedTextDriver.h>
#include <media/stagefright/AudioPlayer.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/FileSource.h>
@@ -47,10 +46,8 @@
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXCodec.h>
-#include <surfaceflinger/Surface.h>
#include <gui/ISurfaceTexture.h>
#include <gui/SurfaceTextureClient.h>
-#include <surfaceflinger/ISurfaceComposer.h>
#include <media/stagefright/foundation/AMessage.h>
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index ed1d5f4..2df5528 100755
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -27,7 +27,7 @@
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
-#include <surfaceflinger/Surface.h>
+#include <gui/Surface.h>
#include <utils/String8.h>
#include <cutils/properties.h>
diff --git a/media/libstagefright/MPEG2TSWriter.cpp b/media/libstagefright/MPEG2TSWriter.cpp
index 0b4ecbe..f702376 100644
--- a/media/libstagefright/MPEG2TSWriter.cpp
+++ b/media/libstagefright/MPEG2TSWriter.cpp
@@ -244,7 +244,7 @@
sp<AMessage> notify = mNotify->dup();
notify->setInt32("what", kNotifyBuffer);
- notify->setObject("buffer", out);
+ notify->setBuffer("buffer", out);
notify->setInt32("oob", true);
notify->post();
}
@@ -270,7 +270,7 @@
copy->meta()->setInt32("isSync", true);
}
- notify->setObject("buffer", copy);
+ notify->setBuffer("buffer", copy);
notify->post();
}
@@ -351,7 +351,7 @@
sp<AMessage> notify = mNotify->dup();
notify->setInt32("what", kNotifyBuffer);
- notify->setObject("buffer", mAACBuffer);
+ notify->setBuffer("buffer", mAACBuffer);
notify->post();
mAACBuffer.clear();
@@ -614,10 +614,8 @@
++mNumSourcesDone;
} else if (what == SourceInfo::kNotifyBuffer) {
- sp<RefBase> obj;
- CHECK(msg->findObject("buffer", &obj));
-
- sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());
+ sp<ABuffer> buffer;
+ CHECK(msg->findBuffer("buffer", &buffer));
int32_t oob;
if (msg->findInt32("oob", &oob) && oob) {
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
new file mode 100644
index 0000000..a9e7f36
--- /dev/null
+++ b/media/libstagefright/MediaCodec.cpp
@@ -0,0 +1,1217 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaCodec"
+#include <utils/Log.h>
+
+#include <media/stagefright/MediaCodec.h>
+
+#include "include/SoftwareRenderer.h"
+
+#include <gui/SurfaceTextureClient.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/ACodec.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/NativeWindowWrapper.h>
+
+namespace android {
+
+// static
+sp<MediaCodec> MediaCodec::CreateByType(
+ const sp<ALooper> &looper, const char *mime, bool encoder) {
+ sp<MediaCodec> codec = new MediaCodec(looper);
+ if (codec->init(mime, true /* nameIsType */, encoder) != OK) {
+ return NULL;
+ }
+
+ return codec;
+}
+
+// static
+sp<MediaCodec> MediaCodec::CreateByComponentName(
+ const sp<ALooper> &looper, const char *name) {
+ sp<MediaCodec> codec = new MediaCodec(looper);
+ if (codec->init(name, false /* nameIsType */, false /* encoder */) != OK) {
+ return NULL;
+ }
+
+ return codec;
+}
+
+MediaCodec::MediaCodec(const sp<ALooper> &looper)
+ : mState(UNINITIALIZED),
+ mLooper(looper),
+ mCodec(new ACodec),
+ mFlags(0),
+ mSoftRenderer(NULL),
+ mDequeueInputTimeoutGeneration(0),
+ mDequeueInputReplyID(0),
+ mDequeueOutputTimeoutGeneration(0),
+ mDequeueOutputReplyID(0) {
+}
+
+MediaCodec::~MediaCodec() {
+ CHECK_EQ(mState, UNINITIALIZED);
+}
+
+// static
+status_t MediaCodec::PostAndAwaitResponse(
+ const sp<AMessage> &msg, sp<AMessage> *response) {
+ status_t err = msg->postAndAwaitResponse(response);
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (!(*response)->findInt32("err", &err)) {
+ err = OK;
+ }
+
+ return err;
+}
+
+status_t MediaCodec::init(const char *name, bool nameIsType, bool encoder) {
+ // Current video decoders do not return from OMX_FillThisBuffer
+ // quickly, violating the OpenMAX specs. Until that is remedied,
+ // we need to invest in an extra looper to free the main event
+ // queue.
+ bool needDedicatedLooper = false;
+ if (nameIsType && !strncasecmp(name, "video/", 6)) {
+ needDedicatedLooper = true;
+ } else if (!nameIsType && !strncmp(name, "OMX.TI.DUCATI1.VIDEO.", 21)) {
+ needDedicatedLooper = true;
+ }
+
+ if (needDedicatedLooper) {
+ if (mCodecLooper == NULL) {
+ mCodecLooper = new ALooper;
+ mCodecLooper->setName("CodecLooper");
+ mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+ }
+
+ mCodecLooper->registerHandler(mCodec);
+ } else {
+ mLooper->registerHandler(mCodec);
+ }
+
+ mLooper->registerHandler(this);
+
+ mCodec->setNotificationMessage(new AMessage(kWhatCodecNotify, id()));
+
+ sp<AMessage> msg = new AMessage(kWhatInit, id());
+ msg->setString("name", name);
+ msg->setInt32("nameIsType", nameIsType);
+
+ if (nameIsType) {
+ msg->setInt32("encoder", encoder);
+ }
+
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
+status_t MediaCodec::configure(
+ const sp<AMessage> &format,
+ const sp<SurfaceTextureClient> &nativeWindow,
+ uint32_t flags) {
+ sp<AMessage> msg = new AMessage(kWhatConfigure, id());
+
+ msg->setMessage("format", format);
+ msg->setInt32("flags", flags);
+
+ if (nativeWindow != NULL) {
+ if (!(mFlags & kFlagIsSoftwareCodec)) {
+ msg->setObject(
+ "native-window",
+ new NativeWindowWrapper(nativeWindow));
+ } else {
+ mNativeWindow = nativeWindow;
+ }
+ }
+
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
+status_t MediaCodec::start() {
+ sp<AMessage> msg = new AMessage(kWhatStart, id());
+
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
+status_t MediaCodec::stop() {
+ sp<AMessage> msg = new AMessage(kWhatStop, id());
+
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
+status_t MediaCodec::release() {
+ sp<AMessage> msg = new AMessage(kWhatRelease, id());
+
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
+status_t MediaCodec::queueInputBuffer(
+ size_t index,
+ size_t offset,
+ size_t size,
+ int64_t presentationTimeUs,
+ uint32_t flags) {
+ sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, id());
+ msg->setSize("index", index);
+ msg->setSize("offset", offset);
+ msg->setSize("size", size);
+ msg->setInt64("timeUs", presentationTimeUs);
+ msg->setInt32("flags", flags);
+
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
+status_t MediaCodec::dequeueInputBuffer(size_t *index, int64_t timeoutUs) {
+ sp<AMessage> msg = new AMessage(kWhatDequeueInputBuffer, id());
+ msg->setInt64("timeoutUs", timeoutUs);
+
+ sp<AMessage> response;
+ status_t err;
+ if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
+ return err;
+ }
+
+ CHECK(response->findSize("index", index));
+
+ return OK;
+}
+
+status_t MediaCodec::dequeueOutputBuffer(
+ size_t *index,
+ size_t *offset,
+ size_t *size,
+ int64_t *presentationTimeUs,
+ uint32_t *flags,
+ int64_t timeoutUs) {
+ sp<AMessage> msg = new AMessage(kWhatDequeueOutputBuffer, id());
+ msg->setInt64("timeoutUs", timeoutUs);
+
+ sp<AMessage> response;
+ status_t err;
+ if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
+ return err;
+ }
+
+ CHECK(response->findSize("index", index));
+ CHECK(response->findSize("offset", offset));
+ CHECK(response->findSize("size", size));
+ CHECK(response->findInt64("timeUs", presentationTimeUs));
+ CHECK(response->findInt32("flags", (int32_t *)flags));
+
+ return OK;
+}
+
+status_t MediaCodec::renderOutputBufferAndRelease(size_t index) {
+ sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, id());
+ msg->setSize("index", index);
+ msg->setInt32("render", true);
+
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
+status_t MediaCodec::releaseOutputBuffer(size_t index) {
+ sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, id());
+ msg->setSize("index", index);
+
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
+status_t MediaCodec::getOutputFormat(sp<AMessage> *format) const {
+ sp<AMessage> msg = new AMessage(kWhatGetOutputFormat, id());
+
+ sp<AMessage> response;
+ status_t err;
+ if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
+ return err;
+ }
+
+ CHECK(response->findMessage("format", format));
+
+ return OK;
+}
+
+status_t MediaCodec::getInputBuffers(Vector<sp<ABuffer> > *buffers) const {
+ sp<AMessage> msg = new AMessage(kWhatGetBuffers, id());
+ msg->setInt32("portIndex", kPortIndexInput);
+ msg->setPointer("buffers", buffers);
+
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
+status_t MediaCodec::getOutputBuffers(Vector<sp<ABuffer> > *buffers) const {
+ sp<AMessage> msg = new AMessage(kWhatGetBuffers, id());
+ msg->setInt32("portIndex", kPortIndexOutput);
+ msg->setPointer("buffers", buffers);
+
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
+status_t MediaCodec::flush() {
+ sp<AMessage> msg = new AMessage(kWhatFlush, id());
+
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
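+
+// A minimal usage sketch of the synchronous API above, assuming the caller
+// already has a started ALooper ("looper"), a populated format message
+// ("format") and an optional SurfaceTextureClient ("surface"); those names
+// and the timeout/size values are placeholders, not part of this API:
+//
+//   sp<MediaCodec> codec = MediaCodec::CreateByType(looper, "video/avc", false);
+//   codec->configure(format, surface, 0 /* flags */);
+//   codec->start();
+//
+//   size_t index;
+//   if (codec->dequeueInputBuffer(&index, 10000ll) == OK) {
+//       // fill the input buffer at 'index' with one access unit, then:
+//       codec->queueInputBuffer(index, 0 /* offset */, size, timeUs, 0 /* flags */);
+//   }
+//
+//   size_t offset, outSize;
+//   int64_t outTimeUs;
+//   uint32_t outFlags;
+//   if (codec->dequeueOutputBuffer(
+//           &index, &offset, &outSize, &outTimeUs, &outFlags, 10000ll) == OK) {
+//       codec->renderOutputBufferAndRelease(index);
+//   }
+//
+//   codec->stop();
+//   codec->release();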
+
+////////////////////////////////////////////////////////////////////////////////
+
+void MediaCodec::cancelPendingDequeueOperations() {
+ if (mFlags & kFlagDequeueInputPending) {
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", INVALID_OPERATION);
+ response->postReply(mDequeueInputReplyID);
+
+ ++mDequeueInputTimeoutGeneration;
+ mDequeueInputReplyID = 0;
+ mFlags &= ~kFlagDequeueInputPending;
+ }
+
+ if (mFlags & kFlagDequeueOutputPending) {
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", INVALID_OPERATION);
+ response->postReply(mDequeueOutputReplyID);
+
+ ++mDequeueOutputTimeoutGeneration;
+ mDequeueOutputReplyID = 0;
+ mFlags &= ~kFlagDequeueOutputPending;
+ }
+}
+
+bool MediaCodec::handleDequeueInputBuffer(uint32_t replyID, bool newRequest) {
+ if (mState != STARTED
+ || (mFlags & kFlagStickyError)
+ || (newRequest && (mFlags & kFlagDequeueInputPending))) {
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", INVALID_OPERATION);
+
+ response->postReply(replyID);
+
+ return true;
+ }
+
+ ssize_t index = dequeuePortBuffer(kPortIndexInput);
+
+ if (index < 0) {
+ CHECK_EQ(index, -EAGAIN);
+ return false;
+ }
+
+ sp<AMessage> response = new AMessage;
+ response->setSize("index", index);
+ response->postReply(replyID);
+
+ return true;
+}
+
+bool MediaCodec::handleDequeueOutputBuffer(uint32_t replyID, bool newRequest) {
+ sp<AMessage> response = new AMessage;
+
+ if (mState != STARTED
+ || (mFlags & kFlagStickyError)
+ || (newRequest && (mFlags & kFlagDequeueOutputPending))) {
+ response->setInt32("err", INVALID_OPERATION);
+ } else if (mFlags & kFlagOutputBuffersChanged) {
+ response->setInt32("err", INFO_OUTPUT_BUFFERS_CHANGED);
+ mFlags &= ~kFlagOutputBuffersChanged;
+ } else if (mFlags & kFlagOutputFormatChanged) {
+ response->setInt32("err", INFO_FORMAT_CHANGED);
+ mFlags &= ~kFlagOutputFormatChanged;
+ } else {
+ ssize_t index = dequeuePortBuffer(kPortIndexOutput);
+
+ if (index < 0) {
+ CHECK_EQ(index, -EAGAIN);
+ return false;
+ }
+
+ const sp<ABuffer> &buffer =
+ mPortBuffers[kPortIndexOutput].itemAt(index).mData;
+
+ response->setSize("index", index);
+ response->setSize("offset", buffer->offset());
+ response->setSize("size", buffer->size());
+
+ int64_t timeUs;
+ CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+
+ response->setInt64("timeUs", timeUs);
+
+ int32_t omxFlags;
+ CHECK(buffer->meta()->findInt32("omxFlags", &omxFlags));
+
+ uint32_t flags = 0;
+ if (omxFlags & OMX_BUFFERFLAG_SYNCFRAME) {
+ flags |= BUFFER_FLAG_SYNCFRAME;
+ }
+ if (omxFlags & OMX_BUFFERFLAG_CODECCONFIG) {
+ flags |= BUFFER_FLAG_CODECCONFIG;
+ }
+ if (omxFlags & OMX_BUFFERFLAG_EOS) {
+ flags |= BUFFER_FLAG_EOS;
+ }
+
+ response->setInt32("flags", flags);
+ }
+
+ response->postReply(replyID);
+
+ return true;
+}
+
+void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
+ switch (msg->what()) {
+ case kWhatCodecNotify:
+ {
+ int32_t what;
+ CHECK(msg->findInt32("what", &what));
+
+ switch (what) {
+ case ACodec::kWhatError:
+ {
+ int32_t omxError, internalError;
+ CHECK(msg->findInt32("omx-error", &omxError));
+ CHECK(msg->findInt32("err", &internalError));
+
+ ALOGE("Codec reported an error. "
+ "(omx error 0x%08x, internalError %d)",
+ omxError, internalError);
+
+ bool sendErrorResponse = true;
+
+ switch (mState) {
+ case INITIALIZING:
+ {
+ setState(UNINITIALIZED);
+ break;
+ }
+
+ case CONFIGURING:
+ {
+ setState(INITIALIZED);
+ break;
+ }
+
+ case STARTING:
+ {
+ setState(CONFIGURED);
+ break;
+ }
+
+ case STOPPING:
+ case RELEASING:
+ {
+ // Ignore the error, assuming we'll still get
+ // the shutdown complete notification.
+
+ sendErrorResponse = false;
+ break;
+ }
+
+ case FLUSHING:
+ {
+ setState(STARTED);
+ break;
+ }
+
+ case STARTED:
+ {
+ sendErrorResponse = false;
+
+ mFlags |= kFlagStickyError;
+
+ cancelPendingDequeueOperations();
+ break;
+ }
+
+ default:
+ {
+ sendErrorResponse = false;
+
+ mFlags |= kFlagStickyError;
+ break;
+ }
+ }
+
+ if (sendErrorResponse) {
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", UNKNOWN_ERROR);
+
+ response->postReply(mReplyID);
+ }
+ break;
+ }
+
+ case ACodec::kWhatComponentAllocated:
+ {
+ CHECK_EQ(mState, INITIALIZING);
+ setState(INITIALIZED);
+
+ AString componentName;
+ CHECK(msg->findString("componentName", &componentName));
+
+ if (componentName.startsWith("OMX.google.")) {
+ mFlags |= kFlagIsSoftwareCodec;
+ } else {
+ mFlags &= ~kFlagIsSoftwareCodec;
+ }
+
+ (new AMessage)->postReply(mReplyID);
+ break;
+ }
+
+ case ACodec::kWhatComponentConfigured:
+ {
+ CHECK_EQ(mState, CONFIGURING);
+ setState(CONFIGURED);
+
+ (new AMessage)->postReply(mReplyID);
+ break;
+ }
+
+ case ACodec::kWhatBuffersAllocated:
+ {
+ int32_t portIndex;
+ CHECK(msg->findInt32("portIndex", &portIndex));
+
+ ALOGV("%s buffers allocated",
+ portIndex == kPortIndexInput ? "input" : "output");
+
+ CHECK(portIndex == kPortIndexInput
+ || portIndex == kPortIndexOutput);
+
+ mPortBuffers[portIndex].clear();
+
+ Vector<BufferInfo> *buffers = &mPortBuffers[portIndex];
+ for (size_t i = 0;; ++i) {
+ AString name = StringPrintf("buffer-id_%d", i);
+
+ void *bufferID;
+ if (!msg->findPointer(name.c_str(), &bufferID)) {
+ break;
+ }
+
+ name = StringPrintf("data_%d", i);
+
+ BufferInfo info;
+ info.mBufferID = bufferID;
+ info.mOwnedByClient = false;
+ CHECK(msg->findBuffer(name.c_str(), &info.mData));
+
+ buffers->push_back(info);
+ }
+
+ if (portIndex == kPortIndexOutput) {
+ if (mState == STARTING) {
+ // We're always allocating output buffers after
+ // allocating input buffers, so this is a good
+ // indication that now all buffers are allocated.
+ setState(STARTED);
+ (new AMessage)->postReply(mReplyID);
+ } else {
+ mFlags |= kFlagOutputBuffersChanged;
+ }
+ }
+ break;
+ }
+
+ case ACodec::kWhatOutputFormatChanged:
+ {
+ ALOGV("codec output format changed");
+
+ if ((mFlags & kFlagIsSoftwareCodec)
+ && mNativeWindow != NULL) {
+ AString mime;
+ CHECK(msg->findString("mime", &mime));
+
+ if (!strncasecmp("video/", mime.c_str(), 6)) {
+ delete mSoftRenderer;
+ mSoftRenderer = NULL;
+
+ int32_t width, height;
+ CHECK(msg->findInt32("width", &width));
+ CHECK(msg->findInt32("height", &height));
+
+ int32_t colorFormat;
+ CHECK(msg->findInt32(
+ "color-format", &colorFormat));
+
+ sp<MetaData> meta = new MetaData;
+ meta->setInt32(kKeyWidth, width);
+ meta->setInt32(kKeyHeight, height);
+ meta->setInt32(kKeyColorFormat, colorFormat);
+
+ mSoftRenderer =
+ new SoftwareRenderer(mNativeWindow, meta);
+ }
+ }
+
+ mOutputFormat = msg;
+ mFlags |= kFlagOutputFormatChanged;
+ break;
+ }
+
+ case ACodec::kWhatFillThisBuffer:
+ {
+ /* size_t index = */updateBuffers(kPortIndexInput, msg);
+
+ if (mState == FLUSHING
+ || mState == STOPPING
+ || mState == RELEASING) {
+ returnBuffersToCodecOnPort(kPortIndexInput);
+ break;
+ }
+
+ if (mFlags & kFlagDequeueInputPending) {
+ CHECK(handleDequeueInputBuffer(mDequeueInputReplyID));
+
+ ++mDequeueInputTimeoutGeneration;
+ mFlags &= ~kFlagDequeueInputPending;
+ mDequeueInputReplyID = 0;
+ }
+ break;
+ }
+
+ case ACodec::kWhatDrainThisBuffer:
+ {
+ /* size_t index = */updateBuffers(kPortIndexOutput, msg);
+
+ if (mState == FLUSHING
+ || mState == STOPPING
+ || mState == RELEASING) {
+ returnBuffersToCodecOnPort(kPortIndexOutput);
+ break;
+ }
+
+ sp<ABuffer> buffer;
+ CHECK(msg->findBuffer("buffer", &buffer));
+
+ int32_t omxFlags;
+ CHECK(msg->findInt32("flags", &omxFlags));
+
+ buffer->meta()->setInt32("omxFlags", omxFlags);
+
+ if (mFlags & kFlagDequeueOutputPending) {
+ CHECK(handleDequeueOutputBuffer(mDequeueOutputReplyID));
+
+ ++mDequeueOutputTimeoutGeneration;
+ mFlags &= ~kFlagDequeueOutputPending;
+ mDequeueOutputReplyID = 0;
+ }
+ break;
+ }
+
+ case ACodec::kWhatEOS:
+ {
+ // We already notify the client of this via the BUFFER_FLAG_EOS
+ // flag returned from dequeueOutputBuffer.
+ break;
+ }
+
+ case ACodec::kWhatShutdownCompleted:
+ {
+ if (mState == STOPPING) {
+ setState(INITIALIZED);
+ } else {
+ CHECK_EQ(mState, RELEASING);
+ setState(UNINITIALIZED);
+ }
+
+ (new AMessage)->postReply(mReplyID);
+ break;
+ }
+
+ case ACodec::kWhatFlushCompleted:
+ {
+ CHECK_EQ(mState, FLUSHING);
+ setState(STARTED);
+
+ mCodec->signalResume();
+
+ (new AMessage)->postReply(mReplyID);
+ break;
+ }
+
+ default:
+ TRESPASS();
+ }
+ break;
+ }
+
+ case kWhatInit:
+ {
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ if (mState != UNINITIALIZED) {
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", INVALID_OPERATION);
+
+ response->postReply(replyID);
+ break;
+ }
+
+ mReplyID = replyID;
+ setState(INITIALIZING);
+
+ AString name;
+ CHECK(msg->findString("name", &name));
+
+ int32_t nameIsType;
+ int32_t encoder = false;
+ CHECK(msg->findInt32("nameIsType", &nameIsType));
+ if (nameIsType) {
+ CHECK(msg->findInt32("encoder", &encoder));
+ }
+
+ sp<AMessage> format = new AMessage;
+
+ if (nameIsType) {
+ format->setString("mime", name.c_str());
+ format->setInt32("encoder", encoder);
+ } else {
+ format->setString("componentName", name.c_str());
+ }
+
+ mCodec->initiateAllocateComponent(format);
+ break;
+ }
+
+ case kWhatConfigure:
+ {
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ if (mState != INITIALIZED) {
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", INVALID_OPERATION);
+
+ response->postReply(replyID);
+ break;
+ }
+
+ mReplyID = replyID;
+ setState(CONFIGURING);
+
+ sp<RefBase> obj;
+ if (!msg->findObject("native-window", &obj)) {
+ obj.clear();
+ }
+
+ sp<AMessage> format;
+ CHECK(msg->findMessage("format", &format));
+
+ if (obj != NULL) {
+ format->setObject("native-window", obj);
+ }
+
+ uint32_t flags;
+ CHECK(msg->findInt32("flags", (int32_t *)&flags));
+
+ if (flags & CONFIGURE_FLAG_ENCODE) {
+ format->setInt32("encoder", true);
+ }
+
+ mCodec->initiateConfigureComponent(format);
+ break;
+ }
+
+ case kWhatStart:
+ {
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ if (mState != CONFIGURED) {
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", INVALID_OPERATION);
+
+ response->postReply(replyID);
+ break;
+ }
+
+ mReplyID = replyID;
+ setState(STARTING);
+
+ mCodec->initiateStart();
+ break;
+ }
+
+ case kWhatStop:
+ {
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ if (mState != INITIALIZED
+ && mState != CONFIGURED && mState != STARTED) {
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", INVALID_OPERATION);
+
+ response->postReply(replyID);
+ break;
+ }
+
+ mReplyID = replyID;
+ setState(STOPPING);
+
+ mCodec->initiateShutdown(true /* keepComponentAllocated */);
+ returnBuffersToCodec();
+ break;
+ }
+
+ case kWhatRelease:
+ {
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ if (mState != INITIALIZED
+ && mState != CONFIGURED && mState != STARTED) {
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", INVALID_OPERATION);
+
+ response->postReply(replyID);
+ break;
+ }
+
+ mReplyID = replyID;
+ setState(RELEASING);
+
+ mCodec->initiateShutdown();
+ returnBuffersToCodec();
+ break;
+ }
+
+ case kWhatDequeueInputBuffer:
+ {
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ if (handleDequeueInputBuffer(replyID, true /* new request */)) {
+ break;
+ }
+
+ int64_t timeoutUs;
+ CHECK(msg->findInt64("timeoutUs", &timeoutUs));
+
+ if (timeoutUs == 0ll) {
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", -EAGAIN);
+ response->postReply(replyID);
+ break;
+ }
+
+ mFlags |= kFlagDequeueInputPending;
+ mDequeueInputReplyID = replyID;
+
+ if (timeoutUs > 0ll) {
+ sp<AMessage> timeoutMsg =
+ new AMessage(kWhatDequeueInputTimedOut, id());
+ timeoutMsg->setInt32(
+ "generation", ++mDequeueInputTimeoutGeneration);
+ timeoutMsg->post(timeoutUs);
+ }
+ break;
+ }
+
+ case kWhatDequeueInputTimedOut:
+ {
+ int32_t generation;
+ CHECK(msg->findInt32("generation", &generation));
+
+ if (generation != mDequeueInputTimeoutGeneration) {
+ // Obsolete
+ break;
+ }
+
+ CHECK(mFlags & kFlagDequeueInputPending);
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", -EAGAIN);
+ response->postReply(mDequeueInputReplyID);
+
+ mFlags &= ~kFlagDequeueInputPending;
+ mDequeueInputReplyID = 0;
+ break;
+ }
+
+ case kWhatQueueInputBuffer:
+ {
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ if (mState != STARTED || (mFlags & kFlagStickyError)) {
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", INVALID_OPERATION);
+
+ response->postReply(replyID);
+ break;
+ }
+
+ status_t err = onQueueInputBuffer(msg);
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+ response->postReply(replyID);
+ break;
+ }
+
+ case kWhatDequeueOutputBuffer:
+ {
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ if (handleDequeueOutputBuffer(replyID, true /* new request */)) {
+ break;
+ }
+
+ int64_t timeoutUs;
+ CHECK(msg->findInt64("timeoutUs", &timeoutUs));
+
+ if (timeoutUs == 0ll) {
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", -EAGAIN);
+ response->postReply(replyID);
+ break;
+ }
+
+ mFlags |= kFlagDequeueOutputPending;
+ mDequeueOutputReplyID = replyID;
+
+ if (timeoutUs > 0ll) {
+ sp<AMessage> timeoutMsg =
+ new AMessage(kWhatDequeueOutputTimedOut, id());
+ timeoutMsg->setInt32(
+ "generation", ++mDequeueOutputTimeoutGeneration);
+ timeoutMsg->post(timeoutUs);
+ }
+ break;
+ }
+
+ case kWhatDequeueOutputTimedOut:
+ {
+ int32_t generation;
+ CHECK(msg->findInt32("generation", &generation));
+
+ if (generation != mDequeueOutputTimeoutGeneration) {
+ // Obsolete
+ break;
+ }
+
+ CHECK(mFlags & kFlagDequeueOutputPending);
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", -EAGAIN);
+ response->postReply(mDequeueOutputReplyID);
+
+ mFlags &= ~kFlagDequeueOutputPending;
+ mDequeueOutputReplyID = 0;
+ break;
+ }
+
+ case kWhatReleaseOutputBuffer:
+ {
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ if (mState != STARTED || (mFlags & kFlagStickyError)) {
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", INVALID_OPERATION);
+
+ response->postReply(replyID);
+ break;
+ }
+
+ status_t err = onReleaseOutputBuffer(msg);
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+ response->postReply(replyID);
+ break;
+ }
+
+ case kWhatGetBuffers:
+ {
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ if (mState != STARTED || (mFlags & kFlagStickyError)) {
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", INVALID_OPERATION);
+
+ response->postReply(replyID);
+ break;
+ }
+
+ int32_t portIndex;
+ CHECK(msg->findInt32("portIndex", &portIndex));
+
+ Vector<sp<ABuffer> > *dstBuffers;
+ CHECK(msg->findPointer("buffers", (void **)&dstBuffers));
+
+ dstBuffers->clear();
+ const Vector<BufferInfo> &srcBuffers = mPortBuffers[portIndex];
+
+ for (size_t i = 0; i < srcBuffers.size(); ++i) {
+ const BufferInfo &info = srcBuffers.itemAt(i);
+
+ dstBuffers->push_back(info.mData);
+ }
+
+ (new AMessage)->postReply(replyID);
+ break;
+ }
+
+ case kWhatFlush:
+ {
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ if (mState != STARTED || (mFlags & kFlagStickyError)) {
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", INVALID_OPERATION);
+
+ response->postReply(replyID);
+ break;
+ }
+
+ mReplyID = replyID;
+ setState(FLUSHING);
+
+ mCodec->signalFlush();
+ returnBuffersToCodec();
+ break;
+ }
+
+ case kWhatGetOutputFormat:
+ {
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ if ((mState != STARTED && mState != FLUSHING)
+ || (mFlags & kFlagStickyError)) {
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", INVALID_OPERATION);
+
+ response->postReply(replyID);
+ break;
+ }
+
+ sp<AMessage> response = new AMessage;
+ response->setMessage("format", mOutputFormat);
+ response->postReply(replyID);
+ break;
+ }
+
+ default:
+ TRESPASS();
+ }
+}
+
+void MediaCodec::setState(State newState) {
+ if (newState == UNINITIALIZED) {
+ delete mSoftRenderer;
+ mSoftRenderer = NULL;
+
+ mNativeWindow.clear();
+
+ mOutputFormat.clear();
+ mFlags &= ~kFlagOutputFormatChanged;
+ mFlags &= ~kFlagOutputBuffersChanged;
+ mFlags &= ~kFlagStickyError;
+ }
+
+ mState = newState;
+
+ cancelPendingDequeueOperations();
+}
+
+void MediaCodec::returnBuffersToCodec() {
+ returnBuffersToCodecOnPort(kPortIndexInput);
+ returnBuffersToCodecOnPort(kPortIndexOutput);
+}
+
+void MediaCodec::returnBuffersToCodecOnPort(int32_t portIndex) {
+ CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
+
+ Vector<BufferInfo> *buffers = &mPortBuffers[portIndex];
+
+ for (size_t i = 0; i < buffers->size(); ++i) {
+ BufferInfo *info = &buffers->editItemAt(i);
+
+ if (info->mNotify != NULL) {
+ sp<AMessage> msg = info->mNotify;
+ info->mNotify = NULL;
+ info->mOwnedByClient = false;
+
+ if (portIndex == kPortIndexInput) {
+ msg->setInt32("err", ERROR_END_OF_STREAM);
+ }
+ msg->post();
+ }
+ }
+
+ mAvailPortBuffers[portIndex].clear();
+}
+
+size_t MediaCodec::updateBuffers(
+ int32_t portIndex, const sp<AMessage> &msg) {
+ CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
+
+ void *bufferID;
+ CHECK(msg->findPointer("buffer-id", &bufferID));
+
+ Vector<BufferInfo> *buffers = &mPortBuffers[portIndex];
+
+ for (size_t i = 0; i < buffers->size(); ++i) {
+ BufferInfo *info = &buffers->editItemAt(i);
+
+ if (info->mBufferID == bufferID) {
+ CHECK(info->mNotify == NULL);
+ CHECK(msg->findMessage("reply", &info->mNotify));
+
+ mAvailPortBuffers[portIndex].push_back(i);
+
+ return i;
+ }
+ }
+
+ TRESPASS();
+
+ return 0;
+}
+
+status_t MediaCodec::onQueueInputBuffer(const sp<AMessage> &msg) {
+ size_t index;
+ size_t offset;
+ size_t size;
+ int64_t timeUs;
+ uint32_t flags;
+ CHECK(msg->findSize("index", &index));
+ CHECK(msg->findSize("offset", &offset));
+ CHECK(msg->findSize("size", &size));
+ CHECK(msg->findInt64("timeUs", &timeUs));
+ CHECK(msg->findInt32("flags", (int32_t *)&flags));
+
+ if (index >= mPortBuffers[kPortIndexInput].size()) {
+ return -ERANGE;
+ }
+
+ BufferInfo *info = &mPortBuffers[kPortIndexInput].editItemAt(index);
+
+ if (info->mNotify == NULL || !info->mOwnedByClient) {
+ return -EACCES;
+ }
+
+ if (offset + size > info->mData->capacity()) {
+ return -EINVAL;
+ }
+
+ sp<AMessage> reply = info->mNotify;
+ info->mNotify = NULL;
+ info->mOwnedByClient = false;
+
+ info->mData->setRange(offset, size);
+ info->mData->meta()->setInt64("timeUs", timeUs);
+
+ if (flags & BUFFER_FLAG_EOS) {
+ info->mData->meta()->setInt32("eos", true);
+ }
+
+ if (flags & BUFFER_FLAG_CODECCONFIG) {
+ info->mData->meta()->setInt32("csd", true);
+ }
+
+ reply->setBuffer("buffer", info->mData);
+ reply->post();
+
+ return OK;
+}
+
+status_t MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) {
+ size_t index;
+ CHECK(msg->findSize("index", &index));
+
+ int32_t render;
+ if (!msg->findInt32("render", &render)) {
+ render = 0;
+ }
+
+ if (mState != STARTED) {
+ return -EINVAL;
+ }
+
+ if (index >= mPortBuffers[kPortIndexOutput].size()) {
+ return -ERANGE;
+ }
+
+ BufferInfo *info = &mPortBuffers[kPortIndexOutput].editItemAt(index);
+
+ if (info->mNotify == NULL || !info->mOwnedByClient) {
+ return -EACCES;
+ }
+
+ if (render) {
+ info->mNotify->setInt32("render", true);
+
+ if (mSoftRenderer != NULL) {
+ mSoftRenderer->render(
+ info->mData->data(), info->mData->size(), NULL);
+ }
+ }
+
+ info->mNotify->post();
+ info->mNotify = NULL;
+ info->mOwnedByClient = false;
+
+ return OK;
+}
+
+ssize_t MediaCodec::dequeuePortBuffer(int32_t portIndex) {
+ CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
+
+ List<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
+
+ if (availBuffers->empty()) {
+ return -EAGAIN;
+ }
+
+ size_t index = *availBuffers->begin();
+ availBuffers->erase(availBuffers->begin());
+
+ BufferInfo *info = &mPortBuffers[portIndex].editItemAt(index);
+ CHECK(!info->mOwnedByClient);
+ info->mOwnedByClient = true;
+
+ return index;
+}
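+
+// Buffer ownership in the helpers above, in brief: updateBuffers() stashes the
+// codec's "reply" message in BufferInfo::mNotify and marks the slot available,
+// dequeuePortBuffer() hands the slot to the client (mOwnedByClient = true), and
+// onQueueInputBuffer() / onReleaseOutputBuffer() post the stored reply back to
+// ACodec and clear ownership again.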
+
+} // namespace android
diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp
new file mode 100644
index 0000000..6b64e21
--- /dev/null
+++ b/media/libstagefright/MediaCodecList.cpp
@@ -0,0 +1,475 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaCodecList"
+#include <utils/Log.h>
+
+#include <media/stagefright/MediaCodecList.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaErrors.h>
+#include <utils/threads.h>
+
+#include <expat.h>
+
+namespace android {
+
+static Mutex sInitMutex;
+
+// static
+MediaCodecList *MediaCodecList::sCodecList;
+
+// static
+const MediaCodecList *MediaCodecList::getInstance() {
+ Mutex::Autolock autoLock(sInitMutex);
+
+ if (sCodecList == NULL) {
+ sCodecList = new MediaCodecList;
+ }
+
+ return sCodecList->initCheck() == OK ? sCodecList : NULL;
+}
+
+MediaCodecList::MediaCodecList()
+ : mInitCheck(NO_INIT) {
+ FILE *file = fopen("/etc/media_codecs.xml", "r");
+
+ if (file == NULL) {
+ ALOGW("unable to open media codecs configuration xml file.");
+ return;
+ }
+
+ parseXMLFile(file);
+
+ if (mInitCheck == OK) {
+ // These are currently still used by the video editing suite.
+
+ addMediaCodec(true /* encoder */, "AACEncoder", "audio/mp4a-latm");
+ addMediaCodec(true /* encoder */, "AVCEncoder", "video/avc");
+
+ addMediaCodec(true /* encoder */, "M4vH263Encoder");
+ addType("video/3gpp");
+ addType("video/mp4v-es");
+ }
+
+#if 0
+ for (size_t i = 0; i < mCodecInfos.size(); ++i) {
+ const CodecInfo &info = mCodecInfos.itemAt(i);
+
+ AString line = info.mName;
+ line.append(" supports ");
+ for (size_t j = 0; j < mTypes.size(); ++j) {
+ uint32_t value = mTypes.valueAt(j);
+
+ if (info.mTypes & (1ul << value)) {
+ line.append(mTypes.keyAt(j));
+ line.append(" ");
+ }
+ }
+
+ ALOGI("%s", line.c_str());
+ }
+#endif
+
+ fclose(file);
+ file = NULL;
+}
+
+MediaCodecList::~MediaCodecList() {
+}
+
+status_t MediaCodecList::initCheck() const {
+ return mInitCheck;
+}
+
+void MediaCodecList::parseXMLFile(FILE *file) {
+ mInitCheck = OK;
+ mCurrentSection = SECTION_TOPLEVEL;
+ mDepth = 0;
+
+ XML_Parser parser = ::XML_ParserCreate(NULL);
+ CHECK(parser != NULL);
+
+ ::XML_SetUserData(parser, this);
+ ::XML_SetElementHandler(
+ parser, StartElementHandlerWrapper, EndElementHandlerWrapper);
+
+ const int BUFF_SIZE = 512;
+ while (mInitCheck == OK) {
+ void *buff = ::XML_GetBuffer(parser, BUFF_SIZE);
+ if (buff == NULL) {
+ ALOGE("failed to in call to XML_GetBuffer()");
+ mInitCheck = UNKNOWN_ERROR;
+ break;
+ }
+
+ int bytes_read = ::fread(buff, 1, BUFF_SIZE, file);
+ if (bytes_read < 0) {
+ ALOGE("failed in call to read");
+ mInitCheck = ERROR_IO;
+ break;
+ }
+
+ if (::XML_ParseBuffer(parser, bytes_read, bytes_read == 0)
+ != XML_STATUS_OK) {
+ mInitCheck = ERROR_MALFORMED;
+ break;
+ }
+
+ if (bytes_read == 0) {
+ break;
+ }
+ }
+
+ ::XML_ParserFree(parser);
+
+ if (mInitCheck == OK) {
+ for (size_t i = mCodecInfos.size(); i-- > 0;) {
+ CodecInfo *info = &mCodecInfos.editItemAt(i);
+
+ if (info->mTypes == 0) {
+ // No types supported by this component???
+
+ ALOGW("Component %s does not support any type of media?",
+ info->mName.c_str());
+
+ mCodecInfos.removeAt(i);
+ }
+ }
+ }
+
+ if (mInitCheck != OK) {
+ mCodecInfos.clear();
+ mCodecQuirks.clear();
+ }
+}
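+
+// A sketch of the element shape this parser accepts in /etc/media_codecs.xml;
+// the component names, mime types and quirk name below are illustrative only:
+//
+//   <MediaCodecs>
+//       <Decoders>
+//           <MediaCodec name="OMX.example.avc.decoder" type="video/avc">
+//               <Quirk name="some-quirk" />
+//               <Type name="video/3gpp" />
+//           </MediaCodec>
+//       </Decoders>
+//       <Encoders>
+//           <MediaCodec name="OMX.example.aac.encoder" type="audio/mp4a-latm" />
+//       </Encoders>
+//   </MediaCodecs>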
+
+// static
+void MediaCodecList::StartElementHandlerWrapper(
+ void *me, const char *name, const char **attrs) {
+ static_cast<MediaCodecList *>(me)->startElementHandler(name, attrs);
+}
+
+// static
+void MediaCodecList::EndElementHandlerWrapper(void *me, const char *name) {
+ static_cast<MediaCodecList *>(me)->endElementHandler(name);
+}
+
+void MediaCodecList::startElementHandler(
+ const char *name, const char **attrs) {
+ if (mInitCheck != OK) {
+ return;
+ }
+
+ switch (mCurrentSection) {
+ case SECTION_TOPLEVEL:
+ {
+ if (!strcmp(name, "Decoders")) {
+ mCurrentSection = SECTION_DECODERS;
+ } else if (!strcmp(name, "Encoders")) {
+ mCurrentSection = SECTION_ENCODERS;
+ }
+ break;
+ }
+
+ case SECTION_DECODERS:
+ {
+ if (!strcmp(name, "MediaCodec")) {
+ mInitCheck =
+ addMediaCodecFromAttributes(false /* encoder */, attrs);
+
+ mCurrentSection = SECTION_DECODER;
+ }
+ break;
+ }
+
+ case SECTION_ENCODERS:
+ {
+ if (!strcmp(name, "MediaCodec")) {
+ mInitCheck =
+ addMediaCodecFromAttributes(true /* encoder */, attrs);
+
+ mCurrentSection = SECTION_ENCODER;
+ }
+ break;
+ }
+
+ case SECTION_DECODER:
+ case SECTION_ENCODER:
+ {
+ if (!strcmp(name, "Quirk")) {
+ mInitCheck = addQuirk(attrs);
+ } else if (!strcmp(name, "Type")) {
+ mInitCheck = addTypeFromAttributes(attrs);
+ }
+ break;
+ }
+
+ default:
+ break;
+ }
+
+ ++mDepth;
+}
+
+void MediaCodecList::endElementHandler(const char *name) {
+ if (mInitCheck != OK) {
+ return;
+ }
+
+ switch (mCurrentSection) {
+ case SECTION_DECODERS:
+ {
+ if (!strcmp(name, "Decoders")) {
+ mCurrentSection = SECTION_TOPLEVEL;
+ }
+ break;
+ }
+
+ case SECTION_ENCODERS:
+ {
+ if (!strcmp(name, "Encoders")) {
+ mCurrentSection = SECTION_TOPLEVEL;
+ }
+ break;
+ }
+
+ case SECTION_DECODER:
+ {
+ if (!strcmp(name, "MediaCodec")) {
+ mCurrentSection = SECTION_DECODERS;
+ }
+ break;
+ }
+
+ case SECTION_ENCODER:
+ {
+ if (!strcmp(name, "MediaCodec")) {
+ mCurrentSection = SECTION_ENCODERS;
+ }
+ break;
+ }
+
+ default:
+ break;
+ }
+
+ --mDepth;
+}
+
+status_t MediaCodecList::addMediaCodecFromAttributes(
+ bool encoder, const char **attrs) {
+ const char *name = NULL;
+ const char *type = NULL;
+
+ size_t i = 0;
+ while (attrs[i] != NULL) {
+ if (!strcmp(attrs[i], "name")) {
+ if (attrs[i + 1] == NULL) {
+ return -EINVAL;
+ }
+ name = attrs[i + 1];
+ ++i;
+ } else if (!strcmp(attrs[i], "type")) {
+ if (attrs[i + 1] == NULL) {
+ return -EINVAL;
+ }
+ type = attrs[i + 1];
+ ++i;
+ } else {
+ return -EINVAL;
+ }
+
+ ++i;
+ }
+
+ if (name == NULL) {
+ return -EINVAL;
+ }
+
+ addMediaCodec(encoder, name, type);
+
+ return OK;
+}
+
+void MediaCodecList::addMediaCodec(
+ bool encoder, const char *name, const char *type) {
+ mCodecInfos.push();
+ CodecInfo *info = &mCodecInfos.editItemAt(mCodecInfos.size() - 1);
+ info->mName = name;
+ info->mIsEncoder = encoder;
+ info->mTypes = 0;
+ info->mQuirks = 0;
+
+ if (type != NULL) {
+ addType(type);
+ }
+}
+
+status_t MediaCodecList::addQuirk(const char **attrs) {
+ const char *name = NULL;
+
+ size_t i = 0;
+ while (attrs[i] != NULL) {
+ if (!strcmp(attrs[i], "name")) {
+ if (attrs[i + 1] == NULL) {
+ return -EINVAL;
+ }
+ name = attrs[i + 1];
+ ++i;
+ } else {
+ return -EINVAL;
+ }
+
+ ++i;
+ }
+
+ if (name == NULL) {
+ return -EINVAL;
+ }
+
+ uint32_t bit;
+ ssize_t index = mCodecQuirks.indexOfKey(name);
+ if (index < 0) {
+ bit = mCodecQuirks.size();
+
+ if (bit == 32) {
+ ALOGW("Too many distinct quirk names in configuration.");
+ return OK;
+ }
+
+ mCodecQuirks.add(name, bit);
+ } else {
+ bit = mCodecQuirks.valueAt(index);
+ }
+
+ CodecInfo *info = &mCodecInfos.editItemAt(mCodecInfos.size() - 1);
+ info->mQuirks |= 1ul << bit;
+
+ return OK;
+}
+
+status_t MediaCodecList::addTypeFromAttributes(const char **attrs) {
+ const char *name = NULL;
+
+ size_t i = 0;
+ while (attrs[i] != NULL) {
+ if (!strcmp(attrs[i], "name")) {
+ if (attrs[i + 1] == NULL) {
+ return -EINVAL;
+ }
+ name = attrs[i + 1];
+ ++i;
+ } else {
+ return -EINVAL;
+ }
+
+ ++i;
+ }
+
+ if (name == NULL) {
+ return -EINVAL;
+ }
+
+ addType(name);
+
+ return OK;
+}
+
+void MediaCodecList::addType(const char *name) {
+ uint32_t bit;
+ ssize_t index = mTypes.indexOfKey(name);
+ if (index < 0) {
+ bit = mTypes.size();
+
+ if (bit == 32) {
+ ALOGW("Too many distinct type names in configuration.");
+ return;
+ }
+
+ mTypes.add(name, bit);
+ } else {
+ bit = mTypes.valueAt(index);
+ }
+
+ CodecInfo *info = &mCodecInfos.editItemAt(mCodecInfos.size() - 1);
+ info->mTypes |= 1ul << bit;
+}
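+
+// Type and quirk names are interned into bit positions (at most 32 distinct
+// names each, keyed by mTypes / mCodecQuirks), so each CodecInfo only stores
+// the OR of the bits it supports; findCodecByType() below just tests
+// info.mTypes against (1ul << bit) for the requested mime type.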
+
+ssize_t MediaCodecList::findCodecByType(
+ const char *type, bool encoder, size_t startIndex) const {
+ ssize_t typeIndex = mTypes.indexOfKey(type);
+
+ if (typeIndex < 0) {
+ return -ENOENT;
+ }
+
+ uint32_t typeMask = 1ul << mTypes.valueAt(typeIndex);
+
+ while (startIndex < mCodecInfos.size()) {
+ const CodecInfo &info = mCodecInfos.itemAt(startIndex);
+
+ if (info.mIsEncoder == encoder && (info.mTypes & typeMask)) {
+ return startIndex;
+ }
+
+ ++startIndex;
+ }
+
+ return -ENOENT;
+}
+
+ssize_t MediaCodecList::findCodecByName(const char *name) const {
+ for (size_t i = 0; i < mCodecInfos.size(); ++i) {
+ const CodecInfo &info = mCodecInfos.itemAt(i);
+
+ if (info.mName == name) {
+ return i;
+ }
+ }
+
+ return -ENOENT;
+}
+
+const char *MediaCodecList::getCodecName(size_t index) const {
+ if (index >= mCodecInfos.size()) {
+ return NULL;
+ }
+
+ const CodecInfo &info = mCodecInfos.itemAt(index);
+ return info.mName.c_str();
+}
+
+bool MediaCodecList::codecHasQuirk(
+ size_t index, const char *quirkName) const {
+ if (index >= mCodecInfos.size()) {
+ return false;
+ }
+
+ const CodecInfo &info = mCodecInfos.itemAt(index);
+
+ if (info.mQuirks != 0) {
+ ssize_t index = mCodecQuirks.indexOfKey(quirkName);
+ if (index >= 0 && info.mQuirks & (1ul << mCodecQuirks.valueAt(index))) {
+ return true;
+ }
+ }
+
+ return false;
+}
+
+} // namespace android
diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp
new file mode 100644
index 0000000..afd4763
--- /dev/null
+++ b/media/libstagefright/NuMediaExtractor.cpp
@@ -0,0 +1,433 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuMediaExtractor"
+#include <utils/Log.h>
+
+#include <media/stagefright/NuMediaExtractor.h>
+
+#include "include/ESDS.h"
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaExtractor.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+
+namespace android {
+
+NuMediaExtractor::NuMediaExtractor() {
+}
+
+NuMediaExtractor::~NuMediaExtractor() {
+ releaseTrackSamples();
+
+ for (size_t i = 0; i < mSelectedTracks.size(); ++i) {
+ TrackInfo *info = &mSelectedTracks.editItemAt(i);
+
+ CHECK_EQ((status_t)OK, info->mSource->stop());
+ }
+
+ mSelectedTracks.clear();
+}
+
+status_t NuMediaExtractor::setDataSource(const char *path) {
+ sp<DataSource> dataSource = DataSource::CreateFromURI(path);
+
+ if (dataSource == NULL) {
+ return -ENOENT;
+ }
+
+ mImpl = MediaExtractor::Create(dataSource);
+
+ if (mImpl == NULL) {
+ return ERROR_UNSUPPORTED;
+ }
+
+ return OK;
+}
+
+size_t NuMediaExtractor::countTracks() const {
+ return mImpl == NULL ? 0 : mImpl->countTracks();
+}
+
+status_t NuMediaExtractor::getTrackFormat(
+ size_t index, sp<AMessage> *format) const {
+ *format = NULL;
+
+ if (mImpl == NULL) {
+ return -EINVAL;
+ }
+
+ if (index >= mImpl->countTracks()) {
+ return -ERANGE;
+ }
+
+ sp<MetaData> meta = mImpl->getTrackMetaData(index);
+
+ const char *mime;
+ CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+ sp<AMessage> msg = new AMessage;
+ msg->setString("mime", mime);
+
+ if (!strncasecmp("video/", mime, 6)) {
+ int32_t width, height;
+ CHECK(meta->findInt32(kKeyWidth, &width));
+ CHECK(meta->findInt32(kKeyHeight, &height));
+
+ msg->setInt32("width", width);
+ msg->setInt32("height", height);
+ } else {
+ CHECK(!strncasecmp("audio/", mime, 6));
+
+ int32_t numChannels, sampleRate;
+ CHECK(meta->findInt32(kKeyChannelCount, &numChannels));
+ CHECK(meta->findInt32(kKeySampleRate, &sampleRate));
+
+ msg->setInt32("channel-count", numChannels);
+ msg->setInt32("sample-rate", sampleRate);
+ }
+
+ int32_t maxInputSize;
+ if (meta->findInt32(kKeyMaxInputSize, &maxInputSize)) {
+ msg->setInt32("max-input-size", maxInputSize);
+ }
+
+ uint32_t type;
+ const void *data;
+ size_t size;
+ if (meta->findData(kKeyAVCC, &type, &data, &size)) {
+ // Parse the AVCDecoderConfigurationRecord
+
+ const uint8_t *ptr = (const uint8_t *)data;
+
+ CHECK(size >= 7);
+ CHECK_EQ((unsigned)ptr[0], 1u); // configurationVersion == 1
+ uint8_t profile = ptr[1];
+ uint8_t level = ptr[3];
+
+ // There is decodable content out there that fails the following
+ // assertion; let's be lenient for now...
+ // CHECK((ptr[4] >> 2) == 0x3f); // reserved
+
+ size_t lengthSize = 1 + (ptr[4] & 3);
+
+ // commented out check below as H264_QVGA_500_NO_AUDIO.3gp
+ // violates it...
+ // CHECK((ptr[5] >> 5) == 7); // reserved
+
+ size_t numSeqParameterSets = ptr[5] & 31;
+
+ ptr += 6;
+ size -= 6;
+
+ sp<ABuffer> buffer = new ABuffer(1024);
+ buffer->setRange(0, 0);
+
+ for (size_t i = 0; i < numSeqParameterSets; ++i) {
+ CHECK(size >= 2);
+ size_t length = U16_AT(ptr);
+
+ ptr += 2;
+ size -= 2;
+
+ CHECK(size >= length);
+
+ memcpy(buffer->data() + buffer->size(), "\x00\x00\x00\x01", 4);
+ memcpy(buffer->data() + buffer->size() + 4, ptr, length);
+ buffer->setRange(0, buffer->size() + 4 + length);
+
+ ptr += length;
+ size -= length;
+ }
+
+ buffer->meta()->setInt32("csd", true);
+ buffer->meta()->setInt64("timeUs", 0);
+
+ msg->setBuffer("csd-0", buffer);
+
+ buffer = new ABuffer(1024);
+ buffer->setRange(0, 0);
+
+ CHECK(size >= 1);
+ size_t numPictureParameterSets = *ptr;
+ ++ptr;
+ --size;
+
+ for (size_t i = 0; i < numPictureParameterSets; ++i) {
+ CHECK(size >= 2);
+ size_t length = U16_AT(ptr);
+
+ ptr += 2;
+ size -= 2;
+
+ CHECK(size >= length);
+
+ memcpy(buffer->data() + buffer->size(), "\x00\x00\x00\x01", 4);
+ memcpy(buffer->data() + buffer->size() + 4, ptr, length);
+ buffer->setRange(0, buffer->size() + 4 + length);
+
+ ptr += length;
+ size -= length;
+ }
+
+ buffer->meta()->setInt32("csd", true);
+ buffer->meta()->setInt64("timeUs", 0);
+ msg->setBuffer("csd-1", buffer);
+ } else if (meta->findData(kKeyESDS, &type, &data, &size)) {
+ ESDS esds((const char *)data, size);
+ CHECK_EQ(esds.InitCheck(), (status_t)OK);
+
+ const void *codec_specific_data;
+ size_t codec_specific_data_size;
+ esds.getCodecSpecificInfo(
+ &codec_specific_data, &codec_specific_data_size);
+
+ sp<ABuffer> buffer = new ABuffer(codec_specific_data_size);
+
+ memcpy(buffer->data(), codec_specific_data,
+ codec_specific_data_size);
+
+ buffer->meta()->setInt32("csd", true);
+ buffer->meta()->setInt64("timeUs", 0);
+ msg->setBuffer("csd-0", buffer);
+ } else if (meta->findData(kKeyVorbisInfo, &type, &data, &size)) {
+ sp<ABuffer> buffer = new ABuffer(size);
+ memcpy(buffer->data(), data, size);
+
+ buffer->meta()->setInt32("csd", true);
+ buffer->meta()->setInt64("timeUs", 0);
+ msg->setBuffer("csd-0", buffer);
+
+ if (!meta->findData(kKeyVorbisBooks, &type, &data, &size)) {
+ return -EINVAL;
+ }
+
+ buffer = new ABuffer(size);
+ memcpy(buffer->data(), data, size);
+
+ buffer->meta()->setInt32("csd", true);
+ buffer->meta()->setInt64("timeUs", 0);
+ msg->setBuffer("csd-1", buffer);
+ }
+
+ *format = msg;
+
+ return OK;
+}
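// Editor's sketch (not part of the patch): reading back the AMessage built by
// getTrackFormat() above. The key names match the set* calls; "extractor" and
// the track index "i" are assumed to exist in the caller.

    sp<AMessage> format;
    if (extractor->getTrackFormat(i, &format) == OK) {
        AString mime;
        CHECK(format->findString("mime", &mime));

        sp<ABuffer> csd;
        if (format->findBuffer("csd-0", &csd)) {
            // for AVC this holds the sequence parameter sets, each prefixed
            // with a 00 00 00 01 start code, as assembled above
        }
    }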
+
+status_t NuMediaExtractor::selectTrack(size_t index) {
+ if (mImpl == NULL) {
+ return -EINVAL;
+ }
+
+ if (index >= mImpl->countTracks()) {
+ return -ERANGE;
+ }
+
+ for (size_t i = 0; i < mSelectedTracks.size(); ++i) {
+ TrackInfo *info = &mSelectedTracks.editItemAt(i);
+
+ if (info->mTrackIndex == index) {
+ // This track has already been selected.
+ return OK;
+ }
+ }
+
+ sp<MediaSource> source = mImpl->getTrack(index);
+
+ CHECK_EQ((status_t)OK, source->start());
+
+ mSelectedTracks.push();
+ TrackInfo *info = &mSelectedTracks.editItemAt(mSelectedTracks.size() - 1);
+
+ info->mSource = source;
+ info->mTrackIndex = index;
+ info->mFinalResult = OK;
+ info->mSample = NULL;
+ info->mSampleTimeUs = -1ll;
+ info->mFlags = 0;
+
+ const char *mime;
+ CHECK(source->getFormat()->findCString(kKeyMIMEType, &mime));
+
+ if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
+ info->mFlags |= kIsVorbis;
+ }
+
+ return OK;
+}
+
+void NuMediaExtractor::releaseTrackSamples() {
+ for (size_t i = 0; i < mSelectedTracks.size(); ++i) {
+ TrackInfo *info = &mSelectedTracks.editItemAt(i);
+
+ if (info->mSample != NULL) {
+ info->mSample->release();
+ info->mSample = NULL;
+
+ info->mSampleTimeUs = -1ll;
+ }
+ }
+}
+
+ssize_t NuMediaExtractor::fetchTrackSamples(int64_t seekTimeUs) {
+ TrackInfo *minInfo = NULL;
+ ssize_t minIndex = -1;
+
+ for (size_t i = 0; i < mSelectedTracks.size(); ++i) {
+ TrackInfo *info = &mSelectedTracks.editItemAt(i);
+
+ if (seekTimeUs >= 0ll) {
+ info->mFinalResult = OK;
+
+ if (info->mSample != NULL) {
+ info->mSample->release();
+ info->mSample = NULL;
+ info->mSampleTimeUs = -1ll;
+ }
+ } else if (info->mFinalResult != OK) {
+ continue;
+ }
+
+ if (info->mSample == NULL) {
+ MediaSource::ReadOptions options;
+ if (seekTimeUs >= 0ll) {
+ options.setSeekTo(seekTimeUs);
+ }
+ status_t err = info->mSource->read(&info->mSample, &options);
+
+ if (err != OK) {
+ CHECK(info->mSample == NULL);
+
+ info->mFinalResult = err;
+ info->mSampleTimeUs = -1ll;
+ continue;
+ } else {
+ CHECK(info->mSample != NULL);
+ CHECK(info->mSample->meta_data()->findInt64(
+ kKeyTime, &info->mSampleTimeUs));
+ }
+ }
+
+ if (minInfo == NULL || info->mSampleTimeUs < minInfo->mSampleTimeUs) {
+ minInfo = info;
+ minIndex = i;
+ }
+ }
+
+ return minIndex;
+}
+
+status_t NuMediaExtractor::seekTo(int64_t timeUs) {
+ return fetchTrackSamples(timeUs);
+}
+
+status_t NuMediaExtractor::advance() {
+ ssize_t minIndex = fetchTrackSamples();
+
+ if (minIndex < 0) {
+ return ERROR_END_OF_STREAM;
+ }
+
+ TrackInfo *info = &mSelectedTracks.editItemAt(minIndex);
+
+ info->mSample->release();
+ info->mSample = NULL;
+ info->mSampleTimeUs = -1ll;
+
+ return OK;
+}
+
+status_t NuMediaExtractor::readSampleData(const sp<ABuffer> &buffer) {
+ ssize_t minIndex = fetchTrackSamples();
+
+ if (minIndex < 0) {
+ return ERROR_END_OF_STREAM;
+ }
+
+ TrackInfo *info = &mSelectedTracks.editItemAt(minIndex);
+
+ size_t sampleSize = info->mSample->range_length();
+
+ if (info->mFlags & kIsVorbis) {
+ // Each sample's data is suffixed by the number of page samples
+ // or -1 if not available.
+ sampleSize += sizeof(int32_t);
+ }
+
+ if (buffer->capacity() < sampleSize) {
+ return -ENOMEM;
+ }
+
+ const uint8_t *src =
+ (const uint8_t *)info->mSample->data()
+ + info->mSample->range_offset();
+
+ memcpy((uint8_t *)buffer->data(), src, info->mSample->range_length());
+
+ if (info->mFlags & kIsVorbis) {
+ int32_t numPageSamples;
+ if (!info->mSample->meta_data()->findInt32(
+ kKeyValidSamples, &numPageSamples)) {
+ numPageSamples = -1;
+ }
+
+ memcpy((uint8_t *)buffer->data() + info->mSample->range_length(),
+ &numPageSamples,
+ sizeof(numPageSamples));
+ }
+
+ buffer->setRange(0, sampleSize);
+
+ return OK;
+}
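// Editor's sketch (not part of the patch): recovering the page-sample count
// appended above. "buffer" is the same ABuffer passed to readSampleData(),
// and the caller is assumed to know the track is Vorbis.

    int32_t numPageSamples;
    memcpy(&numPageSamples,
           buffer->data() + buffer->size() - sizeof(numPageSamples),
           sizeof(numPageSamples));
    // numPageSamples == -1 means kKeyValidSamples was absent from the sample.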
+
+status_t NuMediaExtractor::getSampleTrackIndex(size_t *trackIndex) {
+ ssize_t minIndex = fetchTrackSamples();
+
+ if (minIndex < 0) {
+ return ERROR_END_OF_STREAM;
+ }
+
+ TrackInfo *info = &mSelectedTracks.editItemAt(minIndex);
+ *trackIndex = info->mTrackIndex;
+
+ return OK;
+}
+
+status_t NuMediaExtractor::getSampleTime(int64_t *sampleTimeUs) {
+ ssize_t minIndex = fetchTrackSamples();
+
+ if (minIndex < 0) {
+ return ERROR_END_OF_STREAM;
+ }
+
+ TrackInfo *info = &mSelectedTracks.editItemAt(minIndex);
+ *sampleTimeUs = info->mSampleTimeUs;
+
+ return OK;
+}
+
+} // namespace android
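// Editor's note: a minimal end-to-end sketch (not part of the patch) of the
// NuMediaExtractor API defined above. The file path and buffer size are
// illustrative assumptions.

    sp<NuMediaExtractor> extractor = new NuMediaExtractor;
    CHECK_EQ((status_t)OK, extractor->setDataSource("/sdcard/clip.mp4"));

    for (size_t i = 0; i < extractor->countTracks(); ++i) {
        extractor->selectTrack(i);
    }

    sp<ABuffer> buffer = new ABuffer(1024 * 1024);  // assumed large enough
    while (extractor->readSampleData(buffer) == OK) {
        size_t trackIndex;
        int64_t timeUs;
        CHECK_EQ((status_t)OK, extractor->getSampleTrackIndex(&trackIndex));
        CHECK_EQ((status_t)OK, extractor->getSampleTime(&timeUs));

        // ...hand buffer to the decoder selected for trackIndex...

        extractor->advance();  // returns ERROR_END_OF_STREAM once all tracks drain
    }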
diff --git a/media/libstagefright/OMXClient.cpp b/media/libstagefright/OMXClient.cpp
index 7a805aa..7cdb793 100644
--- a/media/libstagefright/OMXClient.cpp
+++ b/media/libstagefright/OMXClient.cpp
@@ -335,6 +335,10 @@
}
void OMXClient::disconnect() {
+ if (mOMX.get() != NULL) {
+ mOMX.clear();
+ mOMX = NULL;
+ }
}
} // namespace android
diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
index 470f750..966416e 100755
--- a/media/libstagefright/OMXCodec.cpp
+++ b/media/libstagefright/OMXCodec.cpp
@@ -33,6 +33,7 @@
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaBufferGroup.h>
#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaExtractor.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXCodec.h>
@@ -57,11 +58,6 @@
// component in question is buggy or not.
const static uint32_t kMaxColorFormatSupported = 1000;
-struct CodecInfo {
- const char *mime;
- const char *codec;
-};
-
#define FACTORY_CREATE_ENCODER(name) \
static sp<MediaSource> Make##name(const sp<MediaSource> &source, const sp<MetaData> &meta) { \
return new name(source, meta); \
@@ -96,83 +92,8 @@
return NULL;
}
+#undef FACTORY_CREATE_ENCODER
#undef FACTORY_REF
-#undef FACTORY_CREATE
-
-static const CodecInfo kDecoderInfo[] = {
- { MEDIA_MIMETYPE_IMAGE_JPEG, "OMX.TI.JPEG.decode" },
-// { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.TI.MP3.decode" },
- { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.google.mp3.decoder" },
- { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II, "OMX.Nvidia.mp2.decoder" },
-// { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.TI.AMR.decode" },
-// { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.Nvidia.amr.decoder" },
- { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.google.amrnb.decoder" },
-// { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.Nvidia.amrwb.decoder" },
- { MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.TI.WBAMR.decode" },
- { MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.google.amrwb.decoder" },
-// { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.Nvidia.aac.decoder" },
- { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.TI.AAC.decode" },
- { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.google.aac.decoder" },
- { MEDIA_MIMETYPE_AUDIO_G711_ALAW, "OMX.google.g711.alaw.decoder" },
- { MEDIA_MIMETYPE_AUDIO_G711_MLAW, "OMX.google.g711.mlaw.decoder" },
- { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.DUCATI1.VIDEO.DECODER" },
- { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.Nvidia.mp4.decode" },
- { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.7x30.video.decoder.mpeg4" },
- { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.video.decoder.mpeg4" },
- { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.Video.Decoder" },
- { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.SEC.MPEG4.Decoder" },
- { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.google.mpeg4.decoder" },
- { MEDIA_MIMETYPE_VIDEO_H263, "OMX.TI.DUCATI1.VIDEO.DECODER" },
- { MEDIA_MIMETYPE_VIDEO_H263, "OMX.Nvidia.h263.decode" },
- { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.7x30.video.decoder.h263" },
- { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.video.decoder.h263" },
- { MEDIA_MIMETYPE_VIDEO_H263, "OMX.SEC.H263.Decoder" },
- { MEDIA_MIMETYPE_VIDEO_H263, "OMX.google.h263.decoder" },
- { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.DUCATI1.VIDEO.DECODER" },
- { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.Nvidia.h264.decode" },
- { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.7x30.video.decoder.avc" },
- { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.video.decoder.avc" },
- { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.Video.Decoder" },
- { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.SEC.AVC.Decoder" },
- { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.google.h264.decoder" },
- { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.google.avc.decoder" },
- { MEDIA_MIMETYPE_AUDIO_VORBIS, "OMX.google.vorbis.decoder" },
- { MEDIA_MIMETYPE_VIDEO_VPX, "OMX.google.vpx.decoder" },
- { MEDIA_MIMETYPE_VIDEO_MPEG2, "OMX.Nvidia.mpeg2v.decode" },
-};
-
-static const CodecInfo kEncoderInfo[] = {
- { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.TI.AMR.encode" },
- { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.google.amrnb.encoder" },
- { MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.TI.WBAMR.encode" },
- { MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.google.amrwb.encoder" },
- { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.TI.AAC.encode" },
- { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.google.aac.encoder" },
- { MEDIA_MIMETYPE_AUDIO_AAC, "AACEncoder" },
- { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.DUCATI1.VIDEO.MPEG4E" },
- { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.7x30.video.encoder.mpeg4" },
- { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.video.encoder.mpeg4" },
- { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.Video.encoder" },
- { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.Nvidia.mp4.encoder" },
- { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.SEC.MPEG4.Encoder" },
- { MEDIA_MIMETYPE_VIDEO_MPEG4, "M4vH263Encoder" },
- { MEDIA_MIMETYPE_VIDEO_H263, "OMX.TI.DUCATI1.VIDEO.MPEG4E" },
- { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.7x30.video.encoder.h263" },
- { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.video.encoder.h263" },
- { MEDIA_MIMETYPE_VIDEO_H263, "OMX.TI.Video.encoder" },
- { MEDIA_MIMETYPE_VIDEO_H263, "OMX.Nvidia.h263.encoder" },
- { MEDIA_MIMETYPE_VIDEO_H263, "OMX.SEC.H263.Encoder" },
- { MEDIA_MIMETYPE_VIDEO_H263, "M4vH263Encoder" },
- { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.DUCATI1.VIDEO.H264E" },
- { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.7x30.video.encoder.avc" },
- { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.video.encoder.avc" },
- { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.Video.encoder" },
- { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.Nvidia.h264.encoder" },
- { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.SEC.AVC.Encoder" },
- { MEDIA_MIMETYPE_VIDEO_AVC, "AVCEncoder" },
-};
-
-#undef OPTIONAL
#define CODEC_LOGI(x, ...) ALOGI("[%s] "x, mComponentName, ##__VA_ARGS__)
#define CODEC_LOGV(x, ...) ALOGV("[%s] "x, mComponentName, ##__VA_ARGS__)
@@ -207,22 +128,6 @@
OMXCodecObserver &operator=(const OMXCodecObserver &);
};
-static const char *GetCodec(const CodecInfo *info, size_t numInfos,
- const char *mime, int index) {
- CHECK(index >= 0);
- for(size_t i = 0; i < numInfos; ++i) {
- if (!strcasecmp(mime, info[i].mime)) {
- if (index == 0) {
- return info[i].codec;
- }
-
- --index;
- }
- }
-
- return NULL;
-}
-
template<class T>
static void InitOMXParams(T *params) {
params->nSize = sizeof(T);
@@ -278,119 +183,36 @@
}
// static
-uint32_t OMXCodec::getComponentQuirks(
- const char *componentName, bool isEncoder) {
- uint32_t quirks = 0;
-
- if (!strcmp(componentName, "OMX.Nvidia.amr.decoder") ||
- !strcmp(componentName, "OMX.Nvidia.amrwb.decoder") ||
- !strcmp(componentName, "OMX.Nvidia.aac.decoder") ||
- !strcmp(componentName, "OMX.Nvidia.mp3.decoder")) {
- quirks |= kDecoderLiesAboutNumberOfChannels;
- }
-
- if (!strcmp(componentName, "OMX.TI.MP3.decode")) {
- quirks |= kNeedsFlushBeforeDisable;
- quirks |= kDecoderLiesAboutNumberOfChannels;
- }
- if (!strcmp(componentName, "OMX.TI.AAC.decode")) {
- quirks |= kNeedsFlushBeforeDisable;
- quirks |= kRequiresFlushCompleteEmulation;
- quirks |= kSupportsMultipleFramesPerInputBuffer;
- }
- if (!strncmp(componentName, "OMX.qcom.video.encoder.", 23)) {
- quirks |= kRequiresLoadedToIdleAfterAllocation;
- quirks |= kRequiresAllocateBufferOnInputPorts;
- quirks |= kRequiresAllocateBufferOnOutputPorts;
- if (!strncmp(componentName, "OMX.qcom.video.encoder.avc", 26)) {
-
- // The AVC encoder advertises the size of output buffers
- // based on the input video resolution and assumes
- // the worst/least compression ratio is 0.5. It is found that
- // sometimes, the output buffer size is larger than
- // size advertised by the encoder.
- quirks |= kRequiresLargerEncoderOutputBuffer;
- }
- }
- if (!strncmp(componentName, "OMX.qcom.7x30.video.encoder.", 28)) {
- }
- if (!strncmp(componentName, "OMX.qcom.video.decoder.", 23)) {
- quirks |= kRequiresAllocateBufferOnOutputPorts;
- quirks |= kDefersOutputBufferAllocation;
- }
- if (!strncmp(componentName, "OMX.qcom.7x30.video.decoder.", 28)) {
- quirks |= kRequiresAllocateBufferOnInputPorts;
- quirks |= kRequiresAllocateBufferOnOutputPorts;
- quirks |= kDefersOutputBufferAllocation;
- }
-
- if (!strcmp(componentName, "OMX.TI.DUCATI1.VIDEO.DECODER")) {
- quirks |= kRequiresAllocateBufferOnInputPorts;
- quirks |= kRequiresAllocateBufferOnOutputPorts;
- }
-
- // FIXME:
- // Remove the quirks after the work is done.
- else if (!strcmp(componentName, "OMX.TI.DUCATI1.VIDEO.MPEG4E") ||
- !strcmp(componentName, "OMX.TI.DUCATI1.VIDEO.H264E")) {
-
- quirks |= kRequiresAllocateBufferOnInputPorts;
- quirks |= kRequiresAllocateBufferOnOutputPorts;
- }
- else if (!strncmp(componentName, "OMX.TI.", 7)) {
- // Apparently I must not use OMX_UseBuffer on either input or
- // output ports on any of the TI components or quote:
- // "(I) may have unexpected problem (sic) which can be timing related
- // and hard to reproduce."
-
- quirks |= kRequiresAllocateBufferOnInputPorts;
- quirks |= kRequiresAllocateBufferOnOutputPorts;
- if (!strncmp(componentName, "OMX.TI.Video.encoder", 20)) {
- quirks |= kAvoidMemcopyInputRecordingFrames;
- }
- }
-
- if (!strcmp(componentName, "OMX.TI.Video.Decoder")) {
- quirks |= kInputBufferSizesAreBogus;
- }
-
- if (!strncmp(componentName, "OMX.SEC.", 8) && !isEncoder) {
- // These output buffers contain no video data, just some
- // opaque information that allows the overlay to display their
- // contents.
- quirks |= kOutputBuffersAreUnreadable;
- }
-
- return quirks;
-}
-
-// static
void OMXCodec::findMatchingCodecs(
const char *mime,
bool createEncoder, const char *matchComponentName,
uint32_t flags,
- Vector<String8> *matchingCodecs) {
+ Vector<String8> *matchingCodecs,
+ Vector<uint32_t> *matchingCodecQuirks) {
matchingCodecs->clear();
- for (int index = 0;; ++index) {
- const char *componentName;
+ if (matchingCodecQuirks) {
+ matchingCodecQuirks->clear();
+ }
- if (createEncoder) {
- componentName = GetCodec(
- kEncoderInfo,
- sizeof(kEncoderInfo) / sizeof(kEncoderInfo[0]),
- mime, index);
- } else {
- componentName = GetCodec(
- kDecoderInfo,
- sizeof(kDecoderInfo) / sizeof(kDecoderInfo[0]),
- mime, index);
- }
+ const MediaCodecList *list = MediaCodecList::getInstance();
+ if (list == NULL) {
+ return;
+ }
- if (!componentName) {
+ size_t index = 0;
+ for (;;) {
+ ssize_t matchIndex =
+ list->findCodecByType(mime, createEncoder, index);
+
+ if (matchIndex < 0) {
break;
}
+ index = matchIndex + 1;
+
+ const char *componentName = list->getCodecName(matchIndex);
+
// If a specific codec is requested, skip the non-matching ones.
if (matchComponentName && strcmp(componentName, matchComponentName)) {
continue;
@@ -405,6 +227,10 @@
(!(flags & (kSoftwareCodecsOnly | kHardwareCodecsOnly)))) {
matchingCodecs->push(String8(componentName));
+
+ if (matchingCodecQuirks) {
+ matchingCodecQuirks->push(getComponentQuirks(list, matchIndex));
+ }
}
}
@@ -414,6 +240,45 @@
}
// static
+uint32_t OMXCodec::getComponentQuirks(
+ const MediaCodecList *list, size_t index) {
+ uint32_t quirks = 0;
+ if (list->codecHasQuirk(
+ index, "requires-allocate-on-input-ports")) {
+ quirks |= kRequiresAllocateBufferOnInputPorts;
+ }
+ if (list->codecHasQuirk(
+ index, "requires-allocate-on-output-ports")) {
+ quirks |= kRequiresAllocateBufferOnOutputPorts;
+ }
+ if (list->codecHasQuirk(
+ index, "output-buffers-are-unreadable")) {
+ quirks |= kOutputBuffersAreUnreadable;
+ }
+
+ return quirks;
+}
+
+// static
+bool OMXCodec::findCodecQuirks(const char *componentName, uint32_t *quirks) {
+ const MediaCodecList *list = MediaCodecList::getInstance();
+
+ if (list == NULL) {
+ return false;
+ }
+
+ ssize_t index = list->findCodecByName(componentName);
+
+ if (index < 0) {
+ return false;
+ }
+
+ *quirks = getComponentQuirks(list, index);
+
+ return true;
+}
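// Editor's sketch (not part of the patch): a caller that only has a component
// name can now recover its quirks through MediaCodecList. The component name
// and the visibility of the Quirks constants here are illustrative assumptions.

    uint32_t quirks = 0;
    if (OMXCodec::findCodecQuirks("OMX.SEC.AVC.Decoder", &quirks)
            && (quirks & kRequiresAllocateBufferOnOutputPorts)) {
        // allocate output buffers through the component instead of handing
        // it client-owned memory via OMX_UseBuffer
    }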
+
+// static
sp<MediaSource> OMXCodec::Create(
const sp<IOMX> &omx,
const sp<MetaData> &meta, bool createEncoder,
@@ -435,8 +300,10 @@
CHECK(success);
Vector<String8> matchingCodecs;
+ Vector<uint32_t> matchingCodecQuirks;
findMatchingCodecs(
- mime, createEncoder, matchComponentName, flags, &matchingCodecs);
+ mime, createEncoder, matchComponentName, flags,
+ &matchingCodecs, &matchingCodecQuirks);
if (matchingCodecs.isEmpty()) {
return NULL;
@@ -447,6 +314,7 @@
for (size_t i = 0; i < matchingCodecs.size(); ++i) {
const char *componentNameBase = matchingCodecs[i].string();
+ uint32_t quirks = matchingCodecQuirks[i];
const char *componentName = componentNameBase;
AString tmp;
@@ -470,8 +338,6 @@
ALOGV("Attempting to allocate OMX node '%s'", componentName);
- uint32_t quirks = getComponentQuirks(componentNameBase, createEncoder);
-
if (!createEncoder
&& (quirks & kOutputBuffersAreUnreadable)
&& (flags & kClientNeedsFramebuffer)) {
@@ -627,16 +493,6 @@
CODEC_LOGI(
"AVC profile = %u (%s), level = %u",
profile, AVCProfileToString(profile), level);
-
- if (!strcmp(mComponentName, "OMX.TI.Video.Decoder")
- && (profile != kAVCProfileBaseline || level > 30)) {
- // This stream exceeds the decoder's capabilities. The decoder
- // does not handle this gracefully and would clobber the heap
- // and wreak havoc instead...
-
- ALOGE("Profile and/or level exceed the decoder's capabilities.");
- return ERROR_UNSUPPORTED;
- }
} else if (meta->findData(kKeyVorbisInfo, &type, &data, &size)) {
addCodecSpecificData(data, size);
@@ -692,40 +548,11 @@
}
}
- if (!strcasecmp(mMIME, MEDIA_MIMETYPE_IMAGE_JPEG)
- && !strcmp(mComponentName, "OMX.TI.JPEG.decode")) {
- OMX_COLOR_FORMATTYPE format =
- OMX_COLOR_Format32bitARGB8888;
- // OMX_COLOR_FormatYUV420PackedPlanar;
- // OMX_COLOR_FormatCbYCrY;
- // OMX_COLOR_FormatYUV411Planar;
-
- int32_t width, height;
- bool success = meta->findInt32(kKeyWidth, &width);
- success = success && meta->findInt32(kKeyHeight, &height);
-
- int32_t compressedSize;
- success = success && meta->findInt32(
- kKeyMaxInputSize, &compressedSize);
-
- CHECK(success);
- CHECK(compressedSize > 0);
-
- setImageOutputFormat(format, width, height);
- setJPEGInputFormat(width, height, (OMX_U32)compressedSize);
- }
-
int32_t maxInputSize;
if (meta->findInt32(kKeyMaxInputSize, &maxInputSize)) {
setMinBufferSize(kPortIndexInput, (OMX_U32)maxInputSize);
}
- if (!strcmp(mComponentName, "OMX.TI.AMR.encode")
- || !strcmp(mComponentName, "OMX.TI.WBAMR.encode")
- || !strcmp(mComponentName, "OMX.TI.AAC.encode")) {
- setMinBufferSize(kPortIndexOutput, 8192); // XXX
- }
-
initOutputFormat(meta);
if ((mFlags & kClientNeedsFramebuffer)
@@ -829,21 +656,6 @@
index, format.eCompressionFormat, format.eColorFormat);
#endif
- if (!strcmp("OMX.TI.Video.encoder", mComponentName)) {
- if (portIndex == kPortIndexInput
- && colorFormat == format.eColorFormat) {
- // eCompressionFormat does not seem right.
- found = true;
- break;
- }
- if (portIndex == kPortIndexOutput
- && compressionFormat == format.eCompressionFormat) {
- // eColorFormat does not seem right.
- found = true;
- break;
- }
- }
-
if (format.eCompressionFormat == compressionFormat
&& format.eColorFormat == colorFormat) {
found = true;
@@ -906,13 +718,8 @@
int32_t targetColorFormat;
if (meta->findInt32(kKeyColorFormat, &targetColorFormat)) {
*colorFormat = (OMX_COLOR_FORMATTYPE) targetColorFormat;
- } else {
- if (!strcasecmp("OMX.TI.Video.encoder", mComponentName)) {
- *colorFormat = OMX_COLOR_FormatYCbYCr;
- }
}
-
// Check whether the target color format is supported.
return isColorFormatSupported(*colorFormat, kPortIndexInput);
}
@@ -1541,6 +1348,8 @@
"video_decoder.mpeg4", "video_encoder.mpeg4" },
{ MEDIA_MIMETYPE_VIDEO_H263,
"video_decoder.h263", "video_encoder.h263" },
+ { MEDIA_MIMETYPE_VIDEO_VPX,
+ "video_decoder.vpx", "video_encoder.vpx" },
};
static const size_t kNumMimeToRole =
@@ -3324,13 +3133,6 @@
info->mStatus = OWNED_BY_COMPONENT;
- // This component does not ever signal the EOS flag on output buffers,
- // Thanks for nothing.
- if (mSignalledEOS && !strcmp(mComponentName, "OMX.TI.Video.encoder")) {
- mNoMoreOutputData = true;
- mBufferFilled.signal();
- }
-
return true;
}
@@ -3556,6 +3358,7 @@
//////////////// output port ////////////////////
// format
OMX_AUDIO_PARAM_PORTFORMATTYPE format;
+ InitOMXParams(&format);
format.nPortIndex = kPortIndexOutput;
format.nIndex = 0;
status_t err = OMX_ErrorNone;
diff --git a/media/libstagefright/SurfaceMediaSource.cpp b/media/libstagefright/SurfaceMediaSource.cpp
index aa047d6..ab2cff0 100644
--- a/media/libstagefright/SurfaceMediaSource.cpp
+++ b/media/libstagefright/SurfaceMediaSource.cpp
@@ -24,9 +24,8 @@
#include <media/stagefright/MetadataBufferType.h>
#include <ui/GraphicBuffer.h>
-#include <surfaceflinger/ISurfaceComposer.h>
-#include <surfaceflinger/SurfaceComposerClient.h>
-#include <surfaceflinger/IGraphicBufferAlloc.h>
+#include <gui/ISurfaceComposer.h>
+#include <gui/IGraphicBufferAlloc.h>
#include <OMX_Component.h>
#include <utils/Log.h>
diff --git a/media/libstagefright/codecs/aacenc/Android.mk b/media/libstagefright/codecs/aacenc/Android.mk
index 34a2796..509193c 100644
--- a/media/libstagefright/codecs/aacenc/Android.mk
+++ b/media/libstagefright/codecs/aacenc/Android.mk
@@ -79,7 +79,7 @@
endif
ifeq ($(VOTT), v7)
-LOCAL_CFLAGS += -DARMV5E -DARMV7Neon -DARM_INASM -DARMV5_INASM
+LOCAL_CFLAGS += -DARMV5E -DARMV7Neon -DARM_INASM -DARMV5_INASM -DARMV6_INASM
LOCAL_C_INCLUDES += $(LOCAL_PATH)/src/asm/ARMV5E
LOCAL_C_INCLUDES += $(LOCAL_PATH)/src/asm/ARMV7
endif
diff --git a/media/libstagefright/codecs/aacenc/basic_op/basic_op.h b/media/libstagefright/codecs/aacenc/basic_op/basic_op.h
index ef3c31b..5cd7e5f 100644
--- a/media/libstagefright/codecs/aacenc/basic_op/basic_op.h
+++ b/media/libstagefright/codecs/aacenc/basic_op/basic_op.h
@@ -227,27 +227,18 @@
#if ARMV4_INASM
__inline Word32 ASM_L_shr(Word32 L_var1, Word16 var2)
{
- Word32 result;
- asm volatile(
- "MOV %[result], %[L_var1], ASR %[var2] \n"
- :[result]"=r"(result)
- :[L_var1]"r"(L_var1), [var2]"r"(var2)
- );
- return result;
+ return L_var1 >> var2;
}
__inline Word32 ASM_L_shl(Word32 L_var1, Word16 var2)
{
Word32 result;
- asm volatile(
- "MOV r2, %[L_var1] \n"
- "MOV r3, #0x7fffffff\n"
+ asm (
"MOV %[result], %[L_var1], ASL %[var2] \n"
- "TEQ r2, %[result], ASR %[var2]\n"
- "EORNE %[result],r3,r2,ASR#31\n"
- :[result]"+r"(result)
- :[L_var1]"r"(L_var1), [var2]"r"(var2)
- :"r2", "r3"
+ "TEQ %[L_var1], %[result], ASR %[var2]\n"
+ "EORNE %[result], %[mask], %[L_var1], ASR #31\n"
+ :[result]"=&r"(result)
+ :[L_var1]"r"(L_var1), [var2]"r"(var2), [mask]"r"(0x7fffffff)
);
return result;
}
@@ -255,10 +246,10 @@
__inline Word32 ASM_shr(Word32 L_var1, Word16 var2)
{
Word32 result;
- asm volatile(
+ asm (
"CMP %[var2], #15\n"
- "MOVGE %[var2], #15\n"
- "MOV %[result], %[L_var1], ASR %[var2]\n"
+ "MOVLT %[result], %[L_var1], ASR %[var2]\n"
+ "MOVGE %[result], %[L_var1], ASR #15\n"
:[result]"=r"(result)
:[L_var1]"r"(L_var1), [var2]"r"(var2)
);
@@ -267,21 +258,32 @@
__inline Word32 ASM_shl(Word32 L_var1, Word16 var2)
{
+#if ARMV6_SAT
Word32 result;
- asm volatile(
+ asm (
"CMP %[var2], #16\n"
- "MOVGE %[var2], #16\n"
- "MOV %[result], %[L_var1], ASL %[var2]\n"
- "MOV r3, #1\n"
- "MOV r2, %[result], ASR #15\n"
- "RSB r3,r3,r3,LSL #15 \n"
- "TEQ r2, %[result], ASR #31 \n"
- "EORNE %[result], r3, %[result],ASR #31"
- :[result]"+r"(result)
+ "MOVLT %[result], %[L_var1], ASL %[var2]\n"
+ "MOVGE %[result], %[L_var1], ASL #16\n"
+ "SSAT %[result], #16, %[result]\n"
+ :[result]"=r"(result)
:[L_var1]"r"(L_var1), [var2]"r"(var2)
- :"r2", "r3"
);
return result;
+#else
+ Word32 result;
+ Word32 tmp;
+ asm (
+ "CMP %[var2], #16\n"
+ "MOVLT %[result], %[L_var1], ASL %[var2]\n"
+ "MOVGE %[result], %[L_var1], ASL #16\n"
+ "MOV %[tmp], %[result], ASR #15\n"
+ "TEQ %[tmp], %[result], ASR #31 \n"
+ "EORNE %[result], %[mask], %[result],ASR #31"
+ :[result]"=&r"(result), [tmp]"=&r"(tmp)
+ :[L_var1]"r"(L_var1), [var2]"r"(var2), [mask]"r"(0x7fff)
+ );
+ return result;
+#endif
}
#endif
@@ -293,18 +295,24 @@
#if (SATRUATE_IS_INLINE)
__inline Word16 saturate(Word32 L_var1)
{
-#if ARMV5TE_SAT
+#if ARMV6_SAT
+ Word16 result;
+ asm (
+ "SSAT %[result], #16, %[L_var1]"
+ : [result]"=r"(result)
+ : [L_var1]"r"(L_var1)
+ );
+ return result;
+#elif ARMV5TE_SAT
Word16 result;
+ Word32 tmp;
asm volatile (
- "MOV %[result], %[L_var1]\n"
- "MOV r3, #1\n"
- "MOV r2,%[L_var1],ASR#15\n"
- "RSB r3, r3, r3, LSL #15\n"
- "TEQ r2,%[L_var1],ASR#31\n"
- "EORNE %[result],r3,%[L_var1],ASR#31\n"
- :[result]"+r"(result)
- :[L_var1]"r"(L_var1)
- :"r2", "r3"
+ "MOV %[tmp], %[L_var1],ASR#15\n"
+ "TEQ %[tmp], %[L_var1],ASR#31\n"
+ "EORNE %[result], %[mask],%[L_var1],ASR#31\n"
+ "MOVEQ %[result], %[L_var1]\n"
+ :[result]"=&r"(result), [tmp]"=&r"(tmp)
+ :[L_var1]"r"(L_var1), [mask]"r"(0x7fff)
);
return result;
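// Editor's note: both the new ARMv6 SSAT path and the reworked ARMv5
// TEQ/EORNE sequence above compute plain 16-bit saturation; a C reference
// sketch for readers not fluent in ARM assembly (the name is illustrative):

    __inline Word16 saturate_ref(Word32 L_var1)
    {
        if (L_var1 > 32767)  return  32767;           /* 0x7fff */
        if (L_var1 < -32768) return (Word16)-32768;   /* 0x8000 */
        return (Word16)L_var1;
    }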
@@ -420,10 +428,10 @@
{
#if ARMV5TE_L_MULT
Word32 result;
- asm volatile(
+ asm (
"SMULBB %[result], %[var1], %[var2] \n"
"QADD %[result], %[result], %[result] \n"
- :[result]"+r"(result)
+ :[result]"=r"(result)
:[var1]"r"(var1), [var2]"r"(var2)
);
return result;
@@ -450,11 +458,10 @@
{
#if ARMV5TE_L_MSU
Word32 result;
- asm volatile(
+ asm (
"SMULBB %[result], %[var1], %[var2] \n"
- "QADD %[result], %[result], %[result] \n"
- "QSUB %[result], %[L_var3], %[result]\n"
- :[result]"+r"(result)
+ "QDSUB %[result], %[L_var3], %[result]\n"
+ :[result]"=&r"(result)
:[L_var3]"r"(L_var3), [var1]"r"(var1), [var2]"r"(var2)
);
return result;
@@ -474,9 +481,9 @@
{
#if ARMV5TE_L_SUB
Word32 result;
- asm volatile(
+ asm (
"QSUB %[result], %[L_var1], %[L_var2]\n"
- :[result]"+r"(result)
+ :[result]"=r"(result)
:[L_var1]"r"(L_var1), [L_var2]"r"(L_var2)
);
return result;
@@ -589,16 +596,14 @@
{
#if ARMV5TE_ADD
Word32 result;
- asm volatile(
+ Word32 tmp;
+ asm (
"ADD %[result], %[var1], %[var2] \n"
- "MOV r3, #0x1\n"
- "MOV r2, %[result], ASR #15\n"
- "RSB r3, r3, r3, LSL, #15\n"
- "TEQ r2, %[result], ASR #31\n"
- "EORNE %[result], r3, %[result], ASR #31"
- :[result]"+r"(result)
- :[var1]"r"(var1), [var2]"r"(var2)
- :"r2", "r3"
+ "MOV %[tmp], %[result], ASR #15 \n"
+ "TEQ %[tmp], %[result], ASR #31 \n"
+ "EORNE %[result], %[mask], %[result], ASR #31"
+ :[result]"=&r"(result), [tmp]"=&r"(tmp)
+ :[var1]"r"(var1), [var2]"r"(var2), [mask]"r"(0x7fff)
);
return result;
#else
@@ -619,16 +624,14 @@
{
#if ARMV5TE_SUB
Word32 result;
- asm volatile(
- "MOV r3, #1\n"
+ Word32 tmp;
+ asm (
"SUB %[result], %[var1], %[var2] \n"
- "RSB r3,r3,r3,LSL#15\n"
- "MOV r2, %[var1], ASR #15 \n"
- "TEQ r2, %[var1], ASR #31 \n"
- "EORNE %[result], r3, %[result], ASR #31 \n"
- :[result]"+r"(result)
- :[var1]"r"(var1), [var2]"r"(var2)
- :"r2", "r3"
+ "MOV %[tmp], %[var1], ASR #15 \n"
+ "TEQ %[tmp], %[var1], ASR #31 \n"
+ "EORNE %[result], %[mask], %[result], ASR #31 \n"
+ :[result]"=&r"(result), [tmp]"=&r"(tmp)
+ :[var1]"r"(var1), [var2]"r"(var2), [mask]"r"(0x7fff)
);
return result;
#else
@@ -682,19 +685,25 @@
#if (MULT_IS_INLINE)
__inline Word16 mult (Word16 var1, Word16 var2)
{
-#if ARMV5TE_MULT
+#if ARMV5TE_MULT && ARMV6_SAT
Word32 result;
- asm volatile(
- "SMULBB r2, %[var1], %[var2] \n"
- "MOV r3, #1\n"
- "MOV %[result], r2, ASR #15\n"
- "RSB r3, r3, r3, LSL #15\n"
- "MOV r2, %[result], ASR #15\n"
- "TEQ r2, %[result], ASR #31\n"
- "EORNE %[result], r3, %[result], ASR #31 \n"
- :[result]"+r"(result)
+ asm (
+ "SMULBB %[result], %[var1], %[var2] \n"
+ "SSAT %[result], #16, %[result], ASR #15 \n"
+ :[result]"=r"(result)
:[var1]"r"(var1), [var2]"r"(var2)
- :"r2", "r3"
+ );
+ return result;
+#elif ARMV5TE_MULT
+ Word32 result, tmp;
+ asm (
+ "SMULBB %[tmp], %[var1], %[var2] \n"
+ "MOV %[result], %[tmp], ASR #15\n"
+ "MOV %[tmp], %[result], ASR #15\n"
+ "TEQ %[tmp], %[result], ASR #31\n"
+ "EORNE %[result], %[mask], %[result], ASR #31 \n"
+ :[result]"=&r"(result), [tmp]"=&r"(tmp)
+ :[var1]"r"(var1), [var2]"r"(var2), [mask]"r"(0x7fff)
);
return result;
#else
@@ -719,18 +728,17 @@
{
#if ARMV5TE_NORM_S
Word16 result;
- asm volatile(
- "MOV r2,%[var1] \n"
- "CMP r2, #0\n"
- "RSBLT %[var1], %[var1], #0 \n"
- "CLZNE %[result], %[var1]\n"
+ Word32 tmp;
+ asm (
+ "RSBS %[tmp], %[var1], #0 \n"
+ "CLZLT %[result], %[var1]\n"
+ "CLZGT %[result], %[tmp]\n"
"SUBNE %[result], %[result], #17\n"
"MOVEQ %[result], #0\n"
- "CMP r2, #-1\n"
+ "CMP %[var1], #-1\n"
"MOVEQ %[result], #15\n"
- :[result]"+r"(result)
+ :[result]"=&r"(result), [tmp]"=&r"(tmp)
:[var1]"r"(var1)
- :"r2"
);
return result;
#else
@@ -774,7 +782,7 @@
"CLZNE %[result], %[L_var1]\n"
"SUBNE %[result], %[result], #1\n"
"MOVEQ %[result], #0\n"
- :[result]"+r"(result)
+ :[result]"=r"(result)
:[L_var1]"r"(L_var1)
);
return result;
@@ -979,13 +987,11 @@
{
#if ARMV5TE_ROUND
Word16 result;
- asm volatile(
- "MOV r1,#0x00008000\n"
- "QADD %[result], %[L_var1], r1\n"
+ asm (
+ "QADD %[result], %[L_var1], %[bias]\n"
"MOV %[result], %[result], ASR #16 \n"
- :[result]"+r"(result)
- :[L_var1]"r"(L_var1)
- :"r1"
+ :[result]"=r"(result)
+ :[L_var1]"r"(L_var1), [bias]"r"(0x8000)
);
return result;
#else
@@ -1005,11 +1011,10 @@
{
#if ARMV5TE_L_MAC
Word32 result;
- asm volatile(
+ asm (
"SMULBB %[result], %[var1], %[var2]\n"
- "QADD %[result], %[result], %[result]\n"
- "QADD %[result], %[result], %[L_var3]\n"
- :[result]"+r"(result)
+ "QDADD %[result], %[L_var3], %[result]\n"
+ :[result]"=&r"(result)
: [L_var3]"r"(L_var3), [var1]"r"(var1), [var2]"r"(var2)
);
return result;
@@ -1029,9 +1034,9 @@
{
#if ARMV5TE_L_ADD
Word32 result;
- asm volatile(
+ asm (
"QADD %[result], %[L_var1], %[L_var2]\n"
- :[result]"+r"(result)
+ :[result]"=r"(result)
:[L_var1]"r"(L_var1), [L_var2]"r"(L_var2)
);
return result;
diff --git a/media/libstagefright/codecs/aacenc/basic_op/oper_32b.h b/media/libstagefright/codecs/aacenc/basic_op/oper_32b.h
index 9ebd1c2..6e5844f 100644
--- a/media/libstagefright/codecs/aacenc/basic_op/oper_32b.h
+++ b/media/libstagefright/codecs/aacenc/basic_op/oper_32b.h
@@ -63,7 +63,7 @@
Word32 result;
asm volatile(
"SMULWB %[result], %[L_var2], %[var1] \n"
- :[result]"+r"(result)
+ :[result]"=r"(result)
:[L_var2]"r"(L_var2), [var1]"r"(var1)
);
return result;
diff --git a/media/libstagefright/codecs/aacenc/basic_op/typedefs.h b/media/libstagefright/codecs/aacenc/basic_op/typedefs.h
index 2d5d956..6059237 100644
--- a/media/libstagefright/codecs/aacenc/basic_op/typedefs.h
+++ b/media/libstagefright/codecs/aacenc/basic_op/typedefs.h
@@ -48,9 +48,7 @@
#define assert(_Expression) ((void)0)
#endif
-#ifdef LINUX
-#define __inline static __inline__
-#endif
+#define __inline static __inline
#define INT_BITS 32
/*
@@ -130,6 +128,13 @@
#define ARMV5TE_NORM_L 1
#define ARMV5TE_L_MPY_LS 1
#endif
+#if ARMV6_INASM
+ #undef ARMV5TE_ADD
+ #define ARMV5TE_ADD 0
+ #undef ARMV5TE_SUB
+ #define ARMV5TE_SUB 0
+ #define ARMV6_SAT 1
+#endif
//basic operation functions optimization flags
#define SATRUATE_IS_INLINE 1 //define saturate as inline function
diff --git a/media/libstagefright/codecs/aacenc/inc/aacenc_core.h b/media/libstagefright/codecs/aacenc/inc/aacenc_core.h
index 1acdbbc..bb75b6d 100644
--- a/media/libstagefright/codecs/aacenc/inc/aacenc_core.h
+++ b/media/libstagefright/codecs/aacenc/inc/aacenc_core.h
@@ -102,7 +102,7 @@
const UWord8 *ancBytes, /*!< pointer to ancillary data bytes */
Word16 *numAncBytes, /*!< number of ancillary Data Bytes, send as fill element */
UWord8 *outBytes, /*!< pointer to output buffer */
- Word32 *numOutBytes /*!< number of bytes in output buffer */
+ VO_U32 *numOutBytes /*!< number of bytes in output buffer */
);
/*---------------------------------------------------------------------------
diff --git a/media/libstagefright/codecs/aacenc/inc/bitbuffer.h b/media/libstagefright/codecs/aacenc/inc/bitbuffer.h
index e538064..7c79f07 100644
--- a/media/libstagefright/codecs/aacenc/inc/bitbuffer.h
+++ b/media/libstagefright/codecs/aacenc/inc/bitbuffer.h
@@ -76,7 +76,7 @@
Word16 WriteBits(HANDLE_BIT_BUF hBitBuf,
- Word32 writeValue,
+ UWord32 writeValue,
Word16 noBitsToWrite);
void ResetBitBuf(HANDLE_BIT_BUF hBitBuf,
diff --git a/media/libstagefright/codecs/aacenc/inc/psy_configuration.h b/media/libstagefright/codecs/aacenc/inc/psy_configuration.h
index 9abfc99..f6981fa 100644
--- a/media/libstagefright/codecs/aacenc/inc/psy_configuration.h
+++ b/media/libstagefright/codecs/aacenc/inc/psy_configuration.h
@@ -31,7 +31,7 @@
Word16 sfbCnt;
Word16 sfbActive; /* number of sf bands containing energy after lowpass */
- Word16 *sfbOffset;
+ const Word16 *sfbOffset;
Word32 sfbThresholdQuiet[MAX_SFB_LONG];
@@ -61,7 +61,7 @@
Word16 sfbCnt;
Word16 sfbActive; /* number of sf bands containing energy after lowpass */
- Word16 *sfbOffset;
+ const Word16 *sfbOffset;
Word32 sfbThresholdQuiet[MAX_SFB_SHORT];
diff --git a/media/libstagefright/codecs/aacenc/src/aacenc_core.c b/media/libstagefright/codecs/aacenc/src/aacenc_core.c
index 2b3bd48..cecbc8f 100644
--- a/media/libstagefright/codecs/aacenc/src/aacenc_core.c
+++ b/media/libstagefright/codecs/aacenc/src/aacenc_core.c
@@ -146,7 +146,7 @@
const UWord8 *ancBytes, /*!< pointer to ancillary data bytes */
Word16 *numAncBytes, /*!< number of ancillary Data Bytes */
UWord8 *outBytes, /*!< pointer to output buffer (must be large MINBITS_COEF/8*MAX_CHANNELS bytes) */
- Word32 *numOutBytes /*!< number of bytes in output buffer after processing */
+ VO_U32 *numOutBytes /*!< number of bytes in output buffer after processing */
)
{
ELEMENT_INFO *elInfo = &aacEnc->elInfo;
diff --git a/media/libstagefright/codecs/aacenc/src/adj_thr.c b/media/libstagefright/codecs/aacenc/src/adj_thr.c
index a8ab809..373b063 100644
--- a/media/libstagefright/codecs/aacenc/src/adj_thr.c
+++ b/media/libstagefright/codecs/aacenc/src/adj_thr.c
@@ -26,6 +26,7 @@
#include "adj_thr.h"
#include "qc_data.h"
#include "line_pe.h"
+#include <string.h>
#define minSnrLimit 0x6666 /* 1 dB */
@@ -1138,6 +1139,7 @@
Word16 maxBitresBits = elBits->maxBits;
Word16 sideInfoBits = (qcOE->staticBitsUsed + qcOE->ancBitsUsed);
Word16 ch;
+ memset(&peData, 0, sizeof(peData));
prepareSfbPe(&peData, psyOutChannel, logSfbEnergy, sfbNRelevantLines, nChannels, AdjThrStateElement->peOffset);
diff --git a/media/libstagefright/codecs/aacenc/src/bitbuffer.c b/media/libstagefright/codecs/aacenc/src/bitbuffer.c
index 5615ac3..0ce93d3 100644
--- a/media/libstagefright/codecs/aacenc/src/bitbuffer.c
+++ b/media/libstagefright/codecs/aacenc/src/bitbuffer.c
@@ -138,7 +138,7 @@
*
*****************************************************************************/
Word16 WriteBits(HANDLE_BIT_BUF hBitBuf,
- Word32 writeValue,
+ UWord32 writeValue,
Word16 noBitsToWrite)
{
Word16 wBitPos;
@@ -152,6 +152,7 @@
wBitPos = hBitBuf->wBitPos;
wBitPos += noBitsToWrite;
+ writeValue &= ~(0xffffffff << noBitsToWrite); // Mask out everything except the lowest noBitsToWrite bits
writeValue <<= 32 - wBitPos;
writeValue |= hBitBuf->cache;
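// Editor's note: a tiny worked example (values illustrative) of the masking
// line added above, for a caller that passes a word with stray upper bits set.

    UWord32 writeValue = 0xfffffff5;   /* caller only means the low 4 bits */
    Word16 noBitsToWrite = 4;
    writeValue &= ~(0xffffffff << noBitsToWrite);   /* 0xfffffff5 & 0xf == 0x5 */
    /* without the mask, the stray high bits would be shifted into the cache
       and corrupt bits that were written earlier */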
diff --git a/media/libstagefright/codecs/aacenc/src/dyn_bits.c b/media/libstagefright/codecs/aacenc/src/dyn_bits.c
index 3d2efdc..7769188 100644
--- a/media/libstagefright/codecs/aacenc/src/dyn_bits.c
+++ b/media/libstagefright/codecs/aacenc/src/dyn_bits.c
@@ -281,7 +281,7 @@
const Word32 blockType)
{
Word32 grpNdx, i;
- Word16 *sideInfoTab = NULL;
+ const Word16 *sideInfoTab = NULL;
SECTION_INFO *sectionInfo;
/*
diff --git a/media/libstagefright/codecs/aacenc/src/interface.c b/media/libstagefright/codecs/aacenc/src/interface.c
index f2472d8..d0ad433 100644
--- a/media/libstagefright/codecs/aacenc/src/interface.c
+++ b/media/libstagefright/codecs/aacenc/src/interface.c
@@ -99,8 +99,8 @@
Word32 i;
Word32 accuSumMS=0;
Word32 accuSumLR=0;
- Word32 *pSumMS = sfbEnergySumMS.sfbShort;
- Word32 *pSumLR = sfbEnergySumLR.sfbShort;
+ const Word32 *pSumMS = sfbEnergySumMS.sfbShort;
+ const Word32 *pSumLR = sfbEnergySumLR.sfbShort;
for (i=TRANS_FAC; i; i--) {
accuSumLR = L_add(accuSumLR, *pSumLR); pSumLR++;
diff --git a/media/libstagefright/codecs/aacenc/src/psy_configuration.c b/media/libstagefright/codecs/aacenc/src/psy_configuration.c
index 02d92ab..dd40f9b 100644
--- a/media/libstagefright/codecs/aacenc/src/psy_configuration.c
+++ b/media/libstagefright/codecs/aacenc/src/psy_configuration.c
@@ -139,7 +139,7 @@
*
*****************************************************************************/
static void initThrQuiet(Word16 numPb,
- Word16 *pbOffset,
+ const Word16 *pbOffset,
Word16 *pbBarcVal,
Word32 *pbThresholdQuiet) {
Word16 i;
@@ -250,7 +250,7 @@
*
*****************************************************************************/
static void initBarcValues(Word16 numPb,
- Word16 *pbOffset,
+ const Word16 *pbOffset,
Word16 numLines,
Word32 samplingFrequency,
Word16 *pbBval)
diff --git a/media/libstagefright/codecs/aacenc/src/psy_main.c b/media/libstagefright/codecs/aacenc/src/psy_main.c
index 085acb8..4e9218c 100644
--- a/media/libstagefright/codecs/aacenc/src/psy_main.c
+++ b/media/libstagefright/codecs/aacenc/src/psy_main.c
@@ -658,7 +658,8 @@
Word32 normEnergyShift = (psyData->mdctScale + 1) << 1; /* in reference code, mdct spectrum must be multiplied by 2, so +1 */
Word32 clipEnergy = hPsyConfShort->clipEnergy >> normEnergyShift;
Word32 wOffset = 0;
- Word32 *data0, *data1;
+ Word32 *data0;
+ const Word32 *data1;
for(w = 0; w < TRANS_FAC; w++) {
Word32 i, tdata;
diff --git a/media/libstagefright/codecs/aacenc/src/qc_main.c b/media/libstagefright/codecs/aacenc/src/qc_main.c
index df6d46e..48ff300 100644
--- a/media/libstagefright/codecs/aacenc/src/qc_main.c
+++ b/media/libstagefright/codecs/aacenc/src/qc_main.c
@@ -163,7 +163,7 @@
Word32 i;
if(hQC)
{
- if(hQC->qcChannel[0].quantSpec);
+ if(hQC->qcChannel[0].quantSpec)
mem_free(pMemOP, hQC->qcChannel[0].quantSpec, VO_INDEX_ENC_AAC);
if(hQC->qcChannel[0].maxValueInSfb)
diff --git a/media/libstagefright/codecs/aacenc/src/quantize.c b/media/libstagefright/codecs/aacenc/src/quantize.c
index 54add2f..0d0f550 100644
--- a/media/libstagefright/codecs/aacenc/src/quantize.c
+++ b/media/libstagefright/codecs/aacenc/src/quantize.c
@@ -110,7 +110,7 @@
Word32 m = gain&3;
Word32 g = (gain >> 2) + 4;
Word32 mdctSpeL;
- Word16 *pquat;
+ const Word16 *pquat;
/* gain&3 */
pquat = quantBorders[m];
@@ -333,7 +333,7 @@
Word32 m = gain&3;
Word32 g = (gain >> 2) + 4;
Word32 g2 = (g << 1) + 1;
- Word16 *pquat, *repquat;
+ const Word16 *pquat, *repquat;
/* gain&3 */
pquat = quantBorders[m];
diff --git a/media/libstagefright/codecs/aacenc/src/sf_estim.c b/media/libstagefright/codecs/aacenc/src/sf_estim.c
index fe40137..bc320ec 100644
--- a/media/libstagefright/codecs/aacenc/src/sf_estim.c
+++ b/media/libstagefright/codecs/aacenc/src/sf_estim.c
@@ -400,7 +400,7 @@
Word16 *minScfCalculated,
Flag restartOnSuccess)
{
- Word32 sfbLast, sfbAct, sfbNext, scfAct, scfMin;
+ Word16 sfbLast, sfbAct, sfbNext, scfAct, scfMin;
Word16 *scfLast, *scfNext;
Word32 sfbPeOld, sfbPeNew;
Word32 sfbDistNew;
diff --git a/media/libstagefright/codecs/aacenc/src/transform.c b/media/libstagefright/codecs/aacenc/src/transform.c
index a154a2f..a02336f 100644
--- a/media/libstagefright/codecs/aacenc/src/transform.c
+++ b/media/libstagefright/codecs/aacenc/src/transform.c
@@ -339,6 +339,12 @@
*buf1-- = MULHIGH(cosb, tr2) + MULHIGH(sinb, ti2);
}
}
+#else
+void Radix4First(int *buf, int num);
+void Radix8First(int *buf, int num);
+void Radix4FFT(int *buf, int num, int bgn, int *twidTab);
+void PreMDCT(int *buf0, int num, const int *csptr);
+void PostMDCT(int *buf0, int num, const int *csptr);
#endif
diff --git a/media/libstagefright/codecs/amrnb/common/include/az_lsp.h b/media/libstagefright/codecs/amrnb/common/include/az_lsp.h
index 3e15ba3..7c24ca9 100644
--- a/media/libstagefright/codecs/amrnb/common/include/az_lsp.h
+++ b/media/libstagefright/codecs/amrnb/common/include/az_lsp.h
@@ -83,7 +83,7 @@
; EXTERNAL VARIABLES REFERENCES
; Declare variables used in this module but defined elsewhere
----------------------------------------------------------------------------*/
- extern Word16 grid[];
+ extern const Word16 grid[];
/*----------------------------------------------------------------------------
; SIMPLE TYPEDEF'S
diff --git a/media/libstagefright/codecs/amrnb/common/include/inv_sqrt.h b/media/libstagefright/codecs/amrnb/common/include/inv_sqrt.h
index 4fb2b11..91ab3e4 100644
--- a/media/libstagefright/codecs/amrnb/common/include/inv_sqrt.h
+++ b/media/libstagefright/codecs/amrnb/common/include/inv_sqrt.h
@@ -85,7 +85,7 @@
; EXTERNAL VARIABLES REFERENCES
; Declare variables used in this module but defined elsewhere
----------------------------------------------------------------------------*/
- extern Word16 inv_sqrt_tbl[];
+ extern const Word16 inv_sqrt_tbl[];
/*----------------------------------------------------------------------------
; SIMPLE TYPEDEF'S
----------------------------------------------------------------------------*/
diff --git a/media/libstagefright/codecs/amrnb/common/include/log2_norm.h b/media/libstagefright/codecs/amrnb/common/include/log2_norm.h
index b104a69..46b4e4d 100644
--- a/media/libstagefright/codecs/amrnb/common/include/log2_norm.h
+++ b/media/libstagefright/codecs/amrnb/common/include/log2_norm.h
@@ -85,7 +85,7 @@
; EXTERNAL VARIABLES REFERENCES
; Declare variables used in this module but defined elsewhere
----------------------------------------------------------------------------*/
- extern Word16 log2_tbl[];
+ extern const Word16 log2_tbl[];
/*----------------------------------------------------------------------------
; SIMPLE TYPEDEF'S
----------------------------------------------------------------------------*/
diff --git a/media/libstagefright/codecs/amrnb/common/include/pow2.h b/media/libstagefright/codecs/amrnb/common/include/pow2.h
index c96fbdd..9b944eb 100644
--- a/media/libstagefright/codecs/amrnb/common/include/pow2.h
+++ b/media/libstagefright/codecs/amrnb/common/include/pow2.h
@@ -81,7 +81,7 @@
; EXTERNAL VARIABLES REFERENCES
; Declare variables used in this module but defined elsewhere
----------------------------------------------------------------------------*/
- extern Word16 pow2_tbl[];
+ extern const Word16 pow2_tbl[];
/*----------------------------------------------------------------------------
; SIMPLE TYPEDEF'S
----------------------------------------------------------------------------*/
diff --git a/media/libstagefright/codecs/amrnb/common/include/sqrt_l.h b/media/libstagefright/codecs/amrnb/common/include/sqrt_l.h
index 86209bd..a6a2ee5 100644
--- a/media/libstagefright/codecs/amrnb/common/include/sqrt_l.h
+++ b/media/libstagefright/codecs/amrnb/common/include/sqrt_l.h
@@ -82,7 +82,7 @@
; EXTERNAL VARIABLES REFERENCES
; Declare variables used in this module but defined elsewhere
----------------------------------------------------------------------------*/
- extern Word16 sqrt_l_tbl[];
+ extern const Word16 sqrt_l_tbl[];
/*----------------------------------------------------------------------------
; SIMPLE TYPEDEF'S
diff --git a/media/libstagefright/codecs/amrnb/common/src/az_lsp.cpp b/media/libstagefright/codecs/amrnb/common/src/az_lsp.cpp
index bd99b30..4135f30 100644
--- a/media/libstagefright/codecs/amrnb/common/src/az_lsp.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/az_lsp.cpp
@@ -299,7 +299,7 @@
t0 += (Word32) * (p_f) << 13;
- if ((UWord32)(t0 - 0xfe000000L) < 0x01ffffffL - 0xfe000000L)
+ if ((UWord32)(t0 - 0xfe000000L) < (UWord32)0x03ffffffL)
{
cheb = (Word16)(t0 >> 10);
}
diff --git a/media/libstagefright/codecs/amrnb/common/src/bitno_tab.cpp b/media/libstagefright/codecs/amrnb/common/src/bitno_tab.cpp
index fed684d..4ee04a5 100644
--- a/media/libstagefright/codecs/amrnb/common/src/bitno_tab.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/bitno_tab.cpp
@@ -152,7 +152,7 @@
; Variable declaration - defined here and used outside this module
----------------------------------------------------------------------------*/
/* number of parameters per modes (values must be <= MAX_PRM_SIZE!) */
- extern const Word16 prmno[N_MODES] =
+ const Word16 prmno[N_MODES] =
{
PRMNO_MR475,
PRMNO_MR515,
@@ -166,7 +166,7 @@
};
/* number of parameters to first subframe per modes */
- extern const Word16 prmnofsf[N_MODES - 1] =
+ const Word16 prmnofsf[N_MODES - 1] =
{
PRMNOFSF_MR475,
PRMNOFSF_MR515,
@@ -179,7 +179,7 @@
};
/* parameter sizes (# of bits), one table per mode */
- extern const Word16 bitno_MR475[PRMNO_MR475] =
+ const Word16 bitno_MR475[PRMNO_MR475] =
{
8, 8, 7, /* LSP VQ */
8, 7, 2, 8, /* first subframe */
@@ -188,7 +188,7 @@
4, 7, 2, /* fourth subframe */
};
- extern const Word16 bitno_MR515[PRMNO_MR515] =
+ const Word16 bitno_MR515[PRMNO_MR515] =
{
8, 8, 7, /* LSP VQ */
8, 7, 2, 6, /* first subframe */
@@ -197,7 +197,7 @@
4, 7, 2, 6, /* fourth subframe */
};
- extern const Word16 bitno_MR59[PRMNO_MR59] =
+ const Word16 bitno_MR59[PRMNO_MR59] =
{
8, 9, 9, /* LSP VQ */
8, 9, 2, 6, /* first subframe */
@@ -206,7 +206,7 @@
4, 9, 2, 6, /* fourth subframe */
};
- extern const Word16 bitno_MR67[PRMNO_MR67] =
+ const Word16 bitno_MR67[PRMNO_MR67] =
{
8, 9, 9, /* LSP VQ */
8, 11, 3, 7, /* first subframe */
@@ -215,7 +215,7 @@
4, 11, 3, 7, /* fourth subframe */
};
- extern const Word16 bitno_MR74[PRMNO_MR74] =
+ const Word16 bitno_MR74[PRMNO_MR74] =
{
8, 9, 9, /* LSP VQ */
8, 13, 4, 7, /* first subframe */
@@ -224,7 +224,7 @@
5, 13, 4, 7, /* fourth subframe */
};
- extern const Word16 bitno_MR795[PRMNO_MR795] =
+ const Word16 bitno_MR795[PRMNO_MR795] =
{
9, 9, 9, /* LSP VQ */
8, 13, 4, 4, 5, /* first subframe */
@@ -233,7 +233,7 @@
6, 13, 4, 4, 5, /* fourth subframe */
};
- extern const Word16 bitno_MR102[PRMNO_MR102] =
+ const Word16 bitno_MR102[PRMNO_MR102] =
{
8, 9, 9, /* LSP VQ */
8, 1, 1, 1, 1, 10, 10, 7, 7, /* first subframe */
@@ -242,7 +242,7 @@
5, 1, 1, 1, 1, 10, 10, 7, 7, /* fourth subframe */
};
- extern const Word16 bitno_MR122[PRMNO_MR122] =
+ const Word16 bitno_MR122[PRMNO_MR122] =
{
7, 8, 9, 8, 6, /* LSP VQ */
9, 4, 4, 4, 4, 4, 4, 3, 3, 3, 3, 3, 5, /* first subframe */
@@ -251,7 +251,7 @@
6, 4, 4, 4, 4, 4, 4, 3, 3, 3, 3, 3, 5 /* fourth subframe */
};
- extern const Word16 bitno_MRDTX[PRMNO_MRDTX] =
+ const Word16 bitno_MRDTX[PRMNO_MRDTX] =
{
3,
8, 9, 9,
@@ -259,7 +259,7 @@
};
/* overall table with all parameter sizes for all modes */
- extern const Word16 * const bitno[N_MODES] =
+ const Word16 * const bitno[N_MODES] =
{
bitno_MR475,
bitno_MR515,
diff --git a/media/libstagefright/codecs/amrnb/common/src/bitreorder_tab.cpp b/media/libstagefright/codecs/amrnb/common/src/bitreorder_tab.cpp
index 69b20fb..e284bbc 100644
--- a/media/libstagefright/codecs/amrnb/common/src/bitreorder_tab.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/bitreorder_tab.cpp
@@ -123,6 +123,7 @@
; INCLUDES
----------------------------------------------------------------------------*/
#include "typedef.h"
+#include "bitreorder_tab.h"
/*--------------------------------------------------------------------------*/
#ifdef __cplusplus
@@ -171,7 +172,7 @@
; Variable declaration - defined here and used outside this module
----------------------------------------------------------------------------*/
/* number of parameters per modes (values must be <= MAX_PRM_SIZE!) */
- extern const Word16 numOfBits[NUM_MODES] =
+ const Word16 numOfBits[NUM_MODES] =
{
NUMBIT_MR475,
NUMBIT_MR515,
@@ -191,7 +192,7 @@
NUMBIT_NO_DATA
};
- extern const Word16 reorderBits_MR475[NUMBIT_MR475] =
+ const Word16 reorderBits_MR475[NUMBIT_MR475] =
{
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 23, 24, 25, 26,
@@ -205,7 +206,7 @@
92, 31, 52, 65, 86
};
- extern const Word16 reorderBits_MR515[NUMBIT_MR515] =
+ const Word16 reorderBits_MR515[NUMBIT_MR515] =
{
7, 6, 5, 4, 3, 2, 1, 0, 15, 14,
13, 12, 11, 10, 9, 8, 23, 24, 25, 26,
@@ -220,7 +221,7 @@
53, 72, 91
};
- extern const Word16 reorderBits_MR59[NUMBIT_MR59] =
+ const Word16 reorderBits_MR59[NUMBIT_MR59] =
{
0, 1, 4, 5, 3, 6, 7, 2, 13, 15,
8, 9, 11, 12, 14, 10, 16, 28, 74, 29,
@@ -236,7 +237,7 @@
38, 59, 84, 105, 37, 58, 83, 104
};
- extern const Word16 reorderBits_MR67[NUMBIT_MR67] =
+ const Word16 reorderBits_MR67[NUMBIT_MR67] =
{
0, 1, 4, 3, 5, 6, 13, 7, 2, 8,
9, 11, 15, 12, 14, 10, 28, 82, 29, 83,
@@ -254,7 +255,7 @@
36, 61, 90, 115
};
- extern const Word16 reorderBits_MR74[NUMBIT_MR74] =
+ const Word16 reorderBits_MR74[NUMBIT_MR74] =
{
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 16, 26, 87, 27,
@@ -273,7 +274,7 @@
39, 68, 100, 129, 40, 69, 101, 130
};
- extern const Word16 reorderBits_MR795[NUMBIT_MR795] =
+ const Word16 reorderBits_MR795[NUMBIT_MR795] =
{
8, 7, 6, 5, 4, 3, 2, 14, 16, 9,
10, 12, 13, 15, 11, 17, 20, 22, 24, 23,
@@ -293,7 +294,7 @@
139, 37, 69, 103, 135, 38, 70, 104, 136
};
- extern const Word16 reorderBits_MR102[NUMBIT_MR102] =
+ const Word16 reorderBits_MR102[NUMBIT_MR102] =
{
7, 6, 5, 4, 3, 2, 1, 0, 16, 15,
14, 13, 12, 11, 10, 9, 8, 26, 27, 28,
@@ -318,7 +319,7 @@
63, 46, 55, 56
};
- extern const Word16 reorderBits_MR122[NUMBIT_MR122] =
+ const Word16 reorderBits_MR122[NUMBIT_MR122] =
{
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
10, 11, 12, 13, 14, 23, 15, 16, 17, 18,
@@ -348,7 +349,7 @@
};
/* overall table with all parameter sizes for all modes */
- extern const Word16 * const reorderBits[NUM_MODES-1] =
+ const Word16 * const reorderBits[NUM_MODES-1] =
{
reorderBits_MR475,
reorderBits_MR515,
@@ -361,7 +362,7 @@
};
/* Number of Frames (16-bit segments sent for each mode */
- extern const Word16 numCompressedBytes[16] =
+ const Word16 numCompressedBytes[16] =
{
13, /*4.75*/
14, /*5.15*/
diff --git a/media/libstagefright/codecs/amrnb/common/src/bytesused.cpp b/media/libstagefright/codecs/amrnb/common/src/bytesused.cpp
index 9552206..b61bac4 100644
--- a/media/libstagefright/codecs/amrnb/common/src/bytesused.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/bytesused.cpp
@@ -152,7 +152,7 @@
; LOCAL STORE/BUFFER/POINTER DEFINITIONS
; Variable declaration - defined here and used outside this module
----------------------------------------------------------------------------*/
- extern const short BytesUsed[16] =
+ const short BytesUsed[16] =
{
13, /* 4.75 */
14, /* 5.15 */
diff --git a/media/libstagefright/codecs/amrnb/common/src/c2_9pf_tab.cpp b/media/libstagefright/codecs/amrnb/common/src/c2_9pf_tab.cpp
index 471bee8..20de9d6 100644
--- a/media/libstagefright/codecs/amrnb/common/src/c2_9pf_tab.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/c2_9pf_tab.cpp
@@ -86,7 +86,8 @@
; LOCAL VARIABLE DEFINITIONS
; [Variable declaration - defined here and used outside this module]
----------------------------------------------------------------------------*/
- extern const Word16 startPos[2*4*2] = {0, 2, 0, 3,
+ extern const Word16 startPos[];
+ const Word16 startPos[2*4*2] = {0, 2, 0, 3,
0, 2, 0, 3,
1, 3, 2, 4,
1, 4, 1, 4
diff --git a/media/libstagefright/codecs/amrnb/common/src/gains_tbl.cpp b/media/libstagefright/codecs/amrnb/common/src/gains_tbl.cpp
index a08dd2d..a7cd6fb 100644
--- a/media/libstagefright/codecs/amrnb/common/src/gains_tbl.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/gains_tbl.cpp
@@ -86,14 +86,16 @@
----------------------------------------------------------------------------*/
- extern const Word16 qua_gain_pitch[NB_QUA_PITCH] =
+ extern const Word16 qua_gain_pitch[];
+ const Word16 qua_gain_pitch[NB_QUA_PITCH] =
{
0, 3277, 6556, 8192, 9830, 11469, 12288, 13107,
13926, 14746, 15565, 16384, 17203, 18022, 18842, 19661
};
- extern const Word16 qua_gain_code[(NB_QUA_CODE+1)*3] =
+ extern const Word16 qua_gain_code[];
+ const Word16 qua_gain_code[(NB_QUA_CODE+1)*3] =
{
/* gain factor (g_fac) and quantized energy error (qua_ener_MR122, qua_ener)
* are stored:
diff --git a/media/libstagefright/codecs/amrnb/common/src/gray_tbl.cpp b/media/libstagefright/codecs/amrnb/common/src/gray_tbl.cpp
index 99073d9..c4b2dbc 100644
--- a/media/libstagefright/codecs/amrnb/common/src/gray_tbl.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/gray_tbl.cpp
@@ -83,8 +83,10 @@
; [Variable declaration - defined here and used outside this module]
----------------------------------------------------------------------------*/
- extern const Word16 gray[8] = {0, 1, 3, 2, 6, 4, 5, 7};
- extern const Word16 dgray[8] = {0, 1, 3, 2, 5, 6, 4, 7};
+ extern const Word16 gray[];
+ extern const Word16 dgray[];
+ const Word16 gray[8] = {0, 1, 3, 2, 6, 4, 5, 7};
+ const Word16 dgray[8] = {0, 1, 3, 2, 5, 6, 4, 7};
/*--------------------------------------------------------------------------*/
#ifdef __cplusplus
diff --git a/media/libstagefright/codecs/amrnb/common/src/grid_tbl.cpp b/media/libstagefright/codecs/amrnb/common/src/grid_tbl.cpp
index cd81566..48566cc 100644
--- a/media/libstagefright/codecs/amrnb/common/src/grid_tbl.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/grid_tbl.cpp
@@ -63,6 +63,7 @@
; INCLUDES
----------------------------------------------------------------------------*/
#include "typedef.h"
+#include "az_lsp.h"
/*--------------------------------------------------------------------------*/
#ifdef __cplusplus
@@ -91,7 +92,7 @@
; LOCAL VARIABLE DEFINITIONS
; [Variable declaration - defined here and used outside this module]
----------------------------------------------------------------------------*/
- extern const Word16 grid[grid_points + 1] =
+ const Word16 grid[grid_points + 1] =
{
32760, 32723, 32588, 32364, 32051, 31651,
31164, 30591, 29935, 29196, 28377, 27481,
diff --git a/media/libstagefright/codecs/amrnb/common/src/inv_sqrt_tbl.cpp b/media/libstagefright/codecs/amrnb/common/src/inv_sqrt_tbl.cpp
index bde2c4e..13c3b24 100644
--- a/media/libstagefright/codecs/amrnb/common/src/inv_sqrt_tbl.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/inv_sqrt_tbl.cpp
@@ -55,6 +55,7 @@
; INCLUDES
----------------------------------------------------------------------------*/
#include "typedef.h"
+#include "inv_sqrt.h"
/*--------------------------------------------------------------------------*/
#ifdef __cplusplus
@@ -82,7 +83,7 @@
; LOCAL VARIABLE DEFINITIONS
; [Variable declaration - defined here and used outside this module]
----------------------------------------------------------------------------*/
- extern const Word16 inv_sqrt_tbl[49] =
+ const Word16 inv_sqrt_tbl[49] =
{
32767, 31790, 30894, 30070, 29309, 28602, 27945, 27330, 26755, 26214,
diff --git a/media/libstagefright/codecs/amrnb/common/src/log2_tbl.cpp b/media/libstagefright/codecs/amrnb/common/src/log2_tbl.cpp
index 25d63b2..9b9b099 100644
--- a/media/libstagefright/codecs/amrnb/common/src/log2_tbl.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/log2_tbl.cpp
@@ -54,6 +54,7 @@
; INCLUDES
----------------------------------------------------------------------------*/
#include "typedef.h"
+#include "log2_norm.h"
/*--------------------------------------------------------------------------*/
#ifdef __cplusplus
@@ -82,7 +83,7 @@
; [Variable declaration - defined here and used outside this module]
----------------------------------------------------------------------------*/
- extern const Word16 log2_tbl[33] =
+ const Word16 log2_tbl[33] =
{
0, 1455, 2866, 4236, 5568, 6863, 8124, 9352, 10549, 11716,
12855, 13967, 15054, 16117, 17156, 18172, 19167, 20142, 21097, 22033,
diff --git a/media/libstagefright/codecs/amrnb/common/src/lsp_lsf_tbl.cpp b/media/libstagefright/codecs/amrnb/common/src/lsp_lsf_tbl.cpp
index cee0f32..ddeeba4 100644
--- a/media/libstagefright/codecs/amrnb/common/src/lsp_lsf_tbl.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/lsp_lsf_tbl.cpp
@@ -77,7 +77,8 @@
; [Variable declaration - defined here and used outside this module]
----------------------------------------------------------------------------*/
- extern const Word16 table[65] =
+ extern const Word16 table[];
+ const Word16 table[65] =
{
32767, 32729, 32610, 32413, 32138, 31786, 31357, 30853,
30274, 29622, 28899, 28106, 27246, 26320, 25330, 24279,
@@ -94,7 +95,8 @@
/* slope used to compute y = acos(x) */
- extern const Word16 slope[64] =
+ extern const Word16 slope[];
+ const Word16 slope[64] =
{
-26887, -8812, -5323, -3813, -2979, -2444, -2081, -1811,
-1608, -1450, -1322, -1219, -1132, -1059, -998, -946,
diff --git a/media/libstagefright/codecs/amrnb/common/src/lsp_tab.cpp b/media/libstagefright/codecs/amrnb/common/src/lsp_tab.cpp
index deded93..0a32dd7 100644
--- a/media/libstagefright/codecs/amrnb/common/src/lsp_tab.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/lsp_tab.cpp
@@ -117,6 +117,7 @@
----------------------------------------------------------------------------*/
#include "typedef.h"
#include "cnst.h"
+#include "lsp_tab.h"
/*--------------------------------------------------------------------------*/
#ifdef __cplusplus
@@ -146,7 +147,7 @@
; LOCAL STORE/BUFFER/POINTER DEFINITIONS
; Variable declaration - defined here and used outside this module
----------------------------------------------------------------------------*/
- extern const Word16 lsp_init_data[M] = {30000, 26000, 21000, 15000, 8000,
+ const Word16 lsp_init_data[M] = {30000, 26000, 21000, 15000, 8000,
0, -8000, -15000, -21000, -26000
};
diff --git a/media/libstagefright/codecs/amrnb/common/src/overflow_tbl.cpp b/media/libstagefright/codecs/amrnb/common/src/overflow_tbl.cpp
index e5d42d6..c4a016d 100644
--- a/media/libstagefright/codecs/amrnb/common/src/overflow_tbl.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/overflow_tbl.cpp
@@ -81,7 +81,7 @@
; LOCAL VARIABLE DEFINITIONS
; [Variable declaration - defined here and used outside this module]
----------------------------------------------------------------------------*/
- extern const Word32 overflow_tbl [32] = {0x7fffffffL, 0x3fffffffL,
+ const Word32 overflow_tbl [32] = {0x7fffffffL, 0x3fffffffL,
0x1fffffffL, 0x0fffffffL,
0x07ffffffL, 0x03ffffffL,
0x01ffffffL, 0x00ffffffL,
diff --git a/media/libstagefright/codecs/amrnb/common/src/ph_disp_tab.cpp b/media/libstagefright/codecs/amrnb/common/src/ph_disp_tab.cpp
index 99725df..d568b78 100644
--- a/media/libstagefright/codecs/amrnb/common/src/ph_disp_tab.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/ph_disp_tab.cpp
@@ -81,14 +81,16 @@
; LOCAL VARIABLE DEFINITIONS
; [Variable declaration - defined here and used outside this module]
----------------------------------------------------------------------------*/
- extern const Word16 ph_imp_low_MR795[40] =
+ extern const Word16 ph_imp_low_MR795[];
+ const Word16 ph_imp_low_MR795[40] =
{
26777, 801, 2505, -683, -1382, 582, 604, -1274, 3511, -5894,
4534, -499, -1940, 3011, -5058, 5614, -1990, -1061, -1459, 4442,
-700, -5335, 4609, 452, -589, -3352, 2953, 1267, -1212, -2590,
1731, 3670, -4475, -975, 4391, -2537, 949, -1363, -979, 5734
};
- extern const Word16 ph_imp_mid_MR795[40] =
+ extern const Word16 ph_imp_mid_MR795[];
+ const Word16 ph_imp_mid_MR795[40] =
{
30274, 3831, -4036, 2972, -1048, -1002, 2477, -3043, 2815, -2231,
1753, -1611, 1714, -1775, 1543, -1008, 429, -169, 472, -1264,
@@ -96,14 +98,16 @@
-2063, 2644, -3060, 2897, -1978, 557, 780, -1369, 842, 655
};
- extern const Word16 ph_imp_low[40] =
+ extern const Word16 ph_imp_low[];
+ const Word16 ph_imp_low[40] =
{
14690, 11518, 1268, -2761, -5671, 7514, -35, -2807, -3040, 4823,
2952, -8424, 3785, 1455, 2179, -8637, 8051, -2103, -1454, 777,
1108, -2385, 2254, -363, -674, -2103, 6046, -5681, 1072, 3123,
-5058, 5312, -2329, -3728, 6924, -3889, 675, -1775, 29, 10145
};
- extern const Word16 ph_imp_mid[40] =
+ extern const Word16 ph_imp_mid[];
+ const Word16 ph_imp_mid[40] =
{
30274, 3831, -4036, 2972, -1048, -1002, 2477, -3043, 2815, -2231,
1753, -1611, 1714, -1775, 1543, -1008, 429, -169, 472, -1264,
diff --git a/media/libstagefright/codecs/amrnb/common/src/pow2_tbl.cpp b/media/libstagefright/codecs/amrnb/common/src/pow2_tbl.cpp
index e0183a6..902ea0f 100644
--- a/media/libstagefright/codecs/amrnb/common/src/pow2_tbl.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/pow2_tbl.cpp
@@ -53,6 +53,7 @@
; INCLUDES
----------------------------------------------------------------------------*/
#include "typedef.h"
+#include "pow2.h"
/*--------------------------------------------------------------------------*/
#ifdef __cplusplus
@@ -81,7 +82,7 @@
; [Variable declaration - defined here and used outside this module]
----------------------------------------------------------------------------*/
- extern const Word16 pow2_tbl[33] =
+ const Word16 pow2_tbl[33] =
{
16384, 16743, 17109, 17484, 17867, 18258, 18658, 19066, 19484, 19911,
20347, 20792, 21247, 21713, 22188, 22674, 23170, 23678, 24196, 24726,
diff --git a/media/libstagefright/codecs/amrnb/common/src/q_plsf_5_tbl.cpp b/media/libstagefright/codecs/amrnb/common/src/q_plsf_5_tbl.cpp
index ceb1e1e..caa81cb 100644
--- a/media/libstagefright/codecs/amrnb/common/src/q_plsf_5_tbl.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/q_plsf_5_tbl.cpp
@@ -56,6 +56,7 @@
; INCLUDES
----------------------------------------------------------------------------*/
#include "typedef.h"
+#include "q_plsf_5_tbl.h"
/*--------------------------------------------------------------------------*/
#ifdef __cplusplus
@@ -94,7 +95,7 @@
----------------------------------------------------------------------------*/
/* LSF means ->normalize frequency domain */
- extern const Word16 mean_lsf_5[10] =
+ const Word16 mean_lsf_5[10] =
{
1384,
2077,
@@ -108,7 +109,7 @@
13701
};
- extern const Word16 dico1_lsf_5[DICO1_5_SIZE * 4] =
+ const Word16 dico1_lsf_5[DICO1_5_SIZE * 4] =
{
-451, -1065, -529, -1305,
-450, -756, -497, -863,
@@ -240,7 +241,7 @@
1469, 2181, 1443, 2016
};
- extern const Word16 dico2_lsf_5[DICO2_5_SIZE * 4] =
+ const Word16 dico2_lsf_5[DICO2_5_SIZE * 4] =
{
-1631, -1600, -1796, -2290,
-1027, -1770, -1100, -2025,
@@ -500,7 +501,7 @@
2374, 2787, 1821, 2788
};
- extern const Word16 dico3_lsf_5[DICO3_5_SIZE * 4] =
+ const Word16 dico3_lsf_5[DICO3_5_SIZE * 4] =
{
-1812, -2275, -1879, -2537,
-1640, -1848, -1695, -2004,
@@ -760,7 +761,7 @@
2180, 1975, 2326, 2020
};
- extern const Word16 dico4_lsf_5[DICO4_5_SIZE * 4] =
+ const Word16 dico4_lsf_5[DICO4_5_SIZE * 4] =
{
-1857, -1681, -1857, -1755,
-2056, -1150, -2134, -1654,
@@ -1020,7 +1021,7 @@
1716, 1376, 1948, 1465
};
- extern const Word16 dico5_lsf_5[DICO5_5_SIZE * 4] =
+ const Word16 dico5_lsf_5[DICO5_5_SIZE * 4] =
{
-1002, -929, -1096, -1203,
-641, -931, -604, -961,
diff --git a/media/libstagefright/codecs/amrnb/common/src/qua_gain_tbl.cpp b/media/libstagefright/codecs/amrnb/common/src/qua_gain_tbl.cpp
index 52f77e9..2d913b8 100644
--- a/media/libstagefright/codecs/amrnb/common/src/qua_gain_tbl.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/qua_gain_tbl.cpp
@@ -54,6 +54,7 @@
----------------------------------------------------------------------------*/
#include "typedef.h"
#include "qua_gain.h"
+#include "qua_gain_tbl.h"
/*--------------------------------------------------------------------------*/
#ifdef __cplusplus
@@ -96,7 +97,7 @@
/* table used in 'high' rates: MR67 MR74 */
- extern const Word16 table_gain_highrates[VQ_SIZE_HIGHRATES*4] =
+ const Word16 table_gain_highrates[VQ_SIZE_HIGHRATES*4] =
{
/*
@@ -240,7 +241,7 @@
/* table used in 'low' rates: MR475, MR515, MR59 */
- extern const Word16 table_gain_lowrates[VQ_SIZE_LOWRATES*4] =
+ const Word16 table_gain_lowrates[VQ_SIZE_LOWRATES*4] =
{
/*g_pit, g_fac, qua_ener_MR122, qua_ener */
10813, 28753, 2879, 17333,
diff --git a/media/libstagefright/codecs/amrnb/common/src/sqrt_l_tbl.cpp b/media/libstagefright/codecs/amrnb/common/src/sqrt_l_tbl.cpp
index 5e9898c..5a84b63 100644
--- a/media/libstagefright/codecs/amrnb/common/src/sqrt_l_tbl.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/sqrt_l_tbl.cpp
@@ -58,6 +58,7 @@
; INCLUDES
----------------------------------------------------------------------------*/
#include "typedef.h"
+#include "sqrt_l.h"
/*--------------------------------------------------------------------------*/
#ifdef __cplusplus
@@ -85,7 +86,7 @@
; LOCAL VARIABLE DEFINITIONS
; [Variable declaration - defined here and used outside this module]
----------------------------------------------------------------------------*/
- extern const Word16 sqrt_l_tbl[50] =
+ const Word16 sqrt_l_tbl[50] =
{
16384, 16888, 17378, 17854, 18318, 18770, 19212, 19644, 20066, 20480,
20886, 21283, 21674, 22058, 22435, 22806, 23170, 23530, 23884, 24232,
diff --git a/media/libstagefright/codecs/amrnb/common/src/window_tab.cpp b/media/libstagefright/codecs/amrnb/common/src/window_tab.cpp
index fa5faa6..d8fc8cc 100644
--- a/media/libstagefright/codecs/amrnb/common/src/window_tab.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/window_tab.cpp
@@ -117,6 +117,7 @@
----------------------------------------------------------------------------*/
#include "typedef.h"
#include "cnst.h"
+#include "window_tab.h"
/*--------------------------------------------------------------------------*/
#ifdef __cplusplus
@@ -154,7 +155,7 @@
/* window for non-EFR modes; uses 40 samples lookahead */
- extern const Word16 window_200_40[L_WINDOW] =
+ const Word16 window_200_40[L_WINDOW] =
{
2621, 2623, 2629, 2638, 2651, 2668, 2689, 2713, 2741, 2772,
2808, 2847, 2890, 2936, 2986, 3040, 3097, 3158, 3223, 3291,
@@ -185,7 +186,7 @@
/* window for EFR, first two subframes, no lookahead */
- extern const Word16 window_160_80[L_WINDOW] =
+ const Word16 window_160_80[L_WINDOW] =
{
2621, 2624, 2633, 2648, 2668, 2695, 2727, 2765, 2809, 2859,
2915, 2976, 3043, 3116, 3194, 3279, 3368, 3464, 3565, 3671,
@@ -215,7 +216,7 @@
/* window for EFR, last two subframes, no lookahead */
- extern const Word16 window_232_8[L_WINDOW] =
+ const Word16 window_232_8[L_WINDOW] =
{
2621, 2623, 2627, 2634, 2644, 2656, 2671, 2689, 2710, 2734,
2760, 2789, 2821, 2855, 2893, 2933, 2975, 3021, 3069, 3120,
diff --git a/media/libstagefright/codecs/amrnb/dec/src/dec_input_format_tab.cpp b/media/libstagefright/codecs/amrnb/dec/src/dec_input_format_tab.cpp
index a59f5fa..fffbbfd 100644
--- a/media/libstagefright/codecs/amrnb/dec/src/dec_input_format_tab.cpp
+++ b/media/libstagefright/codecs/amrnb/dec/src/dec_input_format_tab.cpp
@@ -121,6 +121,7 @@
; INCLUDES
----------------------------------------------------------------------------*/
#include "typedef.h"
+#include "amrdecode.h"
/*--------------------------------------------------------------------------*/
#ifdef __cplusplus
@@ -152,7 +153,7 @@
----------------------------------------------------------------------------*/
/* Table containing the number of core AMR data bytes for */
/* each codec mode for WMF input format(number excludes frame type byte) */
- extern const Word16 WmfDecBytesPerFrame[16] =
+ const Word16 WmfDecBytesPerFrame[16] =
{
12, /* 4.75 */
13, /* 5.15 */
@@ -174,7 +175,7 @@
/* Table containing the number of core AMR data bytes for */
/* each codec mode for IF2 input format. */
- extern const Word16 If2DecBytesPerFrame[16] =
+ const Word16 If2DecBytesPerFrame[16] =
{
13, /* 4.75 */
14, /* 5.15 */
diff --git a/media/libstagefright/codecs/amrnb/dec/src/qgain475_tab.cpp b/media/libstagefright/codecs/amrnb/dec/src/qgain475_tab.cpp
index fbcd412..1a08efa 100644
--- a/media/libstagefright/codecs/amrnb/dec/src/qgain475_tab.cpp
+++ b/media/libstagefright/codecs/amrnb/dec/src/qgain475_tab.cpp
@@ -92,7 +92,7 @@
* g_fac(2) (Q12) // frame 1 and 3
*
*/
- extern const Word16 table_gain_MR475[MR475_VQ_SIZE*4] =
+ const Word16 table_gain_MR475[MR475_VQ_SIZE*4] =
{
/*g_pit(0), g_fac(0), g_pit(1), g_fac(1) */
812, 128, 542, 140,
diff --git a/media/libstagefright/codecs/amrnb/enc/src/corrwght_tab.cpp b/media/libstagefright/codecs/amrnb/enc/src/corrwght_tab.cpp
index 769e7ba..b3ed02d 100644
--- a/media/libstagefright/codecs/amrnb/enc/src/corrwght_tab.cpp
+++ b/media/libstagefright/codecs/amrnb/enc/src/corrwght_tab.cpp
@@ -57,6 +57,7 @@
; INCLUDES
----------------------------------------------------------------------------*/
#include "typedef.h"
+#include "p_ol_wgh.h"
/*--------------------------------------------------------------------------*/
#ifdef __cplusplus
@@ -84,7 +85,7 @@
; LOCAL VARIABLE DEFINITIONS
; [Variable declaration - defined here and used outside this module]
----------------------------------------------------------------------------*/
- extern const Word16 corrweight[251] =
+ const Word16 corrweight[251] =
{
20473, 20506, 20539, 20572, 20605, 20644, 20677,
20716, 20749, 20788, 20821, 20860, 20893, 20932,
diff --git a/media/libstagefright/codecs/amrnb/enc/src/enc_output_format_tab.cpp b/media/libstagefright/codecs/amrnb/enc/src/enc_output_format_tab.cpp
index 147989f..4551fd7 100644
--- a/media/libstagefright/codecs/amrnb/enc/src/enc_output_format_tab.cpp
+++ b/media/libstagefright/codecs/amrnb/enc/src/enc_output_format_tab.cpp
@@ -117,6 +117,7 @@
; INCLUDES
----------------------------------------------------------------------------*/
#include "typedef.h"
+#include "amrencode.h"
/*--------------------------------------------------------------------------*/
#ifdef __cplusplus
@@ -150,7 +151,7 @@
/* for WMF output format. */
/* Each entry is the sum of the 3GPP frame type byte and the */
/* number of packed core AMR data bytes */
- extern const Word16 WmfEncBytesPerFrame[16] =
+ const Word16 WmfEncBytesPerFrame[16] =
{
13, /* 4.75 */
14, /* 5.15 */
@@ -173,7 +174,7 @@
/* Number of data bytes in an encoder frame for each codec mode */
/* for IF2 output format */
- extern const Word16 If2EncBytesPerFrame[16] =
+ const Word16 If2EncBytesPerFrame[16] =
{
13, /* 4.75 */
14, /* 5.15 */
diff --git a/media/libstagefright/codecs/amrnb/enc/src/inter_36_tab.cpp b/media/libstagefright/codecs/amrnb/enc/src/inter_36_tab.cpp
index 27f33e9..c8d7b13 100644
--- a/media/libstagefright/codecs/amrnb/enc/src/inter_36_tab.cpp
+++ b/media/libstagefright/codecs/amrnb/enc/src/inter_36_tab.cpp
@@ -123,6 +123,7 @@
----------------------------------------------------------------------------*/
#include "typedef.h"
#include "cnst.h"
+#include "inter_36_tab.h"
/*--------------------------------------------------------------------------*/
#ifdef __cplusplus
@@ -162,7 +163,7 @@
inter_3[k] = inter_6[2*k], 0 <= k <= 3*L_INTER_SRCH
*/
- extern const Word16 inter_6[FIR_SIZE] =
+ const Word16 inter_6[FIR_SIZE] =
{
29519,
28316, 24906, 19838, 13896, 7945, 2755,
diff --git a/media/libstagefright/codecs/amrnb/enc/src/lag_wind_tab.cpp b/media/libstagefright/codecs/amrnb/enc/src/lag_wind_tab.cpp
index 53889bb..b0f5b3a 100644
--- a/media/libstagefright/codecs/amrnb/enc/src/lag_wind_tab.cpp
+++ b/media/libstagefright/codecs/amrnb/enc/src/lag_wind_tab.cpp
@@ -138,6 +138,7 @@
; INCLUDES
----------------------------------------------------------------------------*/
#include "typedef.h"
+#include "lag_wind_tab.h"
/*--------------------------------------------------------------------------*/
#ifdef __cplusplus
@@ -167,7 +168,7 @@
; LOCAL STORE/BUFFER/POINTER DEFINITIONS
; Variable declaration - defined here and used outside this module
----------------------------------------------------------------------------*/
- extern const Word16 lag_h[10] =
+ const Word16 lag_h[10] =
{
32728,
32619,
@@ -181,7 +182,7 @@
29321
};
- extern const Word16 lag_l[10] =
+ const Word16 lag_l[10] =
{
11904,
17280,
diff --git a/media/libstagefright/codecs/amrnb/enc/src/set_sign.cpp b/media/libstagefright/codecs/amrnb/enc/src/set_sign.cpp
index dedf91a..d626de3 100644
--- a/media/libstagefright/codecs/amrnb/enc/src/set_sign.cpp
+++ b/media/libstagefright/codecs/amrnb/enc/src/set_sign.cpp
@@ -552,10 +552,10 @@
else
{
*(p_sign--) = -32767; /* sign = -1 */
- cor = - (cor);
+ cor = negate(cor);
/* modify dn[] according to the fixed sign */
- dn[i] = - val;
+ dn[i] = negate(val);
}
*(p_en--) = cor;
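
The switch from unary minus to negate() only matters at one input value: a Word16 holding -32768 has no positive counterpart in 16 bits, so plain negation wraps back to -32768 after truncation, while the ETSI basic op saturates. A sketch of the arithmetic (illustrative name; the real basic-op implementation also maintains the overflow flag):

    typedef short Word16;                        // assumption: 16-bit

    static Word16 negate_sketch(Word16 var1)
    {
        // -(-32768) does not fit in a Word16; saturate to +32767 instead of
        // letting the result silently wrap back to -32768.
        return (var1 == -32768) ? 32767 : (Word16)(-var1);
    }
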
diff --git a/media/libstagefright/codecs/amrnb/enc/src/ton_stab.cpp b/media/libstagefright/codecs/amrnb/enc/src/ton_stab.cpp
index 3c4494d..455a510 100644
--- a/media/libstagefright/codecs/amrnb/enc/src/ton_stab.cpp
+++ b/media/libstagefright/codecs/amrnb/enc/src/ton_stab.cpp
@@ -791,7 +791,8 @@
)
{
OSCL_UNUSED_ARG(pOverflow);
- for (int i = 0; i < N_FRAME - 1; i++)
+ int i;
+ for (i = 0; i < N_FRAME - 1; i++)
{
st->gp[i] = st->gp[i+1];
}
diff --git a/media/libstagefright/codecs/amrwb/include/pvamrwbdecoder_api.h b/media/libstagefright/codecs/amrwb/include/pvamrwbdecoder_api.h
index 457c21f..eca5ae0 100644
--- a/media/libstagefright/codecs/amrwb/include/pvamrwbdecoder_api.h
+++ b/media/libstagefright/codecs/amrwb/include/pvamrwbdecoder_api.h
@@ -106,7 +106,7 @@
#define NUM_OF_MODES 10
- const int16 AMR_WB_COMPRESSED[NUM_OF_MODES] =
+ static const int16 AMR_WB_COMPRESSED[NUM_OF_MODES] =
{
NBBITS_7k,
NBBITS_9k,
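
pvamrwbdecoder_api.h defines this table directly in a public header, so every file that includes it gets a copy. Marking it static makes the per-file internal linkage explicit and, if the header is ever included from C code (where a file-scope const has external linkage), avoids duplicate-symbol link errors. Tiny sketch with invented names:

    typedef short int16;                         // assumption: matches the codec typedefs

    // In a header included from several .c/.cpp files:
    static const int16 kExample[2] = {1, 2};     // one private copy per including file;
                                                 // no duplicate-symbol risk from C callers
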
diff --git a/media/libstagefright/codecs/amrwb/src/get_amr_wb_bits.cpp b/media/libstagefright/codecs/amrwb/src/get_amr_wb_bits.cpp
index d7287f3..b325e8f 100644
--- a/media/libstagefright/codecs/amrwb/src/get_amr_wb_bits.cpp
+++ b/media/libstagefright/codecs/amrwb/src/get_amr_wb_bits.cpp
@@ -119,8 +119,9 @@
)
{
int16 value = 0;
+ int16 i;
- for (int16 i = no_of_bits >> 1; i != 0; i--)
+ for (i = no_of_bits >> 1; i != 0; i--)
{
value <<= 2;
diff --git a/media/libstagefright/codecs/amrwb/src/homing_amr_wb_dec.cpp b/media/libstagefright/codecs/amrwb/src/homing_amr_wb_dec.cpp
index 59c6c0a..f032a08 100644
--- a/media/libstagefright/codecs/amrwb/src/homing_amr_wb_dec.cpp
+++ b/media/libstagefright/codecs/amrwb/src/homing_amr_wb_dec.cpp
@@ -134,7 +134,7 @@
; LOCAL STORE/BUFFER/POINTER DEFINITIONS
; Variable declaration - defined here and used outside this module
----------------------------------------------------------------------------*/
-const int16 prmnofsf[NUM_OF_SPMODES] =
+static const int16 prmnofsf[NUM_OF_SPMODES] =
{
63, 81, 100,
108, 116, 128,
@@ -142,21 +142,21 @@
};
-const int16 dfh_M7k[PRMN_7k] =
+static const int16 dfh_M7k[PRMN_7k] =
{
3168, 29954, 29213, 16121,
64, 13440, 30624, 16430,
19008
};
-const int16 dfh_M9k[PRMN_9k] =
+static const int16 dfh_M9k[PRMN_9k] =
{
3168, 31665, 9943, 9123,
15599, 4358, 20248, 2048,
17040, 27787, 16816, 13888
};
-const int16 dfh_M12k[PRMN_12k] =
+static const int16 dfh_M12k[PRMN_12k] =
{
3168, 31665, 9943, 9128,
3647, 8129, 30930, 27926,
@@ -165,7 +165,7 @@
13948
};
-const int16 dfh_M14k[PRMN_14k] =
+static const int16 dfh_M14k[PRMN_14k] =
{
3168, 31665, 9943, 9131,
24815, 655, 26616, 26764,
@@ -174,7 +174,7 @@
221, 20321, 17823
};
-const int16 dfh_M16k[PRMN_16k] =
+static const int16 dfh_M16k[PRMN_16k] =
{
3168, 31665, 9943, 9131,
24815, 700, 3824, 7271,
@@ -184,7 +184,7 @@
6759, 24576
};
-const int16 dfh_M18k[PRMN_18k] =
+static const int16 dfh_M18k[PRMN_18k] =
{
3168, 31665, 9943, 9135,
14787, 14423, 30477, 24927,
@@ -195,7 +195,7 @@
0
};
-const int16 dfh_M20k[PRMN_20k] =
+static const int16 dfh_M20k[PRMN_20k] =
{
3168, 31665, 9943, 9129,
8637, 31807, 24646, 736,
@@ -206,7 +206,7 @@
30249, 29123, 0
};
-const int16 dfh_M23k[PRMN_23k] =
+static const int16 dfh_M23k[PRMN_23k] =
{
3168, 31665, 9943, 9132,
16748, 3202, 28179, 16317,
@@ -218,7 +218,7 @@
23392, 26053, 31216
};
-const int16 dfh_M24k[PRMN_24k] =
+static const int16 dfh_M24k[PRMN_24k] =
{
3168, 31665, 9943, 9134,
24776, 5857, 18475, 28535,
diff --git a/media/libstagefright/codecs/amrwb/src/isp_isf.cpp b/media/libstagefright/codecs/amrwb/src/isp_isf.cpp
index 41db7e3..0552733 100644
--- a/media/libstagefright/codecs/amrwb/src/isp_isf.cpp
+++ b/media/libstagefright/codecs/amrwb/src/isp_isf.cpp
@@ -108,7 +108,7 @@
/* table of cos(x) in Q15 */
-const int16 table[129] =
+static const int16 table[129] =
{
32767,
32758, 32729, 32679, 32610, 32522, 32413, 32286, 32138,
diff --git a/media/libstagefright/codecs/amrwb/src/oversamp_12k8_to_16k.cpp b/media/libstagefright/codecs/amrwb/src/oversamp_12k8_to_16k.cpp
index 143c26e..806851e 100644
--- a/media/libstagefright/codecs/amrwb/src/oversamp_12k8_to_16k.cpp
+++ b/media/libstagefright/codecs/amrwb/src/oversamp_12k8_to_16k.cpp
@@ -240,11 +240,11 @@
{
int32 i;
- int16 frac;
+ int16 frac, j;
int16 * pt_sig_u = sig_u;
frac = 1;
- for (int16 j = 0; j < L_frame; j++)
+ for (j = 0; j < L_frame; j++)
{
i = ((int32)j * INV_FAC5) >> 13; /* integer part = pos * 1/5 */
@@ -337,6 +337,6 @@
L_sum = shl_int32(L_sum, 2); /* saturation can occur here */
- return ((int16(L_sum >> 16)));
+ return ((int16)(L_sum >> 16));
}
diff --git a/media/libstagefright/codecs/amrwb/src/phase_dispersion.cpp b/media/libstagefright/codecs/amrwb/src/phase_dispersion.cpp
index f90a534..7b08a40 100644
--- a/media/libstagefright/codecs/amrwb/src/phase_dispersion.cpp
+++ b/media/libstagefright/codecs/amrwb/src/phase_dispersion.cpp
@@ -109,7 +109,7 @@
/* impulse response with phase dispersion */
/* 2.0 - 6.4 kHz phase dispersion */
-const int16 ph_imp_low[L_SUBFR] =
+static const int16 ph_imp_low[L_SUBFR] =
{
20182, 9693, 3270, -3437, 2864, -5240, 1589, -1357,
600, 3893, -1497, -698, 1203, -5249, 1199, 5371,
@@ -122,7 +122,7 @@
};
/* 3.2 - 6.4 kHz phase dispersion */
-const int16 ph_imp_mid[L_SUBFR] =
+static const int16 ph_imp_mid[L_SUBFR] =
{
24098, 10460, -5263, -763, 2048, -927, 1753, -3323,
2212, 652, -2146, 2487, -3539, 4109, -2107, -374,
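
The static qualifiers added in homing_amr_wb_dec.cpp, isp_isf.cpp and phase_dispersion.cpp make the internal linkage of these per-file tables explicit. That is useful here because the narrowband library exports identically named tables with external linkage (table, ph_imp_low and ph_imp_mid appear in the amrnb hunks above), and both codecs can end up linked into one process; with internal linkage each file keeps its own private copy. Minimal sketch with invented contents:

    static const short table[3] = {1, 2, 3};   // internal linkage: private to this file;
                                               // it cannot collide with the identically
                                               // named table another library exports
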
diff --git a/media/libstagefright/codecs/amrwbenc/inc/isp_isf.tab b/media/libstagefright/codecs/amrwbenc/inc/isp_isf.tab
index 97c3b68..865eea0 100644
--- a/media/libstagefright/codecs/amrwbenc/inc/isp_isf.tab
+++ b/media/libstagefright/codecs/amrwbenc/inc/isp_isf.tab
@@ -21,7 +21,7 @@
/* table of cos(x) in Q15 */
-const static Word16 table[129] = {
+static const Word16 table[129] = {
32767,
32758, 32729, 32679, 32610, 32522, 32413, 32286, 32138,
31972, 31786, 31581, 31357, 31114, 30853, 30572, 30274,
@@ -42,7 +42,7 @@
/* slope in Q11 used to compute y = acos(x) */
-const static Word16 slope[128] = {
+static const Word16 slope[128] = {
-26214, -9039, -5243, -3799, -2979, -2405, -2064, -1771,
-1579, -1409, -1279, -1170, -1079, -1004, -933, -880,
-827, -783, -743, -708, -676, -647, -621, -599,
diff --git a/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c b/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c
index 0f4d689..ea9da52 100644
--- a/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c
+++ b/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c
@@ -1702,7 +1702,7 @@
gData = (Coder_State *)hCodec;
stream = gData->stream;
- if(NULL == pInput || NULL == pInput->Buffer || 0 > pInput->Length)
+ if(NULL == pInput || NULL == pInput->Buffer)
{
return VO_ERR_INVALID_ARG;
}
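
The dropped test only makes sense if pInput->Length is signed; in the VisualOn API it appears to be an unsigned VO_U32 (an assumption here), in which case 0 > Length can never be true and only draws a tautological-comparison warning. Two-line illustration of why the branch was dead:

    unsigned int length = 0u;       // stand-in for pInput->Length, assumed unsigned
    if (0 > length) { /* never taken: an unsigned value is always >= 0 */ }
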
diff --git a/media/libstagefright/colorconversion/SoftwareRenderer.cpp b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
index e892f92..059d6b9 100644
--- a/media/libstagefright/colorconversion/SoftwareRenderer.cpp
+++ b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
@@ -19,12 +19,9 @@
#include "../include/SoftwareRenderer.h"
-#include <binder/MemoryHeapBase.h>
-#include <binder/MemoryHeapPmem.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MetaData.h>
-#include <surfaceflinger/Surface.h>
-#include <ui/android_native_buffer.h>
+#include <system/window.h>
#include <ui/GraphicBufferMapper.h>
#include <gui/ISurfaceTexture.h>
diff --git a/media/libstagefright/foundation/AMessage.cpp b/media/libstagefright/foundation/AMessage.cpp
index 0a6776e..9a00186 100644
--- a/media/libstagefright/foundation/AMessage.cpp
+++ b/media/libstagefright/foundation/AMessage.cpp
@@ -19,6 +19,7 @@
#include <ctype.h>
#include "AAtomizer.h"
+#include "ABuffer.h"
#include "ADebug.h"
#include "ALooperRoster.h"
#include "AString.h"
@@ -157,14 +158,23 @@
item->u.stringValue = new AString(s, len < 0 ? strlen(s) : len);
}
-void AMessage::setObject(const char *name, const sp<RefBase> &obj) {
+void AMessage::setObjectInternal(
+ const char *name, const sp<RefBase> &obj, Type type) {
Item *item = allocateItem(name);
- item->mType = kTypeObject;
+ item->mType = type;
if (obj != NULL) { obj->incStrong(this); }
item->u.refValue = obj.get();
}
+void AMessage::setObject(const char *name, const sp<RefBase> &obj) {
+ setObjectInternal(name, obj, kTypeObject);
+}
+
+void AMessage::setBuffer(const char *name, const sp<ABuffer> &buffer) {
+ setObjectInternal(name, sp<RefBase>(buffer), kTypeBuffer);
+}
+
void AMessage::setMessage(const char *name, const sp<AMessage> &obj) {
Item *item = allocateItem(name);
item->mType = kTypeMessage;
@@ -203,6 +213,15 @@
return false;
}
+bool AMessage::findBuffer(const char *name, sp<ABuffer> *buf) const {
+ const Item *item = findItem(name, kTypeBuffer);
+ if (item) {
+ *buf = (ABuffer *)(item->u.refValue);
+ return true;
+ }
+ return false;
+}
+
bool AMessage::findMessage(const char *name, sp<AMessage> *obj) const {
const Item *item = findItem(name, kTypeMessage);
if (item) {
@@ -542,4 +561,20 @@
}
}
+size_t AMessage::countEntries() const {
+ return mNumItems;
+}
+
+const char *AMessage::getEntryNameAt(size_t index, Type *type) const {
+ if (index >= mNumItems) {
+ *type = kTypeInt32;
+
+ return NULL;
+ }
+
+ *type = mItems[index].mType;
+
+ return mItems[index].mName;
+}
+
} // namespace android
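
The new setBuffer()/findBuffer() pair is a typed convenience over the existing RefBase plumbing: the sender no longer upcasts an ABuffer to RefBase and the receiver no longer needs findObject() plus a static_cast, as the RTSP hunks below show. countEntries()/getEntryNameAt() additionally let generic code walk a message's fields. Usage sketch (message name invented, AHandler context assumed):

    // sender side
    sp<ABuffer> accessUnit = new ABuffer(1024);
    sp<AMessage> msg = new AMessage(kWhatAccessUnit, id());   // kWhatAccessUnit: illustrative
    msg->setBuffer("access-unit", accessUnit);
    msg->post();

    // receiver side, inside onMessageReceived()
    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("access-unit", &buffer));           // replaces findObject + static_cast
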
diff --git a/media/libstagefright/include/SoftwareRenderer.h b/media/libstagefright/include/SoftwareRenderer.h
index 8f2ea95..7ab0042 100644
--- a/media/libstagefright/include/SoftwareRenderer.h
+++ b/media/libstagefright/include/SoftwareRenderer.h
@@ -20,7 +20,7 @@
#include <media/stagefright/ColorConverter.h>
#include <utils/RefBase.h>
-#include <ui/android_native_buffer.h>
+#include <system/window.h>
namespace android {
diff --git a/media/libstagefright/rtsp/AAMRAssembler.cpp b/media/libstagefright/rtsp/AAMRAssembler.cpp
index 9d72b1f..fb8abc5 100644
--- a/media/libstagefright/rtsp/AAMRAssembler.cpp
+++ b/media/libstagefright/rtsp/AAMRAssembler.cpp
@@ -211,7 +211,7 @@
}
sp<AMessage> msg = mNotifyMsg->dup();
- msg->setObject("access-unit", accessUnit);
+ msg->setBuffer("access-unit", accessUnit);
msg->post();
queue->erase(queue->begin());
diff --git a/media/libstagefright/rtsp/AAVCAssembler.cpp b/media/libstagefright/rtsp/AAVCAssembler.cpp
index ed8b1df..7ea132e 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AAVCAssembler.cpp
@@ -345,7 +345,7 @@
mAccessUnitDamaged = false;
sp<AMessage> msg = mNotifyMsg->dup();
- msg->setObject("access-unit", accessUnit);
+ msg->setBuffer("access-unit", accessUnit);
msg->post();
}
diff --git a/media/libstagefright/rtsp/AH263Assembler.cpp b/media/libstagefright/rtsp/AH263Assembler.cpp
index 498295c..ded70fa 100644
--- a/media/libstagefright/rtsp/AH263Assembler.cpp
+++ b/media/libstagefright/rtsp/AH263Assembler.cpp
@@ -166,7 +166,7 @@
mAccessUnitDamaged = false;
sp<AMessage> msg = mNotifyMsg->dup();
- msg->setObject("access-unit", accessUnit);
+ msg->setBuffer("access-unit", accessUnit);
msg->post();
}
diff --git a/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp b/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp
index b0c7007..24c2f30 100644
--- a/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp
+++ b/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp
@@ -571,7 +571,7 @@
mAccessUnitDamaged = false;
sp<AMessage> msg = mNotifyMsg->dup();
- msg->setObject("access-unit", accessUnit);
+ msg->setBuffer("access-unit", accessUnit);
msg->post();
}
diff --git a/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp b/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp
index 2f2e2c2..687d72b 100644
--- a/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp
+++ b/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp
@@ -368,7 +368,7 @@
mAccessUnitDamaged = false;
sp<AMessage> msg = mNotifyMsg->dup();
- msg->setObject("access-unit", accessUnit);
+ msg->setBuffer("access-unit", accessUnit);
msg->post();
}
diff --git a/media/libstagefright/rtsp/ARTPConnection.cpp b/media/libstagefright/rtsp/ARTPConnection.cpp
index 8c9dd8d..44988a3 100644
--- a/media/libstagefright/rtsp/ARTPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTPConnection.cpp
@@ -639,7 +639,7 @@
void ARTPConnection::injectPacket(int index, const sp<ABuffer> &buffer) {
sp<AMessage> msg = new AMessage(kWhatInjectPacket, id());
msg->setInt32("index", index);
- msg->setObject("buffer", buffer);
+ msg->setBuffer("buffer", buffer);
msg->post();
}
@@ -647,10 +647,8 @@
int32_t index;
CHECK(msg->findInt32("index", &index));
- sp<RefBase> obj;
- CHECK(msg->findObject("buffer", &obj));
-
- sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());
+ sp<ABuffer> buffer;
+ CHECK(msg->findBuffer("buffer", &buffer));
List<StreamInfo>::iterator it = mStreams.begin();
while (it != mStreams.end()
diff --git a/media/libstagefright/rtsp/ARTPSession.cpp b/media/libstagefright/rtsp/ARTPSession.cpp
index 7a05b88..ba4e33c 100644
--- a/media/libstagefright/rtsp/ARTPSession.cpp
+++ b/media/libstagefright/rtsp/ARTPSession.cpp
@@ -145,10 +145,8 @@
break;
}
- sp<RefBase> obj;
- CHECK(msg->findObject("access-unit", &obj));
-
- sp<ABuffer> accessUnit = static_cast<ABuffer *>(obj.get());
+ sp<ABuffer> accessUnit;
+ CHECK(msg->findBuffer("access-unit", &accessUnit));
uint64_t ntpTime;
CHECK(accessUnit->meta()->findInt64(
diff --git a/media/libstagefright/rtsp/ARTSPConnection.cpp b/media/libstagefright/rtsp/ARTSPConnection.cpp
index 80a010e..539a888 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTSPConnection.cpp
@@ -612,7 +612,7 @@
if (mObserveBinaryMessage != NULL) {
sp<AMessage> notify = mObserveBinaryMessage->dup();
- notify->setObject("buffer", buffer);
+ notify->setBuffer("buffer", buffer);
notify->post();
} else {
ALOGW("received binary data, but no one cares.");
diff --git a/media/libstagefright/rtsp/ARawAudioAssembler.cpp b/media/libstagefright/rtsp/ARawAudioAssembler.cpp
index 98bee82..0da5dd2 100644
--- a/media/libstagefright/rtsp/ARawAudioAssembler.cpp
+++ b/media/libstagefright/rtsp/ARawAudioAssembler.cpp
@@ -94,7 +94,7 @@
}
sp<AMessage> msg = mNotifyMsg->dup();
- msg->setObject("access-unit", buffer);
+ msg->setBuffer("access-unit", buffer);
msg->post();
queue->erase(queue->begin());
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index 9a7dd70..deee30f 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -857,10 +857,8 @@
return;
}
- sp<RefBase> obj;
- CHECK(msg->findObject("access-unit", &obj));
-
- sp<ABuffer> accessUnit = static_cast<ABuffer *>(obj.get());
+ sp<ABuffer> accessUnit;
+ CHECK(msg->findBuffer("access-unit", &accessUnit));
uint32_t seqNum = (uint32_t)accessUnit->int32Data();
@@ -1005,9 +1003,8 @@
case 'biny':
{
- sp<RefBase> obj;
- CHECK(msg->findObject("buffer", &obj));
- sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());
+ sp<ABuffer> buffer;
+ CHECK(msg->findBuffer("buffer", &buffer));
int32_t index;
CHECK(buffer->meta()->findInt32("index", &index));
@@ -1488,7 +1485,7 @@
sp<AMessage> msg = mNotify->dup();
msg->setInt32("what", kWhatAccessUnit);
msg->setSize("trackIndex", trackIndex);
- msg->setObject("accessUnit", accessUnit);
+ msg->setBuffer("accessUnit", accessUnit);
msg->post();
}
diff --git a/media/libstagefright/tests/SurfaceMediaSource_test.cpp b/media/libstagefright/tests/SurfaceMediaSource_test.cpp
index d7cec04..3dcd9fc 100644
--- a/media/libstagefright/tests/SurfaceMediaSource_test.cpp
+++ b/media/libstagefright/tests/SurfaceMediaSource_test.cpp
@@ -26,11 +26,11 @@
#include <media/stagefright/SurfaceMediaSource.h>
#include <media/mediarecorder.h>
-#include <gui/SurfaceTextureClient.h>
#include <ui/GraphicBuffer.h>
-#include <surfaceflinger/ISurfaceComposer.h>
-#include <surfaceflinger/Surface.h>
-#include <surfaceflinger/SurfaceComposerClient.h>
+#include <gui/SurfaceTextureClient.h>
+#include <gui/ISurfaceComposer.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
#include <binder/ProcessState.h>
#include <ui/FramebufferNativeWindow.h>
diff --git a/media/libstagefright/timedtext/Android.mk b/media/libstagefright/timedtext/Android.mk
index 8b23dee..d2d5f7b 100644
--- a/media/libstagefright/timedtext/Android.mk
+++ b/media/libstagefright/timedtext/Android.mk
@@ -4,7 +4,7 @@
LOCAL_SRC_FILES:= \
TextDescriptions.cpp \
TimedTextDriver.cpp \
- TimedTextInBandSource.cpp \
+ TimedText3GPPSource.cpp \
TimedTextSource.cpp \
TimedTextSRTSource.cpp \
TimedTextPlayer.cpp
@@ -12,8 +12,8 @@
LOCAL_CFLAGS += -Wno-multichar
LOCAL_C_INCLUDES:= \
$(JNI_H_INCLUDE) \
- $(TOP)/frameworks/base/media/libstagefright \
- $(TOP)/frameworks/base/include/media/stagefright/openmax
+ $(TOP)/frameworks/base/include/media/stagefright/timedtext \
+ $(TOP)/frameworks/base/media/libstagefright
LOCAL_MODULE:= libstagefright_timedtext
diff --git a/media/libstagefright/timedtext/TimedTextInBandSource.cpp b/media/libstagefright/timedtext/TimedText3GPPSource.cpp
similarity index 63%
rename from media/libstagefright/timedtext/TimedTextInBandSource.cpp
rename to media/libstagefright/timedtext/TimedText3GPPSource.cpp
index afb73fb..4a3bfd3 100644
--- a/media/libstagefright/timedtext/TimedTextInBandSource.cpp
+++ b/media/libstagefright/timedtext/TimedText3GPPSource.cpp
@@ -15,7 +15,7 @@
*/
//#define LOG_NDEBUG 0
-#define LOG_TAG "TimedTextInBandSource"
+#define LOG_TAG "TimedText3GPPSource"
#include <utils/Log.h>
#include <binder/Parcel.h>
@@ -26,19 +26,19 @@
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
-#include "TimedTextInBandSource.h"
+#include "TimedText3GPPSource.h"
#include "TextDescriptions.h"
namespace android {
-TimedTextInBandSource::TimedTextInBandSource(const sp<MediaSource>& mediaSource)
+TimedText3GPPSource::TimedText3GPPSource(const sp<MediaSource>& mediaSource)
: mSource(mediaSource) {
}
-TimedTextInBandSource::~TimedTextInBandSource() {
+TimedText3GPPSource::~TimedText3GPPSource() {
}
-status_t TimedTextInBandSource::read(
+status_t TimedText3GPPSource::read(
int64_t *timeUs, Parcel *parcel, const MediaSource::ReadOptions *options) {
MediaBuffer *textBuffer = NULL;
status_t err = mSource->read(&textBuffer, options);
@@ -60,7 +60,7 @@
// text style for the string of text. These descriptions are present only
// if they are needed. This method is used to extract the modifier
// description and append it at the end of the text.
-status_t TimedTextInBandSource::extractAndAppendLocalDescriptions(
+status_t TimedText3GPPSource::extractAndAppendLocalDescriptions(
int64_t timeUs, const MediaBuffer *textBuffer, Parcel *parcel) {
const void *data;
size_t size = 0;
@@ -68,51 +68,46 @@
const char *mime;
CHECK(mSource->getFormat()->findCString(kKeyMIMEType, &mime));
+ CHECK(strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP) == 0);
- if (strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP) == 0) {
- data = textBuffer->data();
- size = textBuffer->size();
+ data = textBuffer->data();
+ size = textBuffer->size();
- if (size > 0) {
- parcel->freeData();
- flag |= TextDescriptions::IN_BAND_TEXT_3GPP;
- return TextDescriptions::getParcelOfDescriptions(
- (const uint8_t *)data, size, flag, timeUs / 1000, parcel);
- }
- return OK;
+ if (size > 0) {
+ parcel->freeData();
+ flag |= TextDescriptions::IN_BAND_TEXT_3GPP;
+ return TextDescriptions::getParcelOfDescriptions(
+ (const uint8_t *)data, size, flag, timeUs / 1000, parcel);
}
- return ERROR_UNSUPPORTED;
+ return OK;
}
// To extract and send the global text descriptions for all the text samples
// in the text track or text file.
// TODO: send error message to application via notifyListener()...?
-status_t TimedTextInBandSource::extractGlobalDescriptions(Parcel *parcel) {
+status_t TimedText3GPPSource::extractGlobalDescriptions(Parcel *parcel) {
const void *data;
size_t size = 0;
int32_t flag = TextDescriptions::GLOBAL_DESCRIPTIONS;
const char *mime;
CHECK(mSource->getFormat()->findCString(kKeyMIMEType, &mime));
+ CHECK(strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP) == 0);
- // support 3GPP only for now
- if (strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP) == 0) {
- uint32_t type;
- // get the 'tx3g' box content. This box contains the text descriptions
- // used to render the text track
- if (!mSource->getFormat()->findData(
- kKeyTextFormatData, &type, &data, &size)) {
- return ERROR_MALFORMED;
- }
-
- if (size > 0) {
- flag |= TextDescriptions::IN_BAND_TEXT_3GPP;
- return TextDescriptions::getParcelOfDescriptions(
- (const uint8_t *)data, size, flag, 0, parcel);
- }
- return OK;
+ uint32_t type;
+ // get the 'tx3g' box content. This box contains the text descriptions
+ // used to render the text track
+ if (!mSource->getFormat()->findData(
+ kKeyTextFormatData, &type, &data, &size)) {
+ return ERROR_MALFORMED;
}
- return ERROR_UNSUPPORTED;
+
+ if (size > 0) {
+ flag |= TextDescriptions::IN_BAND_TEXT_3GPP;
+ return TextDescriptions::getParcelOfDescriptions(
+ (const uint8_t *)data, size, flag, 0, parcel);
+ }
+ return OK;
}
} // namespace android
diff --git a/media/libstagefright/timedtext/TimedTextInBandSource.h b/media/libstagefright/timedtext/TimedText3GPPSource.h
similarity index 80%
rename from media/libstagefright/timedtext/TimedTextInBandSource.h
rename to media/libstagefright/timedtext/TimedText3GPPSource.h
index 26e5737..cb7e47c 100644
--- a/media/libstagefright/timedtext/TimedTextInBandSource.h
+++ b/media/libstagefright/timedtext/TimedText3GPPSource.h
@@ -14,8 +14,8 @@
* limitations under the License.
*/
-#ifndef TIMED_TEXT_IN_BAND_SOURCE_H_
-#define TIMED_TEXT_IN_BAND_SOURCE_H_
+#ifndef TIMED_TEXT_3GPP_SOURCE_H_
+#define TIMED_TEXT_3GPP_SOURCE_H_
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaSource.h>
@@ -27,9 +27,9 @@
class MediaBuffer;
class Parcel;
-class TimedTextInBandSource : public TimedTextSource {
+class TimedText3GPPSource : public TimedTextSource {
public:
- TimedTextInBandSource(const sp<MediaSource>& mediaSource);
+ TimedText3GPPSource(const sp<MediaSource>& mediaSource);
virtual status_t start() { return mSource->start(); }
virtual status_t stop() { return mSource->stop(); }
virtual status_t read(
@@ -39,7 +39,7 @@
virtual status_t extractGlobalDescriptions(Parcel *parcel);
protected:
- virtual ~TimedTextInBandSource();
+ virtual ~TimedText3GPPSource();
private:
sp<MediaSource> mSource;
@@ -47,9 +47,9 @@
status_t extractAndAppendLocalDescriptions(
int64_t timeUs, const MediaBuffer *textBuffer, Parcel *parcel);
- DISALLOW_EVIL_CONSTRUCTORS(TimedTextInBandSource);
+ DISALLOW_EVIL_CONSTRUCTORS(TimedText3GPPSource);
};
} // namespace android
-#endif // TIMED_TEXT_IN_BAND_SOURCE_H_
+#endif // TIMED_TEXT_3GPP_SOURCE_H_
diff --git a/media/libstagefright/timedtext/TimedTextDriver.cpp b/media/libstagefright/timedtext/TimedTextDriver.cpp
index 9ec9415..c70870e 100644
--- a/media/libstagefright/timedtext/TimedTextDriver.cpp
+++ b/media/libstagefright/timedtext/TimedTextDriver.cpp
@@ -27,8 +27,7 @@
#include <media/stagefright/Utils.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
-
-#include "TimedTextDriver.h"
+#include <media/stagefright/timedtext/TimedTextDriver.h>
#include "TextDescriptions.h"
#include "TimedTextPlayer.h"
diff --git a/media/libstagefright/timedtext/TimedTextPlayer.cpp b/media/libstagefright/timedtext/TimedTextPlayer.cpp
index bf7cbf6..bda7b46 100644
--- a/media/libstagefright/timedtext/TimedTextPlayer.cpp
+++ b/media/libstagefright/timedtext/TimedTextPlayer.cpp
@@ -20,12 +20,12 @@
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/timedtext/TimedTextDriver.h>
#include <media/stagefright/MediaErrors.h>
#include <media/MediaPlayerInterface.h>
#include "TimedTextPlayer.h"
-#include "TimedTextDriver.h"
#include "TimedTextSource.h"
namespace android {
diff --git a/media/libstagefright/timedtext/TimedTextSource.cpp b/media/libstagefright/timedtext/TimedTextSource.cpp
index 9efe67c..ffbe1c3 100644
--- a/media/libstagefright/timedtext/TimedTextSource.cpp
+++ b/media/libstagefright/timedtext/TimedTextSource.cpp
@@ -18,12 +18,15 @@
#define LOG_TAG "TimedTextSource"
#include <utils/Log.h>
+#include <media/stagefright/foundation/ADebug.h> // CHECK_XX macro
#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MediaDefs.h> // for MEDIA_MIMETYPE_xxx
#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MetaData.h>
#include "TimedTextSource.h"
-#include "TimedTextInBandSource.h"
+#include "TimedText3GPPSource.h"
#include "TimedTextSRTSource.h"
namespace android {
@@ -31,7 +34,13 @@
// static
sp<TimedTextSource> TimedTextSource::CreateTimedTextSource(
const sp<MediaSource>& mediaSource) {
- return new TimedTextInBandSource(mediaSource);
+ const char *mime;
+ CHECK(mediaSource->getFormat()->findCString(kKeyMIMEType, &mime));
+ if (strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP) == 0) {
+ return new TimedText3GPPSource(mediaSource);
+ }
+ ALOGE("Unsupported mime type for subtitle. : %s", mime);
+ return NULL;
}
// static
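
With the rename, unsupported-MIME handling moves out of the source class and into this factory: TimedText3GPPSource now CHECKs that it really was handed 3GPP text, and callers must expect a NULL return here instead of ERROR_UNSUPPORTED from read(). A sketch of the expected caller pattern (TimedTextDriver is the real consumer; this only shows the shape):

    sp<TimedTextSource> source = TimedTextSource::CreateTimedTextSource(mediaSource);
    if (source == NULL) {
        return ERROR_UNSUPPORTED;   // unsupported subtitle MIME is rejected once, up front
    }
    source->start();
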
diff --git a/services/audioflinger/Android.mk b/services/audioflinger/Android.mk
index 22fa752..86692e7 100644
--- a/services/audioflinger/Android.mk
+++ b/services/audioflinger/Android.mk
@@ -7,7 +7,6 @@
AudioMixer.cpp.arm \
AudioResampler.cpp.arm \
AudioPolicyService.cpp \
- AudioBufferProvider.cpp \
ServiceUtilities.cpp
# AudioResamplerSinc.cpp.arm
# AudioResamplerCubic.cpp.arm
diff --git a/services/audioflinger/AudioBufferProvider.cpp b/services/audioflinger/AudioBufferProvider.cpp
deleted file mode 100644
index 678fd58..0000000
--- a/services/audioflinger/AudioBufferProvider.cpp
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#undef __STRICT_ANSI__
-#define __STDINT_LIMITS
-#define __STDC_LIMIT_MACROS
-#include <stdint.h>
-
-#include "AudioBufferProvider.h"
-
-namespace android {
-
-const int64_t AudioBufferProvider::kInvalidPTS = INT64_MAX;
-
-}; // namespace android
diff --git a/services/audioflinger/AudioBufferProvider.h b/services/audioflinger/AudioBufferProvider.h
index 62ad6bd..43e4de7 100644
--- a/services/audioflinger/AudioBufferProvider.h
+++ b/services/audioflinger/AudioBufferProvider.h
@@ -17,8 +17,6 @@
#ifndef ANDROID_AUDIO_BUFFER_PROVIDER_H
#define ANDROID_AUDIO_BUFFER_PROVIDER_H
-#include <stdint.h>
-#include <sys/types.h>
#include <utils/Errors.h>
namespace android {
@@ -29,6 +27,7 @@
public:
struct Buffer {
+ Buffer() : raw(NULL), frameCount(0) { }
union {
void* raw;
short* i16;
@@ -40,12 +39,12 @@
virtual ~AudioBufferProvider() {}
// value representing an invalid presentation timestamp
- static const int64_t kInvalidPTS;
+ static const int64_t kInvalidPTS = 0x7FFFFFFFFFFFFFFFLL; // <stdint.h> is too painful
// pts is the local time when the next sample yielded by getNextBuffer
// will be rendered.
// Pass kInvalidPTS if the PTS is unknown or not applicable.
- virtual status_t getNextBuffer(Buffer* buffer, int64_t pts) = 0;
+ virtual status_t getNextBuffer(Buffer* buffer, int64_t pts = kInvalidPTS) = 0;
virtual void releaseBuffer(Buffer* buffer) = 0;
};
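
Deleting AudioBufferProvider.cpp works because a static const integral member may be initialized inside the class, which also removes the __STDC_LIMIT_MACROS / INT64_MAX include gymnastics; the kInvalidPTS default argument then lets callers without a timestamp drop the second parameter. Minimal sketch of the same pattern with simplified types:

    #include <stdint.h>

    struct Provider {
        // In-class initialization is allowed for static const integral members.
        // (If the constant were ever odr-used, e.g. bound to a reference, one .cpp
        // would still need a definition without an initializer.)
        static const int64_t kInvalidPTS = 0x7FFFFFFFFFFFFFFFLL;   // INT64_MAX

        virtual int getNextBuffer(void *buffer, int64_t pts = kInvalidPTS) = 0;
        virtual ~Provider() {}
    };
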
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 6256951..8f7b35c 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -232,7 +232,7 @@
(NO_ERROR != dev->set_master_volume(dev, initialVolume))) {
mMasterVolumeSupportLvl = MVS_NONE;
}
- mHardwareStatus = AUDIO_HW_INIT;
+ mHardwareStatus = AUDIO_HW_IDLE;
}
// Set the mode for each audio HAL, and try to set the initial volume (if
@@ -254,7 +254,7 @@
dev->set_master_volume(dev, initialVolume);
}
- mHardwareStatus = AUDIO_HW_INIT;
+ mHardwareStatus = AUDIO_HW_IDLE;
}
}
@@ -432,6 +432,7 @@
audio_format_t format,
uint32_t channelMask,
int frameCount,
+ // FIXME dead, remove from IAudioFlinger
uint32_t flags,
const sp<IMemory>& sharedBuffer,
audio_io_handle_t output,
@@ -822,8 +823,6 @@
status_t AudioFlinger::setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs)
{
- status_t result;
-
ALOGV("setParameters(): io %d, keyvalue %s, tid %d, calling pid %d",
ioHandle, keyValuePairs.string(), gettid(), IPCThreadState::self()->getCallingPid());
// check calling permissions
@@ -833,15 +832,17 @@
// ioHandle == 0 means the parameters are global to the audio hardware interface
if (ioHandle == 0) {
- AutoMutex lock(mHardwareLock);
- mHardwareStatus = AUDIO_SET_PARAMETER;
status_t final_result = NO_ERROR;
+ {
+ AutoMutex lock(mHardwareLock);
+ mHardwareStatus = AUDIO_HW_SET_PARAMETER;
for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
audio_hw_device_t *dev = mAudioHwDevs[i];
- result = dev->set_parameters(dev, keyValuePairs.string());
+ status_t result = dev->set_parameters(dev, keyValuePairs.string());
final_result = result ?: final_result;
}
mHardwareStatus = AUDIO_HW_IDLE;
+ }
// disable AEC and NS if the device is a BT SCO headset supporting those pre processings
AudioParameter param = AudioParameter(keyValuePairs);
String8 value;
@@ -904,8 +905,14 @@
String8 out_s8;
for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
+ char *s;
+ {
+ AutoMutex lock(mHardwareLock);
+ mHardwareStatus = AUDIO_HW_GET_PARAMETER;
audio_hw_device_t *dev = mAudioHwDevs[i];
- char *s = dev->get_parameters(dev, keys.string());
+ s = dev->get_parameters(dev, keys.string());
+ mHardwareStatus = AUDIO_HW_IDLE;
+ }
out_s8 += String8(s ? s : "");
free(s);
}
@@ -967,7 +974,7 @@
}
AutoMutex lock(mHardwareLock);
- mHardwareStatus = AUDIO_SET_VOICE_VOLUME;
+ mHardwareStatus = AUDIO_HW_SET_VOICE_VOLUME;
ret = mPrimaryHardwareDev->set_voice_volume(mPrimaryHardwareDev, value);
mHardwareStatus = AUDIO_HW_IDLE;
@@ -1022,12 +1029,7 @@
{
Mutex::Autolock _l(mLock);
- ssize_t index = mNotificationClients.indexOfKey(pid);
- if (index >= 0) {
- sp <NotificationClient> client = mNotificationClients.valueFor(pid);
- ALOGV("removeNotificationClient() %p, pid %d", client.get(), pid);
- mNotificationClients.removeItem(pid);
- }
+ mNotificationClients.removeItem(pid);
ALOGV("%d died, releasing its sessions", pid);
size_t num = mAudioSessionRefs.size();
@@ -1462,7 +1464,7 @@
mMasterVolume(audioFlinger->masterVolumeSW_l()),
mLastWriteTime(0), mNumWrites(0), mNumDelayedWrites(0), mInWrite(false)
{
- snprintf(mName, kNameLength, "AudioOut_%d", id);
+ snprintf(mName, kNameLength, "AudioOut_%X", id);
readOutputParameters();
@@ -1932,12 +1934,71 @@
delete mAudioMixer;
}
+class CpuStats {
+public:
+ void sample();
+#ifdef DEBUG_CPU_USAGE
+private:
+ ThreadCpuUsage mCpu;
+#endif
+};
+
+void CpuStats::sample() {
+#ifdef DEBUG_CPU_USAGE
+ const CentralTendencyStatistics& stats = mCpu.statistics();
+ mCpu.sampleAndEnable();
+ unsigned n = stats.n();
+ // mCpu.elapsed() is expensive, so don't call it every loop
+ if ((n & 127) == 1) {
+ long long elapsed = mCpu.elapsed();
+ if (elapsed >= DEBUG_CPU_USAGE * 1000000000LL) {
+ double perLoop = elapsed / (double) n;
+ double perLoop100 = perLoop * 0.01;
+ double mean = stats.mean();
+ double stddev = stats.stddev();
+ double minimum = stats.minimum();
+ double maximum = stats.maximum();
+ mCpu.resetStatistics();
+ ALOGI("CPU usage over past %.1f secs (%u mixer loops at %.1f mean ms per loop):\n us per mix loop: mean=%.0f stddev=%.0f min=%.0f max=%.0f\n %% of wall: mean=%.1f stddev=%.1f min=%.1f max=%.1f",
+ elapsed * .000000001, n, perLoop * .000001,
+ mean * .001,
+ stddev * .001,
+ minimum * .001,
+ maximum * .001,
+ mean / perLoop100,
+ stddev / perLoop100,
+ minimum / perLoop100,
+ maximum / perLoop100);
+ }
+ }
+#endif
+}
+
+void AudioFlinger::PlaybackThread::checkSilentMode_l()
+{
+ if (!mMasterMute) {
+ char value[PROPERTY_VALUE_MAX];
+ if (property_get("ro.audio.silent", value, "0") > 0) {
+ char *endptr;
+ unsigned long ul = strtoul(value, &endptr, 0);
+ if (*endptr == '\0' && ul != 0) {
+ ALOGD("Silence is golden");
+ // The setprop command will not allow a property to be changed after
+ // the first time it is set, so we don't have to worry about un-muting.
+ setMasterMute_l(true);
+ }
+ }
+ }
+}
+
bool AudioFlinger::MixerThread::threadLoop()
{
+ // DirectOutputThread has single trackToRemove instead of Vector
Vector< sp<Track> > tracksToRemove;
- mixer_state mixerStatus = MIXER_IDLE;
+ // DirectOutputThread has activeTrack here
nsecs_t standbyTime = systemTime();
size_t mixBufferSize = mFrameCount * mFrameSize;
+
// FIXME: Relaxed timing because of a certain device that can't meet latency
// Should be reduced to 2x after the vendor fixes the driver issue
// increase threshold again due to low power audio mode. The way this warning threshold is
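
The hunk above factors two blocks that MixerThread and DirectOutputThread used to duplicate into helpers: the DEBUG_CPU_USAGE bookkeeping becomes CpuStats::sample(), and the ro.audio.silent handling becomes checkSilentMode_l(), which both thread loops call after waking (see the later hunks). The property parse is also stricter than the old atoi() version: it only mutes when the whole value is a well-formed, nonzero number. Stand-alone sketch of that parse (function name illustrative):

    #include <stdlib.h>

    // Mirrors the check in checkSilentMode_l(): base 0 accepts "1", "0x1", "010",
    // and the endptr test rejects trailing garbage that atoi() would ignore.
    static bool silent_property_requests_mute(const char *value)
    {
        char *endptr;
        unsigned long ul = strtoul(value, &endptr, 0);
        return *endptr == '\0' && ul != 0;
    }
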
@@ -1945,78 +2006,58 @@
nsecs_t maxPeriod = seconds(mFrameCount) / mSampleRate * 15;
nsecs_t lastWarning = 0;
bool longStandbyExit = false;
+
uint32_t activeSleepTime = activeSleepTimeUs();
uint32_t idleSleepTime = idleSleepTimeUs();
uint32_t sleepTime = idleSleepTime;
+
uint32_t sleepTimeShift = 0;
- Vector< sp<EffectChain> > effectChains;
-#ifdef DEBUG_CPU_USAGE
- ThreadCpuUsage cpu;
- const CentralTendencyStatistics& stats = cpu.statistics();
-#endif
+ CpuStats cpuStats;
+
+ // DirectOutputThread has shorter standbyDelay
acquireWakeLock();
while (!exitPending())
{
-#ifdef DEBUG_CPU_USAGE
- cpu.sampleAndEnable();
- unsigned n = stats.n();
- // cpu.elapsed() is expensive, so don't call it every loop
- if ((n & 127) == 1) {
- long long elapsed = cpu.elapsed();
- if (elapsed >= DEBUG_CPU_USAGE * 1000000000LL) {
- double perLoop = elapsed / (double) n;
- double perLoop100 = perLoop * 0.01;
- double mean = stats.mean();
- double stddev = stats.stddev();
- double minimum = stats.minimum();
- double maximum = stats.maximum();
- cpu.resetStatistics();
- ALOGI("CPU usage over past %.1f secs (%u mixer loops at %.1f mean ms per loop):\n us per mix loop: mean=%.0f stddev=%.0f min=%.0f max=%.0f\n %% of wall: mean=%.1f stddev=%.1f min=%.1f max=%.1f",
- elapsed * .000000001, n, perLoop * .000001,
- mean * .001,
- stddev * .001,
- minimum * .001,
- maximum * .001,
- mean / perLoop100,
- stddev / perLoop100,
- minimum / perLoop100,
- maximum / perLoop100);
- }
- }
-#endif
+ cpuStats.sample();
+
+ // DirectOutputThread has rampVolume, leftVol, rightVol
+
+ Vector< sp<EffectChain> > effectChains;
+
processConfigEvents();
- mixerStatus = MIXER_IDLE;
+ mixer_state mixerStatus = MIXER_IDLE;
{ // scope for mLock
Mutex::Autolock _l(mLock);
if (checkForNewParameters_l()) {
mixBufferSize = mFrameCount * mFrameSize;
+
// FIXME: Relaxed timing because of a certain device that can't meet latency
// Should be reduced to 2x after the vendor fixes the driver issue
// increase threshold again due to low power audio mode. The way this warning
// threshold is calculated and its usefulness should be reconsidered anyway.
maxPeriod = seconds(mFrameCount) / mSampleRate * 15;
+
activeSleepTime = activeSleepTimeUs();
idleSleepTime = idleSleepTimeUs();
+ // DirectOutputThread updates standbyDelay also
}
- const SortedVector< wp<Track> >& activeTracks = mActiveTracks;
-
// put audio hardware into standby after short delay
- if (CC_UNLIKELY((!activeTracks.size() && systemTime() > standbyTime) ||
- mSuspended)) {
+ if (CC_UNLIKELY((!mActiveTracks.size() && systemTime() > standbyTime) ||
+ mSuspended > 0)) {
if (!mStandby) {
- ALOGV("Audio hardware entering standby, mixer %p, mSuspended %d", this, mSuspended);
+ ALOGV("Audio hardware entering standby, mixer %p, suspend count %u", this, mSuspended);
mOutput->stream->common.standby(&mOutput->stream->common);
mStandby = true;
mBytesWritten = 0;
}
- if (!activeTracks.size() && mConfigEvents.isEmpty()) {
+ if (!mActiveTracks.size() && mConfigEvents.isEmpty()) {
// we're about to wait, flush the binder command buffer
IPCThreadState::self()->flushCommands();
@@ -2024,20 +2065,13 @@
releaseWakeLock_l();
// wait until we have something to do...
- ALOGV("MixerThread %p TID %d going to sleep", this, gettid());
+ ALOGV("Thread %p type %d TID %d going to sleep", this, mType, gettid());
mWaitWorkCV.wait(mLock);
- ALOGV("MixerThread %p TID %d waking up", this, gettid());
+ ALOGV("Thread %p type %d TID %d waking up", this, mType, gettid());
acquireWakeLock_l();
mPrevMixerStatus = MIXER_IDLE;
- if (!mMasterMute) {
- char value[PROPERTY_VALUE_MAX];
- property_get("ro.audio.silent", value, "0");
- if (atoi(value)) {
- ALOGD("Silence is golden");
- setMasterMute_l(true);
- }
- }
+ checkSilentMode_l();
standbyTime = systemTime() + mStandbyTimeInNsecs;
sleepTime = idleSleepTime;
@@ -2046,7 +2080,7 @@
}
}
- mixerStatus = prepareTracks_l(activeTracks, &tracksToRemove);
+ mixerStatus = prepareTracks_l(&tracksToRemove);
// prevent any changes in effect chain list and in each effect chain
// during mixing and effect process as the audio buffers could be deleted
@@ -2108,24 +2142,35 @@
// TODO add standby time extension fct of effect tail
}
- if (mSuspended) {
+ if (mSuspended > 0) {
sleepTime = suspendSleepTimeUs();
}
- // sleepTime == 0 means we must write to audio hardware
+
+ // only process effects if we're going to write
if (sleepTime == 0) {
+
+ // DirectOutputThread adds applyVolume here
+
for (size_t i = 0; i < effectChains.size(); i ++) {
effectChains[i]->process_l();
}
- // enable changes in effect chain
- unlockEffectChains(effectChains);
+ }
+
+ // enable changes in effect chain
+ unlockEffectChains(effectChains);
+
+ // sleepTime == 0 means we must write to audio hardware
+ if (sleepTime == 0) {
+ // FIXME Only in MixerThread, and rewrite to reduce number of system calls
mLastWriteTime = systemTime();
mInWrite = true;
mBytesWritten += mixBufferSize;
-
int bytesWritten = (int)mOutput->stream->write(mOutput->stream, mMixBuffer, mixBufferSize);
if (bytesWritten < 0) mBytesWritten -= mixBufferSize;
mNumWrites++;
mInWrite = false;
+
+ // Only in MixerThread: start of write blocked detection
nsecs_t now = systemTime();
nsecs_t delta = now - mLastWriteTime;
if (!mStandby && delta > maxPeriod) {
@@ -2139,14 +2184,14 @@
longStandbyExit = true;
}
}
+ // end of write blocked detection
+
mStandby = false;
} else {
- // enable changes in effect chain
- unlockEffectChains(effectChains);
usleep(sleepTime);
}
- // finally let go of all our tracks, without the lock held
+ // finally let go of removed track(s), without the lock held
// since we can't guarantee the destructors won't acquire that
// same lock.
tracksToRemove.clear();
@@ -2154,26 +2199,30 @@
// Effect chains will be actually deleted here if they were removed from
// mEffectChains list during mixing or effects processing
effectChains.clear();
+
+ // FIXME Note that the above .clear() is no longer necessary since effectChains
+ // is now local to this block, but will keep it for now (at least until merge done).
}
+ // put output stream into standby mode
if (!mStandby) {
mOutput->stream->common.standby(&mOutput->stream->common);
}
releaseWakeLock();
- ALOGV("MixerThread %p exiting", this);
+ ALOGV("Thread %p type %d exiting", this, mType);
return false;
}
// prepareTracks_l() must be called with ThreadBase::mLock held
AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTracks_l(
- const SortedVector< wp<Track> >& activeTracks, Vector< sp<Track> > *tracksToRemove)
+ Vector< sp<Track> > *tracksToRemove)
{
mixer_state mixerStatus = MIXER_IDLE;
// find out which tracks need to be processed
- size_t count = activeTracks.size();
+ size_t count = mActiveTracks.size();
size_t mixedTracks = 0;
size_t tracksWithEffect = 0;
@@ -2193,7 +2242,7 @@
}
for (size_t i=0 ; i<count ; i++) {
- sp<Track> t = activeTracks[i].promote();
+ sp<Track> t = mActiveTracks[i].promote();
if (t == 0) continue;
// this const just means the local variable doesn't change
@@ -2680,14 +2729,20 @@
bool AudioFlinger::DirectOutputThread::threadLoop()
{
- mixer_state mixerStatus = MIXER_IDLE;
+ // MixerThread has Vector instead of single trackToRemove
sp<Track> trackToRemove;
- sp<Track> activeTrack;
+
nsecs_t standbyTime = systemTime();
- size_t mixBufferSize = mFrameCount*mFrameSize;
+ size_t mixBufferSize = mFrameCount * mFrameSize;
+
+ // MixerThread has relaxed timing: maxPeriod, lastWarning, longStandbyExit
+
uint32_t activeSleepTime = activeSleepTimeUs();
uint32_t idleSleepTime = idleSleepTimeUs();
uint32_t sleepTime = idleSleepTime;
+
+ // MixerThread has sleepTimeShift and cpuStats
+
// use shorter standby delay as on normal output to release
// hardware resources as soon as possible
nsecs_t standbyDelay = microseconds(activeSleepTime*2);
@@ -2696,21 +2751,30 @@
while (!exitPending())
{
+ // MixerThread has cpuStats.sample()
+
bool rampVolume;
uint16_t leftVol;
uint16_t rightVol;
+
Vector< sp<EffectChain> > effectChains;
processConfigEvents();
- mixerStatus = MIXER_IDLE;
+ // MixerThread does not have activeTrack here
+ sp<Track> activeTrack;
+ mixer_state mixerStatus = MIXER_IDLE;
{ // scope for the mLock
Mutex::Autolock _l(mLock);
if (checkForNewParameters_l()) {
- mixBufferSize = mFrameCount*mFrameSize;
+ mixBufferSize = mFrameCount * mFrameSize;
+
+ // different calculations here
+ standbyDelay = microseconds(activeSleepTime*2);
+
activeSleepTime = activeSleepTimeUs();
idleSleepTime = idleSleepTimeUs();
standbyDelay = microseconds(activeSleepTime*2);
@@ -2718,10 +2782,9 @@
// put audio hardware into standby after short delay
if (CC_UNLIKELY((!mActiveTracks.size() && systemTime() > standbyTime) ||
- mSuspended)) {
- // wait until we have something to do...
+ mSuspended > 0)) {
if (!mStandby) {
- ALOGV("Audio hardware entering standby, mixer %p", this);
+ ALOGV("Audio hardware entering standby, mixer %p, suspend count %u", this, mSuspended);
mOutput->stream->common.standby(&mOutput->stream->common);
mStandby = true;
mBytesWritten = 0;
@@ -2734,26 +2797,27 @@
if (exitPending()) break;
releaseWakeLock_l();
- ALOGV("DirectOutputThread %p TID %d going to sleep", this, gettid());
+ // wait until we have something to do...
+ ALOGV("Thread %p type %d TID %d going to sleep", this, mType, gettid());
mWaitWorkCV.wait(mLock);
- ALOGV("DirectOutputThread %p TID %d waking up in active mode", this, gettid());
+ ALOGV("Thread %p type %d TID %d waking up", this, mType, gettid());
acquireWakeLock_l();
- if (!mMasterMute) {
- char value[PROPERTY_VALUE_MAX];
- property_get("ro.audio.silent", value, "0");
- if (atoi(value)) {
- ALOGD("Silence is golden");
- setMasterMute_l(true);
- }
- }
+ // MixerThread has "mPrevMixerStatus = MIXER_IDLE"
+ checkSilentMode_l();
+ // MixerThread has different standbyDelay
standbyTime = systemTime() + standbyDelay;
sleepTime = idleSleepTime;
+ // MixerThread has "sleepTimeShift = 0"
continue;
}
}
+ // MixerThread has "mixerStatus = prepareTracks_l(...)"
+
+ // equivalent to MixerThread's lockEffectChains_l, but without the lock
+ // FIXME - is it OK to omit the lock here?
effectChains = mEffectChains;
// find out which tracks need to be processed
@@ -2884,6 +2948,7 @@
lockEffectChains_l(effectChains);
}
+ // For DirectOutputThread, this test is equivalent to "activeTrack != 0"
if (CC_LIKELY(mixerStatus == MIXER_TRACKS_READY)) {
AudioBufferProvider::Buffer buffer;
size_t frameCount = mFrameCount;
@@ -2891,8 +2956,7 @@
// output audio to hardware
while (frameCount) {
buffer.frameCount = frameCount;
- activeTrack->getNextBuffer(&buffer,
- AudioBufferProvider::kInvalidPTS);
+ activeTrack->getNextBuffer(&buffer);
if (CC_UNLIKELY(buffer.raw == NULL)) {
memset(curBuf, 0, frameCount * mFrameSize);
break;
@@ -2917,19 +2981,28 @@
}
}
- if (mSuspended) {
+ if (mSuspended > 0) {
sleepTime = suspendSleepTimeUs();
}
- // sleepTime == 0 means we must write to audio hardware
+
+ // only process effects if we're going to write
if (sleepTime == 0) {
+
+ // MixerThread does not have applyVolume
if (mixerStatus == MIXER_TRACKS_READY) {
applyVolume(leftVol, rightVol, rampVolume);
}
+
for (size_t i = 0; i < effectChains.size(); i ++) {
effectChains[i]->process_l();
}
- unlockEffectChains(effectChains);
+ }
+ // enable changes in effect chain
+ unlockEffectChains(effectChains);
+
+ // sleepTime == 0 means we must write to audio hardware
+ if (sleepTime == 0) {
mLastWriteTime = systemTime();
mInWrite = true;
mBytesWritten += mixBufferSize;
@@ -2937,13 +3010,15 @@
if (bytesWritten < 0) mBytesWritten -= mixBufferSize;
mNumWrites++;
mInWrite = false;
+
+ // MixerThread has write blocked detection here
+
mStandby = false;
} else {
- unlockEffectChains(effectChains);
usleep(sleepTime);
}
- // finally let go of removed track, without the lock held
+ // finally let go of removed track(s), without the lock held
// since we can't guarantee the destructors won't acquire that
// same lock.
trackToRemove.clear();
@@ -2952,15 +3027,19 @@
// Effect chains will be actually deleted here if they were removed from
// mEffectChains list during mixing or effects processing
effectChains.clear();
+
+ // FIXME Note that the above .clear() is no longer necessary since effectChains
+ // is now local to this block, but we will keep it for now (at least until the merge is done).
}
+ // put output stream into standby mode
if (!mStandby) {
mOutput->stream->common.standby(&mOutput->stream->common);
}
releaseWakeLock();
- ALOGV("DirectOutputThread %p exiting", this);
+ ALOGV("Thread %p type %d exiting", this, mType);
return false;
}
@@ -3077,44 +3156,52 @@
bool AudioFlinger::DuplicatingThread::threadLoop()
{
Vector< sp<Track> > tracksToRemove;
- mixer_state mixerStatus = MIXER_IDLE;
nsecs_t standbyTime = systemTime();
- size_t mixBufferSize = mFrameCount*mFrameSize;
+ size_t mixBufferSize = mFrameCount * mFrameSize;
+
+ // Only in DuplicatingThread
SortedVector< sp<OutputTrack> > outputTracks;
uint32_t writeFrames = 0;
+
uint32_t activeSleepTime = activeSleepTimeUs();
uint32_t idleSleepTime = idleSleepTimeUs();
uint32_t sleepTime = idleSleepTime;
- Vector< sp<EffectChain> > effectChains;
acquireWakeLock();
while (!exitPending())
{
+ // MixerThread has cpuStats.sample()
+
+ Vector< sp<EffectChain> > effectChains;
+
processConfigEvents();
- mixerStatus = MIXER_IDLE;
+ mixer_state mixerStatus = MIXER_IDLE;
{ // scope for the mLock
Mutex::Autolock _l(mLock);
if (checkForNewParameters_l()) {
- mixBufferSize = mFrameCount*mFrameSize;
+ mixBufferSize = mFrameCount * mFrameSize;
+
+ // Only in DuplicatingThread
updateWaitTime();
+
activeSleepTime = activeSleepTimeUs();
idleSleepTime = idleSleepTimeUs();
}
- const SortedVector< wp<Track> >& activeTracks = mActiveTracks;
-
+ // Only in DuplicatingThread
for (size_t i = 0; i < mOutputTracks.size(); i++) {
outputTracks.add(mOutputTracks[i]);
}
// put audio hardware into standby after short delay
- if (CC_UNLIKELY((!activeTracks.size() && systemTime() > standbyTime) ||
- mSuspended)) {
+ if (CC_UNLIKELY((!mActiveTracks.size() && systemTime() > standbyTime) ||
+ mSuspended > 0)) {
if (!mStandby) {
+ // DuplicatingThread implements standby by stopping all tracks
for (size_t i = 0; i < outputTracks.size(); i++) {
outputTracks[i]->stop();
}
@@ -3122,7 +3209,7 @@
mBytesWritten = 0;
}
- if (!activeTracks.size() && mConfigEvents.isEmpty()) {
+ if (!mActiveTracks.size() && mConfigEvents.isEmpty()) {
// we're about to wait, flush the binder command buffer
IPCThreadState::self()->flushCommands();
outputTracks.clear();
@@ -3130,28 +3217,23 @@
if (exitPending()) break;
releaseWakeLock_l();
- ALOGV("DuplicatingThread %p TID %d going to sleep", this, gettid());
+ // wait until we have something to do...
+ ALOGV("Thread %p type %d TID %d going to sleep", this, mType, gettid());
mWaitWorkCV.wait(mLock);
- ALOGV("DuplicatingThread %p TID %d waking up", this, gettid());
+ ALOGV("Thread %p type %d TID %d waking up", this, mType, gettid());
acquireWakeLock_l();
- mPrevMixerStatus = MIXER_IDLE;
- if (!mMasterMute) {
- char value[PROPERTY_VALUE_MAX];
- property_get("ro.audio.silent", value, "0");
- if (atoi(value)) {
- ALOGD("Silence is golden");
- setMasterMute_l(true);
- }
- }
+ // MixerThread has "mPrevMixerStatus = MIXER_IDLE"
+ checkSilentMode_l();
standbyTime = systemTime() + mStandbyTimeInNsecs;
sleepTime = idleSleepTime;
+ // MixerThread has sleepTimeShift
continue;
}
}
- mixerStatus = prepareTracks_l(activeTracks, &tracksToRemove);
+ mixerStatus = prepareTracks_l(&tracksToRemove);
// prevent any changes in effect chain list and in each effect chain
// during mixing and effect process as the audio buffers could be deleted
@@ -3159,6 +3241,7 @@
lockEffectChains_l(effectChains);
}
+ // DuplicatingThread is completely different here
if (CC_LIKELY(mixerStatus == MIXER_TRACKS_READY)) {
// mix buffers...
if (outputsReady(outputTracks)) {
@@ -3188,30 +3271,36 @@
}
}
- if (mSuspended) {
+ if (mSuspended > 0) {
sleepTime = suspendSleepTimeUs();
}
- // sleepTime == 0 means we must write to audio hardware
+
+ // only process effects if we're going to write
if (sleepTime == 0) {
for (size_t i = 0; i < effectChains.size(); i ++) {
effectChains[i]->process_l();
}
- // enable changes in effect chain
- unlockEffectChains(effectChains);
+ }
+ // enable changes in effect chain
+ unlockEffectChains(effectChains);
+
+ // sleepTime == 0 means we must write to audio hardware
+ if (sleepTime == 0) {
standbyTime = systemTime() + mStandbyTimeInNsecs;
for (size_t i = 0; i < outputTracks.size(); i++) {
outputTracks[i]->write(mMixBuffer, writeFrames);
}
mStandby = false;
mBytesWritten += mixBufferSize;
+
+ // MixerThread has write blocked detection here
+
} else {
- // enable changes in effect chain
- unlockEffectChains(effectChains);
usleep(sleepTime);
}
- // finally let go of all our tracks, without the lock held
+ // finally let go of removed track(s), without the lock held
// since we can't guarantee the destructors won't acquire that
// same lock.
tracksToRemove.clear();
@@ -3220,15 +3309,23 @@
// Effect chains will be actually deleted here if they were removed from
// mEffectChains list during mixing or effects processing
effectChains.clear();
+
+ // FIXME Note that the above .clear() is no longer necessary since effectChains
+ // is now local to this block, but we will keep it for now (at least until the merge is done).
}
+ // MixerThread and DirectOutputThread have standby here,
+ // but for DuplicatingThread this is handled by the outputTracks
+
releaseWakeLock();
+ ALOGV("Thread %p type %d exiting", this, mType);
return false;
}
void AudioFlinger::DuplicatingThread::addOutputTrack(MixerThread *thread)
{
+ Mutex::Autolock _l(mLock);
// FIXME explain this formula
int frameCount = (3 * mFrameCount * mSampleRate) / thread->sampleRate();
OutputTrack *outputTrack = new OutputTrack(thread,
@@ -3274,7 +3371,7 @@
}
-bool AudioFlinger::DuplicatingThread::outputsReady(SortedVector< sp<OutputTrack> > &outputTracks)
+bool AudioFlinger::DuplicatingThread::outputsReady(const SortedVector< sp<OutputTrack> > &outputTracks)
{
for (size_t i = 0; i < outputTracks.size(); i++) {
sp <ThreadBase> thread = outputTracks[i]->thread().promote();
@@ -3306,7 +3403,6 @@
audio_format_t format,
uint32_t channelMask,
int frameCount,
- uint32_t flags,
const sp<IMemory>& sharedBuffer,
int sessionId)
: RefBase(),
@@ -3318,7 +3414,7 @@
mFrameCount(0),
mState(IDLE),
mFormat(format),
- mFlags(flags & ~SYSTEM_FLAGS_MASK),
+ mStepServerFailed(false),
mSessionId(sessionId)
// mChannelCount
// mChannelMask
@@ -3398,11 +3494,14 @@
}
}
+// AudioBufferProvider interface
+// getNextBuffer() = 0;
+// This implementation of releaseBuffer() is used by Track and RecordTrack, but not TimedTrack
void AudioFlinger::ThreadBase::TrackBase::releaseBuffer(AudioBufferProvider::Buffer* buffer)
{
buffer->raw = NULL;
mFrameCount = buffer->frameCount;
- step();
+ (void) step(); // ignore return value of step()
buffer->frameCount = 0;
}
@@ -3413,7 +3512,7 @@
result = cblk->stepServer(mFrameCount);
if (!result) {
ALOGV("stepServer failed acquiring cblk mutex");
- mFlags |= STEPSERVER_FAILED;
+ mStepServerFailed = true;
}
return result;
}
@@ -3425,7 +3524,7 @@
cblk->server = 0;
cblk->userBase = 0;
cblk->serverBase = 0;
- mFlags &= (uint32_t)(~SYSTEM_FLAGS_MASK);
+ mStepServerFailed = false;
ALOGV("TrackBase::reset");
}
@@ -3465,7 +3564,7 @@
int frameCount,
const sp<IMemory>& sharedBuffer,
int sessionId)
- : TrackBase(thread, client, sampleRate, format, channelMask, frameCount, 0, sharedBuffer, sessionId),
+ : TrackBase(thread, client, sampleRate, format, channelMask, frameCount, sharedBuffer, sessionId),
mMute(false), mSharedBuffer(sharedBuffer), mName(-1), mMainBuffer(NULL), mAuxBuffer(NULL),
mAuxEffectId(0), mHasVolumeController(false)
{
@@ -3548,6 +3647,7 @@
(int)mAuxBuffer);
}
+// AudioBufferProvider interface
status_t AudioFlinger::PlaybackThread::Track::getNextBuffer(
AudioBufferProvider::Buffer* buffer, int64_t pts)
{
@@ -3556,10 +3656,10 @@
uint32_t framesReq = buffer->frameCount;
// Check if last stepServer failed, try to step now
- if (mFlags & TrackBase::STEPSERVER_FAILED) {
+ if (mStepServerFailed) {
if (!step()) goto getNextBuffer_exit;
ALOGV("stepServer recovered");
- mFlags &= ~TrackBase::STEPSERVER_FAILED;
+ mStepServerFailed = false;
}
framesReady = cblk->framesReady();
@@ -4096,6 +4196,7 @@
mTimedAudioOutputOnTime = false;
}
+// AudioBufferProvider interface
void AudioFlinger::PlaybackThread::TimedTrack::releaseBuffer(
AudioBufferProvider::Buffer* buffer) {
@@ -4155,10 +4256,9 @@
audio_format_t format,
uint32_t channelMask,
int frameCount,
- uint32_t flags,
int sessionId)
: TrackBase(thread, client, sampleRate, format,
- channelMask, frameCount, flags, 0, sessionId),
+ channelMask, frameCount, 0 /*sharedBuffer*/, sessionId),
mOverflow(false)
{
if (mCblk != NULL) {
@@ -4181,6 +4281,7 @@
}
}
+// AudioBufferProvider interface
status_t AudioFlinger::RecordThread::RecordTrack::getNextBuffer(AudioBufferProvider::Buffer* buffer, int64_t pts)
{
audio_track_cblk_t* cblk = this->cblk();
@@ -4188,10 +4289,10 @@
uint32_t framesReq = buffer->frameCount;
// Check if last stepServer failed, try to step now
- if (mFlags & TrackBase::STEPSERVER_FAILED) {
+ if (mStepServerFailed) {
if (!step()) goto getNextBuffer_exit;
ALOGV("stepServer recovered");
- mFlags &= ~TrackBase::STEPSERVER_FAILED;
+ mStepServerFailed = false;
}
framesAvail = cblk->framesAvailable_l();
@@ -4654,6 +4755,7 @@
audio_format_t format,
uint32_t channelMask,
int frameCount,
+ // FIXME dead parameter, remove from IAudioFlinger
uint32_t flags,
int *sessionId,
status_t *status)
@@ -4698,7 +4800,6 @@
format,
channelMask,
frameCount,
- flags,
lSessionId,
&lStatus);
}
@@ -4769,7 +4870,7 @@
// mBytesRead is only meaningful while active, and so is cleared in start()
// (but might be better to also clear here for dump?)
{
- snprintf(mName, kNameLength, "AudioIn_%d", id);
+ snprintf(mName, kNameLength, "AudioIn_%X", id);
readInputParameters();
}
@@ -4868,8 +4969,7 @@
}
buffer.frameCount = mFrameCount;
- if (CC_LIKELY(mActiveTrack->getNextBuffer(
- &buffer, AudioBufferProvider::kInvalidPTS) == NO_ERROR)) {
+ if (CC_LIKELY(mActiveTrack->getNextBuffer(&buffer) == NO_ERROR)) {
size_t framesOut = buffer.frameCount;
if (mResampler == NULL) {
// no resampling
@@ -4992,7 +5092,6 @@
audio_format_t format,
int channelMask,
int frameCount,
- uint32_t flags,
int sessionId,
status_t *status)
{
@@ -5009,7 +5108,7 @@
Mutex::Autolock _l(mLock);
track = new RecordTrack(this, client, sampleRate,
- format, channelMask, frameCount, flags, sessionId);
+ format, channelMask, frameCount, sessionId);
if (track->getCblk() == 0) {
lStatus = NO_MEMORY;
@@ -5147,6 +5246,7 @@
return NO_ERROR;
}
+// AudioBufferProvider interface
status_t AudioFlinger::RecordThread::getNextBuffer(AudioBufferProvider::Buffer* buffer, int64_t pts)
{
size_t framesReq = buffer->frameCount;
@@ -5185,6 +5285,7 @@
return NO_ERROR;
}
+// AudioBufferProvider interface
void AudioFlinger::RecordThread::releaseBuffer(AudioBufferProvider::Buffer* buffer)
{
mRsmpInIndex += buffer->frameCount;
@@ -5429,7 +5530,6 @@
{
status_t status;
PlaybackThread *thread = NULL;
- mHardwareStatus = AUDIO_HW_OUTPUT_OPEN;
uint32_t samplingRate = pSamplingRate ? *pSamplingRate : 0;
audio_format_t format = pFormat ? *pFormat : AUDIO_FORMAT_DEFAULT;
uint32_t channels = pChannels ? *pChannels : 0;
@@ -5454,8 +5554,10 @@
if (outHwDev == NULL)
return 0;
+ mHardwareStatus = AUDIO_HW_OUTPUT_OPEN;
status = outHwDev->open_output_stream(outHwDev, *pDevices, &format,
&channels, &samplingRate, &outStream);
+ mHardwareStatus = AUDIO_HW_IDLE;
ALOGV("openOutput() openOutputStream returned output %p, SamplingRate %d, Format %d, Channels %x, status %d",
outStream,
samplingRate,
@@ -5463,7 +5565,6 @@
channels,
status);
- mHardwareStatus = AUDIO_HW_IDLE;
if (outStream != NULL) {
AudioStreamOut *output = new AudioStreamOut(outHwDev, outStream);
audio_io_handle_t id = nextUniqueId();
@@ -5855,7 +5956,7 @@
return android_atomic_inc(&mNextUniqueId);
}
-AudioFlinger::PlaybackThread *AudioFlinger::primaryPlaybackThread_l()
+AudioFlinger::PlaybackThread *AudioFlinger::primaryPlaybackThread_l() const
{
for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
PlaybackThread *thread = mPlaybackThreads.valueAt(i).get();
@@ -5867,7 +5968,7 @@
return NULL;
}
-uint32_t AudioFlinger::primaryOutputDevice_l()
+uint32_t AudioFlinger::primaryOutputDevice_l() const
{
PlaybackThread *thread = primaryPlaybackThread_l();
@@ -6362,7 +6463,7 @@
}
void AudioFlinger::ThreadBase::unlockEffectChains(
- Vector<sp <AudioFlinger::EffectChain> >& effectChains)
+ const Vector<sp <AudioFlinger::EffectChain> >& effectChains)
{
for (size_t i = 0; i < effectChains.size(); i++) {
effectChains[i]->unlock();
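[Editor's note, not part of the patch] Several hunks above repeat the comment "finally let go of removed track(s), without the lock held, since we can't guarantee the destructors won't acquire that same lock." The idiom behind it: collect strong references to the tracks being removed while mLock is held, then clear that container only after the lock is dropped, so a Track destructor that re-enters a thread lock cannot deadlock. A minimal self-contained sketch of the idiom in standard C++ (ThreadSketch, Track and loopOnce are placeholder names, not AudioFlinger types):

    #include <memory>
    #include <mutex>
    #include <vector>

    struct Track {
        ~Track() { /* may take other locks; must not run while mLock is held */ }
    };

    struct ThreadSketch {
        std::mutex mLock;
        std::vector<std::shared_ptr<Track>> mActiveTracks;

        void loopOnce() {
            std::vector<std::shared_ptr<Track>> tracksToRemove;
            {
                std::lock_guard<std::mutex> l(mLock);
                // decide which tracks are finished and move their strong refs out
                // (simplified here: remove everything)
                tracksToRemove.swap(mActiveTracks);
            }
            // lock no longer held: dropping the last references here may run
            // ~Track(), which is then free to acquire whatever locks it needs
            tracksToRemove.clear();
        }
    };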
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 1a52de5..bdaf97c 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -312,19 +312,12 @@
PAUSED
};
- enum track_flags {
- STEPSERVER_FAILED = 0x01, // StepServer could not acquire cblk->lock mutex
- SYSTEM_FLAGS_MASK = 0x0000ffffUL,
- // The upper 16 bits are used for track-specific flags.
- };
-
TrackBase(ThreadBase *thread,
const sp<Client>& client,
uint32_t sampleRate,
audio_format_t format,
uint32_t channelMask,
int frameCount,
- uint32_t flags,
const sp<IMemory>& sharedBuffer,
int sessionId);
virtual ~TrackBase();
@@ -346,9 +339,8 @@
TrackBase(const TrackBase&);
TrackBase& operator = (const TrackBase&);
- virtual status_t getNextBuffer(
- AudioBufferProvider::Buffer* buffer,
- int64_t pts) = 0;
+ // AudioBufferProvider interface
+ virtual status_t getNextBuffer(AudioBufferProvider::Buffer* buffer, int64_t pts) = 0;
virtual void releaseBuffer(AudioBufferProvider::Buffer* buffer);
audio_format_t format() const {
@@ -384,7 +376,7 @@
// we don't really need a lock for these
track_state mState;
const audio_format_t mFormat;
- uint32_t mFlags;
+ bool mStepServerFailed;
const int mSessionId;
uint8_t mChannelCount;
uint32_t mChannelMask;
@@ -431,8 +423,8 @@
void sendConfigEvent_l(int event, int param = 0);
void processConfigEvents();
audio_io_handle_t id() const { return mId;}
- bool standby() { return mStandby; }
- uint32_t device() { return mDevice; }
+ bool standby() const { return mStandby; }
+ uint32_t device() const { return mDevice; }
virtual audio_stream_t* stream() = 0;
sp<EffectHandle> createEffect_l(
@@ -463,12 +455,13 @@
virtual status_t addEffectChain_l(const sp<EffectChain>& chain) = 0;
// remove an effect chain from the chain list (mEffectChains)
virtual size_t removeEffectChain_l(const sp<EffectChain>& chain) = 0;
- // lock mall effect chains Mutexes. Must be called before releasing the
+ // lock all effect chains' Mutexes. Must be called before releasing the
// ThreadBase mutex before processing the mixer and effects. This guarantees the
// integrity of the chains during the process.
+ // Also sets the parameter 'effectChains' to the current value of mEffectChains.
void lockEffectChains_l(Vector<sp <EffectChain> >& effectChains);
// unlock effect chains after process
- void unlockEffectChains(Vector<sp <EffectChain> >& effectChains);
+ void unlockEffectChains(const Vector<sp<EffectChain> >& effectChains);
// set audio mode to all effect chains
void setMode(audio_mode_t mode);
// get effect module with corresponding ID on specified audio session
@@ -556,7 +549,7 @@
Vector< sp<EffectChain> > mEffectChains;
uint32_t mDevice; // output device for PlaybackThread
// input + output devices for RecordThread
- static const int kNameLength = 32;
+ static const int kNameLength = 16; // prctl(PR_SET_NAME) limit
char mName[kNameLength];
sp<IPowerManager> mPowerManager;
sp<IBinder> mWakeLockToken;
@@ -583,9 +576,11 @@
public:
enum mixer_state {
- MIXER_IDLE,
- MIXER_TRACKS_ENABLED,
- MIXER_TRACKS_READY
+ MIXER_IDLE, // no active tracks
+ MIXER_TRACKS_ENABLED, // at least one active track, but no track has any data ready
+ MIXER_TRACKS_READY // at least one active track, and at least one track has data
+ // standby mode does not have an enum value
+ // suspend by audio policy manager is orthogonal to mixer state
};
// playback track
@@ -634,9 +629,10 @@
Track(const Track&);
Track& operator = (const Track&);
- virtual status_t getNextBuffer(
- AudioBufferProvider::Buffer* buffer,
- int64_t pts);
+ // AudioBufferProvider interface
+ virtual status_t getNextBuffer(AudioBufferProvider::Buffer* buffer, int64_t pts = kInvalidPTS);
+ // releaseBuffer() not overridden
+
virtual uint32_t framesReady() const;
bool isMuted() const { return mMute; }
@@ -703,9 +699,10 @@
virtual uint32_t framesReady() const;
- virtual status_t getNextBuffer(AudioBufferProvider::Buffer* buffer,
- int64_t pts);
+ // AudioBufferProvider interface
+ virtual status_t getNextBuffer(AudioBufferProvider::Buffer* buffer, int64_t pts);
virtual void releaseBuffer(AudioBufferProvider::Buffer* buffer);
+
void timedYieldSamples(AudioBufferProvider::Buffer* buffer);
void timedYieldSilence(uint32_t numFrames,
AudioBufferProvider::Buffer* buffer);
@@ -827,8 +824,8 @@
virtual audio_stream_t* stream();
void suspend() { mSuspended++; }
- void restore() { if (mSuspended) mSuspended--; }
- bool isSuspended() const { return (mSuspended != 0); }
+ void restore() { if (mSuspended > 0) mSuspended--; }
+ bool isSuspended() const { return (mSuspended > 0); }
virtual String8 getParameters(const String8& keys);
virtual void audioConfigChanged_l(int event, int param = 0);
virtual status_t getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames);
@@ -849,7 +846,7 @@
protected:
int16_t* mMixBuffer;
- int mSuspended;
+ uint32_t mSuspended; // suspend count, > 0 means suspended
int mBytesWritten;
private:
// mMasterMute is in both PlaybackThread and in AudioFlinger. When a
@@ -866,6 +863,9 @@
virtual uint32_t idleSleepTimeUs() = 0;
virtual uint32_t suspendSleepTimeUs() = 0;
+ // Code snippets that are temporarily lifted up out of threadLoop() until the merge
+ void checkSilentMode_l();
+
private:
friend class AudioFlinger;
@@ -916,8 +916,11 @@
virtual status_t dumpInternals(int fd, const Vector<String16>& args);
protected:
- mixer_state prepareTracks_l(const SortedVector< wp<Track> >& activeTracks,
- Vector< sp<Track> > *tracksToRemove);
+ // prepareTracks_l reads and writes mActiveTracks, and also returns the
+ // pending set of tracks to remove via Vector 'tracksToRemove'. The caller is
+ // responsible for clearing or destroying this Vector later on, when it
+ // is safe to do so. That will drop the final ref count and destroy the tracks.
+ mixer_state prepareTracks_l(Vector< sp<Track> > *tracksToRemove);
virtual int getTrackName_l();
virtual void deleteTrackName_l(int name);
virtual uint32_t idleSleepTimeUs();
@@ -972,7 +975,7 @@
virtual uint32_t activeSleepTimeUs();
private:
- bool outputsReady(SortedVector< sp<OutputTrack> > &outputTracks);
+ bool outputsReady(const SortedVector<sp<OutputTrack> > &outputTracks);
void updateWaitTime();
SortedVector < sp<OutputTrack> > mOutputTracks;
@@ -997,8 +1000,9 @@
PlaybackThread *srcThread,
PlaybackThread *dstThread,
bool reRegister);
- PlaybackThread *primaryPlaybackThread_l();
- uint32_t primaryOutputDevice_l();
+ // return thread associated with primary hardware device, or NULL
+ PlaybackThread *primaryPlaybackThread_l() const;
+ uint32_t primaryOutputDevice_l() const;
friend class AudioBuffer;
@@ -1048,7 +1052,6 @@
audio_format_t format,
uint32_t channelMask,
int frameCount,
- uint32_t flags,
int sessionId);
virtual ~RecordTrack();
@@ -1067,9 +1070,9 @@
RecordTrack(const RecordTrack&);
RecordTrack& operator = (const RecordTrack&);
- virtual status_t getNextBuffer(
- AudioBufferProvider::Buffer* buffer,
- int64_t pts);
+ // AudioBufferProvider interface
+ virtual status_t getNextBuffer(AudioBufferProvider::Buffer* buffer, int64_t pts = kInvalidPTS);
+ // releaseBuffer() not overridden
bool mOverflow;
};
@@ -1094,7 +1097,6 @@
audio_format_t format,
int channelMask,
int frameCount,
- uint32_t flags,
int sessionId,
status_t *status);
@@ -1106,9 +1108,10 @@
AudioStreamIn* clearInput();
virtual audio_stream_t* stream();
- virtual status_t getNextBuffer(AudioBufferProvider::Buffer* buffer,
- int64_t pts);
+ // AudioBufferProvider interface
+ virtual status_t getNextBuffer(AudioBufferProvider::Buffer* buffer, int64_t pts);
virtual void releaseBuffer(AudioBufferProvider::Buffer* buffer);
+
virtual bool checkForNewParameters_l();
virtual String8 getParameters(const String8& keys);
virtual void audioConfigChanged_l(int event, int param = 0);
@@ -1537,25 +1540,27 @@
audio_hw_device_t* mPrimaryHardwareDev; // mAudioHwDevs[0] or NULL
Vector<audio_hw_device_t*> mAudioHwDevs;
+ // for dump, indicates which hardware operation is currently in progress (but not stream ops)
enum hardware_call_state {
- AUDIO_HW_IDLE = 0,
- AUDIO_HW_INIT,
- AUDIO_HW_OUTPUT_OPEN,
- AUDIO_HW_OUTPUT_CLOSE,
- AUDIO_HW_INPUT_OPEN,
- AUDIO_HW_INPUT_CLOSE,
- AUDIO_HW_STANDBY,
- AUDIO_HW_SET_MASTER_VOLUME,
- AUDIO_HW_GET_ROUTING,
- AUDIO_HW_SET_ROUTING,
- AUDIO_HW_GET_MODE,
- AUDIO_HW_SET_MODE,
- AUDIO_HW_GET_MIC_MUTE,
- AUDIO_HW_SET_MIC_MUTE,
- AUDIO_SET_VOICE_VOLUME,
- AUDIO_SET_PARAMETER,
- AUDIO_HW_GET_INPUT_BUFFER_SIZE,
- AUDIO_HW_GET_MASTER_VOLUME,
+ AUDIO_HW_IDLE = 0, // no operation in progress
+ AUDIO_HW_INIT, // init_check
+ AUDIO_HW_OUTPUT_OPEN, // open_output_stream
+ AUDIO_HW_OUTPUT_CLOSE, // unused
+ AUDIO_HW_INPUT_OPEN, // unused
+ AUDIO_HW_INPUT_CLOSE, // unused
+ AUDIO_HW_STANDBY, // unused
+ AUDIO_HW_SET_MASTER_VOLUME, // set_master_volume
+ AUDIO_HW_GET_ROUTING, // unused
+ AUDIO_HW_SET_ROUTING, // unused
+ AUDIO_HW_GET_MODE, // unused
+ AUDIO_HW_SET_MODE, // set_mode
+ AUDIO_HW_GET_MIC_MUTE, // get_mic_mute
+ AUDIO_HW_SET_MIC_MUTE, // set_mic_mute
+ AUDIO_HW_SET_VOICE_VOLUME, // set_voice_volume
+ AUDIO_HW_SET_PARAMETER, // set_parameters
+ AUDIO_HW_GET_INPUT_BUFFER_SIZE, // get_input_buffer_size
+ AUDIO_HW_GET_MASTER_VOLUME, // get_master_volume
+ AUDIO_HW_GET_PARAMETER, // get_parameters
};
mutable hardware_call_state mHardwareStatus; // for dump only
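[Editor's note, not part of the patch] The header hunks above change mSuspended from an int flag to an unsigned suspend count: suspend() increments, restore() decrements without underflowing, and the thread treats itself as suspended while the count is greater than zero, which lets suspend()/restore() calls nest. A minimal self-contained sketch of that counting idiom (SuspendCount is a placeholder name, not an AudioFlinger type):

    #include <cstdint>

    class SuspendCount {
    public:
        void suspend()           { mSuspended++; }
        void restore()           { if (mSuspended > 0) mSuspended--; }
        bool isSuspended() const { return mSuspended > 0; }
    private:
        uint32_t mSuspended = 0; // suspend count, > 0 means suspended
    };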
diff --git a/services/audioflinger/AudioMixer.cpp b/services/audioflinger/AudioMixer.cpp
index 020d62a..2cec525 100644
--- a/services/audioflinger/AudioMixer.cpp
+++ b/services/audioflinger/AudioMixer.cpp
@@ -332,11 +332,11 @@
return 0;
}
-void AudioMixer::setBufferProvider(int name, AudioBufferProvider* buffer)
+void AudioMixer::setBufferProvider(int name, AudioBufferProvider* bufferProvider)
{
name -= TRACK0;
assert(uint32_t(name) < MAX_NUM_TRACKS);
- mState.tracks[name].bufferProvider = buffer;
+ mState.tracks[name].bufferProvider = bufferProvider;
}
diff --git a/services/audioflinger/AudioPolicyService.cpp b/services/audioflinger/AudioPolicyService.cpp
index 987b039..753b1d2 100644
--- a/services/audioflinger/AudioPolicyService.cpp
+++ b/services/audioflinger/AudioPolicyService.cpp
@@ -69,7 +69,7 @@
// start tone playback thread
mTonePlaybackThread = new AudioCommandThread(String8(""));
// start audio commands thread
- mAudioCommandThread = new AudioCommandThread(String8("ApmCommandThread"));
+ mAudioCommandThread = new AudioCommandThread(String8("ApmCommand"));
/* instantiate the audio policy manager */
rc = hw_get_module(AUDIO_POLICY_HARDWARE_MODULE_ID, &module);
@@ -638,7 +638,7 @@
if (mName != "") {
run(mName.string(), ANDROID_PRIORITY_AUDIO);
} else {
- run("AudioCommandThread", ANDROID_PRIORITY_AUDIO);
+ run("AudioCommand", ANDROID_PRIORITY_AUDIO);
}
}
diff --git a/services/audioflinger/AudioPolicyService.h b/services/audioflinger/AudioPolicyService.h
index 679fd30..962c917 100644
--- a/services/audioflinger/AudioPolicyService.h
+++ b/services/audioflinger/AudioPolicyService.h
@@ -66,7 +66,7 @@
audio_format_t format = AUDIO_FORMAT_DEFAULT,
uint32_t channels = 0,
audio_policy_output_flags_t flags =
- AUDIO_POLICY_OUTPUT_FLAG_INDIRECT);
+ AUDIO_POLICY_OUTPUT_FLAG_NONE);
virtual status_t startOutput(audio_io_handle_t output,
audio_stream_type_t stream,
int session = 0);
diff --git a/services/camera/libcameraservice/CameraHardwareInterface.h b/services/camera/libcameraservice/CameraHardwareInterface.h
index 2ac69f78..87a0802 100644
--- a/services/camera/libcameraservice/CameraHardwareInterface.h
+++ b/services/camera/libcameraservice/CameraHardwareInterface.h
@@ -21,8 +21,6 @@
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <utils/RefBase.h>
-#include <surfaceflinger/ISurface.h>
-#include <ui/android_native_buffer.h>
#include <ui/GraphicBuffer.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 06fc708..adf1d49 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -29,10 +29,10 @@
#include <cutils/atomic.h>
#include <cutils/properties.h>
#include <gui/SurfaceTextureClient.h>
+#include <gui/Surface.h>
#include <hardware/hardware.h>
#include <media/AudioSystem.h>
#include <media/mediaplayer.h>
-#include <surfaceflinger/ISurface.h>
#include <utils/Errors.h>
#include <utils/Log.h>
#include <utils/String16.h>
diff --git a/services/camera/tests/CameraServiceTest/CameraServiceTest.cpp b/services/camera/tests/CameraServiceTest/CameraServiceTest.cpp
index 1055538..e417b79 100644
--- a/services/camera/tests/CameraServiceTest/CameraServiceTest.cpp
+++ b/services/camera/tests/CameraServiceTest/CameraServiceTest.cpp
@@ -22,7 +22,6 @@
#include <sys/types.h>
#include <sys/wait.h>
#include <unistd.h>
-#include <surfaceflinger/ISurface.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <ui/GraphicBuffer.h>