resolved conflicts for merge of 0d28be68 to master
Change-Id: Iec5f810c366d3e1c14a6f6294b0aea4ffb30ae3e
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index 7efc6d7..450971d 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -167,32 +167,20 @@
return c->unlock();
}
-// pass the buffered ISurface to the camera service
+// pass the buffered Surface to the camera service
status_t Camera::setPreviewDisplay(const sp<Surface>& surface)
{
- LOGV("setPreviewDisplay");
+ LOGV("setPreviewDisplay(%p)", surface.get());
sp <ICamera> c = mCamera;
if (c == 0) return NO_INIT;
if (surface != 0) {
- return c->setPreviewDisplay(surface->getISurface());
+ return c->setPreviewDisplay(surface);
} else {
LOGD("app passed NULL surface");
return c->setPreviewDisplay(0);
}
}
-status_t Camera::setPreviewDisplay(const sp<ISurface>& surface)
-{
- LOGV("setPreviewDisplay");
- if (surface == 0) {
- LOGD("app passed NULL surface");
- }
- sp <ICamera> c = mCamera;
- if (c == 0) return NO_INIT;
- return c->setPreviewDisplay(surface);
-}
-
-
// start preview mode
status_t Camera::startPreview()
{
@@ -202,6 +190,31 @@
return c->startPreview();
}
+int32_t Camera::getNumberOfVideoBuffers() const
+{
+ LOGV("getNumberOfVideoBuffers");
+ sp <ICamera> c = mCamera;
+ if (c == 0) return 0;
+ return c->getNumberOfVideoBuffers();
+}
+
+sp<IMemory> Camera::getVideoBuffer(int32_t index) const
+{
+ LOGV("getVideoBuffer: %d", index);
+ sp <ICamera> c = mCamera;
+ if (c == 0) return 0;
+ return c->getVideoBuffer(index);
+}
+
+status_t Camera::storeMetaDataInBuffers(bool enabled)
+{
+ LOGV("storeMetaDataInBuffers: %s",
+ enabled? "true": "false");
+ sp <ICamera> c = mCamera;
+ if (c == 0) return NO_INIT;
+ return c->storeMetaDataInBuffers(enabled);
+}
+
// start recording mode, must call setPreviewDisplay first
status_t Camera::startRecording()
{
@@ -359,6 +372,9 @@
}
if (listener != NULL) {
listener->postDataTimestamp(timestamp, msgType, dataPtr);
+ } else {
+ LOGW("No listener was set. Drop a recording frame.");
+ releaseRecordingFrame(dataPtr);
}
}
@@ -375,4 +391,3 @@
}
}; // namespace android
-
diff --git a/camera/CameraParameters.cpp b/camera/CameraParameters.cpp
index 83e5e57..e9a5f8c 100644
--- a/camera/CameraParameters.cpp
+++ b/camera/CameraParameters.cpp
@@ -73,6 +73,9 @@
const char CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED[] = "smooth-zoom-supported";
const char CameraParameters::KEY_FOCUS_DISTANCES[] = "focus-distances";
const char CameraParameters::KEY_VIDEO_FRAME_FORMAT[] = "video-frame-format";
+const char CameraParameters::KEY_VIDEO_SIZE[] = "video-size";
+const char CameraParameters::KEY_SUPPORTED_VIDEO_SIZES[] = "video-size-values";
+const char CameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO[] = "preferred-preview-size-for-video";
const char CameraParameters::TRUE[] = "true";
const char CameraParameters::FOCUS_DISTANCE_INFINITY[] = "Infinity";
@@ -129,7 +132,7 @@
const char CameraParameters::SCENE_MODE_CANDLELIGHT[] = "candlelight";
const char CameraParameters::SCENE_MODE_BARCODE[] = "barcode";
-// Formats for setPreviewFormat and setPictureFormat.
+const char CameraParameters::PIXEL_FORMAT_YUV420P[] = "yuv420p";
const char CameraParameters::PIXEL_FORMAT_YUV422SP[] = "yuv422sp";
const char CameraParameters::PIXEL_FORMAT_YUV420SP[] = "yuv420sp";
const char CameraParameters::PIXEL_FORMAT_YUV422I[] = "yuv422i-yuyv";
@@ -331,12 +334,41 @@
parse_pair(p, width, height, 'x');
}
+void CameraParameters::getPreferredPreviewSizeForVideo(int *width, int *height) const
+{
+ *width = *height = -1;
+ const char *p = get(KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO);
+ if (p == 0) return;
+ parse_pair(p, width, height, 'x');
+}
+
void CameraParameters::getSupportedPreviewSizes(Vector<Size> &sizes) const
{
const char *previewSizesStr = get(KEY_SUPPORTED_PREVIEW_SIZES);
parseSizesList(previewSizesStr, sizes);
}
+void CameraParameters::setVideoSize(int width, int height)
+{
+ char str[32];
+ sprintf(str, "%dx%d", width, height);
+ set(KEY_VIDEO_SIZE, str);
+}
+
+void CameraParameters::getVideoSize(int *width, int *height) const
+{
+ *width = *height = -1;
+ const char *p = get(KEY_VIDEO_SIZE);
+ if (p == 0) return;
+ parse_pair(p, width, height, 'x');
+}
+
+void CameraParameters::getSupportedVideoSizes(Vector<Size> &sizes) const
+{
+ const char *videoSizesStr = get(KEY_SUPPORTED_VIDEO_SIZES);
+ parseSizesList(videoSizesStr, sizes);
+}
+
void CameraParameters::setPreviewFrameRate(int fps)
{
set(KEY_PREVIEW_FRAME_RATE, fps);
diff --git a/camera/ICamera.cpp b/camera/ICamera.cpp
index 13673b5..7ba8d12 100644
--- a/camera/ICamera.cpp
+++ b/camera/ICamera.cpp
@@ -45,6 +45,9 @@
STOP_RECORDING,
RECORDING_ENABLED,
RELEASE_RECORDING_FRAME,
+ GET_NUM_VIDEO_BUFFERS,
+ GET_VIDEO_BUFFER,
+ STORE_META_DATA_IN_BUFFERS,
};
class BpCamera: public BpInterface<ICamera>
@@ -64,13 +67,13 @@
remote()->transact(DISCONNECT, data, &reply);
}
- // pass the buffered ISurface to the camera service
- status_t setPreviewDisplay(const sp<ISurface>& surface)
+ // pass the buffered Surface to the camera service
+ status_t setPreviewDisplay(const sp<Surface>& surface)
{
LOGV("setPreviewDisplay");
Parcel data, reply;
data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
- data.writeStrongBinder(surface->asBinder());
+ Surface::writeToParcel(surface, &data);
remote()->transact(SET_PREVIEW_DISPLAY, data, &reply);
return reply.readInt32();
}
@@ -133,6 +136,37 @@
remote()->transact(RELEASE_RECORDING_FRAME, data, &reply);
}
+ int32_t getNumberOfVideoBuffers() const
+ {
+ LOGV("getNumberOfVideoBuffers");
+ Parcel data, reply;
+ data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
+ remote()->transact(GET_NUM_VIDEO_BUFFERS, data, &reply);
+ return reply.readInt32();
+ }
+
+ sp<IMemory> getVideoBuffer(int32_t index) const
+ {
+ LOGV("getVideoBuffer: %d", index);
+ Parcel data, reply;
+ data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
+ data.writeInt32(index);
+ remote()->transact(GET_VIDEO_BUFFER, data, &reply);
+ sp<IMemory> mem = interface_cast<IMemory>(
+ reply.readStrongBinder());
+ return mem;
+ }
+
+ status_t storeMetaDataInBuffers(bool enabled)
+ {
+ LOGV("storeMetaDataInBuffers: %s", enabled? "true": "false");
+ Parcel data, reply;
+ data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
+ data.writeInt32(enabled);
+ remote()->transact(STORE_META_DATA_IN_BUFFERS, data, &reply);
+ return reply.readInt32();
+ }
+
// check preview state
bool previewEnabled()
{
@@ -258,7 +292,7 @@
case SET_PREVIEW_DISPLAY: {
LOGV("SET_PREVIEW_DISPLAY");
CHECK_INTERFACE(ICamera, data, reply);
- sp<ISurface> surface = interface_cast<ISurface>(data.readStrongBinder());
+ sp<Surface> surface = Surface::readFromParcel(data);
reply->writeInt32(setPreviewDisplay(surface));
return NO_ERROR;
} break;
@@ -300,6 +334,26 @@
releaseRecordingFrame(mem);
return NO_ERROR;
} break;
+ case GET_NUM_VIDEO_BUFFERS: {
+ LOGV("GET_NUM_VIDEO_BUFFERS");
+ CHECK_INTERFACE(ICamera, data, reply);
+ reply->writeInt32(getNumberOfVideoBuffers());
+ return NO_ERROR;
+ } break;
+ case GET_VIDEO_BUFFER: {
+ LOGV("GET_VIDEO_BUFFER");
+ CHECK_INTERFACE(ICamera, data, reply);
+ int32_t index = data.readInt32();
+ reply->writeStrongBinder(getVideoBuffer(index)->asBinder());
+ return NO_ERROR;
+ } break;
+ case STORE_META_DATA_IN_BUFFERS: {
+ LOGV("STORE_META_DATA_IN_BUFFERS");
+ CHECK_INTERFACE(ICamera, data, reply);
+ bool enabled = data.readInt32();
+ reply->writeInt32(storeMetaDataInBuffers(enabled));
+ return NO_ERROR;
+ } break;
case PREVIEW_ENABLED: {
LOGV("PREVIEW_ENABLED");
CHECK_INTERFACE(ICamera, data, reply);
@@ -376,4 +430,3 @@
// ----------------------------------------------------------------------------
}; // namespace android
-
diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk
index 5b74007..f8650eb 100644
--- a/cmds/stagefright/Android.mk
+++ b/cmds/stagefright/Android.mk
@@ -7,13 +7,16 @@
SineSource.cpp
LOCAL_SHARED_LIBRARIES := \
- libstagefright libmedia libutils libbinder libstagefright_foundation
+ libstagefright libmedia libutils libbinder libstagefright_foundation \
+ libskia
LOCAL_C_INCLUDES:= \
$(JNI_H_INCLUDE) \
frameworks/base/media/libstagefright \
frameworks/base/media/libstagefright/include \
- $(TOP)/frameworks/base/include/media/stagefright/openmax
+ $(TOP)/frameworks/base/include/media/stagefright/openmax \
+ external/skia/include/core \
+ external/skia/include/images \
LOCAL_CFLAGS += -Wno-multichar
@@ -53,6 +56,31 @@
LOCAL_SRC_FILES:= \
SineSource.cpp \
+ recordvideo.cpp
+
+LOCAL_SHARED_LIBRARIES := \
+ libstagefright liblog libutils libbinder
+
+LOCAL_C_INCLUDES:= \
+ $(JNI_H_INCLUDE) \
+ frameworks/base/media/libstagefright \
+ $(TOP)/frameworks/base/include/media/stagefright/openmax
+
+LOCAL_CFLAGS += -Wno-multichar
+
+LOCAL_MODULE_TAGS := debug
+
+LOCAL_MODULE:= recordvideo
+
+include $(BUILD_EXECUTABLE)
+
+
+################################################################################
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ SineSource.cpp \
audioloop.cpp
LOCAL_SHARED_LIBRARIES := \
@@ -70,3 +98,27 @@
LOCAL_MODULE:= audioloop
include $(BUILD_EXECUTABLE)
+
+################################################################################
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ stream.cpp \
+
+LOCAL_SHARED_LIBRARIES := \
+ libstagefright liblog libutils libbinder libsurfaceflinger_client \
+ libstagefright_foundation libmedia
+
+LOCAL_C_INCLUDES:= \
+ $(JNI_H_INCLUDE) \
+ frameworks/base/media/libstagefright \
+ $(TOP)/frameworks/base/include/media/stagefright/openmax
+
+LOCAL_CFLAGS += -Wno-multichar
+
+LOCAL_MODULE_TAGS := debug
+
+LOCAL_MODULE:= stream
+
+include $(BUILD_EXECUTABLE)
diff --git a/cmds/stagefright/recordvideo.cpp b/cmds/stagefright/recordvideo.cpp
new file mode 100644
index 0000000..1264215
--- /dev/null
+++ b/cmds/stagefright/recordvideo.cpp
@@ -0,0 +1,303 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "SineSource.h"
+
+#include <binder/ProcessState.h>
+#include <media/stagefright/AudioPlayer.h>
+#include <media/stagefright/MediaBufferGroup.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MPEG4Writer.h>
+#include <media/stagefright/OMXClient.h>
+#include <media/stagefright/OMXCodec.h>
+#include <media/MediaPlayerInterface.h>
+
+using namespace android;
+
+// Print usage showing how to use this utility to record videos
+static void usage(const char *me) {
+ fprintf(stderr, "usage: %s\n", me);
+ fprintf(stderr, " -h(elp)\n");
+ fprintf(stderr, " -b bit rate in bits per second (default: 300000)\n");
+ fprintf(stderr, " -c YUV420 color format: [0] semi planar or [1] planar (default: 1)\n");
+ fprintf(stderr, " -f frame rate in frames per second (default: 30)\n");
+ fprintf(stderr, " -i I frame interval in seconds (default: 1)\n");
+ fprintf(stderr, " -n number of frames to be recorded (default: 300)\n");
+ fprintf(stderr, " -w width in pixels (default: 176)\n");
+ fprintf(stderr, " -t height in pixels (default: 144)\n");
+ fprintf(stderr, " -l encoder level. see omx il header (default: encoder specific)\n");
+ fprintf(stderr, " -p encoder profile. see omx il header (default: encoder specific)\n");
+ fprintf(stderr, " -v video codec: [0] AVC [1] M4V [2] H263 (default: 0)\n");
+ fprintf(stderr, "The output file is /sdcard/output.mp4\n");
+ exit(1);
+}
+
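+// A hypothetical example invocation (the defaults are listed in usage() above):
+//
+//   recordvideo -w 1280 -t 720 -f 30 -b 3000000 -n 300 -v 0
+//
+// The encoded output is always written to /sdcard/output.mp4.
+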
+class DummySource : public MediaSource {
+
+public:
+ DummySource(int width, int height, int nFrames, int fps, int colorFormat)
+ : mWidth(width),
+ mHeight(height),
+ mMaxNumFrames(nFrames),
+ mFrameRate(fps),
+ mColorFormat(colorFormat),
+ mSize((width * height * 3) / 2) {
+
+ mGroup.add_buffer(new MediaBuffer(mSize));
+
+ // Check the color format to make sure
+ // that the buffer size mSize is set correctly above.
+ CHECK(colorFormat == OMX_COLOR_FormatYUV420SemiPlanar ||
+ colorFormat == OMX_COLOR_FormatYUV420Planar);
+ }
+
+ virtual sp<MetaData> getFormat() {
+ sp<MetaData> meta = new MetaData;
+ meta->setInt32(kKeyWidth, mWidth);
+ meta->setInt32(kKeyHeight, mHeight);
+ meta->setInt32(kKeyColorFormat, mColorFormat);
+ meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
+
+ return meta;
+ }
+
+ virtual status_t start(MetaData *params) {
+ mNumFramesOutput = 0;
+ return OK;
+ }
+
+ virtual status_t stop() {
+ return OK;
+ }
+
+ virtual status_t read(
+ MediaBuffer **buffer, const MediaSource::ReadOptions *options) {
+
+ if (mNumFramesOutput % 10 == 0) {
+ fprintf(stderr, ".");
+ }
+ if (mNumFramesOutput == mMaxNumFrames) {
+ return ERROR_END_OF_STREAM;
+ }
+
+ status_t err = mGroup.acquire_buffer(buffer);
+ if (err != OK) {
+ return err;
+ }
+
+ // We don't care about the contents; we just test the video encoder.
+ // Also, by skipping the content generation, we can return from
+ // read() much faster.
+ //char x = (char)((double)rand() / RAND_MAX * 255);
+ //memset((*buffer)->data(), x, mSize);
+ (*buffer)->set_range(0, mSize);
+ (*buffer)->meta_data()->clear();
+ (*buffer)->meta_data()->setInt64(
+ kKeyTime, (mNumFramesOutput * 1000000) / mFrameRate);
+ ++mNumFramesOutput;
+
+ return OK;
+ }
+
+protected:
+ virtual ~DummySource() {}
+
+private:
+ MediaBufferGroup mGroup;
+ int mWidth, mHeight;
+ int mMaxNumFrames;
+ int mFrameRate;
+ int mColorFormat;
+ size_t mSize;
+ int64_t mNumFramesOutput;
+
+ DummySource(const DummySource &);
+ DummySource &operator=(const DummySource &);
+};
+
+enum {
+ kYUV420SP = 0,
+ kYUV420P = 1,
+};
+
+// returns -1 if mapping of the given color is unsuccessful
+// returns an omx color enum value otherwise
+static int translateColorToOmxEnumValue(int color) {
+ switch (color) {
+ case kYUV420SP:
+ return OMX_COLOR_FormatYUV420SemiPlanar;
+ case kYUV420P:
+ return OMX_COLOR_FormatYUV420Planar;
+ default:
+ fprintf(stderr, "Unsupported color: %d\n", color);
+ return -1;
+ }
+}
+
+int main(int argc, char **argv) {
+
+ // Default values for the program if not overwritten
+ int frameRateFps = 30;
+ int width = 176;
+ int height = 144;
+ int bitRateBps = 300000;
+ int iFramesIntervalSeconds = 1;
+ int colorFormat = OMX_COLOR_FormatYUV420Planar;
+ int nFrames = 300;
+ int level = -1; // Encoder specific default
+ int profile = -1; // Encoder specific default
+ int codec = 0;
+ const char *fileName = "/sdcard/output.mp4";
+
+ android::ProcessState::self()->startThreadPool();
+ int res;
+ while ((res = getopt(argc, argv, "b:c:f:i:n:w:t:l:p:v:h")) >= 0) {
+ switch (res) {
+ case 'b':
+ {
+ bitRateBps = atoi(optarg);
+ break;
+ }
+
+ case 'c':
+ {
+ colorFormat = translateColorToOmxEnumValue(atoi(optarg));
+ if (colorFormat == -1) {
+ usage(argv[0]);
+ }
+ break;
+ }
+
+ case 'f':
+ {
+ frameRateFps = atoi(optarg);
+ break;
+ }
+
+ case 'i':
+ {
+ iFramesIntervalSeconds = atoi(optarg);
+ break;
+ }
+
+ case 'n':
+ {
+ nFrames = atoi(optarg);
+ break;
+ }
+
+ case 'w':
+ {
+ width = atoi(optarg);
+ break;
+ }
+
+ case 't':
+ {
+ height = atoi(optarg);
+ break;
+ }
+
+ case 'l':
+ {
+ level = atoi(optarg);
+ break;
+ }
+
+ case 'p':
+ {
+ profile = atoi(optarg);
+ break;
+ }
+
+ case 'v':
+ {
+ codec = atoi(optarg);
+ if (codec < 0 || codec > 2) {
+ usage(argv[0]);
+ }
+ break;
+ }
+
+ case 'h':
+ default:
+ {
+ usage(argv[0]);
+ break;
+ }
+ }
+ }
+
+ OMXClient client;
+ CHECK_EQ(client.connect(), OK);
+
+ status_t err = OK;
+ sp<MediaSource> source =
+ new DummySource(width, height, nFrames, frameRateFps, colorFormat);
+
+ sp<MetaData> enc_meta = new MetaData;
+ switch (codec) {
+ case 1:
+ enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
+ break;
+ case 2:
+ enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
+ break;
+ default:
+ enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
+ break;
+ }
+ enc_meta->setInt32(kKeyWidth, width);
+ enc_meta->setInt32(kKeyHeight, height);
+ enc_meta->setInt32(kKeyFrameRate, frameRateFps);
+ enc_meta->setInt32(kKeyBitRate, bitRateBps);
+ enc_meta->setInt32(kKeyStride, width);
+ enc_meta->setInt32(kKeySliceHeight, height);
+ enc_meta->setInt32(kKeyIFramesInterval, iFramesIntervalSeconds);
+ enc_meta->setInt32(kKeyColorFormat, colorFormat);
+ if (level != -1) {
+ enc_meta->setInt32(kKeyVideoLevel, level);
+ }
+ if (profile != -1) {
+ enc_meta->setInt32(kKeyVideoProfile, profile);
+ }
+
+ sp<MediaSource> encoder =
+ OMXCodec::Create(
+ client.interface(), enc_meta, true /* createEncoder */, source);
+
+ sp<MPEG4Writer> writer = new MPEG4Writer(fileName);
+ writer->addSource(encoder);
+ int64_t start = systemTime();
+ CHECK_EQ(OK, writer->start());
+ while (!writer->reachedEOS()) {
+ }
+ err = writer->stop();
+ int64_t end = systemTime();
+
+ fprintf(stderr, "$\n");
+ client.disconnect();
+
+ if (err != OK && err != ERROR_END_OF_STREAM) {
+ fprintf(stderr, "record failed: %d\n", err);
+ return 1;
+ }
+ fprintf(stderr, "encoding %d frames in %lld us\n", nFrames, (end-start)/1000);
+ fprintf(stderr, "encoding speed is: %.2f fps\n", (nFrames * 1E9) / (end-start));
+ return 0;
+}
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index f55b746..7e7f6d1 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -49,6 +49,10 @@
#include <media/stagefright/MPEG2TSWriter.h>
#include <media/stagefright/MPEG4Writer.h>
+#include <private/media/VideoFrame.h>
+#include <SkBitmap.h>
+#include <SkImageEncoder.h>
+
#include <fcntl.h>
using namespace android;
@@ -59,6 +63,7 @@
static bool gPreferSoftwareCodec;
static bool gPlaybackAudio;
static bool gWriteMP4;
+static bool gDisplayHistogram;
static String8 gWriteMP4Filename;
static int64_t getNowUs() {
@@ -68,6 +73,58 @@
return (int64_t)tv.tv_usec + tv.tv_sec * 1000000ll;
}
+static int CompareIncreasing(const int64_t *a, const int64_t *b) {
+ return (*a) < (*b) ? -1 : (*a) > (*b) ? 1 : 0;
+}
+
+static void displayDecodeHistogram(Vector<int64_t> *decodeTimesUs) {
+ printf("decode times:\n");
+
+ decodeTimesUs->sort(CompareIncreasing);
+
+ size_t n = decodeTimesUs->size();
+ int64_t minUs = decodeTimesUs->itemAt(0);
+ int64_t maxUs = decodeTimesUs->itemAt(n - 1);
+
+ printf("min decode time %lld us (%.2f secs)\n", minUs, minUs / 1E6);
+ printf("max decode time %lld us (%.2f secs)\n", maxUs, maxUs / 1E6);
+
+ size_t counts[100];
+ for (size_t i = 0; i < 100; ++i) {
+ counts[i] = 0;
+ }
+
+ for (size_t i = 0; i < n; ++i) {
+ int64_t x = decodeTimesUs->itemAt(i);
+
+ size_t slot = ((x - minUs) * 100) / (maxUs - minUs);
+ if (slot == 100) { slot = 99; }
+
+ ++counts[slot];
+ }
+
+ for (size_t i = 0; i < 100; ++i) {
+ int64_t slotUs = minUs + (i * (maxUs - minUs) / 100);
+
+ double fps = 1E6 / slotUs;
+ printf("[%.2f fps]: %d\n", fps, counts[i]);
+ }
+}
+
+static void displayAVCProfileLevelIfPossible(const sp<MetaData>& meta) {
+ uint32_t type;
+ const void *data;
+ size_t size;
+ if (meta->findData(kKeyAVCC, &type, &data, &size)) {
+ const uint8_t *ptr = (const uint8_t *)data;
+ CHECK(size >= 7);
+ CHECK(ptr[0] == 1); // configurationVersion == 1
+ uint8_t profile = ptr[1];
+ uint8_t level = ptr[3];
+ fprintf(stderr, "AVC video profile %d and level %d\n", profile, level);
+ }
+}
+
static void playSource(OMXClient *client, sp<MediaSource> &source) {
sp<MetaData> meta = source->getFormat();
@@ -87,6 +144,7 @@
fprintf(stderr, "Failed to instantiate decoder for '%s'.\n", mime);
return;
}
+ displayAVCProfileLevelIfPossible(meta);
}
source.clear();
@@ -201,6 +259,8 @@
int64_t sumDecodeUs = 0;
int64_t totalBytes = 0;
+ Vector<int64_t> decodeTimesUs;
+
while (numIterationsLeft-- > 0) {
long numFrames = 0;
@@ -224,9 +284,17 @@
break;
}
- if (buffer->range_length() > 0 && (n++ % 16) == 0) {
- printf(".");
- fflush(stdout);
+ if (buffer->range_length() > 0) {
+ if (gDisplayHistogram && n > 0) {
+ // Ignore the first time since it includes some setup
+ // cost.
+ decodeTimesUs.push(delayDecodeUs);
+ }
+
+ if ((n++ % 16) == 0) {
+ printf(".");
+ fflush(stdout);
+ }
}
sumDecodeUs += delayDecodeUs;
@@ -266,6 +334,10 @@
(double)sumDecodeUs / n);
printf("decoded a total of %d frame(s).\n", n);
+
+ if (gDisplayHistogram) {
+ displayDecodeHistogram(&decodeTimesUs);
+ }
} else if (!strncasecmp("audio/", mime, 6)) {
// Frame count makes less sense for audio, as the output buffer
// sizes may be different across decoders.
@@ -466,6 +538,8 @@
fprintf(stderr, " -o playback audio\n");
fprintf(stderr, " -w(rite) filename (write to .mp4 file)\n");
fprintf(stderr, " -k seek test\n");
+ fprintf(stderr, " -x display a histogram of decoding times/fps "
+ "(video only)\n");
}
int main(int argc, char **argv) {
@@ -482,12 +556,13 @@
gPreferSoftwareCodec = false;
gPlaybackAudio = false;
gWriteMP4 = false;
+ gDisplayHistogram = false;
sp<ALooper> looper;
sp<ARTSPController> rtspController;
int res;
- while ((res = getopt(argc, argv, "han:lm:b:ptsow:k")) >= 0) {
+ while ((res = getopt(argc, argv, "han:lm:b:ptsow:kx")) >= 0) {
switch (res) {
case 'a':
{
@@ -560,6 +635,12 @@
break;
}
+ case 'x':
+ {
+ gDisplayHistogram = true;
+ break;
+ }
+
case '?':
case 'h':
default:
@@ -604,6 +685,19 @@
if (mem != NULL) {
printf("captureFrame(%s) => OK\n", filename);
+
+ VideoFrame *frame = (VideoFrame *)mem->pointer();
+
+ SkBitmap bitmap;
+ bitmap.setConfig(
+ SkBitmap::kRGB_565_Config, frame->mWidth, frame->mHeight);
+
+ bitmap.setPixels((uint8_t *)frame + sizeof(VideoFrame));
+
+ CHECK(SkImageEncoder::EncodeFile(
+ "/sdcard/out.jpg", bitmap,
+ SkImageEncoder::kJPEG_Type,
+ SkImageEncoder::kDefaultQuality));
} else {
mem = retriever->extractAlbumArt();
diff --git a/cmds/stagefright/stream.cpp b/cmds/stagefright/stream.cpp
new file mode 100644
index 0000000..f2b5638
--- /dev/null
+++ b/cmds/stagefright/stream.cpp
@@ -0,0 +1,168 @@
+#include <binder/ProcessState.h>
+
+#include <media/IStreamSource.h>
+#include <media/mediaplayer.h>
+#include <media/stagefright/foundation/ADebug.h>
+
+#include <binder/IServiceManager.h>
+#include <media/IMediaPlayerService.h>
+#include <surfaceflinger/ISurfaceComposer.h>
+#include <surfaceflinger/SurfaceComposerClient.h>
+
+#include <fcntl.h>
+
+using namespace android;
+
+struct MyStreamSource : public BnStreamSource {
+ // Caller retains ownership of fd.
+ MyStreamSource(int fd);
+
+ virtual void setListener(const sp<IStreamListener> &listener);
+ virtual void setBuffers(const Vector<sp<IMemory> > &buffers);
+
+ virtual void onBufferAvailable(size_t index);
+
+protected:
+ virtual ~MyStreamSource();
+
+private:
+ int mFd;
+
+ sp<IStreamListener> mListener;
+ Vector<sp<IMemory> > mBuffers;
+
+ DISALLOW_EVIL_CONSTRUCTORS(MyStreamSource);
+};
+
+MyStreamSource::MyStreamSource(int fd)
+ : mFd(fd) {
+ CHECK_GE(fd, 0);
+}
+
+MyStreamSource::~MyStreamSource() {
+}
+
+void MyStreamSource::setListener(const sp<IStreamListener> &listener) {
+ mListener = listener;
+}
+
+void MyStreamSource::setBuffers(const Vector<sp<IMemory> > &buffers) {
+ mBuffers = buffers;
+}
+
+void MyStreamSource::onBufferAvailable(size_t index) {
+ CHECK_LT(index, mBuffers.size());
+ sp<IMemory> mem = mBuffers.itemAt(index);
+
+ ssize_t n = read(mFd, mem->pointer(), mem->size());
+ if (n <= 0) {
+ mListener->queueCommand(IStreamListener::EOS);
+ } else {
+ mListener->queueBuffer(index, n);
+ }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct MyClient : public BnMediaPlayerClient {
+ MyClient()
+ : mEOS(false) {
+ }
+
+ virtual void notify(int msg, int ext1, int ext2) {
+ Mutex::Autolock autoLock(mLock);
+
+ if (msg == MEDIA_ERROR || msg == MEDIA_PLAYBACK_COMPLETE) {
+ mEOS = true;
+ mCondition.signal();
+ }
+ }
+
+ void waitForEOS() {
+ Mutex::Autolock autoLock(mLock);
+ while (!mEOS) {
+ mCondition.wait(mLock);
+ }
+ }
+
+protected:
+ virtual ~MyClient() {
+ }
+
+private:
+ Mutex mLock;
+ Condition mCondition;
+
+ bool mEOS;
+
+ DISALLOW_EVIL_CONSTRUCTORS(MyClient);
+};
+
+int main(int argc, char **argv) {
+ android::ProcessState::self()->startThreadPool();
+
+ if (argc != 2) {
+ fprintf(stderr, "Usage: %s filename\n", argv[0]);
+ return 1;
+ }
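+ // Illustrative usage (hypothetical input path; the file must be a stream the
+ // media player service can handle, e.g. an MPEG-2 transport stream):
+ //
+ //   stream /sdcard/input.ts
+ //
+ // The file is fed to the player in IMemory-sized chunks by
+ // MyStreamSource::onBufferAvailable() above.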
+
+ sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
+ CHECK_EQ(composerClient->initCheck(), (status_t)OK);
+
+ sp<SurfaceControl> control =
+ composerClient->createSurface(
+ getpid(),
+ String8("A Surface"),
+ 0,
+ 1280,
+ 800,
+ PIXEL_FORMAT_RGB_565,
+ 0);
+
+ CHECK(control != NULL);
+ CHECK(control->isValid());
+
+ CHECK_EQ(composerClient->openTransaction(), (status_t)OK);
+ CHECK_EQ(control->setLayer(30000), (status_t)OK);
+ CHECK_EQ(control->show(), (status_t)OK);
+ CHECK_EQ(composerClient->closeTransaction(), (status_t)OK);
+
+ sp<Surface> surface = control->getSurface();
+ CHECK(surface != NULL);
+
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->getService(String16("media.player"));
+ sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder);
+
+ CHECK(service.get() != NULL);
+
+ int fd = open(argv[1], O_RDONLY);
+
+ if (fd < 0) {
+ fprintf(stderr, "Failed to open file '%s'.", argv[1]);
+ return 1;
+ }
+
+ sp<MyClient> client = new MyClient;
+
+ sp<IMediaPlayer> player =
+ service->create(getpid(), client, new MyStreamSource(fd), 0);
+
+ if (player != NULL) {
+ player->setVideoSurface(surface);
+ player->start();
+
+ client->waitForEOS();
+
+ player->stop();
+ } else {
+ fprintf(stderr, "failed to instantiate player.\n");
+ }
+
+ close(fd);
+ fd = -1;
+
+ composerClient->dispose();
+
+ return 0;
+}
diff --git a/drm/common/DrmEngineBase.cpp b/drm/common/DrmEngineBase.cpp
index ac360eb..9b16c36 100644
--- a/drm/common/DrmEngineBase.cpp
+++ b/drm/common/DrmEngineBase.cpp
@@ -84,7 +84,7 @@
}
status_t DrmEngineBase::setPlaybackStatus(
- int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position) {
+ int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position) {
return onSetPlaybackStatus(uniqueId, decryptHandle, playbackStatus, position);
}
@@ -120,7 +120,7 @@
}
status_t DrmEngineBase::openDecryptSession(
- int uniqueId, DecryptHandle* decryptHandle, int fd, int offset, int length) {
+ int uniqueId, DecryptHandle* decryptHandle, int fd, off64_t offset, off64_t length) {
return onOpenDecryptSession(uniqueId, decryptHandle, fd, offset, length);
}
@@ -150,7 +150,7 @@
}
ssize_t DrmEngineBase::pread(
- int uniqueId, DecryptHandle* decryptHandle, void* buffer, ssize_t numBytes, off_t offset) {
+ int uniqueId, DecryptHandle* decryptHandle, void* buffer, ssize_t numBytes, off64_t offset) {
return onPread(uniqueId, decryptHandle, buffer, numBytes, offset);
}
diff --git a/drm/common/IDrmManagerService.cpp b/drm/common/IDrmManagerService.cpp
index 723b50e..75edac6 100644
--- a/drm/common/IDrmManagerService.cpp
+++ b/drm/common/IDrmManagerService.cpp
@@ -363,7 +363,7 @@
}
status_t BpDrmManagerService::setPlaybackStatus(
- int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position) {
+ int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position) {
LOGV("setPlaybackStatus");
Parcel data, reply;
@@ -459,7 +459,7 @@
if (0 != reply.dataAvail()) {
//Filling DRM Converted Status
const int statusCode = reply.readInt32();
- const int offset = reply.readInt32();
+ const off64_t offset = reply.readInt64();
DrmBuffer* convertedData = NULL;
if (0 != reply.dataAvail()) {
@@ -491,7 +491,7 @@
if (0 != reply.dataAvail()) {
//Filling DRM Converted Status
const int statusCode = reply.readInt32();
- const int offset = reply.readInt32();
+ const off64_t offset = reply.readInt64();
DrmBuffer* convertedData = NULL;
if (0 != reply.dataAvail()) {
@@ -545,15 +545,15 @@
}
DecryptHandle* BpDrmManagerService::openDecryptSession(
- int uniqueId, int fd, int offset, int length) {
+ int uniqueId, int fd, off64_t offset, off64_t length) {
LOGV("Entering BpDrmManagerService::openDecryptSession");
Parcel data, reply;
data.writeInterfaceToken(IDrmManagerService::getInterfaceDescriptor());
data.writeInt32(uniqueId);
data.writeFileDescriptor(fd);
- data.writeInt32(offset);
- data.writeInt32(length);
+ data.writeInt64(offset);
+ data.writeInt64(length);
remote()->transact(OPEN_DECRYPT_SESSION, data, &reply);
@@ -569,8 +569,6 @@
handle->decryptInfo = new DecryptInfo();
handle->decryptInfo->decryptBufferLength = reply.readInt32();
}
- } else {
- LOGE("no decryptHandle is generated in service side");
}
return handle;
}
@@ -729,7 +727,7 @@
ssize_t BpDrmManagerService::pread(
int uniqueId, DecryptHandle* decryptHandle, void* buffer,
- ssize_t numBytes, off_t offset) {
+ ssize_t numBytes, off64_t offset) {
LOGV("read");
Parcel data, reply;
int result;
@@ -749,7 +747,7 @@
}
data.writeInt32(numBytes);
- data.writeInt32(offset);
+ data.writeInt64(offset);
remote()->transact(PREAD, data, &reply);
result = reply.readInt32();
@@ -1185,7 +1183,7 @@
if (NULL != drmConvertedStatus) {
//Filling Drm Converted Ststus
reply->writeInt32(drmConvertedStatus->statusCode);
- reply->writeInt32(drmConvertedStatus->offset);
+ reply->writeInt64(drmConvertedStatus->offset);
if (NULL != drmConvertedStatus->convertedData) {
const DrmBuffer* convertedData = drmConvertedStatus->convertedData;
@@ -1214,7 +1212,7 @@
if (NULL != drmConvertedStatus) {
//Filling Drm Converted Ststus
reply->writeInt32(drmConvertedStatus->statusCode);
- reply->writeInt32(drmConvertedStatus->offset);
+ reply->writeInt64(drmConvertedStatus->offset);
if (NULL != drmConvertedStatus->convertedData) {
const DrmBuffer* convertedData = drmConvertedStatus->convertedData;
@@ -1274,7 +1272,7 @@
const int fd = data.readFileDescriptor();
DecryptHandle* handle
- = openDecryptSession(uniqueId, fd, data.readInt32(), data.readInt32());
+ = openDecryptSession(uniqueId, fd, data.readInt64(), data.readInt64());
if (NULL != handle) {
reply->writeInt32(handle->decryptId);
@@ -1285,8 +1283,6 @@
reply->writeInt32(handle->decryptInfo->decryptBufferLength);
delete handle->decryptInfo; handle->decryptInfo = NULL;
}
- } else {
- LOGE("NULL decryptHandle is returned");
}
delete handle; handle = NULL;
return DRM_NO_ERROR;
@@ -1481,7 +1477,7 @@
const int numBytes = data.readInt32();
char* buffer = new char[numBytes];
- const off_t offset = data.readInt32();
+ const off64_t offset = data.readInt64();
ssize_t result = pread(uniqueId, &handle, buffer, numBytes, offset);
reply->writeInt32(result);
diff --git a/drm/common/ReadWriteUtils.cpp b/drm/common/ReadWriteUtils.cpp
index 7ec4fa2..c16214e 100644
--- a/drm/common/ReadWriteUtils.cpp
+++ b/drm/common/ReadWriteUtils.cpp
@@ -42,7 +42,7 @@
struct stat sb;
if (fstat(fd, &sb) == 0 && sb.st_size > 0) {
- int length = sb.st_size;
+ off64_t length = sb.st_size;
char* bytes = new char[length];
if (length == read(fd, (void*) bytes, length)) {
string.append(bytes, length);
@@ -57,7 +57,7 @@
int ReadWriteUtils::readBytes(const String8& filePath, char** buffer) {
FILE* file = NULL;
file = fopen(filePath.string(), "r");
- int length = 0;
+ off64_t length = 0;
if (NULL != file) {
int fd = fileno(file);
diff --git a/drm/drmserver/DrmManager.cpp b/drm/drmserver/DrmManager.cpp
index 537791c..9a6f787 100644
--- a/drm/drmserver/DrmManager.cpp
+++ b/drm/drmserver/DrmManager.cpp
@@ -268,7 +268,7 @@
}
status_t DrmManager::setPlaybackStatus(
- int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position) {
+ int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position) {
status_t result = DRM_ERROR_UNKNOWN;
if (mDecryptSessionMap.indexOfKey(decryptHandle->decryptId) != NAME_NOT_FOUND) {
IDrmEngine* drmEngine = mDecryptSessionMap.valueFor(decryptHandle->decryptId);
@@ -380,7 +380,7 @@
return DRM_NO_ERROR;
}
-DecryptHandle* DrmManager::openDecryptSession(int uniqueId, int fd, int offset, int length) {
+DecryptHandle* DrmManager::openDecryptSession(int uniqueId, int fd, off64_t offset, off64_t length) {
Mutex::Autolock _l(mDecryptLock);
status_t result = DRM_ERROR_CANNOT_HANDLE;
Vector<String8> plugInIdList = mPlugInManager.getPlugInIdList();
@@ -403,7 +403,6 @@
}
if (DRM_NO_ERROR != result) {
delete handle; handle = NULL;
- LOGE("DrmManager::openDecryptSession: no capable plug-in found");
}
return handle;
}
@@ -481,7 +480,7 @@
}
ssize_t DrmManager::pread(int uniqueId, DecryptHandle* decryptHandle,
- void* buffer, ssize_t numBytes, off_t offset) {
+ void* buffer, ssize_t numBytes, off64_t offset) {
ssize_t result = DECRYPT_FILE_ERROR;
if (mDecryptSessionMap.indexOfKey(decryptHandle->decryptId) != NAME_NOT_FOUND) {
diff --git a/drm/drmserver/DrmManagerService.cpp b/drm/drmserver/DrmManagerService.cpp
index 4dcfa72..0901a44 100644
--- a/drm/drmserver/DrmManagerService.cpp
+++ b/drm/drmserver/DrmManagerService.cpp
@@ -162,7 +162,7 @@
}
status_t DrmManagerService::setPlaybackStatus(
- int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position) {
+ int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position) {
LOGV("Entering setPlaybackStatus");
return mDrmManager->setPlaybackStatus(uniqueId, decryptHandle, playbackStatus, position);
}
@@ -207,7 +207,7 @@
}
DecryptHandle* DrmManagerService::openDecryptSession(
- int uniqueId, int fd, int offset, int length) {
+ int uniqueId, int fd, off64_t offset, off64_t length) {
LOGV("Entering DrmManagerService::openDecryptSession");
if (isProtectedCallAllowed()) {
return mDrmManager->openDecryptSession(uniqueId, fd, offset, length);
@@ -251,7 +251,7 @@
}
ssize_t DrmManagerService::pread(int uniqueId, DecryptHandle* decryptHandle,
- void* buffer, ssize_t numBytes, off_t offset) {
+ void* buffer, ssize_t numBytes, off64_t offset) {
LOGV("Entering pread");
return mDrmManager->pread(uniqueId, decryptHandle, buffer, numBytes, offset);
}
diff --git a/drm/libdrmframework/DrmManagerClient.cpp b/drm/libdrmframework/DrmManagerClient.cpp
index fa3d52a..578e135 100644
--- a/drm/libdrmframework/DrmManagerClient.cpp
+++ b/drm/libdrmframework/DrmManagerClient.cpp
@@ -82,7 +82,7 @@
}
status_t DrmManagerClient::setPlaybackStatus(
- DecryptHandle* decryptHandle, int playbackStatus, int position) {
+ DecryptHandle* decryptHandle, int playbackStatus, int64_t position) {
return mDrmManagerClientImpl
->setPlaybackStatus(mUniqueId, decryptHandle, playbackStatus, position);
}
@@ -116,7 +116,7 @@
return mDrmManagerClientImpl->getAllSupportInfo(mUniqueId, length, drmSupportInfoArray);
}
-DecryptHandle* DrmManagerClient::openDecryptSession(int fd, int offset, int length) {
+DecryptHandle* DrmManagerClient::openDecryptSession(int fd, off64_t offset, off64_t length) {
return mDrmManagerClientImpl->openDecryptSession(mUniqueId, fd, offset, length);
}
@@ -149,7 +149,7 @@
}
ssize_t DrmManagerClient::pread(
- DecryptHandle* decryptHandle, void* buffer, ssize_t numBytes, off_t offset) {
+ DecryptHandle* decryptHandle, void* buffer, ssize_t numBytes, off64_t offset) {
Mutex::Autolock _l(mDecryptLock);
return mDrmManagerClientImpl->pread(mUniqueId, decryptHandle, buffer, numBytes, offset);
}
diff --git a/drm/libdrmframework/DrmManagerClientImpl.cpp b/drm/libdrmframework/DrmManagerClientImpl.cpp
index 32fa491..f39131d 100644
--- a/drm/libdrmframework/DrmManagerClientImpl.cpp
+++ b/drm/libdrmframework/DrmManagerClientImpl.cpp
@@ -178,7 +178,7 @@
}
status_t DrmManagerClientImpl::setPlaybackStatus(
- int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position) {
+ int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position) {
status_t status = DRM_ERROR_UNKNOWN;
if (NULL != decryptHandle) {
status = getDrmManagerService()->setPlaybackStatus(
@@ -239,7 +239,7 @@
}
DecryptHandle* DrmManagerClientImpl::openDecryptSession(
- int uniqueId, int fd, int offset, int length) {
+ int uniqueId, int fd, off64_t offset, off64_t length) {
return getDrmManagerService()->openDecryptSession(uniqueId, fd, offset, length);
}
@@ -291,7 +291,7 @@
}
ssize_t DrmManagerClientImpl::pread(int uniqueId, DecryptHandle* decryptHandle,
- void* buffer, ssize_t numBytes, off_t offset) {
+ void* buffer, ssize_t numBytes, off64_t offset) {
ssize_t retCode = INVALID_VALUE;
if ((NULL != decryptHandle) && (NULL != buffer) && (0 < numBytes)) {
retCode = getDrmManagerService()->pread(uniqueId, decryptHandle, buffer, numBytes, offset);
diff --git a/drm/libdrmframework/include/DrmManager.h b/drm/libdrmframework/include/DrmManager.h
index bc462c2..e05366d 100644
--- a/drm/libdrmframework/include/DrmManager.h
+++ b/drm/libdrmframework/include/DrmManager.h
@@ -95,7 +95,7 @@
status_t consumeRights(int uniqueId, DecryptHandle* decryptHandle, int action, bool reserve);
status_t setPlaybackStatus(
- int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position);
+ int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position);
bool validateAction(
int uniqueId, const String8& path, int action, const ActionDescription& description);
@@ -112,7 +112,7 @@
status_t getAllSupportInfo(int uniqueId, int* length, DrmSupportInfo** drmSupportInfoArray);
- DecryptHandle* openDecryptSession(int uniqueId, int fd, int offset, int length);
+ DecryptHandle* openDecryptSession(int uniqueId, int fd, off64_t offset, off64_t length);
DecryptHandle* openDecryptSession(int uniqueId, const char* uri);
@@ -127,7 +127,7 @@
status_t finalizeDecryptUnit(int uniqueId, DecryptHandle* decryptHandle, int decryptUnitId);
ssize_t pread(int uniqueId, DecryptHandle* decryptHandle,
- void* buffer, ssize_t numBytes, off_t offset);
+ void* buffer, ssize_t numBytes, off64_t offset);
void onInfo(const DrmInfoEvent& event);
diff --git a/drm/libdrmframework/include/DrmManagerClientImpl.h b/drm/libdrmframework/include/DrmManagerClientImpl.h
index ff84fc7..0a7fcd1 100644
--- a/drm/libdrmframework/include/DrmManagerClientImpl.h
+++ b/drm/libdrmframework/include/DrmManagerClientImpl.h
@@ -203,7 +203,7 @@
* Returns DRM_NO_ERROR for success, DRM_ERROR_UNKNOWN for failure
*/
status_t setPlaybackStatus(
- int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position);
+ int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position);
/**
* Validates whether an action on the DRM content is allowed or not.
@@ -303,7 +303,7 @@
* @return
* Handle for the decryption session
*/
- DecryptHandle* openDecryptSession(int uniqueId, int fd, int offset, int length);
+ DecryptHandle* openDecryptSession(int uniqueId, int fd, off64_t offset, off64_t length);
/**
* Open the decrypt session to decrypt the given protected content
@@ -381,7 +381,7 @@
* @return Number of bytes read. Returns -1 for Failure.
*/
ssize_t pread(int uniqueId, DecryptHandle* decryptHandle,
- void* buffer, ssize_t numBytes, off_t offset);
+ void* buffer, ssize_t numBytes, off64_t offset);
/**
* Notify the event to the registered listener
diff --git a/drm/libdrmframework/include/DrmManagerService.h b/drm/libdrmframework/include/DrmManagerService.h
index f346356..d0a0db7 100644
--- a/drm/libdrmframework/include/DrmManagerService.h
+++ b/drm/libdrmframework/include/DrmManagerService.h
@@ -81,7 +81,7 @@
status_t consumeRights(int uniqueId, DecryptHandle* decryptHandle, int action, bool reserve);
status_t setPlaybackStatus(
- int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position);
+ int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position);
bool validateAction(int uniqueId, const String8& path,
int action, const ActionDescription& description);
@@ -98,7 +98,7 @@
status_t getAllSupportInfo(int uniqueId, int* length, DrmSupportInfo** drmSupportInfoArray);
- DecryptHandle* openDecryptSession(int uniqueId, int fd, int offset, int length);
+ DecryptHandle* openDecryptSession(int uniqueId, int fd, off64_t offset, off64_t length);
DecryptHandle* openDecryptSession(int uniqueId, const char* uri);
@@ -113,7 +113,7 @@
status_t finalizeDecryptUnit(int uniqueId, DecryptHandle* decryptHandle, int decryptUnitId);
ssize_t pread(int uniqueId, DecryptHandle* decryptHandle,
- void* buffer, ssize_t numBytes, off_t offset);
+ void* buffer, ssize_t numBytes, off64_t offset);
private:
DrmManager* mDrmManager;
diff --git a/drm/libdrmframework/include/IDrmManagerService.h b/drm/libdrmframework/include/IDrmManagerService.h
index f1dabd3..2424ea5 100644
--- a/drm/libdrmframework/include/IDrmManagerService.h
+++ b/drm/libdrmframework/include/IDrmManagerService.h
@@ -120,7 +120,7 @@
int uniqueId, DecryptHandle* decryptHandle, int action, bool reserve) = 0;
virtual status_t setPlaybackStatus(
- int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position) = 0;
+ int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position) = 0;
virtual bool validateAction(
int uniqueId, const String8& path,
@@ -140,7 +140,7 @@
virtual status_t getAllSupportInfo(
int uniqueId, int* length, DrmSupportInfo** drmSupportInfoArray) = 0;
- virtual DecryptHandle* openDecryptSession(int uniqueId, int fd, int offset, int length) = 0;
+ virtual DecryptHandle* openDecryptSession(int uniqueId, int fd, off64_t offset, off64_t length) = 0;
virtual DecryptHandle* openDecryptSession(int uniqueId, const char* uri) = 0;
@@ -156,7 +156,7 @@
int uniqueId, DecryptHandle* decryptHandle, int decryptUnitId) = 0;
virtual ssize_t pread(int uniqueId, DecryptHandle* decryptHandle,
- void* buffer, ssize_t numBytes,off_t offset) = 0;
+ void* buffer, ssize_t numBytes,off64_t offset) = 0;
};
/**
@@ -204,7 +204,7 @@
int uniqueId, DecryptHandle* decryptHandle, int action, bool reserve);
virtual status_t setPlaybackStatus(
- int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position);
+ int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position);
virtual bool validateAction(
int uniqueId, const String8& path, int action, const ActionDescription& description);
@@ -223,7 +223,7 @@
virtual status_t getAllSupportInfo(
int uniqueId, int* length, DrmSupportInfo** drmSupportInfoArray);
- virtual DecryptHandle* openDecryptSession(int uniqueId, int fd, int offset, int length);
+ virtual DecryptHandle* openDecryptSession(int uniqueId, int fd, off64_t offset, off64_t length);
virtual DecryptHandle* openDecryptSession(int uniqueId, const char* uri);
@@ -239,7 +239,7 @@
int uniqueId, DecryptHandle* decryptHandle, int decryptUnitId);
virtual ssize_t pread(int uniqueId, DecryptHandle* decryptHandle,
- void* buffer, ssize_t numBytes, off_t offset);
+ void* buffer, ssize_t numBytes, off64_t offset);
};
/**
diff --git a/drm/libdrmframework/plugins/common/include/DrmEngineBase.h b/drm/libdrmframework/plugins/common/include/DrmEngineBase.h
index 67b6355..b61e3d3 100644
--- a/drm/libdrmframework/plugins/common/include/DrmEngineBase.h
+++ b/drm/libdrmframework/plugins/common/include/DrmEngineBase.h
@@ -62,7 +62,7 @@
status_t consumeRights(int uniqueId, DecryptHandle* decryptHandle, int action, bool reserve);
status_t setPlaybackStatus(
- int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position);
+ int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position);
bool validateAction(
int uniqueId, const String8& path, int action, const ActionDescription& description);
@@ -80,7 +80,7 @@
DrmSupportInfo* getSupportInfo(int uniqueId);
status_t openDecryptSession(
- int uniqueId, DecryptHandle* decryptHandle, int fd, int offset, int length);
+ int uniqueId, DecryptHandle* decryptHandle, int fd, off64_t offset, off64_t length);
status_t openDecryptSession(
int uniqueId, DecryptHandle* decryptHandle, const char* uri);
@@ -96,7 +96,7 @@
status_t finalizeDecryptUnit(int uniqueId, DecryptHandle* decryptHandle, int decryptUnitId);
ssize_t pread(int uniqueId, DecryptHandle* decryptHandle,
- void* buffer, ssize_t numBytes, off_t offset);
+ void* buffer, ssize_t numBytes, off64_t offset);
protected:
/////////////////////////////////////////////////////
@@ -268,7 +268,7 @@
* Returns DRM_NO_ERROR for success, DRM_ERROR_UNKNOWN for failure
*/
virtual status_t onSetPlaybackStatus(
- int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position) = 0;
+ int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position) = 0;
/**
* Validates whether an action on the DRM content is allowed or not.
@@ -369,7 +369,7 @@
* DRM_ERROR_CANNOT_HANDLE for failure and DRM_NO_ERROR for success
*/
virtual status_t onOpenDecryptSession(
- int uniqueId, DecryptHandle* decryptHandle, int fd, int offset, int length) = 0;
+ int uniqueId, DecryptHandle* decryptHandle, int fd, off64_t offset, off64_t length) = 0;
/**
* Open the decrypt session to decrypt the given protected content
@@ -450,7 +450,7 @@
* @return Number of bytes read. Returns -1 for Failure.
*/
virtual ssize_t onPread(int uniqueId, DecryptHandle* decryptHandle,
- void* buffer, ssize_t numBytes, off_t offset) = 0;
+ void* buffer, ssize_t numBytes, off64_t offset) = 0;
};
};
diff --git a/drm/libdrmframework/plugins/common/include/IDrmEngine.h b/drm/libdrmframework/plugins/common/include/IDrmEngine.h
index f839070..d05c24f 100644
--- a/drm/libdrmframework/plugins/common/include/IDrmEngine.h
+++ b/drm/libdrmframework/plugins/common/include/IDrmEngine.h
@@ -224,7 +224,7 @@
* Returns DRM_NO_ERROR for success, DRM_ERROR_UNKNOWN for failure
*/
virtual status_t setPlaybackStatus(int uniqueId, DecryptHandle* decryptHandle,
- int playbackStatus, int position) = 0;
+ int playbackStatus, int64_t position) = 0;
/**
* Validates whether an action on the DRM content is allowed or not.
@@ -325,7 +325,7 @@
* DRM_ERROR_CANNOT_HANDLE for failure and DRM_NO_ERROR for success
*/
virtual status_t openDecryptSession(
- int uniqueId, DecryptHandle* decryptHandle, int fd, int offset, int length) = 0;
+ int uniqueId, DecryptHandle* decryptHandle, int fd, off64_t offset, off64_t length) = 0;
/**
* Open the decrypt session to decrypt the given protected content
@@ -406,7 +406,7 @@
* @return Number of bytes read. Returns -1 for Failure.
*/
virtual ssize_t pread(int uniqueId, DecryptHandle* decryptHandle,
- void* buffer, ssize_t numBytes, off_t offset) = 0;
+ void* buffer, ssize_t numBytes, off64_t offset) = 0;
};
};
diff --git a/drm/libdrmframework/plugins/passthru/include/DrmPassthruPlugIn.h b/drm/libdrmframework/plugins/passthru/include/DrmPassthruPlugIn.h
index bbcd9ed..f941f70 100644
--- a/drm/libdrmframework/plugins/passthru/include/DrmPassthruPlugIn.h
+++ b/drm/libdrmframework/plugins/passthru/include/DrmPassthruPlugIn.h
@@ -56,7 +56,7 @@
status_t onConsumeRights(int uniqueId, DecryptHandle* decryptHandle, int action, bool reserve);
status_t onSetPlaybackStatus(
- int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position);
+ int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position);
bool onValidateAction(
int uniqueId, const String8& path, int action, const ActionDescription& description);
@@ -74,7 +74,7 @@
DrmSupportInfo* onGetSupportInfo(int uniqueId);
status_t onOpenDecryptSession(
- int uniqueId, DecryptHandle* decryptHandle, int fd, int offset, int length);
+ int uniqueId, DecryptHandle* decryptHandle, int fd, off64_t offset, off64_t length);
status_t onOpenDecryptSession(
int uniqueId, DecryptHandle* decryptHandle, const char* uri);
@@ -90,7 +90,7 @@
status_t onFinalizeDecryptUnit(int uniqueId, DecryptHandle* decryptHandle, int decryptUnitId);
ssize_t onPread(int uniqueId, DecryptHandle* decryptHandle,
- void* buffer, ssize_t numBytes, off_t offset);
+ void* buffer, ssize_t numBytes, off64_t offset);
private:
DecryptHandle* openDecryptSessionImpl();
diff --git a/drm/libdrmframework/plugins/passthru/src/DrmPassthruPlugIn.cpp b/drm/libdrmframework/plugins/passthru/src/DrmPassthruPlugIn.cpp
index dee1fdb..976978f 100644
--- a/drm/libdrmframework/plugins/passthru/src/DrmPassthruPlugIn.cpp
+++ b/drm/libdrmframework/plugins/passthru/src/DrmPassthruPlugIn.cpp
@@ -187,7 +187,7 @@
}
status_t DrmPassthruPlugIn::onSetPlaybackStatus(int uniqueId, DecryptHandle* decryptHandle,
- int playbackStatus, int position) {
+ int playbackStatus, int64_t position) {
LOGD("DrmPassthruPlugIn::onSetPlaybackStatus() : %d", uniqueId);
return DRM_NO_ERROR;
}
@@ -234,7 +234,7 @@
}
status_t DrmPassthruPlugIn::onOpenDecryptSession(
- int uniqueId, DecryptHandle* decryptHandle, int fd, int offset, int length) {
+ int uniqueId, DecryptHandle* decryptHandle, int fd, off64_t offset, off64_t length) {
LOGD("DrmPassthruPlugIn::onOpenDecryptSession() : %d", uniqueId);
#ifdef ENABLE_PASSTHRU_DECRYPTION
@@ -292,7 +292,7 @@
}
ssize_t DrmPassthruPlugIn::onPread(int uniqueId, DecryptHandle* decryptHandle,
- void* buffer, ssize_t numBytes, off_t offset) {
+ void* buffer, ssize_t numBytes, off64_t offset) {
LOGD("DrmPassthruPlugIn::onPread() : %d", uniqueId);
return 0;
}
diff --git a/include/camera/Camera.h b/include/camera/Camera.h
index e734c38..c7f0b15 100644
--- a/include/camera/Camera.h
+++ b/include/camera/Camera.h
@@ -22,8 +22,6 @@
namespace android {
-class ISurface;
-
/*
* A set of bit masks for specifying how the received preview frames are
* handled before the previewCallback() call.
@@ -96,6 +94,14 @@
// or CAMERA_MSG_COMPRESSED_IMAGE. This is not allowed to be set during
// preview.
CAMERA_CMD_SET_DISPLAY_ORIENTATION = 3,
+
+ // cmdType to disable/enable shutter sound.
+ // In sendCommand, passing arg1 = 0 will disable the shutter sound,
+ // while passing arg1 = 1 will enable it.
+ CAMERA_CMD_ENABLE_SHUTTER_SOUND = 4,
+
+ // cmdType to play recording sound.
+ CAMERA_CMD_PLAY_RECORDING_SOUND = 5,
};
// camera fatal errors
@@ -166,9 +172,8 @@
status_t getStatus() { return mStatus; }
- // pass the buffered ISurface to the camera service
+ // pass the buffered Surface to the camera service
status_t setPreviewDisplay(const sp<Surface>& surface);
- status_t setPreviewDisplay(const sp<ISurface>& surface);
// start preview mode, must call setPreviewDisplay first
status_t startPreview();
@@ -209,6 +214,15 @@
// send command to camera driver
status_t sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
+ // return the total number of available video buffers.
+ int32_t getNumberOfVideoBuffers() const;
+
+ // return the individual video buffer corresponding to the given index.
+ sp<IMemory> getVideoBuffer(int32_t index) const;
+
+ // tell camera hal to store meta data or real YUV in video buffers.
+ status_t storeMetaDataInBuffers(bool enabled);
+
void setListener(const sp<CameraListener>& listener);
void setPreviewCallbackFlags(int preview_callback_flag);
diff --git a/include/camera/CameraHardwareInterface.h b/include/camera/CameraHardwareInterface.h
index 35c5aa1..16f572c 100644
--- a/include/camera/CameraHardwareInterface.h
+++ b/include/camera/CameraHardwareInterface.h
@@ -18,8 +18,11 @@
#define ANDROID_HARDWARE_CAMERA_HARDWARE_INTERFACE_H
#include <binder/IMemory.h>
+#include <ui/egl/android_natives.h>
#include <utils/RefBase.h>
#include <surfaceflinger/ISurface.h>
+#include <ui/android_native_buffer.h>
+#include <ui/GraphicBuffer.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
@@ -86,8 +89,8 @@
public:
virtual ~CameraHardwareInterface() { }
- /** Return the IMemoryHeap for the preview image heap */
- virtual sp<IMemoryHeap> getPreviewHeap() const = 0;
+ /** Set the ANativeWindow to which preview frames are sent */
+ virtual status_t setPreviewWindow(const sp<ANativeWindow>& buf) = 0;
/** Return the IMemoryHeap for the raw image heap */
virtual sp<IMemoryHeap> getRawHeap() const = 0;
@@ -143,6 +146,82 @@
virtual bool previewEnabled() = 0;
/**
+ * Retrieve the total number of available buffers from camera hal for passing
+ * video frame data in a recording session. Must be called again if a new
+ * recording session is started.
+ *
+ * This method should be called after startRecording(), since
+ * the camera hal may choose to allocate the video buffers only after
+ * recording is started.
+ *
+ * A camera hal may not implement this method, in which case 0 is returned to
+ * indicate that this feature is not available.
+ *
+ * @return the number of video buffers that camera hal makes available.
+ * Zero (0) is returned to indicate that camera hal does not support
+ * this feature.
+ */
+ virtual int32_t getNumberOfVideoBuffers() const { return 0; }
+
+ /**
+ * Retrieve the video buffer corresponding to the given index in a
+ * recording session. Must be called again if a new recording session
+ * is started.
+ *
+ * It allows a client to retrieve all video buffers that camera hal makes
+ * available for passing video frame data by calling this method with all
+ * valid index values. The valid index value ranges from 0 to n, where
+ * n = getNumberOfVideoBuffers() - 1. With an index outside of the valid
+ * range, 0 must be returned. This method should be called after
+ * startRecording().
+ *
+ * The video buffers should NOT be modified/released by camera hal
+ * until stopRecording() is called and all outstanding video buffers
+ * previously sent out via CAMERA_MSG_VIDEO_FRAME have been released
+ * via releaseVideoBuffer().
+ *
+ * @param index an index to retrieve the corresponding video buffer.
+ *
+ * @return the video buffer corresponding to the given index.
+ */
+ virtual sp<IMemory> getVideoBuffer(int32_t index) const { return 0; }
+
+ /**
+ * Request the camera hal to store meta data or real YUV data in
+ * the video buffers sent out via CAMERA_MSG_VIDEO_FRAME for a
+ * recording session. If it is not called, the default camera
+ * hal behavior is to store real YUV data in the video buffers.
+ *
+ * This method should be called before startRecording() in order
+ * to be effective.
+ *
+ * If meta data is stored in the video buffers, it is up to the
+ * receiver of the video buffers to interpret the contents and
+ * to find the actual frame data with the help of the meta data
+ * in the buffer. How this is done is outside of the scope of
+ * this method.
+ *
+ * Some camera hal may not support storing meta data in the video
+ * buffers, but all camera hal should support storing real YUV data
+ * in the video buffers. If the camera hal does not support storing
+ * the meta data in the video buffers when it is requested to do
+ * so, INVALID_OPERATION must be returned. It is very useful for
+ * the camera hal to pass meta data rather than the actual frame
+ * data directly to the video encoder, since the amount of the
+ * uncompressed frame data can be very large if video size is large.
+ *
+ * @param enable if true, instruct the camera hal to store
+ * meta data in the video buffers; if false, instruct
+ * the camera hal to store real YUV data in the video
+ * buffers.
+ *
+ * @return OK on success.
+ */
+ virtual status_t storeMetaDataInBuffers(bool enable) {
+ return enable? INVALID_OPERATION: OK;
+ }
+
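+ /**
+ * Illustrative (hypothetical) sketch of how a recorder client is expected
+ * to use the three methods above together, assuming "camera" is a
+ * sp<CameraHardwareInterface>:
+ *
+ *   if (camera->storeMetaDataInBuffers(true) != OK) {
+ *       // fall back to real YUV data in the video buffers
+ *   }
+ *   camera->startRecording();
+ *   int32_t n = camera->getNumberOfVideoBuffers();
+ *   for (int32_t i = 0; i < n; ++i) {
+ *       sp<IMemory> buf = camera->getVideoBuffer(i);
+ *   }
+ */
+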
+ /**
* Start record mode. When a record image is available a CAMERA_MSG_VIDEO_FRAME
* message is sent with the corresponding frame. Every record frame must be released
* by calling releaseRecordingFrame().
diff --git a/include/camera/CameraParameters.h b/include/camera/CameraParameters.h
index 4e770fd..431aaa4 100644
--- a/include/camera/CameraParameters.h
+++ b/include/camera/CameraParameters.h
@@ -59,6 +59,35 @@
void setPreviewSize(int width, int height);
void getPreviewSize(int *width, int *height) const;
void getSupportedPreviewSizes(Vector<Size> &sizes) const;
+
+ // Set the dimensions of video frames, in pixels, to the given
+ // width and height. The given width and height must be one
+ // of the supported dimensions returned from
+ // getSupportedVideoSizes(). Must not be called if
+ // getSupportedVideoSizes() returns an empty Vector of Size.
+ void setVideoSize(int width, int height);
+ // Retrieve the current dimensions (width and height)
+ // in pixels for video frames, which must be one of the
+ // supported dimensions returned from getSupportedVideoSizes().
+ // Must not be called if getSupportedVideoSizes() returns an
+ // empty Vector of Size.
+ void getVideoSize(int *width, int *height) const;
+ // Retrieve a Vector of supported dimensions (width and height)
+ // in pixels for video frames. If the returned Vector of sizes
+ // is empty, the camera does not support calls to setVideoSize()
+ // or getVideoSize(). In addition, it also indicates that
+ // the camera only has a single output, and does not have
+ // separate outputs for video frames and preview frames.
+ void getSupportedVideoSizes(Vector<Size> &sizes) const;
+ // Retrieve the preferred preview size (width and height) in pixels
+ // for video recording. The given width and height must be one of
+ // the supported preview sizes returned from getSupportedPreviewSizes().
+ // Must not be called if getSupportedVideoSizes() returns an empty
+ // Vector of Size. If getSupportedVideoSizes() returns an empty
+ // Vector of Size, the width and height returned from this method
+ // is invalid, and is "-1x-1".
+ void getPreferredPreviewSizeForVideo(int *width, int *height) const;
+
void setPreviewFrameRate(int fps);
int getPreviewFrameRate() const;
void getPreviewFpsRange(int *min_fps, int *max_fps) const;
@@ -288,6 +317,31 @@
// Example value: "0.95,1.9,Infinity" or "0.049,0.05,0.051". Read only.
static const char KEY_FOCUS_DISTANCES[];
+ // The current dimensions in pixels (width x height) for video frames.
+ // The width and height must be one of the supported sizes retrieved
+ // via KEY_SUPPORTED_VIDEO_SIZES.
+ // Example value: "1280x720". Read/write.
+ static const char KEY_VIDEO_SIZE[];
+ // A list of the supported dimensions in pixels (width x height)
+ // for video frames. See CAMERA_MSG_VIDEO_FRAME in
+ // frameworks/base/include/camera/Camera.h for details.
+ // Example: "176x144,1280x720". Read only.
+ static const char KEY_SUPPORTED_VIDEO_SIZES[];
+
+ // Preferred preview frame size in pixels for video recording.
+ // The width and height must be one of the supported sizes retrieved
+ // via KEY_SUPPORTED_PREVIEW_SIZES. This key can be used only when
+ // getSupportedVideoSizes() does not return an empty Vector of Size.
+ // Camcorder applications are recommended to set the preview size
+ // to a value that is not larger than the preferred preview size.
+ // In other words, the product of the width and height of the
+ // preview size should not be larger than that of the preferred
+ // preview size. In addition, we recommend choosing a preview size
+ // that has the same aspect ratio as the resolution of video to be
+ // recorded.
+ // Example value: "800x600". Read only.
+ static const char KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO[];
+
// The image format for video frames. See CAMERA_MSG_VIDEO_FRAME in
// frameworks/base/include/camera/Camera.h.
// Example value: "yuv420sp" or PIXEL_FORMAT_XXX constants. Read only.
@@ -361,7 +415,10 @@
// for barcode reading.
static const char SCENE_MODE_BARCODE[];
- // Formats for setPreviewFormat and setPictureFormat.
+ // Pixel color formats for KEY_PREVIEW_FORMAT, KEY_PICTURE_FORMAT,
+ // and KEY_VIDEO_FRAME_FORMAT
+ // Planar variant of the YUV420 color format
+ static const char PIXEL_FORMAT_YUV420P[];
static const char PIXEL_FORMAT_YUV422SP[];
static const char PIXEL_FORMAT_YUV420SP[]; // NV21
static const char PIXEL_FORMAT_YUV422I[]; // YUY2
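As a usage sketch (not part of the patch), a camcorder application could combine the new video-size keys roughly like this; the 1280x720 value is illustrative and must come from the supported list in practice:

    #include <camera/CameraParameters.h>

    using namespace android;

    void configureVideoSize(CameraParameters* params) {
        Vector<Size> sizes;
        params->getSupportedVideoSizes(sizes);
        if (sizes.isEmpty()) {
            // Single-output camera: video frames follow the preview size.
            return;
        }
        params->setVideoSize(1280, 720);  // assumed to be one of 'sizes'

        int preferredWidth, preferredHeight;
        params->getPreferredPreviewSizeForVideo(&preferredWidth, &preferredHeight);
        // Pick a preview size no larger than the preferred one, ideally with the
        // same aspect ratio as the recorded video.
        params->setPreviewSize(preferredWidth, preferredHeight);
    }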
diff --git a/include/camera/ICamera.h b/include/camera/ICamera.h
index 6fcf9e5..b69e075 100644
--- a/include/camera/ICamera.h
+++ b/include/camera/ICamera.h
@@ -20,7 +20,7 @@
#include <utils/RefBase.h>
#include <binder/IInterface.h>
#include <binder/Parcel.h>
-#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/Surface.h>
#include <binder/IMemory.h>
#include <utils/String8.h>
#include <camera/Camera.h>
@@ -45,8 +45,8 @@
// allow other processes to use this ICamera interface
virtual status_t unlock() = 0;
- // pass the buffered ISurface to the camera service
- virtual status_t setPreviewDisplay(const sp<ISurface>& surface) = 0;
+ // pass the buffered Surface to the camera service
+ virtual status_t setPreviewDisplay(const sp<Surface>& surface) = 0;
// set the preview callback flag to affect how the received frames from
// preview are handled.
@@ -90,6 +90,15 @@
// send command to camera driver
virtual status_t sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) = 0;
+
+ // return the total number of available video buffers
+ virtual int32_t getNumberOfVideoBuffers() const = 0;
+
+ // return the individual video buffer corresponding to the given index.
+ virtual sp<IMemory> getVideoBuffer(int32_t index) const = 0;
+
+ // tell the camera hal to store meta data or real YUV data in video buffers.
+ virtual status_t storeMetaDataInBuffers(bool enabled) = 0;
};
// ----------------------------------------------------------------------------
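A client-side sketch of the new ICamera call (illustrative only; the helper name is not part of this change):

    #include <camera/ICamera.h>

    using namespace android;

    status_t enableMetaDataMode(const sp<ICamera>& camera) {
        // Must be issued before startRecording() to take effect.
        status_t err = camera->storeMetaDataInBuffers(true);
        if (err == INVALID_OPERATION) {
            // The HAL does not support metadata mode; real YUV data will be
            // stored in the video buffers instead.
            err = OK;
        }
        return err;
    }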
diff --git a/include/drm/DrmManagerClient.h b/include/drm/DrmManagerClient.h
index 3dbfbe2..e6ba3c4 100644
--- a/include/drm/DrmManagerClient.h
+++ b/include/drm/DrmManagerClient.h
@@ -66,7 +66,7 @@
* @return
* Handle for the decryption session
*/
- DecryptHandle* openDecryptSession(int fd, int offset, int length);
+ DecryptHandle* openDecryptSession(int fd, off64_t offset, off64_t length);
/**
* Open the decrypt session to decrypt the given protected content
@@ -110,7 +110,7 @@
* @return status_t
* Returns DRM_NO_ERROR for success, DRM_ERROR_UNKNOWN for failure
*/
- status_t setPlaybackStatus(DecryptHandle* decryptHandle, int playbackStatus, int position);
+ status_t setPlaybackStatus(DecryptHandle* decryptHandle, int playbackStatus, int64_t position);
/**
* Initialize decryption for the given unit of the protected content
@@ -164,7 +164,7 @@
*
* @return Number of bytes read. Returns -1 for Failure.
*/
- ssize_t pread(DecryptHandle* decryptHandle, void* buffer, ssize_t numBytes, off_t offset);
+ ssize_t pread(DecryptHandle* decryptHandle, void* buffer, ssize_t numBytes, off64_t offset);
/**
* Validates whether an action on the DRM content is allowed or not.
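A sketch of a 64-bit-clean read path using the updated signatures; closeDecryptSession() is assumed to exist alongside openDecryptSession(), and all names here are illustrative:

    #include <drm/DrmManagerClient.h>

    using namespace android;

    ssize_t readProtected(DrmManagerClient* client, int fd, off64_t fileLength,
                          off64_t readOffset, void* buffer, ssize_t numBytes) {
        DecryptHandle* handle = client->openDecryptSession(fd, 0, fileLength);
        if (handle == NULL) {
            return -1;
        }
        ssize_t n = client->pread(handle, buffer, numBytes, readOffset);
        client->closeDecryptSession(handle);  // assumed existing API
        return n;
    }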
diff --git a/include/drm/drm_framework_common.h b/include/drm/drm_framework_common.h
index c5765a9..1758cdd 100644
--- a/include/drm/drm_framework_common.h
+++ b/include/drm/drm_framework_common.h
@@ -217,6 +217,10 @@
* POSIX based Decrypt API set for container based DRM
*/
static const int CONTAINER_BASED = 0x02;
+ /**
+ * Decrypt API for Widevine streams
+ */
+ static const int WV_BASED = 0x3;
};
/**
diff --git a/include/media/AudioEffect.h b/include/media/AudioEffect.h
index c967efb..cda2be0 100644
--- a/include/media/AudioEffect.h
+++ b/include/media/AudioEffect.h
@@ -403,7 +403,7 @@
static status_t guidToString(const effect_uuid_t *guid, char *str, size_t maxLen);
protected:
- volatile int32_t mEnabled; // enable state
+ bool mEnabled; // enable state
int32_t mSessionId; // audio session ID
int32_t mPriority; // priority for effect control
status_t mStatus; // effect status
@@ -412,6 +412,7 @@
void* mUserData; // client context for callback function
effect_descriptor_t mDescriptor; // effect descriptor
int32_t mId; // system wide unique effect engine instance ID
+ Mutex mLock; // Mutex for mEnabled access
private:
diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h
index 38e3d44..5f7cd90 100644
--- a/include/media/AudioRecord.h
+++ b/include/media/AudioRecord.h
@@ -356,7 +356,7 @@
sp<IAudioRecord> mAudioRecord;
sp<IMemory> mCblkMemory;
sp<ClientRecordThread> mClientRecordThread;
- Mutex mRecordThreadLock;
+ Mutex mLock;
uint32_t mFrameCount;
diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h
index 9fd905f..1e29d82 100644
--- a/include/media/AudioSystem.h
+++ b/include/media/AudioSystem.h
@@ -156,6 +156,7 @@
MODE_NORMAL = 0,
MODE_RINGTONE,
MODE_IN_CALL,
+ MODE_IN_COMMUNICATION,
NUM_MODES // not a valid entry, denotes end-of-list
};
@@ -262,11 +263,15 @@
DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES = 0x100,
DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER = 0x200,
DEVICE_OUT_AUX_DIGITAL = 0x400,
+ DEVICE_OUT_ANLG_DOCK_HEADSET = 0x800,
+ DEVICE_OUT_DGTL_DOCK_HEADSET = 0x1000,
DEVICE_OUT_DEFAULT = 0x8000,
DEVICE_OUT_ALL = (DEVICE_OUT_EARPIECE | DEVICE_OUT_SPEAKER | DEVICE_OUT_WIRED_HEADSET |
DEVICE_OUT_WIRED_HEADPHONE | DEVICE_OUT_BLUETOOTH_SCO | DEVICE_OUT_BLUETOOTH_SCO_HEADSET |
DEVICE_OUT_BLUETOOTH_SCO_CARKIT | DEVICE_OUT_BLUETOOTH_A2DP | DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES |
- DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER | DEVICE_OUT_AUX_DIGITAL | DEVICE_OUT_DEFAULT),
+ DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER | DEVICE_OUT_AUX_DIGITAL |
+ DEVICE_OUT_ANLG_DOCK_HEADSET | DEVICE_OUT_DGTL_DOCK_HEADSET |
+ DEVICE_OUT_DEFAULT),
DEVICE_OUT_ALL_A2DP = (DEVICE_OUT_BLUETOOTH_A2DP | DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES |
DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER),
@@ -309,6 +314,8 @@
FORCE_WIRED_ACCESSORY,
FORCE_BT_CAR_DOCK,
FORCE_BT_DESK_DOCK,
+ FORCE_ANALOG_DOCK,
+ FORCE_DIGITAL_DOCK,
NUM_FORCE_CONFIG,
FORCE_DEFAULT = FORCE_NONE
};
@@ -466,7 +473,7 @@
AudioParameter(const String8& keyValuePairs);
virtual ~AudioParameter();
- // reserved parameter keys for changeing standard parameters with setParameters() function.
+ // reserved parameter keys for changing standard parameters with setParameters() function.
// Using these keys is mandatory for AudioFlinger to properly monitor audio output/input
// configuration changes and act accordingly.
// keyRouting: to change audio routing, value is an int in AudioSystem::audio_devices
@@ -474,11 +481,14 @@
// keyFormat: to change audio format, value is an int in AudioSystem::audio_format
// keyChannels: to change audio channel configuration, value is an int in AudioSystem::audio_channels
// keyFrameCount: to change audio output frame count, value is an int
+ // keyInputSource: to change audio input source, value is an int in audio_source
+ // (defined in media/mediarecorder.h)
static const char *keyRouting;
static const char *keySamplingRate;
static const char *keyFormat;
static const char *keyChannels;
static const char *keyFrameCount;
+ static const char *keyInputSource;
String8 toString();
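A sketch of how the new keyInputSource parameter might be passed down to the audio HAL, assuming the AudioParameter::addInt() helper; the input handle is assumed to have been obtained elsewhere, and AUDIO_SOURCE_CAMCORDER comes from media/mediarecorder.h:

    #include <media/AudioSystem.h>
    #include <media/mediarecorder.h>

    using namespace android;

    void selectCamcorderInput(int inputHandle) {
        AudioParameter param;
        param.addInt(String8(AudioParameter::keyInputSource), AUDIO_SOURCE_CAMCORDER);
        AudioSystem::setParameters(inputHandle, param.toString());
    }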
diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h
index 4475d4a..813a905 100644
--- a/include/media/AudioTrack.h
+++ b/include/media/AudioTrack.h
@@ -480,6 +480,7 @@
uint32_t mFlags;
int mSessionId;
int mAuxEffectId;
+ Mutex mLock;
};
diff --git a/include/media/EffectApi.h b/include/media/EffectApi.h
index 16fb43c..b97c22e 100644
--- a/include/media/EffectApi.h
+++ b/include/media/EffectApi.h
@@ -602,9 +602,9 @@
// Audio mode
enum audio_mode_e {
- AUDIO_MODE_NORMAL, // phone idle
- AUDIO_MODE_RINGTONE, // phone ringing
- AUDIO_MODE_IN_CALL // phone call connected
+ AUDIO_MODE_NORMAL, // device idle
+ AUDIO_MODE_RINGTONE, // device ringing
+ AUDIO_MODE_IN_CALL // audio call connected (VoIP or telephony)
};
// Values for "accessMode" field of buffer_config_t:
diff --git a/include/media/IMediaPlayer.h b/include/media/IMediaPlayer.h
index af9a7ed..bba7ed7 100644
--- a/include/media/IMediaPlayer.h
+++ b/include/media/IMediaPlayer.h
@@ -25,6 +25,7 @@
class Parcel;
class ISurface;
+class Surface;
class IMediaPlayer: public IInterface
{
@@ -33,7 +34,7 @@
virtual void disconnect() = 0;
- virtual status_t setVideoSurface(const sp<ISurface>& surface) = 0;
+ virtual status_t setVideoSurface(const sp<Surface>& surface) = 0;
virtual status_t prepareAsync() = 0;
virtual status_t start() = 0;
virtual status_t stop() = 0;
@@ -46,8 +47,6 @@
virtual status_t setAudioStreamType(int type) = 0;
virtual status_t setLooping(int loop) = 0;
virtual status_t setVolume(float leftVolume, float rightVolume) = 0;
- virtual status_t suspend() = 0;
- virtual status_t resume() = 0;
virtual status_t setAuxEffectSendLevel(float level) = 0;
virtual status_t attachAuxEffect(int effectId) = 0;
diff --git a/include/media/IMediaPlayerService.h b/include/media/IMediaPlayerService.h
index 9416ca1..0bfb166 100644
--- a/include/media/IMediaPlayerService.h
+++ b/include/media/IMediaPlayerService.h
@@ -32,6 +32,7 @@
class IMediaRecorder;
class IOMX;
+struct IStreamSource;
class IMediaPlayerService: public IInterface
{
@@ -45,6 +46,11 @@
int audioSessionId = 0) = 0;
virtual sp<IMediaPlayer> create(pid_t pid, const sp<IMediaPlayerClient>& client,
int fd, int64_t offset, int64_t length, int audioSessionId) = 0;
+
+ virtual sp<IMediaPlayer> create(
+ pid_t pid, const sp<IMediaPlayerClient> &client,
+ const sp<IStreamSource> &source, int audioSessionId) = 0;
+
virtual sp<IMemory> decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, int* pFormat) = 0;
virtual sp<IMemory> decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, int* pFormat) = 0;
virtual sp<IOMX> getOMX() = 0;
diff --git a/include/media/IMediaRecorder.h b/include/media/IMediaRecorder.h
index 54adca8..28be7c1 100644
--- a/include/media/IMediaRecorder.h
+++ b/include/media/IMediaRecorder.h
@@ -22,7 +22,7 @@
namespace android {
-class ISurface;
+class Surface;
class ICamera;
class IMediaRecorderClient;
@@ -32,7 +32,7 @@
DECLARE_META_INTERFACE(MediaRecorder);
virtual status_t setCamera(const sp<ICamera>& camera) = 0;
- virtual status_t setPreviewSurface(const sp<ISurface>& surface) = 0;
+ virtual status_t setPreviewSurface(const sp<Surface>& surface) = 0;
virtual status_t setVideoSource(int vs) = 0;
virtual status_t setAudioSource(int as) = 0;
virtual status_t setOutputFormat(int of) = 0;
@@ -40,6 +40,7 @@
virtual status_t setAudioEncoder(int ae) = 0;
virtual status_t setOutputFile(const char* path) = 0;
virtual status_t setOutputFile(int fd, int64_t offset, int64_t length) = 0;
+ virtual status_t setOutputFileAuxiliary(int fd) = 0;
virtual status_t setVideoSize(int width, int height) = 0;
virtual status_t setVideoFrameRate(int frames_per_second) = 0;
virtual status_t setParameters(const String8& params) = 0;
@@ -68,4 +69,3 @@
}; // namespace android
#endif // ANDROID_IMEDIARECORDER_H
-
diff --git a/include/media/IOMX.h b/include/media/IOMX.h
index f794766..cb36bbb 100644
--- a/include/media/IOMX.h
+++ b/include/media/IOMX.h
@@ -19,6 +19,7 @@
#define ANDROID_IOMX_H_
#include <binder/IInterface.h>
+#include <ui/GraphicBuffer.h>
#include <utils/List.h>
#include <utils/String8.h>
@@ -78,10 +79,20 @@
node_id node, OMX_INDEXTYPE index,
const void *params, size_t size) = 0;
+ virtual status_t storeMetaDataInBuffers(
+ node_id node, OMX_U32 port_index, OMX_BOOL enable) = 0;
+
+ virtual status_t enableGraphicBuffers(
+ node_id node, OMX_U32 port_index, OMX_BOOL enable) = 0;
+
virtual status_t useBuffer(
node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms,
buffer_id *buffer) = 0;
+ virtual status_t useGraphicBuffer(
+ node_id node, OMX_U32 port_index,
+ const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer) = 0;
+
// This API clearly only makes sense if the caller lives in the
// same process as the callee, i.e. is the media_server, as the
// returned "buffer_data" pointer is just that, a pointer into local
@@ -109,33 +120,6 @@
node_id node,
const char *parameter_name,
OMX_INDEXTYPE *index) = 0;
-
- virtual sp<IOMXRenderer> createRenderer(
- const sp<ISurface> &surface,
- const char *componentName,
- OMX_COLOR_FORMATTYPE colorFormat,
- size_t encodedWidth, size_t encodedHeight,
- size_t displayWidth, size_t displayHeight,
- int32_t rotationDegrees) = 0;
-
- // Note: These methods are _not_ virtual, it exists as a wrapper around
- // the virtual "createRenderer" method above facilitating extraction
- // of the ISurface from a regular Surface or a java Surface object.
- sp<IOMXRenderer> createRenderer(
- const sp<Surface> &surface,
- const char *componentName,
- OMX_COLOR_FORMATTYPE colorFormat,
- size_t encodedWidth, size_t encodedHeight,
- size_t displayWidth, size_t displayHeight,
- int32_t rotationDegrees);
-
- sp<IOMXRenderer> createRendererFromJavaSurface(
- JNIEnv *env, jobject javaSurface,
- const char *componentName,
- OMX_COLOR_FORMATTYPE colorFormat,
- size_t encodedWidth, size_t encodedHeight,
- size_t displayWidth, size_t displayHeight,
- int32_t rotationDegrees);
};
struct omx_message {
@@ -182,13 +166,6 @@
virtual void onMessage(const omx_message &msg) = 0;
};
-class IOMXRenderer : public IInterface {
-public:
- DECLARE_META_INTERFACE(OMXRenderer);
-
- virtual void render(IOMX::buffer_id buffer) = 0;
-};
-
////////////////////////////////////////////////////////////////////////////////
class BnOMX : public BnInterface<IOMX> {
@@ -205,13 +182,6 @@
uint32_t flags = 0);
};
-class BnOMXRenderer : public BnInterface<IOMXRenderer> {
-public:
- virtual status_t onTransact(
- uint32_t code, const Parcel &data, Parcel *reply,
- uint32_t flags = 0);
-};
-
} // namespace android
#endif // ANDROID_IOMX_H_
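A sketch of how an OMX client could use the new graphic-buffer entry points together; the port index and helper name are illustrative:

    #include <media/IOMX.h>

    using namespace android;

    status_t bindGraphicBuffer(const sp<IOMX>& omx, IOMX::node_id node,
                               OMX_U32 portIndex, const sp<GraphicBuffer>& gb,
                               IOMX::buffer_id* id) {
        // Switch the port to Android native buffers first, then hand it one.
        status_t err = omx->enableGraphicBuffers(node, portIndex, OMX_TRUE);
        if (err != OK) {
            return err;
        }
        return omx->useGraphicBuffer(node, portIndex, gb, id);
    }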
diff --git a/include/media/IStreamSource.h b/include/media/IStreamSource.h
new file mode 100644
index 0000000..6291124
--- /dev/null
+++ b/include/media/IStreamSource.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_ISTREAMSOURCE_H_
+
+#define ANDROID_ISTREAMSOURCE_H_
+
+#include <binder/IInterface.h>
+
+namespace android {
+
+struct IMemory;
+struct IStreamListener;
+
+struct IStreamSource : public IInterface {
+ DECLARE_META_INTERFACE(StreamSource);
+
+ virtual void setListener(const sp<IStreamListener> &listener) = 0;
+ virtual void setBuffers(const Vector<sp<IMemory> > &buffers) = 0;
+
+ virtual void onBufferAvailable(size_t index) = 0;
+};
+
+struct IStreamListener : public IInterface {
+ DECLARE_META_INTERFACE(StreamListener);
+
+ enum Command {
+ FLUSH,
+ DISCONTINUITY,
+ EOS
+ };
+
+ virtual void queueBuffer(size_t index, size_t size) = 0;
+ virtual void queueCommand(Command cmd) = 0;
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct BnStreamSource : public BnInterface<IStreamSource> {
+ virtual status_t onTransact(
+ uint32_t code, const Parcel &data, Parcel *reply,
+ uint32_t flags = 0);
+};
+
+struct BnStreamListener : public BnInterface<IStreamListener> {
+ virtual status_t onTransact(
+ uint32_t code, const Parcel &data, Parcel *reply,
+ uint32_t flags = 0);
+};
+
+} // namespace android
+
+#endif // ANDROID_ISTREAMSOURCE_H_
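A minimal sketch of a source built on the new interface; fillBuffer() is a hypothetical application-defined helper, not part of this change:

    #include <binder/IMemory.h>
    #include <media/IStreamSource.h>
    #include <utils/Vector.h>

    using namespace android;

    struct MyStreamSource : public BnStreamSource {
        virtual void setListener(const sp<IStreamListener>& listener) {
            mListener = listener;
        }
        virtual void setBuffers(const Vector<sp<IMemory> >& buffers) {
            mBuffers = buffers;
        }
        virtual void onBufferAvailable(size_t index) {
            // Fill mBuffers[index] with stream data, then queue it to the player.
            size_t filled = fillBuffer(mBuffers[index]);
            if (filled == 0) {
                mListener->queueCommand(IStreamListener::EOS);
            } else {
                mListener->queueBuffer(index, filled);
            }
        }
    private:
        sp<IStreamListener> mListener;
        Vector<sp<IMemory> > mBuffers;
        size_t fillBuffer(const sp<IMemory>& mem);  // hypothetical helper
    };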
diff --git a/include/media/MediaPlayerInterface.h b/include/media/MediaPlayerInterface.h
index 0521709..e7f1d6d 100644
--- a/include/media/MediaPlayerInterface.h
+++ b/include/media/MediaPlayerInterface.h
@@ -33,6 +33,7 @@
class Parcel;
class ISurface;
+class Surface;
template<typename T> class SortedVector;
@@ -105,7 +106,12 @@
const KeyedVector<String8, String8> *headers = NULL) = 0;
virtual status_t setDataSource(int fd, int64_t offset, int64_t length) = 0;
- virtual status_t setVideoSurface(const sp<ISurface>& surface) = 0;
+
+ virtual status_t setDataSource(const sp<IStreamSource> &source) {
+ return INVALID_OPERATION;
+ }
+
+ virtual status_t setVideoSurface(const sp<Surface>& surface) = 0;
virtual status_t prepare() = 0;
virtual status_t prepareAsync() = 0;
virtual status_t start() = 0;
@@ -118,8 +124,6 @@
virtual status_t reset() = 0;
virtual status_t setLooping(int loop) = 0;
virtual player_type playerType() = 0;
- virtual status_t suspend() { return INVALID_OPERATION; }
- virtual status_t resume() { return INVALID_OPERATION; }
virtual void setNotifyCallback(void* cookie, notify_callback_f notifyFunc) {
mCookie = cookie; mNotify = notifyFunc; }
diff --git a/include/media/MediaProfiles.h b/include/media/MediaProfiles.h
index c3cd361..aa97874 100644
--- a/include/media/MediaProfiles.h
+++ b/include/media/MediaProfiles.h
@@ -25,7 +25,20 @@
enum camcorder_quality {
CAMCORDER_QUALITY_LOW = 0,
- CAMCORDER_QUALITY_HIGH = 1
+ CAMCORDER_QUALITY_HIGH = 1,
+ CAMCORDER_QUALITY_QCIF = 2,
+ CAMCORDER_QUALITY_CIF = 3,
+ CAMCORDER_QUALITY_480P = 4,
+ CAMCORDER_QUALITY_720P = 5,
+ CAMCORDER_QUALITY_1080P = 6,
+
+ CAMCORDER_QUALITY_TIME_LAPSE_LOW = 1000,
+ CAMCORDER_QUALITY_TIME_LAPSE_HIGH = 1001,
+ CAMCORDER_QUALITY_TIME_LAPSE_QCIF = 1002,
+ CAMCORDER_QUALITY_TIME_LAPSE_CIF = 1003,
+ CAMCORDER_QUALITY_TIME_LAPSE_480P = 1004,
+ CAMCORDER_QUALITY_TIME_LAPSE_720P = 1005,
+ CAMCORDER_QUALITY_TIME_LAPSE_1080P = 1006
};
enum video_decoder {
@@ -68,6 +81,12 @@
camcorder_quality quality) const;
/**
+ * Returns true if a profile for the given camera at the given quality exists,
+ * or false if not.
+ */
+ bool hasCamcorderProfile(int cameraId, camcorder_quality quality) const;
+
+ /**
* Returns the output file formats supported.
*/
Vector<output_format> getOutputFileFormats() const;
@@ -252,6 +271,8 @@
Vector<int> mLevels;
};
+ int getCamcorderProfileIndex(int cameraId, camcorder_quality quality) const;
+
// Debug
static void logVideoCodec(const VideoCodec& codec);
static void logAudioCodec(const AudioCodec& codec);
@@ -281,8 +302,25 @@
// If the xml configuration file does not exist, use hard-coded values
static MediaProfiles* createDefaultInstance();
- static CamcorderProfile *createDefaultCamcorderLowProfile();
- static CamcorderProfile *createDefaultCamcorderHighProfile();
+
+ static CamcorderProfile *createDefaultCamcorderQcifProfile(camcorder_quality quality);
+ static CamcorderProfile *createDefaultCamcorderCifProfile(camcorder_quality quality);
+ static void createDefaultCamcorderLowProfiles(
+ MediaProfiles::CamcorderProfile **lowProfile,
+ MediaProfiles::CamcorderProfile **lowSpecificProfile);
+ static void createDefaultCamcorderHighProfiles(
+ MediaProfiles::CamcorderProfile **highProfile,
+ MediaProfiles::CamcorderProfile **highSpecificProfile);
+
+ static CamcorderProfile *createDefaultCamcorderTimeLapseQcifProfile(camcorder_quality quality);
+ static CamcorderProfile *createDefaultCamcorderTimeLapse480pProfile(camcorder_quality quality);
+ static void createDefaultCamcorderTimeLapseLowProfiles(
+ MediaProfiles::CamcorderProfile **lowTimeLapseProfile,
+ MediaProfiles::CamcorderProfile **lowSpecificTimeLapseProfile);
+ static void createDefaultCamcorderTimeLapseHighProfiles(
+ MediaProfiles::CamcorderProfile **highTimeLapseProfile,
+ MediaProfiles::CamcorderProfile **highSpecificTimeLapseProfile);
+
static void createDefaultCamcorderProfiles(MediaProfiles *profiles);
static void createDefaultVideoEncoders(MediaProfiles *profiles);
static void createDefaultAudioEncoders(MediaProfiles *profiles);
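A usage sketch for the new per-camera query (names illustrative):

    #include <media/MediaProfiles.h>

    using namespace android;

    camcorder_quality chooseQuality(int cameraId) {
        MediaProfiles* profiles = MediaProfiles::getInstance();
        if (profiles->hasCamcorderProfile(cameraId, CAMCORDER_QUALITY_720P)) {
            return CAMCORDER_QUALITY_720P;
        }
        // Fall back to the generic high-quality profile.
        return CAMCORDER_QUALITY_HIGH;
    }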
diff --git a/include/media/MediaRecorderBase.h b/include/media/MediaRecorderBase.h
index 5e9e368..c42346e 100644
--- a/include/media/MediaRecorderBase.h
+++ b/include/media/MediaRecorderBase.h
@@ -22,7 +22,7 @@
namespace android {
-class ISurface;
+class Surface;
struct MediaRecorderBase {
MediaRecorderBase() {}
@@ -37,9 +37,10 @@
virtual status_t setVideoSize(int width, int height) = 0;
virtual status_t setVideoFrameRate(int frames_per_second) = 0;
virtual status_t setCamera(const sp<ICamera>& camera) = 0;
- virtual status_t setPreviewSurface(const sp<ISurface>& surface) = 0;
+ virtual status_t setPreviewSurface(const sp<Surface>& surface) = 0;
virtual status_t setOutputFile(const char *path) = 0;
virtual status_t setOutputFile(int fd, int64_t offset, int64_t length) = 0;
+ virtual status_t setOutputFileAuxiliary(int fd) {return INVALID_OPERATION;}
virtual status_t setParameters(const String8& params) = 0;
virtual status_t setListener(const sp<IMediaRecorderClient>& listener) = 0;
virtual status_t prepare() = 0;
diff --git a/include/media/PVMediaRecorder.h b/include/media/PVMediaRecorder.h
deleted file mode 100644
index c091c39..0000000
--- a/include/media/PVMediaRecorder.h
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- **
- ** Copyright 2008, The Android Open Source Project
- **
- ** Licensed under the Apache License, Version 2.0 (the "License");
- ** you may not use this file except in compliance with the License.
- ** You may obtain a copy of the License at
- **
- ** http://www.apache.org/licenses/LICENSE-2.0
- **
- ** Unless required by applicable law or agreed to in writing, software
- ** distributed under the License is distributed on an "AS IS" BASIS,
- ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ** See the License for the specific language governing permissions and
- ** limitations under the License.
- */
-
-#ifndef ANDROID_PVMEDIARECORDER_H
-#define ANDROID_PVMEDIARECORDER_H
-
-#include <media/IMediaRecorderClient.h>
-#include <media/MediaRecorderBase.h>
-
-namespace android {
-
-class ISurface;
-class ICamera;
-class AuthorDriverWrapper;
-
-class PVMediaRecorder : public MediaRecorderBase {
-public:
- PVMediaRecorder();
- virtual ~PVMediaRecorder();
-
- virtual status_t init();
- virtual status_t setAudioSource(audio_source as);
- virtual status_t setVideoSource(video_source vs);
- virtual status_t setOutputFormat(output_format of);
- virtual status_t setAudioEncoder(audio_encoder ae);
- virtual status_t setVideoEncoder(video_encoder ve);
- virtual status_t setVideoSize(int width, int height);
- virtual status_t setVideoFrameRate(int frames_per_second);
- virtual status_t setCamera(const sp<ICamera>& camera);
- virtual status_t setPreviewSurface(const sp<ISurface>& surface);
- virtual status_t setOutputFile(const char *path);
- virtual status_t setOutputFile(int fd, int64_t offset, int64_t length);
- virtual status_t setParameters(const String8& params);
- virtual status_t setListener(const sp<IMediaRecorderClient>& listener);
- virtual status_t prepare();
- virtual status_t start();
- virtual status_t stop();
- virtual status_t close();
- virtual status_t reset();
- virtual status_t getMaxAmplitude(int *max);
- virtual status_t dump(int fd, const Vector<String16>& args) const;
-
-private:
- status_t doStop();
-
- AuthorDriverWrapper* mAuthorDriverWrapper;
-
- PVMediaRecorder(const PVMediaRecorder &);
- PVMediaRecorder &operator=(const PVMediaRecorder &);
-};
-
-}; // namespace android
-
-#endif // ANDROID_PVMEDIARECORDER_H
-
diff --git a/include/media/PVMetadataRetriever.h b/include/media/PVMetadataRetriever.h
deleted file mode 100644
index c202dfe..0000000
--- a/include/media/PVMetadataRetriever.h
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
-**
-** Copyright (C) 2008 The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-#ifndef ANDROID_PVMETADATARETRIEVER_H
-#define ANDROID_PVMETADATARETRIEVER_H
-
-#include <utils/Errors.h>
-#include <media/MediaMetadataRetrieverInterface.h>
-#include <private/media/VideoFrame.h>
-
-namespace android {
-
-class MetadataDriver;
-
-class PVMetadataRetriever : public MediaMetadataRetrieverInterface
-{
-public:
- PVMetadataRetriever();
- virtual ~PVMetadataRetriever();
-
- virtual status_t setDataSource(const char *url);
- virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
- virtual status_t setMode(int mode);
- virtual status_t getMode(int* mode) const;
- virtual VideoFrame* captureFrame();
- virtual MediaAlbumArt* extractAlbumArt();
- virtual const char* extractMetadata(int keyCode);
-
-private:
- mutable Mutex mLock;
- MetadataDriver* mMetadataDriver;
- char* mDataSourcePath;
-};
-
-}; // namespace android
-
-#endif // ANDROID_PVMETADATARETRIEVER_H
diff --git a/include/media/PVPlayer.h b/include/media/PVPlayer.h
deleted file mode 100644
index df50981..0000000
--- a/include/media/PVPlayer.h
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Copyright (C) 2008 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_PVPLAYER_H
-#define ANDROID_PVPLAYER_H
-
-#include <utils/Errors.h>
-#include <media/MediaPlayerInterface.h>
-#include <media/Metadata.h>
-
-#define MAX_OPENCORE_INSTANCES 25
-
-#ifdef MAX_OPENCORE_INSTANCES
-#include <cutils/atomic.h>
-#endif
-
-class PlayerDriver;
-
-namespace android {
-
-class PVPlayer : public MediaPlayerInterface
-{
-public:
- PVPlayer();
- virtual ~PVPlayer();
-
- virtual status_t initCheck();
-
- virtual status_t setDataSource(
- const char *url, const KeyedVector<String8, String8> *headers);
-
- virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
- virtual status_t setVideoSurface(const sp<ISurface>& surface);
- virtual status_t prepare();
- virtual status_t prepareAsync();
- virtual status_t start();
- virtual status_t stop();
- virtual status_t pause();
- virtual bool isPlaying();
- virtual status_t seekTo(int msec);
- virtual status_t getCurrentPosition(int *msec);
- virtual status_t getDuration(int *msec);
- virtual status_t reset();
- virtual status_t setLooping(int loop);
- virtual player_type playerType() { return PV_PLAYER; }
- virtual status_t invoke(const Parcel& request, Parcel *reply);
- virtual status_t getMetadata(
- const SortedVector<media::Metadata::Type>& ids,
- Parcel *records);
-
- // make available to PlayerDriver
- void sendEvent(int msg, int ext1=0, int ext2=0) { MediaPlayerBase::sendEvent(msg, ext1, ext2); }
-
-private:
- static void do_nothing(status_t s, void *cookie, bool cancelled) { }
- static void run_init(status_t s, void *cookie, bool cancelled);
- static void run_set_video_surface(status_t s, void *cookie, bool cancelled);
- static void run_set_audio_output(status_t s, void *cookie, bool cancelled);
- static void run_prepare(status_t s, void *cookie, bool cancelled);
- static void check_for_live_streaming(status_t s, void *cookie, bool cancelled);
-
- PlayerDriver* mPlayerDriver;
- char * mDataSourcePath;
- bool mIsDataSourceSet;
- sp<ISurface> mSurface;
- int mSharedFd;
- status_t mInit;
- int mDuration;
-
-#ifdef MAX_OPENCORE_INSTANCES
- static volatile int32_t sNumInstances;
-#endif
-};
-
-}; // namespace android
-
-#endif // ANDROID_PVPLAYER_H
diff --git a/include/media/mediaplayer.h b/include/media/mediaplayer.h
index 207191d..88b0c3e 100644
--- a/include/media/mediaplayer.h
+++ b/include/media/mediaplayer.h
@@ -169,8 +169,6 @@
status_t invoke(const Parcel& request, Parcel *reply);
status_t setMetadataFilter(const Parcel& filter);
status_t getMetadata(bool update_only, bool apply_filter, Parcel *metadata);
- status_t suspend();
- status_t resume();
status_t setAudioSessionId(int sessionId);
int getAudioSessionId();
status_t setAuxEffectSendLevel(float level);
diff --git a/include/media/mediarecorder.h b/include/media/mediarecorder.h
index 5ab1640..a710546 100644
--- a/include/media/mediarecorder.h
+++ b/include/media/mediarecorder.h
@@ -44,7 +44,8 @@
AUDIO_SOURCE_VOICE_CALL = 4,
AUDIO_SOURCE_CAMCORDER = 5,
AUDIO_SOURCE_VOICE_RECOGNITION = 6,
- AUDIO_SOURCE_MAX = AUDIO_SOURCE_VOICE_RECOGNITION,
+ AUDIO_SOURCE_VOICE_COMMUNICATION = 7,
+ AUDIO_SOURCE_MAX = AUDIO_SOURCE_VOICE_COMMUNICATION,
AUDIO_SOURCE_LIST_END // must be last - used to validate audio source type
};
@@ -173,6 +174,7 @@
status_t setAudioEncoder(int ae);
status_t setOutputFile(const char* path);
status_t setOutputFile(int fd, int64_t offset, int64_t length);
+ status_t setOutputFileAuxiliary(int fd);
status_t setVideoSize(int width, int height);
status_t setVideoFrameRate(int frames_per_second);
status_t setParameters(const String8& params);
@@ -199,6 +201,7 @@
bool mIsAudioEncoderSet;
bool mIsVideoEncoderSet;
bool mIsOutputFileSet;
+ bool mIsAuxiliaryOutputFileSet;
Mutex mLock;
Mutex mNotifyLock;
};
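A recording sketch using the new auxiliary output (error handling trimmed; the caller is assumed to have opened both file descriptors and to have set a camera and preview surface beforehand):

    #include <media/mediarecorder.h>

    using namespace android;

    status_t startDualOutput(MediaRecorder* recorder, int mainFd, int auxFd) {
        recorder->setAudioSource(AUDIO_SOURCE_CAMCORDER);
        recorder->setVideoSource(VIDEO_SOURCE_CAMERA);
        recorder->setOutputFormat(OUTPUT_FORMAT_MPEG_4);
        recorder->setAudioEncoder(AUDIO_ENCODER_AAC);
        recorder->setVideoEncoder(VIDEO_ENCODER_H264);
        recorder->setOutputFile(mainFd, 0, 0);
        status_t err = recorder->setOutputFileAuxiliary(auxFd);
        if (err != OK) {
            return err;
        }
        err = recorder->prepare();
        return err == OK ? recorder->start() : err;
    }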
diff --git a/include/media/mediascanner.h b/include/media/mediascanner.h
index 0d397ac..74c9d5d 100644
--- a/include/media/mediascanner.h
+++ b/include/media/mediascanner.h
@@ -38,8 +38,7 @@
typedef bool (*ExceptionCheck)(void* env);
virtual status_t processDirectory(
- const char *path, const char *extensions,
- MediaScannerClient &client,
+ const char *path, MediaScannerClient &client,
ExceptionCheck exceptionCheck, void *exceptionEnv);
void setLocale(const char *locale);
@@ -55,9 +54,8 @@
char *mLocale;
status_t doProcessDirectory(
- char *path, int pathRemaining, const char *extensions,
- MediaScannerClient &client, ExceptionCheck exceptionCheck,
- void *exceptionEnv);
+ char *path, int pathRemaining, MediaScannerClient &client,
+ ExceptionCheck exceptionCheck, void *exceptionEnv);
MediaScanner(const MediaScanner &);
MediaScanner &operator=(const MediaScanner &);
diff --git a/include/media/stagefright/AMRWriter.h b/include/media/stagefright/AMRWriter.h
index aa965e1..62d57b4 100644
--- a/include/media/stagefright/AMRWriter.h
+++ b/include/media/stagefright/AMRWriter.h
@@ -44,7 +44,7 @@
virtual ~AMRWriter();
private:
- FILE *mFile;
+ int mFd;
status_t mInitCheck;
sp<MediaSource> mSource;
bool mStarted;
diff --git a/include/media/stagefright/CameraSource.h b/include/media/stagefright/CameraSource.h
index 3192d03..e6c9f93 100644
--- a/include/media/stagefright/CameraSource.h
+++ b/include/media/stagefright/CameraSource.h
@@ -20,39 +20,167 @@
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaSource.h>
+#include <camera/ICamera.h>
+#include <camera/CameraParameters.h>
#include <utils/List.h>
#include <utils/RefBase.h>
-#include <utils/threads.h>
namespace android {
-class ICamera;
class IMemory;
class Camera;
+class Surface;
class CameraSource : public MediaSource, public MediaBufferObserver {
public:
+ /**
+ * Factory method to create a new CameraSource using the current
+ * settings (such as video size, frame rate, color format, etc)
+ * from the default camera.
+ *
+ * @return NULL on error.
+ */
static CameraSource *Create();
- static CameraSource *CreateFromCamera(const sp<Camera> &camera);
+
+ /**
+ * Factory method to create a new CameraSource.
+ *
+ * @param camera the video input frame data source. If it is NULL,
+ * we will try to connect to the camera with the given
+ * cameraId.
+ *
+ * @param cameraId the id of the camera that the source will connect
+ * to if camera is NULL; otherwise ignored.
+ *
+ * @param videoSize the dimension (in pixels) of the video frame
+ * @param frameRate the target frames per second
+ * @param surface the preview surface to which preview
+ * frames are sent for display
+ * @param storeMetaDataInVideoBuffers true to request the camera
+ * source to store meta data in video buffers; false to
+ * request the camera source to store real YUV frame data
+ * in the video buffers. The camera source may not support
+ * storing meta data in video buffers; if so, a request
+ * to do that will NOT be honored. To find out whether
+ * meta data is actually being stored in video buffers
+ * during recording, call isMetaDataStoredInVideoBuffers().
+ *
+ * @return NULL on error.
+ */
+ static CameraSource *CreateFromCamera(const sp<ICamera> &camera,
+ int32_t cameraId,
+ Size videoSize,
+ int32_t frameRate,
+ const sp<Surface>& surface,
+ bool storeMetaDataInVideoBuffers = false);
virtual ~CameraSource();
virtual status_t start(MetaData *params = NULL);
virtual status_t stop();
-
- virtual sp<MetaData> getFormat();
-
virtual status_t read(
MediaBuffer **buffer, const ReadOptions *options = NULL);
+ /**
+ * Check whether a CameraSource object is properly initialized.
+ * Must call this method before stop().
+ * @return OK if initialization has successfully completed.
+ */
+ virtual status_t initCheck() const;
+
+ /**
+ * Returns the MetaData associated with the CameraSource,
+ * including:
+ * kKeyColorFormat: YUV color format of the video frames
+ * kKeyWidth, kKeyHeight: dimension (in pixels) of the video frames
+ * kKeySampleRate: frame rate in frames per second
+ * kKeyMIMEType: always fixed to be MEDIA_MIMETYPE_VIDEO_RAW
+ */
+ virtual sp<MetaData> getFormat();
+
+ /**
+ * Retrieve the total number of video buffers available from
+ * this source.
+ *
+ * This method is useful if these video buffers are used
+ * for passing video frame data to other media components,
+ * such as OMX video encoders, in order to eliminate the
+ * memcpy of the data.
+ *
+ * @return the total number of video buffers. Returns 0 to
+ * indicate that this source does not make the video
+ * buffer information available.
+ */
+ size_t getNumberOfVideoBuffers() const;
+
+ /**
+ * Retrieve the individual video buffer available from
+ * this source.
+ *
+ * @param index the index corresponding to the video buffer.
+ * Valid range of the index is [0, n], where n =
+ * getNumberOfVideoBuffers() - 1.
+ *
+ * @return the video buffer corresponding to the given index.
+ * If index is out of range, 0 should be returned.
+ */
+ sp<IMemory> getVideoBuffer(size_t index) const;
+
+ /**
+ * Tell whether this camera source stores meta data or real YUV
+ * frame data in video buffers.
+ *
+ * @return true if meta data is stored in the video
+ * buffers; false if real YUV data is stored in
+ * the video buffers.
+ */
+ bool isMetaDataStoredInVideoBuffers() const;
+
virtual void signalBufferReturned(MediaBuffer* buffer);
+protected:
+ enum CameraFlags {
+ FLAGS_SET_CAMERA = 1L << 0,
+ FLAGS_HOT_CAMERA = 1L << 1,
+ };
+
+ int32_t mCameraFlags;
+ Size mVideoSize;
+ int32_t mVideoFrameRate;
+ int32_t mColorFormat;
+ status_t mInitCheck;
+
+ sp<Camera> mCamera;
+ sp<Surface> mSurface;
+ sp<MetaData> mMeta;
+
+ int64_t mStartTimeUs;
+ int32_t mNumFramesReceived;
+ int64_t mLastFrameTimestampUs;
+ bool mStarted;
+
+ CameraSource(const sp<ICamera>& camera, int32_t cameraId,
+ Size videoSize, int32_t frameRate,
+ const sp<Surface>& surface,
+ bool storeMetaDataInVideoBuffers);
+
+ virtual void startCameraRecording();
+ virtual void stopCameraRecording();
+ virtual void releaseRecordingFrame(const sp<IMemory>& frame);
+
+ // Returns true if the current frame needs to be skipped.
+ // Called from dataCallbackTimestamp.
+ virtual bool skipCurrentFrame(int64_t timestampUs) {return false;}
+
+ // Callback called when still camera raw data is available.
+ virtual void dataCallback(int32_t msgType, const sp<IMemory> &data) {}
+
+ virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
+ const sp<IMemory> &data);
+
private:
friend class CameraSourceListener;
- sp<Camera> mCamera;
- sp<MetaData> mMeta;
-
Mutex mLock;
Condition mFrameAvailableCondition;
Condition mFrameCompleteCondition;
@@ -60,25 +188,33 @@
List<sp<IMemory> > mFramesBeingEncoded;
List<int64_t> mFrameTimes;
- int64_t mStartTimeUs;
int64_t mFirstFrameTimeUs;
- int64_t mLastFrameTimestampUs;
- int32_t mNumFramesReceived;
int32_t mNumFramesEncoded;
int32_t mNumFramesDropped;
int32_t mNumGlitches;
int64_t mGlitchDurationThresholdUs;
bool mCollectStats;
- bool mStarted;
-
- CameraSource(const sp<Camera> &camera);
-
- void dataCallbackTimestamp(
- int64_t timestampUs, int32_t msgType, const sp<IMemory> &data);
+ bool mIsMetaDataStoredInVideoBuffers;
void releaseQueuedFrames();
void releaseOneRecordingFrame(const sp<IMemory>& frame);
+
+ status_t init(const sp<ICamera>& camera, int32_t cameraId,
+ Size videoSize, int32_t frameRate,
+ bool storeMetaDataInVideoBuffers);
+ status_t isCameraAvailable(const sp<ICamera>& camera, int32_t cameraId);
+ status_t isCameraColorFormatSupported(const CameraParameters& params);
+ status_t configureCamera(CameraParameters* params,
+ int32_t width, int32_t height,
+ int32_t frameRate);
+
+ status_t checkVideoSize(const CameraParameters& params,
+ int32_t width, int32_t height);
+
+ status_t checkFrameRate(const CameraParameters& params,
+ int32_t frameRate);
+
CameraSource(const CameraSource &);
CameraSource &operator=(const CameraSource &);
};
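A construction sketch for the reworked factory method; the size, frame rate, and camera id are illustrative:

    #include <media/stagefright/CameraSource.h>

    using namespace android;

    sp<CameraSource> makeCameraSource(const sp<Surface>& previewSurface) {
        Size videoSize;
        videoSize.width = 1280;
        videoSize.height = 720;
        sp<CameraSource> source = CameraSource::CreateFromCamera(
                NULL /* camera: connect to cameraId instead */, 0 /* cameraId */,
                videoSize, 30 /* frameRate */, previewSurface,
                true /* storeMetaDataInVideoBuffers */);
        if (source == NULL || source->initCheck() != OK) {
            return NULL;
        }
        if (!source->isMetaDataStoredInVideoBuffers()) {
            // The request was not honored; buffers carry real YUV frame data.
        }
        return source;
    }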
diff --git a/include/media/stagefright/CameraSourceTimeLapse.h b/include/media/stagefright/CameraSourceTimeLapse.h
new file mode 100644
index 0000000..afe7287
--- /dev/null
+++ b/include/media/stagefright/CameraSourceTimeLapse.h
@@ -0,0 +1,243 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CAMERA_SOURCE_TIME_LAPSE_H_
+
+#define CAMERA_SOURCE_TIME_LAPSE_H_
+
+#include <pthread.h>
+
+#include <utils/RefBase.h>
+#include <utils/threads.h>
+
+namespace android {
+
+class ICamera;
+class IMemory;
+class Camera;
+
+class CameraSourceTimeLapse : public CameraSource {
+public:
+ static CameraSourceTimeLapse *CreateFromCamera(
+ const sp<ICamera> &camera,
+ int32_t cameraId,
+ Size videoSize,
+ int32_t videoFrameRate,
+ const sp<Surface>& surface,
+ int64_t timeBetweenTimeLapseFrameCaptureUs);
+
+ virtual ~CameraSourceTimeLapse();
+
+ // If the frame capture interval is large, read will block for a long time.
+ // Due to the way the mediaRecorder framework works, a stop() call from
+ // mediaRecorder waits until the read returns, causing a long wait for
+ // stop() to return. To avoid this, we can make read() frequently return a
+ // copy of the last read frame with the same time stamp. This keeps the
+ // read() call from blocking too long. Calling this function quickly
+ // captures another frame, keeps its copy, and enables this mode of read()
+ // returning quickly.
+ void startQuickReadReturns();
+
+private:
+ // If true, will use still camera takePicture() for time lapse frames
+ // If false, will use the videocamera frames instead.
+ bool mUseStillCameraForTimeLapse;
+
+ // Size of picture taken from still camera. This may be larger than the size
+ // of the video, as the still camera may not support the exact video resolution
+ // demanded. See setPictureSizeToClosestSupported().
+ int32_t mPictureWidth;
+ int32_t mPictureHeight;
+
+ // size of the encoded video.
+ int32_t mVideoWidth;
+ int32_t mVideoHeight;
+
+ // True if we need to crop the still camera image to get the video frame.
+ bool mNeedCropping;
+
+ // Start location of the cropping rectangle.
+ int32_t mCropRectStartX;
+ int32_t mCropRectStartY;
+
+ // Time between capture of two frames during time lapse recording
+ // Negative value indicates that timelapse is disabled.
+ int64_t mTimeBetweenTimeLapseFrameCaptureUs;
+
+ // Time between two frames in final video (1/frameRate)
+ int64_t mTimeBetweenTimeLapseVideoFramesUs;
+
+ // Real timestamp of the last encoded time lapse frame
+ int64_t mLastTimeLapseFrameRealTimestampUs;
+
+ // Thread id of thread which takes still picture and sleeps in a loop.
+ pthread_t mThreadTimeLapse;
+
+ // Variable set in dataCallbackTimestamp() to help skipCurrentFrame()
+ // to know if current frame needs to be skipped.
+ bool mSkipCurrentFrame;
+
+ // Lock for accessing mCameraIdle
+ Mutex mCameraIdleLock;
+
+ // Condition variable to wait on if the camera is not yet idle. Once the
+ // camera gets idle, this variable will be signalled.
+ Condition mCameraIdleCondition;
+
+ // True if camera is in preview mode and ready for takePicture().
+ // False after a call to takePicture() but before the final compressed
+ // data callback has been called and preview has been restarted.
+ volatile bool mCameraIdle;
+
+ // True if stop() is waiting for camera to get idle, i.e. for the last
+ // takePicture() to complete. This is needed so that dataCallbackTimestamp()
+ // can return immediately.
+ volatile bool mStopWaitingForIdleCamera;
+
+ // Lock for accessing quick stop variables.
+ Mutex mQuickStopLock;
+
+ // Condition variable to wake up still picture thread.
+ Condition mTakePictureCondition;
+
+ // mQuickStop is set to true if we use quick read() returns, otherwise it is set
+ // to false. Once in this mode read() returns a copy of the last read frame
+ // with the same time stamp. See startQuickReadReturns().
+ volatile bool mQuickStop;
+
+ // Forces the next frame passed to dataCallbackTimestamp() to be read
+ // as a time lapse frame. Used by startQuickReadReturns() so that the next
+ // frame wakes up any blocking read.
+ volatile bool mForceRead;
+
+ // Stores a copy of the MediaBuffer read in the last read() call after
+ // mQuickStop was true.
+ MediaBuffer* mLastReadBufferCopy;
+
+ // Status code for last read.
+ status_t mLastReadStatus;
+
+ CameraSourceTimeLapse(
+ const sp<ICamera> &camera,
+ int32_t cameraId,
+ Size videoSize,
+ int32_t videoFrameRate,
+ const sp<Surface>& surface,
+ int64_t timeBetweenTimeLapseFrameCaptureUs);
+
+ // Wrapper over CameraSource::signalBufferReturned() to implement quick stop.
+ // It only handles the case when mLastReadBufferCopy is signalled. Otherwise
+ // it calls the base class' function.
+ virtual void signalBufferReturned(MediaBuffer* buffer);
+
+ // Wrapper over CameraSource::read() to implement quick stop.
+ virtual status_t read(MediaBuffer **buffer, const ReadOptions *options = NULL);
+
+ // For still camera case starts a thread which calls camera's takePicture()
+ // in a loop. For video camera case, just starts the camera's video recording.
+ virtual void startCameraRecording();
+
+ // For still camera case joins the thread created in startCameraRecording().
+ // For video camera case, just stops the camera's video recording.
+ virtual void stopCameraRecording();
+
+ // For the still camera case, nothing needs to be done, as the memory is
+ // locally allocated with refcounting.
+ // For video camera case just tell the camera to release the frame.
+ virtual void releaseRecordingFrame(const sp<IMemory>& frame);
+
+ // mSkipCurrentFrame is set to true in dataCallbackTimestamp() if the current
+ // frame needs to be skipped and this function just returns the value of mSkipCurrentFrame.
+ virtual bool skipCurrentFrame(int64_t timestampUs);
+
+ // Handles the callback to handle raw frame data from the still camera.
+ // Creates a copy of the frame data as the camera can reuse the frame memory
+ // once this callback returns. The function also sets a new timestamp corresponding
+ // to one frame time ahead of the last encoded frame's time stamp. It then
+ // calls dataCallbackTimestamp() of the base class with the copied data and the
+ // modified timestamp, which will think that it received the frame from a video
+ // camera and proceed as usual.
+ virtual void dataCallback(int32_t msgType, const sp<IMemory> &data);
+
+ // In the video camera case calls skipFrameAndModifyTimeStamp() to modify
+ // timestamp and set mSkipCurrentFrame.
+ // Then it calls the base CameraSource::dataCallbackTimestamp()
+ virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
+ const sp<IMemory> &data);
+
+ // Convenience function to fill mLastReadBufferCopy from the just read
+ // buffer.
+ void fillLastReadBufferCopy(MediaBuffer& sourceBuffer);
+
+ // If the passed in size (width x height) is a supported preview size,
+ // the function sets the camera's preview size to it and returns true.
+ // Otherwise returns false.
+ bool trySettingPreviewSize(int32_t width, int32_t height);
+
+ // The still camera may not support the demanded video width and height.
+ // We look for the supported picture sizes from the still camera and
+ // choose the smallest one with both dimensions no smaller than the corresponding
+ // video dimensions. The still picture will be cropped to get the video frame.
+ // The function returns true if the camera supports picture sizes greater than
+ // or equal to the passed in width and height, and false otherwise.
+ bool setPictureSizeToClosestSupported(int32_t width, int32_t height);
+
+ // Computes the offset of the rectangle from where to start cropping the
+ // still image into the video frame. We choose the center of the image to be
+ // cropped. The offset is stored in (mCropRectStartX, mCropRectStartY).
+ bool computeCropRectangleOffset();
+
+ // Crops the source data into a smaller image starting at
+ // (mCropRectStartX, mCropRectStartY) and of the size of the video frame.
+ // The data is returned into a newly allocated IMemory.
+ sp<IMemory> cropYUVImage(const sp<IMemory> &source_data);
+
+ // When video camera is used for time lapse capture, returns true
+ // until enough time has passed for the next time lapse frame. When
+ // the frame needs to be encoded, it returns false and also modifies
+ // the time stamp to be one frame time ahead of the last encoded
+ // frame's time stamp.
+ bool skipFrameAndModifyTimeStamp(int64_t *timestampUs);
+
+ // Wrapper to enter threadTimeLapseEntry()
+ static void *ThreadTimeLapseWrapper(void *me);
+
+ // Runs a loop which sleeps until a still picture is required
+ // and then calls mCamera->takePicture() to take the still picture.
+ // Used only in the case mUseStillCameraForTimeLapse = true.
+ void threadTimeLapseEntry();
+
+ // Wrapper to enter threadStartPreview()
+ static void *ThreadStartPreviewWrapper(void *me);
+
+ // Starts the camera's preview.
+ void threadStartPreview();
+
+ // Starts thread ThreadStartPreviewWrapper() for restarting preview.
+ // Needs to be done in a thread so that dataCallback() which calls this function
+ // can return, and the camera can know that takePicture() is done.
+ void restartPreview();
+
+ // Creates a copy of source_data into a new memory of final type MemoryBase.
+ sp<IMemory> createIMemoryCopy(const sp<IMemory> &source_data);
+
+ CameraSourceTimeLapse(const CameraSourceTimeLapse &);
+ CameraSourceTimeLapse &operator=(const CameraSourceTimeLapse &);
+};
+
+} // namespace android
+
+#endif // CAMERA_SOURCE_TIME_LAPSE_H_
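A sketch showing how the time lapse variant might be created to capture one frame every five seconds into a 30 fps clip; all numeric values are illustrative:

    #include <media/stagefright/CameraSource.h>
    #include <media/stagefright/CameraSourceTimeLapse.h>

    using namespace android;

    sp<CameraSourceTimeLapse> makeTimeLapseSource(
            const sp<ICamera>& camera, const sp<Surface>& surface) {
        Size videoSize;
        videoSize.width = 1280;
        videoSize.height = 720;
        return CameraSourceTimeLapse::CreateFromCamera(
                camera, 0 /* cameraId */, videoSize, 30 /* videoFrameRate */,
                surface, 5000000LL /* timeBetweenTimeLapseFrameCaptureUs */);
    }

When tearing down, startQuickReadReturns() can be called first so that a pending read() does not keep stop() waiting for the full capture interval.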
diff --git a/include/media/stagefright/ColorConverter.h b/include/media/stagefright/ColorConverter.h
index bc3f464..2b61f58 100644
--- a/include/media/stagefright/ColorConverter.h
+++ b/include/media/stagefright/ColorConverter.h
@@ -33,35 +33,47 @@
bool isValid() const;
void convert(
- size_t width, size_t height,
- const void *srcBits, size_t srcSkip,
- void *dstBits, size_t dstSkip);
+ const void *srcBits,
+ size_t srcWidth, size_t srcHeight,
+ size_t srcCropLeft, size_t srcCropTop,
+ size_t srcCropRight, size_t srcCropBottom,
+ void *dstBits,
+ size_t dstWidth, size_t dstHeight,
+ size_t dstCropLeft, size_t dstCropTop,
+ size_t dstCropRight, size_t dstCropBottom);
private:
+ struct BitmapParams {
+ BitmapParams(
+ void *bits,
+ size_t width, size_t height,
+ size_t cropLeft, size_t cropTop,
+ size_t cropRight, size_t cropBottom);
+
+ size_t cropWidth() const;
+ size_t cropHeight() const;
+
+ void *mBits;
+ size_t mWidth, mHeight;
+ size_t mCropLeft, mCropTop, mCropRight, mCropBottom;
+ };
+
OMX_COLOR_FORMATTYPE mSrcFormat, mDstFormat;
uint8_t *mClip;
uint8_t *initClip();
void convertCbYCrY(
- size_t width, size_t height,
- const void *srcBits, size_t srcSkip,
- void *dstBits, size_t dstSkip);
+ const BitmapParams &src, const BitmapParams &dst);
void convertYUV420Planar(
- size_t width, size_t height,
- const void *srcBits, size_t srcSkip,
- void *dstBits, size_t dstSkip);
+ const BitmapParams &src, const BitmapParams &dst);
void convertQCOMYUV420SemiPlanar(
- size_t width, size_t height,
- const void *srcBits, size_t srcSkip,
- void *dstBits, size_t dstSkip);
+ const BitmapParams &src, const BitmapParams &dst);
void convertYUV420SemiPlanar(
- size_t width, size_t height,
- const void *srcBits, size_t srcSkip,
- void *dstBits, size_t dstSkip);
+ const BitmapParams &src, const BitmapParams &dst);
ColorConverter(const ColorConverter &);
ColorConverter &operator=(const ColorConverter &);
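A call sketch for the new crop-aware signature, converting a full decoded frame with no cropping; treating the crop coordinates as inclusive (ending at width - 1 and height - 1) is an assumption based on the cropWidth()/cropHeight() helpers above:

    #include <media/stagefright/ColorConverter.h>

    using namespace android;

    void convertFullFrame(const void* yuv, void* rgb565,
                          size_t width, size_t height) {
        ColorConverter converter(
                OMX_COLOR_FormatYUV420Planar, OMX_COLOR_Format16bitRGB565);
        if (!converter.isValid()) {
            return;
        }
        converter.convert(
                yuv, width, height, 0, 0, width - 1, height - 1,
                rgb565, width, height, 0, 0, width - 1, height - 1);
    }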
diff --git a/include/media/stagefright/DataSource.h b/include/media/stagefright/DataSource.h
index d0b9fcd..d4f1733 100644
--- a/include/media/stagefright/DataSource.h
+++ b/include/media/stagefright/DataSource.h
@@ -48,13 +48,13 @@
virtual status_t initCheck() const = 0;
- virtual ssize_t readAt(off_t offset, void *data, size_t size) = 0;
+ virtual ssize_t readAt(off64_t offset, void *data, size_t size) = 0;
// Convenience methods:
- bool getUInt16(off_t offset, uint16_t *x);
+ bool getUInt16(off64_t offset, uint16_t *x);
// May return ERROR_UNSUPPORTED.
- virtual status_t getSize(off_t *size);
+ virtual status_t getSize(off64_t *size);
virtual uint32_t flags() {
return 0;
@@ -80,6 +80,9 @@
}
virtual void getDrmInfo(DecryptHandle **handle, DrmManagerClient **client) {};
+ virtual String8 getUri() {
+ return String8();
+ }
protected:
virtual ~DataSource() {}
diff --git a/include/media/stagefright/FileSource.h b/include/media/stagefright/FileSource.h
index 4307263..72a0403 100644
--- a/include/media/stagefright/FileSource.h
+++ b/include/media/stagefright/FileSource.h
@@ -34,9 +34,9 @@
virtual status_t initCheck() const;
- virtual ssize_t readAt(off_t offset, void *data, size_t size);
+ virtual ssize_t readAt(off64_t offset, void *data, size_t size);
- virtual status_t getSize(off_t *size);
+ virtual status_t getSize(off64_t *size);
virtual DecryptHandle* DrmInitialization(DrmManagerClient *client);
@@ -46,7 +46,6 @@
virtual ~FileSource();
private:
- FILE *mFile;
int mFd;
int64_t mOffset;
int64_t mLength;
@@ -59,7 +58,7 @@
int64_t mDrmBufSize;
unsigned char *mDrmBuf;
- ssize_t readAtDRM(off_t offset, void *data, size_t size);
+ ssize_t readAtDRM(off64_t offset, void *data, size_t size);
FileSource(const FileSource &);
FileSource &operator=(const FileSource &);
diff --git a/include/media/stagefright/HardwareAPI.h b/include/media/stagefright/HardwareAPI.h
index 63f11d1..17908b4 100644
--- a/include/media/stagefright/HardwareAPI.h
+++ b/include/media/stagefright/HardwareAPI.h
@@ -19,28 +19,76 @@
#define HARDWARE_API_H_
#include <media/stagefright/OMXPluginBase.h>
-#include <media/stagefright/VideoRenderer.h>
-#include <surfaceflinger/ISurface.h>
+#include <ui/android_native_buffer.h>
#include <utils/RefBase.h>
#include <OMX_Component.h>
-extern android::VideoRenderer *createRenderer(
- const android::sp<android::ISurface> &surface,
- const char *componentName,
- OMX_COLOR_FORMATTYPE colorFormat,
- size_t displayWidth, size_t displayHeight,
- size_t decodedWidth, size_t decodedHeight);
+namespace android {
-extern android::VideoRenderer *createRendererWithRotation(
- const android::sp<android::ISurface> &surface,
- const char *componentName,
- OMX_COLOR_FORMATTYPE colorFormat,
- size_t displayWidth, size_t displayHeight,
- size_t decodedWidth, size_t decodedHeight,
- int32_t rotationDegrees);
+// A pointer to this struct is passed to the OMX_SetParameter when the extension
+// index for the 'OMX.google.android.index.enableAndroidNativeBuffers' extension
+// is given.
+//
+// When Android native buffer use is disabled for a port (the default state),
+// the OMX node should operate as normal, and expect UseBuffer calls to set its
+// buffers. This is the mode that will be used when CPU access to the buffer is
+// required.
+//
+// When Android native buffer use has been enabled for a given port, the video
+// color format for the port is to be interpreted as an Android pixel format
+// rather than an OMX color format. The node should then expect to receive
+// UseAndroidNativeBuffer calls (via OMX_SetParameter) rather than UseBuffer
+// calls for that port.
+struct EnableAndroidNativeBuffersParams {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_BOOL enable;
+};
+
+// A pointer to this struct is passed to OMX_SetParameter() when the extension
+// index "OMX.google.android.index.storeMetaDataInBuffers"
+// is given.
+//
+// When meta data is stored in the video buffers passed between OMX clients
+// and OMX components, interpretation of the buffer data is up to the
+// buffer receiver, and the data may or may not be the actual video data, but
+// some information helpful for the receiver to locate the actual data.
+// The buffer receiver thus needs to know how to interpret what is stored
+// in these buffers, with mechanisms pre-determined externally. How to
+// interpret the meta data is outside of the scope of this method.
+//
+// Currently, this is specifically used to pass meta data from video source
+// (camera component, for instance) to video encoder to avoid memcpying of
+// input video frame data. To do this, bStoreMetaData is set to OMX_TRUE.
+// If bStoreMetaData is set to OMX_FALSE, real YUV frame data will be stored
+// in the buffers. In addition, if no OMX_SetParameter() call is made
+// with the corresponding extension index, real YUV data is stored
+// in the buffers.
+struct StoreMetaDataInBuffersParams {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_BOOL bStoreMetaData;
+};
+
+// A pointer to this struct is passed to OMX_SetParameter when the extension
+// index for the 'OMX.google.android.index.useAndroidNativeBuffer' extension is
+// given. This call will only be performed if a prior call was made with the
+// 'OMX.google.android.index.enableAndroidNativeBuffers' extension index,
+// enabling use of Android native buffers.
+struct UseAndroidNativeBufferParams {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_PTR pAppPrivate;
+ OMX_BUFFERHEADERTYPE **bufferHeader;
+ const sp<android_native_buffer_t>& nativeBuffer;
+};
+
+} // namespace android
extern android::OMXPluginBase *createOMXPlugin();
#endif // HARDWARE_API_H_
-
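The three extension structs above follow the standard OMX IL pattern: the client resolves the vendor extension index by name and then passes a versioned parameter struct to OMX_SetParameter. The sketch below illustrates that pattern for the enableAndroidNativeBuffers extension, assuming a raw OMX_HANDLETYPE and a local InitOMXParams() helper (not part of this patch) that fills in nSize/nVersion; storeMetaDataInBuffers is driven the same way with StoreMetaDataInBuffersParams and its bStoreMetaData field.

#include <OMX_Core.h>
#include <media/stagefright/HardwareAPI.h>

// Illustrative helper: fill the common OMX header fields of a param struct.
template<class T>
static void InitOMXParams(T *params) {
    params->nSize = sizeof(T);
    params->nVersion.s.nVersionMajor = 1;
    params->nVersion.s.nVersionMinor = 0;
    params->nVersion.s.nRevision = 0;
    params->nVersion.s.nStep = 0;
}

static OMX_ERRORTYPE enableAndroidNativeBuffers(
        OMX_HANDLETYPE component, OMX_U32 portIndex, OMX_BOOL enable) {
    // Resolve the Android-specific extension index by name.
    OMX_INDEXTYPE index;
    OMX_ERRORTYPE err = OMX_GetExtensionIndex(
            component,
            const_cast<OMX_STRING>(
                    "OMX.google.android.index.enableAndroidNativeBuffers"),
            &index);
    if (err != OMX_ErrorNone) {
        return err;
    }

    // Pass the struct defined above through the generic parameter call.
    android::EnableAndroidNativeBuffersParams params;
    InitOMXParams(&params);
    params.nPortIndex = portIndex;
    params.enable = enable;

    return OMX_SetParameter(component, index, &params);
}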
diff --git a/include/media/stagefright/JPEGSource.h b/include/media/stagefright/JPEGSource.h
index 9d0a700..1b7e91b 100644
--- a/include/media/stagefright/JPEGSource.h
+++ b/include/media/stagefright/JPEGSource.h
@@ -42,9 +42,9 @@
sp<DataSource> mSource;
MediaBufferGroup *mGroup;
bool mStarted;
- off_t mSize;
+ off64_t mSize;
int32_t mWidth, mHeight;
- off_t mOffset;
+ off64_t mOffset;
status_t parseJPEG();
diff --git a/include/media/stagefright/MPEG4Writer.h b/include/media/stagefright/MPEG4Writer.h
index 7bf07eb..f7618e9 100644
--- a/include/media/stagefright/MPEG4Writer.h
+++ b/include/media/stagefright/MPEG4Writer.h
@@ -61,20 +61,21 @@
private:
class Track;
- FILE *mFile;
+ int mFd;
+ status_t mInitCheck;
bool mUse4ByteNalLength;
bool mUse32BitOffset;
bool mIsFileSizeLimitExplicitlyRequested;
bool mPaused;
bool mStarted;
- off_t mOffset;
+ off64_t mOffset;
off_t mMdatOffset;
uint8_t *mMoovBoxBuffer;
- off_t mMoovBoxBufferOffset;
+ off64_t mMoovBoxBufferOffset;
bool mWriteMoovBoxToMemory;
- off_t mFreeBoxOffset;
+ off64_t mFreeBoxOffset;
bool mStreamableFile;
- off_t mEstimatedMoovBoxSize;
+ off64_t mEstimatedMoovBoxSize;
uint32_t mInterleaveDurationUs;
int32_t mTimeScale;
int64_t mStartTimestampUs;
@@ -83,7 +84,7 @@
List<Track *> mTracks;
- List<off_t> mBoxes;
+ List<off64_t> mBoxes;
void setStartTimestampUs(int64_t timeUs);
int64_t getStartTimestampUs(); // Not const
@@ -145,10 +146,10 @@
void unlock();
// Acquire lock before calling these methods
- off_t addSample_l(MediaBuffer *buffer);
- off_t addLengthPrefixedSample_l(MediaBuffer *buffer);
+ off64_t addSample_l(MediaBuffer *buffer);
+ off64_t addLengthPrefixedSample_l(MediaBuffer *buffer);
- inline size_t write(const void *ptr, size_t size, size_t nmemb, FILE* stream);
+ inline size_t write(const void *ptr, size_t size, size_t nmemb);
bool exceedsFileSizeLimit();
bool use32BitFileOffset() const;
bool exceedsFileDurationLimit();
diff --git a/include/media/stagefright/MediaBuffer.h b/include/media/stagefright/MediaBuffer.h
index 339e6fb..c1c4f94 100644
--- a/include/media/stagefright/MediaBuffer.h
+++ b/include/media/stagefright/MediaBuffer.h
@@ -25,6 +25,7 @@
namespace android {
+class GraphicBuffer;
class MediaBuffer;
class MediaBufferObserver;
class MetaData;
@@ -48,6 +49,8 @@
MediaBuffer(size_t size);
+ MediaBuffer(const sp<GraphicBuffer>& graphicBuffer);
+
// Decrements the reference count and returns the buffer to its
// associated MediaBufferGroup if the reference count drops to 0.
void release();
@@ -63,6 +66,8 @@
void set_range(size_t offset, size_t length);
+ sp<GraphicBuffer> graphicBuffer() const;
+
sp<MetaData> meta_data();
// Clears meta data and resets the range to the full extent.
@@ -94,6 +99,7 @@
void *mData;
size_t mSize, mRangeOffset, mRangeLength;
+ sp<GraphicBuffer> mGraphicBuffer;
bool mOwnsData;
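A MediaBuffer constructed this way carries a gralloc handle instead of a malloc'ed data area. A minimal sketch of the wrap/inspect/release cycle, assuming the GraphicBuffer has already been allocated elsewhere:

#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MetaData.h>
#include <ui/GraphicBuffer.h>

using namespace android;

void wrapGraphicBuffer(const sp<GraphicBuffer> &gb) {
    // Wrap the gralloc buffer; no pixel data is copied.
    MediaBuffer *buffer = new MediaBuffer(gb);

    // Consumers that understand graphic buffers pull the handle back out.
    sp<GraphicBuffer> backing = buffer->graphicBuffer();

    // Per-frame metadata is attached exactly as for a regular MediaBuffer.
    buffer->meta_data()->setInt64(kKeyTime, 0 /* timestamp in us */);

    // Drop the reference; the buffer returns to its group (if any) or is freed.
    buffer->release();
}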
diff --git a/include/media/stagefright/MediaDefs.h b/include/media/stagefright/MediaDefs.h
index 92ce068..2d50ca5 100644
--- a/include/media/stagefright/MediaDefs.h
+++ b/include/media/stagefright/MediaDefs.h
@@ -44,6 +44,8 @@
extern const char *MEDIA_MIMETYPE_CONTAINER_MATROSKA;
extern const char *MEDIA_MIMETYPE_CONTAINER_MPEG2TS;
+extern const char *MEDIA_MIMETYPE_CONTAINER_WVM;
+
} // namespace android
#endif // MEDIA_DEFS_H_
diff --git a/include/media/stagefright/MediaSourceSplitter.h b/include/media/stagefright/MediaSourceSplitter.h
new file mode 100644
index 0000000..568f4c2
--- /dev/null
+++ b/include/media/stagefright/MediaSourceSplitter.h
@@ -0,0 +1,193 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// This class provides a way to split a single media source into multiple sources.
+// The constructor takes in the real mediaSource and createClient() can then be
+// used to create multiple sources served from this real mediaSource.
+//
+// Usage:
+// - Create MediaSourceSplitter by passing in a real mediaSource from which
+// multiple duplicate channels are needed.
+// - Create a client using createClient() and use it as any other mediaSource.
+//
+// Note that multiple clients can be created using createClient() and
+// started/stopped in any order. MediaSourceSplitter stops the real source only
+// when all clients have been stopped.
+//
+// If a new client is created/started after some existing clients have already
+// started, the new client will start receiving frames from the current time
+// onward.
+
+#ifndef MEDIA_SOURCE_SPLITTER_H_
+
+#define MEDIA_SOURCE_SPLITTER_H_
+
+#include <media/stagefright/MediaSource.h>
+#include <utils/threads.h>
+#include <utils/Vector.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+class MediaBuffer;
+class MetaData;
+
+class MediaSourceSplitter : public RefBase {
+public:
+ // Constructor
+ // mediaSource: The real mediaSource. The class keeps a reference to it to
+ // implement the various clients.
+ MediaSourceSplitter(sp<MediaSource> mediaSource);
+
+ ~MediaSourceSplitter();
+
+ // Creates a new client of base type MediaSource. Multiple clients can be
+ // created which get their data through the same real mediaSource. These
+ // clients can then be used like any other MediaSource, all of which provide
+ // data from the same real source.
+ sp<MediaSource> createClient();
+
+private:
+ // Total number of clients created through createClient().
+ int32_t mNumberOfClients;
+
+ // reference to the real MediaSource passed to the constructor.
+ sp<MediaSource> mSource;
+
+ // Stores pointer to the MediaBuffer read from the real MediaSource.
+ // All clients use this to implement the read() call.
+ MediaBuffer *mLastReadMediaBuffer;
+
+ // Status code for read from the real MediaSource. All clients return
+ // this for their read().
+ status_t mLastReadStatus;
+
+ // Boolean telling whether the real MediaSource has started.
+ bool mSourceStarted;
+
+ // List of booleans, one for each client, storing whether the corresponding
+ // client's start() has been called.
+ Vector<bool> mClientsStarted;
+
+ // Stores the number of clients which are currently started.
+ int32_t mNumberOfClientsStarted;
+
+ // Since different clients call read() asynchronously, we need to keep track
+ // of what data is currently read into the mLastReadMediaBuffer.
+ // mCurrentReadBit stores the bit for the current read buffer. This bit
+ // flips each time a new buffer is read from the source.
+ // mClientsDesiredReadBit stores the bit for the next desired read buffer
+ // for each client. This bit flips each time read() is completed for this
+ // client.
+ bool mCurrentReadBit;
+ Vector<bool> mClientsDesiredReadBit;
+
+ // Number of clients whose current read has been completed.
+ int32_t mNumberOfCurrentReads;
+
+ // Boolean telling whether the last read has been completed for all clients.
+ // The variable is reset to false each time a buffer is read from the real
+ // source.
+ bool mLastReadCompleted;
+
+ // A global mutex for access to critical sections.
+ Mutex mLock;
+
+ // Condition variable for waiting on read from source to complete.
+ Condition mReadFromSourceCondition;
+
+ // Condition variable for waiting on all client's last read to complete.
+ Condition mAllReadsCompleteCondition;
+
+ // Functions used by Client to implement the MediaSource interface.
+
+ // If the real source has not been started yet by any client, starts it.
+ status_t start(int clientId, MetaData *params);
+
+ // Stops the real source after all clients have called stop().
+ status_t stop(int clientId);
+
+ // returns the real source's getFormat().
+ sp<MetaData> getFormat(int clientId);
+
+ // If the client's desired buffer has already been read into
+ // mLastReadMediaBuffer, points the buffer to that. Otherwise, if this is the
+ // master client, reads the buffer from the source; else waits for the master
+ // client to read the buffer and uses that.
+ status_t read(int clientId,
+ MediaBuffer **buffer, const MediaSource::ReadOptions *options = NULL);
+
+ // Not implemented right now.
+ status_t pause(int clientId);
+
+ // Function which reads a buffer from the real source into
+ // mLastReadMediaBuffer
+ void readFromSource_lock(const MediaSource::ReadOptions *options);
+
+ // Waits until read from the real source has been completed.
+ // _lock means that the function should be called when the thread has already
+ // obtained the lock for the mutex mLock.
+ void waitForReadFromSource_lock(int32_t clientId);
+
+ // Waits until the current buffer in mLastReadMediaBuffer has been read by
+ // all clients (completion is tracked via mLastReadCompleted).
+ void waitForAllClientsLastRead_lock(int32_t clientId);
+
+ // Each client calls this after it completes its read(). Once all clients
+ // have called this for the current buffer, the function calls
+ // mAllReadsCompleteCondition.broadcast() to signal the waiting clients.
+ void signalReadComplete_lock(bool readAborted);
+
+ // Make these constructors private.
+ MediaSourceSplitter();
+ MediaSourceSplitter(const MediaSourceSplitter &);
+ MediaSourceSplitter &operator=(const MediaSourceSplitter &);
+
+ // This class implements the MediaSource interface. Each client stores a
+ // reference to the parent MediaSourceSplitter and uses it to complete the
+ // various calls.
+ class Client : public MediaSource {
+ public:
+ // Constructor stores a reference to the parent MediaSourceSplitter and its
+ // client id.
+ Client(sp<MediaSourceSplitter> splitter, int32_t clientId);
+
+ // MediaSource interface
+ virtual status_t start(MetaData *params = NULL);
+
+ virtual status_t stop();
+
+ virtual sp<MetaData> getFormat();
+
+ virtual status_t read(
+ MediaBuffer **buffer, const ReadOptions *options = NULL);
+
+ virtual status_t pause();
+
+ private:
+ // Reference to the parent MediaSourceSplitter
+ sp<MediaSourceSplitter> mSplitter;
+
+ // Id of this client.
+ int32_t mClientId;
+ };
+
+ friend class Client;
+};
+
+} // namespace android
+
+#endif // MEDIA_SOURCE_SPLITTER_H_
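A usage sketch for the splitter, following the steps in the header comment; the realVideoSource argument stands in for whatever MediaSource actually feeds it, and in practice each client would normally be driven from its own thread:

#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MediaSourceSplitter.h>
#include <utils/Errors.h>

using namespace android;

void splitExample(const sp<MediaSource> &realVideoSource) {
    // One splitter wraps the real source...
    sp<MediaSourceSplitter> splitter = new MediaSourceSplitter(realVideoSource);

    // ...and hands out independent MediaSource clients.
    sp<MediaSource> clientA = splitter->createClient();
    sp<MediaSource> clientB = splitter->createClient();

    clientA->start();
    clientB->start();      // the real source is only started once

    MediaBuffer *frameA = NULL;
    MediaBuffer *frameB = NULL;
    if (clientA->read(&frameA) == OK && clientB->read(&frameB) == OK) {
        // Both clients observe the same underlying frame data.
        frameA->release();
        frameB->release();
    }

    clientA->stop();
    clientB->stop();       // the real source stops after the last client stops
}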
diff --git a/include/media/stagefright/MetaData.h b/include/media/stagefright/MetaData.h
index a69d9af..5f33739 100644
--- a/include/media/stagefright/MetaData.h
+++ b/include/media/stagefright/MetaData.h
@@ -32,12 +32,17 @@
kKeyMIMEType = 'mime', // cstring
kKeyWidth = 'widt', // int32_t
kKeyHeight = 'heig', // int32_t
+
+ // a rectangle, if absent assumed to be (0, 0, width - 1, height - 1)
+ kKeyCropRect = 'crop',
+
kKeyRotation = 'rotA', // int32_t (angle in degrees)
kKeyIFramesInterval = 'ifiv', // int32_t
kKeyStride = 'strd', // int32_t
kKeySliceHeight = 'slht', // int32_t
kKeyChannelCount = '#chn', // int32_t
- kKeySampleRate = 'srte', // int32_t (also video frame rate)
+ kKeySampleRate = 'srte', // int32_t (audio sampling rate Hz)
+ kKeyFrameRate = 'frmR', // int32_t (video frame rate fps)
kKeyBitRate = 'brte', // int32_t (bps)
kKeyESDS = 'esds', // raw data
kKeyAVCC = 'avcc', // raw data
@@ -93,7 +98,6 @@
// Track authoring progress status
// kKeyTrackTimeStatus is used to track progress in elapsed time
kKeyTrackTimeStatus = 'tktm', // int64_t
- kKeyRotationDegree = 'rdge', // int32_t (clockwise, in degree)
kKeyNotRealTime = 'ntrt', // bool (int32_t)
@@ -103,6 +107,9 @@
kKeyValidSamples = 'valD', // int32_t
kKeyIsUnreadable = 'unre', // bool (int32_t)
+
+ // An indication that a video buffer has been rendered.
+ kKeyRendered = 'rend', // bool (int32_t)
};
enum {
@@ -122,6 +129,7 @@
TYPE_INT64 = 'in64',
TYPE_FLOAT = 'floa',
TYPE_POINTER = 'ptr ',
+ TYPE_RECT = 'rect',
};
void clear();
@@ -133,12 +141,22 @@
bool setFloat(uint32_t key, float value);
bool setPointer(uint32_t key, void *value);
+ bool setRect(
+ uint32_t key,
+ int32_t left, int32_t top,
+ int32_t right, int32_t bottom);
+
bool findCString(uint32_t key, const char **value);
bool findInt32(uint32_t key, int32_t *value);
bool findInt64(uint32_t key, int64_t *value);
bool findFloat(uint32_t key, float *value);
bool findPointer(uint32_t key, void **value);
+ bool findRect(
+ uint32_t key,
+ int32_t *left, int32_t *top,
+ int32_t *right, int32_t *bottom);
+
bool setData(uint32_t key, uint32_t type, const void *data, size_t size);
bool findData(uint32_t key, uint32_t *type,
@@ -184,6 +202,10 @@
}
};
+ struct Rect {
+ int32_t mLeft, mTop, mRight, mBottom;
+ };
+
KeyedVector<uint32_t, typed_data> mItems;
// MetaData &operator=(const MetaData &);
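The new rect support is used like the other typed accessors. A small sketch of attaching a crop rectangle to a video output format and reading it back, with the 16-pixel border chosen arbitrarily for illustration:

#include <media/stagefright/MetaData.h>

using namespace android;

void cropRectExample() {
    sp<MetaData> format = new MetaData;
    format->setInt32(kKeyWidth, 1280);
    format->setInt32(kKeyHeight, 720);

    // Crop away a 16-pixel border; stored as TYPE_RECT under kKeyCropRect.
    format->setRect(kKeyCropRect, 16, 16, 1280 - 17, 720 - 17);

    int32_t left, top, right, bottom;
    if (!format->findRect(kKeyCropRect, &left, &top, &right, &bottom)) {
        // Per the comment on kKeyCropRect, an absent rect means the full frame:
        // (0, 0, width - 1, height - 1).
        left = top = 0;
        format->findInt32(kKeyWidth, &right);
        format->findInt32(kKeyHeight, &bottom);
        --right;
        --bottom;
    }
}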
diff --git a/include/media/stagefright/OMXCodec.h b/include/media/stagefright/OMXCodec.h
index fed6761..0aba347 100644
--- a/include/media/stagefright/OMXCodec.h
+++ b/include/media/stagefright/OMXCodec.h
@@ -18,6 +18,7 @@
#define OMX_CODEC_H_
+#include <android/native_window.h>
#include <media/IOMX.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaSource.h>
@@ -38,13 +39,22 @@
// The client wants to access the output buffer's video
// data for example for thumbnail extraction.
kClientNeedsFramebuffer = 4,
+
+ // Request for software or hardware codecs. If request
+ // can not be fullfilled, Create() returns NULL.
+ kSoftwareCodecsOnly = 8,
+ kHardwareCodecsOnly = 16,
+
+ // Store meta data in video buffers
+ kStoreMetaDataInVideoBuffers = 32,
};
static sp<MediaSource> Create(
const sp<IOMX> &omx,
const sp<MetaData> &meta, bool createEncoder,
const sp<MediaSource> &source,
const char *matchComponentName = NULL,
- uint32_t flags = 0);
+ uint32_t flags = 0,
+ const sp<ANativeWindow> &nativeWindow = NULL);
static void setComponentRole(
const sp<IOMX> &omx, IOMX::node_id node, bool isEncoder,
@@ -109,12 +119,12 @@
kAvoidMemcopyInputRecordingFrames = 2048,
kRequiresLargerEncoderOutputBuffer = 4096,
kOutputBuffersAreUnreadable = 8192,
- kStoreMetaDataInInputVideoBuffers = 16384,
};
struct BufferInfo {
IOMX::buffer_id mBuffer;
bool mOwnedByComponent;
+ bool mOwnedByNativeWindow;
sp<IMemory> mMem;
size_t mSize;
void *mData;
@@ -160,13 +170,23 @@
bool mPaused;
+ sp<ANativeWindow> mNativeWindow;
+
+ // The index in each of the mPortBuffers arrays of the buffer that will be
+ // submitted to OMX next. This only applies when using buffers from a
+ // native window.
+ size_t mNextNativeBufferIndex[2];
+
// A list of indices into mPortStatus[kPortIndexOutput] filled with data.
List<size_t> mFilledBuffers;
Condition mBufferFilled;
+ bool mIsMetaDataStoredInVideoBuffers;
+
OMXCodec(const sp<IOMX> &omx, IOMX::node_id node, uint32_t quirks,
bool isEncoder, const char *mime, const char *componentName,
- const sp<MediaSource> &source);
+ const sp<MediaSource> &source,
+ const sp<ANativeWindow> &nativeWindow);
void addCodecSpecificData(const void *data, size_t size);
void clearCodecSpecificData();
@@ -217,10 +237,17 @@
status_t allocateBuffers();
status_t allocateBuffersOnPort(OMX_U32 portIndex);
+ status_t allocateOutputBuffersFromNativeWindow();
+
+ status_t queueBufferToNativeWindow(BufferInfo *info);
+ status_t cancelBufferToNativeWindow(BufferInfo *info);
+ BufferInfo* dequeueBufferFromNativeWindow();
status_t freeBuffersOnPort(
OMX_U32 portIndex, bool onlyThoseWeOwn = false);
+ status_t freeBuffer(OMX_U32 portIndex, size_t bufIndex);
+
void drainInputBuffer(IOMX::buffer_id buffer);
void fillOutputBuffer(IOMX::buffer_id buffer);
void drainInputBuffer(BufferInfo *info);
@@ -251,6 +278,7 @@
status_t init();
void initOutputFormat(const sp<MetaData> &inputFormat);
+ status_t initNativeWindow();
void dumpPortStatus(OMX_U32 portIndex);
@@ -265,6 +293,8 @@
uint32_t flags,
Vector<String8> *matchingCodecs);
+ void restorePatchedDataPointer(BufferInfo *info);
+
OMXCodec(const OMXCodec &);
OMXCodec &operator=(const OMXCodec &);
};
@@ -277,6 +307,7 @@
struct CodecCapabilities {
String8 mComponentName;
Vector<CodecProfileLevel> mProfileLevels;
+ Vector<OMX_U32> mColorFormats;
};
// Return a vector of componentNames with supported profile/level pairs
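A sketch of how clients might use the new Create() options: a hardware-only encoder that reads metadata (buffer handles) from the camera, and a decoder whose output buffers come from a native window. The IOMX handle, formats and sources are placeholders for whatever the caller already has:

#include <android/native_window.h>
#include <media/IOMX.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXCodec.h>

using namespace android;

sp<MediaSource> createVideoEncoder(
        const sp<IOMX> &omx,
        const sp<MetaData> &encoderFormat,
        const sp<MediaSource> &cameraSource) {
    // Returns NULL if no hardware encoder can satisfy the request.
    return OMXCodec::Create(
            omx, encoderFormat, true /* createEncoder */, cameraSource,
            NULL /* matchComponentName */,
            OMXCodec::kHardwareCodecsOnly
                | OMXCodec::kStoreMetaDataInVideoBuffers);
}

sp<MediaSource> createVideoDecoder(
        const sp<IOMX> &omx,
        const sp<MediaSource> &videoTrack,
        const sp<ANativeWindow> &nativeWindow) {
    // Output buffers are allocated from the native window so decoded frames
    // can be queued for display without an extra copy.
    return OMXCodec::Create(
            omx, videoTrack->getFormat(), false /* createEncoder */, videoTrack,
            NULL /* matchComponentName */, 0 /* flags */, nativeWindow);
}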
diff --git a/include/media/stagefright/VideoRenderer.h b/include/media/stagefright/VideoRenderer.h
deleted file mode 100644
index f80b277..0000000
--- a/include/media/stagefright/VideoRenderer.h
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef VIDEO_RENDERER_H_
-
-#define VIDEO_RENDERER_H_
-
-#include <sys/types.h>
-
-namespace android {
-
-class VideoRenderer {
-public:
- virtual ~VideoRenderer() {}
-
- virtual void render(
- const void *data, size_t size, void *platformPrivate) = 0;
-
-protected:
- VideoRenderer() {}
-
- VideoRenderer(const VideoRenderer &);
- VideoRenderer &operator=(const VideoRenderer &);
-};
-
-} // namespace android
-
-#endif // VIDEO_RENDERER_H_
diff --git a/include/media/stagefright/VideoSourceDownSampler.h b/include/media/stagefright/VideoSourceDownSampler.h
new file mode 100644
index 0000000..439918c
--- /dev/null
+++ b/include/media/stagefright/VideoSourceDownSampler.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// VideoSourceDownSampler implements the MediaSource interface,
+// downsampling frames provided from a real video source.
+
+#ifndef VIDEO_SOURCE_DOWN_SAMPLER_H_
+
+#define VIDEO_SOURCE_DOWN_SAMPLER_H_
+
+#include <media/stagefright/MediaSource.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+class IMemory;
+class MediaBuffer;
+class MetaData;
+
+class VideoSourceDownSampler : public MediaSource {
+public:
+ virtual ~VideoSourceDownSampler();
+
+ // Constructor:
+ // videoSource: The real video source which provides the original frames.
+ // width, height: The desired width, height. These should be less than or equal
+ // to those of the real video source. We then downsample the original frames to
+ // this size.
+ VideoSourceDownSampler(const sp<MediaSource> &videoSource,
+ int32_t width, int32_t height);
+
+ // MediaSource interface
+ virtual status_t start(MetaData *params = NULL);
+
+ virtual status_t stop();
+
+ virtual sp<MetaData> getFormat();
+
+ virtual status_t read(
+ MediaBuffer **buffer, const ReadOptions *options = NULL);
+
+ virtual status_t pause();
+
+private:
+ // Reference to the real video source.
+ sp<MediaSource> mRealVideoSource;
+
+ // Size of frames to be provided by this source.
+ int32_t mWidth;
+ int32_t mHeight;
+
+ // Size of frames provided by the real source.
+ int32_t mRealSourceWidth;
+ int32_t mRealSourceHeight;
+
+ // Down sampling parameters.
+ int32_t mDownSampleOffsetX;
+ int32_t mDownSampleOffsetY;
+ int32_t mDownSampleSkipX;
+ int32_t mDownSampleSkipY;
+
+ // True if the original frames need to be downsampled to the requested size.
+ bool mNeedDownSampling;
+
+ // Meta data. This is a copy of the real source except for the width and
+ // height parameters.
+ sp<MetaData> mMeta;
+
+ // Computes the offset, skip parameters for downsampling the original frame
+ // to the desired size.
+ void computeDownSamplingParameters();
+
+ // Downsamples the frame in sourceBuffer to size (mWidth x mHeight). A new
+ // buffer is created which stores the downsampled image.
+ void downSampleYUVImage(const MediaBuffer &sourceBuffer, MediaBuffer **buffer) const;
+
+ // Disallow these.
+ VideoSourceDownSampler(const VideoSourceDownSampler &);
+ VideoSourceDownSampler &operator=(const VideoSourceDownSampler &);
+};
+
+} // namespace android
+
+#endif // VIDEO_SOURCE_DOWN_SAMPLER_H_
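For example, wrapping a camera-sized source so an encoder only ever sees smaller frames; the 176x144 target is just an illustrative value:

#include <media/stagefright/MediaSource.h>
#include <media/stagefright/VideoSourceDownSampler.h>

using namespace android;

sp<MediaSource> makeQcifSource(const sp<MediaSource> &cameraSource) {
    // Frames read from the returned source are downsampled copies of the
    // camera frames; the requested size must not exceed the camera's size.
    return new VideoSourceDownSampler(cameraSource, 176, 144);
}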
diff --git a/include/media/stagefright/YUVCanvas.h b/include/media/stagefright/YUVCanvas.h
new file mode 100644
index 0000000..ff70923
--- /dev/null
+++ b/include/media/stagefright/YUVCanvas.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// YUVCanvas holds a reference to a YUVImage on which it can do various
+// drawing operations. It provides various utility functions for filling,
+// cropping, etc.
+
+
+#ifndef YUV_CANVAS_H_
+
+#define YUV_CANVAS_H_
+
+#include <stdint.h>
+
+namespace android {
+
+class YUVImage;
+class Rect;
+
+class YUVCanvas {
+public:
+
+ // Constructor takes in a reference to a yuvImage on which it can do
+ // various drawing operations.
+ YUVCanvas(YUVImage &yuvImage);
+ ~YUVCanvas();
+
+ // Fills the entire image with the given YUV values.
+ void FillYUV(uint8_t yValue, uint8_t uValue, uint8_t vValue);
+
+ // Fills the rectangular region [startX,endX]x[startY,endY] with the given YUV values.
+ void FillYUVRectangle(const Rect& rect,
+ uint8_t yValue, uint8_t uValue, uint8_t vValue);
+
+ // Copies the region [startX,endX]x[startY,endY] from srcImage into the
+ // canvas' target image (mYUVImage) starting at
+ // (destinationStartX,destinationStartY).
+ // Note that undefined behavior may occur if srcImage is the same as the canvas'
+ // target image.
+ void CopyImageRect(
+ const Rect& srcRect,
+ int32_t destStartX, int32_t destStartY,
+ const YUVImage &srcImage);
+
+ // Downsamples the srcImage into the canvas' target image (mYUVImage)
+ // The downsampling copies pixels from the source image starting at
+ // (srcOffsetX, srcOffsetY) to the target image, starting at (0, 0).
+ // For each X increment in the target image, skipX pixels are skipped
+ // in the source image.
+ // Similarly for each Y increment in the target image, skipY pixels
+ // are skipped in the source image.
+ void downsample(
+ int32_t srcOffsetX, int32_t srcOffsetY,
+ int32_t skipX, int32_t skipY,
+ const YUVImage &srcImage);
+
+private:
+ YUVImage& mYUVImage;
+
+ YUVCanvas(const YUVCanvas &);
+ YUVCanvas &operator=(const YUVCanvas &);
+};
+
+} // namespace android
+
+#endif // YUV_CANVAS_H_
diff --git a/include/media/stagefright/YUVImage.h b/include/media/stagefright/YUVImage.h
new file mode 100644
index 0000000..4e98618
--- /dev/null
+++ b/include/media/stagefright/YUVImage.h
@@ -0,0 +1,178 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// A container class to hold YUV data and provide various utilities,
+// e.g. to set/get pixel values.
+// Supported formats:
+// - YUV420 Planar
+// - YUV420 Semi Planar
+//
+// Currently does not support variable strides.
+//
+// Implementation: Two simple abstractions are done to simplify access
+// to YUV channels for different formats:
+// - initializeYUVPointers() sets up pointers (mYdata, mUdata, mVdata) to
+// point to the right start locations of the different channel data depending
+// on the format.
+// - getOffsets() returns the correct offset for the different channels
+// depending on the format.
+// Location of any pixel's YUV channels can then be easily computed using these.
+//
+
+#ifndef YUV_IMAGE_H_
+
+#define YUV_IMAGE_H_
+
+#include <stdint.h>
+#include <cstring>
+
+namespace android {
+
+class Rect;
+
+class YUVImage {
+public:
+ // Supported YUV formats
+ enum YUVFormat {
+ YUV420Planar,
+ YUV420SemiPlanar
+ };
+
+ // Constructs an image with the given size, format. Also allocates and owns
+ // the required memory.
+ YUVImage(YUVFormat yuvFormat, int32_t width, int32_t height);
+
+ // Constructs an image with the given size, format. The memory is provided
+ // by the caller and we don't own it.
+ YUVImage(YUVFormat yuvFormat, int32_t width, int32_t height, uint8_t *buffer);
+
+ // Destructor to delete the memory if it owns it.
+ ~YUVImage();
+
+ // Returns the size of the buffer required to store the YUV data for the given
+ // format and geometry. Useful when the caller wants to allocate the requisite
+ // memory.
+ static size_t bufferSize(YUVFormat yuvFormat, int32_t width, int32_t height);
+
+ int32_t width() const {return mWidth;}
+ int32_t height() const {return mHeight;}
+
+ // Returns true if the pixel is in the range [0, width-1] x [0, height-1]
+ // and false otherwise.
+ bool validPixel(int32_t x, int32_t y) const;
+
+ // Get the pixel YUV value at pixel (x,y).
+ // Note that the range of x is [0, width-1] and the range of y is [0, height-1].
+ // Returns true if get was successful and false otherwise.
+ bool getPixelValue(int32_t x, int32_t y,
+ uint8_t *yPtr, uint8_t *uPtr, uint8_t *vPtr) const;
+
+ // Set the pixel YUV value at pixel (x,y).
+ // Note that the range of x is [0, width-1] and the range of y is [0, height-1].
+ // Returns true if set was successful and false otherwise.
+ bool setPixelValue(int32_t x, int32_t y,
+ uint8_t yValue, uint8_t uValue, uint8_t vValue);
+
+ // Uses memcpy to copy an entire row of data
+ static void fastCopyRectangle420Planar(
+ const Rect& srcRect,
+ int32_t destStartX, int32_t destStartY,
+ const YUVImage &srcImage, YUVImage &destImage);
+
+ // Uses memcpy to copy an entire row of data
+ static void fastCopyRectangle420SemiPlanar(
+ const Rect& srcRect,
+ int32_t destStartX, int32_t destStartY,
+ const YUVImage &srcImage, YUVImage &destImage);
+
+ // Tries to use memcpy to copy entire rows of data.
+ // Returns false if fast copy is not possible for the passed image formats.
+ static bool fastCopyRectangle(
+ const Rect& srcRect,
+ int32_t destStartX, int32_t destStartY,
+ const YUVImage &srcImage, YUVImage &destImage);
+
+ // Convert the given YUV value to RGB.
+ void yuv2rgb(uint8_t yValue, uint8_t uValue, uint8_t vValue,
+ uint8_t *r, uint8_t *g, uint8_t *b) const;
+
+ // Write the image to a human readable PPM file.
+ // Returns true if the write was successful and false otherwise.
+ bool writeToPPM(const char *filename) const;
+
+private:
+ // YUV Format of the image.
+ YUVFormat mYUVFormat;
+
+ int32_t mWidth;
+ int32_t mHeight;
+
+ // Pointer to the memory buffer.
+ uint8_t *mBuffer;
+
+ // Boolean telling whether we own the memory buffer.
+ bool mOwnBuffer;
+
+ // Pointer to start of the Y data plane.
+ uint8_t *mYdata;
+
+ // Pointer to start of the U data plane. Note that in case of interleaved formats like
+ // YUV420 semiplanar, mUdata points to the start of the U data in the UV plane.
+ uint8_t *mUdata;
+
+ // Pointer to start of the V data plane. Note that in case of interleaved formats like
+ // YUV420 semiplanar, mVdata points to the start of the V data in the UV plane.
+ uint8_t *mVdata;
+
+ // Initialize the pointers mYdata, mUdata, mVdata to point to the right locations for
+ // the given format and geometry.
+ // Returns true if initialization was successful and false otherwise.
+ bool initializeYUVPointers();
+
+ // For the given pixel location, this returns the offset of the location of y, u and v
+ // data from the corresponding base pointers -- mYdata, mUdata, mVdata.
+ // Note that the range of x is [0, width-1] and the range of y is [0, height-1].
+ // Returns true if getting offsets was successful and false otherwise.
+ bool getOffsets(int32_t x, int32_t y,
+ int32_t *yOffset, int32_t *uOffset, int32_t *vOffset) const;
+
+ // Returns the offset increments incurred in going from one data row to the next data row
+ // for the YUV channels. Note that this corresponds to data rows and not pixel rows.
+ // E.g. depending on formats, U/V channels may have only one data row corresponding
+ // to two pixel rows.
+ bool getOffsetIncrementsPerDataRow(
+ int32_t *yDataOffsetIncrement,
+ int32_t *uDataOffsetIncrement,
+ int32_t *vDataOffsetIncrement) const;
+
+ // Given the offset return the address of the corresponding channel's data.
+ uint8_t* getYAddress(int32_t offset) const;
+ uint8_t* getUAddress(int32_t offset) const;
+ uint8_t* getVAddress(int32_t offset) const;
+
+ // Given the pixel location, returns the address of the corresponding channel's data.
+ // Note that the range of x is [0, width-1] and the range of y is [0, height-1].
+ bool getYUVAddresses(int32_t x, int32_t y,
+ uint8_t **yAddr, uint8_t **uAddr, uint8_t **vAddr) const;
+
+ // Disallow implicit casting and copying.
+ YUVImage(const YUVImage &);
+ YUVImage &operator=(const YUVImage &);
+};
+
+} // namespace android
+
+#endif // YUV_IMAGE_H_
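A combined sketch of the two new classes: allocate a YUV420 semi-planar target, paint it through a canvas, and downsample a larger frame into it. The source frame and the PPM path are placeholders:

#include <media/stagefright/YUVCanvas.h>
#include <media/stagefright/YUVImage.h>

using namespace android;

void downsampleExample(const YUVImage &cameraFrame /* e.g. 640x480 YUV420SemiPlanar */) {
    // The target image allocates and owns its own buffer; use bufferSize()
    // plus the buffer-taking constructor for caller-owned memory instead.
    YUVImage small(YUVImage::YUV420SemiPlanar, 320, 240);

    YUVCanvas canvas(small);
    canvas.FillYUV(16, 128, 128);   // start from a uniform near-black frame

    // Copy every second source pixel in x and y, starting at the top-left.
    canvas.downsample(0 /* srcOffsetX */, 0 /* srcOffsetY */,
                      2 /* skipX */, 2 /* skipY */, cameraFrame);

    // Dump the result for visual inspection.
    small.writeToPPM("/sdcard/downsampled.ppm");
}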
diff --git a/include/private/surfaceflinger/SharedBufferStack.h b/include/private/surfaceflinger/SharedBufferStack.h
index d6ae5e9..9d589cf 100644
--- a/include/private/surfaceflinger/SharedBufferStack.h
+++ b/include/private/surfaceflinger/SharedBufferStack.h
@@ -285,6 +285,8 @@
uint32_t getTransform(int buffer) const;
status_t resize(int newNumBuffers);
+ status_t grow(int newNumBuffers);
+ status_t shrink(int newNumBuffers);
SharedBufferStack::Statistics getStats() const;
@@ -346,6 +348,14 @@
int mNumBuffers;
BufferList mBufferList;
+ struct BuffersAvailableCondition : public ConditionBase {
+ int mNumBuffers;
+ inline BuffersAvailableCondition(SharedBufferServer* sbs,
+ int numBuffers);
+ inline bool operator()() const;
+ inline const char* name() const { return "BuffersAvailableCondition"; }
+ };
+
struct UnlockUpdate : public UpdateBase {
const int lockedBuffer;
inline UnlockUpdate(SharedBufferBase* sbb, int lockedBuffer);
diff --git a/media/libeffects/visualizer/Android.mk b/media/libeffects/visualizer/Android.mk
index 48b45ff..e6ff654 100644
--- a/media/libeffects/visualizer/Android.mk
+++ b/media/libeffects/visualizer/Android.mk
@@ -27,4 +27,4 @@
LOCAL_PRELINK_MODULE := false
-include $(BUILD_SHARED_LIBRARY)
\ No newline at end of file
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk
index 2e5cbe3..731c09d 100644
--- a/media/libmedia/Android.mk
+++ b/media/libmedia/Android.mk
@@ -15,6 +15,7 @@
IMediaRecorderClient.cpp \
IMediaPlayer.cpp \
IMediaRecorder.cpp \
+ IStreamSource.cpp \
Metadata.cpp \
mediarecorder.cpp \
IMediaMetadataRetriever.cpp \
diff --git a/media/libmedia/AudioEffect.cpp b/media/libmedia/AudioEffect.cpp
index 88b8c86..aadeba5 100644
--- a/media/libmedia/AudioEffect.cpp
+++ b/media/libmedia/AudioEffect.cpp
@@ -27,7 +27,6 @@
#include <media/AudioEffect.h>
#include <utils/Log.h>
-#include <cutils/atomic.h>
#include <binder/IPCThreadState.h>
@@ -207,18 +206,22 @@
return INVALID_OPERATION;
}
- if (enabled) {
- LOGV("enable %p", this);
- if (android_atomic_or(1, &mEnabled) == 0) {
- return mIEffect->enable();
+ status_t status = NO_ERROR;
+
+ AutoMutex lock(mLock);
+ if (enabled != mEnabled) {
+ if (enabled) {
+ LOGV("enable %p", this);
+ status = mIEffect->enable();
+ } else {
+ LOGV("disable %p", this);
+ status = mIEffect->disable();
}
- } else {
- LOGV("disable %p", this);
- if (android_atomic_and(~1, &mEnabled) == 1) {
- return mIEffect->disable();
+ if (status == NO_ERROR) {
+ mEnabled = enabled;
}
}
- return NO_ERROR;
+ return status;
}
status_t AudioEffect::command(uint32_t cmdCode,
@@ -232,26 +235,26 @@
return INVALID_OPERATION;
}
- if ((cmdCode == EFFECT_CMD_ENABLE || cmdCode == EFFECT_CMD_DISABLE) &&
- (replySize == NULL || *replySize != sizeof(status_t) || replyData == NULL)) {
- return BAD_VALUE;
+ if (cmdCode == EFFECT_CMD_ENABLE || cmdCode == EFFECT_CMD_DISABLE) {
+ if (mEnabled == (cmdCode == EFFECT_CMD_ENABLE)) {
+ return NO_ERROR;
+ }
+ if (replySize == NULL || *replySize != sizeof(status_t) || replyData == NULL) {
+ return BAD_VALUE;
+ }
+ mLock.lock();
}
status_t status = mIEffect->command(cmdCode, cmdSize, cmdData, replySize, replyData);
- if (status != NO_ERROR) {
- return status;
- }
if (cmdCode == EFFECT_CMD_ENABLE || cmdCode == EFFECT_CMD_DISABLE) {
- status = *(status_t *)replyData;
- if (status != NO_ERROR) {
- return status;
+ if (status == NO_ERROR) {
+ status = *(status_t *)replyData;
}
- if (cmdCode == EFFECT_CMD_ENABLE) {
- android_atomic_or(1, &mEnabled);
- } else {
- android_atomic_and(~1, &mEnabled);
+ if (status == NO_ERROR) {
+ mEnabled = (cmdCode == EFFECT_CMD_ENABLE);
}
+ mLock.unlock();
}
return status;
@@ -370,11 +373,7 @@
{
LOGV("enableStatusChanged %p enabled %d mCbf %p", this, enabled, mCbf);
if (mStatus == ALREADY_EXISTS) {
- if (enabled) {
- android_atomic_or(1, &mEnabled);
- } else {
- android_atomic_and(~1, &mEnabled);
- }
+ mEnabled = enabled;
if (mCbf) {
mCbf(EVENT_ENABLE_STATUS_CHANGED, mUserData, &enabled);
}
diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp
index a6c515c..1d6ffa0 100644
--- a/media/libmedia/AudioRecord.cpp
+++ b/media/libmedia/AudioRecord.cpp
@@ -35,7 +35,6 @@
#include <binder/Parcel.h>
#include <binder/IPCThreadState.h>
#include <utils/Timers.h>
-#include <cutils/atomic.h>
#define LIKELY( exp ) (__builtin_expect( (exp) != 0, true ))
#define UNLIKELY( exp ) (__builtin_expect( (exp) != 0, false ))
@@ -282,7 +281,9 @@
t->mLock.lock();
}
- if (android_atomic_or(1, &mActive) == 0) {
+ AutoMutex lock(mLock);
+ if (mActive == 0) {
+ mActive = 1;
ret = mAudioRecord->start();
if (ret == DEAD_OBJECT) {
LOGV("start() dead IAudioRecord: creating a new one");
@@ -302,8 +303,7 @@
setpriority(PRIO_PROCESS, 0, THREAD_PRIORITY_AUDIO_CLIENT);
}
} else {
- LOGV("start() failed");
- android_atomic_and(~1, &mActive);
+ mActive = 0;
}
}
@@ -322,9 +322,11 @@
if (t != 0) {
t->mLock.lock();
- }
+ }
- if (android_atomic_and(~1, &mActive) == 1) {
+ AutoMutex lock(mLock);
+ if (mActive == 1) {
+ mActive = 0;
mCblk->cv.signal();
mAudioRecord->stop();
// the record head position will reset to 0, so if a marker is set, we need
diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp
index 7e3b743..1a3fcd6 100644
--- a/media/libmedia/AudioSystem.cpp
+++ b/media/libmedia/AudioSystem.cpp
@@ -763,7 +763,8 @@
if ((popCount(device) == 1 ) &&
(device & (AudioSystem::DEVICE_OUT_BLUETOOTH_SCO |
AudioSystem::DEVICE_OUT_BLUETOOTH_SCO_HEADSET |
- AudioSystem::DEVICE_OUT_BLUETOOTH_SCO_CARKIT))) {
+ AudioSystem::DEVICE_OUT_BLUETOOTH_SCO_CARKIT |
+ AudioSystem::DEVICE_IN_BLUETOOTH_SCO_HEADSET))) {
return true;
} else {
return false;
@@ -834,6 +835,7 @@
const char *AudioParameter::keyFormat = "format";
const char *AudioParameter::keyChannels = "channels";
const char *AudioParameter::keyFrameCount = "frame_count";
+const char *AudioParameter::keyInputSource = "input_source";
AudioParameter::AudioParameter(const String8& keyValuePairs)
{
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index 587c8ff..c1bed59 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -35,7 +35,6 @@
#include <binder/Parcel.h>
#include <binder/IPCThreadState.h>
#include <utils/Timers.h>
-#include <cutils/atomic.h>
#define LIKELY( exp ) (__builtin_expect( (exp) != 0, true ))
#define UNLIKELY( exp ) (__builtin_expect( (exp) != 0, false ))
@@ -312,7 +311,9 @@
t->mLock.lock();
}
- if (android_atomic_or(1, &mActive) == 0) {
+ AutoMutex lock(mLock);
+ if (mActive == 0) {
+ mActive = 1;
mNewPosition = mCblk->server + mUpdatePeriod;
mCblk->bufferTimeoutMs = MAX_STARTUP_TIMEOUT_MS;
mCblk->waitTimeMs = 0;
@@ -344,7 +345,7 @@
}
if (status != NO_ERROR) {
LOGV("start() failed");
- android_atomic_and(~1, &mActive);
+ mActive = 0;
if (t != 0) {
t->requestExit();
} else {
@@ -367,7 +368,9 @@
t->mLock.lock();
}
- if (android_atomic_and(~1, &mActive) == 1) {
+ AutoMutex lock(mLock);
+ if (mActive == 1) {
+ mActive = 0;
mCblk->cv.signal();
mAudioTrack->stop();
// Cancel loops (If we are in the middle of a loop, playback
@@ -407,7 +410,6 @@
mMarkerReached = false;
mUpdatePeriod = 0;
-
if (!mActive) {
mAudioTrack->flush();
// Release AudioTrack callback thread in case it was waiting for new buffers
@@ -419,7 +421,9 @@
void AudioTrack::pause()
{
LOGV("pause");
- if (android_atomic_and(~1, &mActive) == 1) {
+ AutoMutex lock(mLock);
+ if (mActive == 1) {
+ mActive = 0;
mAudioTrack->pause();
}
}
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index 0f55b19..c287c0a 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -22,6 +22,7 @@
#include <media/IMediaPlayer.h>
#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/Surface.h>
namespace android {
@@ -43,8 +44,6 @@
INVOKE,
SET_METADATA_FILTER,
GET_METADATA,
- SUSPEND,
- RESUME,
SET_AUX_EFFECT_SEND_LEVEL,
ATTACH_AUX_EFFECT
};
@@ -65,11 +64,11 @@
remote()->transact(DISCONNECT, data, &reply);
}
- status_t setVideoSurface(const sp<ISurface>& surface)
+ status_t setVideoSurface(const sp<Surface>& surface)
{
Parcel data, reply;
data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
- data.writeStrongBinder(surface->asBinder());
+ Surface::writeToParcel(surface, &data);
remote()->transact(SET_VIDEO_SURFACE, data, &reply);
return reply.readInt32();
}
@@ -204,26 +203,6 @@
return reply->readInt32();
}
- status_t suspend() {
- Parcel request;
- request.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
-
- Parcel reply;
- remote()->transact(SUSPEND, request, &reply);
-
- return reply.readInt32();
- }
-
- status_t resume() {
- Parcel request;
- request.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
-
- Parcel reply;
- remote()->transact(RESUME, request, &reply);
-
- return reply.readInt32();
- }
-
status_t setAuxEffectSendLevel(float level)
{
Parcel data, reply;
@@ -258,7 +237,7 @@
} break;
case SET_VIDEO_SURFACE: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
- sp<ISurface> surface = interface_cast<ISurface>(data.readStrongBinder());
+ sp<Surface> surface = Surface::readFromParcel(data);
reply->writeInt32(setVideoSurface(surface));
return NO_ERROR;
} break;
@@ -341,16 +320,6 @@
reply->writeInt32(setMetadataFilter(data));
return NO_ERROR;
} break;
- case SUSPEND: {
- CHECK_INTERFACE(IMediaPlayer, data, reply);
- reply->writeInt32(suspend());
- return NO_ERROR;
- } break;
- case RESUME: {
- CHECK_INTERFACE(IMediaPlayer, data, reply);
- reply->writeInt32(resume());
- return NO_ERROR;
- } break;
case GET_METADATA: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
const status_t retcode = getMetadata(data.readInt32(), data.readInt32(), reply);
diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp
index 4abfa75..77199e1 100644
--- a/media/libmedia/IMediaPlayerService.cpp
+++ b/media/libmedia/IMediaPlayerService.cpp
@@ -23,6 +23,7 @@
#include <media/IMediaPlayerService.h>
#include <media/IMediaRecorder.h>
#include <media/IOMX.h>
+#include <media/IStreamSource.h>
#include <utils/Errors.h> // for status_t
@@ -31,6 +32,7 @@
enum {
CREATE_URL = IBinder::FIRST_CALL_TRANSACTION,
CREATE_FD,
+ CREATE_STREAM,
DECODE_URL,
DECODE_FD,
CREATE_MEDIA_RECORDER,
@@ -107,6 +109,21 @@
return interface_cast<IMediaPlayer>(reply.readStrongBinder());;
}
+ virtual sp<IMediaPlayer> create(
+ pid_t pid, const sp<IMediaPlayerClient> &client,
+ const sp<IStreamSource> &source, int audioSessionId) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
+ data.writeInt32(static_cast<int32_t>(pid));
+ data.writeStrongBinder(client->asBinder());
+ data.writeStrongBinder(source->asBinder());
+ data.writeInt32(static_cast<int32_t>(audioSessionId));
+
+ remote()->transact(CREATE_STREAM, data, &reply);
+
+ return interface_cast<IMediaPlayer>(reply.readStrongBinder());;
+ }
+
virtual sp<IMemory> decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, int* pFormat)
{
Parcel data, reply;
@@ -184,6 +201,27 @@
reply->writeStrongBinder(player->asBinder());
return NO_ERROR;
} break;
+ case CREATE_STREAM:
+ {
+ CHECK_INTERFACE(IMediaPlayerService, data, reply);
+
+ pid_t pid = static_cast<pid_t>(data.readInt32());
+
+ sp<IMediaPlayerClient> client =
+ interface_cast<IMediaPlayerClient>(data.readStrongBinder());
+
+ sp<IStreamSource> source =
+ interface_cast<IStreamSource>(data.readStrongBinder());
+
+ int audioSessionId = static_cast<int>(data.readInt32());
+
+ sp<IMediaPlayer> player =
+ create(pid, client, source, audioSessionId);
+
+ reply->writeStrongBinder(player->asBinder());
+ return OK;
+ break;
+ }
case DECODE_URL: {
CHECK_INTERFACE(IMediaPlayerService, data, reply);
const char* url = data.readCString();
diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp
index 947ff34..59cd1b7 100644
--- a/media/libmedia/IMediaRecorder.cpp
+++ b/media/libmedia/IMediaRecorder.cpp
@@ -19,7 +19,7 @@
#define LOG_TAG "IMediaRecorder"
#include <utils/Log.h>
#include <binder/Parcel.h>
-#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/Surface.h>
#include <camera/ICamera.h>
#include <media/IMediaRecorderClient.h>
#include <media/IMediaRecorder.h>
@@ -43,6 +43,7 @@
SET_AUDIO_ENCODER,
SET_OUTPUT_FILE_PATH,
SET_OUTPUT_FILE_FD,
+ SET_OUTPUT_FILE_AUXILIARY_FD,
SET_VIDEO_SIZE,
SET_VIDEO_FRAMERATE,
SET_PARAMETERS,
@@ -69,12 +70,12 @@
return reply.readInt32();
}
- status_t setPreviewSurface(const sp<ISurface>& surface)
+ status_t setPreviewSurface(const sp<Surface>& surface)
{
LOGV("setPreviewSurface(%p)", surface.get());
Parcel data, reply;
data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
- data.writeStrongBinder(surface->asBinder());
+ Surface::writeToParcel(surface, &data);
remote()->transact(SET_PREVIEW_SURFACE, data, &reply);
return reply.readInt32();
}
@@ -159,6 +160,15 @@
return reply.readInt32();
}
+ status_t setOutputFileAuxiliary(int fd) {
+ LOGV("setOutputFileAuxiliary(%d)", fd);
+ Parcel data, reply;
+ data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
+ data.writeFileDescriptor(fd);
+ remote()->transact(SET_OUTPUT_FILE_AUXILIARY_FD, data, &reply);
+ return reply.readInt32();
+ }
+
status_t setVideoSize(int width, int height)
{
LOGV("setVideoSize(%dx%d)", width, height);
@@ -377,6 +387,13 @@
::close(fd);
return NO_ERROR;
} break;
+ case SET_OUTPUT_FILE_AUXILIARY_FD: {
+ LOGV("SET_OUTPUT_FILE_AUXILIARY_FD");
+ CHECK_INTERFACE(IMediaRecorder, data, reply);
+ int fd = dup(data.readFileDescriptor());
+ reply->writeInt32(setOutputFileAuxiliary(fd));
+ return NO_ERROR;
+ } break;
case SET_VIDEO_SIZE: {
LOGV("SET_VIDEO_SIZE");
CHECK_INTERFACE(IMediaRecorder, data, reply);
@@ -409,7 +426,7 @@
case SET_PREVIEW_SURFACE: {
LOGV("SET_PREVIEW_SURFACE");
CHECK_INTERFACE(IMediaRecorder, data, reply);
- sp<ISurface> surface = interface_cast<ISurface>(data.readStrongBinder());
+ sp<Surface> surface = Surface::readFromParcel(data);
reply->writeInt32(setPreviewSurface(surface));
return NO_ERROR;
} break;
diff --git a/media/libmedia/IOMX.cpp b/media/libmedia/IOMX.cpp
index ae6c2bf..9ce6738 100644
--- a/media/libmedia/IOMX.cpp
+++ b/media/libmedia/IOMX.cpp
@@ -21,59 +21,19 @@
SET_PARAMETER,
GET_CONFIG,
SET_CONFIG,
+ ENABLE_GRAPHIC_BUFFERS,
USE_BUFFER,
+ USE_GRAPHIC_BUFFER,
+ STORE_META_DATA_IN_BUFFERS,
ALLOC_BUFFER,
ALLOC_BUFFER_WITH_BACKUP,
FREE_BUFFER,
FILL_BUFFER,
EMPTY_BUFFER,
GET_EXTENSION_INDEX,
- CREATE_RENDERER,
OBSERVER_ON_MSG,
- RENDERER_RENDER,
};
-sp<IOMXRenderer> IOMX::createRenderer(
- const sp<Surface> &surface,
- const char *componentName,
- OMX_COLOR_FORMATTYPE colorFormat,
- size_t encodedWidth, size_t encodedHeight,
- size_t displayWidth, size_t displayHeight,
- int32_t rotationDegrees) {
- return createRenderer(
- surface->getISurface(),
- componentName, colorFormat, encodedWidth, encodedHeight,
- displayWidth, displayHeight,
- rotationDegrees);
-}
-
-sp<IOMXRenderer> IOMX::createRendererFromJavaSurface(
- JNIEnv *env, jobject javaSurface,
- const char *componentName,
- OMX_COLOR_FORMATTYPE colorFormat,
- size_t encodedWidth, size_t encodedHeight,
- size_t displayWidth, size_t displayHeight,
- int32_t rotationDegrees) {
- jclass surfaceClass = env->FindClass("android/view/Surface");
- if (surfaceClass == NULL) {
- LOGE("Can't find android/view/Surface");
- return NULL;
- }
-
- jfieldID surfaceID = env->GetFieldID(surfaceClass, ANDROID_VIEW_SURFACE_JNI_ID, "I");
- if (surfaceID == NULL) {
- LOGE("Can't find Surface.mSurface");
- return NULL;
- }
-
- sp<Surface> surface = (Surface *)env->GetIntField(javaSurface, surfaceID);
-
- return createRenderer(
- surface, componentName, colorFormat, encodedWidth,
- encodedHeight, displayWidth, displayHeight,
- rotationDegrees);
-}
-
class BpOMX : public BpInterface<IOMX> {
public:
BpOMX(const sp<IBinder> &impl)
@@ -220,6 +180,19 @@
return reply.readInt32();
}
+ virtual status_t enableGraphicBuffers(
+ node_id node, OMX_U32 port_index, OMX_BOOL enable) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+ data.writeIntPtr((intptr_t)node);
+ data.writeInt32(port_index);
+ data.writeInt32((uint32_t)enable);
+ remote()->transact(ENABLE_GRAPHIC_BUFFERS, data, &reply);
+
+ status_t err = reply.readInt32();
+ return err;
+ }
+
virtual status_t useBuffer(
node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms,
buffer_id *buffer) {
@@ -242,6 +215,42 @@
return err;
}
+
+ virtual status_t useGraphicBuffer(
+ node_id node, OMX_U32 port_index,
+ const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+ data.writeIntPtr((intptr_t)node);
+ data.writeInt32(port_index);
+ data.write(*graphicBuffer);
+ remote()->transact(USE_GRAPHIC_BUFFER, data, &reply);
+
+ status_t err = reply.readInt32();
+ if (err != OK) {
+ *buffer = 0;
+
+ return err;
+ }
+
+ *buffer = (void*)reply.readIntPtr();
+
+ return err;
+ }
+
+ virtual status_t storeMetaDataInBuffers(
+ node_id node, OMX_U32 port_index, OMX_BOOL enable) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+ data.writeIntPtr((intptr_t)node);
+ data.writeInt32(port_index);
+ data.writeInt32((uint32_t)enable);
+ remote()->transact(STORE_META_DATA_IN_BUFFERS, data, &reply);
+
+ status_t err = reply.readInt32();
+ return err;
+ }
+
virtual status_t allocateBuffer(
node_id node, OMX_U32 port_index, size_t size,
buffer_id *buffer, void **buffer_data) {
@@ -347,30 +356,6 @@
return err;
}
-
- virtual sp<IOMXRenderer> createRenderer(
- const sp<ISurface> &surface,
- const char *componentName,
- OMX_COLOR_FORMATTYPE colorFormat,
- size_t encodedWidth, size_t encodedHeight,
- size_t displayWidth, size_t displayHeight,
- int32_t rotationDegrees) {
- Parcel data, reply;
- data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
-
- data.writeStrongBinder(surface->asBinder());
- data.writeCString(componentName);
- data.writeInt32(colorFormat);
- data.writeInt32(encodedWidth);
- data.writeInt32(encodedHeight);
- data.writeInt32(displayWidth);
- data.writeInt32(displayHeight);
- data.writeInt32(rotationDegrees);
-
- remote()->transact(CREATE_RENDERER, data, &reply);
-
- return interface_cast<IOMXRenderer>(reply.readStrongBinder());
- }
};
IMPLEMENT_META_INTERFACE(OMX, "android.hardware.IOMX");
@@ -547,6 +532,20 @@
return NO_ERROR;
}
+ case ENABLE_GRAPHIC_BUFFERS:
+ {
+ CHECK_INTERFACE(IOMX, data, reply);
+
+ node_id node = (void*)data.readIntPtr();
+ OMX_U32 port_index = data.readInt32();
+ OMX_BOOL enable = (OMX_BOOL)data.readInt32();
+
+ status_t err = enableGraphicBuffers(node, port_index, enable);
+ reply->writeInt32(err);
+
+ return NO_ERROR;
+ }
+
case USE_BUFFER:
{
CHECK_INTERFACE(IOMX, data, reply);
@@ -567,6 +566,41 @@
return NO_ERROR;
}
+ case USE_GRAPHIC_BUFFER:
+ {
+ CHECK_INTERFACE(IOMX, data, reply);
+
+ node_id node = (void*)data.readIntPtr();
+ OMX_U32 port_index = data.readInt32();
+ sp<GraphicBuffer> graphicBuffer = new GraphicBuffer();
+ data.read(*graphicBuffer);
+
+ buffer_id buffer;
+ status_t err = useGraphicBuffer(
+ node, port_index, graphicBuffer, &buffer);
+ reply->writeInt32(err);
+
+ if (err == OK) {
+ reply->writeIntPtr((intptr_t)buffer);
+ }
+
+ return NO_ERROR;
+ }
+
+ case STORE_META_DATA_IN_BUFFERS:
+ {
+ CHECK_INTERFACE(IOMX, data, reply);
+
+ node_id node = (void*)data.readIntPtr();
+ OMX_U32 port_index = data.readInt32();
+ OMX_BOOL enable = (OMX_BOOL)data.readInt32();
+
+ status_t err = storeMetaDataInBuffers(node, port_index, enable);
+ reply->writeInt32(err);
+
+ return NO_ERROR;
+ }
+
case ALLOC_BUFFER:
{
CHECK_INTERFACE(IOMX, data, reply);
@@ -672,35 +706,6 @@
return OK;
}
- case CREATE_RENDERER:
- {
- CHECK_INTERFACE(IOMX, data, reply);
-
- sp<ISurface> isurface =
- interface_cast<ISurface>(data.readStrongBinder());
-
- const char *componentName = data.readCString();
-
- OMX_COLOR_FORMATTYPE colorFormat =
- static_cast<OMX_COLOR_FORMATTYPE>(data.readInt32());
-
- size_t encodedWidth = (size_t)data.readInt32();
- size_t encodedHeight = (size_t)data.readInt32();
- size_t displayWidth = (size_t)data.readInt32();
- size_t displayHeight = (size_t)data.readInt32();
- int32_t rotationDegrees = data.readInt32();
-
- sp<IOMXRenderer> renderer =
- createRenderer(isurface, componentName, colorFormat,
- encodedWidth, encodedHeight,
- displayWidth, displayHeight,
- rotationDegrees);
-
- reply->writeStrongBinder(renderer->asBinder());
-
- return OK;
- }
-
default:
return BBinder::onTransact(code, data, reply, flags);
}
@@ -746,44 +751,4 @@
}
}
-////////////////////////////////////////////////////////////////////////////////
-
-class BpOMXRenderer : public BpInterface<IOMXRenderer> {
-public:
- BpOMXRenderer(const sp<IBinder> &impl)
- : BpInterface<IOMXRenderer>(impl) {
- }
-
- virtual void render(IOMX::buffer_id buffer) {
- Parcel data, reply;
- data.writeInterfaceToken(IOMXRenderer::getInterfaceDescriptor());
- data.writeIntPtr((intptr_t)buffer);
-
- // NOTE: Do NOT make this a ONE_WAY call, it must be synchronous
- // so that the caller knows when to recycle the buffer.
- remote()->transact(RENDERER_RENDER, data, &reply);
- }
-};
-
-IMPLEMENT_META_INTERFACE(OMXRenderer, "android.hardware.IOMXRenderer");
-
-status_t BnOMXRenderer::onTransact(
- uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) {
- switch (code) {
- case RENDERER_RENDER:
- {
- CHECK_INTERFACE(IOMXRenderer, data, reply);
-
- IOMX::buffer_id buffer = (void*)data.readIntPtr();
-
- render(buffer);
-
- return NO_ERROR;
- }
-
- default:
- return BBinder::onTransact(code, data, reply, flags);
- }
-}
-
} // namespace android
diff --git a/media/libmedia/IStreamSource.cpp b/media/libmedia/IStreamSource.cpp
new file mode 100644
index 0000000..89f2b44
--- /dev/null
+++ b/media/libmedia/IStreamSource.cpp
@@ -0,0 +1,168 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "IStreamSource"
+#include <utils/Log.h>
+
+#include <media/IStreamSource.h>
+
+#include <binder/IMemory.h>
+#include <binder/Parcel.h>
+
+namespace android {
+
+enum {
+ // IStreamSource
+ SET_LISTENER = IBinder::FIRST_CALL_TRANSACTION,
+ SET_BUFFERS,
+ ON_BUFFER_AVAILABLE,
+
+ // IStreamListener
+ QUEUE_BUFFER,
+ QUEUE_COMMAND,
+};
+
+struct BpStreamSource : public BpInterface<IStreamSource> {
+ BpStreamSource(const sp<IBinder> &impl)
+ : BpInterface<IStreamSource>(impl) {
+ }
+
+ virtual void setListener(const sp<IStreamListener> &listener) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IStreamSource::getInterfaceDescriptor());
+ data.writeStrongBinder(listener->asBinder());
+ remote()->transact(SET_LISTENER, data, &reply);
+ }
+
+ virtual void setBuffers(const Vector<sp<IMemory> > &buffers) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IStreamSource::getInterfaceDescriptor());
+ data.writeInt32(static_cast<int32_t>(buffers.size()));
+ for (size_t i = 0; i < buffers.size(); ++i) {
+ data.writeStrongBinder(buffers.itemAt(i)->asBinder());
+ }
+ remote()->transact(SET_BUFFERS, data, &reply);
+ }
+
+ virtual void onBufferAvailable(size_t index) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IStreamSource::getInterfaceDescriptor());
+ data.writeInt32(static_cast<int32_t>(index));
+ remote()->transact(
+ ON_BUFFER_AVAILABLE, data, &reply, IBinder::FLAG_ONEWAY);
+ }
+};
+
+IMPLEMENT_META_INTERFACE(StreamSource, "android.hardware.IStreamSource");
+
+status_t BnStreamSource::onTransact(
+ uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) {
+ switch (code) {
+ case SET_LISTENER:
+ {
+ CHECK_INTERFACE(IStreamSource, data, reply);
+ setListener(
+ interface_cast<IStreamListener>(data.readStrongBinder()));
+ break;
+ }
+
+ case SET_BUFFERS:
+ {
+ CHECK_INTERFACE(IStreamSource, data, reply);
+ size_t n = static_cast<size_t>(data.readInt32());
+ Vector<sp<IMemory> > buffers;
+ for (size_t i = 0; i < n; ++i) {
+ sp<IMemory> mem =
+ interface_cast<IMemory>(data.readStrongBinder());
+
+ buffers.push(mem);
+ }
+ setBuffers(buffers);
+ break;
+ }
+
+ case ON_BUFFER_AVAILABLE:
+ {
+ CHECK_INTERFACE(IStreamSource, data, reply);
+ onBufferAvailable(static_cast<size_t>(data.readInt32()));
+ break;
+ }
+
+ default:
+ return BBinder::onTransact(code, data, reply, flags);
+ }
+
+ return OK;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct BpStreamListener : public BpInterface<IStreamListener> {
+ BpStreamListener(const sp<IBinder> &impl)
+ : BpInterface<IStreamListener>(impl) {
+ }
+
+ virtual void queueBuffer(size_t index, size_t size) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IStreamListener::getInterfaceDescriptor());
+ data.writeInt32(static_cast<int32_t>(index));
+ data.writeInt32(static_cast<int32_t>(size));
+
+ remote()->transact(QUEUE_BUFFER, data, &reply, IBinder::FLAG_ONEWAY);
+ }
+
+ virtual void queueCommand(Command cmd) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IStreamListener::getInterfaceDescriptor());
+ data.writeInt32(static_cast<int32_t>(cmd));
+
+ remote()->transact(QUEUE_COMMAND, data, &reply, IBinder::FLAG_ONEWAY);
+ }
+};
+
+IMPLEMENT_META_INTERFACE(StreamListener, "android.hardware.IStreamListener");
+
+status_t BnStreamListener::onTransact(
+ uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) {
+ switch (code) {
+ case QUEUE_BUFFER:
+ {
+ CHECK_INTERFACE(IStreamListener, data, reply);
+ size_t index = static_cast<size_t>(data.readInt32());
+ size_t size = static_cast<size_t>(data.readInt32());
+
+ queueBuffer(index, size);
+ break;
+ }
+
+ case QUEUE_COMMAND:
+ {
+ CHECK_INTERFACE(IStreamListener, data, reply);
+ Command cmd = static_cast<Command>(data.readInt32());
+
+ queueCommand(cmd);
+ break;
+ }
+
+ default:
+ return BBinder::onTransact(code, data, reply, flags);
+ }
+
+ return OK;
+}
+
+} // namespace android
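
The two interfaces above define the push model used to stream media data into the player service: the player side hands the source a listener plus a set of shared-memory buffers, and the source fills buffers and queues them back through the listener. A minimal sketch of a concrete source over a plain file descriptor follows; it is not part of this change, the class name is invented, the IStreamListener::EOS command value and the header paths are assumptions, and error handling is omitted.

    #include <unistd.h>

    #include <binder/IMemory.h>
    #include <media/IStreamSource.h>
    #include <utils/Vector.h>

    using namespace android;

    // Hypothetical concrete source: pulls stream data from a plain fd.
    struct FdStreamSource : public BnStreamSource {
        FdStreamSource(int fd) : mFd(fd) {}

        virtual void setListener(const sp<IStreamListener> &listener) {
            mListener = listener;            // where filled buffers get queued
        }

        virtual void setBuffers(const Vector<sp<IMemory> > &buffers) {
            mBuffers = buffers;              // shared memory provided by the player
        }

        virtual void onBufferAvailable(size_t index) {
            // The player returned buffer 'index'; refill it and queue it again.
            sp<IMemory> mem = mBuffers.itemAt(index);
            ssize_t n = read(mFd, mem->pointer(), mem->size());
            if (n > 0) {
                mListener->queueBuffer(index, n);
            } else {
                mListener->queueCommand(IStreamListener::EOS);  // assumed enum value
            }
        }

    private:
        int mFd;
        sp<IStreamListener> mListener;
        Vector<sp<IMemory> > mBuffers;
    };
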
diff --git a/media/libmedia/MediaProfiles.cpp b/media/libmedia/MediaProfiles.cpp
index 3869389..9ad63f0 100644
--- a/media/libmedia/MediaProfiles.cpp
+++ b/media/libmedia/MediaProfiles.cpp
@@ -59,8 +59,21 @@
};
const MediaProfiles::NameToTagMap MediaProfiles::sCamcorderQualityNameMap[] = {
+ {"low", CAMCORDER_QUALITY_LOW},
{"high", CAMCORDER_QUALITY_HIGH},
- {"low", CAMCORDER_QUALITY_LOW}
+ {"qcif", CAMCORDER_QUALITY_QCIF},
+ {"cif", CAMCORDER_QUALITY_CIF},
+ {"480p", CAMCORDER_QUALITY_480P},
+ {"720p", CAMCORDER_QUALITY_720P},
+ {"1080p", CAMCORDER_QUALITY_1080P},
+
+ {"timelapselow", CAMCORDER_QUALITY_TIME_LAPSE_LOW},
+ {"timelapsehigh", CAMCORDER_QUALITY_TIME_LAPSE_HIGH},
+ {"timelapseqcif", CAMCORDER_QUALITY_TIME_LAPSE_QCIF},
+ {"timelapsecif", CAMCORDER_QUALITY_TIME_LAPSE_CIF},
+ {"timelapse480p", CAMCORDER_QUALITY_TIME_LAPSE_480P},
+ {"timelapse720p", CAMCORDER_QUALITY_TIME_LAPSE_720P},
+ {"timelapse1080p", CAMCORDER_QUALITY_TIME_LAPSE_1080P}
};
/*static*/ void
@@ -411,16 +424,16 @@
}
/*static*/ MediaProfiles::CamcorderProfile*
-MediaProfiles::createDefaultCamcorderHighProfile()
+MediaProfiles::createDefaultCamcorderTimeLapseQcifProfile(camcorder_quality quality)
{
MediaProfiles::VideoCodec *videoCodec =
- new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 360000, 352, 288, 20);
+ new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 1000000, 176, 144, 20);
AudioCodec *audioCodec = new AudioCodec(AUDIO_ENCODER_AMR_NB, 12200, 8000, 1);
CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
profile->mCameraId = 0;
profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
- profile->mQuality = CAMCORDER_QUALITY_HIGH;
+ profile->mQuality = quality;
profile->mDuration = 60;
profile->mVideoCodec = videoCodec;
profile->mAudioCodec = audioCodec;
@@ -428,7 +441,40 @@
}
/*static*/ MediaProfiles::CamcorderProfile*
-MediaProfiles::createDefaultCamcorderLowProfile()
+MediaProfiles::createDefaultCamcorderTimeLapse480pProfile(camcorder_quality quality)
+{
+ MediaProfiles::VideoCodec *videoCodec =
+ new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 20000000, 720, 480, 20);
+
+ AudioCodec *audioCodec = new AudioCodec(AUDIO_ENCODER_AMR_NB, 12200, 8000, 1);
+ CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
+ profile->mCameraId = 0;
+ profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
+ profile->mQuality = quality;
+ profile->mDuration = 60;
+ profile->mVideoCodec = videoCodec;
+ profile->mAudioCodec = audioCodec;
+ return profile;
+}
+
+/*static*/ void
+MediaProfiles::createDefaultCamcorderTimeLapseLowProfiles(
+ MediaProfiles::CamcorderProfile **lowTimeLapseProfile,
+ MediaProfiles::CamcorderProfile **lowSpecificTimeLapseProfile) {
+ *lowTimeLapseProfile = createDefaultCamcorderTimeLapseQcifProfile(CAMCORDER_QUALITY_TIME_LAPSE_LOW);
+ *lowSpecificTimeLapseProfile = createDefaultCamcorderTimeLapseQcifProfile(CAMCORDER_QUALITY_TIME_LAPSE_QCIF);
+}
+
+/*static*/ void
+MediaProfiles::createDefaultCamcorderTimeLapseHighProfiles(
+ MediaProfiles::CamcorderProfile **highTimeLapseProfile,
+ MediaProfiles::CamcorderProfile **highSpecificTimeLapseProfile) {
+ *highTimeLapseProfile = createDefaultCamcorderTimeLapse480pProfile(CAMCORDER_QUALITY_TIME_LAPSE_HIGH);
+ *highSpecificTimeLapseProfile = createDefaultCamcorderTimeLapse480pProfile(CAMCORDER_QUALITY_TIME_LAPSE_480P);
+}
+
+/*static*/ MediaProfiles::CamcorderProfile*
+MediaProfiles::createDefaultCamcorderQcifProfile(camcorder_quality quality)
{
MediaProfiles::VideoCodec *videoCodec =
new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 192000, 176, 144, 20);
@@ -439,18 +485,72 @@
MediaProfiles::CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
profile->mCameraId = 0;
profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
- profile->mQuality = CAMCORDER_QUALITY_LOW;
+ profile->mQuality = quality;
profile->mDuration = 30;
profile->mVideoCodec = videoCodec;
profile->mAudioCodec = audioCodec;
return profile;
}
+/*static*/ MediaProfiles::CamcorderProfile*
+MediaProfiles::createDefaultCamcorderCifProfile(camcorder_quality quality)
+{
+ MediaProfiles::VideoCodec *videoCodec =
+ new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 360000, 352, 288, 20);
+
+ AudioCodec *audioCodec = new AudioCodec(AUDIO_ENCODER_AMR_NB, 12200, 8000, 1);
+ CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
+ profile->mCameraId = 0;
+ profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
+ profile->mQuality = quality;
+ profile->mDuration = 60;
+ profile->mVideoCodec = videoCodec;
+ profile->mAudioCodec = audioCodec;
+ return profile;
+}
+
+/*static*/ void
+MediaProfiles::createDefaultCamcorderLowProfiles(
+ MediaProfiles::CamcorderProfile **lowProfile,
+ MediaProfiles::CamcorderProfile **lowSpecificProfile) {
+ *lowProfile = createDefaultCamcorderQcifProfile(CAMCORDER_QUALITY_LOW);
+ *lowSpecificProfile = createDefaultCamcorderQcifProfile(CAMCORDER_QUALITY_QCIF);
+}
+
+/*static*/ void
+MediaProfiles::createDefaultCamcorderHighProfiles(
+ MediaProfiles::CamcorderProfile **highProfile,
+ MediaProfiles::CamcorderProfile **highSpecificProfile) {
+ *highProfile = createDefaultCamcorderCifProfile(CAMCORDER_QUALITY_HIGH);
+ *highSpecificProfile = createDefaultCamcorderCifProfile(CAMCORDER_QUALITY_CIF);
+}
+
/*static*/ void
MediaProfiles::createDefaultCamcorderProfiles(MediaProfiles *profiles)
{
- profiles->mCamcorderProfiles.add(createDefaultCamcorderHighProfile());
- profiles->mCamcorderProfiles.add(createDefaultCamcorderLowProfile());
+ // low camcorder profiles.
+ MediaProfiles::CamcorderProfile *lowProfile, *lowSpecificProfile;
+ createDefaultCamcorderLowProfiles(&lowProfile, &lowSpecificProfile);
+ profiles->mCamcorderProfiles.add(lowProfile);
+ profiles->mCamcorderProfiles.add(lowSpecificProfile);
+
+ // high camcorder profiles.
+ MediaProfiles::CamcorderProfile* highProfile, *highSpecificProfile;
+ createDefaultCamcorderHighProfiles(&highProfile, &highSpecificProfile);
+ profiles->mCamcorderProfiles.add(highProfile);
+ profiles->mCamcorderProfiles.add(highSpecificProfile);
+
+ // low camcorder time lapse profiles.
+ MediaProfiles::CamcorderProfile *lowTimeLapseProfile, *lowSpecificTimeLapseProfile;
+ createDefaultCamcorderTimeLapseLowProfiles(&lowTimeLapseProfile, &lowSpecificTimeLapseProfile);
+ profiles->mCamcorderProfiles.add(lowTimeLapseProfile);
+ profiles->mCamcorderProfiles.add(lowSpecificTimeLapseProfile);
+
+ // high camcorder time lapse profiles.
+ MediaProfiles::CamcorderProfile *highTimeLapseProfile, *highSpecificTimeLapseProfile;
+ createDefaultCamcorderTimeLapseHighProfiles(&highTimeLapseProfile, &highSpecificTimeLapseProfile);
+ profiles->mCamcorderProfiles.add(highTimeLapseProfile);
+ profiles->mCamcorderProfiles.add(highSpecificTimeLapseProfile);
}
/*static*/ void
@@ -668,13 +768,8 @@
return decoders; // copy out
}
-int MediaProfiles::getCamcorderProfileParamByName(const char *name,
- int cameraId,
- camcorder_quality quality) const
+int MediaProfiles::getCamcorderProfileIndex(int cameraId, camcorder_quality quality) const
{
- LOGV("getCamcorderProfileParamByName: %s for camera %d, quality %d",
- name, cameraId, quality);
-
int index = -1;
for (size_t i = 0, n = mCamcorderProfiles.size(); i < n; ++i) {
if (mCamcorderProfiles[i]->mCameraId == cameraId &&
@@ -683,6 +778,17 @@
break;
}
}
+ return index;
+}
+
+int MediaProfiles::getCamcorderProfileParamByName(const char *name,
+ int cameraId,
+ camcorder_quality quality) const
+{
+ LOGV("getCamcorderProfileParamByName: %s for camera %d, quality %d",
+ name, cameraId, quality);
+
+ int index = getCamcorderProfileIndex(cameraId, quality);
if (index == -1) {
LOGE("The given camcorder profile camera %d quality %d is not found",
cameraId, quality);
@@ -705,6 +811,11 @@
return -1;
}
+bool MediaProfiles::hasCamcorderProfile(int cameraId, camcorder_quality quality) const
+{
+ return (getCamcorderProfileIndex(cameraId, quality) != -1);
+}
+
Vector<int> MediaProfiles::getImageEncodingQualityLevels(int cameraId) const
{
Vector<int> result;
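
The quality map above now names concrete resolutions (qcif through 1080p) and their time-lapse counterparts instead of only "low"/"high", and hasCamcorderProfile() lets callers probe for a profile before asking for its parameters. A sketch of such a probe, not part of this change, is below; the "vid.*" parameter names follow the existing getCamcorderProfileParamByName() convention and are assumptions here.

    #define LOG_TAG "CamcorderProfileProbe"
    #include <utils/Log.h>

    #include <media/MediaProfiles.h>

    using namespace android;

    // Logs which of the named quality levels camera 0 actually provides.
    static void logSupportedCamcorderQualities() {
        MediaProfiles *profiles = MediaProfiles::getInstance();

        static const camcorder_quality kQualities[] = {
            CAMCORDER_QUALITY_QCIF, CAMCORDER_QUALITY_CIF, CAMCORDER_QUALITY_480P,
            CAMCORDER_QUALITY_720P, CAMCORDER_QUALITY_1080P,
            CAMCORDER_QUALITY_TIME_LAPSE_QCIF, CAMCORDER_QUALITY_TIME_LAPSE_480P,
        };

        for (size_t i = 0; i < sizeof(kQualities) / sizeof(kQualities[0]); ++i) {
            if (!profiles->hasCamcorderProfile(0 /* cameraId */, kQualities[i])) {
                continue;  // quality not configured for this camera
            }
            LOGI("quality %d: %dx%d @ %d bps",
                 kQualities[i],
                 profiles->getCamcorderProfileParamByName("vid.width", 0, kQualities[i]),
                 profiles->getCamcorderProfileParamByName("vid.height", 0, kQualities[i]),
                 profiles->getCamcorderProfileParamByName("vid.bps", 0, kQualities[i]));
        }
    }
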
diff --git a/media/libmedia/MediaScanner.cpp b/media/libmedia/MediaScanner.cpp
index c5112a5..c31b622 100644
--- a/media/libmedia/MediaScanner.cpp
+++ b/media/libmedia/MediaScanner.cpp
@@ -48,8 +48,7 @@
}
status_t MediaScanner::processDirectory(
- const char *path, const char *extensions,
- MediaScannerClient &client,
+ const char *path, MediaScannerClient &client,
ExceptionCheck exceptionCheck, void *exceptionEnv) {
int pathLength = strlen(path);
if (pathLength >= PATH_MAX) {
@@ -72,35 +71,16 @@
status_t result =
doProcessDirectory(
- pathBuffer, pathRemaining, extensions, client,
- exceptionCheck, exceptionEnv);
+ pathBuffer, pathRemaining, client, exceptionCheck, exceptionEnv);
free(pathBuffer);
return result;
}
-static bool fileMatchesExtension(const char* path, const char* extensions) {
- const char* extension = strrchr(path, '.');
- if (!extension) return false;
- ++extension; // skip the dot
- if (extension[0] == 0) return false;
-
- while (extensions[0]) {
- const char* comma = strchr(extensions, ',');
- size_t length = (comma ? comma - extensions : strlen(extensions));
- if (length == strlen(extension) && strncasecmp(extension, extensions, length) == 0) return true;
- extensions += length;
- if (extensions[0] == ',') ++extensions;
- }
-
- return false;
-}
-
status_t MediaScanner::doProcessDirectory(
- char *path, int pathRemaining, const char *extensions,
- MediaScannerClient &client, ExceptionCheck exceptionCheck,
- void *exceptionEnv) {
+ char *path, int pathRemaining, MediaScannerClient &client,
+ ExceptionCheck exceptionCheck, void *exceptionEnv) {
// place to copy file or directory name
char* fileSpot = path + strlen(path);
struct dirent* entry;
@@ -133,6 +113,13 @@
continue;
}
+ int nameLength = strlen(name);
+ if (nameLength + 1 > pathRemaining) {
+ // path too long!
+ continue;
+ }
+ strcpy(fileSpot, name);
+
int type = entry->d_type;
if (type == DT_UNKNOWN) {
// If the type is unknown, stat() the file instead.
@@ -150,29 +137,20 @@
}
}
if (type == DT_REG || type == DT_DIR) {
- int nameLength = strlen(name);
- bool isDirectory = (type == DT_DIR);
-
- if (nameLength > pathRemaining || (isDirectory && nameLength + 1 > pathRemaining)) {
- // path too long!
- continue;
- }
-
- strcpy(fileSpot, name);
- if (isDirectory) {
+ if (type == DT_DIR) {
// ignore directories with a name that starts with '.'
// for example, the Mac ".Trashes" directory
if (name[0] == '.') continue;
strcat(fileSpot, "/");
- int err = doProcessDirectory(path, pathRemaining - nameLength - 1, extensions, client, exceptionCheck, exceptionEnv);
+ int err = doProcessDirectory(path, pathRemaining - nameLength - 1, client, exceptionCheck, exceptionEnv);
if (err) {
// pass exceptions up - ignore other errors
if (exceptionCheck && exceptionCheck(exceptionEnv)) goto failure;
LOGE("Error processing '%s' - skipping\n", path);
continue;
}
- } else if (fileMatchesExtension(path, extensions)) {
+ } else {
struct stat statbuf;
stat(path, &statbuf);
if (statbuf.st_size > 0) {
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index cc41e66..a098d69 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -172,16 +172,6 @@
return INVALID_OPERATION;
}
-status_t MediaPlayer::suspend() {
- Mutex::Autolock _l(mLock);
- return mPlayer->suspend();
-}
-
-status_t MediaPlayer::resume() {
- Mutex::Autolock _l(mLock);
- return mPlayer->resume();
-}
-
status_t MediaPlayer::setMetadataFilter(const Parcel& filter)
{
LOGD("setMetadataFilter");
@@ -207,10 +197,8 @@
LOGV("setVideoSurface");
Mutex::Autolock _l(mLock);
if (mPlayer == 0) return NO_INIT;
- if (surface != NULL)
- return mPlayer->setVideoSurface(surface->getISurface());
- else
- return mPlayer->setVideoSurface(NULL);
+
+ return mPlayer->setVideoSurface(surface);
}
// must call with lock held
@@ -449,6 +437,9 @@
} else {
mCurrentState = MEDIA_PLAYER_IDLE;
}
+ // setDataSource has to be called again to create a
+ // new mediaplayer.
+ mPlayer = 0;
return ret;
}
clear_l();
diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp
index e20e3ba..fd575fe 100644
--- a/media/libmedia/mediarecorder.cpp
+++ b/media/libmedia/mediarecorder.cpp
@@ -65,7 +65,7 @@
return INVALID_OPERATION;
}
- status_t ret = mMediaRecorder->setPreviewSurface(surface->getISurface());
+ status_t ret = mMediaRecorder->setPreviewSurface(surface);
if (OK != ret) {
LOGV("setPreviewSurface failed: %d", ret);
mCurrentState = MEDIA_RECORDER_ERROR;
@@ -308,6 +308,32 @@
return ret;
}
+status_t MediaRecorder::setOutputFileAuxiliary(int fd)
+{
+ LOGV("setOutputFileAuxiliary(%d)", fd);
+ if(mMediaRecorder == NULL) {
+ LOGE("media recorder is not initialized yet");
+ return INVALID_OPERATION;
+ }
+ if (mIsAuxiliaryOutputFileSet) {
+ LOGE("output file has already been set");
+ return INVALID_OPERATION;
+ }
+ if (!(mCurrentState & MEDIA_RECORDER_DATASOURCE_CONFIGURED)) {
+ LOGE("setOutputFile called in an invalid state(%d)", mCurrentState);
+ return INVALID_OPERATION;
+ }
+
+ status_t ret = mMediaRecorder->setOutputFileAuxiliary(fd);
+ if (OK != ret) {
+ LOGV("setOutputFileAuxiliary failed: %d", ret);
+ mCurrentState = MEDIA_RECORDER_ERROR;
+ return ret;
+ }
+ mIsAuxiliaryOutputFileSet = true;
+ return ret;
+}
+
status_t MediaRecorder::setVideoSize(int width, int height)
{
LOGV("setVideoSize(%d, %d)", width, height);
@@ -571,6 +597,7 @@
mIsAudioEncoderSet = false;
mIsVideoEncoderSet = false;
mIsOutputFileSet = false;
+ mIsAuxiliaryOutputFileSet = false;
}
// Release should be OK in any state
@@ -643,4 +670,3 @@
}
}; // namespace android
-
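
Together with the "time-lapse-enable", "time-between-time-lapse-frame-capture" and "video-aux-param-*" keys handled by StagefrightRecorder further down, this lets a native client record a time-lapse clip plus a smaller auxiliary copy. A sketch of the call sequence, not part of this change, assuming already-opened output descriptors, a valid preview surface, and the usual semicolon-separated key=value format accepted by setParameters():

    #include <media/mediarecorder.h>
    #include <surfaceflinger/Surface.h>
    #include <utils/String8.h>

    using namespace android;

    // Records 720x480 time-lapse video to mainFd and a QCIF copy to auxFd.
    status_t recordTimeLapseWithAux(int mainFd, int auxFd, const sp<Surface> &preview) {
        sp<MediaRecorder> recorder = new MediaRecorder();
        recorder->setVideoSource(VIDEO_SOURCE_CAMERA);
        recorder->setOutputFormat(OUTPUT_FORMAT_THREE_GPP);
        recorder->setVideoEncoder(VIDEO_ENCODER_H263);
        recorder->setVideoSize(720, 480);
        recorder->setPreviewSurface(preview);
        recorder->setOutputFile(mainFd, 0, 0);
        recorder->setOutputFileAuxiliary(auxFd);  // only valid once the data source is configured
        recorder->setParameters(String8(
                "time-lapse-enable=1;"
                "time-between-time-lapse-frame-capture=1000;"  // milliseconds between captures
                "video-aux-param-width=176;"
                "video-aux-param-height=144;"
                "video-aux-param-encoding-bitrate=192000"));
        status_t err = recorder->prepare();
        if (err == OK) {
            err = recorder->start();
        }
        return err;
    }
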
diff --git a/media/libmediaplayerservice/Android.mk b/media/libmediaplayerservice/Android.mk
index 55846be..a9d537f 100644
--- a/media/libmediaplayerservice/Android.mk
+++ b/media/libmediaplayerservice/Android.mk
@@ -31,21 +31,12 @@
libandroid_runtime \
libstagefright \
libstagefright_omx \
- libstagefright_color_conversion \
libstagefright_foundation \
libsurfaceflinger_client
LOCAL_STATIC_LIBRARIES := \
libstagefright_rtsp
-ifneq ($(BUILD_WITHOUT_PV),true)
-LOCAL_SHARED_LIBRARIES += \
- libopencore_player \
- libopencore_author
-else
-LOCAL_CFLAGS += -DNO_OPENCORE
-endif
-
ifneq ($(TARGET_SIMULATOR),true)
LOCAL_SHARED_LIBRARIES += libdl
endif
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index c43e9bb..63d09d6 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -56,7 +56,6 @@
#include "MetadataRetrieverClient.h"
#include "MidiFile.h"
-#include <media/PVPlayer.h>
#include "TestPlayerStub.h"
#include "StagefrightPlayer.h"
@@ -196,11 +195,6 @@
{".rtttl", SONIVOX_PLAYER},
{".rtx", SONIVOX_PLAYER},
{".ota", SONIVOX_PLAYER},
-#ifndef NO_OPENCORE
- {".wma", PV_PLAYER},
- {".wmv", PV_PLAYER},
- {".asf", PV_PLAYER},
-#endif
};
// TODO: Find real cause of Audio/Video delay in PV framework and remove this workaround
@@ -284,6 +278,26 @@
return c;
}
+sp<IMediaPlayer> MediaPlayerService::create(
+ pid_t pid, const sp<IMediaPlayerClient> &client,
+ const sp<IStreamSource> &source, int audioSessionId) {
+ int32_t connId = android_atomic_inc(&mNextConnId);
+ sp<Client> c = new Client(this, pid, connId, client, audioSessionId);
+
+ LOGV("Create new client(%d) from pid %d, audioSessionId=%d",
+ connId, pid, audioSessionId);
+
+ if (OK != c->setDataSource(source)) {
+ c.clear();
+ } else {
+ wp<Client> w = c;
+ Mutex::Autolock lock(mLock);
+ mClients.add(w);
+ }
+
+ return c;
+}
+
sp<IOMX> MediaPlayerService::getOMX() {
Mutex::Autolock autoLock(mLock);
@@ -691,14 +705,6 @@
if (ident == 0x5367674f) // 'OggS'
return STAGEFRIGHT_PLAYER;
-#ifndef NO_OPENCORE
- if (ident == 0x75b22630) {
- // The magic number for .asf files, i.e. wmv and wma content.
- // These are not currently supported through stagefright.
- return PV_PLAYER;
- }
-#endif
-
// Some kind of MIDI?
EAS_DATA_HANDLE easdata;
if (EAS_Init(&easdata) == EAS_SUCCESS) {
@@ -737,16 +743,6 @@
}
}
- if (!strncasecmp(url, "rtsp://", 7)) {
- char value[PROPERTY_VALUE_MAX];
- if (property_get("media.stagefright.enable-rtsp", value, NULL)
- && (strcmp(value, "1") && strcasecmp(value, "true"))) {
- // For now, we're going to use PV for rtsp-based playback
- // by default until we can clear up a few more issues.
- return PV_PLAYER;
- }
- }
-
return getDefaultPlayerType();
}
@@ -755,12 +751,6 @@
{
sp<MediaPlayerBase> p;
switch (playerType) {
-#ifndef NO_OPENCORE
- case PV_PLAYER:
- LOGV(" create PVPlayer");
- p = new PVPlayer();
- break;
-#endif
case SONIVOX_PLAYER:
LOGV(" create MidiFile");
p = new MidiFile();
@@ -773,6 +763,9 @@
LOGV("Create Test Player stub");
p = new TestPlayerStub();
break;
+ default:
+ LOGE("Unknown player type: %d", playerType);
+ return NULL;
}
if (p != NULL) {
if (p->initCheck() == NO_ERROR) {
@@ -891,7 +884,31 @@
return mStatus;
}
-status_t MediaPlayerService::Client::setVideoSurface(const sp<ISurface>& surface)
+status_t MediaPlayerService::Client::setDataSource(
+ const sp<IStreamSource> &source) {
+ // create the right type of player
+ sp<MediaPlayerBase> p = createPlayer(STAGEFRIGHT_PLAYER);
+
+ if (p == NULL) {
+ return NO_INIT;
+ }
+
+ if (!p->hardwareOutput()) {
+ mAudioOutput = new AudioOutput(mAudioSessionId);
+ static_cast<MediaPlayerInterface*>(p.get())->setAudioSink(mAudioOutput);
+ }
+
+ // now set data source
+ mStatus = p->setDataSource(source);
+
+ if (mStatus == OK) {
+ mPlayer = p;
+ }
+
+ return mStatus;
+}
+
+status_t MediaPlayerService::Client::setVideoSurface(const sp<Surface>& surface)
{
LOGV("[%d] setVideoSurface(%p)", mConnId, surface.get());
sp<MediaPlayerBase> p = getPlayer();
@@ -966,20 +983,6 @@
return OK;
}
-status_t MediaPlayerService::Client::suspend() {
- sp<MediaPlayerBase> p = getPlayer();
- if (p == 0) return UNKNOWN_ERROR;
-
- return p->suspend();
-}
-
-status_t MediaPlayerService::Client::resume() {
- sp<MediaPlayerBase> p = getPlayer();
- if (p == 0) return UNKNOWN_ERROR;
-
- return p->resume();
-}
-
status_t MediaPlayerService::Client::prepareAsync()
{
LOGV("[%d] prepareAsync", mConnId);
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 4492e20..62f8ed6 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -191,6 +191,11 @@
const KeyedVector<String8, String8> *headers, int audioSessionId);
virtual sp<IMediaPlayer> create(pid_t pid, const sp<IMediaPlayerClient>& client, int fd, int64_t offset, int64_t length, int audioSessionId);
+
+ virtual sp<IMediaPlayer> create(
+ pid_t pid, const sp<IMediaPlayerClient> &client,
+ const sp<IStreamSource> &source, int audioSessionId);
+
virtual sp<IMemory> decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, int* pFormat);
virtual sp<IMemory> decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, int* pFormat);
virtual sp<IOMX> getOMX();
@@ -206,7 +211,7 @@
// IMediaPlayer interface
virtual void disconnect();
- virtual status_t setVideoSurface(const sp<ISurface>& surface);
+ virtual status_t setVideoSurface(const sp<Surface>& surface);
virtual status_t prepareAsync();
virtual status_t start();
virtual status_t stop();
@@ -224,8 +229,6 @@
virtual status_t getMetadata(bool update_only,
bool apply_filter,
Parcel *reply);
- virtual status_t suspend();
- virtual status_t resume();
virtual status_t setAuxEffectSendLevel(float level);
virtual status_t attachAuxEffect(int effectId);
@@ -236,6 +239,9 @@
const KeyedVector<String8, String8> *headers);
status_t setDataSource(int fd, int64_t offset, int64_t length);
+
+ status_t setDataSource(const sp<IStreamSource> &source);
+
static void notify(void* cookie, int msg, int ext1, int ext2);
pid_t pid() const { return mPid; }
diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp
index 19915f1..1a1780c 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.cpp
+++ b/media/libmediaplayerservice/MediaRecorderClient.cpp
@@ -31,10 +31,6 @@
#include <binder/MemoryHeapBase.h>
#include <binder/MemoryBase.h>
-#ifndef NO_OPENCORE
-#include <media/PVMediaRecorder.h>
-#endif
-
#include <utils/String16.h>
#include <media/AudioTrack.h>
@@ -70,7 +66,7 @@
return mRecorder->setCamera(camera);
}
-status_t MediaRecorderClient::setPreviewSurface(const sp<ISurface>& surface)
+status_t MediaRecorderClient::setPreviewSurface(const sp<Surface>& surface)
{
LOGV("setPreviewSurface");
Mutex::Autolock lock(mLock);
@@ -164,6 +160,17 @@
return mRecorder->setOutputFile(fd, offset, length);
}
+status_t MediaRecorderClient::setOutputFileAuxiliary(int fd)
+{
+ LOGV("setOutputFileAuxiliary(%d)", fd);
+ Mutex::Autolock lock(mLock);
+ if (mRecorder == NULL) {
+ LOGE("recorder is not initialized");
+ return NO_INIT;
+ }
+ return mRecorder->setOutputFileAuxiliary(fd);
+}
+
status_t MediaRecorderClient::setVideoSize(int width, int height)
{
LOGV("setVideoSize(%dx%d)", width, height);
@@ -293,22 +300,7 @@
{
LOGV("Client constructor");
mPid = pid;
-
- char value[PROPERTY_VALUE_MAX];
- if (!property_get("media.stagefright.enable-record", value, NULL)
- || !strcmp(value, "1") || !strcasecmp(value, "true")) {
- mRecorder = new StagefrightRecorder;
- } else
-#ifndef NO_OPENCORE
- {
- mRecorder = new PVMediaRecorder();
- }
-#else
- {
- mRecorder = NULL;
- }
-#endif
-
+ mRecorder = new StagefrightRecorder;
mMediaPlayerService = service;
}
@@ -337,4 +329,3 @@
}
}; // namespace android
-
diff --git a/media/libmediaplayerservice/MediaRecorderClient.h b/media/libmediaplayerservice/MediaRecorderClient.h
index 1d1913d..fded98e 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.h
+++ b/media/libmediaplayerservice/MediaRecorderClient.h
@@ -29,7 +29,7 @@
{
public:
virtual status_t setCamera(const sp<ICamera>& camera);
- virtual status_t setPreviewSurface(const sp<ISurface>& surface);
+ virtual status_t setPreviewSurface(const sp<Surface>& surface);
virtual status_t setVideoSource(int vs);
virtual status_t setAudioSource(int as);
virtual status_t setOutputFormat(int of);
@@ -37,6 +37,7 @@
virtual status_t setAudioEncoder(int ae);
virtual status_t setOutputFile(const char* path);
virtual status_t setOutputFile(int fd, int64_t offset, int64_t length);
+ virtual status_t setOutputFileAuxiliary(int fd);
virtual status_t setVideoSize(int width, int height);
virtual status_t setVideoFrameRate(int frames_per_second);
virtual status_t setParameters(const String8& params);
@@ -66,4 +67,3 @@
}; // namespace android
#endif // ANDROID_MEDIARECORDERCLIENT_H
-
diff --git a/media/libmediaplayerservice/MetadataRetrieverClient.cpp b/media/libmediaplayerservice/MetadataRetrieverClient.cpp
index 39fce81..b069345 100644
--- a/media/libmediaplayerservice/MetadataRetrieverClient.cpp
+++ b/media/libmediaplayerservice/MetadataRetrieverClient.cpp
@@ -35,7 +35,6 @@
#include <binder/IServiceManager.h>
#include <media/MediaMetadataRetrieverInterface.h>
#include <media/MediaPlayerInterface.h>
-#include <media/PVMetadataRetriever.h>
#include <private/media/VideoFrame.h>
#include "MidiMetadataRetriever.h"
#include "MetadataRetrieverClient.h"
@@ -107,12 +106,6 @@
p = new StagefrightMetadataRetriever;
break;
}
-#ifndef NO_OPENCORE
- case PV_PLAYER:
- LOGV("create pv metadata retriever");
- p = new PVMetadataRetriever();
- break;
-#endif
case SONIVOX_PLAYER:
LOGV("create midi metadata retriever");
p = new MidiMetadataRetriever();
diff --git a/media/libmediaplayerservice/MidiFile.h b/media/libmediaplayerservice/MidiFile.h
index 4a60ece..aa8f3f0 100644
--- a/media/libmediaplayerservice/MidiFile.h
+++ b/media/libmediaplayerservice/MidiFile.h
@@ -35,7 +35,7 @@
const char* path, const KeyedVector<String8, String8> *headers);
virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
- virtual status_t setVideoSurface(const sp<ISurface>& surface) { return UNKNOWN_ERROR; }
+ virtual status_t setVideoSurface(const sp<Surface>& surface) { return UNKNOWN_ERROR; }
virtual status_t prepare();
virtual status_t prepareAsync();
virtual status_t start();
diff --git a/media/libmediaplayerservice/StagefrightPlayer.cpp b/media/libmediaplayerservice/StagefrightPlayer.cpp
index 6bded09..da564dc 100644
--- a/media/libmediaplayerservice/StagefrightPlayer.cpp
+++ b/media/libmediaplayerservice/StagefrightPlayer.cpp
@@ -44,10 +44,14 @@
return mPlayer->setDataSource(dup(fd), offset, length);
}
-status_t StagefrightPlayer::setVideoSurface(const sp<ISurface> &surface) {
+status_t StagefrightPlayer::setDataSource(const sp<IStreamSource> &source) {
+ return mPlayer->setDataSource(source);
+}
+
+status_t StagefrightPlayer::setVideoSurface(const sp<Surface> &surface) {
LOGV("setVideoSurface");
- mPlayer->setISurface(surface);
+ mPlayer->setSurface(surface);
return OK;
}
@@ -140,16 +144,6 @@
return STAGEFRIGHT_PLAYER;
}
-status_t StagefrightPlayer::suspend() {
- LOGV("suspend");
- return mPlayer->suspend();
-}
-
-status_t StagefrightPlayer::resume() {
- LOGV("resume");
- return mPlayer->resume();
-}
-
status_t StagefrightPlayer::invoke(const Parcel &request, Parcel *reply) {
return INVALID_OPERATION;
}
diff --git a/media/libmediaplayerservice/StagefrightPlayer.h b/media/libmediaplayerservice/StagefrightPlayer.h
index 781eb44..fc72bfb 100644
--- a/media/libmediaplayerservice/StagefrightPlayer.h
+++ b/media/libmediaplayerservice/StagefrightPlayer.h
@@ -35,7 +35,10 @@
const char *url, const KeyedVector<String8, String8> *headers);
virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
- virtual status_t setVideoSurface(const sp<ISurface> &surface);
+
+ virtual status_t setDataSource(const sp<IStreamSource> &source);
+
+ virtual status_t setVideoSurface(const sp<Surface> &surface);
virtual status_t prepare();
virtual status_t prepareAsync();
virtual status_t start();
@@ -50,8 +53,6 @@
virtual player_type playerType();
virtual status_t invoke(const Parcel &request, Parcel *reply);
virtual void setAudioSink(const sp<AudioSink> &audioSink);
- virtual status_t suspend();
- virtual status_t resume();
virtual status_t getMetadata(
const media::Metadata::Filter& ids, Parcel *records);
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 553648d..ec3b5a2 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -20,10 +20,12 @@
#include "StagefrightRecorder.h"
-#include <binder/IPCThreadState.h>
#include <media/stagefright/AudioSource.h>
#include <media/stagefright/AMRWriter.h>
#include <media/stagefright/CameraSource.h>
+#include <media/stagefright/VideoSourceDownSampler.h>
+#include <media/stagefright/CameraSourceTimeLapse.h>
+#include <media/stagefright/MediaSourceSplitter.h>
#include <media/stagefright/MPEG2TSWriter.h>
#include <media/stagefright/MPEG4Writer.h>
#include <media/stagefright/MediaDebug.h>
@@ -33,21 +35,20 @@
#include <media/stagefright/OMXCodec.h>
#include <media/MediaProfiles.h>
#include <camera/ICamera.h>
-#include <camera/Camera.h>
#include <camera/CameraParameters.h>
-#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/Surface.h>
#include <utils/Errors.h>
#include <sys/types.h>
-#include <unistd.h>
#include <ctype.h>
+#include <unistd.h>
#include "ARTPWriter.h"
namespace android {
StagefrightRecorder::StagefrightRecorder()
- : mWriter(NULL),
- mOutputFd(-1) {
+ : mWriter(NULL), mWriterAux(NULL),
+ mOutputFd(-1), mOutputFdAux(-1) {
LOGV("Constructor");
reset();
@@ -164,7 +165,8 @@
status_t StagefrightRecorder::setVideoFrameRate(int frames_per_second) {
LOGV("setVideoFrameRate: %d", frames_per_second);
- if (frames_per_second <= 0 || frames_per_second > 30) {
+ if ((frames_per_second <= 0 && frames_per_second != -1) ||
+ frames_per_second > 120) {
LOGE("Invalid video frame rate: %d", frames_per_second);
return BAD_VALUE;
}
@@ -182,26 +184,11 @@
return BAD_VALUE;
}
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
- mFlags &= ~FLAGS_HOT_CAMERA;
- mCamera = Camera::create(camera);
- if (mCamera == 0) {
- LOGE("Unable to connect to camera");
- IPCThreadState::self()->restoreCallingIdentity(token);
- return -EBUSY;
- }
-
- LOGV("Connected to camera");
- if (mCamera->previewEnabled()) {
- LOGV("camera is hot");
- mFlags |= FLAGS_HOT_CAMERA;
- }
- IPCThreadState::self()->restoreCallingIdentity(token);
-
+ mCamera = camera;
return OK;
}
-status_t StagefrightRecorder::setPreviewSurface(const sp<ISurface> &surface) {
+status_t StagefrightRecorder::setPreviewSurface(const sp<Surface> &surface) {
LOGV("setPreviewSurface: %p", surface.get());
mPreviewSurface = surface;
@@ -235,6 +222,24 @@
return OK;
}
+status_t StagefrightRecorder::setOutputFileAuxiliary(int fd) {
+ LOGV("setOutputFileAuxiliary: %d", fd);
+
+ if (fd < 0) {
+ LOGE("Invalid file descriptor: %d", fd);
+ return -EBADF;
+ }
+
+ mCaptureAuxVideo = true;
+
+ if (mOutputFdAux >= 0) {
+ ::close(mOutputFdAux);
+ }
+ mOutputFdAux = dup(fd);
+
+ return OK;
+}
+
// Attempt to parse an int64 literal optionally surrounded by whitespace,
// returns true on success, false otherwise.
static bool safe_strtoi64(const char *s, int64_t *val) {
@@ -485,6 +490,68 @@
return OK;
}
+status_t StagefrightRecorder::setParamTimeLapseEnable(int32_t timeLapseEnable) {
+ LOGV("setParamTimeLapseEnable: %d", timeLapseEnable);
+
+ if(timeLapseEnable == 0) {
+ mCaptureTimeLapse = false;
+ } else if (timeLapseEnable == 1) {
+ mCaptureTimeLapse = true;
+ } else {
+ return BAD_VALUE;
+ }
+ return OK;
+}
+
+status_t StagefrightRecorder::setParamTimeBetweenTimeLapseFrameCapture(int64_t timeUs) {
+ LOGV("setParamTimeBetweenTimeLapseFrameCapture: %lld us", timeUs);
+
+ // Not allowing time more than a day
+ if (timeUs <= 0 || timeUs > 86400*1E6) {
+ LOGE("Time between time lapse frame capture (%lld) is out of range [0, 1 Day]", timeUs);
+ return BAD_VALUE;
+ }
+
+ mTimeBetweenTimeLapseFrameCaptureUs = timeUs;
+ return OK;
+}
+
+status_t StagefrightRecorder::setParamAuxVideoWidth(int32_t width) {
+ LOGV("setParamAuxVideoWidth : %d", width);
+
+ if (width <= 0) {
+ LOGE("Width (%d) is not positive", width);
+ return BAD_VALUE;
+ }
+
+ mAuxVideoWidth = width;
+ return OK;
+}
+
+status_t StagefrightRecorder::setParamAuxVideoHeight(int32_t height) {
+ LOGV("setParamAuxVideoHeight : %d", height);
+
+ if (height <= 0) {
+ LOGE("Height (%d) is not positive", height);
+ return BAD_VALUE;
+ }
+
+ mAuxVideoHeight = height;
+ return OK;
+}
+
+status_t StagefrightRecorder::setParamAuxVideoEncodingBitRate(int32_t bitRate) {
+ LOGV("StagefrightRecorder::setParamAuxVideoEncodingBitRate: %d", bitRate);
+
+ if (bitRate <= 0) {
+ LOGE("Invalid video encoding bit rate: %d", bitRate);
+ return BAD_VALUE;
+ }
+
+ mAuxVideoBitRate = bitRate;
+ return OK;
+}
+
status_t StagefrightRecorder::setParameter(
const String8 &key, const String8 &value) {
LOGV("setParameter: key (%s) => value (%s)", key.string(), value.string());
@@ -573,6 +640,32 @@
if (safe_strtoi32(value.string(), &timeScale)) {
return setParamVideoTimeScale(timeScale);
}
+ } else if (key == "time-lapse-enable") {
+ int32_t timeLapseEnable;
+ if (safe_strtoi32(value.string(), &timeLapseEnable)) {
+ return setParamTimeLapseEnable(timeLapseEnable);
+ }
+ } else if (key == "time-between-time-lapse-frame-capture") {
+ int64_t timeBetweenTimeLapseFrameCaptureMs;
+ if (safe_strtoi64(value.string(), &timeBetweenTimeLapseFrameCaptureMs)) {
+ return setParamTimeBetweenTimeLapseFrameCapture(
+ 1000LL * timeBetweenTimeLapseFrameCaptureMs);
+ }
+ } else if (key == "video-aux-param-width") {
+ int32_t auxWidth;
+ if (safe_strtoi32(value.string(), &auxWidth)) {
+ return setParamAuxVideoWidth(auxWidth);
+ }
+ } else if (key == "video-aux-param-height") {
+ int32_t auxHeight;
+ if (safe_strtoi32(value.string(), &auxHeight)) {
+ return setParamAuxVideoHeight(auxHeight);
+ }
+ } else if (key == "video-aux-param-encoding-bitrate") {
+ int32_t auxVideoBitRate;
+ if (safe_strtoi32(value.string(), &auxVideoBitRate)) {
+ return setParamAuxVideoEncodingBitRate(auxVideoBitRate);
+ }
} else {
LOGE("setParameter: failed to find key %s", key.string());
}
@@ -771,7 +864,7 @@
return UNKNOWN_ERROR;
}
- mWriter = new AMRWriter(dup(mOutputFd));
+ mWriter = new AMRWriter(mOutputFd);
mWriter->addSource(audioEncoder);
if (mMaxFileDurationUs != 0) {
@@ -806,13 +899,20 @@
if (mAudioSource != AUDIO_SOURCE_LIST_END) {
source = createAudioSource();
} else {
- status_t err = setupVideoEncoder(&source);
+
+ sp<CameraSource> cameraSource;
+ status_t err = setupCameraSource(&cameraSource);
+ if (err != OK) {
+ return err;
+ }
+
+ err = setupVideoEncoder(cameraSource, mVideoBitRate, &source);
if (err != OK) {
return err;
}
}
- mWriter = new ARTPWriter(dup(mOutputFd));
+ mWriter = new ARTPWriter(mOutputFd);
mWriter->addSource(source);
mWriter->setListener(mListener);
@@ -822,7 +922,7 @@
status_t StagefrightRecorder::startMPEG2TSRecording() {
CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_MPEG2TS);
- sp<MediaWriter> writer = new MPEG2TSWriter(dup(mOutputFd));
+ sp<MediaWriter> writer = new MPEG2TSWriter(mOutputFd);
if (mAudioSource != AUDIO_SOURCE_LIST_END) {
if (mAudioEncoder != AUDIO_ENCODER_AAC) {
@@ -842,8 +942,14 @@
return ERROR_UNSUPPORTED;
}
+ sp<CameraSource> cameraSource;
+ status_t err = setupCameraSource(&cameraSource);
+ if (err != OK) {
+ return err;
+ }
+
sp<MediaSource> encoder;
- status_t err = setupVideoEncoder(&encoder);
+ err = setupVideoEncoder(cameraSource, mVideoBitRate, &encoder);
if (err != OK) {
return err;
@@ -871,7 +977,7 @@
"enc.vid.fps.min", mVideoEncoder);
int maxFrameRate = mEncoderProfiles->getVideoEncoderParamByName(
"enc.vid.fps.max", mVideoEncoder);
- if (mFrameRate < minFrameRate) {
+ if (mFrameRate < minFrameRate && mFrameRate != -1) {
LOGW("Intended video encoding frame rate (%d fps) is too small"
" and will be set to (%d fps)", mFrameRate, minFrameRate);
mFrameRate = minFrameRate;
@@ -916,57 +1022,15 @@
}
}
-status_t StagefrightRecorder::setupCameraSource() {
- clipVideoBitRate();
- clipVideoFrameRate();
- clipVideoFrameWidth();
- clipVideoFrameHeight();
-
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
- if (mCamera == 0) {
- mCamera = Camera::connect(mCameraId);
- if (mCamera == 0) {
- LOGE("Camera connection could not be established.");
- return -EBUSY;
- }
- mFlags &= ~FLAGS_HOT_CAMERA;
- mCamera->lock();
+status_t StagefrightRecorder::checkVideoEncoderCapabilities() {
+ if (!mCaptureTimeLapse) {
+ // Don't clip for time-lapse capture, as the encoder has enough
+ // time to encode because of the slow time-lapse capture rate.
+ clipVideoBitRate();
+ clipVideoFrameRate();
+ clipVideoFrameWidth();
+ clipVideoFrameHeight();
}
-
- // Set the actual video recording frame size
- CameraParameters params(mCamera->getParameters());
- params.setPreviewSize(mVideoWidth, mVideoHeight);
- params.setPreviewFrameRate(mFrameRate);
- String8 s = params.flatten();
- if (OK != mCamera->setParameters(s)) {
- LOGE("Could not change settings."
- " Someone else is using camera %d?", mCameraId);
- return -EBUSY;
- }
- CameraParameters newCameraParams(mCamera->getParameters());
-
- // Check on video frame size
- int frameWidth = 0, frameHeight = 0;
- newCameraParams.getPreviewSize(&frameWidth, &frameHeight);
- if (frameWidth < 0 || frameWidth != mVideoWidth ||
- frameHeight < 0 || frameHeight != mVideoHeight) {
- LOGE("Failed to set the video frame size to %dx%d",
- mVideoWidth, mVideoHeight);
- IPCThreadState::self()->restoreCallingIdentity(token);
- return UNKNOWN_ERROR;
- }
-
- // Check on video frame rate
- int frameRate = newCameraParams.getPreviewFrameRate();
- if (frameRate < 0 || (frameRate - mFrameRate) != 0) {
- LOGE("Failed to set frame rate to %d fps. The actual "
- "frame rate is %d", mFrameRate, frameRate);
- }
-
- // This CHECK is good, since we just passed the lock/unlock
- // check earlier by calling mCamera->setParameters().
- CHECK_EQ(OK, mCamera->setPreviewDisplay(mPreviewSurface));
- IPCThreadState::self()->restoreCallingIdentity(token);
return OK;
}
@@ -987,18 +1051,56 @@
}
}
-status_t StagefrightRecorder::setupVideoEncoder(sp<MediaSource> *source) {
+status_t StagefrightRecorder::setupCameraSource(
+ sp<CameraSource> *cameraSource) {
+ status_t err = OK;
+ if ((err = checkVideoEncoderCapabilities()) != OK) {
+ return err;
+ }
+ Size videoSize;
+ videoSize.width = mVideoWidth;
+ videoSize.height = mVideoHeight;
+ if (mCaptureTimeLapse) {
+ mCameraSourceTimeLapse = CameraSourceTimeLapse::CreateFromCamera(
+ mCamera, mCameraId,
+ videoSize, mFrameRate, mPreviewSurface,
+ mTimeBetweenTimeLapseFrameCaptureUs);
+ *cameraSource = mCameraSourceTimeLapse;
+ } else {
+ *cameraSource = CameraSource::CreateFromCamera(
+ mCamera, mCameraId, videoSize, mFrameRate,
+ mPreviewSurface, true /*storeMetaDataInVideoBuffers*/);
+ }
+ CHECK(*cameraSource != NULL);
+
+ // When frame rate is not set, the actual frame rate will be set to
+ // the current frame rate being used.
+ if (mFrameRate == -1) {
+ int32_t frameRate = 0;
+ CHECK ((*cameraSource)->getFormat()->findInt32(
+ kKeyFrameRate, &frameRate));
+ LOGI("Frame rate is not explicitly set. Use the current frame "
+ "rate (%d fps)", frameRate);
+ mFrameRate = frameRate;
+ }
+
+ CHECK(mFrameRate != -1);
+
+ mIsMetaDataStoredInVideoBuffers =
+ (*cameraSource)->isMetaDataStoredInVideoBuffers();
+
+ return OK;
+}
+
+status_t StagefrightRecorder::setupVideoEncoder(
+ sp<MediaSource> cameraSource,
+ int32_t videoBitRate,
+ sp<MediaSource> *source) {
source->clear();
- status_t err = setupCameraSource();
- if (err != OK) return err;
-
- sp<CameraSource> cameraSource = CameraSource::CreateFromCamera(mCamera);
- CHECK(cameraSource != NULL);
-
sp<MetaData> enc_meta = new MetaData;
- enc_meta->setInt32(kKeyBitRate, mVideoBitRate);
- enc_meta->setInt32(kKeySampleRate, mFrameRate);
+ enc_meta->setInt32(kKeyBitRate, videoBitRate);
+ enc_meta->setInt32(kKeyFrameRate, mFrameRate);
switch (mVideoEncoder) {
case VIDEO_ENCODER_H263:
@@ -1041,14 +1143,27 @@
}
if (mVideoEncoderLevel != -1) {
enc_meta->setInt32(kKeyVideoLevel, mVideoEncoderLevel);
+ } else if (mCaptureTimeLapse) {
+ // Check if we are using high resolution and/or high bitrate and
+ // set appropriate level for the software AVCEncoder.
+ if ((width * height >= 921600) // 720p
+ || (videoBitRate >= 20000000)) {
+ enc_meta->setInt32(kKeyVideoLevel, 50);
+ }
}
OMXClient client;
CHECK_EQ(client.connect(), OK);
+ // Use software codec for time lapse
+ uint32_t encoder_flags = (mCaptureTimeLapse) ? OMXCodec::kPreferSoftwareCodecs : 0;
+ if (mIsMetaDataStoredInVideoBuffers) {
+ encoder_flags |= OMXCodec::kStoreMetaDataInVideoBuffers;
+ }
sp<MediaSource> encoder = OMXCodec::Create(
client.interface(), enc_meta,
- true /* createEncoder */, cameraSource);
+ true /* createEncoder */, cameraSource,
+ NULL, encoder_flags);
if (encoder == NULL) {
return UNKNOWN_ERROR;
}
@@ -1079,54 +1194,150 @@
return OK;
}
-status_t StagefrightRecorder::startMPEG4Recording() {
- int32_t totalBitRate = 0;
+status_t StagefrightRecorder::setupMPEG4Recording(
+ bool useSplitCameraSource,
+ int outputFd,
+ int32_t videoWidth, int32_t videoHeight,
+ int32_t videoBitRate,
+ int32_t *totalBitRate,
+ sp<MediaWriter> *mediaWriter) {
+ mediaWriter->clear();
+ *totalBitRate = 0;
status_t err = OK;
- sp<MediaWriter> writer = new MPEG4Writer(dup(mOutputFd));
+ sp<MediaWriter> writer = new MPEG4Writer(outputFd);
// Add audio source first if it exists
- if (mAudioSource != AUDIO_SOURCE_LIST_END) {
+ if (!mCaptureTimeLapse && (mAudioSource != AUDIO_SOURCE_LIST_END)) {
err = setupAudioEncoder(writer);
if (err != OK) return err;
- totalBitRate += mAudioBitRate;
+ *totalBitRate += mAudioBitRate;
}
if (mVideoSource == VIDEO_SOURCE_DEFAULT
|| mVideoSource == VIDEO_SOURCE_CAMERA) {
+
+ sp<MediaSource> cameraMediaSource;
+ if (useSplitCameraSource) {
+ LOGV("Using Split camera source");
+ cameraMediaSource = mCameraSourceSplitter->createClient();
+ } else {
+ sp<CameraSource> cameraSource;
+ err = setupCameraSource(&cameraSource);
+ cameraMediaSource = cameraSource;
+ }
+ if ((videoWidth != mVideoWidth) || (videoHeight != mVideoHeight)) {
+ // Use downsampling from the original source.
+ cameraMediaSource =
+ new VideoSourceDownSampler(cameraMediaSource, videoWidth, videoHeight);
+ }
+ if (err != OK) {
+ return err;
+ }
+
sp<MediaSource> encoder;
- err = setupVideoEncoder(&encoder);
- if (err != OK) return err;
+ err = setupVideoEncoder(cameraMediaSource, videoBitRate, &encoder);
+ if (err != OK) {
+ return err;
+ }
+
writer->addSource(encoder);
- totalBitRate += mVideoBitRate;
+ *totalBitRate += videoBitRate;
}
if (mInterleaveDurationUs > 0) {
reinterpret_cast<MPEG4Writer *>(writer.get())->
setInterleaveDuration(mInterleaveDurationUs);
}
-
if (mMaxFileDurationUs != 0) {
writer->setMaxFileDuration(mMaxFileDurationUs);
}
if (mMaxFileSizeBytes != 0) {
writer->setMaxFileSize(mMaxFileSizeBytes);
}
- sp<MetaData> meta = new MetaData;
- meta->setInt64(kKeyTime, systemTime() / 1000);
- meta->setInt32(kKeyFileType, mOutputFormat);
- meta->setInt32(kKeyBitRate, totalBitRate);
- meta->setInt32(kKey64BitFileOffset, mUse64BitFileOffset);
+
+ writer->setListener(mListener);
+ *mediaWriter = writer;
+ return OK;
+}
+
+void StagefrightRecorder::setupMPEG4MetaData(int64_t startTimeUs, int32_t totalBitRate,
+ sp<MetaData> *meta) {
+ (*meta)->setInt64(kKeyTime, startTimeUs);
+ (*meta)->setInt32(kKeyFileType, mOutputFormat);
+ (*meta)->setInt32(kKeyBitRate, totalBitRate);
+ (*meta)->setInt32(kKey64BitFileOffset, mUse64BitFileOffset);
if (mMovieTimeScale > 0) {
- meta->setInt32(kKeyTimeScale, mMovieTimeScale);
+ (*meta)->setInt32(kKeyTimeScale, mMovieTimeScale);
}
if (mTrackEveryTimeDurationUs > 0) {
- meta->setInt64(kKeyTrackTimeStatus, mTrackEveryTimeDurationUs);
+ (*meta)->setInt64(kKeyTrackTimeStatus, mTrackEveryTimeDurationUs);
}
if (mRotationDegrees != 0) {
- meta->setInt32(kKeyRotationDegree, mRotationDegrees);
+ (*meta)->setInt32(kKeyRotation, mRotationDegrees);
}
- writer->setListener(mListener);
- mWriter = writer;
- return mWriter->start(meta.get());
+}
+
+status_t StagefrightRecorder::startMPEG4Recording() {
+ if (mCaptureAuxVideo) {
+ if (!mCaptureTimeLapse) {
+ LOGE("Auxiliary video can be captured only in time lapse mode");
+ return UNKNOWN_ERROR;
+ }
+ LOGV("Creating MediaSourceSplitter");
+ sp<CameraSource> cameraSource;
+ status_t err = setupCameraSource(&cameraSource);
+ if (err != OK) {
+ return err;
+ }
+ mCameraSourceSplitter = new MediaSourceSplitter(cameraSource);
+ } else {
+ mCameraSourceSplitter = NULL;
+ }
+
+ int32_t totalBitRate;
+ status_t err = setupMPEG4Recording(mCaptureAuxVideo,
+ mOutputFd, mVideoWidth, mVideoHeight,
+ mVideoBitRate, &totalBitRate, &mWriter);
+ if (err != OK) {
+ return err;
+ }
+
+ int64_t startTimeUs = systemTime() / 1000;
+ sp<MetaData> meta = new MetaData;
+ setupMPEG4MetaData(startTimeUs, totalBitRate, &meta);
+
+ err = mWriter->start(meta.get());
+ if (err != OK) {
+ return err;
+ }
+
+ if (mCaptureAuxVideo) {
+ CHECK(mOutputFdAux >= 0);
+ if (mWriterAux != NULL) {
+ LOGE("Auxiliary File writer is not avaialble");
+ return UNKNOWN_ERROR;
+ }
+ if ((mAuxVideoWidth > mVideoWidth) || (mAuxVideoHeight > mVideoHeight) ||
+ ((mAuxVideoWidth == mVideoWidth) && mAuxVideoHeight == mVideoHeight)) {
+ LOGE("Auxiliary video size (%d x %d) same or larger than the main video size (%d x %d)",
+ mAuxVideoWidth, mAuxVideoHeight, mVideoWidth, mVideoHeight);
+ return UNKNOWN_ERROR;
+ }
+
+ int32_t totalBitrateAux;
+ err = setupMPEG4Recording(mCaptureAuxVideo,
+ mOutputFdAux, mAuxVideoWidth, mAuxVideoHeight,
+ mAuxVideoBitRate, &totalBitrateAux, &mWriterAux);
+ if (err != OK) {
+ return err;
+ }
+
+ sp<MetaData> metaAux = new MetaData;
+ setupMPEG4MetaData(startTimeUs, totalBitrateAux, &metaAux);
+
+ return mWriterAux->start(metaAux.get());
+ }
+
+ return OK;
}
status_t StagefrightRecorder::pause() {
@@ -1135,35 +1346,50 @@
return UNKNOWN_ERROR;
}
mWriter->pause();
+
+ if (mCaptureAuxVideo) {
+ if (mWriterAux == NULL) {
+ return UNKNOWN_ERROR;
+ }
+ mWriterAux->pause();
+ }
+
return OK;
}
status_t StagefrightRecorder::stop() {
LOGV("stop");
status_t err = OK;
+
+ if (mCaptureTimeLapse && mCameraSourceTimeLapse != NULL) {
+ mCameraSourceTimeLapse->startQuickReadReturns();
+ mCameraSourceTimeLapse = NULL;
+ }
+
+ if (mCaptureAuxVideo) {
+ if (mWriterAux != NULL) {
+ mWriterAux->stop();
+ mWriterAux.clear();
+ }
+ }
+
if (mWriter != NULL) {
err = mWriter->stop();
mWriter.clear();
}
- if (mCamera != 0) {
- LOGV("Disconnect camera");
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
- if ((mFlags & FLAGS_HOT_CAMERA) == 0) {
- LOGV("Camera was cold when we started, stopping preview");
- mCamera->stopPreview();
- }
- mCamera->unlock();
- mCamera.clear();
- IPCThreadState::self()->restoreCallingIdentity(token);
- mFlags = 0;
- }
-
if (mOutputFd >= 0) {
::close(mOutputFd);
mOutputFd = -1;
}
+ if (mCaptureAuxVideo) {
+ if (mOutputFdAux >= 0) {
+ ::close(mOutputFdAux);
+ mOutputFdAux = -1;
+ }
+ }
+
return err;
}
@@ -1188,8 +1414,11 @@
mVideoEncoder = VIDEO_ENCODER_H263;
mVideoWidth = 176;
mVideoHeight = 144;
- mFrameRate = 20;
+ mAuxVideoWidth = 176;
+ mAuxVideoHeight = 144;
+ mFrameRate = -1;
mVideoBitRate = 192000;
+ mAuxVideoBitRate = 192000;
mSampleRate = 8000;
mAudioChannels = 1;
mAudioBitRate = 12200;
@@ -1206,11 +1435,17 @@
mMaxFileDurationUs = 0;
mMaxFileSizeBytes = 0;
mTrackEveryTimeDurationUs = 0;
- mRotationDegrees = 0;
+ mCaptureTimeLapse = false;
+ mTimeBetweenTimeLapseFrameCaptureUs = -1;
+ mCaptureAuxVideo = false;
+ mCameraSourceSplitter = NULL;
+ mCameraSourceTimeLapse = NULL;
+ mIsMetaDataStoredInVideoBuffers = false;
mEncoderProfiles = MediaProfiles::getInstance();
+ mRotationDegrees = 0;
mOutputFd = -1;
- mFlags = 0;
+ mOutputFdAux = -1;
return OK;
}
@@ -1247,6 +1482,8 @@
snprintf(buffer, SIZE, " Recorder: %p\n", this);
snprintf(buffer, SIZE, " Output file (fd %d):\n", mOutputFd);
result.append(buffer);
+ snprintf(buffer, SIZE, " Output file Auxiliary (fd %d):\n", mOutputFdAux);
+ result.append(buffer);
snprintf(buffer, SIZE, " File format: %d\n", mOutputFormat);
result.append(buffer);
snprintf(buffer, SIZE, " Max file size (bytes): %lld\n", mMaxFileSizeBytes);
@@ -1279,8 +1516,6 @@
result.append(buffer);
snprintf(buffer, SIZE, " Camera Id: %d\n", mCameraId);
result.append(buffer);
- snprintf(buffer, SIZE, " Camera flags: %d\n", mFlags);
- result.append(buffer);
snprintf(buffer, SIZE, " Encoder: %d\n", mVideoEncoder);
result.append(buffer);
snprintf(buffer, SIZE, " Encoder profile: %d\n", mVideoEncoderProfile);
@@ -1291,10 +1526,14 @@
result.append(buffer);
snprintf(buffer, SIZE, " Frame size (pixels): %dx%d\n", mVideoWidth, mVideoHeight);
result.append(buffer);
+ snprintf(buffer, SIZE, " Aux Frame size (pixels): %dx%d\n", mAuxVideoWidth, mAuxVideoHeight);
+ result.append(buffer);
snprintf(buffer, SIZE, " Frame rate (fps): %d\n", mFrameRate);
result.append(buffer);
snprintf(buffer, SIZE, " Bit rate (bps): %d\n", mVideoBitRate);
result.append(buffer);
+ snprintf(buffer, SIZE, " Aux Bit rate (bps): %d\n", mAuxVideoBitRate);
+ result.append(buffer);
::write(fd, result.string(), result.size());
return OK;
}
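
When auxiliary capture is enabled, startMPEG4Recording() builds one time-lapse camera source, splits it with a MediaSourceSplitter, and feeds the auxiliary writer a downsampled copy. The wiring, condensed into a standalone sketch (not part of this change, with illustrative sizes and rates):

    #include <camera/CameraParameters.h>      // for Size
    #include <camera/ICamera.h>
    #include <media/stagefright/CameraSource.h>
    #include <media/stagefright/CameraSourceTimeLapse.h>
    #include <media/stagefright/MediaSource.h>
    #include <media/stagefright/MediaSourceSplitter.h>
    #include <media/stagefright/VideoSourceDownSampler.h>
    #include <surfaceflinger/Surface.h>

    using namespace android;

    // Produces a full-size source and a QCIF source from a single camera.
    static void buildAuxTopology(
            const sp<ICamera> &camera, int32_t cameraId,
            const sp<Surface> &previewSurface,
            sp<MediaSource> *mainSource, sp<MediaSource> *auxSource) {
        Size videoSize;
        videoSize.width = 720;
        videoSize.height = 480;

        // Grabs one frame per second and timestamps it for normal-rate playback.
        sp<CameraSource> source = CameraSourceTimeLapse::CreateFromCamera(
                camera, cameraId, videoSize, 30 /* fps */, previewSurface,
                1000000LL /* us between captures */);

        sp<MediaSourceSplitter> splitter = new MediaSourceSplitter(source);

        *mainSource = splitter->createClient();               // encoded at full size
        *auxSource = new VideoSourceDownSampler(               // scaled-down copy
                splitter->createClient(), 176, 144);
    }
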
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index e42df2e..36a15a8 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -19,13 +19,18 @@
#define STAGEFRIGHT_RECORDER_H_
#include <media/MediaRecorderBase.h>
+#include <camera/CameraParameters.h>
#include <utils/String8.h>
namespace android {
class Camera;
+class CameraSource;
+class CameraSourceTimeLapse;
+class MediaSourceSplitter;
struct MediaSource;
struct MediaWriter;
+class MetaData;
struct AudioSource;
class MediaProfiles;
@@ -42,9 +47,10 @@
virtual status_t setVideoSize(int width, int height);
virtual status_t setVideoFrameRate(int frames_per_second);
virtual status_t setCamera(const sp<ICamera>& camera);
- virtual status_t setPreviewSurface(const sp<ISurface>& surface);
+ virtual status_t setPreviewSurface(const sp<Surface>& surface);
virtual status_t setOutputFile(const char *path);
virtual status_t setOutputFile(int fd, int64_t offset, int64_t length);
+ virtual status_t setOutputFileAuxiliary(int fd);
virtual status_t setParameters(const String8& params);
virtual status_t setListener(const sp<IMediaRecorderClient>& listener);
virtual status_t prepare();
@@ -57,15 +63,10 @@
virtual status_t dump(int fd, const Vector<String16>& args) const;
private:
- enum CameraFlags {
- FLAGS_SET_CAMERA = 1L << 0,
- FLAGS_HOT_CAMERA = 1L << 1,
- };
-
- sp<Camera> mCamera;
- sp<ISurface> mPreviewSurface;
+ sp<ICamera> mCamera;
+ sp<Surface> mPreviewSurface;
sp<IMediaRecorderClient> mListener;
- sp<MediaWriter> mWriter;
+ sp<MediaWriter> mWriter, mWriterAux;
sp<AudioSource> mAudioSourceNode;
audio_source mAudioSource;
@@ -75,8 +76,9 @@
video_encoder mVideoEncoder;
bool mUse64BitFileOffset;
int32_t mVideoWidth, mVideoHeight;
+ int32_t mAuxVideoWidth, mAuxVideoHeight;
int32_t mFrameRate;
- int32_t mVideoBitRate;
+ int32_t mVideoBitRate, mAuxVideoBitRate;
int32_t mAudioBitRate;
int32_t mAudioChannels;
int32_t mSampleRate;
@@ -93,21 +95,40 @@
int64_t mTrackEveryTimeDurationUs;
int32_t mRotationDegrees; // Clockwise
- String8 mParams;
- int mOutputFd;
- int32_t mFlags;
+ bool mCaptureTimeLapse;
+ int64_t mTimeBetweenTimeLapseFrameCaptureUs;
+ bool mCaptureAuxVideo;
+ sp<MediaSourceSplitter> mCameraSourceSplitter;
+ sp<CameraSourceTimeLapse> mCameraSourceTimeLapse;
+ String8 mParams;
+ int mOutputFd, mOutputFdAux;
+
+ bool mIsMetaDataStoredInVideoBuffers;
MediaProfiles *mEncoderProfiles;
+ status_t setupMPEG4Recording(
+ bool useSplitCameraSource,
+ int outputFd,
+ int32_t videoWidth, int32_t videoHeight,
+ int32_t videoBitRate,
+ int32_t *totalBitRate,
+ sp<MediaWriter> *mediaWriter);
+ void setupMPEG4MetaData(int64_t startTimeUs, int32_t totalBitRate,
+ sp<MetaData> *meta);
status_t startMPEG4Recording();
status_t startAMRRecording();
status_t startAACRecording();
status_t startRTPRecording();
status_t startMPEG2TSRecording();
sp<MediaSource> createAudioSource();
- status_t setupCameraSource();
+ status_t checkVideoEncoderCapabilities();
+ status_t setupCameraSource(sp<CameraSource> *cameraSource);
status_t setupAudioEncoder(const sp<MediaWriter>& writer);
- status_t setupVideoEncoder(sp<MediaSource> *source);
+ status_t setupVideoEncoder(
+ sp<MediaSource> cameraSource,
+ int32_t videoBitRate,
+ sp<MediaSource> *source);
// Encoding parameter handling utilities
status_t setParameter(const String8 &key, const String8 &value);
@@ -115,6 +136,11 @@
status_t setParamAudioNumberOfChannels(int32_t channles);
status_t setParamAudioSamplingRate(int32_t sampleRate);
status_t setParamAudioTimeScale(int32_t timeScale);
+ status_t setParamTimeLapseEnable(int32_t timeLapseEnable);
+ status_t setParamTimeBetweenTimeLapseFrameCapture(int64_t timeUs);
+ status_t setParamAuxVideoHeight(int32_t height);
+ status_t setParamAuxVideoWidth(int32_t width);
+ status_t setParamAuxVideoEncodingBitRate(int32_t bitRate);
status_t setParamVideoEncodingBitRate(int32_t bitRate);
status_t setParamVideoIFramesInterval(int32_t seconds);
status_t setParamVideoEncoderProfile(int32_t profile);
@@ -140,4 +166,3 @@
} // namespace android
#endif // STAGEFRIGHT_RECORDER_H_
-
diff --git a/media/libmediaplayerservice/TestPlayerStub.h b/media/libmediaplayerservice/TestPlayerStub.h
index 6e6c3cd..6abd8e3 100644
--- a/media/libmediaplayerservice/TestPlayerStub.h
+++ b/media/libmediaplayerservice/TestPlayerStub.h
@@ -75,7 +75,7 @@
// All the methods below wrap the mPlayer instance.
- virtual status_t setVideoSurface(const android::sp<android::ISurface>& s) {
+ virtual status_t setVideoSurface(const android::sp<android::Surface>& s) {
return mPlayer->setVideoSurface(s);
}
virtual status_t prepare() {return mPlayer->prepare();}
diff --git a/media/libstagefright/AMRExtractor.cpp b/media/libstagefright/AMRExtractor.cpp
index 1b05528..ac87c29 100644
--- a/media/libstagefright/AMRExtractor.cpp
+++ b/media/libstagefright/AMRExtractor.cpp
@@ -55,7 +55,7 @@
size_t mFrameSize;
bool mIsWide;
- off_t mOffset;
+ off64_t mOffset;
int64_t mCurrentTimeUs;
bool mStarted;
MediaBufferGroup *mGroup;
@@ -115,9 +115,9 @@
mFrameSize = getFrameSize(mIsWide, FT);
- off_t streamSize;
+ off64_t streamSize;
if (mDataSource->getSize(&streamSize) == OK) {
- off_t numFrames = streamSize / mFrameSize;
+ off64_t numFrames = streamSize / mFrameSize;
mMeta->setInt64(kKeyDuration, 20000ll * numFrames);
}
diff --git a/media/libstagefright/AMRWriter.cpp b/media/libstagefright/AMRWriter.cpp
index c0b1abe..0db3d1d 100644
--- a/media/libstagefright/AMRWriter.cpp
+++ b/media/libstagefright/AMRWriter.cpp
@@ -24,20 +24,28 @@
#include <media/mediarecorder.h>
#include <sys/prctl.h>
#include <sys/resource.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
namespace android {
AMRWriter::AMRWriter(const char *filename)
- : mFile(fopen(filename, "wb")),
- mInitCheck(mFile != NULL ? OK : NO_INIT),
+ : mFd(-1),
+ mInitCheck(NO_INIT),
mStarted(false),
mPaused(false),
mResumed(false) {
+
+ mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC | O_WRONLY, S_IRUSR | S_IWUSR);
+ if (mFd >= 0) {
+ mInitCheck = OK;
+ }
}
AMRWriter::AMRWriter(int fd)
- : mFile(fdopen(fd, "wb")),
- mInitCheck(mFile != NULL ? OK : NO_INIT),
+ : mFd(dup(fd)),
+ mInitCheck(mFd < 0? NO_INIT: OK),
mStarted(false),
mPaused(false),
mResumed(false) {
@@ -48,9 +56,9 @@
stop();
}
- if (mFile != NULL) {
- fclose(mFile);
- mFile = NULL;
+ if (mFd != -1) {
+ close(mFd);
+ mFd = -1;
}
}
@@ -90,8 +98,8 @@
mSource = source;
const char *kHeader = isWide ? "#!AMR-WB\n" : "#!AMR\n";
- size_t n = strlen(kHeader);
- if (fwrite(kHeader, 1, n, mFile) != n) {
+ ssize_t n = strlen(kHeader);
+ if (write(mFd, kHeader, n) != n) {
return ERROR_IO;
}
@@ -240,11 +248,9 @@
notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_DURATION_REACHED, 0);
break;
}
- ssize_t n = fwrite(
- (const uint8_t *)buffer->data() + buffer->range_offset(),
- 1,
- buffer->range_length(),
- mFile);
+ ssize_t n = write(mFd,
+ (const uint8_t *)buffer->data() + buffer->range_offset(),
+ buffer->range_length());
if (n < (ssize_t)buffer->range_length()) {
buffer->release();
@@ -266,9 +272,8 @@
notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_COMPLETION_STATUS, UNKNOWN_ERROR);
}
- fflush(mFile);
- fclose(mFile);
- mFile = NULL;
+ close(mFd);
+ mFd = -1;
mReachedEOS = true;
if (err == ERROR_END_OF_STREAM) {
return OK;
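
Note that the fd-based path treats any short write() as a hard I/O error, which is generally fine for regular files. If the writer were ever pointed at a pipe or socket, a small retry loop (hypothetical helper, not part of this change) would restore the semantics the old buffered fwrite() provided:

    #include <errno.h>
    #include <stdint.h>
    #include <sys/types.h>
    #include <unistd.h>

    // Writes the whole buffer, retrying after short writes and EINTR.
    static ssize_t writeFully(int fd, const void *data, size_t size) {
        const uint8_t *p = static_cast<const uint8_t *>(data);
        size_t remaining = size;
        while (remaining > 0) {
            ssize_t n = write(fd, p, remaining);
            if (n < 0) {
                if (errno == EINTR) {
                    continue;          // interrupted by a signal, try again
                }
                return -1;             // real error
            }
            p += n;
            remaining -= n;
        }
        return static_cast<ssize_t>(size);
    }
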
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index d674547..db23836 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -10,6 +10,8 @@
AudioSource.cpp \
AwesomePlayer.cpp \
CameraSource.cpp \
+ CameraSourceTimeLapse.cpp \
+ VideoSourceDownSampler.cpp \
DataSource.cpp \
DRMExtractor.cpp \
ESDS.cpp \
@@ -25,6 +27,7 @@
MediaDefs.cpp \
MediaExtractor.cpp \
MediaSource.cpp \
+ MediaSourceSplitter.cpp \
MetaData.cpp \
NuCachedSource2.cpp \
NuHTTPDataSource.cpp \
@@ -41,9 +44,11 @@
TimeSource.cpp \
TimedEventQueue.cpp \
Utils.cpp \
+ VBRISeeker.cpp \
WAVExtractor.cpp \
+ WVMExtractor.cpp \
+ XINGSeeker.cpp \
avc_utils.cpp \
- string.cpp
LOCAL_C_INCLUDES:= \
$(JNI_H_INCLUDE) \
@@ -60,10 +65,13 @@
libsonivox \
libvorbisidec \
libsurfaceflinger_client \
+ libstagefright_yuv \
libcamera_client \
- libdrmframework
+ libdrmframework \
+ libcrypto
LOCAL_STATIC_LIBRARIES := \
+ libstagefright_color_conversion \
libstagefright_aacdec \
libstagefright_aacenc \
libstagefright_amrnbdec \
@@ -90,7 +98,6 @@
libstagefright_enc_common \
libstagefright_avc_common \
libstagefright_foundation \
- libstagefright_color_conversion
ifeq ($(TARGET_OS)-$(TARGET_SIMULATOR),linux-true)
LOCAL_LDLIBS += -lpthread -ldl
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp
index 44d12e5..a804866 100644
--- a/media/libstagefright/AwesomePlayer.cpp
+++ b/media/libstagefright/AwesomePlayer.cpp
@@ -34,21 +34,26 @@
#include "UDPPusher.h"
#include <binder/IPCThreadState.h>
+#include <binder/MemoryDealer.h>
+#include <media/IStreamSource.h>
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/AudioPlayer.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaExtractor.h>
-#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXCodec.h>
-#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/Surface.h>
#include <media/stagefright/foundation/ALooper.h>
+#define USE_SURFACE_ALLOC 1
+
namespace android {
static int64_t kLowWaterMarkUs = 2000000ll; // 2secs
@@ -77,49 +82,10 @@
AwesomeEvent &operator=(const AwesomeEvent &);
};
-struct AwesomeRemoteRenderer : public AwesomeRenderer {
- AwesomeRemoteRenderer(const sp<IOMXRenderer> &target)
- : mTarget(target) {
- }
-
- virtual status_t initCheck() const {
- return OK;
- }
-
- virtual void render(MediaBuffer *buffer) {
- void *id;
- if (buffer->meta_data()->findPointer(kKeyBufferID, &id)) {
- mTarget->render((IOMX::buffer_id)id);
- }
- }
-
-private:
- sp<IOMXRenderer> mTarget;
-
- AwesomeRemoteRenderer(const AwesomeRemoteRenderer &);
- AwesomeRemoteRenderer &operator=(const AwesomeRemoteRenderer &);
-};
-
struct AwesomeLocalRenderer : public AwesomeRenderer {
AwesomeLocalRenderer(
- bool previewOnly,
- const char *componentName,
- OMX_COLOR_FORMATTYPE colorFormat,
- const sp<ISurface> &surface,
- size_t displayWidth, size_t displayHeight,
- size_t decodedWidth, size_t decodedHeight,
- int32_t rotationDegrees)
- : mInitCheck(NO_INIT),
- mTarget(NULL),
- mLibHandle(NULL) {
- mInitCheck = init(previewOnly, componentName,
- colorFormat, surface, displayWidth,
- displayHeight, decodedWidth, decodedHeight,
- rotationDegrees);
- }
-
- virtual status_t initCheck() const {
- return mInitCheck;
+ const sp<Surface> &surface, const sp<MetaData> &meta)
+ : mTarget(new SoftwareRenderer(surface, meta)) {
}
virtual void render(MediaBuffer *buffer) {
@@ -135,107 +101,258 @@
virtual ~AwesomeLocalRenderer() {
delete mTarget;
mTarget = NULL;
-
- if (mLibHandle) {
- dlclose(mLibHandle);
- mLibHandle = NULL;
- }
}
private:
- status_t mInitCheck;
- VideoRenderer *mTarget;
- void *mLibHandle;
-
- status_t init(
- bool previewOnly,
- const char *componentName,
- OMX_COLOR_FORMATTYPE colorFormat,
- const sp<ISurface> &surface,
- size_t displayWidth, size_t displayHeight,
- size_t decodedWidth, size_t decodedHeight,
- int32_t rotationDegrees);
+ SoftwareRenderer *mTarget;
AwesomeLocalRenderer(const AwesomeLocalRenderer &);
AwesomeLocalRenderer &operator=(const AwesomeLocalRenderer &);
};
-status_t AwesomeLocalRenderer::init(
- bool previewOnly,
- const char *componentName,
- OMX_COLOR_FORMATTYPE colorFormat,
- const sp<ISurface> &surface,
- size_t displayWidth, size_t displayHeight,
- size_t decodedWidth, size_t decodedHeight,
- int32_t rotationDegrees) {
- if (!previewOnly) {
- // We will stick to the vanilla software-color-converting renderer
- // for "previewOnly" mode, to avoid unneccessarily switching overlays
- // more often than necessary.
+struct AwesomeNativeWindowRenderer : public AwesomeRenderer {
+ AwesomeNativeWindowRenderer(
+ const sp<ANativeWindow> &nativeWindow,
+ int32_t rotationDegrees)
+ : mNativeWindow(nativeWindow) {
+ applyRotation(rotationDegrees);
+ }
- mLibHandle = dlopen("libstagefrighthw.so", RTLD_NOW);
+ virtual void render(MediaBuffer *buffer) {
+ status_t err = mNativeWindow->queueBuffer(
+ mNativeWindow.get(), buffer->graphicBuffer().get());
+ if (err != 0) {
+ LOGE("queueBuffer failed with error %s (%d)", strerror(-err),
+ -err);
+ return;
+ }
- if (mLibHandle) {
- typedef VideoRenderer *(*CreateRendererWithRotationFunc)(
- const sp<ISurface> &surface,
- const char *componentName,
- OMX_COLOR_FORMATTYPE colorFormat,
- size_t displayWidth, size_t displayHeight,
- size_t decodedWidth, size_t decodedHeight,
- int32_t rotationDegrees);
+ sp<MetaData> metaData = buffer->meta_data();
+ metaData->setInt32(kKeyRendered, 1);
+ }
- typedef VideoRenderer *(*CreateRendererFunc)(
- const sp<ISurface> &surface,
- const char *componentName,
- OMX_COLOR_FORMATTYPE colorFormat,
- size_t displayWidth, size_t displayHeight,
- size_t decodedWidth, size_t decodedHeight);
+protected:
+ virtual ~AwesomeNativeWindowRenderer() {}
- CreateRendererWithRotationFunc funcWithRotation =
- (CreateRendererWithRotationFunc)dlsym(
- mLibHandle,
- "_Z26createRendererWithRotationRKN7android2spINS_8"
- "ISurfaceEEEPKc20OMX_COLOR_FORMATTYPEjjjji");
+private:
+ sp<ANativeWindow> mNativeWindow;
- if (funcWithRotation) {
- mTarget =
- (*funcWithRotation)(
- surface, componentName, colorFormat,
- displayWidth, displayHeight,
- decodedWidth, decodedHeight,
- rotationDegrees);
- } else {
- if (rotationDegrees != 0) {
- LOGW("renderer does not support rotation.");
- }
+ void applyRotation(int32_t rotationDegrees) {
+ uint32_t transform;
+ switch (rotationDegrees) {
+ case 0: transform = 0; break;
+ case 90: transform = HAL_TRANSFORM_ROT_90; break;
+ case 180: transform = HAL_TRANSFORM_ROT_180; break;
+ case 270: transform = HAL_TRANSFORM_ROT_270; break;
+ default: transform = 0; break;
+ }
- CreateRendererFunc func =
- (CreateRendererFunc)dlsym(
- mLibHandle,
- "_Z14createRendererRKN7android2spINS_8ISurfaceEEEPKc20"
- "OMX_COLOR_FORMATTYPEjjjj");
-
- if (func) {
- mTarget =
- (*func)(surface, componentName, colorFormat,
- displayWidth, displayHeight,
- decodedWidth, decodedHeight);
- }
- }
+ if (transform) {
+ CHECK_EQ(0, native_window_set_buffers_transform(
+ mNativeWindow.get(), transform));
}
}
- if (mTarget != NULL) {
- return OK;
+ AwesomeNativeWindowRenderer(const AwesomeNativeWindowRenderer &);
+ AwesomeNativeWindowRenderer &operator=(
+ const AwesomeNativeWindowRenderer &);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct QueueDataSource;
+
+struct QueueListener : public BnStreamListener {
+ QueueListener(QueueDataSource *owner)
+ : mOwner(owner) {
}
- mTarget = new SoftwareRenderer(
- colorFormat, surface, displayWidth, displayHeight,
- decodedWidth, decodedHeight, rotationDegrees);
+ void clearOwner();
- return ((SoftwareRenderer *)mTarget)->initCheck();
+ virtual void queueBuffer(size_t index, size_t size);
+ virtual void queueCommand(Command cmd);
+
+private:
+ Mutex mLock;
+
+ QueueDataSource *mOwner;
+
+ DISALLOW_EVIL_CONSTRUCTORS(QueueListener);
+};
+
+struct QueueDataSource : public DataSource {
+ QueueDataSource(const sp<IStreamSource> &source);
+
+ virtual status_t initCheck() const;
+
+ virtual ssize_t readAt(off64_t offset, void *data, size_t size);
+
+ virtual void queueBuffer(size_t index, size_t size);
+ virtual void queueCommand(IStreamListener::Command cmd);
+
+protected:
+ virtual ~QueueDataSource();
+
+private:
+ enum {
+ kNumBuffers = 16
+ };
+
+ struct BufferInfo {
+ size_t mIndex;
+ size_t mOffset;
+ size_t mSize;
+ };
+
+ Mutex mLock;
+ Condition mCondition;
+
+ sp<IStreamSource> mSource;
+ sp<QueueListener> mListener;
+ sp<MemoryDealer> mDealer;
+ Vector<sp<IMemory> > mBuffers;
+
+ List<BufferInfo> mFilledBuffers;
+
+ off64_t mPosition;
+ bool mEOS;
+
+ DISALLOW_EVIL_CONSTRUCTORS(QueueDataSource);
+};
+
+QueueDataSource::QueueDataSource(const sp<IStreamSource> &source)
+ : mSource(source),
+ mPosition(0),
+ mEOS(false) {
+ mListener = new QueueListener(this);
+ mSource->setListener(mListener);
+
+ static const size_t kBufferSize = 8192;
+
+ mDealer = new MemoryDealer(kNumBuffers * kBufferSize);
+ for (size_t i = 0; i < kNumBuffers; ++i) {
+ sp<IMemory> mem = mDealer->allocate(kBufferSize);
+ CHECK(mem != NULL);
+
+ mBuffers.push(mem);
+ }
+ mSource->setBuffers(mBuffers);
+
+ for (size_t i = 0; i < kNumBuffers; ++i) {
+ mSource->onBufferAvailable(i);
+ }
}
+QueueDataSource::~QueueDataSource() {
+ Mutex::Autolock autoLock(mLock);
+
+ while (mFilledBuffers.size() < kNumBuffers && !mEOS) {
+ mCondition.wait(mLock);
+ }
+
+ mListener->clearOwner();
+}
+
+status_t QueueDataSource::initCheck() const {
+ return OK;
+}
+
+ssize_t QueueDataSource::readAt(off64_t offset, void *data, size_t size) {
+ if (offset != mPosition) {
+ return -EPIPE;
+ }
+
+ Mutex::Autolock autoLock(mLock);
+
+ size_t sizeDone = 0;
+
+ while (sizeDone < size) {
+ while (mFilledBuffers.empty() && !mEOS) {
+ mCondition.wait(mLock);
+ }
+
+ if (mFilledBuffers.empty()) {
+ if (sizeDone > 0) {
+ mPosition += sizeDone;
+ return sizeDone;
+ }
+ return ERROR_END_OF_STREAM;
+ }
+
+ BufferInfo &info = *mFilledBuffers.begin();
+
+ size_t copy = size - sizeDone;
+ if (copy > info.mSize) {
+ copy = info.mSize;
+ }
+
+ memcpy((uint8_t *)data + sizeDone,
+ (const uint8_t *)mBuffers.itemAt(info.mIndex)->pointer()
+ + info.mOffset,
+ copy);
+
+ info.mSize -= copy;
+ info.mOffset += copy;
+ sizeDone += copy;
+
+ if (info.mSize == 0) {
+ mSource->onBufferAvailable(info.mIndex);
+ mFilledBuffers.erase(mFilledBuffers.begin());
+ }
+ }
+
+ mPosition += sizeDone;
+
+ return sizeDone;
+}
+
+void QueueDataSource::queueBuffer(size_t index, size_t size) {
+ Mutex::Autolock autoLock(mLock);
+
+ CHECK_LT(index, mBuffers.size());
+ CHECK_LE(size, mBuffers.itemAt(index)->size());
+
+ BufferInfo info;
+ info.mIndex = index;
+ info.mSize = size;
+ info.mOffset = 0;
+
+ mFilledBuffers.push_back(info);
+ mCondition.signal();
+}
+
+void QueueDataSource::queueCommand(IStreamListener::Command cmd) {
+ Mutex::Autolock autoLock(mLock);
+
+ if (cmd == IStreamListener::EOS) {
+ mEOS = true;
+ mCondition.signal();
+ }
+}
+
+void QueueListener::clearOwner() {
+ Mutex::Autolock autoLock(mLock);
+ mOwner = NULL;
+}
+
+void QueueListener::queueBuffer(size_t index, size_t size) {
+ Mutex::Autolock autoLock(mLock);
+ if (mOwner == NULL) {
+ return;
+ }
+ mOwner->queueBuffer(index, size);
+}
+
+void QueueListener::queueCommand(Command cmd) {
+ Mutex::Autolock autoLock(mLock);
+ if (mOwner == NULL) {
+ return;
+ }
+ mOwner->queueCommand(cmd);
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
AwesomePlayer::AwesomePlayer()
: mQueueStarted(false),
mTimeSource(NULL),
@@ -243,11 +360,9 @@
mAudioPlayer(NULL),
mFlags(0),
mExtractorFlags(0),
- mLastVideoBuffer(NULL),
mVideoBuffer(NULL),
- mSuspensionState(NULL),
mDecryptHandle(NULL) {
- CHECK_EQ(mClient.connect(), OK);
+ CHECK_EQ(mClient.connect(), (status_t)OK);
DataSource::RegisterDefaultSniffers();
@@ -307,6 +422,16 @@
mUri = uri;
+ if (!strncmp("http://", uri, 7)) {
+ // Hack to support http live.
+
+ size_t len = strlen(uri);
+ if (!strcasecmp(&uri[len - 5], ".m3u8")) {
+ mUri = "httplive://";
+ mUri.append(&uri[7]);
+ }
+ }
+
if (headers) {
mUriHeaders = *headers;
}
@@ -337,6 +462,26 @@
return setDataSource_l(dataSource);
}
+status_t AwesomePlayer::setDataSource(const sp<IStreamSource> &source) {
+ Mutex::Autolock autoLock(mLock);
+
+ reset_l();
+
+ sp<DataSource> dataSource = new QueueDataSource(source);
+
+#if 0
+ sp<MediaExtractor> extractor =
+ MediaExtractor::Create(dataSource, MEDIA_MIMETYPE_CONTAINER_MPEG2TS);
+
+ return setDataSource_l(extractor);
+#else
+ sp<NuCachedSource2> cached = new NuCachedSource2(dataSource);
+ dataSource = cached;
+
+ return setDataSource_l(dataSource);
+#endif
+}
+
status_t AwesomePlayer::setDataSource_l(
const sp<DataSource> &dataSource) {
sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
@@ -429,6 +574,7 @@
if (mDecryptHandle != NULL) {
mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
Playback::STOP, 0);
+ mDrmManagerClient->closeDecryptSession(mDecryptHandle);
mDecryptHandle = NULL;
mDrmManagerClient = NULL;
}
@@ -439,6 +585,12 @@
LOGI("interrupting the connection process");
mConnectingDataSource->disconnect();
}
+
+ if (mFlags & PREPARING_CONNECTED) {
+ // We are basically done preparing; we're just buffering
+ // enough data to start playback, so we can safely interrupt that.
+ finishAsyncPrepare_l();
+ }
}
while (mFlags & PREPARING) {
@@ -471,11 +623,6 @@
mVideoRenderer.clear();
- if (mLastVideoBuffer) {
- mLastVideoBuffer->release();
- mLastVideoBuffer = NULL;
- }
-
if (mVideoBuffer) {
mVideoBuffer->release();
mVideoBuffer = NULL;
@@ -507,7 +654,6 @@
mDurationUs = -1;
mFlags = 0;
mExtractorFlags = 0;
- mVideoWidth = mVideoHeight = -1;
mTimeSourceDeltaUs = 0;
mVideoTimeUs = 0;
@@ -520,9 +666,6 @@
mFileSource.clear();
- delete mSuspensionState;
- mSuspensionState = NULL;
-
mBitrate = -1;
}
@@ -537,7 +680,7 @@
}
bool AwesomePlayer::getBitrate(int64_t *bitrate) {
- off_t size;
+ off64_t size;
if (mDurationUs >= 0 && mCachedSource != NULL
&& mCachedSource->getSize(&size) == OK) {
*bitrate = size * 8000000ll / mDurationUs; // in bits/sec
@@ -570,6 +713,12 @@
return false;
}
+void AwesomePlayer::ensureCacheIsFetching_l() {
+ if (mCachedSource != NULL) {
+ mCachedSource->resumeFetchingIfNecessary();
+ }
+}
+
void AwesomePlayer::onBufferingUpdate() {
Mutex::Autolock autoLock(mLock);
if (!mBufferingEventPending) {
@@ -612,6 +761,7 @@
kLowWaterMarkBytes);
mFlags |= CACHE_UNDERRUN;
pause_l();
+ ensureCacheIsFetching_l();
notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_START);
} else if (eos || cachedDataRemaining > kHighWaterMarkBytes) {
if (mFlags & CACHE_UNDERRUN) {
@@ -633,12 +783,16 @@
int64_t cachedDurationUs;
bool eos;
if (getCachedDuration_l(&cachedDurationUs, &eos)) {
+ LOGV("cachedDurationUs = %.2f secs, eos=%d",
+ cachedDurationUs / 1E6, eos);
+
if ((mFlags & PLAYING) && !eos
&& (cachedDurationUs < kLowWaterMarkUs)) {
LOGI("cache is running low (%.2f secs) , pausing.",
cachedDurationUs / 1E6);
mFlags |= CACHE_UNDERRUN;
pause_l();
+ ensureCacheIsFetching_l();
notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_START);
} else if (eos || cachedDurationUs > kHighWaterMarkUs) {
if (mFlags & CACHE_UNDERRUN) {
@@ -664,11 +818,6 @@
mVideoRenderer.clear();
- if (mLastVideoBuffer) {
- mLastVideoBuffer->release();
- mLastVideoBuffer = NULL;
- }
-
if (mVideoBuffer) {
mVideoBuffer->release();
mVideoBuffer = NULL;
@@ -688,7 +837,8 @@
IPCThreadState::self()->flushCommands();
}
- CHECK_EQ(OK, initVideoDecoder(OMXCodec::kIgnoreCodecSpecificData));
+ CHECK_EQ((status_t)OK,
+ initVideoDecoder(OMXCodec::kIgnoreCodecSpecificData));
}
void AwesomePlayer::onStreamDone() {
@@ -844,9 +994,47 @@
return OK;
}
-status_t AwesomePlayer::initRenderer_l() {
- if (mISurface == NULL) {
- return OK;
+void AwesomePlayer::notifyVideoSize_l() {
+ sp<MetaData> meta = mVideoSource->getFormat();
+
+ int32_t cropLeft, cropTop, cropRight, cropBottom;
+ if (!meta->findRect(
+ kKeyCropRect, &cropLeft, &cropTop, &cropRight, &cropBottom)) {
+ int32_t width, height;
+ CHECK(meta->findInt32(kKeyWidth, &width));
+ CHECK(meta->findInt32(kKeyHeight, &height));
+
+ cropLeft = cropTop = 0;
+ cropRight = width - 1;
+ cropBottom = height - 1;
+
+ LOGV("got dimensions only %d x %d", width, height);
+ } else {
+ LOGV("got crop rect %d, %d, %d, %d",
+ cropLeft, cropTop, cropRight, cropBottom);
+ }
+
+ int32_t usableWidth = cropRight - cropLeft + 1;
+ int32_t usableHeight = cropBottom - cropTop + 1;
+
+ int32_t rotationDegrees;
+ if (!mVideoTrack->getFormat()->findInt32(
+ kKeyRotation, &rotationDegrees)) {
+ rotationDegrees = 0;
+ }
+
+ if (rotationDegrees == 90 || rotationDegrees == 270) {
+ notifyListener_l(
+ MEDIA_SET_VIDEO_SIZE, usableHeight, usableWidth);
+ } else {
+ notifyListener_l(
+ MEDIA_SET_VIDEO_SIZE, usableWidth, usableHeight);
+ }
+}
+
+void AwesomePlayer::initRenderer_l() {
+ if (mSurface == NULL) {
+ return;
}
sp<MetaData> meta = mVideoSource->getFormat();
@@ -871,37 +1059,19 @@
// before creating a new one.
IPCThreadState::self()->flushCommands();
- if (!strncmp("OMX.", component, 4)) {
- // Our OMX codecs allocate buffers on the media_server side
- // therefore they require a remote IOMXRenderer that knows how
- // to display them.
-
- sp<IOMXRenderer> native =
- mClient.interface()->createRenderer(
- mISurface, component,
- (OMX_COLOR_FORMATTYPE)format,
- decodedWidth, decodedHeight,
- mVideoWidth, mVideoHeight,
- rotationDegrees);
-
- if (native == NULL) {
- return NO_INIT;
- }
-
- mVideoRenderer = new AwesomeRemoteRenderer(native);
+ if (USE_SURFACE_ALLOC && strncmp(component, "OMX.", 4) == 0) {
+ // Hardware decoders avoid the CPU color conversion by decoding
+ // directly to ANativeBuffers, so we must use a renderer that
+ // just pushes those buffers to the ANativeWindow.
+ mVideoRenderer =
+ new AwesomeNativeWindowRenderer(mSurface, rotationDegrees);
} else {
// Other decoders are instantiated locally and as a consequence
- // allocate their buffers in local address space.
- mVideoRenderer = new AwesomeLocalRenderer(
- false, // previewOnly
- component,
- (OMX_COLOR_FORMATTYPE)format,
- mISurface,
- mVideoWidth, mVideoHeight,
- decodedWidth, decodedHeight, rotationDegrees);
+ // allocate their buffers in local address space. This renderer
+ // then performs a color conversion and copy to get the data
+ // into the ANativeBuffer.
+ mVideoRenderer = new AwesomeLocalRenderer(mSurface, meta);
}
-
- return mVideoRenderer->initCheck();
}
status_t AwesomePlayer::pause() {
@@ -944,10 +1114,10 @@
return (mFlags & PLAYING) || (mFlags & CACHE_UNDERRUN);
}
-void AwesomePlayer::setISurface(const sp<ISurface> &isurface) {
+void AwesomePlayer::setSurface(const sp<Surface> &surface) {
Mutex::Autolock autoLock(mLock);
- mISurface = isurface;
+ mSurface = surface;
}
void AwesomePlayer::setAudioSink(
@@ -1064,20 +1234,6 @@
}
}
-status_t AwesomePlayer::getVideoDimensions(
- int32_t *width, int32_t *height) const {
- Mutex::Autolock autoLock(mLock);
-
- if (mVideoWidth < 0 || mVideoHeight < 0) {
- return UNKNOWN_ERROR;
- }
-
- *width = mVideoWidth;
- *height = mVideoHeight;
-
- return OK;
-}
-
void AwesomePlayer::setAudioSource(sp<MediaSource> source) {
CHECK(source != NULL);
@@ -1135,7 +1291,7 @@
mClient.interface(), mVideoTrack->getFormat(),
false, // createEncoder
mVideoTrack,
- NULL, flags);
+ NULL, flags, USE_SURFACE_ALLOC ? mSurface : NULL);
if (mVideoSource != NULL) {
int64_t durationUs;
@@ -1146,9 +1302,6 @@
}
}
- CHECK(mVideoTrack->getFormat()->findInt32(kKeyWidth, &mVideoWidth));
- CHECK(mVideoTrack->getFormat()->findInt32(kKeyHeight, &mVideoHeight));
-
status_t err = mVideoSource->start();
if (err != OK) {
@@ -1166,7 +1319,7 @@
}
if (mAudioPlayer != NULL) {
- LOGV("seeking audio to %lld us (%.2f secs).", timeUs, timeUs / 1E6);
+ LOGV("seeking audio to %lld us (%.2f secs).", videoTimeUs, videoTimeUs / 1E6);
// If we don't have a video time, seek audio to the originally
// requested seek time instead.
@@ -1184,6 +1337,13 @@
mFlags |= FIRST_FRAME;
mSeeking = false;
mSeekNotificationSent = false;
+
+ if (mDecryptHandle != NULL) {
+ mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
+ Playback::PAUSE, 0);
+ mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
+ Playback::START, videoTimeUs / 1000);
+ }
}
void AwesomePlayer::onVideoEvent() {
@@ -1196,11 +1356,6 @@
mVideoEventPending = false;
if (mSeeking) {
- if (mLastVideoBuffer) {
- mLastVideoBuffer->release();
- mLastVideoBuffer = NULL;
- }
-
if (mVideoBuffer) {
mVideoBuffer->release();
mVideoBuffer = NULL;
@@ -1235,21 +1390,18 @@
options.clearSeekTo();
if (err != OK) {
- CHECK_EQ(mVideoBuffer, NULL);
+ CHECK(mVideoBuffer == NULL);
if (err == INFO_FORMAT_CHANGED) {
LOGV("VideoSource signalled format change.");
+ notifyVideoSize_l();
+
if (mVideoRenderer != NULL) {
mVideoRendererIsPreview = false;
- err = initRenderer_l();
-
- if (err == OK) {
- continue;
- }
-
- // fall through
+ initRenderer_l();
}
+ continue;
}
// So video playback is complete, but we may still have
@@ -1291,13 +1443,6 @@
TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource;
- if (mDecryptHandle != NULL) {
- mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
- Playback::PAUSE, 0);
- mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
- Playback::START, timeUs / 1000);
- }
-
if (mFlags & FIRST_FRAME) {
mFlags &= ~FIRST_FRAME;
@@ -1346,26 +1491,14 @@
if (mVideoRendererIsPreview || mVideoRenderer == NULL) {
mVideoRendererIsPreview = false;
- status_t err = initRenderer_l();
-
- if (err != OK) {
- finishSeekIfNecessary(-1);
-
- mFlags |= VIDEO_AT_EOS;
- postStreamDoneEvent_l(err);
- return;
- }
+ initRenderer_l();
}
if (mVideoRenderer != NULL) {
mVideoRenderer->render(mVideoBuffer);
}
- if (mLastVideoBuffer) {
- mLastVideoBuffer->release();
- mLastVideoBuffer = NULL;
- }
- mLastVideoBuffer = mVideoBuffer;
+ mVideoBuffer->release();
mVideoBuffer = NULL;
postVideoEvent_l();
@@ -1682,9 +1815,15 @@
}
dataSource->getDrmInfo(&mDecryptHandle, &mDrmManagerClient);
- if (mDecryptHandle != NULL
- && RightsStatus::RIGHTS_VALID != mDecryptHandle->status) {
- notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, ERROR_NO_LICENSE);
+ if (mDecryptHandle != NULL) {
+ if (RightsStatus::RIGHTS_VALID == mDecryptHandle->status) {
+ if (DecryptApiType::WV_BASED == mDecryptHandle->decryptApiType) {
+ LOGD("Setting mCachedSource to NULL for WVM\n");
+ mCachedSource.clear();
+ }
+ } else {
+ notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, ERROR_NO_LICENSE);
+ }
}
return setDataSource_l(extractor);
@@ -1698,7 +1837,7 @@
}
mPrepareResult = err;
- mFlags &= ~(PREPARING|PREPARE_CANCELLED);
+ mFlags &= ~(PREPARING|PREPARE_CANCELLED|PREPARING_CONNECTED);
mAsyncPrepareEvent = NULL;
mPreparedCondition.broadcast();
}
@@ -1746,6 +1885,8 @@
}
}
+ mFlags |= PREPARING_CONNECTED;
+
if (mCachedSource != NULL || mRTSPController != NULL) {
postBufferingEvent_l();
} else {
@@ -1755,175 +1896,22 @@
void AwesomePlayer::finishAsyncPrepare_l() {
if (mIsAsyncPrepare) {
- if (mVideoWidth < 0 || mVideoHeight < 0) {
+ if (mVideoSource == NULL) {
notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0);
} else {
- int32_t rotationDegrees;
- if (!mVideoTrack->getFormat()->findInt32(
- kKeyRotation, &rotationDegrees)) {
- rotationDegrees = 0;
- }
-
-#if 1
- if (rotationDegrees == 90 || rotationDegrees == 270) {
- notifyListener_l(
- MEDIA_SET_VIDEO_SIZE, mVideoHeight, mVideoWidth);
- } else
-#endif
- {
- notifyListener_l(
- MEDIA_SET_VIDEO_SIZE, mVideoWidth, mVideoHeight);
- }
+ notifyVideoSize_l();
}
notifyListener_l(MEDIA_PREPARED);
}
mPrepareResult = OK;
- mFlags &= ~(PREPARING|PREPARE_CANCELLED);
+ mFlags &= ~(PREPARING|PREPARE_CANCELLED|PREPARING_CONNECTED);
mFlags |= PREPARED;
mAsyncPrepareEvent = NULL;
mPreparedCondition.broadcast();
}
-status_t AwesomePlayer::suspend() {
- LOGV("suspend");
- Mutex::Autolock autoLock(mLock);
-
- if (mSuspensionState != NULL) {
- if (mLastVideoBuffer == NULL) {
- //go into here if video is suspended again
- //after resuming without being played between
- //them
- SuspensionState *state = mSuspensionState;
- mSuspensionState = NULL;
- reset_l();
- mSuspensionState = state;
- return OK;
- }
-
- delete mSuspensionState;
- mSuspensionState = NULL;
- }
-
- if (mFlags & PREPARING) {
- mFlags |= PREPARE_CANCELLED;
- if (mConnectingDataSource != NULL) {
- LOGI("interrupting the connection process");
- mConnectingDataSource->disconnect();
- }
- }
-
- while (mFlags & PREPARING) {
- mPreparedCondition.wait(mLock);
- }
-
- SuspensionState *state = new SuspensionState;
- state->mUri = mUri;
- state->mUriHeaders = mUriHeaders;
- state->mFileSource = mFileSource;
-
- state->mFlags = mFlags & (PLAYING | AUTO_LOOPING | LOOPING | AT_EOS);
- getPosition(&state->mPositionUs);
-
- if (mLastVideoBuffer) {
- size_t size = mLastVideoBuffer->range_length();
-
- if (size) {
- int32_t unreadable;
- if (!mLastVideoBuffer->meta_data()->findInt32(
- kKeyIsUnreadable, &unreadable)
- || unreadable == 0) {
- state->mLastVideoFrameSize = size;
- state->mLastVideoFrame = malloc(size);
- memcpy(state->mLastVideoFrame,
- (const uint8_t *)mLastVideoBuffer->data()
- + mLastVideoBuffer->range_offset(),
- size);
-
- state->mVideoWidth = mVideoWidth;
- state->mVideoHeight = mVideoHeight;
-
- sp<MetaData> meta = mVideoSource->getFormat();
- CHECK(meta->findInt32(kKeyColorFormat, &state->mColorFormat));
- CHECK(meta->findInt32(kKeyWidth, &state->mDecodedWidth));
- CHECK(meta->findInt32(kKeyHeight, &state->mDecodedHeight));
- } else {
- LOGV("Unable to save last video frame, we have no access to "
- "the decoded video data.");
- }
- }
- }
-
- reset_l();
-
- mSuspensionState = state;
-
- return OK;
-}
-
-status_t AwesomePlayer::resume() {
- LOGV("resume");
- Mutex::Autolock autoLock(mLock);
-
- if (mSuspensionState == NULL) {
- return INVALID_OPERATION;
- }
-
- SuspensionState *state = mSuspensionState;
- mSuspensionState = NULL;
-
- status_t err;
- if (state->mFileSource != NULL) {
- err = setDataSource_l(state->mFileSource);
-
- if (err == OK) {
- mFileSource = state->mFileSource;
- }
- } else {
- err = setDataSource_l(state->mUri, &state->mUriHeaders);
- }
-
- if (err != OK) {
- delete state;
- state = NULL;
-
- return err;
- }
-
- seekTo_l(state->mPositionUs);
-
- mFlags = state->mFlags & (AUTO_LOOPING | LOOPING | AT_EOS);
-
- if (state->mLastVideoFrame && mISurface != NULL) {
- mVideoRenderer =
- new AwesomeLocalRenderer(
- true, // previewOnly
- "",
- (OMX_COLOR_FORMATTYPE)state->mColorFormat,
- mISurface,
- state->mVideoWidth,
- state->mVideoHeight,
- state->mDecodedWidth,
- state->mDecodedHeight,
- 0);
-
- mVideoRendererIsPreview = true;
-
- ((AwesomeLocalRenderer *)mVideoRenderer.get())->render(
- state->mLastVideoFrame, state->mLastVideoFrameSize);
- }
-
- if (state->mFlags & PLAYING) {
- play_l();
- }
-
- mSuspensionState = state;
- state = NULL;
-
- return OK;
-}
-
uint32_t AwesomePlayer::flags() const {
return mExtractorFlags;
}
@@ -1937,4 +1925,3 @@
}
} // namespace android
-
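
The QueueDataSource/QueueListener pair added above adapts an IStreamSource that
pushes buffers asynchronously into the pull-style DataSource::readAt() the
extractor expects. Below is a standalone sketch of that hand-off using std::
primitives in place of the Binder types; the class and member names here are
illustrative, not taken from the patch.

#include <algorithm>
#include <condition_variable>
#include <cstring>
#include <deque>
#include <mutex>
#include <vector>

struct FilledBuffer {
    std::vector<char> data;
    size_t offset;              // how much of this buffer has been consumed
};

class BufferQueue {
public:
    // Producer side: hand over a filled buffer.
    void push(std::vector<char> buf) {
        std::lock_guard<std::mutex> lock(mLock);
        mFilled.push_back({std::move(buf), 0});
        mCondition.notify_one();
    }

    // Producer side: no more data will arrive.
    void signalEOS() {
        std::lock_guard<std::mutex> lock(mLock);
        mEOS = true;
        mCondition.notify_one();
    }

    // Consumer side: block until 'size' bytes are copied or EOS is reached;
    // returns the number of bytes actually read.
    size_t read(char *out, size_t size) {
        std::unique_lock<std::mutex> lock(mLock);
        size_t done = 0;
        while (done < size) {
            mCondition.wait(lock, [this] { return !mFilled.empty() || mEOS; });
            if (mFilled.empty()) {
                break;                      // EOS with nothing left buffered
            }
            FilledBuffer &front = mFilled.front();
            size_t avail = front.data.size() - front.offset;
            size_t copy = std::min(size - done, avail);
            memcpy(out + done, front.data.data() + front.offset, copy);
            front.offset += copy;
            done += copy;
            if (front.offset == front.data.size()) {
                mFilled.pop_front();        // buffer fully drained, recycle it
            }
        }
        return done;
    }

private:
    std::mutex mLock;
    std::condition_variable mCondition;
    std::deque<FilledBuffer> mFilled;
    bool mEOS = false;
};

A producer thread calls push() per filled buffer and signalEOS() when done;
read() then mirrors QueueDataSource::readAt(): it blocks until data arrives,
drains buffers in order, recycles each one when empty, and returns a short
count only at end of stream.
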
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 89cb135..d9ff723 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -27,6 +27,7 @@
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
+#include <surfaceflinger/Surface.h>
#include <utils/String8.h>
#include <cutils/properties.h>
@@ -65,6 +66,11 @@
void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr) {
LOGV("postData(%d, ptr:%p, size:%d)",
msgType, dataPtr->pointer(), dataPtr->size());
+
+ sp<CameraSource> source = mSource.promote();
+ if (source.get() != NULL) {
+ source->dataCallback(msgType, dataPtr);
+ }
}
void CameraSourceListener::postDataTimestamp(
@@ -77,6 +83,10 @@
}
static int32_t getColorFormat(const char* colorFormat) {
+ if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
+ return OMX_COLOR_FormatYUV420Planar;
+ }
+
if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
return OMX_COLOR_FormatYUV422SemiPlanar;
}
@@ -99,72 +109,410 @@
CHECK_EQ(0, "Unknown color format");
}
-// static
CameraSource *CameraSource::Create() {
- sp<Camera> camera = Camera::connect(0);
+ Size size;
+ size.width = -1;
+ size.height = -1;
- if (camera.get() == NULL) {
- return NULL;
- }
-
- return new CameraSource(camera);
+ sp<ICamera> camera;
+ return new CameraSource(camera, 0, size, -1, NULL, false);
}
// static
-CameraSource *CameraSource::CreateFromCamera(const sp<Camera> &camera) {
- if (camera.get() == NULL) {
- return NULL;
- }
+CameraSource *CameraSource::CreateFromCamera(
+ const sp<ICamera>& camera,
+ int32_t cameraId,
+ Size videoSize,
+ int32_t frameRate,
+ const sp<Surface>& surface,
+ bool storeMetaDataInVideoBuffers) {
- return new CameraSource(camera);
+ CameraSource *source = new CameraSource(camera, cameraId,
+ videoSize, frameRate, surface,
+ storeMetaDataInVideoBuffers);
+
+ if (source != NULL) {
+ if (source->initCheck() != OK) {
+ delete source;
+ return NULL;
+ }
+ }
+ return source;
}
-CameraSource::CameraSource(const sp<Camera> &camera)
- : mCamera(camera),
- mFirstFrameTimeUs(0),
- mLastFrameTimestampUs(0),
+CameraSource::CameraSource(
+ const sp<ICamera>& camera,
+ int32_t cameraId,
+ Size videoSize,
+ int32_t frameRate,
+ const sp<Surface>& surface,
+ bool storeMetaDataInVideoBuffers)
+ : mCameraFlags(0),
+ mVideoFrameRate(-1),
+ mCamera(0),
+ mSurface(surface),
mNumFramesReceived(0),
+ mLastFrameTimestampUs(0),
+ mStarted(false),
+ mFirstFrameTimeUs(0),
mNumFramesEncoded(0),
mNumFramesDropped(0),
mNumGlitches(0),
mGlitchDurationThresholdUs(200000),
- mCollectStats(false),
- mStarted(false) {
+ mCollectStats(false) {
+ mVideoSize.width = -1;
+ mVideoSize.height = -1;
+
+ mInitCheck = init(camera, cameraId,
+ videoSize, frameRate,
+ storeMetaDataInVideoBuffers);
+}
+
+status_t CameraSource::initCheck() const {
+ return mInitCheck;
+}
+
+status_t CameraSource::isCameraAvailable(
+ const sp<ICamera>& camera, int32_t cameraId) {
+
+ if (camera == 0) {
+ mCamera = Camera::connect(cameraId);
+ mCameraFlags &= ~FLAGS_HOT_CAMERA;
+ } else {
+ mCamera = Camera::create(camera);
+ mCameraFlags |= FLAGS_HOT_CAMERA;
+ }
+
+ // Is camera available?
+ if (mCamera == 0) {
+ LOGE("Camera connection could not be established.");
+ return -EBUSY;
+ }
+ if (!(mCameraFlags & FLAGS_HOT_CAMERA)) {
+ mCamera->lock();
+ }
+ return OK;
+}
+
+
+/*
+ * Check to see whether the requested video width and height is one
+ * of the supported sizes.
+ * @param width the video frame width in pixels
+ * @param height the video frame height in pixels
+ * @param supportedSizes the vector of sizes that we check against
+ * @return true if the dimension (width and height) is supported.
+ */
+static bool isVideoSizeSupported(
+ int32_t width, int32_t height,
+ const Vector<Size>& supportedSizes) {
+
+ LOGV("isVideoSizeSupported");
+ for (size_t i = 0; i < supportedSizes.size(); ++i) {
+ if (width == supportedSizes[i].width &&
+ height == supportedSizes[i].height) {
+ return true;
+ }
+ }
+ return false;
+}
+
+/*
+ * If the preview and video output is separate, we only set the
+ * video size; applications should set the preview size to a proper
+ * value, and the recording framework will not change it. Otherwise,
+ * if the video and preview output is the same, we need to set the
+ * preview size to be the same as the requested video size.
+ *
+ */
+/*
+ * Query the camera to retrieve the supported video frame sizes
+ * and also to see whether CameraParameters::setVideoSize()
+ * is supported or not.
+ * @param params CameraParameters to retrieve the information
+ * @param isSetVideoSizeSupported returns whether method
+ * CameraParameters::setVideoSize() is supported or not.
+ * @param sizes returns the vector of Size objects for the
+ * supported video frame sizes advertised by the camera.
+ */
+static void getSupportedVideoSizes(
+ const CameraParameters& params,
+ bool *isSetVideoSizeSupported,
+ Vector<Size>& sizes) {
+
+ *isSetVideoSizeSupported = true;
+ params.getSupportedVideoSizes(sizes);
+ if (sizes.size() == 0) {
+ LOGD("Camera does not support setVideoSize()");
+ params.getSupportedPreviewSizes(sizes);
+ *isSetVideoSizeSupported = false;
+ }
+}
+
+/*
+ * Check whether the camera has the supported color format
+ * @param params CameraParameters to retrieve the information
+ * @return OK if no error.
+ */
+status_t CameraSource::isCameraColorFormatSupported(
+ const CameraParameters& params) {
+ mColorFormat = getColorFormat(params.get(
+ CameraParameters::KEY_VIDEO_FRAME_FORMAT));
+ if (mColorFormat == -1) {
+ return BAD_VALUE;
+ }
+ return OK;
+}
+
+/*
+ * Configure the camera to use the requested video size
+ * (width and height) and/or frame rate. If both width and
+ * height are -1, configuration on the video size is skipped.
+ * If frameRate is -1, configuration on the frame rate
+ * is skipped. Skipping the configuration allows one to
+ * use the current camera setting without the need to
+ * actually know the specific values (see Create() method).
+ *
+ * @param params the CameraParameters to be configured
+ * @param width the target video frame width in pixels
+ * @param height the target video frame height in pixels
+ * @param frameRate the target frame rate in frames per second.
+ * @return OK if no error.
+ */
+status_t CameraSource::configureCamera(
+ CameraParameters* params,
+ int32_t width, int32_t height,
+ int32_t frameRate) {
+
+ Vector<Size> sizes;
+ bool isSetVideoSizeSupportedByCamera = true;
+ getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
+ bool isCameraParamChanged = false;
+ if (width != -1 && height != -1) {
+ if (!isVideoSizeSupported(width, height, sizes)) {
+ LOGE("Video dimension (%dx%d) is unsupported", width, height);
+ return BAD_VALUE;
+ }
+ if (isSetVideoSizeSupportedByCamera) {
+ params->setVideoSize(width, height);
+ } else {
+ params->setPreviewSize(width, height);
+ }
+ isCameraParamChanged = true;
+ } else if ((width == -1 && height != -1) ||
+ (width != -1 && height == -1)) {
+ // If one and only one of the width and height is -1
+ // we reject such a request.
+ LOGE("Requested video size (%dx%d) is not supported", width, height);
+ return BAD_VALUE;
+ } else { // width == -1 && height == -1
+ // Do not configure the camera.
+ // Use the current width and height value setting from the camera.
+ }
+
+ if (frameRate != -1) {
+ CHECK(frameRate > 0 && frameRate <= 120);
+ const char* supportedFrameRates =
+ params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
+ CHECK(supportedFrameRates != NULL);
+ LOGV("Supported frame rates: %s", supportedFrameRates);
+ char buf[4];
+ snprintf(buf, 4, "%d", frameRate);
+ if (strstr(supportedFrameRates, buf) == NULL) {
+ LOGE("Requested frame rate (%d) is not supported: %s",
+ frameRate, supportedFrameRates);
+ return BAD_VALUE;
+ }
+
+ // The frame rate is supported, set the camera to the requested value.
+ params->setPreviewFrameRate(frameRate);
+ isCameraParamChanged = true;
+ } else { // frameRate == -1
+ // Do not configure the camera.
+ // Use the current frame rate value setting from the camera
+ }
+
+ if (isCameraParamChanged) {
+ // Either frame rate or frame size needs to be changed.
+ String8 s = params->flatten();
+ if (OK != mCamera->setParameters(s)) {
+ LOGE("Could not change settings."
+ " Someone else is using camera %p?", mCamera.get());
+ return -EBUSY;
+ }
+ }
+ return OK;
+}
+
+/*
+ * Check whether the requested video frame size
+ * has been successfully configured or not. If both width and height
+ * are -1, check on the current width and height value setting
+ * is performed.
+ *
+ * @param params CameraParameters to retrieve the information
+ * @param width the target video frame width in pixels to check against
+ * @param height the target video frame height in pixels to check against
+ * @return OK if no error
+ */
+status_t CameraSource::checkVideoSize(
+ const CameraParameters& params,
+ int32_t width, int32_t height) {
+
+ // The actual video size is the same as the preview size
+ // if the camera hal does not support separate video and
+ // preview output. In this case, we retrieve the video
+ // size from preview.
+ int32_t frameWidthActual = -1;
+ int32_t frameHeightActual = -1;
+ Vector<Size> sizes;
+ params.getSupportedVideoSizes(sizes);
+ if (sizes.size() == 0) {
+ // video size is the same as preview size
+ params.getPreviewSize(&frameWidthActual, &frameHeightActual);
+ } else {
+ // video size may not be the same as preview
+ params.getVideoSize(&frameWidthActual, &frameHeightActual);
+ }
+ if (frameWidthActual < 0 || frameHeightActual < 0) {
+ LOGE("Failed to retrieve video frame size (%dx%d)",
+ frameWidthActual, frameHeightActual);
+ return UNKNOWN_ERROR;
+ }
+
+ // Check the actual video frame size against the target/requested
+ // video frame size.
+ if (width != -1 && height != -1) {
+ if (frameWidthActual != width || frameHeightActual != height) {
+ LOGE("Failed to set video frame size to %dx%d. "
+ "The actual video size is %dx%d ", width, height,
+ frameWidthActual, frameHeightActual);
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ // Good now.
+ mVideoSize.width = frameWidthActual;
+ mVideoSize.height = frameHeightActual;
+ return OK;
+}
+
+/*
+ * Check the requested frame rate has been successfully configured or not.
+ * If the target frameRate is -1, check on the current frame rate value
+ * setting is performed.
+ *
+ * @param params CameraParameters to retrieve the information
+ * @param frameRate the target video frame rate to check against
+ * @return OK if no error.
+ */
+status_t CameraSource::checkFrameRate(
+ const CameraParameters& params,
+ int32_t frameRate) {
+
+ int32_t frameRateActual = params.getPreviewFrameRate();
+ if (frameRateActual < 0) {
+ LOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
+ return UNKNOWN_ERROR;
+ }
+
+ // Check the actual video frame rate against the target/requested
+ // video frame rate.
+ if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
+ LOGE("Failed to set preview frame rate to %d fps. The actual "
+ "frame rate is %d", frameRate, frameRateActual);
+ return UNKNOWN_ERROR;
+ }
+
+ // Good now.
+ mVideoFrameRate = frameRateActual;
+ return OK;
+}
+
+/*
+ * Initialize the CameraSource so that it becomes
+ * ready to provide the video input streams as requested.
+ * @param camera the camera object used for the video source
+ * @param cameraId if camera == 0, use camera with this id
+ * as the video source
+ * @param videoSize the target video frame size. If both
+ * width and height in videoSize are -1, use the current
+ * width and height settings of the camera
+ * @param frameRate the target frame rate in frames per second.
+ * if it is -1, use the current camera frame rate setting.
+ * @param storeMetaDataInVideoBuffers request to store meta
+ * data or real YUV data in video buffers. Request to
+ * store meta data in video buffers may not be honored
+ * if the source does not support this feature.
+ *
+ * @return OK if no error.
+ */
+status_t CameraSource::init(
+ const sp<ICamera>& camera,
+ int32_t cameraId,
+ Size videoSize,
+ int32_t frameRate,
+ bool storeMetaDataInVideoBuffers) {
+
+ status_t err = OK;
int64_t token = IPCThreadState::self()->clearCallingIdentity();
- String8 s = mCamera->getParameters();
+
+ if ((err = isCameraAvailable(camera, cameraId)) != OK) {
+ return err;
+ }
+ CameraParameters params(mCamera->getParameters());
+ if ((err = isCameraColorFormatSupported(params)) != OK) {
+ return err;
+ }
+
+ // Set the camera to use the requested video frame size
+ // and/or frame rate.
+ if ((err = configureCamera(&params,
+ videoSize.width, videoSize.height,
+ frameRate))) {
+ return err;
+ }
+
+ // Check on video frame size and frame rate.
+ CameraParameters newCameraParams(mCamera->getParameters());
+ if ((err = checkVideoSize(newCameraParams,
+ videoSize.width, videoSize.height)) != OK) {
+ return err;
+ }
+ if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
+ return err;
+ }
+
+ // This CHECK is good, since we just passed the lock/unlock
+ // check earlier by calling mCamera->setParameters().
+ CHECK_EQ(OK, mCamera->setPreviewDisplay(mSurface));
+
+ mIsMetaDataStoredInVideoBuffers = false;
+ if (storeMetaDataInVideoBuffers &&
+ OK == mCamera->storeMetaDataInBuffers(true)) {
+ mIsMetaDataStoredInVideoBuffers = true;
+ }
+
IPCThreadState::self()->restoreCallingIdentity(token);
- printf("params: \"%s\"\n", s.string());
-
- int32_t width, height, stride, sliceHeight;
- CameraParameters params(s);
- params.getPreviewSize(&width, &height);
-
- // Calculate glitch duraton threshold based on frame rate
- int32_t frameRate = params.getPreviewFrameRate();
- int64_t glitchDurationUs = (1000000LL / frameRate);
+ int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
if (glitchDurationUs > mGlitchDurationThresholdUs) {
mGlitchDurationThresholdUs = glitchDurationUs;
}
- const char *colorFormatStr = params.get(CameraParameters::KEY_VIDEO_FRAME_FORMAT);
- CHECK(colorFormatStr != NULL);
- int32_t colorFormat = getColorFormat(colorFormatStr);
-
// XXX: query camera for the stride and slice height
// when the capability becomes available.
- stride = width;
- sliceHeight = height;
-
mMeta = new MetaData;
- mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
- mMeta->setInt32(kKeyColorFormat, colorFormat);
- mMeta->setInt32(kKeyWidth, width);
- mMeta->setInt32(kKeyHeight, height);
- mMeta->setInt32(kKeyStride, stride);
- mMeta->setInt32(kKeySliceHeight, sliceHeight);
-
+ mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
+ mMeta->setInt32(kKeyColorFormat, mColorFormat);
+ mMeta->setInt32(kKeyWidth, mVideoSize.width);
+ mMeta->setInt32(kKeyHeight, mVideoSize.height);
+ mMeta->setInt32(kKeyStride, mVideoSize.width);
+ mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
+ mMeta->setInt32(kKeyFrameRate, mVideoFrameRate);
+ return OK;
}
CameraSource::~CameraSource() {
@@ -173,8 +521,17 @@
}
}
+void CameraSource::startCameraRecording() {
+ CHECK_EQ(OK, mCamera->startRecording());
+ CHECK(mCamera->recordingEnabled());
+}
+
status_t CameraSource::start(MetaData *meta) {
CHECK(!mStarted);
+ if (mInitCheck != OK) {
+ LOGE("CameraSource is not initialized yet");
+ return mInitCheck;
+ }
char value[PROPERTY_VALUE_MAX];
if (property_get("media.stagefright.record-stats", value, NULL)
@@ -188,15 +545,22 @@
mStartTimeUs = startTimeUs;
}
+ // Call setListener first before calling startCameraRecording()
+ // to avoid recording frames being dropped.
int64_t token = IPCThreadState::self()->clearCallingIdentity();
mCamera->setListener(new CameraSourceListener(this));
- CHECK_EQ(OK, mCamera->startRecording());
+ startCameraRecording();
IPCThreadState::self()->restoreCallingIdentity(token);
mStarted = true;
return OK;
}
+void CameraSource::stopCameraRecording() {
+ mCamera->setListener(NULL);
+ mCamera->stopRecording();
+}
+
status_t CameraSource::stop() {
LOGV("stop");
Mutex::Autolock autoLock(mLock);
@@ -204,15 +568,23 @@
mFrameAvailableCondition.signal();
int64_t token = IPCThreadState::self()->clearCallingIdentity();
- mCamera->setListener(NULL);
- mCamera->stopRecording();
+ stopCameraRecording();
releaseQueuedFrames();
while (!mFramesBeingEncoded.empty()) {
LOGI("Waiting for outstanding frames being encoded: %d",
mFramesBeingEncoded.size());
mFrameCompleteCondition.wait(mLock);
}
- mCamera = NULL;
+
+ LOGV("Disconnect camera");
+ if ((mCameraFlags & FLAGS_HOT_CAMERA) == 0) {
+ LOGV("Camera was cold when we started, stopping preview");
+ mCamera->stopPreview();
+ }
+ mCamera->unlock();
+ mCamera.clear();
+ mCamera = 0;
+ mCameraFlags = 0;
IPCThreadState::self()->restoreCallingIdentity(token);
if (mCollectStats) {
@@ -225,11 +597,15 @@
return OK;
}
+void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
+ mCamera->releaseRecordingFrame(frame);
+}
+
void CameraSource::releaseQueuedFrames() {
List<sp<IMemory> >::iterator it;
while (!mFramesReceived.empty()) {
it = mFramesReceived.begin();
- mCamera->releaseRecordingFrame(*it);
+ releaseRecordingFrame(*it);
mFramesReceived.erase(it);
++mNumFramesDropped;
}
@@ -241,7 +617,7 @@
void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
int64_t token = IPCThreadState::self()->clearCallingIdentity();
- mCamera->releaseRecordingFrame(frame);
+ releaseRecordingFrame(frame);
IPCThreadState::self()->restoreCallingIdentity(token);
}
@@ -251,7 +627,6 @@
for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
it != mFramesBeingEncoded.end(); ++it) {
if ((*it)->pointer() == buffer->data()) {
-
releaseOneRecordingFrame((*it));
mFramesBeingEncoded.erase(it);
++mNumFramesEncoded;
@@ -343,6 +718,13 @@
++mNumGlitches;
}
+ // May need to skip frame or modify timestamp. Currently implemented
+ // by the subclass CameraSourceTimeLapse.
+ if (skipCurrentFrame(timestampUs)) {
+ releaseOneRecordingFrame(data);
+ return;
+ }
+
mLastFrameTimestampUs = timestampUs;
if (mNumFramesReceived == 0) {
mFirstFrameTimeUs = timestampUs;
@@ -367,4 +749,31 @@
mFrameAvailableCondition.signal();
}
+size_t CameraSource::getNumberOfVideoBuffers() const {
+ LOGV("getNumberOfVideoBuffers");
+ size_t nBuffers = 0;
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ if (mInitCheck == OK && mCamera != 0) {
+ nBuffers = mCamera->getNumberOfVideoBuffers();
+ }
+ IPCThreadState::self()->restoreCallingIdentity(token);
+ return nBuffers;
+}
+
+sp<IMemory> CameraSource::getVideoBuffer(size_t index) const {
+ LOGV("getVideoBuffer: %d", index);
+ sp<IMemory> buffer = 0;
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ if (mInitCheck == OK && mCamera != 0) {
+ buffer = mCamera->getVideoBuffer(index);
+ }
+ IPCThreadState::self()->restoreCallingIdentity(token);
+ return buffer;
+}
+
+bool CameraSource::isMetaDataStoredInVideoBuffers() const {
+ LOGV("isMetaDataStoredInVideoBuffers");
+ return mIsMetaDataStoredInVideoBuffers;
+}
+
} // namespace android
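
CameraSource::init() above now follows a configure-then-verify flow: reject a
video size the camera does not advertise, apply it (via setVideoSize() or the
preview size), then read the setting back and fail if it did not stick. Below
is a standalone sketch of that flow; FakeCamera and the helper names are
illustrative stand-ins, not framework API.

#include <cstdio>
#include <vector>

struct Size { int width; int height; };

// True if the requested size appears in the camera's advertised list
// (the same check CameraSource does with isVideoSizeSupported()).
static bool isSupported(const Size &want, const std::vector<Size> &supported) {
    for (const Size &s : supported) {
        if (s.width == want.width && s.height == want.height) {
            return true;
        }
    }
    return false;
}

// Hypothetical stand-in for the camera: the real code goes through
// CameraParameters strings rather than a struct like this.
struct FakeCamera {
    std::vector<Size> supportedVideoSizes;
    Size current;
    bool setVideoSize(const Size &s) { current = s; return true; }
    Size videoSize() const { return current; }
};

static bool configureAndVerify(FakeCamera &cam, const Size &want) {
    if (!isSupported(want, cam.supportedVideoSizes)) {
        fprintf(stderr, "Video dimension (%dx%d) is unsupported\n",
                want.width, want.height);
        return false;
    }
    if (!cam.setVideoSize(want)) {
        return false;
    }
    Size got = cam.videoSize();  // read the setting back, as checkVideoSize() does
    return got.width == want.width && got.height == want.height;
}

int main() {
    FakeCamera cam;
    cam.supportedVideoSizes = {{176, 144}, {320, 240}, {720, 480}, {1280, 720}};
    cam.current = {720, 480};
    Size want = {1280, 720};
    return configureAndVerify(cam, want) ? 0 : 1;
}
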
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
new file mode 100644
index 0000000..6fd1825
--- /dev/null
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -0,0 +1,518 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CameraSourceTimeLapse"
+
+#include <binder/IPCThreadState.h>
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <media/stagefright/CameraSource.h>
+#include <media/stagefright/CameraSourceTimeLapse.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/YUVImage.h>
+#include <media/stagefright/YUVCanvas.h>
+#include <camera/Camera.h>
+#include <camera/CameraParameters.h>
+#include <ui/Rect.h>
+#include <utils/String8.h>
+#include <utils/Vector.h>
+#include "OMX_Video.h"
+#include <limits.h>
+
+namespace android {
+
+// static
+CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(
+ const sp<ICamera> &camera,
+ int32_t cameraId,
+ Size videoSize,
+ int32_t videoFrameRate,
+ const sp<Surface>& surface,
+ int64_t timeBetweenTimeLapseFrameCaptureUs) {
+
+ CameraSourceTimeLapse *source = new
+ CameraSourceTimeLapse(camera, cameraId,
+ videoSize, videoFrameRate, surface,
+ timeBetweenTimeLapseFrameCaptureUs);
+
+ if (source != NULL) {
+ if (source->initCheck() != OK) {
+ delete source;
+ return NULL;
+ }
+ }
+ return source;
+}
+
+CameraSourceTimeLapse::CameraSourceTimeLapse(
+ const sp<ICamera>& camera,
+ int32_t cameraId,
+ Size videoSize,
+ int32_t videoFrameRate,
+ const sp<Surface>& surface,
+ int64_t timeBetweenTimeLapseFrameCaptureUs)
+ : CameraSource(camera, cameraId, videoSize, videoFrameRate, surface, false),
+ mTimeBetweenTimeLapseFrameCaptureUs(timeBetweenTimeLapseFrameCaptureUs),
+ mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
+ mLastTimeLapseFrameRealTimestampUs(0),
+ mSkipCurrentFrame(false) {
+
+ LOGV("starting time lapse mode");
+ mVideoWidth = videoSize.width;
+ mVideoHeight = videoSize.height;
+
+ if (trySettingPreviewSize(videoSize.width, videoSize.height)) {
+ mUseStillCameraForTimeLapse = false;
+ } else {
+ // TODO: Add a check to see that mTimeBetweenTimeLapseFrameCaptureUs is greater
+ // than the fastest rate at which the still camera can take pictures.
+ mUseStillCameraForTimeLapse = true;
+ CHECK(setPictureSizeToClosestSupported(videoSize.width, videoSize.height));
+ mNeedCropping = computeCropRectangleOffset();
+ mMeta->setInt32(kKeyWidth, videoSize.width);
+ mMeta->setInt32(kKeyHeight, videoSize.height);
+ }
+
+ // Initialize quick stop variables.
+ mQuickStop = false;
+ mForceRead = false;
+ mLastReadBufferCopy = NULL;
+ mStopWaitingForIdleCamera = false;
+}
+
+CameraSourceTimeLapse::~CameraSourceTimeLapse() {
+}
+
+void CameraSourceTimeLapse::startQuickReadReturns() {
+ Mutex::Autolock autoLock(mQuickStopLock);
+ LOGV("Enabling quick read returns");
+
+ // Enable quick stop mode.
+ mQuickStop = true;
+
+ if (mUseStillCameraForTimeLapse) {
+ // wake up the thread right away.
+ mTakePictureCondition.signal();
+ } else {
+ // Force dataCallbackTimestamp() coming from the video camera to not skip the
+ // next frame as we want read() to get a frame right away.
+ mForceRead = true;
+ }
+}
+
+bool CameraSourceTimeLapse::trySettingPreviewSize(int32_t width, int32_t height) {
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ String8 s = mCamera->getParameters();
+ IPCThreadState::self()->restoreCallingIdentity(token);
+
+ CameraParameters params(s);
+ Vector<Size> supportedSizes;
+ params.getSupportedPreviewSizes(supportedSizes);
+
+ bool previewSizeSupported = false;
+ for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
+ int32_t pictureWidth = supportedSizes[i].width;
+ int32_t pictureHeight = supportedSizes[i].height;
+
+ if ((pictureWidth == width) && (pictureHeight == height)) {
+ previewSizeSupported = true;
+ }
+ }
+
+ if (previewSizeSupported) {
+ LOGV("Video size (%d, %d) is a supported preview size", width, height);
+ params.setPreviewSize(width, height);
+ CHECK_EQ(OK, mCamera->setParameters(params.flatten()));
+ return true;
+ }
+
+ return false;
+}
+
+bool CameraSourceTimeLapse::setPictureSizeToClosestSupported(int32_t width, int32_t height) {
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ String8 s = mCamera->getParameters();
+ IPCThreadState::self()->restoreCallingIdentity(token);
+
+ CameraParameters params(s);
+ Vector<Size> supportedSizes;
+ params.getSupportedPictureSizes(supportedSizes);
+
+ int32_t minPictureSize = INT_MAX;
+ for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
+ int32_t pictureWidth = supportedSizes[i].width;
+ int32_t pictureHeight = supportedSizes[i].height;
+
+ if ((pictureWidth >= width) && (pictureHeight >= height)) {
+ int32_t pictureSize = pictureWidth*pictureHeight;
+ if (pictureSize < minPictureSize) {
+ minPictureSize = pictureSize;
+ mPictureWidth = pictureWidth;
+ mPictureHeight = pictureHeight;
+ }
+ }
+ }
+ LOGV("Picture size = (%d, %d)", mPictureWidth, mPictureHeight);
+ return (minPictureSize != INT_MAX);
+}
+
+bool CameraSourceTimeLapse::computeCropRectangleOffset() {
+ if ((mPictureWidth == mVideoWidth) && (mPictureHeight == mVideoHeight)) {
+ return false;
+ }
+
+ CHECK((mPictureWidth > mVideoWidth) && (mPictureHeight > mVideoHeight));
+
+ int32_t widthDifference = mPictureWidth - mVideoWidth;
+ int32_t heightDifference = mPictureHeight - mVideoHeight;
+
+ mCropRectStartX = widthDifference/2;
+ mCropRectStartY = heightDifference/2;
+
+ LOGV("setting crop rectangle offset to (%d, %d)", mCropRectStartX, mCropRectStartY);
+
+ return true;
+}
+
+void CameraSourceTimeLapse::signalBufferReturned(MediaBuffer* buffer) {
+ Mutex::Autolock autoLock(mQuickStopLock);
+ if (mQuickStop && (buffer == mLastReadBufferCopy)) {
+ buffer->setObserver(NULL);
+ buffer->release();
+ } else {
+ return CameraSource::signalBufferReturned(buffer);
+ }
+}
+
+void createMediaBufferCopy(const MediaBuffer& sourceBuffer, int64_t frameTime, MediaBuffer **newBuffer) {
+ size_t sourceSize = sourceBuffer.size();
+ void* sourcePointer = sourceBuffer.data();
+
+ (*newBuffer) = new MediaBuffer(sourceSize);
+ memcpy((*newBuffer)->data(), sourcePointer, sourceSize);
+
+ (*newBuffer)->meta_data()->setInt64(kKeyTime, frameTime);
+}
+
+void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBuffer& sourceBuffer) {
+ int64_t frameTime;
+ CHECK(sourceBuffer.meta_data()->findInt64(kKeyTime, &frameTime));
+ createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy);
+ mLastReadBufferCopy->add_ref();
+ mLastReadBufferCopy->setObserver(this);
+}
+
+status_t CameraSourceTimeLapse::read(
+ MediaBuffer **buffer, const ReadOptions *options) {
+ if (mLastReadBufferCopy == NULL) {
+ mLastReadStatus = CameraSource::read(buffer, options);
+
+ // mQuickStop may have turned to true while read was blocked. Make a copy of
+ // the buffer in that case.
+ Mutex::Autolock autoLock(mQuickStopLock);
+ if (mQuickStop && *buffer) {
+ fillLastReadBufferCopy(**buffer);
+ }
+ return mLastReadStatus;
+ } else {
+ (*buffer) = mLastReadBufferCopy;
+ (*buffer)->add_ref();
+ return mLastReadStatus;
+ }
+}
+
+// static
+void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) {
+ CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
+ source->threadTimeLapseEntry();
+ return NULL;
+}
+
+void CameraSourceTimeLapse::threadTimeLapseEntry() {
+ while (mStarted) {
+ {
+ Mutex::Autolock autoLock(mCameraIdleLock);
+ if (!mCameraIdle) {
+ mCameraIdleCondition.wait(mCameraIdleLock);
+ }
+ CHECK(mCameraIdle);
+ mCameraIdle = false;
+ }
+
+ // Even if mQuickStop == true we need to take one more picture
+ // as a read() may be blocked, waiting for a frame to get available.
+ // After this takePicture, if mQuickStop == true, we can safely exit
+ // this thread as read() will make a copy of this last frame and keep
+ // returning it in the quick stop mode.
+ Mutex::Autolock autoLock(mQuickStopLock);
+ CHECK_EQ(OK, mCamera->takePicture());
+ if (mQuickStop) {
+ LOGV("threadTimeLapseEntry: Exiting due to mQuickStop = true");
+ return;
+ }
+ mTakePictureCondition.waitRelative(mQuickStopLock,
+ mTimeBetweenTimeLapseFrameCaptureUs * 1000);
+ }
+ LOGV("threadTimeLapseEntry: Exiting due to mStarted = false");
+}
+
+void CameraSourceTimeLapse::startCameraRecording() {
+ if (mUseStillCameraForTimeLapse) {
+ LOGV("start time lapse recording using still camera");
+
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ String8 s = mCamera->getParameters();
+ IPCThreadState::self()->restoreCallingIdentity(token);
+
+ CameraParameters params(s);
+ params.setPictureSize(mPictureWidth, mPictureHeight);
+ mCamera->setParameters(params.flatten());
+ mCameraIdle = true;
+ mStopWaitingForIdleCamera = false;
+
+ // disable shutter sound and play the recording sound.
+ mCamera->sendCommand(CAMERA_CMD_ENABLE_SHUTTER_SOUND, 0, 0);
+ mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
+
+ // create a thread which takes pictures in a loop
+ pthread_attr_t attr;
+ pthread_attr_init(&attr);
+ pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
+
+ pthread_create(&mThreadTimeLapse, &attr, ThreadTimeLapseWrapper, this);
+ pthread_attr_destroy(&attr);
+ } else {
+ LOGV("start time lapse recording using video camera");
+ CHECK_EQ(OK, mCamera->startRecording());
+ }
+}
+
+void CameraSourceTimeLapse::stopCameraRecording() {
+ if (mUseStillCameraForTimeLapse) {
+ void *dummy;
+ pthread_join(mThreadTimeLapse, &dummy);
+
+ // Last takePicture may still be underway. Wait for the camera to get
+ // idle.
+ Mutex::Autolock autoLock(mCameraIdleLock);
+ mStopWaitingForIdleCamera = true;
+ if (!mCameraIdle) {
+ mCameraIdleCondition.wait(mCameraIdleLock);
+ }
+ CHECK(mCameraIdle);
+ mCamera->setListener(NULL);
+
+ // play the recording sound.
+ mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
+ } else {
+ mCamera->setListener(NULL);
+ mCamera->stopRecording();
+ }
+ if (mLastReadBufferCopy) {
+ mLastReadBufferCopy->release();
+ mLastReadBufferCopy = NULL;
+ }
+}
+
+void CameraSourceTimeLapse::releaseRecordingFrame(const sp<IMemory>& frame) {
+ if (!mUseStillCameraForTimeLapse) {
+ mCamera->releaseRecordingFrame(frame);
+ }
+}
+
+sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_data) {
+ size_t source_size = source_data->size();
+ void* source_pointer = source_data->pointer();
+
+ sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size);
+ sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size);
+ memcpy(newMemory->pointer(), source_pointer, source_size);
+ return newMemory;
+}
+
+// Allocates IMemory of final type MemoryBase with the given size.
+sp<IMemory> allocateIMemory(size_t size) {
+ sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(size);
+ sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, size);
+ return newMemory;
+}
+
+// static
+void *CameraSourceTimeLapse::ThreadStartPreviewWrapper(void *me) {
+ CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
+ source->threadStartPreview();
+ return NULL;
+}
+
+void CameraSourceTimeLapse::threadStartPreview() {
+ CHECK_EQ(OK, mCamera->startPreview());
+ Mutex::Autolock autoLock(mCameraIdleLock);
+ mCameraIdle = true;
+ mCameraIdleCondition.signal();
+}
+
+void CameraSourceTimeLapse::restartPreview() {
+ // Start this in a different thread, so that the dataCallback can return
+ LOGV("restartPreview");
+ pthread_attr_t attr;
+ pthread_attr_init(&attr);
+ pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
+
+ pthread_t threadPreview;
+ pthread_create(&threadPreview, &attr, ThreadStartPreviewWrapper, this);
+ pthread_attr_destroy(&attr);
+}
+
+sp<IMemory> CameraSourceTimeLapse::cropYUVImage(const sp<IMemory> &source_data) {
+ // find the YUV format
+ int32_t srcFormat;
+ CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat));
+ YUVImage::YUVFormat yuvFormat;
+ if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
+ yuvFormat = YUVImage::YUV420SemiPlanar;
+ } else {
+ CHECK_EQ(srcFormat, OMX_COLOR_FormatYUV420Planar);
+ yuvFormat = YUVImage::YUV420Planar;
+ }
+
+ // allocate memory for cropped image and setup a canvas using it.
+ sp<IMemory> croppedImageMemory = allocateIMemory(
+ YUVImage::bufferSize(yuvFormat, mVideoWidth, mVideoHeight));
+ YUVImage yuvImageCropped(yuvFormat,
+ mVideoWidth, mVideoHeight,
+ (uint8_t *)croppedImageMemory->pointer());
+ YUVCanvas yuvCanvasCrop(yuvImageCropped);
+
+ YUVImage yuvImageSource(yuvFormat,
+ mPictureWidth, mPictureHeight,
+ (uint8_t *)source_data->pointer());
+ yuvCanvasCrop.CopyImageRect(
+ Rect(mCropRectStartX, mCropRectStartY,
+ mCropRectStartX + mVideoWidth,
+ mCropRectStartY + mVideoHeight),
+ 0, 0,
+ yuvImageSource);
+
+ return croppedImageMemory;
+}
+
+void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) {
+ if (msgType == CAMERA_MSG_COMPRESSED_IMAGE) {
+ // takePicture will complete after this callback, so restart preview.
+ restartPreview();
+ return;
+ }
+ if (msgType != CAMERA_MSG_RAW_IMAGE) {
+ return;
+ }
+
+ LOGV("dataCallback for timelapse still frame");
+ CHECK_EQ(true, mUseStillCameraForTimeLapse);
+
+ int64_t timestampUs;
+ if (mNumFramesReceived == 0) {
+ timestampUs = mStartTimeUs;
+ } else {
+ timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
+ }
+
+ if (mNeedCropping) {
+ sp<IMemory> croppedImageData = cropYUVImage(data);
+ dataCallbackTimestamp(timestampUs, msgType, croppedImageData);
+ } else {
+ sp<IMemory> dataCopy = createIMemoryCopy(data);
+ dataCallbackTimestamp(timestampUs, msgType, dataCopy);
+ }
+}
+
+bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
+ if (mSkipCurrentFrame) {
+ mSkipCurrentFrame = false;
+ return true;
+ } else {
+ return false;
+ }
+}
+
+bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
+ if (!mUseStillCameraForTimeLapse) {
+ if (mLastTimeLapseFrameRealTimestampUs == 0) {
+ // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
+ // to current time (timestampUs) and save frame data.
+ LOGV("dataCallbackTimestamp timelapse: initial frame");
+
+ mLastTimeLapseFrameRealTimestampUs = *timestampUs;
+ return false;
+ }
+
+ {
+ Mutex::Autolock autoLock(mQuickStopLock);
+
+ // mForceRead may be set to true by startQuickReadReturns(). In that
+ // case don't skip this frame.
+ if (mForceRead) {
+ LOGV("dataCallbackTimestamp timelapse: forced read");
+ mForceRead = false;
+ *timestampUs = mLastFrameTimestampUs;
+ return false;
+ }
+ }
+
+ if (*timestampUs <
+ (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) {
+ // Skip all frames from last encoded frame until
+ // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
+ // Tell the camera to release its recording frame and return.
+ LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
+ return true;
+ } else {
+ // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
+ // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
+ // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
+ // of the last encoded frame's time stamp.
+ LOGV("dataCallbackTimestamp timelapse: got timelapse frame");
+
+ mLastTimeLapseFrameRealTimestampUs = *timestampUs;
+ *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
+ return false;
+ }
+ }
+ return false;
+}
+
+void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
+ const sp<IMemory> &data) {
+ if (!mUseStillCameraForTimeLapse) {
+ mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
+ } else {
+ Mutex::Autolock autoLock(mCameraIdleLock);
+ // If we are using the still camera and stop() has been called, it may
+ // be waiting for the camera to get idle. In that case return
+ // immediately. Calling CameraSource::dataCallbackTimestamp() will lead
+ // to a deadlock since it tries to access CameraSource::mLock which in
+ // this case is held by CameraSource::stop() currently waiting for the
+ // camera to get idle. And camera will not get idle until this call
+ // returns.
+ if (mStopWaitingForIdleCamera) {
+ return;
+ }
+ }
+ CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
+}
+
+} // namespace android
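
For reference, the frame selection implemented by skipFrameAndModifyTimeStamp() above (keep one camera frame per capture interval of real time, stamp kept frames at the video frame interval) can be sketched in isolation. The snippet below is illustrative only, is not part of this change, and uses made-up names:

#include <cstdint>
#include <cstdio>

// Illustrative sketch of the time-lapse timestamp rewrite: keep one frame
// every captureIntervalUs of real time, but stamp kept frames at the video
// frame interval so playback runs faster than real time.
struct TimeLapseSelector {
    int64_t lastRealTimestampUs = 0;   // real time of last kept frame
    int64_t lastVideoTimestampUs = 0;  // rewritten time of last kept frame
    bool haveFirstFrame = false;

    // Returns true if the frame should be kept; on success *videoTimestampUs
    // receives the rewritten timestamp.
    bool select(int64_t realTimestampUs, int64_t captureIntervalUs,
                int64_t videoFrameIntervalUs, int64_t *videoTimestampUs) {
        if (!haveFirstFrame) {
            haveFirstFrame = true;
            lastRealTimestampUs = realTimestampUs;
            lastVideoTimestampUs = 0;
            *videoTimestampUs = 0;
            return true;
        }
        if (realTimestampUs < lastRealTimestampUs + captureIntervalUs) {
            return false;  // skip intermediate frame
        }
        lastRealTimestampUs = realTimestampUs;
        lastVideoTimestampUs += videoFrameIntervalUs;
        *videoTimestampUs = lastVideoTimestampUs;
        return true;
    }
};

int main() {
    TimeLapseSelector sel;
    // Capture one frame per second, encode at roughly 30 fps (33333 us/frame).
    for (int64_t t = 0; t <= 5000000; t += 100000) {
        int64_t videoTs;
        if (sel.select(t, 1000000, 33333, &videoTs)) {
            printf("keep frame at real %lld us -> video %lld us\n",
                   (long long)t, (long long)videoTs);
        }
    }
    return 0;
}
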
diff --git a/media/libstagefright/DRMExtractor.cpp b/media/libstagefright/DRMExtractor.cpp
index aa9ad23..3c98932 100644
--- a/media/libstagefright/DRMExtractor.cpp
+++ b/media/libstagefright/DRMExtractor.cpp
@@ -280,18 +280,23 @@
if (gDrmManagerClient == NULL) {
gDrmManagerClient = new DrmManagerClient();
}
+
+ if (gDrmManagerClient == NULL) {
+ return false;
+ }
}
DecryptHandle *decryptHandle = source->DrmInitialization(gDrmManagerClient);
if (decryptHandle != NULL) {
if (decryptHandle->decryptApiType == DecryptApiType::CONTAINER_BASED) {
- *mimeType = String8("drm+container_based+");
+ *mimeType = String8("drm+container_based+") + decryptHandle->mimeType;
} else if (decryptHandle->decryptApiType == DecryptApiType::ELEMENTARY_STREAM_BASED) {
- *mimeType = String8("drm+es_based+");
+ *mimeType = String8("drm+es_based+") + decryptHandle->mimeType;
+ } else if (decryptHandle->decryptApiType == DecryptApiType::WV_BASED) {
+ *mimeType = MEDIA_MIMETYPE_CONTAINER_WVM;
+ LOGW("SniffWVM: found match\n");
}
-
- *mimeType += decryptHandle->mimeType;
*confidence = 10.0f;
return true;
diff --git a/media/libstagefright/DataSource.cpp b/media/libstagefright/DataSource.cpp
index 0b8997c..ee0d792 100644
--- a/media/libstagefright/DataSource.cpp
+++ b/media/libstagefright/DataSource.cpp
@@ -36,7 +36,7 @@
namespace android {
-bool DataSource::getUInt16(off_t offset, uint16_t *x) {
+bool DataSource::getUInt16(off64_t offset, uint16_t *x) {
*x = 0;
uint8_t byte[2];
@@ -49,7 +49,7 @@
return true;
}
-status_t DataSource::getSize(off_t *size) {
+status_t DataSource::getSize(off64_t *size) {
*size = 0;
return ERROR_UNSUPPORTED;
diff --git a/media/libstagefright/FileSource.cpp b/media/libstagefright/FileSource.cpp
index b46d8d0..98d5b50 100644
--- a/media/libstagefright/FileSource.cpp
+++ b/media/libstagefright/FileSource.cpp
@@ -16,12 +16,16 @@
#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaDebug.h>
+#include <sys/types.h>
+#include <unistd.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
namespace android {
FileSource::FileSource(const char *filename)
- : mFile(fopen(filename, "rb")),
- mFd(fileno(mFile)),
+ : mFd(-1),
mOffset(0),
mLength(-1),
mDecryptHandle(NULL),
@@ -29,11 +33,12 @@
mDrmBufOffset(0),
mDrmBufSize(0),
mDrmBuf(NULL){
+
+ mFd = open(filename, O_LARGEFILE | O_RDONLY);
}
FileSource::FileSource(int fd, int64_t offset, int64_t length)
- : mFile(fdopen(fd, "rb")),
- mFd(fd),
+ : mFd(fd),
mOffset(offset),
mLength(length),
mDecryptHandle(NULL),
@@ -46,26 +51,23 @@
}
FileSource::~FileSource() {
- if (mFile != NULL) {
- fclose(mFile);
- mFile = NULL;
+ if (mFd >= 0) {
+ close(mFd);
+ mFd = -1;
}
if (mDrmBuf != NULL) {
delete[] mDrmBuf;
mDrmBuf = NULL;
}
- if (mDecryptHandle != NULL) {
- mDrmManagerClient->closeDecryptSession(mDecryptHandle);
- }
}
status_t FileSource::initCheck() const {
- return mFile != NULL ? OK : NO_INIT;
+ return mFd >= 0 ? OK : NO_INIT;
}
-ssize_t FileSource::readAt(off_t offset, void *data, size_t size) {
- if (mFile == NULL) {
+ssize_t FileSource::readAt(off64_t offset, void *data, size_t size) {
+ if (mFd < 0) {
return NO_INIT;
}
@@ -85,18 +87,18 @@
== mDecryptHandle->decryptApiType) {
return readAtDRM(offset, data, size);
} else {
- int err = fseeko(mFile, offset + mOffset, SEEK_SET);
- if (err < 0) {
+ off64_t result = lseek64(mFd, offset + mOffset, SEEK_SET);
+ if (result == -1) {
LOGE("seek to %lld failed", offset + mOffset);
return UNKNOWN_ERROR;
}
- return fread(data, 1, size, mFile);
+ return ::read(mFd, data, size);
}
}
-status_t FileSource::getSize(off_t *size) {
- if (mFile == NULL) {
+status_t FileSource::getSize(off64_t *size) {
+ if (mFd < 0) {
return NO_INIT;
}
@@ -106,14 +108,17 @@
return OK;
}
- fseek(mFile, 0, SEEK_END);
- *size = ftello(mFile);
+ *size = lseek64(mFd, 0, SEEK_END);
return OK;
}
DecryptHandle* FileSource::DrmInitialization(DrmManagerClient* client) {
+ if (client == NULL) {
+ return NULL;
+ }
mDrmManagerClient = client;
+
if (mDecryptHandle == NULL) {
mDecryptHandle = mDrmManagerClient->openDecryptSession(
mFd, mOffset, mLength);
@@ -132,7 +137,7 @@
*client = mDrmManagerClient;
}
-ssize_t FileSource::readAtDRM(off_t offset, void *data, size_t size) {
+ssize_t FileSource::readAtDRM(off64_t offset, void *data, size_t size) {
size_t DRM_CACHE_SIZE = 1024;
if (mDrmBuf == NULL) {
mDrmBuf = new unsigned char[DRM_CACHE_SIZE];
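
The FileSource changes above replace buffered FILE* I/O with a raw descriptor and lseek64/read so that reads beyond 2 GB work. A rough standalone illustration of the same idea, not the FileSource API, assuming a POSIX system:

#define _FILE_OFFSET_BITS 64  // make off_t and pread 64-bit capable on 32-bit builds

#include <fcntl.h>
#include <unistd.h>
#include <cstdio>

// Illustrative sketch only: a positioned read on a raw file descriptor with
// 64-bit offsets, the same idea as the lseek64/read pair the patch switches
// to. pread does the seek and read in one call.
static ssize_t readAt(int fd, off_t offset, void *data, size_t size) {
    if (fd < 0) {
        return -1;
    }
    return pread(fd, data, size, offset);
}

int main(int argc, char **argv) {
    if (argc < 2) {
        fprintf(stderr, "usage: %s <file>\n", argv[0]);
        return 1;
    }
    int fd = open(argv[1], O_RDONLY);
    if (fd < 0) {
        perror("open");
        return 1;
    }
    char buf[16];
    ssize_t n = readAt(fd, 0, buf, sizeof(buf));
    printf("read %zd bytes\n", n);
    close(fd);
    return 0;
}
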
diff --git a/media/libstagefright/HTTPStream.cpp b/media/libstagefright/HTTPStream.cpp
index ccc6a34..e7f00aa 100644
--- a/media/libstagefright/HTTPStream.cpp
+++ b/media/libstagefright/HTTPStream.cpp
@@ -36,7 +36,7 @@
namespace android {
// static
-const char *HTTPStream::kStatusKey = ":status:";
+const char *HTTPStream::kStatusKey = ":status:"; // MUST be lowercase.
HTTPStream::HTTPStream()
: mState(READY),
@@ -220,7 +220,7 @@
return err;
}
- mHeaders.add(string(kStatusKey), string(line));
+ mHeaders.add(AString(kStatusKey), AString(line));
char *spacePos = strchr(line, ' ');
if (spacePos == NULL) {
@@ -264,7 +264,10 @@
char *colonPos = strchr(line, ':');
if (colonPos == NULL) {
- mHeaders.add(string(line), string());
+ AString key = line;
+ key.tolower();
+
+ mHeaders.add(key, AString());
} else {
char *end_of_key = colonPos;
while (end_of_key > line && isspace(end_of_key[-1])) {
@@ -278,7 +281,10 @@
*end_of_key = '\0';
- mHeaders.add(string(line), string(start_of_value));
+ AString key = line;
+ key.tolower();
+
+ mHeaders.add(key, AString(start_of_value));
}
}
@@ -314,8 +320,11 @@
return (ssize_t)total;
}
-bool HTTPStream::find_header_value(const string &key, string *value) const {
- ssize_t index = mHeaders.indexOfKey(key);
+bool HTTPStream::find_header_value(const AString &key, AString *value) const {
+ AString key_lower = key;
+ key_lower.tolower();
+
+ ssize_t index = mHeaders.indexOfKey(key_lower);
if (index < 0) {
value->clear();
return false;
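
The HTTPStream changes above lowercase header names before storing and looking them up, since HTTP header names are case-insensitive. A minimal standalone sketch of that matching strategy, with illustrative names and std::map standing in for KeyedVector:

#include <algorithm>
#include <cctype>
#include <cstdio>
#include <map>
#include <string>

// Illustrative sketch: store and look up header names in lowercase so
// "Content-Length", "content-length" and "CONTENT-LENGTH" all hit the
// same entry.
static std::string toLower(std::string s) {
    std::transform(s.begin(), s.end(), s.begin(),
                   [](unsigned char c) { return std::tolower(c); });
    return s;
}

int main() {
    std::map<std::string, std::string> headers;
    headers[toLower("Content-Length")] = "1234";

    auto it = headers.find(toLower("content-LENGTH"));
    if (it != headers.end()) {
        printf("Content-Length = %s\n", it->second.c_str());
    }
    return 0;
}
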
diff --git a/media/libstagefright/JPEGSource.cpp b/media/libstagefright/JPEGSource.cpp
index ec81097..e818115 100644
--- a/media/libstagefright/JPEGSource.cpp
+++ b/media/libstagefright/JPEGSource.cpp
@@ -142,7 +142,7 @@
mWidth = 0;
mHeight = 0;
- off_t i = 0;
+ off64_t i = 0;
uint16_t soi;
if (!mSource->getUInt16(i, &soi)) {
diff --git a/media/libstagefright/MP3Extractor.cpp b/media/libstagefright/MP3Extractor.cpp
index 82c0426..9610f90 100644
--- a/media/libstagefright/MP3Extractor.cpp
+++ b/media/libstagefright/MP3Extractor.cpp
@@ -21,6 +21,8 @@
#include "include/MP3Extractor.h"
#include "include/ID3.h"
+#include "include/VBRISeeker.h"
+#include "include/XINGSeeker.h"
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/DataSource.h>
@@ -42,10 +44,11 @@
// Yes ... there are things that must indeed match...
static const uint32_t kMask = 0xfffe0cc0;
-static bool get_mp3_frame_size(
+// static
+bool MP3Extractor::get_mp3_frame_size(
uint32_t header, size_t *frame_size,
- int *out_sampling_rate = NULL, int *out_channels = NULL,
- int *out_bitrate = NULL) {
+ int *out_sampling_rate, int *out_channels,
+ int *out_bitrate) {
*frame_size = 0;
if (out_sampling_rate) {
@@ -178,136 +181,13 @@
return true;
}
-static bool parse_xing_header(
- const sp<DataSource> &source, off_t first_frame_pos,
- int32_t *frame_number = NULL, int32_t *byte_number = NULL,
- char *table_of_contents = NULL, int32_t *quality_indicator = NULL,
- int64_t *duration = NULL) {
-
- if (frame_number) {
- *frame_number = 0;
- }
- if (byte_number) {
- *byte_number = 0;
- }
- if (table_of_contents) {
- table_of_contents[0] = 0;
- }
- if (quality_indicator) {
- *quality_indicator = 0;
- }
- if (duration) {
- *duration = 0;
- }
-
- uint8_t buffer[4];
- int offset = first_frame_pos;
- if (source->readAt(offset, &buffer, 4) < 4) { // get header
- return false;
- }
- offset += 4;
-
- uint8_t id, layer, sr_index, mode;
- layer = (buffer[1] >> 1) & 3;
- id = (buffer[1] >> 3) & 3;
- sr_index = (buffer[2] >> 2) & 3;
- mode = (buffer[3] >> 6) & 3;
- if (layer == 0) {
- return false;
- }
- if (id == 1) {
- return false;
- }
- if (sr_index == 3) {
- return false;
- }
- // determine offset of XING header
- if(id&1) { // mpeg1
- if (mode != 3) offset += 32;
- else offset += 17;
- } else { // mpeg2
- if (mode != 3) offset += 17;
- else offset += 9;
- }
-
- if (source->readAt(offset, &buffer, 4) < 4) { // XING header ID
- return false;
- }
- offset += 4;
- // Check XING ID
- if ((buffer[0] != 'X') || (buffer[1] != 'i')
- || (buffer[2] != 'n') || (buffer[3] != 'g')) {
- if ((buffer[0] != 'I') || (buffer[1] != 'n')
- || (buffer[2] != 'f') || (buffer[3] != 'o')) {
- return false;
- }
- }
-
- if (source->readAt(offset, &buffer, 4) < 4) { // flags
- return false;
- }
- offset += 4;
- uint32_t flags = U32_AT(buffer);
-
- if (flags & 0x0001) { // Frames field is present
- if (source->readAt(offset, buffer, 4) < 4) {
- return false;
- }
- if (frame_number) {
- *frame_number = U32_AT(buffer);
- }
- int32_t frame = U32_AT(buffer);
- // Samples per Frame: 1. index = MPEG Version ID, 2. index = Layer
- const int samplesPerFrames[2][3] =
- {
- { 384, 1152, 576 }, // MPEG 2, 2.5: layer1, layer2, layer3
- { 384, 1152, 1152 }, // MPEG 1: layer1, layer2, layer3
- };
- // sampling rates in hertz: 1. index = MPEG Version ID, 2. index = sampling rate index
- const int samplingRates[4][3] =
- {
- { 11025, 12000, 8000, }, // MPEG 2.5
- { 0, 0, 0, }, // reserved
- { 22050, 24000, 16000, }, // MPEG 2
- { 44100, 48000, 32000, } // MPEG 1
- };
- if (duration) {
- *duration = (int64_t)frame * samplesPerFrames[id&1][3-layer] * 1000000LL
- / samplingRates[id][sr_index];
- }
- offset += 4;
- }
- if (flags & 0x0002) { // Bytes field is present
- if (byte_number) {
- if (source->readAt(offset, buffer, 4) < 4) {
- return false;
- }
- *byte_number = U32_AT(buffer);
- }
- offset += 4;
- }
- if (flags & 0x0004) { // TOC field is present
- if (table_of_contents) {
- if (source->readAt(offset + 1, table_of_contents, 99) < 99) {
- return false;
- }
- }
- offset += 100;
- }
- if (flags & 0x0008) { // Quality indicator field is present
- if (quality_indicator) {
- if (source->readAt(offset, buffer, 4) < 4) {
- return false;
- }
- *quality_indicator = U32_AT(buffer);
- }
- }
- return true;
-}
-
static bool Resync(
const sp<DataSource> &source, uint32_t match_header,
- off_t *inout_pos, uint32_t *out_header) {
+ off64_t *inout_pos, off64_t *post_id3_pos, uint32_t *out_header) {
+ if (post_id3_pos != NULL) {
+ *post_id3_pos = 0;
+ }
+
if (*inout_pos == 0) {
// Skip an optional ID3 header if syncing at the very beginning
// of the datasource.
@@ -340,9 +220,13 @@
LOGV("skipped ID3 tag, new starting offset is %ld (0x%08lx)",
*inout_pos, *inout_pos);
}
+
+ if (post_id3_pos != NULL) {
+ *post_id3_pos = *inout_pos;
+ }
}
- off_t pos = *inout_pos;
+ off64_t pos = *inout_pos;
bool valid = false;
do {
if (pos >= *inout_pos + 128 * 1024) {
@@ -365,8 +249,9 @@
size_t frame_size;
int sample_rate, num_channels, bitrate;
- if (!get_mp3_frame_size(header, &frame_size,
- &sample_rate, &num_channels, &bitrate)) {
+ if (!MP3Extractor::get_mp3_frame_size(
+ header, &frame_size,
+ &sample_rate, &num_channels, &bitrate)) {
++pos;
continue;
}
@@ -376,7 +261,7 @@
// We found what looks like a valid frame,
// now find its successors.
- off_t test_pos = pos + frame_size;
+ off64_t test_pos = pos + frame_size;
valid = true;
for (int j = 0; j < 3; ++j) {
@@ -396,7 +281,8 @@
}
size_t test_frame_size;
- if (!get_mp3_frame_size(test_header, &test_frame_size)) {
+ if (!MP3Extractor::get_mp3_frame_size(
+ test_header, &test_frame_size)) {
valid = false;
break;
}
@@ -426,8 +312,8 @@
public:
MP3Source(
const sp<MetaData> &meta, const sp<DataSource> &source,
- off_t first_frame_pos, uint32_t fixed_header,
- int32_t byte_number, const char *table_of_contents);
+ off64_t first_frame_pos, uint32_t fixed_header,
+ const sp<MP3Seeker> &seeker);
virtual status_t start(MetaData *params = NULL);
virtual status_t stop();
@@ -443,14 +329,12 @@
private:
sp<MetaData> mMeta;
sp<DataSource> mDataSource;
- off_t mFirstFramePos;
+ off64_t mFirstFramePos;
uint32_t mFixedHeader;
- off_t mCurrentPos;
+ off64_t mCurrentPos;
int64_t mCurrentTimeUs;
bool mStarted;
- int32_t mByteNumber; // total number of bytes in this MP3
- // TOC entries in XING header. Skip the first one since it's always 0.
- char mTableOfContents[99];
+ sp<MP3Seeker> mSeeker;
MediaBufferGroup *mGroup;
MP3Source(const MP3Source &);
@@ -462,25 +346,28 @@
: mInitCheck(NO_INIT),
mDataSource(source),
mFirstFramePos(-1),
- mFixedHeader(0),
- mByteNumber(0) {
- off_t pos = 0;
+ mFixedHeader(0) {
+ off64_t pos = 0;
+ off64_t post_id3_pos;
uint32_t header;
bool success;
int64_t meta_offset;
uint32_t meta_header;
+ int64_t meta_post_id3_offset;
if (meta != NULL
&& meta->findInt64("offset", &meta_offset)
- && meta->findInt32("header", (int32_t *)&meta_header)) {
+ && meta->findInt32("header", (int32_t *)&meta_header)
+ && meta->findInt64("post-id3-offset", &meta_post_id3_offset)) {
// The sniffer has already done all the hard work for us, simply
// accept its judgement.
- pos = (off_t)meta_offset;
+ pos = (off64_t)meta_offset;
header = meta_header;
+ post_id3_pos = (off64_t)meta_post_id3_offset;
success = true;
} else {
- success = Resync(mDataSource, 0, &pos, &header);
+ success = Resync(mDataSource, 0, &pos, &post_id3_pos, &header);
}
if (!success) {
@@ -505,21 +392,27 @@
mMeta->setInt32(kKeyBitRate, bitrate * 1000);
mMeta->setInt32(kKeyChannelCount, num_channels);
- int64_t duration;
- parse_xing_header(
- mDataSource, mFirstFramePos, NULL, &mByteNumber,
- mTableOfContents, NULL, &duration);
- if (duration > 0) {
- mMeta->setInt64(kKeyDuration, duration);
- } else {
- off_t fileSize;
+ mSeeker = XINGSeeker::CreateFromSource(mDataSource, mFirstFramePos);
+
+ if (mSeeker == NULL) {
+ mSeeker = VBRISeeker::CreateFromSource(mDataSource, post_id3_pos);
+ }
+
+ int64_t durationUs;
+
+ if (mSeeker == NULL || !mSeeker->getDuration(&durationUs)) {
+ off64_t fileSize;
if (mDataSource->getSize(&fileSize) == OK) {
- mMeta->setInt64(
- kKeyDuration,
- 8000LL * (fileSize - mFirstFramePos) / bitrate);
+ durationUs = 8000LL * (fileSize - mFirstFramePos) / bitrate;
+ } else {
+ durationUs = -1;
}
}
+ if (durationUs >= 0) {
+ mMeta->setInt64(kKeyDuration, durationUs);
+ }
+
mInitCheck = OK;
}
@@ -534,7 +427,7 @@
return new MP3Source(
mMeta, mDataSource, mFirstFramePos, mFixedHeader,
- mByteNumber, mTableOfContents);
+ mSeeker);
}
sp<MetaData> MP3Extractor::getTrackMetaData(size_t index, uint32_t flags) {
@@ -549,8 +442,8 @@
MP3Source::MP3Source(
const sp<MetaData> &meta, const sp<DataSource> &source,
- off_t first_frame_pos, uint32_t fixed_header,
- int32_t byte_number, const char *table_of_contents)
+ off64_t first_frame_pos, uint32_t fixed_header,
+ const sp<MP3Seeker> &seeker)
: mMeta(meta),
mDataSource(source),
mFirstFramePos(first_frame_pos),
@@ -558,9 +451,8 @@
mCurrentPos(0),
mCurrentTimeUs(0),
mStarted(false),
- mByteNumber(byte_number),
+ mSeeker(seeker),
mGroup(NULL) {
- memcpy (mTableOfContents, table_of_contents, sizeof(mTableOfContents));
}
MP3Source::~MP3Source() {
@@ -607,43 +499,21 @@
int64_t seekTimeUs;
ReadOptions::SeekMode mode;
if (options != NULL && options->getSeekTo(&seekTimeUs, &mode)) {
- int32_t bitrate;
- if (!mMeta->findInt32(kKeyBitRate, &bitrate)) {
- // bitrate is in bits/sec.
- LOGI("no bitrate");
+ int64_t actualSeekTimeUs = seekTimeUs;
+ if (mSeeker == NULL
+ || !mSeeker->getOffsetForTime(&actualSeekTimeUs, &mCurrentPos)) {
+ int32_t bitrate;
+ if (!mMeta->findInt32(kKeyBitRate, &bitrate)) {
+ // bitrate is in bits/sec.
+ LOGI("no bitrate");
- return ERROR_UNSUPPORTED;
- }
-
- mCurrentTimeUs = seekTimeUs;
- // interpolate in TOC to get file seek point in bytes
- int64_t duration;
- if ((mByteNumber > 0) && (mTableOfContents[0] > 0)
- && mMeta->findInt64(kKeyDuration, &duration)) {
- float percent = (float)seekTimeUs * 100 / duration;
- float fx;
- if( percent <= 0.0f ) {
- fx = 0.0f;
- } else if( percent >= 100.0f ) {
- fx = 256.0f;
- } else {
- int a = (int)percent;
- float fa, fb;
- if ( a == 0 ) {
- fa = 0.0f;
- } else {
- fa = (float)mTableOfContents[a-1];
- }
- if ( a < 99 ) {
- fb = (float)mTableOfContents[a];
- } else {
- fb = 256.0f;
- }
- fx = fa + (fb-fa)*(percent-a);
+ return ERROR_UNSUPPORTED;
}
- mCurrentPos = mFirstFramePos + (int)((1.0f/256.0f)*fx*mByteNumber);
- } else {
+
+ mCurrentTimeUs = seekTimeUs;
mCurrentPos = mFirstFramePos + seekTimeUs * bitrate / 8000000;
+ } else {
+ mCurrentTimeUs = actualSeekTimeUs;
}
}
@@ -667,15 +537,16 @@
uint32_t header = U32_AT((const uint8_t *)buffer->data());
if ((header & kMask) == (mFixedHeader & kMask)
- && get_mp3_frame_size(header, &frame_size, NULL, NULL, &bitrate)) {
+ && MP3Extractor::get_mp3_frame_size(
+ header, &frame_size, NULL, NULL, &bitrate)) {
break;
}
// Lost sync.
LOGV("lost sync! header = 0x%08x, old header = 0x%08x\n", header, mFixedHeader);
- off_t pos = mCurrentPos;
- if (!Resync(mDataSource, mFixedHeader, &pos, NULL)) {
+ off64_t pos = mCurrentPos;
+ if (!Resync(mDataSource, mFixedHeader, &pos, NULL, NULL)) {
LOGE("Unable to resync. Signalling end of stream.");
buffer->release();
@@ -780,15 +651,17 @@
bool SniffMP3(
const sp<DataSource> &source, String8 *mimeType,
float *confidence, sp<AMessage> *meta) {
- off_t pos = 0;
+ off64_t pos = 0;
+ off64_t post_id3_pos;
uint32_t header;
- if (!Resync(source, 0, &pos, &header)) {
+ if (!Resync(source, 0, &pos, &post_id3_pos, &header)) {
return false;
}
*meta = new AMessage;
(*meta)->setInt64("offset", pos);
(*meta)->setInt32("header", header);
+ (*meta)->setInt64("post-id3-offset", post_id3_pos);
*mimeType = MEDIA_MIMETYPE_AUDIO_MPEG;
*confidence = 0.2f;
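
When neither a XING nor a VBRI seek table is found, the MP3 code above falls back to a constant-bitrate estimate: the seek byte offset is the first frame position plus seekTimeUs * bitrate / 8000000. A tiny worked example, illustrative only:

#include <cstdint>
#include <cstdio>

// Illustrative sketch of the CBR seek fallback: with bitrate in bits/sec,
// seekTimeUs microseconds of audio occupy seekTimeUs * bitrate / 8000000
// bytes after the first frame.
int64_t cbrSeekOffset(int64_t firstFramePos, int64_t seekTimeUs, int32_t bitrate) {
    return firstFramePos + seekTimeUs * bitrate / 8000000;
}

int main() {
    // 128 kbps stream, first frame at byte 417, seek to 60 seconds.
    int64_t pos = cbrSeekOffset(417, 60LL * 1000000, 128000);
    printf("seek to byte %lld\n", (long long)pos);  // 417 + 960000
    return 0;
}
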
diff --git a/media/libstagefright/MPEG2TSWriter.cpp b/media/libstagefright/MPEG2TSWriter.cpp
index 81a2b0d..4d8165e 100644
--- a/media/libstagefright/MPEG2TSWriter.cpp
+++ b/media/libstagefright/MPEG2TSWriter.cpp
@@ -410,7 +410,7 @@
////////////////////////////////////////////////////////////////////////////////
MPEG2TSWriter::MPEG2TSWriter(int fd)
- : mFile(fdopen(fd, "wb")),
+ : mFile(fdopen(dup(fd), "wb")),
mStarted(false),
mNumSourcesDone(0),
mNumTSPacketsWritten(0),
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index 03682e2..bbe99d3 100644
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -98,11 +98,11 @@
MPEG4DataSource(const sp<DataSource> &source);
virtual status_t initCheck() const;
- virtual ssize_t readAt(off_t offset, void *data, size_t size);
- virtual status_t getSize(off_t *size);
+ virtual ssize_t readAt(off64_t offset, void *data, size_t size);
+ virtual status_t getSize(off64_t *size);
virtual uint32_t flags();
- status_t setCachedRange(off_t offset, size_t size);
+ status_t setCachedRange(off64_t offset, size_t size);
protected:
virtual ~MPEG4DataSource();
@@ -111,7 +111,7 @@
Mutex mLock;
sp<DataSource> mSource;
- off_t mCachedOffset;
+ off64_t mCachedOffset;
size_t mCachedSize;
uint8_t *mCache;
@@ -146,7 +146,7 @@
return mSource->initCheck();
}
-ssize_t MPEG4DataSource::readAt(off_t offset, void *data, size_t size) {
+ssize_t MPEG4DataSource::readAt(off64_t offset, void *data, size_t size) {
Mutex::Autolock autoLock(mLock);
if (offset >= mCachedOffset
@@ -158,7 +158,7 @@
return mSource->readAt(offset, data, size);
}
-status_t MPEG4DataSource::getSize(off_t *size) {
+status_t MPEG4DataSource::getSize(off64_t *size) {
return mSource->getSize(size);
}
@@ -166,7 +166,7 @@
return mSource->flags();
}
-status_t MPEG4DataSource::setCachedRange(off_t offset, size_t size) {
+status_t MPEG4DataSource::setCachedRange(off64_t offset, size_t size) {
Mutex::Autolock autoLock(mLock);
clearCache();
@@ -363,7 +363,7 @@
return OK;
}
- off_t offset = 0;
+ off64_t offset = 0;
status_t err;
while ((err = parseChunk(&offset, 0)) == OK) {
}
@@ -404,7 +404,7 @@
}
// Reads an encoded integer 7 bits at a time until it encounters the high bit clear.
-int32_t readSize(off_t offset,
+int32_t readSize(off64_t offset,
const sp<DataSource> DataSource, uint8_t *numOfBytes) {
uint32_t size = 0;
uint8_t data;
@@ -424,7 +424,7 @@
return size;
}
-status_t MPEG4Extractor::parseDrmSINF(off_t *offset, off_t data_offset) {
+status_t MPEG4Extractor::parseDrmSINF(off64_t *offset, off64_t data_offset) {
uint8_t updateIdTag;
if (mDataSource->readAt(data_offset, &updateIdTag, 1) < 1) {
return ERROR_IO;
@@ -596,14 +596,14 @@
s->setTo(tmp);
}
-status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) {
+status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
uint32_t hdr[2];
if (mDataSource->readAt(*offset, hdr, 8) < 8) {
return ERROR_IO;
}
uint64_t chunk_size = ntohl(hdr[0]);
uint32_t chunk_type = ntohl(hdr[1]);
- off_t data_offset = *offset + 8;
+ off64_t data_offset = *offset + 8;
if (chunk_size == 1) {
if (mDataSource->readAt(*offset + 8, &chunk_size, 8) < 8) {
@@ -644,11 +644,11 @@
PathAdder autoAdder(&mPath, chunk_type);
- off_t chunk_data_size = *offset + chunk_size - data_offset;
+ off64_t chunk_data_size = *offset + chunk_size - data_offset;
if (chunk_type != FOURCC('c', 'p', 'r', 't')
&& mPath.size() == 5 && underMetaDataPath(mPath)) {
- off_t stop_offset = *offset + chunk_size;
+ off64_t stop_offset = *offset + chunk_size;
*offset = data_offset;
while (*offset < stop_offset) {
status_t err = parseChunk(offset, depth + 1);
@@ -715,7 +715,7 @@
track->meta->setCString(kKeyMIMEType, "application/octet-stream");
}
- off_t stop_offset = *offset + chunk_size;
+ off64_t stop_offset = *offset + chunk_size;
*offset = data_offset;
while (*offset < stop_offset) {
status_t err = parseChunk(offset, depth + 1);
@@ -788,7 +788,7 @@
return ERROR_IO;
}
- off_t timescale_offset;
+ off64_t timescale_offset;
if (version == 1) {
timescale_offset = data_offset + 4 + 16;
@@ -838,7 +838,7 @@
}
uint8_t buffer[8];
- if (chunk_data_size < (off_t)sizeof(buffer)) {
+ if (chunk_data_size < (off64_t)sizeof(buffer)) {
return ERROR_MALFORMED;
}
@@ -862,7 +862,7 @@
break;
}
- off_t stop_offset = *offset + chunk_size;
+ off64_t stop_offset = *offset + chunk_size;
*offset = data_offset + 8;
for (uint32_t i = 0; i < entry_count; ++i) {
status_t err = parseChunk(offset, depth + 1);
@@ -919,7 +919,7 @@
mLastTrack->meta->setInt32(kKeyChannelCount, num_channels);
mLastTrack->meta->setInt32(kKeySampleRate, sample_rate);
- off_t stop_offset = *offset + chunk_size;
+ off64_t stop_offset = *offset + chunk_size;
*offset = data_offset + sizeof(buffer);
while (*offset < stop_offset) {
status_t err = parseChunk(offset, depth + 1);
@@ -962,7 +962,7 @@
mLastTrack->meta->setInt32(kKeyWidth, width);
mLastTrack->meta->setInt32(kKeyHeight, height);
- off_t stop_offset = *offset + chunk_size;
+ off64_t stop_offset = *offset + chunk_size;
*offset = data_offset + sizeof(buffer);
while (*offset < stop_offset) {
status_t err = parseChunk(offset, depth + 1);
@@ -1069,7 +1069,7 @@
}
uint8_t buffer[256];
- if (chunk_data_size > (off_t)sizeof(buffer)) {
+ if (chunk_data_size > (off64_t)sizeof(buffer)) {
return ERROR_BUFFER_TOO_SMALL;
}
@@ -1108,7 +1108,7 @@
case FOURCC('a', 'v', 'c', 'C'):
{
char buffer[256];
- if (chunk_data_size > (off_t)sizeof(buffer)) {
+ if (chunk_data_size > (off64_t)sizeof(buffer)) {
return ERROR_BUFFER_TOO_SMALL;
}
@@ -1127,7 +1127,7 @@
case FOURCC('m', 'e', 't', 'a'):
{
uint8_t buffer[4];
- if (chunk_data_size < (off_t)sizeof(buffer)) {
+ if (chunk_data_size < (off64_t)sizeof(buffer)) {
return ERROR_MALFORMED;
}
@@ -1147,7 +1147,7 @@
return OK;
}
- off_t stop_offset = *offset + chunk_size;
+ off64_t stop_offset = *offset + chunk_size;
*offset = data_offset + sizeof(buffer);
while (*offset < stop_offset) {
status_t err = parseChunk(offset, depth + 1);
@@ -1232,7 +1232,7 @@
}
status_t MPEG4Extractor::parseTrackHeader(
- off_t data_offset, off_t data_size) {
+ off64_t data_offset, off64_t data_size) {
if (data_size < 4) {
return ERROR_MALFORMED;
}
@@ -1246,7 +1246,7 @@
uint8_t buffer[36 + 60];
- if (data_size != (off_t)dynSize + 60) {
+ if (data_size != (off64_t)dynSize + 60) {
return ERROR_MALFORMED;
}
@@ -1263,7 +1263,9 @@
mtime = U64_AT(&buffer[12]);
id = U32_AT(&buffer[20]);
duration = U64_AT(&buffer[28]);
- } else if (version == 0) {
+ } else {
+ CHECK_EQ((unsigned)version, 0u);
+
ctime = U32_AT(&buffer[4]);
mtime = U32_AT(&buffer[8]);
id = U32_AT(&buffer[12]);
@@ -1316,7 +1318,7 @@
return OK;
}
-status_t MPEG4Extractor::parseMetaData(off_t offset, size_t size) {
+status_t MPEG4Extractor::parseMetaData(off64_t offset, size_t size) {
if (size < 4) {
return ERROR_MALFORMED;
}
@@ -1805,7 +1807,7 @@
// fall through
}
- off_t offset;
+ off64_t offset;
size_t size;
uint32_t dts;
bool isSyncSample;
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index cbb1604..6760707 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -33,6 +33,10 @@
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/Utils.h>
#include <media/mediarecorder.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <unistd.h>
#include "include/ESDS.h"
@@ -64,7 +68,7 @@
bool isAvc() const { return mIsAvc; }
bool isAudio() const { return mIsAudio; }
bool isMPEG4() const { return mIsMPEG4; }
- void addChunkOffset(off_t offset);
+ void addChunkOffset(off64_t offset);
status_t dump(int fd, const Vector<String16>& args) const;
private:
@@ -99,7 +103,7 @@
List<MediaBuffer *> mChunkSamples;
size_t mNumStcoTableEntries;
- List<off_t> mChunkOffsets;
+ List<off64_t> mChunkOffsets;
size_t mNumStscTableEntries;
struct StscTableEntry {
@@ -214,7 +218,8 @@
};
MPEG4Writer::MPEG4Writer(const char *filename)
- : mFile(fopen(filename, "wb")),
+ : mFd(-1),
+ mInitCheck(NO_INIT),
mUse4ByteNalLength(true),
mUse32BitOffset(true),
mIsFileSizeLimitExplicitlyRequested(false),
@@ -224,11 +229,16 @@
mMdatOffset(0),
mEstimatedMoovBoxSize(0),
mInterleaveDurationUs(1000000) {
- CHECK(mFile != NULL);
+
+ mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
+ if (mFd >= 0) {
+ mInitCheck = OK;
+ }
}
MPEG4Writer::MPEG4Writer(int fd)
- : mFile(fdopen(fd, "wb")),
+ : mFd(dup(fd)),
+ mInitCheck(mFd < 0? NO_INIT: OK),
mUse4ByteNalLength(true),
mUse32BitOffset(true),
mIsFileSizeLimitExplicitlyRequested(false),
@@ -238,7 +248,6 @@
mMdatOffset(0),
mEstimatedMoovBoxSize(0),
mInterleaveDurationUs(1000000) {
- CHECK(mFile != NULL);
}
MPEG4Writer::~MPEG4Writer() {
@@ -368,7 +377,7 @@
}
status_t MPEG4Writer::start(MetaData *param) {
- if (mFile == NULL) {
+ if (mInitCheck != OK) {
return UNKNOWN_ERROR;
}
@@ -459,13 +468,13 @@
mEstimatedMoovBoxSize = estimateMoovBoxSize(bitRate);
}
CHECK(mEstimatedMoovBoxSize >= 8);
- fseeko(mFile, mFreeBoxOffset, SEEK_SET);
+ lseek64(mFd, mFreeBoxOffset, SEEK_SET);
writeInt32(mEstimatedMoovBoxSize);
write("free", 4);
mMdatOffset = mFreeBoxOffset + mEstimatedMoovBoxSize;
mOffset = mMdatOffset;
- fseeko(mFile, mMdatOffset, SEEK_SET);
+ lseek64(mFd, mMdatOffset, SEEK_SET);
if (mUse32BitOffset) {
write("????mdat", 8);
} else {
@@ -491,7 +500,7 @@
}
status_t MPEG4Writer::pause() {
- if (mFile == NULL) {
+ if (mInitCheck != OK) {
return OK;
}
mPaused = true;
@@ -507,7 +516,7 @@
}
void MPEG4Writer::stopWriterThread() {
- LOGV("stopWriterThread");
+ LOGD("Stopping writer thread");
{
Mutex::Autolock autolock(mLock);
@@ -518,6 +527,7 @@
void *dummy;
pthread_join(mThread, &dummy);
+ LOGD("Writer thread stopped");
}
/*
@@ -572,8 +582,9 @@
writeInt32(0x40000000); // w
}
+
status_t MPEG4Writer::stop() {
- if (mFile == NULL) {
+ if (mInitCheck != OK) {
return OK;
}
@@ -596,28 +607,28 @@
// Do not write out movie header on error.
if (err != OK) {
- fflush(mFile);
- fclose(mFile);
- mFile = NULL;
+ close(mFd);
+ mFd = -1;
+ mInitCheck = NO_INIT;
mStarted = false;
return err;
}
// Fix up the size of the 'mdat' chunk.
if (mUse32BitOffset) {
- fseeko(mFile, mMdatOffset, SEEK_SET);
+ lseek64(mFd, mMdatOffset, SEEK_SET);
int32_t size = htonl(static_cast<int32_t>(mOffset - mMdatOffset));
- fwrite(&size, 1, 4, mFile);
+ ::write(mFd, &size, 4);
} else {
- fseeko(mFile, mMdatOffset + 8, SEEK_SET);
+ lseek64(mFd, mMdatOffset + 8, SEEK_SET);
int64_t size = mOffset - mMdatOffset;
size = hton64(size);
- fwrite(&size, 1, 8, mFile);
+ ::write(mFd, &size, 8);
}
- fseeko(mFile, mOffset, SEEK_SET);
+ lseek64(mFd, mOffset, SEEK_SET);
time_t now = time(NULL);
- const off_t moovOffset = mOffset;
+ const off64_t moovOffset = mOffset;
mWriteMoovBoxToMemory = true;
mMoovBoxBuffer = (uint8_t *) malloc(mEstimatedMoovBoxSize);
mMoovBoxBufferOffset = 0;
@@ -637,7 +648,7 @@
writeInt16(0); // reserved
writeInt32(0); // reserved
writeInt32(0); // reserved
- writeCompositionMatrix(0);
+ writeCompositionMatrix(0); // matrix
writeInt32(0); // predefined
writeInt32(0); // predefined
writeInt32(0); // predefined
@@ -659,12 +670,12 @@
CHECK(mMoovBoxBufferOffset + 8 <= mEstimatedMoovBoxSize);
// Moov box
- fseeko(mFile, mFreeBoxOffset, SEEK_SET);
+ lseek64(mFd, mFreeBoxOffset, SEEK_SET);
mOffset = mFreeBoxOffset;
- write(mMoovBoxBuffer, 1, mMoovBoxBufferOffset, mFile);
+ write(mMoovBoxBuffer, 1, mMoovBoxBufferOffset);
// Free box
- fseeko(mFile, mOffset, SEEK_SET);
+ lseek64(mFd, mOffset, SEEK_SET);
writeInt32(mEstimatedMoovBoxSize - mMoovBoxBufferOffset);
write("free", 4);
@@ -678,9 +689,9 @@
CHECK(mBoxes.empty());
- fflush(mFile);
- fclose(mFile);
- mFile = NULL;
+ close(mFd);
+ mFd = -1;
+ mInitCheck = NO_INIT;
mStarted = false;
return err;
}
@@ -698,11 +709,12 @@
mLock.unlock();
}
-off_t MPEG4Writer::addSample_l(MediaBuffer *buffer) {
- off_t old_offset = mOffset;
+off64_t MPEG4Writer::addSample_l(MediaBuffer *buffer) {
+ off64_t old_offset = mOffset;
- fwrite((const uint8_t *)buffer->data() + buffer->range_offset(),
- 1, buffer->range_length(), mFile);
+ ::write(mFd,
+ (const uint8_t *)buffer->data() + buffer->range_offset(),
+ buffer->range_length());
mOffset += buffer->range_length();
@@ -723,33 +735,34 @@
}
}
-off_t MPEG4Writer::addLengthPrefixedSample_l(MediaBuffer *buffer) {
- off_t old_offset = mOffset;
+off64_t MPEG4Writer::addLengthPrefixedSample_l(MediaBuffer *buffer) {
+ off64_t old_offset = mOffset;
size_t length = buffer->range_length();
if (mUse4ByteNalLength) {
uint8_t x = length >> 24;
- fwrite(&x, 1, 1, mFile);
+ ::write(mFd, &x, 1);
x = (length >> 16) & 0xff;
- fwrite(&x, 1, 1, mFile);
+ ::write(mFd, &x, 1);
x = (length >> 8) & 0xff;
- fwrite(&x, 1, 1, mFile);
+ ::write(mFd, &x, 1);
x = length & 0xff;
- fwrite(&x, 1, 1, mFile);
+ ::write(mFd, &x, 1);
- fwrite((const uint8_t *)buffer->data() + buffer->range_offset(),
- 1, length, mFile);
+ ::write(mFd,
+ (const uint8_t *)buffer->data() + buffer->range_offset(),
+ length);
+
mOffset += length + 4;
} else {
CHECK(length < 65536);
uint8_t x = length >> 8;
- fwrite(&x, 1, 1, mFile);
+ ::write(mFd, &x, 1);
x = length & 0xff;
- fwrite(&x, 1, 1, mFile);
- fwrite((const uint8_t *)buffer->data() + buffer->range_offset(),
- 1, length, mFile);
+ ::write(mFd, &x, 1);
+ ::write(mFd, (const uint8_t *)buffer->data() + buffer->range_offset(), length);
mOffset += length + 2;
}
@@ -757,19 +770,21 @@
}
size_t MPEG4Writer::write(
- const void *ptr, size_t size, size_t nmemb, FILE *stream) {
+ const void *ptr, size_t size, size_t nmemb) {
const size_t bytes = size * nmemb;
if (mWriteMoovBoxToMemory) {
- off_t moovBoxSize = 8 + mMoovBoxBufferOffset + bytes;
+ // This happens only when we write the moov box at the end of
+ // recording, not for each output video/audio frame we receive.
+ off64_t moovBoxSize = 8 + mMoovBoxBufferOffset + bytes;
if (moovBoxSize > mEstimatedMoovBoxSize) {
- for (List<off_t>::iterator it = mBoxes.begin();
+ for (List<off64_t>::iterator it = mBoxes.begin();
it != mBoxes.end(); ++it) {
(*it) += mOffset;
}
- fseeko(mFile, mOffset, SEEK_SET);
- fwrite(mMoovBoxBuffer, 1, mMoovBoxBufferOffset, stream);
- fwrite(ptr, size, nmemb, stream);
+ lseek64(mFd, mOffset, SEEK_SET);
+ ::write(mFd, mMoovBoxBuffer, mMoovBoxBufferOffset);
+ ::write(mFd, ptr, size * nmemb);
mOffset += (bytes + mMoovBoxBufferOffset);
free(mMoovBoxBuffer);
mMoovBoxBuffer = NULL;
@@ -781,7 +796,7 @@
mMoovBoxBufferOffset += bytes;
}
} else {
- fwrite(ptr, size, nmemb, stream);
+ ::write(mFd, ptr, size * nmemb);
mOffset += bytes;
}
return bytes;
@@ -800,51 +815,51 @@
void MPEG4Writer::endBox() {
CHECK(!mBoxes.empty());
- off_t offset = *--mBoxes.end();
+ off64_t offset = *--mBoxes.end();
mBoxes.erase(--mBoxes.end());
if (mWriteMoovBoxToMemory) {
int32_t x = htonl(mMoovBoxBufferOffset - offset);
memcpy(mMoovBoxBuffer + offset, &x, 4);
} else {
- fseeko(mFile, offset, SEEK_SET);
+ lseek64(mFd, offset, SEEK_SET);
writeInt32(mOffset - offset);
mOffset -= 4;
- fseeko(mFile, mOffset, SEEK_SET);
+ lseek64(mFd, mOffset, SEEK_SET);
}
}
void MPEG4Writer::writeInt8(int8_t x) {
- write(&x, 1, 1, mFile);
+ write(&x, 1, 1);
}
void MPEG4Writer::writeInt16(int16_t x) {
x = htons(x);
- write(&x, 1, 2, mFile);
+ write(&x, 1, 2);
}
void MPEG4Writer::writeInt32(int32_t x) {
x = htonl(x);
- write(&x, 1, 4, mFile);
+ write(&x, 1, 4);
}
void MPEG4Writer::writeInt64(int64_t x) {
x = hton64(x);
- write(&x, 1, 8, mFile);
+ write(&x, 1, 8);
}
void MPEG4Writer::writeCString(const char *s) {
size_t n = strlen(s);
- write(s, 1, n + 1, mFile);
+ write(s, 1, n + 1);
}
void MPEG4Writer::writeFourcc(const char *s) {
CHECK_EQ(strlen(s), 4);
- write(s, 1, 4, mFile);
+ write(s, 1, 4);
}
void MPEG4Writer::write(const void *data, size_t size) {
- write(data, 1, size, mFile);
+ write(data, 1, size);
}
bool MPEG4Writer::isFileStreamable() const {
@@ -985,7 +1000,7 @@
++mNumSttsTableEntries;
}
-void MPEG4Writer::Track::addChunkOffset(off_t offset) {
+void MPEG4Writer::Track::addChunkOffset(off64_t offset) {
++mNumStcoTableEntries;
mChunkOffsets.push_back(offset);
}
@@ -1100,7 +1115,7 @@
for (List<MediaBuffer *>::iterator it = chunkIt->mSamples.begin();
it != chunkIt->mSamples.end(); ++it) {
- off_t offset = info->mTrack->isAvc()
+ off64_t offset = info->mTrack->isAvc()
? addLengthPrefixedSample_l(*it)
: addSample_l(*it);
if (it == chunkIt->mSamples.begin()) {
@@ -1225,7 +1240,7 @@
}
int32_t rotationDegrees;
- if (!mIsAudio && params && params->findInt32(kKeyRotationDegree, &rotationDegrees)) {
+ if (!mIsAudio && params && params->findInt32(kKeyRotation, &rotationDegrees)) {
mRotation = rotationDegrees;
}
@@ -1279,6 +1294,7 @@
}
status_t MPEG4Writer::Track::stop() {
+ LOGD("Stopping %s track", mIsAudio? "Audio": "Video");
if (mDone) {
return OK;
}
@@ -1290,6 +1306,7 @@
status_t err = (status_t) dummy;
+ LOGD("Stopping %s track source", mIsAudio? "Audio": "Video");
{
status_t status = mSource->stop();
if (err == OK && status != OK && status != ERROR_END_OF_STREAM) {
@@ -1297,6 +1314,7 @@
}
}
+ LOGD("%s track stopped", mIsAudio? "Audio": "Video");
return err;
}
@@ -1921,7 +1939,7 @@
trackProgressStatus(timestampUs);
}
if (mOwner->numTracks() == 1) {
- off_t offset = mIsAvc? mOwner->addLengthPrefixedSample_l(copy)
+ off64_t offset = mIsAvc? mOwner->addLengthPrefixedSample_l(copy)
: mOwner->addSample_l(copy);
if (mChunkOffsets.empty()) {
addChunkOffset(offset);
@@ -2122,7 +2140,7 @@
mOwner->writeInt16(mIsAudio ? 0x100 : 0); // volume
mOwner->writeInt16(0); // reserved
- mOwner->writeCompositionMatrix(mRotation);
+ mOwner->writeCompositionMatrix(mRotation); // matrix
if (mIsAudio) {
mOwner->writeInt32(0);
@@ -2260,6 +2278,9 @@
CHECK(mCodecSpecificData);
CHECK(mCodecSpecificDataSize > 0);
+ // Make sure all sizes encode to a single byte.
+ CHECK(mCodecSpecificDataSize + 23 < 128);
+
mOwner->writeInt32(0); // version=0, flags=0
mOwner->writeInt8(0x03); // ES_DescrTag
mOwner->writeInt8(23 + mCodecSpecificDataSize);
@@ -2469,7 +2490,7 @@
mOwner->beginBox(use32BitOffset? "stco": "co64");
mOwner->writeInt32(0); // version=0, flags=0
mOwner->writeInt32(mNumStcoTableEntries);
- for (List<off_t>::iterator it = mChunkOffsets.begin();
+ for (List<off64_t>::iterator it = mChunkOffsets.begin();
it != mChunkOffsets.end(); ++it) {
if (use32BitOffset) {
mOwner->writeInt32(static_cast<int32_t>(*it));
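
For the AVC path, addLengthPrefixedSample_l() above writes each NAL unit preceded by its length as a big-endian integer (4 bytes here, 2 bytes when mUse4ByteNalLength is false). A standalone sketch of that framing, illustrative only, writing into a vector instead of a file descriptor:

#include <cstdint>
#include <cstdio>
#include <vector>

// Illustrative sketch of 4-byte length-prefix framing: each NAL unit is
// preceded by its length as a big-endian 32-bit integer.
void appendLengthPrefixedNal(std::vector<uint8_t> *out,
                             const uint8_t *nal, size_t length) {
    out->push_back((length >> 24) & 0xff);
    out->push_back((length >> 16) & 0xff);
    out->push_back((length >> 8) & 0xff);
    out->push_back(length & 0xff);
    out->insert(out->end(), nal, nal + length);
}

int main() {
    const uint8_t nal[] = {0x65, 0x88, 0x84, 0x00};  // dummy payload
    std::vector<uint8_t> sample;
    appendLengthPrefixedNal(&sample, nal, sizeof(nal));
    printf("framed sample is %zu bytes\n", sample.size());  // 8
    return 0;
}
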
diff --git a/media/libstagefright/MediaBuffer.cpp b/media/libstagefright/MediaBuffer.cpp
index b973745..cbccd31 100644
--- a/media/libstagefright/MediaBuffer.cpp
+++ b/media/libstagefright/MediaBuffer.cpp
@@ -25,6 +25,8 @@
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MetaData.h>
+#include <ui/GraphicBuffer.h>
+
namespace android {
// XXX make this truly atomic.
@@ -61,6 +63,20 @@
mOriginal(NULL) {
}
+MediaBuffer::MediaBuffer(const sp<GraphicBuffer>& graphicBuffer)
+ : mObserver(NULL),
+ mNextBuffer(NULL),
+ mRefCount(0),
+ mData(NULL),
+ mSize(1),
+ mRangeOffset(0),
+ mRangeLength(mSize),
+ mGraphicBuffer(graphicBuffer),
+ mOwnsData(false),
+ mMetaData(new MetaData),
+ mOriginal(NULL) {
+}
+
void MediaBuffer::release() {
if (mObserver == NULL) {
CHECK_EQ(mRefCount, 0);
@@ -92,10 +108,12 @@
}
void *MediaBuffer::data() const {
+ CHECK(mGraphicBuffer == NULL);
return mData;
}
size_t MediaBuffer::size() const {
+ CHECK(mGraphicBuffer == NULL);
return mSize;
}
@@ -108,15 +126,19 @@
}
void MediaBuffer::set_range(size_t offset, size_t length) {
- if (offset + length > mSize) {
+ if ((mGraphicBuffer == NULL) && (offset + length > mSize)) {
LOGE("offset = %d, length = %d, mSize = %d", offset, length, mSize);
}
- CHECK(offset + length <= mSize);
+ CHECK((mGraphicBuffer != NULL) || (offset + length <= mSize));
mRangeOffset = offset;
mRangeLength = length;
}
+sp<GraphicBuffer> MediaBuffer::graphicBuffer() const {
+ return mGraphicBuffer;
+}
+
sp<MetaData> MediaBuffer::meta_data() {
return mMetaData;
}
@@ -158,6 +180,8 @@
}
MediaBuffer *MediaBuffer::clone() {
+ CHECK_EQ(mGraphicBuffer, NULL);
+
MediaBuffer *buffer = new MediaBuffer(mData, mSize);
buffer->set_range(mRangeOffset, mRangeLength);
buffer->mMetaData = new MetaData(*mMetaData.get());
@@ -169,4 +193,3 @@
}
} // namespace android
-
diff --git a/media/libstagefright/MediaDefs.cpp b/media/libstagefright/MediaDefs.cpp
index 7648d42..4599fca 100644
--- a/media/libstagefright/MediaDefs.cpp
+++ b/media/libstagefright/MediaDefs.cpp
@@ -42,4 +42,6 @@
const char *MEDIA_MIMETYPE_CONTAINER_MATROSKA = "video/x-matroska";
const char *MEDIA_MIMETYPE_CONTAINER_MPEG2TS = "video/mp2ts";
+const char *MEDIA_MIMETYPE_CONTAINER_WVM = "video/wvm";
+
} // namespace android
diff --git a/media/libstagefright/MediaExtractor.cpp b/media/libstagefright/MediaExtractor.cpp
index 965c370..d12ac64 100644
--- a/media/libstagefright/MediaExtractor.cpp
+++ b/media/libstagefright/MediaExtractor.cpp
@@ -25,6 +25,7 @@
#include "include/OggExtractor.h"
#include "include/MPEG2TSExtractor.h"
#include "include/DRMExtractor.h"
+#include "include/WVMExtractor.h"
#include "matroska/MatroskaExtractor.h"
@@ -92,6 +93,8 @@
return new MatroskaExtractor(source);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG2TS)) {
return new MPEG2TSExtractor(source);
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_WVM)) {
+ return new WVMExtractor(source);
}
return NULL;
diff --git a/media/libstagefright/MediaSourceSplitter.cpp b/media/libstagefright/MediaSourceSplitter.cpp
new file mode 100644
index 0000000..abc7012
--- /dev/null
+++ b/media/libstagefright/MediaSourceSplitter.cpp
@@ -0,0 +1,234 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaSourceSplitter"
+#include <utils/Log.h>
+
+#include <media/stagefright/MediaSourceSplitter.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MetaData.h>
+
+namespace android {
+
+MediaSourceSplitter::MediaSourceSplitter(sp<MediaSource> mediaSource) {
+ mNumberOfClients = 0;
+ mSource = mediaSource;
+ mSourceStarted = false;
+
+ mNumberOfClientsStarted = 0;
+ mNumberOfCurrentReads = 0;
+ mCurrentReadBit = 0;
+ mLastReadCompleted = true;
+}
+
+MediaSourceSplitter::~MediaSourceSplitter() {
+}
+
+sp<MediaSource> MediaSourceSplitter::createClient() {
+ Mutex::Autolock autoLock(mLock);
+
+ sp<MediaSource> client = new Client(this, mNumberOfClients++);
+ mClientsStarted.push(false);
+ mClientsDesiredReadBit.push(0);
+ return client;
+}
+
+status_t MediaSourceSplitter::start(int clientId, MetaData *params) {
+ Mutex::Autolock autoLock(mLock);
+
+ LOGV("start client (%d)", clientId);
+ if (mClientsStarted[clientId]) {
+ return OK;
+ }
+
+ mNumberOfClientsStarted++;
+
+ if (!mSourceStarted) {
+ LOGV("Starting real source from client (%d)", clientId);
+ status_t err = mSource->start(params);
+
+ if (err == OK) {
+ mSourceStarted = true;
+ mClientsStarted.editItemAt(clientId) = true;
+ mClientsDesiredReadBit.editItemAt(clientId) = !mCurrentReadBit;
+ }
+
+ return err;
+ } else {
+ mClientsStarted.editItemAt(clientId) = true;
+ if (mLastReadCompleted) {
+ // Last read was completed. So join in the threads for the next read.
+ mClientsDesiredReadBit.editItemAt(clientId) = !mCurrentReadBit;
+ } else {
+ // Last read is ongoing. So join in the threads for the current read.
+ mClientsDesiredReadBit.editItemAt(clientId) = mCurrentReadBit;
+ }
+ return OK;
+ }
+}
+
+status_t MediaSourceSplitter::stop(int clientId) {
+ Mutex::Autolock autoLock(mLock);
+
+ LOGV("stop client (%d)", clientId);
+ CHECK(clientId >= 0 && clientId < mNumberOfClients);
+ CHECK(mClientsStarted[clientId]);
+
+ if (--mNumberOfClientsStarted == 0) {
+ LOGV("Stopping real source from client (%d)", clientId);
+ status_t err = mSource->stop();
+ mSourceStarted = false;
+ mClientsStarted.editItemAt(clientId) = false;
+ return err;
+ } else {
+ mClientsStarted.editItemAt(clientId) = false;
+ if (!mLastReadCompleted && (mClientsDesiredReadBit[clientId] == mCurrentReadBit)) {
+ // !mLastReadCompleted implies that buffer has been read from source, but all
+ // clients haven't read it.
+ // mClientsDesiredReadBit[clientId] == mCurrentReadBit implies that this
+ // client would have wanted to read from this buffer. (i.e. it has not yet
+ // called read() for the current read buffer.)
+ // Since other threads may be waiting for all the clients' reads to complete,
+ // signal that this read has been aborted.
+ signalReadComplete_lock(true);
+ }
+ return OK;
+ }
+}
+
+sp<MetaData> MediaSourceSplitter::getFormat(int clientId) {
+ Mutex::Autolock autoLock(mLock);
+
+ LOGV("getFormat client (%d)", clientId);
+ return mSource->getFormat();
+}
+
+status_t MediaSourceSplitter::read(int clientId,
+ MediaBuffer **buffer, const MediaSource::ReadOptions *options) {
+ Mutex::Autolock autoLock(mLock);
+
+ CHECK(clientId >= 0 && clientId < mNumberOfClients);
+
+ LOGV("read client (%d)", clientId);
+ *buffer = NULL;
+
+ if (!mClientsStarted[clientId]) {
+ return OK;
+ }
+
+ if (mCurrentReadBit != mClientsDesiredReadBit[clientId]) {
+ // Desired buffer has not been read from source yet.
+
+ // If the current client is the special client with clientId = 0
+ // then read from source, else wait until the client 0 has finished
+ // reading from source.
+ if (clientId == 0) {
+ // Wait for all client's last read to complete first so as to not
+ // corrupt the buffer at mLastReadMediaBuffer.
+ waitForAllClientsLastRead_lock(clientId);
+
+ readFromSource_lock(options);
+ *buffer = mLastReadMediaBuffer;
+ } else {
+ waitForReadFromSource_lock(clientId);
+
+ *buffer = mLastReadMediaBuffer;
+ (*buffer)->add_ref();
+ }
+ CHECK(mCurrentReadBit == mClientsDesiredReadBit[clientId]);
+ } else {
+ // Desired buffer has already been read from source. Use the cached data.
+ CHECK(clientId != 0);
+
+ *buffer = mLastReadMediaBuffer;
+ (*buffer)->add_ref();
+ }
+
+ mClientsDesiredReadBit.editItemAt(clientId) = !mClientsDesiredReadBit[clientId];
+ signalReadComplete_lock(false);
+
+ return mLastReadStatus;
+}
+
+void MediaSourceSplitter::readFromSource_lock(const MediaSource::ReadOptions *options) {
+ mLastReadStatus = mSource->read(&mLastReadMediaBuffer, options);
+
+ mCurrentReadBit = !mCurrentReadBit;
+ mLastReadCompleted = false;
+ mReadFromSourceCondition.broadcast();
+}
+
+void MediaSourceSplitter::waitForReadFromSource_lock(int32_t clientId) {
+ mReadFromSourceCondition.wait(mLock);
+}
+
+void MediaSourceSplitter::waitForAllClientsLastRead_lock(int32_t clientId) {
+ if (mLastReadCompleted) {
+ return;
+ }
+ mAllReadsCompleteCondition.wait(mLock);
+ CHECK(mLastReadCompleted);
+}
+
+void MediaSourceSplitter::signalReadComplete_lock(bool readAborted) {
+ if (!readAborted) {
+ mNumberOfCurrentReads++;
+ }
+
+ if (mNumberOfCurrentReads == mNumberOfClientsStarted) {
+ mLastReadCompleted = true;
+ mNumberOfCurrentReads = 0;
+ mAllReadsCompleteCondition.broadcast();
+ }
+}
+
+status_t MediaSourceSplitter::pause(int clientId) {
+ return ERROR_UNSUPPORTED;
+}
+
+// Client
+
+MediaSourceSplitter::Client::Client(
+ sp<MediaSourceSplitter> splitter,
+ int32_t clientId) {
+ mSplitter = splitter;
+ mClientId = clientId;
+}
+
+status_t MediaSourceSplitter::Client::start(MetaData *params) {
+ return mSplitter->start(mClientId, params);
+}
+
+status_t MediaSourceSplitter::Client::stop() {
+ return mSplitter->stop(mClientId);
+}
+
+sp<MetaData> MediaSourceSplitter::Client::getFormat() {
+ return mSplitter->getFormat(mClientId);
+}
+
+status_t MediaSourceSplitter::Client::read(
+ MediaBuffer **buffer, const ReadOptions *options) {
+ return mSplitter->read(mClientId, buffer, options);
+}
+
+status_t MediaSourceSplitter::Client::pause() {
+ return mSplitter->pause(mClientId);
+}
+
+} // namespace android
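
The splitter above shares one source read among several clients by toggling a read bit: a client whose desired bit differs from the current bit triggers a fresh source read, everyone else reuses the cached buffer. A single-threaded, illustrative simulation of just that bookkeeping follows; locking and blocking are deliberately omitted and all names are made up:

#include <cstdio>
#include <vector>

// Illustrative sketch: the source is read once per round, the shared read
// bit flips, and every other client consumes the cached buffer until its
// own desired bit matches again.
int main() {
    const int kNumClients = 3;
    bool currentReadBit = false;
    std::vector<bool> desiredBit(kNumClients, true);  // all want the next read
    int cachedBuffer = -1;   // stands in for the last buffer read from source
    int nextSourceBuffer = 0;

    for (int round = 0; round < 3; ++round) {
        for (int client = 0; client < kNumClients; ++client) {
            if (currentReadBit != desiredBit[client]) {
                // Desired buffer not fetched yet: pull the next one from the
                // source (in the real splitter only client 0 does this, the
                // others block until it has).
                cachedBuffer = nextSourceBuffer++;
                currentReadBit = !currentReadBit;
            }
            printf("client %d reads buffer %d\n", client, cachedBuffer);
            desiredBit[client] = !desiredBit[client];
        }
    }
    return 0;
}
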
diff --git a/media/libstagefright/MetaData.cpp b/media/libstagefright/MetaData.cpp
index 63b476e..884f3b4 100644
--- a/media/libstagefright/MetaData.cpp
+++ b/media/libstagefright/MetaData.cpp
@@ -70,6 +70,19 @@
return setData(key, TYPE_POINTER, &value, sizeof(value));
}
+bool MetaData::setRect(
+ uint32_t key,
+ int32_t left, int32_t top,
+ int32_t right, int32_t bottom) {
+ Rect r;
+ r.mLeft = left;
+ r.mTop = top;
+ r.mRight = right;
+ r.mBottom = bottom;
+
+ return setData(key, TYPE_RECT, &r, sizeof(r));
+}
+
bool MetaData::findCString(uint32_t key, const char **value) {
uint32_t type;
const void *data;
@@ -143,6 +156,28 @@
return true;
}
+bool MetaData::findRect(
+ uint32_t key,
+ int32_t *left, int32_t *top,
+ int32_t *right, int32_t *bottom) {
+ uint32_t type;
+ const void *data;
+ size_t size;
+ if (!findData(key, &type, &data, &size) || type != TYPE_RECT) {
+ return false;
+ }
+
+ CHECK_EQ(size, sizeof(Rect));
+
+ const Rect *r = (const Rect *)data;
+ *left = r->mLeft;
+ *top = r->mTop;
+ *right = r->mRight;
+ *bottom = r->mBottom;
+
+ return true;
+}
+
bool MetaData::setData(
uint32_t key, uint32_t type, const void *data, size_t size) {
bool overwrote_existing = true;
diff --git a/media/libstagefright/NuCachedSource2.cpp b/media/libstagefright/NuCachedSource2.cpp
index b67002d..829ab20 100644
--- a/media/libstagefright/NuCachedSource2.cpp
+++ b/media/libstagefright/NuCachedSource2.cpp
@@ -201,7 +201,7 @@
return mSource->initCheck();
}
-status_t NuCachedSource2::getSize(off_t *size) {
+status_t NuCachedSource2::getSize(off64_t *size) {
return mSource->getSize(size);
}
@@ -325,15 +325,17 @@
mCondition.signal();
}
-void NuCachedSource2::restartPrefetcherIfNecessary_l() {
+void NuCachedSource2::restartPrefetcherIfNecessary_l(
+ bool ignoreLowWaterThreshold) {
static const size_t kGrayArea = 256 * 1024;
if (mFetching || mFinalStatus != OK) {
return;
}
- if (mCacheOffset + mCache->totalSize() - mLastAccessPos
- >= kLowWaterThreshold) {
+ if (!ignoreLowWaterThreshold
+ && mCacheOffset + mCache->totalSize() - mLastAccessPos
+ >= kLowWaterThreshold) {
return;
}
@@ -351,7 +353,7 @@
mFetching = true;
}
-ssize_t NuCachedSource2::readAt(off_t offset, void *data, size_t size) {
+ssize_t NuCachedSource2::readAt(off64_t offset, void *data, size_t size) {
Mutex::Autolock autoSerializer(mSerializer);
LOGV("readAt offset %ld, size %d", offset, size);
@@ -406,27 +408,27 @@
size_t NuCachedSource2::approxDataRemaining_l(bool *eos) {
*eos = (mFinalStatus != OK);
- off_t lastBytePosCached = mCacheOffset + mCache->totalSize();
+ off64_t lastBytePosCached = mCacheOffset + mCache->totalSize();
if (mLastAccessPos < lastBytePosCached) {
return lastBytePosCached - mLastAccessPos;
}
return 0;
}
-ssize_t NuCachedSource2::readInternal(off_t offset, void *data, size_t size) {
+ssize_t NuCachedSource2::readInternal(off64_t offset, void *data, size_t size) {
LOGV("readInternal offset %ld size %d", offset, size);
Mutex::Autolock autoLock(mLock);
if (offset < mCacheOffset
- || offset >= (off_t)(mCacheOffset + mCache->totalSize())) {
- static const off_t kPadding = 32768;
+ || offset >= (off64_t)(mCacheOffset + mCache->totalSize())) {
+ static const off64_t kPadding = 32768;
// In the presence of multiple decoded streams, one of them will
// trigger this seek request, the other one will request data "nearby"
// soon, adjust the seek position so that the subsequent request
// does not trigger another seek.
- off_t seekOffset = (offset > kPadding) ? offset - kPadding : 0;
+ off64_t seekOffset = (offset > kPadding) ? offset - kPadding : 0;
seekInternal_l(seekOffset);
}
@@ -455,15 +457,15 @@
return -EAGAIN;
}
-status_t NuCachedSource2::seekInternal_l(off_t offset) {
+status_t NuCachedSource2::seekInternal_l(off64_t offset) {
mLastAccessPos = offset;
if (offset >= mCacheOffset
- && offset <= (off_t)(mCacheOffset + mCache->totalSize())) {
+ && offset <= (off64_t)(mCacheOffset + mCache->totalSize())) {
return OK;
}
- LOGI("new range: offset= %ld", offset);
+ LOGI("new range: offset= %lld", offset);
mCacheOffset = offset;
@@ -510,5 +512,22 @@
mSuspended = true;
}
+void NuCachedSource2::resumeFetchingIfNecessary() {
+ Mutex::Autolock autoLock(mLock);
+
+ restartPrefetcherIfNecessary_l(true /* ignore low water threshold */);
+}
+
+DecryptHandle* NuCachedSource2::DrmInitialization(DrmManagerClient* client) {
+ return mSource->DrmInitialization(client);
+}
+
+void NuCachedSource2::getDrmInfo(DecryptHandle **handle, DrmManagerClient **client) {
+ mSource->getDrmInfo(handle, client);
+}
+
+String8 NuCachedSource2::getUri() {
+ return mSource->getUri();
+}
} // namespace android
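
The padding logic above backs a seek up by kPadding bytes so that, when several streams share the cache, the second stream's nearby request still lands inside the new window. A trivial sketch of that adjustment, illustrative only:

#include <cstdint>
#include <cstdio>

// Illustrative sketch of the cache seek adjustment.
int64_t adjustedSeekOffset(int64_t offset) {
    static const int64_t kPadding = 32768;
    return (offset > kPadding) ? offset - kPadding : 0;
}

int main() {
    printf("%lld\n", (long long)adjustedSeekOffset(1000000));  // 967232
    printf("%lld\n", (long long)adjustedSeekOffset(1000));     // 0
    return 0;
}
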
diff --git a/media/libstagefright/NuHTTPDataSource.cpp b/media/libstagefright/NuHTTPDataSource.cpp
index 04cca47..269b233 100644
--- a/media/libstagefright/NuHTTPDataSource.cpp
+++ b/media/libstagefright/NuHTTPDataSource.cpp
@@ -5,6 +5,7 @@
#include "include/NuHTTPDataSource.h"
#include <cutils/properties.h>
+#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaErrors.h>
@@ -69,7 +70,12 @@
mContentLength(0),
mContentLengthValid(false),
mHasChunkedTransferEncoding(false),
- mChunkDataBytesLeft(0) {
+ mChunkDataBytesLeft(0),
+ mNumBandwidthHistoryItems(0),
+ mTotalTransferTimeUs(0),
+ mTotalTransferBytes(0),
+ mDecryptHandle(NULL),
+ mDrmManagerClient(NULL) {
}
NuHTTPDataSource::~NuHTTPDataSource() {
@@ -78,7 +84,7 @@
status_t NuHTTPDataSource::connect(
const char *uri,
const KeyedVector<String8, String8> *overrides,
- off_t offset) {
+ off64_t offset) {
String8 headers;
MakeFullHeaders(overrides, &headers);
@@ -88,9 +94,12 @@
status_t NuHTTPDataSource::connect(
const char *uri,
const String8 &headers,
- off_t offset) {
+ off64_t offset) {
String8 host, path;
unsigned port;
+
+ mUri = uri;
+
if (!ParseURL(uri, &host, &port, &path)) {
return ERROR_MALFORMED;
}
@@ -106,8 +115,8 @@
status_t NuHTTPDataSource::connect(
const char *host, unsigned port, const char *path,
const String8 &headers,
- off_t offset) {
- LOGI("connect to %s:%u%s @%ld", host, port, path, offset);
+ off64_t offset) {
+ LOGI("connect to %s:%u%s @%lld", host, port, path, offset);
bool needsToReconnect = true;
@@ -153,7 +162,7 @@
if (offset != 0) {
char rangeHeader[128];
- sprintf(rangeHeader, "Range: bytes=%ld-\r\n", offset);
+ sprintf(rangeHeader, "Range: bytes=%lld-\r\n", offset);
request.append(rangeHeader);
}
@@ -169,7 +178,7 @@
}
if (IsRedirectStatusCode(httpStatus)) {
- string value;
+ AString value;
CHECK(mHTTP.find_header_value("Location", &value));
mState = DISCONNECTED;
@@ -189,9 +198,8 @@
mHasChunkedTransferEncoding = false;
{
- string value;
- if (mHTTP.find_header_value("Transfer-Encoding", &value)
- || mHTTP.find_header_value("Transfer-encoding", &value)) {
+ AString value;
+ if (mHTTP.find_header_value("Transfer-Encoding", &value)) {
// We don't currently support any transfer encodings but
// chunked.
@@ -213,11 +221,11 @@
applyTimeoutResponse();
if (offset == 0) {
- string value;
+ AString value;
unsigned long x;
- if (mHTTP.find_header_value(string("Content-Length"), &value)
+ if (mHTTP.find_header_value(AString("Content-Length"), &value)
&& ParseSingleUnsignedLong(value.c_str(), &x)) {
- mContentLength = (off_t)x;
+ mContentLength = (off64_t)x;
mContentLengthValid = true;
} else {
LOGW("Server did not give us the content length!");
@@ -230,9 +238,9 @@
return ERROR_UNSUPPORTED;
}
- string value;
+ AString value;
unsigned long x;
- if (mHTTP.find_header_value(string("Content-Range"), &value)) {
+ if (mHTTP.find_header_value(AString("Content-Range"), &value)) {
const char *slashPos = strchr(value.c_str(), '/');
if (slashPos != NULL
&& ParseSingleUnsignedLong(slashPos + 1, &x)) {
@@ -322,7 +330,7 @@
return n;
}
-ssize_t NuHTTPDataSource::readAt(off_t offset, void *data, size_t size) {
+ssize_t NuHTTPDataSource::readAt(off64_t offset, void *data, size_t size) {
LOGV("readAt offset %ld, size %d", offset, size);
Mutex::Autolock autoLock(mLock);
@@ -349,6 +357,8 @@
size_t numBytesRead = 0;
while (numBytesRead < size) {
+ int64_t startTimeUs = ALooper::GetNowUs();
+
ssize_t n =
internalRead((uint8_t *)data + numBytesRead, size - numBytesRead);
@@ -356,6 +366,9 @@
return n;
}
+ int64_t delayUs = ALooper::GetNowUs() - startTimeUs;
+ addBandwidthMeasurement_l(n, delayUs);
+
numBytesRead += (size_t)n;
if (n == 0) {
@@ -374,7 +387,7 @@
return numBytesRead;
}
-status_t NuHTTPDataSource::getSize(off_t *size) {
+status_t NuHTTPDataSource::getSize(off64_t *size) {
*size = 0;
if (mState != CONNECTED) {
@@ -425,7 +438,7 @@
}
void NuHTTPDataSource::applyTimeoutResponse() {
- string timeout;
+ AString timeout;
if (mHTTP.find_header_value("X-SocketTimeout", &timeout)) {
const char *s = timeout.c_str();
char *end;
@@ -440,4 +453,64 @@
}
}
+bool NuHTTPDataSource::estimateBandwidth(int32_t *bandwidth_bps) {
+ Mutex::Autolock autoLock(mLock);
+
+ if (mNumBandwidthHistoryItems < 10) {
+ return false;
+ }
+
+ *bandwidth_bps = ((double)mTotalTransferBytes * 8E6 / mTotalTransferTimeUs);
+
+ return true;
+}
+
+void NuHTTPDataSource::addBandwidthMeasurement_l(
+ size_t numBytes, int64_t delayUs) {
+ BandwidthEntry entry;
+ entry.mDelayUs = delayUs;
+ entry.mNumBytes = numBytes;
+ mTotalTransferTimeUs += delayUs;
+ mTotalTransferBytes += numBytes;
+
+ mBandwidthHistory.push_back(entry);
+ if (++mNumBandwidthHistoryItems > 100) {
+ BandwidthEntry *entry = &*mBandwidthHistory.begin();
+ mTotalTransferTimeUs -= entry->mDelayUs;
+ mTotalTransferBytes -= entry->mNumBytes;
+ mBandwidthHistory.erase(mBandwidthHistory.begin());
+ --mNumBandwidthHistoryItems;
+ }
+}
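addBandwidthMeasurement_l keeps a sliding window of the last 100 transfers, and estimateBandwidth refuses to answer until at least 10 samples exist; the estimate is simply total bytes over total time. A standalone sketch of the same bookkeeping using std::deque (the window size and sample threshold mirror the patch; the class itself is illustrative):

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>
    #include <deque>

    class BandwidthEstimator {
    public:
        BandwidthEstimator() : mTotalTransferTimeUs(0), mTotalTransferBytes(0) {}

        // Record one transfer of numBytes that took delayUs microseconds.
        void addMeasurement(size_t numBytes, int64_t delayUs) {
            mTotalTransferTimeUs += delayUs;
            mTotalTransferBytes += numBytes;
            mHistory.push_back({delayUs, numBytes});

            if (mHistory.size() > 100) {              // keep only the last 100 samples
                const Entry &oldest = mHistory.front();
                mTotalTransferTimeUs -= oldest.delayUs;
                mTotalTransferBytes -= oldest.numBytes;
                mHistory.pop_front();
            }
        }

        // Returns false until enough samples exist for the estimate to be meaningful.
        bool estimate(int32_t *bandwidth_bps) const {
            if (mHistory.size() < 10) {
                return false;
            }
            *bandwidth_bps = (int32_t)((double)mTotalTransferBytes * 8E6
                                        / (double)mTotalTransferTimeUs);
            return true;
        }

    private:
        struct Entry {
            int64_t delayUs;
            size_t numBytes;
        };
        std::deque<Entry> mHistory;
        int64_t mTotalTransferTimeUs;
        int64_t mTotalTransferBytes;
    };

    int main() {
        BandwidthEstimator est;
        for (int i = 0; i < 20; ++i) {
            est.addMeasurement(32 * 1024, 10000);     // 32 KiB every 10 ms
        }
        int32_t bps;
        if (est.estimate(&bps)) {
            printf("estimated bandwidth: %d bps\n", bps);  // ~26 Mbps
        }
        return 0;
    }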
+
+DecryptHandle* NuHTTPDataSource::DrmInitialization(DrmManagerClient* client) {
+ if (client == NULL) {
+ return NULL;
+ }
+ mDrmManagerClient = client;
+
+ if (mDecryptHandle == NULL) {
+ /* Note that if a redirect occurred, mUri holds the redirect URI rather
+ * than the original one.
+ */

+ mDecryptHandle = mDrmManagerClient->openDecryptSession(mUri);
+ }
+
+ if (mDecryptHandle == NULL) {
+ mDrmManagerClient = NULL;
+ }
+
+ return mDecryptHandle;
+}
+
+void NuHTTPDataSource::getDrmInfo(DecryptHandle **handle, DrmManagerClient **client) {
+ *handle = mDecryptHandle;
+
+ *client = mDrmManagerClient;
+}
+
+String8 NuHTTPDataSource::getUri() {
+ return mUri;
+}
+
} // namespace android
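When connecting at a non-zero offset, the total file size is recovered from the Content-Range header ("bytes start-end/total") by parsing whatever follows the slash. A minimal sketch of that parsing step, assuming the header value has already been extracted:

    #include <cstdint>
    #include <cstdio>
    #include <cstdlib>
    #include <cstring>

    // Extract the total size from a "bytes <start>-<end>/<total>" value.
    // Returns false if the value has no slash or no parseable number after it.
    static bool parseContentRangeTotal(const char *value, int64_t *total) {
        const char *slashPos = strchr(value, '/');
        if (slashPos == NULL) {
            return false;
        }
        char *end;
        unsigned long long x = strtoull(slashPos + 1, &end, 10);
        if (end == slashPos + 1) {
            return false;   // nothing numeric after the '/'
        }
        *total = (int64_t)x;
        return true;
    }

    int main() {
        int64_t total;
        if (parseContentRangeTotal("bytes 1000000-1999999/5368709120", &total)) {
            printf("content length: %lld\n", (long long)total);  // 5 GiB
        }
        return 0;
    }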
diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
index 980da77..3108e4e 100644
--- a/media/libstagefright/OMXCodec.cpp
+++ b/media/libstagefright/OMXCodec.cpp
@@ -38,10 +38,11 @@
#include <binder/IServiceManager.h>
#include <binder/MemoryDealer.h>
#include <binder/ProcessState.h>
+#include <media/stagefright/foundation/ADebug.h>
#include <media/IMediaPlayerService.h>
+#include <media/stagefright/HardwareAPI.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaBufferGroup.h>
-#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaExtractor.h>
#include <media/stagefright/MetaData.h>
@@ -53,6 +54,7 @@
#include <OMX_Component.h>
#include "include/ThreadedSource.h"
+#include "include/avc_utils.h"
namespace android {
@@ -152,37 +154,37 @@
static const CodecInfo kDecoderInfo[] = {
{ MEDIA_MIMETYPE_IMAGE_JPEG, "OMX.TI.JPEG.decode" },
+// { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.Nvidia.mp3.decoder" },
// { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.TI.MP3.decode" },
{ MEDIA_MIMETYPE_AUDIO_MPEG, "MP3Decoder" },
-// { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.PV.mp3dec" },
// { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.TI.AMR.decode" },
+// { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.Nvidia.amr.decoder" },
{ MEDIA_MIMETYPE_AUDIO_AMR_NB, "AMRNBDecoder" },
-// { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.PV.amrdec" },
+// { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.Nvidia.amrwb.decoder" },
{ MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.TI.WBAMR.decode" },
{ MEDIA_MIMETYPE_AUDIO_AMR_WB, "AMRWBDecoder" },
-// { MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.PV.amrdec" },
+// { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.Nvidia.aac.decoder" },
{ MEDIA_MIMETYPE_AUDIO_AAC, "OMX.TI.AAC.decode" },
{ MEDIA_MIMETYPE_AUDIO_AAC, "AACDecoder" },
-// { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.PV.aacdec" },
{ MEDIA_MIMETYPE_AUDIO_G711_ALAW, "G711Decoder" },
{ MEDIA_MIMETYPE_AUDIO_G711_MLAW, "G711Decoder" },
+ { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.Nvidia.mp4.decode" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.7x30.video.decoder.mpeg4" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.video.decoder.mpeg4" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.Video.Decoder" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.SEC.MPEG4.Decoder" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "M4vH263Decoder" },
-// { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.PV.mpeg4dec" },
+ { MEDIA_MIMETYPE_VIDEO_H263, "OMX.Nvidia.h263.decode" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.7x30.video.decoder.h263" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.video.decoder.h263" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.SEC.H263.Decoder" },
{ MEDIA_MIMETYPE_VIDEO_H263, "M4vH263Decoder" },
-// { MEDIA_MIMETYPE_VIDEO_H263, "OMX.PV.h263dec" },
+ { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.Nvidia.h264.decode" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.7x30.video.decoder.avc" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.video.decoder.avc" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.Video.Decoder" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.SEC.AVC.Decoder" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "AVCDecoder" },
-// { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.PV.avcdec" },
{ MEDIA_MIMETYPE_AUDIO_VORBIS, "VorbisDecoder" },
{ MEDIA_MIMETYPE_VIDEO_VPX, "VPXDecoder" },
};
@@ -194,25 +196,24 @@
{ MEDIA_MIMETYPE_AUDIO_AMR_WB, "AMRWBEncoder" },
{ MEDIA_MIMETYPE_AUDIO_AAC, "OMX.TI.AAC.encode" },
{ MEDIA_MIMETYPE_AUDIO_AAC, "AACEncoder" },
-// { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.PV.aacenc" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.7x30.video.encoder.mpeg4" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.video.encoder.mpeg4" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.Video.encoder" },
+ { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.Nvidia.mp4.encoder" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.SEC.MPEG4.Encoder" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "M4vH263Encoder" },
-// { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.PV.mpeg4enc" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.7x30.video.encoder.h263" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.video.encoder.h263" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.TI.Video.encoder" },
+ { MEDIA_MIMETYPE_VIDEO_H263, "OMX.Nvidia.h263.encoder" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.SEC.H263.Encoder" },
{ MEDIA_MIMETYPE_VIDEO_H263, "M4vH263Encoder" },
-// { MEDIA_MIMETYPE_VIDEO_H263, "OMX.PV.h263enc" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.7x30.video.encoder.avc" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.video.encoder.avc" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.Video.encoder" },
+ { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.Nvidia.h264.encoder" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.SEC.AVC.Encoder" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "AVCEncoder" },
-// { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.PV.avcenc" },
};
#undef OPTIONAL
@@ -264,39 +265,6 @@
return NULL;
}
-enum {
- kAVCProfileBaseline = 0x42,
- kAVCProfileMain = 0x4d,
- kAVCProfileExtended = 0x58,
- kAVCProfileHigh = 0x64,
- kAVCProfileHigh10 = 0x6e,
- kAVCProfileHigh422 = 0x7a,
- kAVCProfileHigh444 = 0xf4,
- kAVCProfileCAVLC444Intra = 0x2c
-};
-
-static const char *AVCProfileToString(uint8_t profile) {
- switch (profile) {
- case kAVCProfileBaseline:
- return "Baseline";
- case kAVCProfileMain:
- return "Main";
- case kAVCProfileExtended:
- return "Extended";
- case kAVCProfileHigh:
- return "High";
- case kAVCProfileHigh10:
- return "High 10";
- case kAVCProfileHigh422:
- return "High 422";
- case kAVCProfileHigh444:
- return "High 444";
- case kAVCProfileCAVLC444Intra:
- return "CAVLC 444 Intra";
- default: return "Unknown";
- }
-}
-
template<class T>
static void InitOMXParams(T *params) {
params->nSize = sizeof(T);
@@ -307,16 +275,15 @@
}
static bool IsSoftwareCodec(const char *componentName) {
- if (!strncmp("OMX.PV.", componentName, 7)) {
- return true;
+ if (!strncmp("OMX.", componentName, 4)) {
+ return false;
}
- return false;
+ return true;
}
// A sort order in which non-OMX components are first,
-// followed by software codecs, i.e. OMX.PV.*, followed
-// by all the others.
+// followed by software codecs, and followed by all the others.
static int CompareSoftwareCodecsFirst(
const String8 *elem1, const String8 *elem2) {
bool isNotOMX1 = strncmp(elem1->string(), "OMX.", 4);
@@ -350,9 +317,13 @@
const char *componentName, bool isEncoder) {
uint32_t quirks = 0;
- if (!strcmp(componentName, "OMX.PV.avcdec")) {
- quirks |= kWantsNALFragments;
+ if (!strcmp(componentName, "OMX.Nvidia.amr.decoder") ||
+ !strcmp(componentName, "OMX.Nvidia.amrwb.decoder") ||
+ !strcmp(componentName, "OMX.Nvidia.aac.decoder") ||
+ !strcmp(componentName, "OMX.Nvidia.mp3.decoder")) {
+ quirks |= kDecoderLiesAboutNumberOfChannels;
}
+
if (!strcmp(componentName, "OMX.TI.MP3.decode")) {
quirks |= kNeedsFlushBeforeDisable;
quirks |= kDecoderLiesAboutNumberOfChannels;
@@ -412,13 +383,6 @@
quirks |= kOutputBuffersAreUnreadable;
}
- if (!strncmp(componentName, "OMX.SEC.", 8) && isEncoder) {
- // These input buffers contain meta data (for instance,
- // information helps locate the actual YUV data, or
- // the physical address of the YUV data).
- quirks |= kStoreMetaDataInInputVideoBuffers;
- }
-
return quirks;
}
@@ -454,7 +418,16 @@
continue;
}
- matchingCodecs->push(String8(componentName));
+ // When requesting software-only codecs, only push software codecs
+ // When requesting hardware-only codecs, only push hardware codecs
+ // When neither software-only nor hardware-only codecs are
+ // requested, push all codecs
+ if (((flags & kSoftwareCodecsOnly) && IsSoftwareCodec(componentName)) ||
+ ((flags & kHardwareCodecsOnly) && !IsSoftwareCodec(componentName)) ||
+ (!(flags & (kSoftwareCodecsOnly | kHardwareCodecsOnly)))) {
+
+ matchingCodecs->push(String8(componentName));
+ }
}
if (flags & kPreferSoftwareCodecs) {
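The matching loop above filters on the new kSoftwareCodecsOnly / kHardwareCodecsOnly flags, using the simplified rule that anything whose name starts with "OMX." is a hardware codec. A small standalone sketch of the same filter (the flag bit values here are illustrative):

    #include <cstdint>
    #include <cstdio>
    #include <cstring>
    #include <string>
    #include <vector>

    enum {
        kSoftwareCodecsOnly = 1,   // illustrative bit values
        kHardwareCodecsOnly = 2,
    };

    // Everything except "OMX.*" components is treated as a software codec.
    static bool IsSoftwareCodec(const char *componentName) {
        return strncmp("OMX.", componentName, 4) != 0;
    }

    // Push a component only if it matches the requested subset, or if no
    // subset was requested at all (mirrors the condition in the patch).
    static bool shouldPush(uint32_t flags, const char *componentName) {
        bool isSoftware = IsSoftwareCodec(componentName);
        return ((flags & kSoftwareCodecsOnly) && isSoftware)
            || ((flags & kHardwareCodecsOnly) && !isSoftware)
            || !(flags & (kSoftwareCodecsOnly | kHardwareCodecsOnly));
    }

    int main() {
        const char *candidates[] = { "AVCDecoder", "OMX.qcom.video.decoder.avc" };
        std::vector<std::string> matching;
        for (const char *name : candidates) {
            if (shouldPush(kHardwareCodecsOnly, name)) {
                matching.push_back(name);
            }
        }
        for (const std::string &name : matching) {
            printf("%s\n", name.c_str());  // prints only the OMX.* component
        }
        return 0;
    }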
@@ -468,7 +441,8 @@
const sp<MetaData> &meta, bool createEncoder,
const sp<MediaSource> &source,
const char *matchComponentName,
- uint32_t flags) {
+ uint32_t flags,
+ const sp<ANativeWindow> &nativeWindow) {
const char *mime;
bool success = meta->findCString(kKeyMIMEType, &mime);
CHECK(success);
@@ -524,7 +498,7 @@
sp<OMXCodec> codec = new OMXCodec(
omx, node, quirks,
createEncoder, mime, componentName,
- source);
+ source, nativeWindow);
observer->setCodec(codec);
@@ -542,13 +516,17 @@
}
status_t OMXCodec::configureCodec(const sp<MetaData> &meta, uint32_t flags) {
+ mIsMetaDataStoredInVideoBuffers = false;
+ if (flags & kStoreMetaDataInVideoBuffers) {
+ mIsMetaDataStoredInVideoBuffers = true;
+ }
if (!(flags & kIgnoreCodecSpecificData)) {
uint32_t type;
const void *data;
size_t size;
if (meta->findData(kKeyESDS, &type, &data, &size)) {
ESDS esds((const char *)data, size);
- CHECK_EQ(esds.InitCheck(), OK);
+ CHECK_EQ(esds.InitCheck(), (status_t)OK);
const void *codec_specific_data;
size_t codec_specific_data_size;
@@ -563,7 +541,7 @@
const uint8_t *ptr = (const uint8_t *)data;
CHECK(size >= 7);
- CHECK_EQ(ptr[0], 1); // configurationVersion == 1
+ CHECK_EQ((unsigned)ptr[0], 1u); // configurationVersion == 1
uint8_t profile = ptr[1];
uint8_t level = ptr[3];
@@ -617,7 +595,7 @@
size -= length;
}
- CODEC_LOGV(
+ CODEC_LOGI(
"AVC profile = %d (%s), level = %d",
(int)profile, AVCProfileToString(profile), level);
@@ -732,6 +710,16 @@
mQuirks &= ~kOutputBuffersAreUnreadable;
}
+ if (mNativeWindow != NULL
+ && !mIsEncoder
+ && !strncasecmp(mMIME, "video/", 6)
+ && !strncmp(mComponentName, "OMX.", 4)) {
+ status_t err = initNativeWindow();
+ if (err != OK) {
+ return err;
+ }
+ }
+
return OK;
}
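Throughout this change, bare CHECK_EQ(err, OK) becomes CHECK_EQ(err, (status_t)OK) and enum values are compared as ints, presumably because the foundation/ADebug.h version of CHECK_EQ (which replaces MediaDebug.h above) is stricter about mixing argument types. A minimal sketch of such a strict check; this illustrates the pattern only and is not the actual ADebug implementation:

    #include <cstdint>
    #include <cstdio>
    #include <cstdlib>
    #include <type_traits>

    typedef int32_t status_t;       // Android-style status type (assumption)
    enum { OK = 0 };                // untyped enumerator, as in Errors.h

    // A deliberately strict CHECK_EQ: both arguments must have the same type.
    template <typename A, typename B>
    void CheckEqImpl(const A &a, const B &b, const char *expr) {
        static_assert(std::is_same<A, B>::value,
                      "CHECK_EQ arguments must have the same type");
        if (!(a == b)) {
            fprintf(stderr, "CHECK_EQ failed: %s\n", expr);
            abort();
        }
    }
    #define CHECK_EQ(a, b) CheckEqImpl((a), (b), #a " == " #b)

    int main() {
        status_t err = 0;
        // CHECK_EQ(err, OK);           // would not compile: int32_t vs enum type
        CHECK_EQ(err, (status_t)OK);    // both sides are status_t, so it compiles
        return 0;
    }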
@@ -742,7 +730,7 @@
status_t err = mOMX->getParameter(
mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
if ((portIndex == kPortIndexInput && (mQuirks & kInputBufferSizesAreBogus))
|| (def.nBufferSize < size)) {
@@ -751,11 +739,11 @@
err = mOMX->setParameter(
mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
err = mOMX->getParameter(
mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
// Make sure the setting actually stuck.
if (portIndex == kPortIndexInput
@@ -915,7 +903,7 @@
int32_t width, height, frameRate, bitRate, stride, sliceHeight;
bool success = meta->findInt32(kKeyWidth, &width);
success = success && meta->findInt32(kKeyHeight, &height);
- success = success && meta->findInt32(kKeySampleRate, &frameRate);
+ success = success && meta->findInt32(kKeyFrameRate, &frameRate);
success = success && meta->findInt32(kKeyBitRate, &bitRate);
success = success && meta->findInt32(kKeyStride, &stride);
success = success && meta->findInt32(kKeySliceHeight, &sliceHeight);
@@ -935,7 +923,7 @@
}
OMX_COLOR_FORMATTYPE colorFormat;
- CHECK_EQ(OK, findTargetColorFormat(meta, &colorFormat));
+ CHECK_EQ((status_t)OK, findTargetColorFormat(meta, &colorFormat));
status_t err;
OMX_PARAM_PORTDEFINITIONTYPE def;
@@ -944,19 +932,19 @@
//////////////////////// Input port /////////////////////////
CHECK_EQ(setVideoPortFormatType(
kPortIndexInput, OMX_VIDEO_CodingUnused,
- colorFormat), OK);
+ colorFormat), (status_t)OK);
InitOMXParams(&def);
def.nPortIndex = kPortIndexInput;
err = mOMX->getParameter(
mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
def.nBufferSize = getFrameSize(colorFormat,
stride > 0? stride: -stride, sliceHeight);
- CHECK_EQ(def.eDomain, OMX_PortDomainVideo);
+ CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo);
video_def->nFrameWidth = width;
video_def->nFrameHeight = height;
@@ -968,20 +956,20 @@
err = mOMX->setParameter(
mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
//////////////////////// Output port /////////////////////////
CHECK_EQ(setVideoPortFormatType(
kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused),
- OK);
+ (status_t)OK);
InitOMXParams(&def);
def.nPortIndex = kPortIndexOutput;
err = mOMX->getParameter(
mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
- CHECK_EQ(err, OK);
- CHECK_EQ(def.eDomain, OMX_PortDomainVideo);
+ CHECK_EQ(err, (status_t)OK);
+ CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo);
video_def->nFrameWidth = width;
video_def->nFrameHeight = height;
@@ -996,23 +984,23 @@
err = mOMX->setParameter(
mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
/////////////////// Codec-specific ////////////////////////
switch (compressionFormat) {
case OMX_VIDEO_CodingMPEG4:
{
- CHECK_EQ(setupMPEG4EncoderParameters(meta), OK);
+ CHECK_EQ(setupMPEG4EncoderParameters(meta), (status_t)OK);
break;
}
case OMX_VIDEO_CodingH263:
- CHECK_EQ(setupH263EncoderParameters(meta), OK);
+ CHECK_EQ(setupH263EncoderParameters(meta), (status_t)OK);
break;
case OMX_VIDEO_CodingAVC:
{
- CHECK_EQ(setupAVCEncoderParameters(meta), OK);
+ CHECK_EQ(setupAVCEncoderParameters(meta), (status_t)OK);
break;
}
@@ -1071,7 +1059,7 @@
status_t err = mOMX->getParameter(
mNode, OMX_IndexParamVideoBitrate,
&bitrateType, sizeof(bitrateType));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
bitrateType.eControlRate = OMX_Video_ControlRateVariable;
bitrateType.nTargetBitrate = bitRate;
@@ -1079,7 +1067,7 @@
err = mOMX->setParameter(
mNode, OMX_IndexParamVideoBitrate,
&bitrateType, sizeof(bitrateType));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
return OK;
}
@@ -1135,7 +1123,7 @@
status_t OMXCodec::setupH263EncoderParameters(const sp<MetaData>& meta) {
int32_t iFramesInterval, frameRate, bitRate;
bool success = meta->findInt32(kKeyBitRate, &bitRate);
- success = success && meta->findInt32(kKeySampleRate, &frameRate);
+ success = success && meta->findInt32(kKeyFrameRate, &frameRate);
success = success && meta->findInt32(kKeyIFramesInterval, &iFramesInterval);
CHECK(success);
OMX_VIDEO_PARAM_H263TYPE h263type;
@@ -1144,7 +1132,7 @@
status_t err = mOMX->getParameter(
mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
h263type.nAllowedPictureTypes =
OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;
@@ -1171,10 +1159,10 @@
err = mOMX->setParameter(
mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
- CHECK_EQ(setupBitRate(bitRate), OK);
- CHECK_EQ(setupErrorCorrectionParameters(), OK);
+ CHECK_EQ(setupBitRate(bitRate), (status_t)OK);
+ CHECK_EQ(setupErrorCorrectionParameters(), (status_t)OK);
return OK;
}
@@ -1182,7 +1170,7 @@
status_t OMXCodec::setupMPEG4EncoderParameters(const sp<MetaData>& meta) {
int32_t iFramesInterval, frameRate, bitRate;
bool success = meta->findInt32(kKeyBitRate, &bitRate);
- success = success && meta->findInt32(kKeySampleRate, &frameRate);
+ success = success && meta->findInt32(kKeyFrameRate, &frameRate);
success = success && meta->findInt32(kKeyIFramesInterval, &iFramesInterval);
CHECK(success);
OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type;
@@ -1191,7 +1179,7 @@
status_t err = mOMX->getParameter(
mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
mpeg4type.nSliceHeaderSpacing = 0;
mpeg4type.bSVH = OMX_FALSE;
@@ -1223,10 +1211,10 @@
err = mOMX->setParameter(
mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
- CHECK_EQ(setupBitRate(bitRate), OK);
- CHECK_EQ(setupErrorCorrectionParameters(), OK);
+ CHECK_EQ(setupBitRate(bitRate), (status_t)OK);
+ CHECK_EQ(setupErrorCorrectionParameters(), (status_t)OK);
return OK;
}
@@ -1234,7 +1222,7 @@
status_t OMXCodec::setupAVCEncoderParameters(const sp<MetaData>& meta) {
int32_t iFramesInterval, frameRate, bitRate;
bool success = meta->findInt32(kKeyBitRate, &bitRate);
- success = success && meta->findInt32(kKeySampleRate, &frameRate);
+ success = success && meta->findInt32(kKeyFrameRate, &frameRate);
success = success && meta->findInt32(kKeyIFramesInterval, &iFramesInterval);
CHECK(success);
@@ -1244,7 +1232,7 @@
status_t err = mOMX->getParameter(
mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
h264type.nAllowedPictureTypes =
OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;
@@ -1290,11 +1278,15 @@
h264type.bMBAFF = OMX_FALSE;
h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable;
+ if (!strcasecmp("OMX.Nvidia.h264.encoder", mComponentName)) {
+ h264type.eLevel = OMX_VIDEO_AVCLevelMax;
+ }
+
err = mOMX->setParameter(
mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
- CHECK_EQ(setupBitRate(bitRate), OK);
+ CHECK_EQ(setupBitRate(bitRate), (status_t)OK);
return OK;
}
@@ -1332,8 +1324,8 @@
status_t err = mOMX->getParameter(
mNode, OMX_IndexParamVideoPortFormat,
&format, sizeof(format));
- CHECK_EQ(err, OK);
- CHECK_EQ(format.eCompressionFormat, OMX_VIDEO_CodingUnused);
+ CHECK_EQ(err, (status_t)OK);
+ CHECK_EQ((int)format.eCompressionFormat, (int)OMX_VIDEO_CodingUnused);
static const int OMX_QCOM_COLOR_FormatYVU420SemiPlanar = 0x7FA30C00;
@@ -1361,7 +1353,7 @@
err = mOMX->getParameter(
mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
#if 1
// XXX Need a (much) better heuristic to compute input buffer sizes.
@@ -1371,7 +1363,7 @@
}
#endif
- CHECK_EQ(def.eDomain, OMX_PortDomainVideo);
+ CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo);
video_def->nFrameWidth = width;
video_def->nFrameHeight = height;
@@ -1393,8 +1385,8 @@
err = mOMX->getParameter(
mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
- CHECK_EQ(err, OK);
- CHECK_EQ(def.eDomain, OMX_PortDomainVideo);
+ CHECK_EQ(err, (status_t)OK);
+ CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo);
#if 0
def.nBufferSize =
@@ -1415,7 +1407,8 @@
bool isEncoder,
const char *mime,
const char *componentName,
- const sp<MediaSource> &source)
+ const sp<MediaSource> &source,
+ const sp<ANativeWindow> &nativeWindow)
: mOMX(omx),
mOMXLivesLocally(omx->livesLocally(getpid())),
mNode(node),
@@ -1435,7 +1428,8 @@
mTargetTimeUs(-1),
mSkipTimeUs(-1),
mLeftOverBuffer(NULL),
- mPaused(false) {
+ mPaused(false),
+ mNativeWindow(nativeWindow) {
mPortStatus[kPortIndexInput] = ENABLED;
mPortStatus[kPortIndexOutput] = ENABLED;
@@ -1516,7 +1510,7 @@
CHECK(mState == LOADED || mState == ERROR);
status_t err = mOMX->freeNode(mNode);
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
mNode = NULL;
setState(DEAD);
@@ -1533,21 +1527,21 @@
status_t OMXCodec::init() {
// mLock is held.
- CHECK_EQ(mState, LOADED);
+ CHECK_EQ((int)mState, (int)LOADED);
status_t err;
if (!(mQuirks & kRequiresLoadedToIdleAfterAllocation)) {
err = mOMX->sendCommand(mNode, OMX_CommandStateSet, OMX_StateIdle);
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
setState(LOADED_TO_IDLE);
}
err = allocateBuffers();
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
if (mQuirks & kRequiresLoadedToIdleAfterAllocation) {
err = mOMX->sendCommand(mNode, OMX_CommandStateSet, OMX_StateIdle);
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
setState(LOADED_TO_IDLE);
}
@@ -1579,6 +1573,10 @@
}
status_t OMXCodec::allocateBuffersOnPort(OMX_U32 portIndex) {
+ if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
+ return allocateOutputBuffersFromNativeWindow();
+ }
+
OMX_PARAM_PORTDEFINITIONTYPE def;
InitOMXParams(&def);
def.nPortIndex = portIndex;
@@ -1590,6 +1588,14 @@
return err;
}
+ if (mIsMetaDataStoredInVideoBuffers && portIndex == kPortIndexInput) {
+ err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexInput, OMX_TRUE);
+ if (err != OK) {
+ LOGE("Storing meta data in video buffers is not supported");
+ return err;
+ }
+ }
+
CODEC_LOGI("allocating %lu buffers of size %lu on %s port",
def.nBufferCountActual, def.nBufferSize,
portIndex == kPortIndexInput ? "input" : "output");
@@ -1645,6 +1651,7 @@
info.mBuffer = buffer;
info.mOwnedByComponent = false;
+ info.mOwnedByNativeWindow = false;
info.mMem = mem;
info.mMediaBuffer = NULL;
@@ -1671,6 +1678,173 @@
return OK;
}
+status_t OMXCodec::allocateOutputBuffersFromNativeWindow() {
+ // Get the number of buffers needed.
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = kPortIndexOutput;
+
+ status_t err = mOMX->getParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ if (err != OK) {
+ return err;
+ }
+
+ err = native_window_set_buffers_geometry(
+ mNativeWindow.get(),
+ def.format.video.nFrameWidth,
+ def.format.video.nFrameHeight,
+ def.format.video.eColorFormat);
+
+ if (err != 0) {
+ LOGE("native_window_set_buffers_geometry failed: %s (%d)",
+ strerror(-err), -err);
+ return err;
+ }
+
+ // Increase the buffer count by one to allow for the ANativeWindow to hold
+ // on to one of the buffers.
+ def.nBufferCountActual++;
+ err = mOMX->setParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ if (err != OK) {
+ return err;
+ }
+
+ // Set up the native window.
+ // XXX TODO: Get the gralloc usage flags from the OMX plugin!
+ err = native_window_set_usage(
+ mNativeWindow.get(), GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP);
+ if (err != 0) {
+ LOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);
+ return err;
+ }
+
+ err = native_window_set_buffer_count(
+ mNativeWindow.get(), def.nBufferCountActual);
+ if (err != 0) {
+ LOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
+ -err);
+ return err;
+ }
+
+ // XXX TODO: Do something so the ANativeWindow knows that we'll need to get
+ // the same set of buffers.
+
+ CODEC_LOGI("allocating %lu buffers from a native window of size %lu on "
+ "output port", def.nBufferCountActual, def.nBufferSize);
+
+ // Dequeue buffers and send them to OMX
+ OMX_U32 i;
+ for (i = 0; i < def.nBufferCountActual; i++) {
+ android_native_buffer_t* buf;
+ err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf);
+ if (err != 0) {
+ LOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
+ break;
+ }
+
+ sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
+ IOMX::buffer_id bufferId;
+ err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
+ &bufferId);
+ if (err != 0) {
+ break;
+ }
+
+ CODEC_LOGV("registered graphic buffer with ID %p (pointer = %p)",
+ bufferId, graphicBuffer.get());
+
+ BufferInfo info;
+ info.mData = NULL;
+ info.mSize = def.nBufferSize;
+ info.mBuffer = bufferId;
+ info.mOwnedByComponent = false;
+ info.mOwnedByNativeWindow = false;
+ info.mMem = NULL;
+ info.mMediaBuffer = new MediaBuffer(graphicBuffer);
+ info.mMediaBuffer->setObserver(this);
+
+ mPortBuffers[kPortIndexOutput].push(info);
+ }
+
+ OMX_U32 cancelStart;
+ OMX_U32 cancelEnd;
+
+ if (err != 0) {
+ // If an error occurred while dequeuing we need to cancel any buffers
+ // that were dequeued.
+ cancelStart = 0;
+ cancelEnd = i;
+ } else {
+ // Return the last two buffers to the native window.
+ // XXX TODO: The number of buffers the native window owns should probably be
+ // queried from it when we put the native window in fixed buffer pool mode
+ // (which needs to be implemented). Currently it's hard-coded to 2.
+ cancelStart = def.nBufferCountActual - 2;
+ cancelEnd = def.nBufferCountActual;
+ }
+
+ for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
+ BufferInfo *info = &mPortBuffers[kPortIndexOutput].editItemAt(i);
+ cancelBufferToNativeWindow(info);
+ }
+
+ return err;
+}
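After registering the dequeued buffers with the OMX node, the function either cancels everything it managed to dequeue (on error) or hands the last two buffers back so the ANativeWindow keeps its minimum pool; the hard-coded 2 mirrors the XXX comment above. A tiny standalone sketch of that index arithmetic (function and parameter names are illustrative, and bufferCount is assumed to be at least 2):

    #include <cstdint>
    #include <cstdio>
    #include <utility>

    // Decide which buffer indices [first, last) should be cancelled back to
    // the native window after the allocation loop.
    static std::pair<uint32_t, uint32_t> cancelRange(
            bool allocationFailed, uint32_t numDequeued, uint32_t bufferCount) {
        if (allocationFailed) {
            // Give back everything we actually dequeued.
            return std::make_pair((uint32_t)0, numDequeued);
        }
        // Success: the window keeps the last two buffers (hard-coded for now).
        return std::make_pair(bufferCount - 2, bufferCount);
    }

    int main() {
        std::pair<uint32_t, uint32_t> r = cancelRange(false, 6, 6);
        printf("cancel [%u, %u)\n", r.first, r.second);   // cancel [4, 6)

        r = cancelRange(true, 3, 6);
        printf("cancel [%u, %u)\n", r.first, r.second);   // cancel [0, 3)
        return 0;
    }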
+
+status_t OMXCodec::cancelBufferToNativeWindow(BufferInfo *info) {
+ CHECK(!info->mOwnedByNativeWindow);
+ CODEC_LOGV("Calling cancelBuffer on buffer %p", info->mBuffer);
+ int err = mNativeWindow->cancelBuffer(
+ mNativeWindow.get(), info->mMediaBuffer->graphicBuffer().get());
+ if (err != 0) {
+ CODEC_LOGE("cancelBuffer failed w/ error 0x%08x", err);
+
+ setState(ERROR);
+ return err;
+ }
+ info->mOwnedByNativeWindow = true;
+ return OK;
+}
+
+OMXCodec::BufferInfo* OMXCodec::dequeueBufferFromNativeWindow() {
+ // Dequeue the next buffer from the native window.
+ android_native_buffer_t* buf;
+ int err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf);
+ if (err != 0) {
+ CODEC_LOGE("dequeueBuffer failed w/ error 0x%08x", err);
+
+ setState(ERROR);
+ return 0;
+ }
+
+ // Determine which buffer we just dequeued.
+ Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput];
+ BufferInfo *bufInfo = 0;
+ for (size_t i = 0; i < buffers->size(); i++) {
+ sp<GraphicBuffer> graphicBuffer = buffers->itemAt(i).
+ mMediaBuffer->graphicBuffer();
+ if (graphicBuffer->handle == buf->handle) {
+ bufInfo = &buffers->editItemAt(i);
+ break;
+ }
+ }
+
+ if (bufInfo == 0) {
+ CODEC_LOGE("dequeued unrecognized buffer: %p", buf);
+
+ setState(ERROR);
+ return 0;
+ }
+
+ // The native window no longer owns the buffer.
+ CHECK(bufInfo->mOwnedByNativeWindow);
+ bufInfo->mOwnedByNativeWindow = false;
+
+ return bufInfo;
+}
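A buffer coming back from dequeueBuffer is identified by walking the output BufferInfo table and comparing native handles, after which its ownership flag is cleared. A simplified standalone sketch of that lookup, with a plain pointer standing in for buffer_handle_t and a NULL return where the real code asserts:

    #include <cstdio>
    #include <vector>

    struct BufferRecord {
        const void *handle;          // stands in for buffer_handle_t
        bool ownedByNativeWindow;
    };

    // Find the record whose handle matches the buffer the window just returned,
    // and mark it as no longer owned by the window. Returns NULL if unknown.
    static BufferRecord *claimDequeuedBuffer(
            std::vector<BufferRecord> &records, const void *dequeuedHandle) {
        for (size_t i = 0; i < records.size(); ++i) {
            if (records[i].handle == dequeuedHandle) {
                BufferRecord *rec = &records[i];
                if (!rec->ownedByNativeWindow) {
                    return NULL;     // protocol violation: we thought we owned it
                }
                rec->ownedByNativeWindow = false;
                return rec;
            }
        }
        return NULL;                 // dequeued a buffer we never registered
    }

    int main() {
        int a, b;
        std::vector<BufferRecord> records = { { &a, true }, { &b, false } };
        BufferRecord *rec = claimDequeuedBuffer(records, &a);
        printf("claimed: %s\n", rec != NULL ? "yes" : "no");   // yes
        return 0;
    }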
+
void OMXCodec::on_message(const omx_message &msg) {
Mutex::Autolock autoLock(mLock);
@@ -1702,27 +1876,30 @@
"an EMPTY_BUFFER_DONE.", buffer);
}
- {
- BufferInfo *info = &buffers->editItemAt(i);
- info->mOwnedByComponent = false;
- if (info->mMediaBuffer != NULL) {
- // It is time to release the media buffers storing meta data
- info->mMediaBuffer->release();
- info->mMediaBuffer = NULL;
+ BufferInfo* info = &buffers->editItemAt(i);
+ info->mOwnedByComponent = false;
+
+ // The buffer could not be released until this EMPTY_BUFFER_DONE arrived.
+ if (info->mMediaBuffer != NULL) {
+ if (mIsEncoder &&
+ (mQuirks & kAvoidMemcopyInputRecordingFrames)) {
+ // If zero-copy mode is enabled this will send the
+ // input buffer back to the upstream source.
+ restorePatchedDataPointer(info);
}
+
+ info->mMediaBuffer->release();
+ info->mMediaBuffer = NULL;
}
if (mPortStatus[kPortIndexInput] == DISABLING) {
CODEC_LOGV("Port is disabled, freeing buffer %p", buffer);
- status_t err =
- mOMX->freeBuffer(mNode, kPortIndexInput, buffer);
- CHECK_EQ(err, OK);
-
- buffers->removeAt(i);
+ status_t err = freeBuffer(kPortIndexInput, i);
+ CHECK_EQ(err, (status_t)OK);
} else if (mState != ERROR
&& mPortStatus[kPortIndexInput] != SHUTTING_DOWN) {
- CHECK_EQ(mPortStatus[kPortIndexInput], ENABLED);
+ CHECK_EQ((int)mPortStatus[kPortIndexInput], (int)ENABLED);
drainInputBuffer(&buffers->editItemAt(i));
}
break;
@@ -1759,11 +1936,9 @@
if (mPortStatus[kPortIndexOutput] == DISABLING) {
CODEC_LOGV("Port is disabled, freeing buffer %p", buffer);
- status_t err =
- mOMX->freeBuffer(mNode, kPortIndexOutput, buffer);
- CHECK_EQ(err, OK);
+ status_t err = freeBuffer(kPortIndexOutput, i);
+ CHECK_EQ(err, (status_t)OK);
- buffers->removeAt(i);
#if 0
} else if (mPortStatus[kPortIndexOutput] == ENABLED
&& (flags & OMX_BUFFERFLAG_EOS)) {
@@ -1772,7 +1947,7 @@
mBufferFilled.signal();
#endif
} else if (mPortStatus[kPortIndexOutput] != SHUTTING_DOWN) {
- CHECK_EQ(mPortStatus[kPortIndexOutput], ENABLED);
+ CHECK_EQ((int)mPortStatus[kPortIndexOutput], (int)ENABLED);
if (info->mMediaBuffer == NULL) {
CHECK(mOMXLivesLocally);
@@ -1791,8 +1966,10 @@
}
MediaBuffer *buffer = info->mMediaBuffer;
+ bool isGraphicBuffer = buffer->graphicBuffer() != NULL;
- if (msg.u.extended_buffer_data.range_offset
+ if (!isGraphicBuffer
+ && msg.u.extended_buffer_data.range_offset
+ msg.u.extended_buffer_data.range_length
> buffer->size()) {
CODEC_LOGE(
@@ -1816,7 +1993,7 @@
buffer->meta_data()->setInt32(kKeyIsCodecConfig, true);
}
- if (mQuirks & kOutputBuffersAreUnreadable) {
+ if (isGraphicBuffer || mQuirks & kOutputBuffersAreUnreadable) {
buffer->meta_data()->setInt32(kKeyIsUnreadable, true);
}
@@ -1868,48 +2045,6 @@
}
}
-void OMXCodec::onEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
- switch (event) {
- case OMX_EventCmdComplete:
- {
- onCmdComplete((OMX_COMMANDTYPE)data1, data2);
- break;
- }
-
- case OMX_EventError:
- {
- CODEC_LOGE("ERROR(0x%08lx, %ld)", data1, data2);
-
- setState(ERROR);
- break;
- }
-
- case OMX_EventPortSettingsChanged:
- {
- onPortSettingsChanged(data1);
- break;
- }
-
-#if 0
- case OMX_EventBufferFlag:
- {
- CODEC_LOGV("EVENT_BUFFER_FLAG(%ld)", data1);
-
- if (data1 == kPortIndexOutput) {
- mNoMoreOutputData = true;
- }
- break;
- }
-#endif
-
- default:
- {
- CODEC_LOGV("EVENT(%d, %ld, %ld)", event, data1, data2);
- break;
- }
- }
-}
-
// Has the format changed in any way that the client would have to be aware of?
static bool formatHasNotablyChanged(
const sp<MetaData> &from, const sp<MetaData> &to) {
@@ -1954,6 +2089,21 @@
if (height_from != height_to) {
return true;
}
+
+ int32_t left_from, top_from, right_from, bottom_from;
+ CHECK(from->findRect(
+ kKeyCropRect,
+ &left_from, &top_from, &right_from, &bottom_from));
+
+ int32_t left_to, top_to, right_to, bottom_to;
+ CHECK(to->findRect(
+ kKeyCropRect,
+ &left_to, &top_to, &right_to, &bottom_to));
+
+ if (left_to != left_from || top_to != top_from
+ || right_to != right_from || bottom_to != bottom_from) {
+ return true;
+ }
} else if (!strcasecmp(mime_from, MEDIA_MIMETYPE_AUDIO_RAW)) {
int32_t numChannels_from, numChannels_to;
CHECK(from->findInt32(kKeyChannelCount, &numChannels_from));
@@ -1975,6 +2125,78 @@
return false;
}
+void OMXCodec::onEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+ switch (event) {
+ case OMX_EventCmdComplete:
+ {
+ onCmdComplete((OMX_COMMANDTYPE)data1, data2);
+ break;
+ }
+
+ case OMX_EventError:
+ {
+ CODEC_LOGE("ERROR(0x%08lx, %ld)", data1, data2);
+
+ setState(ERROR);
+ break;
+ }
+
+ case OMX_EventPortSettingsChanged:
+ {
+ CODEC_LOGV("OMX_EventPortSettingsChanged(port=%ld, data2=0x%08lx)",
+ data1, data2);
+
+ if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
+ onPortSettingsChanged(data1);
+ } else if (data1 == kPortIndexOutput
+ && data2 == OMX_IndexConfigCommonOutputCrop) {
+
+ sp<MetaData> oldOutputFormat = mOutputFormat;
+ initOutputFormat(mSource->getFormat());
+
+ if (formatHasNotablyChanged(oldOutputFormat, mOutputFormat)) {
+ mOutputPortSettingsHaveChanged = true;
+
+ if (mNativeWindow != NULL) {
+ int32_t left, top, right, bottom;
+ CHECK(mOutputFormat->findRect(
+ kKeyCropRect,
+ &left, &top, &right, &bottom));
+
+ android_native_rect_t crop;
+ crop.left = left;
+ crop.top = top;
+ crop.right = right;
+ crop.bottom = bottom;
+
+ CHECK_EQ(0, native_window_set_crop(
+ mNativeWindow.get(), &crop));
+ }
+ }
+ }
+ break;
+ }
+
+#if 0
+ case OMX_EventBufferFlag:
+ {
+ CODEC_LOGV("EVENT_BUFFER_FLAG(%ld)", data1);
+
+ if (data1 == kPortIndexOutput) {
+ mNoMoreOutputData = true;
+ }
+ break;
+ }
+#endif
+
+ default:
+ {
+ CODEC_LOGV("EVENT(%d, %ld, %ld)", event, data1, data2);
+ break;
+ }
+ }
+}
+
void OMXCodec::onCmdComplete(OMX_COMMANDTYPE cmd, OMX_U32 data) {
switch (cmd) {
case OMX_CommandStateSet:
@@ -1989,13 +2211,13 @@
CODEC_LOGV("PORT_DISABLED(%ld)", portIndex);
CHECK(mState == EXECUTING || mState == RECONFIGURING);
- CHECK_EQ(mPortStatus[portIndex], DISABLING);
- CHECK_EQ(mPortBuffers[portIndex].size(), 0);
+ CHECK_EQ((int)mPortStatus[portIndex], (int)DISABLING);
+ CHECK_EQ(mPortBuffers[portIndex].size(), 0u);
mPortStatus[portIndex] = DISABLED;
if (mState == RECONFIGURING) {
- CHECK_EQ(portIndex, kPortIndexOutput);
+ CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput);
sp<MetaData> oldOutputFormat = mOutputFormat;
initOutputFormat(mSource->getFormat());
@@ -2009,7 +2231,7 @@
enablePortAsync(portIndex);
status_t err = allocateBuffersOnPort(portIndex);
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
}
break;
}
@@ -2020,12 +2242,12 @@
CODEC_LOGV("PORT_ENABLED(%ld)", portIndex);
CHECK(mState == EXECUTING || mState == RECONFIGURING);
- CHECK_EQ(mPortStatus[portIndex], ENABLING);
+ CHECK_EQ((int)mPortStatus[portIndex], (int)ENABLING);
mPortStatus[portIndex] = ENABLED;
if (mState == RECONFIGURING) {
- CHECK_EQ(portIndex, kPortIndexOutput);
+ CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput);
setState(EXECUTING);
@@ -2040,14 +2262,14 @@
CODEC_LOGV("FLUSH_DONE(%ld)", portIndex);
- CHECK_EQ(mPortStatus[portIndex], SHUTTING_DOWN);
+ CHECK_EQ((int)mPortStatus[portIndex], (int)SHUTTING_DOWN);
mPortStatus[portIndex] = ENABLED;
CHECK_EQ(countBuffersWeOwn(mPortBuffers[portIndex]),
mPortBuffers[portIndex].size());
if (mState == RECONFIGURING) {
- CHECK_EQ(portIndex, kPortIndexOutput);
+ CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput);
disablePortAsync(portIndex);
} else if (mState == EXECUTING_TO_IDLE) {
@@ -2061,7 +2283,7 @@
status_t err =
mOMX->sendCommand(mNode, OMX_CommandStateSet, OMX_StateIdle);
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
}
} else {
// We're flushing both ports in preparation for seeking.
@@ -2101,11 +2323,11 @@
status_t err = mOMX->sendCommand(
mNode, OMX_CommandStateSet, OMX_StateExecuting);
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
setState(IDLE_TO_EXECUTING);
} else {
- CHECK_EQ(mState, EXECUTING_TO_IDLE);
+ CHECK_EQ((int)mState, (int)EXECUTING_TO_IDLE);
CHECK_EQ(
countBuffersWeOwn(mPortBuffers[kPortIndexInput]),
@@ -2118,13 +2340,13 @@
status_t err = mOMX->sendCommand(
mNode, OMX_CommandStateSet, OMX_StateLoaded);
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
err = freeBuffersOnPort(kPortIndexInput);
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
err = freeBuffersOnPort(kPortIndexOutput);
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
mPortStatus[kPortIndexInput] = ENABLED;
mPortStatus[kPortIndexOutput] = ENABLED;
@@ -2136,7 +2358,7 @@
case OMX_StateExecuting:
{
- CHECK_EQ(mState, IDLE_TO_EXECUTING);
+ CHECK_EQ((int)mState, (int)IDLE_TO_EXECUTING);
CODEC_LOGV("Now Executing.");
@@ -2152,7 +2374,7 @@
case OMX_StateLoaded:
{
- CHECK_EQ(mState, IDLE_TO_LOADED);
+ CHECK_EQ((int)mState, (int)IDLE_TO_LOADED);
CODEC_LOGV("Now Loaded.");
@@ -2199,28 +2421,16 @@
continue;
}
- CHECK_EQ(info->mOwnedByComponent, false);
+ CHECK_EQ((int)info->mOwnedByComponent, (int)false);
CODEC_LOGV("freeing buffer %p on port %ld", info->mBuffer, portIndex);
- status_t err =
- mOMX->freeBuffer(mNode, portIndex, info->mBuffer);
+ status_t err = freeBuffer(portIndex, i);
if (err != OK) {
stickyErr = err;
}
- if (info->mMediaBuffer != NULL) {
- info->mMediaBuffer->setObserver(NULL);
-
- // Make sure nobody but us owns this buffer at this point.
- CHECK_EQ(info->mMediaBuffer->refcount(), 0);
-
- info->mMediaBuffer->release();
- info->mMediaBuffer = NULL;
- }
-
- buffers->removeAt(i);
}
CHECK(onlyThoseWeOwn || buffers->isEmpty());
@@ -2228,11 +2438,42 @@
return stickyErr;
}
+status_t OMXCodec::freeBuffer(OMX_U32 portIndex, size_t bufIndex) {
+ Vector<BufferInfo> *buffers = &mPortBuffers[portIndex];
+
+ BufferInfo *info = &buffers->editItemAt(bufIndex);
+
+ status_t err = mOMX->freeBuffer(mNode, portIndex, info->mBuffer);
+
+ if (err == OK && info->mMediaBuffer != NULL) {
+ CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput);
+ info->mMediaBuffer->setObserver(NULL);
+
+ // Make sure nobody but us owns this buffer at this point.
+ CHECK_EQ(info->mMediaBuffer->refcount(), 0);
+
+ // Cancel the buffer if it belongs to an ANativeWindow.
+ sp<GraphicBuffer> graphicBuffer = info->mMediaBuffer->graphicBuffer();
+ if (!info->mOwnedByNativeWindow && graphicBuffer != 0) {
+ err = cancelBufferToNativeWindow(info);
+ }
+
+ info->mMediaBuffer->release();
+ info->mMediaBuffer = NULL;
+ }
+
+ if (err == OK) {
+ buffers->removeAt(bufIndex);
+ }
+
+ return err;
+}
+
void OMXCodec::onPortSettingsChanged(OMX_U32 portIndex) {
CODEC_LOGV("PORT_SETTINGS_CHANGED(%ld)", portIndex);
- CHECK_EQ(mState, EXECUTING);
- CHECK_EQ(portIndex, kPortIndexOutput);
+ CHECK_EQ((int)mState, (int)EXECUTING);
+ CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput);
setState(RECONFIGURING);
if (mQuirks & kNeedsFlushBeforeDisable) {
@@ -2252,7 +2493,7 @@
portIndex, countBuffersWeOwn(mPortBuffers[portIndex]),
mPortBuffers[portIndex].size());
- CHECK_EQ(mPortStatus[portIndex], ENABLED);
+ CHECK_EQ((int)mPortStatus[portIndex], (int)ENABLED);
mPortStatus[portIndex] = SHUTTING_DOWN;
if ((mQuirks & kRequiresFlushCompleteEmulation)
@@ -2266,7 +2507,7 @@
status_t err =
mOMX->sendCommand(mNode, OMX_CommandFlush, portIndex);
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
return true;
}
@@ -2274,12 +2515,13 @@
void OMXCodec::disablePortAsync(OMX_U32 portIndex) {
CHECK(mState == EXECUTING || mState == RECONFIGURING);
- CHECK_EQ(mPortStatus[portIndex], ENABLED);
+ CHECK_EQ((int)mPortStatus[portIndex], (int)ENABLED);
mPortStatus[portIndex] = DISABLING;
+ CODEC_LOGV("sending OMX_CommandPortDisable(%ld)", portIndex);
status_t err =
mOMX->sendCommand(mNode, OMX_CommandPortDisable, portIndex);
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
freeBuffersOnPort(portIndex, true);
}
@@ -2287,16 +2529,17 @@
void OMXCodec::enablePortAsync(OMX_U32 portIndex) {
CHECK(mState == EXECUTING || mState == RECONFIGURING);
- CHECK_EQ(mPortStatus[portIndex], DISABLED);
+ CHECK_EQ((int)mPortStatus[portIndex], (int)DISABLED);
mPortStatus[portIndex] = ENABLING;
+ CODEC_LOGV("sending OMX_CommandPortEnable(%ld)", portIndex);
status_t err =
mOMX->sendCommand(mNode, OMX_CommandPortEnable, portIndex);
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
}
void OMXCodec::fillOutputBuffers() {
- CHECK_EQ(mState, EXECUTING);
+ CHECK_EQ((int)mState, (int)EXECUTING);
// This is a workaround for some decoders not properly reporting
// end-of-output-stream. If we own all input buffers and also own
@@ -2315,7 +2558,10 @@
Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput];
for (size_t i = 0; i < buffers->size(); ++i) {
- fillOutputBuffer(&buffers->editItemAt(i));
+ BufferInfo *info = &buffers->editItemAt(i);
+ if (!info->mOwnedByNativeWindow) {
+ fillOutputBuffer(&buffers->editItemAt(i));
+ }
}
}
@@ -2329,7 +2575,7 @@
}
void OMXCodec::drainInputBuffer(BufferInfo *info) {
- CHECK_EQ(info->mOwnedByComponent, false);
+ CHECK_EQ((int)info->mOwnedByComponent, (int)false);
if (mSignalledEOS) {
return;
@@ -2366,7 +2612,7 @@
mNode, info->mBuffer, 0, size,
OMX_BUFFERFLAG_ENDOFFRAME | OMX_BUFFERFLAG_CODECCONFIG,
0);
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
info->mOwnedByComponent = true;
@@ -2450,16 +2696,16 @@
break;
}
- // Do not release the media buffer if it stores meta data
- // instead of YUV data. The release is delayed until
- // EMPTY_BUFFER_DONE callback is received.
bool releaseBuffer = true;
if (mIsEncoder && (mQuirks & kAvoidMemcopyInputRecordingFrames)) {
CHECK(mOMXLivesLocally && offset == 0);
OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *) info->mBuffer;
+ CHECK(header->pBuffer == info->mData);
header->pBuffer = (OMX_U8 *) srcBuffer->data() + srcBuffer->range_offset();
+ releaseBuffer = false;
+ info->mMediaBuffer = srcBuffer;
} else {
- if (mQuirks & kStoreMetaDataInInputVideoBuffers) {
+ if (mIsMetaDataStoredInVideoBuffers) {
releaseBuffer = false;
info->mMediaBuffer = srcBuffer;
}
@@ -2534,7 +2780,7 @@
}
void OMXCodec::fillOutputBuffer(BufferInfo *info) {
- CHECK_EQ(info->mOwnedByComponent, false);
+ CHECK_EQ((int)info->mOwnedByComponent, (int)false);
if (mNoMoreOutputData) {
CODEC_LOGV("There is no more output data available, not "
@@ -2542,7 +2788,25 @@
return;
}
- CODEC_LOGV("Calling fill_buffer on buffer %p", info->mBuffer);
+ if (info->mMediaBuffer != NULL) {
+ sp<GraphicBuffer> graphicBuffer = info->mMediaBuffer->graphicBuffer();
+ if (graphicBuffer != 0) {
+ // When using a native buffer we need to lock the buffer before
+ // giving it to OMX.
+ CHECK(!info->mOwnedByNativeWindow);
+ CODEC_LOGV("Calling lockBuffer on %p", info->mBuffer);
+ int err = mNativeWindow->lockBuffer(mNativeWindow.get(),
+ graphicBuffer.get());
+ if (err != 0) {
+ CODEC_LOGE("lockBuffer failed w/ error 0x%08x", err);
+
+ setState(ERROR);
+ return;
+ }
+ }
+ }
+
+ CODEC_LOGV("Calling fillBuffer on buffer %p", info->mBuffer);
status_t err = mOMX->fillBuffer(mNode, info->mBuffer);
if (err != OK) {
@@ -2597,10 +2861,10 @@
def.nPortIndex = portIndex;
status_t err = mOMX->getParameter(
mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
CHECK_EQ(mOMX->setParameter(mNode, OMX_IndexParamPortDefinition,
- &def, sizeof(def)), OK);
+ &def, sizeof(def)), (status_t)OK);
// pcm param
OMX_AUDIO_PARAM_PCMMODETYPE pcmParams;
@@ -2610,7 +2874,7 @@
err = mOMX->getParameter(
mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
pcmParams.nChannels = numChannels;
pcmParams.eNumData = OMX_NumericalDataSigned;
@@ -2631,7 +2895,7 @@
err = mOMX->setParameter(
mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
}
static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate(bool isAMRWB, int32_t bps) {
@@ -2688,13 +2952,13 @@
status_t err =
mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF;
def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitRate);
err = mOMX->setParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
////////////////////////
@@ -2723,33 +2987,33 @@
status_t err = OMX_ErrorNone;
while (OMX_ErrorNone == err) {
CHECK_EQ(mOMX->getParameter(mNode, OMX_IndexParamAudioPortFormat,
- &format, sizeof(format)), OK);
+ &format, sizeof(format)), (status_t)OK);
if (format.eEncoding == OMX_AUDIO_CodingAAC) {
break;
}
format.nIndex++;
}
- CHECK_EQ(OK, err);
+ CHECK_EQ((status_t)OK, err);
CHECK_EQ(mOMX->setParameter(mNode, OMX_IndexParamAudioPortFormat,
- &format, sizeof(format)), OK);
+ &format, sizeof(format)), (status_t)OK);
// port definition
OMX_PARAM_PORTDEFINITIONTYPE def;
InitOMXParams(&def);
def.nPortIndex = kPortIndexOutput;
CHECK_EQ(mOMX->getParameter(mNode, OMX_IndexParamPortDefinition,
- &def, sizeof(def)), OK);
+ &def, sizeof(def)), (status_t)OK);
def.format.audio.bFlagErrorConcealment = OMX_TRUE;
def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;
CHECK_EQ(mOMX->setParameter(mNode, OMX_IndexParamPortDefinition,
- &def, sizeof(def)), OK);
+ &def, sizeof(def)), (status_t)OK);
// profile
OMX_AUDIO_PARAM_AACPROFILETYPE profile;
InitOMXParams(&profile);
profile.nPortIndex = kPortIndexOutput;
CHECK_EQ(mOMX->getParameter(mNode, OMX_IndexParamAudioAac,
- &profile, sizeof(profile)), OK);
+ &profile, sizeof(profile)), (status_t)OK);
profile.nChannels = numChannels;
profile.eChannelMode = (numChannels == 1?
OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo);
@@ -2762,7 +3026,7 @@
profile.eAACProfile = OMX_AUDIO_AACObjectLC;
profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF;
CHECK_EQ(mOMX->setParameter(mNode, OMX_IndexParamAudioAac,
- &profile, sizeof(profile)), OK);
+ &profile, sizeof(profile)), (status_t)OK);
} else {
OMX_AUDIO_PARAM_AACPROFILETYPE profile;
@@ -2771,7 +3035,7 @@
status_t err = mOMX->getParameter(
mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
profile.nChannels = numChannels;
profile.nSampleRate = sampleRate;
@@ -2779,7 +3043,7 @@
err = mOMX->setParameter(
mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
}
}
@@ -2791,10 +3055,10 @@
OMX_INDEXTYPE index;
status_t err = mOMX->get_extension_index(
mNode, "OMX.TI.JPEG.decode.Config.OutputColorFormat", &index);
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
err = mOMX->set_config(mNode, index, &format, sizeof(format));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
#endif
OMX_PARAM_PORTDEFINITIONTYPE def;
@@ -2803,13 +3067,13 @@
status_t err = mOMX->getParameter(
mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
- CHECK_EQ(def.eDomain, OMX_PortDomainImage);
+ CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainImage);
OMX_IMAGE_PORTDEFINITIONTYPE *imageDef = &def.format.image;
- CHECK_EQ(imageDef->eCompressionFormat, OMX_IMAGE_CodingUnused);
+ CHECK_EQ((int)imageDef->eCompressionFormat, (int)OMX_IMAGE_CodingUnused);
imageDef->eColorFormat = format;
imageDef->nFrameWidth = width;
imageDef->nFrameHeight = height;
@@ -2852,7 +3116,7 @@
err = mOMX->setParameter(
mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
}
void OMXCodec::setJPEGInputFormat(
@@ -2863,12 +3127,12 @@
status_t err = mOMX->getParameter(
mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
- CHECK_EQ(def.eDomain, OMX_PortDomainImage);
+ CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainImage);
OMX_IMAGE_PORTDEFINITIONTYPE *imageDef = &def.format.image;
- CHECK_EQ(imageDef->eCompressionFormat, OMX_IMAGE_CodingJPEG);
+ CHECK_EQ((int)imageDef->eCompressionFormat, (int)OMX_IMAGE_CodingJPEG);
imageDef->nFrameWidth = width;
imageDef->nFrameHeight = height;
@@ -2877,7 +3141,7 @@
err = mOMX->setParameter(
mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
}
void OMXCodec::addCodecSpecificData(const void *data, size_t size) {
@@ -2978,7 +3242,7 @@
status_t err =
mOMX->sendCommand(mNode, OMX_CommandStateSet, OMX_StateIdle);
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
}
while (mState != LOADED && mState != ERROR) {
@@ -3070,7 +3334,7 @@
mFilledBuffers.clear();
- CHECK_EQ(mState, EXECUTING);
+ CHECK_EQ((int)mState, (int)EXECUTING);
bool emulateInputFlushCompletion = !flushPortAsync(kPortIndexInput);
bool emulateOutputFlushCompletion = !flushPortAsync(kPortIndexOutput);
@@ -3124,8 +3388,33 @@
BufferInfo *info = &buffers->editItemAt(i);
if (info->mMediaBuffer == buffer) {
- CHECK_EQ(mPortStatus[kPortIndexOutput], ENABLED);
- fillOutputBuffer(info);
+ CHECK_EQ((int)mPortStatus[kPortIndexOutput], (int)ENABLED);
+ if (buffer->graphicBuffer() == 0) {
+ fillOutputBuffer(info);
+ } else {
+ sp<MetaData> metaData = info->mMediaBuffer->meta_data();
+ int32_t rendered = 0;
+ if (!metaData->findInt32(kKeyRendered, &rendered)) {
+ rendered = 0;
+ }
+ if (!rendered) {
+ status_t err = cancelBufferToNativeWindow(info);
+ if (err < 0) {
+ return;
+ }
+ } else {
+ info->mOwnedByNativeWindow = true;
+ }
+
+ // Dequeue the next buffer from the native window.
+ BufferInfo *nextBufInfo = dequeueBufferFromNativeWindow();
+ if (nextBufInfo == 0) {
+ return;
+ }
+
+ // Give the buffer to the OMX node to fill.
+ fillOutputBuffer(nextBufInfo);
+ }
return;
}
}
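The buffer-return path above treats graphic buffers specially: an ordinary memory buffer is simply refilled in place, while a graphic buffer ends up owned by the native window either way (cancelled back if it was never rendered, or marked window-owned if the client already queued it), and a freshly dequeued buffer is refilled instead. A minimal sketch of that decision; the enum and function names are illustrative:

    #include <cstdio>

    enum ReturnAction {
        kRefillSameBuffer,        // ordinary memory buffer: just refill it
        kCancelAndDequeueNext,    // graphic buffer that was never rendered
        kMarkOwnedAndDequeueNext  // graphic buffer already queued to the window
    };

    // Decide what to do with an output buffer the client has just released.
    static ReturnAction onBufferReturned(bool isGraphicBuffer, bool wasRendered) {
        if (!isGraphicBuffer) {
            return kRefillSameBuffer;
        }
        // Either way the window ends up owning the returned buffer and the
        // codec refills a freshly dequeued one instead.
        return wasRendered ? kMarkOwnedAndDequeueNext : kCancelAndDequeueNext;
    }

    int main() {
        printf("%d\n", onBufferReturned(false, false));  // 0: refill in place
        printf("%d\n", onBufferReturned(true, false));   // 1: cancel, dequeue next
        printf("%d\n", onBufferReturned(true, true));    // 2: mark owned, dequeue next
        return 0;
    }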
@@ -3351,7 +3640,7 @@
status_t err = mOMX->getParameter(
mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
printf("%s Port = {\n", portIndex == kPortIndexInput ? "Input" : "Output");
@@ -3417,7 +3706,7 @@
err = mOMX->getParameter(
mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
printf(" nSamplingRate = %ld\n", params.nSamplingRate);
printf(" nChannels = %ld\n", params.nChannels);
@@ -3436,7 +3725,7 @@
err = mOMX->getParameter(
mNode, OMX_IndexParamAudioAmr, &amr, sizeof(amr));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
printf(" nChannels = %ld\n", amr.nChannels);
printf(" eAMRBandMode = %s\n",
@@ -3458,6 +3747,18 @@
printf("}\n");
}
+status_t OMXCodec::initNativeWindow() {
+ // Enable use of a GraphicBuffer as the output for this node. This must
+ // happen before getting the IndexParamPortDefinition parameter because it
+ // will affect the pixel format that the node reports.
+ status_t err = mOMX->enableGraphicBuffers(mNode, kPortIndexOutput, OMX_TRUE);
+ if (err != 0) {
+ return err;
+ }
+
+ return OK;
+}
+
void OMXCodec::initOutputFormat(const sp<MetaData> &inputFormat) {
mOutputFormat = new MetaData;
mOutputFormat->setCString(kKeyDecoderComponent, mComponentName);
@@ -3474,13 +3775,14 @@
status_t err = mOMX->getParameter(
mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
switch (def.eDomain) {
case OMX_PortDomainImage:
{
OMX_IMAGE_PORTDEFINITIONTYPE *imageDef = &def.format.image;
- CHECK_EQ(imageDef->eCompressionFormat, OMX_IMAGE_CodingUnused);
+ CHECK_EQ((int)imageDef->eCompressionFormat,
+ (int)OMX_IMAGE_CodingUnused);
mOutputFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
mOutputFormat->setInt32(kKeyColorFormat, imageDef->eColorFormat);
@@ -3500,11 +3802,11 @@
err = mOMX->getParameter(
mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
- CHECK_EQ(params.eNumData, OMX_NumericalDataSigned);
- CHECK_EQ(params.nBitPerSample, 16);
- CHECK_EQ(params.ePCMMode, OMX_AUDIO_PCMModeLinear);
+ CHECK_EQ((int)params.eNumData, (int)OMX_NumericalDataSigned);
+ CHECK_EQ(params.nBitPerSample, 16u);
+ CHECK_EQ((int)params.ePCMMode, (int)OMX_AUDIO_PCMModeLinear);
int32_t numChannels, sampleRate;
inputFormat->findInt32(kKeyChannelCount, &numChannels);
@@ -3538,9 +3840,9 @@
err = mOMX->getParameter(
mNode, OMX_IndexParamAudioAmr, &amr, sizeof(amr));
- CHECK_EQ(err, OK);
+ CHECK_EQ(err, (status_t)OK);
- CHECK_EQ(amr.nChannels, 1);
+ CHECK_EQ(amr.nChannels, 1u);
mOutputFormat->setInt32(kKeyChannelCount, 1);
if (amr.eAMRBandMode >= OMX_AUDIO_AMRBandModeNB0
@@ -3592,18 +3894,40 @@
CHECK(!"Unknown compression format.");
}
- if (!strcmp(mComponentName, "OMX.PV.avcdec")) {
- // This component appears to be lying to me.
- mOutputFormat->setInt32(
- kKeyWidth, (video_def->nFrameWidth + 15) & -16);
- mOutputFormat->setInt32(
- kKeyHeight, (video_def->nFrameHeight + 15) & -16);
- } else {
- mOutputFormat->setInt32(kKeyWidth, video_def->nFrameWidth);
- mOutputFormat->setInt32(kKeyHeight, video_def->nFrameHeight);
+ mOutputFormat->setInt32(kKeyWidth, video_def->nFrameWidth);
+ mOutputFormat->setInt32(kKeyHeight, video_def->nFrameHeight);
+ mOutputFormat->setInt32(kKeyColorFormat, video_def->eColorFormat);
+
+ if (!mIsEncoder) {
+ OMX_CONFIG_RECTTYPE rect;
+ status_t err =
+ mOMX->getConfig(
+ mNode, OMX_IndexConfigCommonOutputCrop,
+ &rect, sizeof(rect));
+
+ if (err == OK) {
+ CHECK_GE(rect.nLeft, 0);
+ CHECK_GE(rect.nTop, 0);
+ CHECK_GE(rect.nWidth, 0u);
+ CHECK_GE(rect.nHeight, 0u);
+ CHECK_LE(rect.nLeft + rect.nWidth - 1, video_def->nFrameWidth);
+ CHECK_LE(rect.nTop + rect.nHeight - 1, video_def->nFrameHeight);
+
+ mOutputFormat->setRect(
+ kKeyCropRect,
+ rect.nLeft,
+ rect.nTop,
+ rect.nLeft + rect.nWidth - 1,
+ rect.nTop + rect.nHeight - 1);
+ } else {
+ mOutputFormat->setRect(
+ kKeyCropRect,
+ 0, 0,
+ video_def->nFrameWidth - 1,
+ video_def->nFrameHeight - 1);
+ }
}
- mOutputFormat->setInt32(kKeyColorFormat, video_def->eColorFormat);
break;
}
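initOutputFormat now records a crop rectangle: OMX reports (nLeft, nTop, nWidth, nHeight), while the metadata key stores inclusive right/bottom coordinates, falling back to the full frame when the component does not support the crop config. A small sketch of that conversion (struct and function names are illustrative):

    #include <cstdint>
    #include <cstdio>

    struct CropRect {
        int32_t left, top, right, bottom;   // inclusive coordinates, as stored
    };                                      // under kKeyCropRect

    // Convert an OMX-style (left, top, width, height) crop into the inclusive
    // rect used by the metadata, or cover the whole frame if there is no crop.
    static CropRect makeCropRect(bool haveCrop,
                                 int32_t left, int32_t top,
                                 uint32_t width, uint32_t height,
                                 uint32_t frameWidth, uint32_t frameHeight) {
        CropRect r;
        if (haveCrop) {
            r.left = left;
            r.top = top;
            r.right = left + (int32_t)width - 1;
            r.bottom = top + (int32_t)height - 1;
        } else {
            r.left = 0;
            r.top = 0;
            r.right = (int32_t)frameWidth - 1;
            r.bottom = (int32_t)frameHeight - 1;
        }
        return r;
    }

    int main() {
        // A 1920x1088 decode surface cropped to the real 1920x1080 picture.
        CropRect r = makeCropRect(true, 0, 0, 1920, 1080, 1920, 1088);
        printf("crop: %d,%d - %d,%d\n", r.left, r.top, r.right, r.bottom);
        // crop: 0,0 - 1919,1079
        return 0;
    }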
@@ -3693,8 +4017,30 @@
caps->mProfileLevels.push(profileLevel);
}
- CHECK_EQ(omx->freeNode(node), OK);
+ // Color format query
+ OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
+ InitOMXParams(&portFormat);
+ portFormat.nPortIndex = queryDecoders ? 1 : 0;
+ for (portFormat.nIndex = 0;; ++portFormat.nIndex) {
+ err = omx->getParameter(
+ node, OMX_IndexParamVideoPortFormat,
+ &portFormat, sizeof(portFormat));
+ if (err != OK) {
+ break;
+ }
+ caps->mColorFormats.push(portFormat.eColorFormat);
+ }
+
+ CHECK_EQ(omx->freeNode(node), (status_t)OK);
}
}
+void OMXCodec::restorePatchedDataPointer(BufferInfo *info) {
+ CHECK(mIsEncoder && (mQuirks & kAvoidMemcopyInputRecordingFrames));
+ CHECK(mOMXLivesLocally);
+
+ OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *)info->mBuffer;
+ header->pBuffer = (OMX_U8 *)info->mData;
+}
+
} // namespace android
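The capability query now also walks the component's supported color formats by bumping nIndex on OMX_IndexParamVideoPortFormat until the getter fails. The same enumerate-until-error idiom, sketched with a stand-in getter instead of a live IOMX node (the format values and error code are illustrative):

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    typedef int32_t status_t;
    static const status_t OK = 0;
    static const status_t ERROR_NO_MORE = -1;   // stand-in for the real error code

    // Stand-in for getParameter(OMX_IndexParamVideoPortFormat, ...): reports
    // three supported formats, then fails.
    static status_t queryColorFormat(uint32_t index, uint32_t *colorFormat) {
        static const uint32_t kFormats[] = { 0x13, 0x15, 0x7FA30C00 };
        if (index >= sizeof(kFormats) / sizeof(kFormats[0])) {
            return ERROR_NO_MORE;
        }
        *colorFormat = kFormats[index];
        return OK;
    }

    int main() {
        std::vector<uint32_t> colorFormats;
        // Keep incrementing the query index until the component says "no more".
        for (uint32_t index = 0;; ++index) {
            uint32_t fmt;
            if (queryColorFormat(index, &fmt) != OK) {
                break;
            }
            colorFormats.push_back(fmt);
        }
        for (size_t i = 0; i < colorFormats.size(); ++i) {
            printf("color format 0x%x\n", colorFormats[i]);
        }
        return 0;
    }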
diff --git a/media/libstagefright/OggExtractor.cpp b/media/libstagefright/OggExtractor.cpp
index 43938b2..4b8a014 100644
--- a/media/libstagefright/OggExtractor.cpp
+++ b/media/libstagefright/OggExtractor.cpp
@@ -73,7 +73,7 @@
// Returns an approximate bitrate in bits per second.
uint64_t approxBitrate();
- status_t seekToOffset(off_t offset);
+ status_t seekToOffset(off64_t offset);
status_t readNextPacket(MediaBuffer **buffer);
status_t init();
@@ -91,7 +91,7 @@
};
sp<DataSource> mSource;
- off_t mOffset;
+ off64_t mOffset;
Page mCurrentPage;
uint64_t mPrevGranulePosition;
size_t mCurrentPageSize;
@@ -99,7 +99,7 @@
uint64_t mCurrentPageSamples;
size_t mNextLaceIndex;
- off_t mFirstDataOffset;
+ off64_t mFirstDataOffset;
vorbis_info mVi;
vorbis_comment mVc;
@@ -107,8 +107,8 @@
sp<MetaData> mMeta;
sp<MetaData> mFileMeta;
- ssize_t readPage(off_t offset, Page *page);
- status_t findNextPage(off_t startOffset, off_t *pageOffset);
+ ssize_t readPage(off64_t offset, Page *page);
+ status_t findNextPage(off64_t startOffset, off64_t *pageOffset);
status_t verifyHeader(
MediaBuffer *buffer, uint8_t type);
@@ -116,7 +116,7 @@
void parseFileMetaData();
void extractAlbumArt(const void *data, size_t size);
- uint64_t findPrevGranulePosition(off_t pageOffset);
+ uint64_t findPrevGranulePosition(off64_t pageOffset);
MyVorbisExtractor(const MyVorbisExtractor &);
MyVorbisExtractor &operator=(const MyVorbisExtractor &);
@@ -162,7 +162,7 @@
int64_t seekTimeUs;
ReadOptions::SeekMode mode;
if (options && options->getSeekTo(&seekTimeUs, &mode)) {
- off_t pos = seekTimeUs * mExtractor->mImpl->approxBitrate() / 8000000ll;
+ off64_t pos = seekTimeUs * mExtractor->mImpl->approxBitrate() / 8000000ll;
LOGV("seeking to offset %ld", pos);
if (mExtractor->mImpl->seekToOffset(pos) != OK) {
@@ -220,7 +220,7 @@
}
status_t MyVorbisExtractor::findNextPage(
- off_t startOffset, off_t *pageOffset) {
+ off64_t startOffset, off64_t *pageOffset) {
*pageOffset = startOffset;
for (;;) {
@@ -250,9 +250,9 @@
// it (if any) and return its granule position.
// To do this we back up from the "current" page's offset until we find any
// page preceding it and then scan forward to just before the current page.
-uint64_t MyVorbisExtractor::findPrevGranulePosition(off_t pageOffset) {
- off_t prevPageOffset = 0;
- off_t prevGuess = pageOffset;
+uint64_t MyVorbisExtractor::findPrevGranulePosition(off64_t pageOffset) {
+ off64_t prevPageOffset = 0;
+ off64_t prevGuess = pageOffset;
for (;;) {
if (prevGuess >= 5000) {
prevGuess -= 5000;
@@ -292,14 +292,14 @@
}
}
-status_t MyVorbisExtractor::seekToOffset(off_t offset) {
+status_t MyVorbisExtractor::seekToOffset(off64_t offset) {
if (mFirstDataOffset >= 0 && offset < mFirstDataOffset) {
// Once we know where the actual audio data starts (past the headers)
// don't ever seek to anywhere before that.
offset = mFirstDataOffset;
}
- off_t pageOffset;
+ off64_t pageOffset;
status_t err = findNextPage(offset, &pageOffset);
if (err != OK) {
@@ -324,7 +324,7 @@
return OK;
}
-ssize_t MyVorbisExtractor::readPage(off_t offset, Page *page) {
+ssize_t MyVorbisExtractor::readPage(off64_t offset, Page *page) {
uint8_t header[27];
if (mSource->readAt(offset, header, sizeof(header))
< (ssize_t)sizeof(header)) {
@@ -410,7 +410,7 @@
}
if (mNextLaceIndex < mCurrentPage.mNumSegments) {
- off_t dataOffset = mOffset + 27 + mCurrentPage.mNumSegments;
+ off64_t dataOffset = mOffset + 27 + mCurrentPage.mNumSegments;
for (size_t j = 0; j < mNextLaceIndex; ++j) {
dataOffset += mCurrentPage.mLace[j];
}
@@ -609,7 +609,7 @@
LOGV("nominal-bitrate = %ld", mVi.bitrate_nominal);
LOGV("window-bitrate = %ld", mVi.bitrate_window);
- off_t size;
+ off64_t size;
if (mSource->getSize(&size) == OK) {
uint64_t bps = approxBitrate();
diff --git a/media/libstagefright/SampleIterator.cpp b/media/libstagefright/SampleIterator.cpp
index 7155c61..062ab9b 100644
--- a/media/libstagefright/SampleIterator.cpp
+++ b/media/libstagefright/SampleIterator.cpp
@@ -179,7 +179,7 @@
return OK;
}
-status_t SampleIterator::getChunkOffset(uint32_t chunk, off_t *offset) {
+status_t SampleIterator::getChunkOffset(uint32_t chunk, off64_t *offset) {
*offset = 0;
if (chunk >= mTable->mNumChunkOffsets) {
diff --git a/media/libstagefright/SampleTable.cpp b/media/libstagefright/SampleTable.cpp
index 27faf4f..a9163fc 100644
--- a/media/libstagefright/SampleTable.cpp
+++ b/media/libstagefright/SampleTable.cpp
@@ -76,7 +76,7 @@
}
status_t SampleTable::setChunkOffsetParams(
- uint32_t type, off_t data_offset, size_t data_size) {
+ uint32_t type, off64_t data_offset, size_t data_size) {
if (mChunkOffsetOffset >= 0) {
return ERROR_MALFORMED;
}
@@ -117,7 +117,7 @@
}
status_t SampleTable::setSampleToChunkParams(
- off_t data_offset, size_t data_size) {
+ off64_t data_offset, size_t data_size) {
if (mSampleToChunkOffset >= 0) {
return ERROR_MALFORMED;
}
@@ -168,7 +168,7 @@
}
status_t SampleTable::setSampleSizeParams(
- uint32_t type, off_t data_offset, size_t data_size) {
+ uint32_t type, off64_t data_offset, size_t data_size) {
if (mSampleSizeOffset >= 0) {
return ERROR_MALFORMED;
}
@@ -228,7 +228,7 @@
}
status_t SampleTable::setTimeToSampleParams(
- off_t data_offset, size_t data_size) {
+ off64_t data_offset, size_t data_size) {
if (mTimeToSample != NULL || data_size < 8) {
return ERROR_MALFORMED;
}
@@ -260,7 +260,7 @@
return OK;
}
-status_t SampleTable::setSyncSampleParams(off_t data_offset, size_t data_size) {
+status_t SampleTable::setSyncSampleParams(off64_t data_offset, size_t data_size) {
if (mSyncSampleOffset >= 0 || data_size < 8) {
return ERROR_MALFORMED;
}
@@ -281,7 +281,7 @@
mNumSyncSamples = U32_AT(&header[4]);
if (mNumSyncSamples < 2) {
- LOGW("Table of sync samples is empty or has only a single entry!");
+ LOGV("Table of sync samples is empty or has only a single entry!");
}
mSyncSamples = new uint32_t[mNumSyncSamples];
@@ -419,8 +419,10 @@
++left;
}
+ if (left > 0) {
+ --left;
+ }
- --left;
uint32_t x;
if (mDataSource->readAt(
mSyncSampleOffset + 8 + left * 4, &x, 4) != 4) {
@@ -557,7 +559,7 @@
status_t SampleTable::getMetaDataForSample(
uint32_t sampleIndex,
- off_t *offset,
+ off64_t *offset,
size_t *size,
uint32_t *decodingTime,
bool *isSyncSample) {
diff --git a/media/libstagefright/ShoutcastSource.cpp b/media/libstagefright/ShoutcastSource.cpp
index 23b7681..783f2d0 100644
--- a/media/libstagefright/ShoutcastSource.cpp
+++ b/media/libstagefright/ShoutcastSource.cpp
@@ -14,7 +14,6 @@
* limitations under the License.
*/
-#include "include/stagefright_string.h"
#include "include/HTTPStream.h"
#include <stdlib.h>
@@ -34,7 +33,7 @@
mBytesUntilMetaData(0),
mGroup(NULL),
mStarted(false) {
- string metaint;
+ AString metaint;
if (mHttp->find_header_value("icy-metaint", &metaint)) {
char *end;
const char *start = metaint.c_str();
diff --git a/media/libstagefright/StagefrightMediaScanner.cpp b/media/libstagefright/StagefrightMediaScanner.cpp
index 8426ee7..86e0e73 100644
--- a/media/libstagefright/StagefrightMediaScanner.cpp
+++ b/media/libstagefright/StagefrightMediaScanner.cpp
@@ -39,7 +39,7 @@
".mp3", ".mp4", ".m4a", ".3gp", ".3gpp", ".3g2", ".3gpp2",
".mpeg", ".ogg", ".mid", ".smf", ".imy", ".wma", ".aac",
".wav", ".amr", ".midi", ".xmf", ".rtttl", ".rtx", ".ota",
- ".mkv", ".mka", ".webm", ".ts"
+ ".mkv", ".mka", ".webm", ".ts", ".fl"
};
static const size_t kNumValidExtensions =
sizeof(kValidExtensions) / sizeof(kValidExtensions[0]);
@@ -175,11 +175,11 @@
char *StagefrightMediaScanner::extractAlbumArt(int fd) {
LOGV("extractAlbumArt %d", fd);
- off_t size = lseek(fd, 0, SEEK_END);
+ off64_t size = lseek64(fd, 0, SEEK_END);
if (size < 0) {
return NULL;
}
- lseek(fd, 0, SEEK_SET);
+ lseek64(fd, 0, SEEK_SET);
if (mRetriever->setDataSource(fd, 0, size) == OK
&& mRetriever->setMode(
diff --git a/media/libstagefright/StagefrightMetadataRetriever.cpp b/media/libstagefright/StagefrightMetadataRetriever.cpp
index 9b2dec9..763a914 100644
--- a/media/libstagefright/StagefrightMetadataRetriever.cpp
+++ b/media/libstagefright/StagefrightMetadataRetriever.cpp
@@ -191,17 +191,26 @@
CHECK(meta->findInt32(kKeyWidth, &width));
CHECK(meta->findInt32(kKeyHeight, &height));
+ int32_t crop_left, crop_top, crop_right, crop_bottom;
+ if (!meta->findRect(
+ kKeyCropRect,
+ &crop_left, &crop_top, &crop_right, &crop_bottom)) {
+ crop_left = crop_top = 0;
+ crop_right = width - 1;
+ crop_bottom = height - 1;
+ }
+
int32_t rotationAngle;
if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
rotationAngle = 0; // By default, no rotation
}
VideoFrame *frame = new VideoFrame;
- frame->mWidth = width;
- frame->mHeight = height;
- frame->mDisplayWidth = width;
- frame->mDisplayHeight = height;
- frame->mSize = width * height * 2;
+ frame->mWidth = crop_right - crop_left + 1;
+ frame->mHeight = crop_bottom - crop_top + 1;
+ frame->mDisplayWidth = frame->mWidth;
+ frame->mDisplayHeight = frame->mHeight;
+ frame->mSize = frame->mWidth * frame->mHeight * 2;
frame->mData = new uint8_t[frame->mSize];
frame->mRotationAngle = rotationAngle;
@@ -213,10 +222,13 @@
CHECK(converter.isValid());
converter.convert(
- width, height,
(const uint8_t *)buffer->data() + buffer->range_offset(),
- 0,
- frame->mData, width * 2);
+ width, height,
+ crop_left, crop_top, crop_right, crop_bottom,
+ frame->mData,
+ frame->mWidth,
+ frame->mHeight,
+ 0, 0, frame->mWidth - 1, frame->mHeight - 1);
buffer->release();
buffer = NULL;
@@ -418,5 +430,4 @@
}
}
-
} // namespace android
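
The StagefrightMetadataRetriever change above sizes the VideoFrame from the crop rectangle instead of the decoded buffer dimensions. The crop coordinates are inclusive, so the visible width is right - left + 1, and the RGB565 output needs two bytes per pixel. A minimal standalone sketch of that arithmetic, with made-up example values:

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    // Crop coordinates are inclusive, so a crop rect of (0, 0, 319, 239)
    // describes a 320 x 240 visible region; RGB565 needs 2 bytes per pixel.
    int main() {
        int32_t crop_left = 0, crop_top = 0;          // example values, not from the patch
        int32_t crop_right = 319, crop_bottom = 239;

        int32_t frameWidth  = crop_right - crop_left + 1;    // 320
        int32_t frameHeight = crop_bottom - crop_top + 1;    // 240
        size_t  frameSize   = frameWidth * frameHeight * 2;  // 153600 bytes

        printf("%d x %d, %zu bytes\n", frameWidth, frameHeight, frameSize);
        return 0;
    }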
diff --git a/media/libstagefright/ThrottledSource.cpp b/media/libstagefright/ThrottledSource.cpp
index 4711f7c..88e07b0 100644
--- a/media/libstagefright/ThrottledSource.cpp
+++ b/media/libstagefright/ThrottledSource.cpp
@@ -41,7 +41,7 @@
return mSource->initCheck();
}
-ssize_t ThrottledSource::readAt(off_t offset, void *data, size_t size) {
+ssize_t ThrottledSource::readAt(off64_t offset, void *data, size_t size) {
Mutex::Autolock autoLock(mLock);
ssize_t n = mSource->readAt(offset, data, size);
@@ -72,7 +72,7 @@
return n;
}
-status_t ThrottledSource::getSize(off_t *size) {
+status_t ThrottledSource::getSize(off64_t *size) {
return mSource->getSize(size);
}
diff --git a/media/libstagefright/VBRISeeker.cpp b/media/libstagefright/VBRISeeker.cpp
new file mode 100644
index 0000000..48bddc2
--- /dev/null
+++ b/media/libstagefright/VBRISeeker.cpp
@@ -0,0 +1,164 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "VBRISeeker"
+#include <utils/Log.h>
+
+#include "include/VBRISeeker.h"
+
+#include "include/MP3Extractor.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/Utils.h>
+
+namespace android {
+
+static uint32_t U24_AT(const uint8_t *ptr) {
+ return ptr[0] << 16 | ptr[1] << 8 | ptr[2];
+}
+
+// static
+sp<VBRISeeker> VBRISeeker::CreateFromSource(
+ const sp<DataSource> &source, off64_t post_id3_pos) {
+ off64_t pos = post_id3_pos;
+
+ uint8_t header[4];
+ ssize_t n = source->readAt(pos, header, sizeof(header));
+ if (n < (ssize_t)sizeof(header)) {
+ return NULL;
+ }
+
+ uint32_t tmp = U32_AT(&header[0]);
+ size_t frameSize;
+ int sampleRate;
+ if (!MP3Extractor::get_mp3_frame_size(tmp, &frameSize, &sampleRate)) {
+ return NULL;
+ }
+
+ // VBRI header follows 32 bytes after the header _ends_.
+ pos += sizeof(header) + 32;
+
+ uint8_t vbriHeader[26];
+ n = source->readAt(pos, vbriHeader, sizeof(vbriHeader));
+ if (n < (ssize_t)sizeof(vbriHeader)) {
+ return NULL;
+ }
+
+ if (memcmp(vbriHeader, "VBRI", 4)) {
+ return NULL;
+ }
+
+ size_t numFrames = U32_AT(&vbriHeader[14]);
+
+ int64_t durationUs =
+ numFrames * 1000000ll * (sampleRate >= 32000 ? 1152 : 576) / sampleRate;
+
+ LOGV("duration = %.2f secs", durationUs / 1E6);
+
+ size_t numEntries = U16_AT(&vbriHeader[18]);
+ size_t entrySize = U16_AT(&vbriHeader[22]);
+ size_t scale = U16_AT(&vbriHeader[20]);
+
+ LOGV("%d entries, scale=%d, size_per_entry=%d",
+ numEntries,
+ scale,
+ entrySize);
+
+ size_t totalEntrySize = numEntries * entrySize;
+ uint8_t *buffer = new uint8_t[totalEntrySize];
+
+ n = source->readAt(pos + sizeof(vbriHeader), buffer, totalEntrySize);
+ if (n < (ssize_t)totalEntrySize) {
+ delete[] buffer;
+ buffer = NULL;
+
+ return NULL;
+ }
+
+ sp<VBRISeeker> seeker = new VBRISeeker;
+ seeker->mBasePos = post_id3_pos;
+ seeker->mDurationUs = durationUs;
+
+ off64_t offset = post_id3_pos;
+ for (size_t i = 0; i < numEntries; ++i) {
+ uint32_t numBytes;
+ switch (entrySize) {
+ case 1: numBytes = buffer[i]; break;
+ case 2: numBytes = U16_AT(buffer + 2 * i); break;
+ case 3: numBytes = U24_AT(buffer + 3 * i); break;
+ default:
+ {
+ CHECK_EQ(entrySize, 4u);
+ numBytes = U32_AT(buffer + 4 * i); break;
+ }
+ }
+
+ numBytes *= scale;
+
+ seeker->mSegments.push(numBytes);
+
+ LOGV("entry #%d: %d offset 0x%08lx", i, numBytes, offset);
+ offset += numBytes;
+ }
+
+ delete[] buffer;
+ buffer = NULL;
+
+ LOGI("Found VBRI header.");
+
+ return seeker;
+}
+
+VBRISeeker::VBRISeeker()
+ : mDurationUs(-1) {
+}
+
+bool VBRISeeker::getDuration(int64_t *durationUs) {
+ if (mDurationUs < 0) {
+ return false;
+ }
+
+ *durationUs = mDurationUs;
+
+ return true;
+}
+
+bool VBRISeeker::getOffsetForTime(int64_t *timeUs, off64_t *pos) {
+ if (mDurationUs < 0) {
+ return false;
+ }
+
+ int64_t segmentDurationUs = mDurationUs / mSegments.size();
+
+ int64_t nowUs = 0;
+ *pos = mBasePos;
+ size_t segmentIndex = 0;
+ while (segmentIndex < mSegments.size() && nowUs < *timeUs) {
+ nowUs += segmentDurationUs;
+ *pos += mSegments.itemAt(segmentIndex++);
+ }
+
+ LOGV("getOffsetForTime %lld us => 0x%08lx", *timeUs, *pos);
+
+ *timeUs = nowUs;
+
+ return true;
+}
+
+} // namespace android
+
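
The new VBRISeeker records one byte count per VBRI TOC entry and treats each entry as covering an equal slice of the total duration; getOffsetForTime() simply walks segments until the accumulated time passes the target. The following is an illustrative, hedged re-statement of that walk as a free function (names are mine, not part of the class above; it assumes a non-empty segment table):

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Each TOC entry holds the byte size of one equal-duration slice of the
    // stream, so the walk accumulates time and bytes until it passes the target.
    int64_t offsetForTime(int64_t durationUs, int64_t baseOffset,
                          const std::vector<uint32_t> &segmentBytes,
                          int64_t *timeUs /* in: target, out: snapped time */) {
        int64_t segmentDurationUs = durationUs / segmentBytes.size();

        int64_t nowUs = 0;
        int64_t pos = baseOffset;
        size_t i = 0;
        while (i < segmentBytes.size() && nowUs < *timeUs) {
            nowUs += segmentDurationUs;
            pos += segmentBytes[i++];
        }

        *timeUs = nowUs;  // caller resumes from the start of the matched segment
        return pos;
    }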
diff --git a/media/libstagefright/VideoSourceDownSampler.cpp b/media/libstagefright/VideoSourceDownSampler.cpp
new file mode 100644
index 0000000..ea7b09a
--- /dev/null
+++ b/media/libstagefright/VideoSourceDownSampler.cpp
@@ -0,0 +1,142 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "VideoSourceDownSampler"
+
+#include <media/stagefright/VideoSourceDownSampler.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/YUVImage.h>
+#include <media/stagefright/YUVCanvas.h>
+#include "OMX_Video.h"
+
+namespace android {
+
+VideoSourceDownSampler::VideoSourceDownSampler(const sp<MediaSource> &videoSource,
+ int32_t width, int32_t height) {
+ LOGV("Construct VideoSourceDownSampler");
+ CHECK(width > 0);
+ CHECK(height > 0);
+
+ mRealVideoSource = videoSource;
+ mWidth = width;
+ mHeight = height;
+
+ mMeta = new MetaData(*(mRealVideoSource->getFormat()));
+ CHECK(mMeta->findInt32(kKeyWidth, &mRealSourceWidth));
+ CHECK(mMeta->findInt32(kKeyHeight, &mRealSourceHeight));
+
+ if ((mWidth != mRealSourceWidth) || (mHeight != mRealSourceHeight)) {
+ // Change meta data for width and height.
+ CHECK(mWidth <= mRealSourceWidth);
+ CHECK(mHeight <= mRealSourceHeight);
+
+ mNeedDownSampling = true;
+ computeDownSamplingParameters();
+ mMeta->setInt32(kKeyWidth, mWidth);
+ mMeta->setInt32(kKeyHeight, mHeight);
+ } else {
+ mNeedDownSampling = false;
+ }
+}
+
+VideoSourceDownSampler::~VideoSourceDownSampler() {
+}
+
+void VideoSourceDownSampler::computeDownSamplingParameters() {
+ mDownSampleSkipX = mRealSourceWidth / mWidth;
+ mDownSampleSkipY = mRealSourceHeight / mHeight;
+
+ mDownSampleOffsetX = mRealSourceWidth - mDownSampleSkipX * mWidth;
+ mDownSampleOffsetY = mRealSourceHeight - mDownSampleSkipY * mHeight;
+}
+
+void VideoSourceDownSampler::downSampleYUVImage(
+ const MediaBuffer &sourceBuffer, MediaBuffer **buffer) const {
+ // find the YUV format
+ int32_t srcFormat;
+ CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat));
+ YUVImage::YUVFormat yuvFormat;
+ if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
+ yuvFormat = YUVImage::YUV420SemiPlanar;
+ } else if (srcFormat == OMX_COLOR_FormatYUV420Planar) {
+ yuvFormat = YUVImage::YUV420Planar;
+ }
+
+ // allocate a MediaBuffer for the downsampled image and set up a canvas.
+ *buffer = new MediaBuffer(YUVImage::bufferSize(yuvFormat, mWidth, mHeight));
+ YUVImage yuvDownSampledImage(yuvFormat,
+ mWidth, mHeight,
+ (uint8_t *)(*buffer)->data());
+ YUVCanvas yuvCanvasDownSample(yuvDownSampledImage);
+
+ YUVImage yuvImageSource(yuvFormat,
+ mRealSourceWidth, mRealSourceHeight,
+ (uint8_t *)sourceBuffer.data());
+ yuvCanvasDownSample.downsample(mDownSampleOffsetX, mDownSampleOffsetY,
+ mDownSampleSkipX, mDownSampleSkipY,
+ yuvImageSource);
+}
+
+status_t VideoSourceDownSampler::start(MetaData *params) {
+ LOGV("start");
+ return mRealVideoSource->start();
+}
+
+status_t VideoSourceDownSampler::stop() {
+ LOGV("stop");
+ return mRealVideoSource->stop();
+}
+
+sp<MetaData> VideoSourceDownSampler::getFormat() {
+ LOGV("getFormat");
+ return mMeta;
+}
+
+status_t VideoSourceDownSampler::read(
+ MediaBuffer **buffer, const ReadOptions *options) {
+ LOGV("read");
+ MediaBuffer *realBuffer;
+ status_t err = mRealVideoSource->read(&realBuffer, options);
+
+ if (mNeedDownSampling) {
+ downSampleYUVImage(*realBuffer, buffer);
+
+ int64_t frameTime;
+ realBuffer->meta_data()->findInt64(kKeyTime, &frameTime);
+ (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
+
+ // We just want this buffer to be deleted when the encoder releases it.
+ // So don't add a reference to it; just set its observer to NULL.
+ (*buffer)->setObserver(NULL);
+
+ // The original buffer is no longer required. Release it.
+ realBuffer->release();
+ } else {
+ *buffer = realBuffer;
+ }
+
+ return err;
+}
+
+status_t VideoSourceDownSampler::pause() {
+ LOGV("pause");
+ return mRealVideoSource->pause();
+}
+
+} // namespace android
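
In the VideoSourceDownSampler above, computeDownSamplingParameters() derives integer skip factors from the source and target sizes; the leftover pixels become the sampling offsets. A short worked sketch of that arithmetic, with example sizes that are assumptions rather than values from the patch:

    #include <cstdint>
    #include <cstdio>

    // Integer skip factors plus the leftover pixels that become the offsets.
    int main() {
        int32_t srcW = 640, srcH = 480;   // example source size (assumption)
        int32_t dstW = 176, dstH = 144;   // example target size (assumption)

        int32_t skipX = srcW / dstW;              // 3
        int32_t skipY = srcH / dstH;              // 3
        int32_t offsetX = srcW - skipX * dstW;    // 640 - 528 = 112
        int32_t offsetY = srcH - skipY * dstH;    // 480 - 432 = 48

        printf("skip=(%d,%d) offset=(%d,%d)\n", skipX, skipY, offsetX, offsetY);
        return 0;
    }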
diff --git a/media/libstagefright/WAVExtractor.cpp b/media/libstagefright/WAVExtractor.cpp
index aff06bc..446021c 100644
--- a/media/libstagefright/WAVExtractor.cpp
+++ b/media/libstagefright/WAVExtractor.cpp
@@ -51,7 +51,7 @@
const sp<MetaData> &meta,
uint16_t waveFormat,
int32_t bitsPerSample,
- off_t offset, size_t size);
+ off64_t offset, size_t size);
virtual status_t start(MetaData *params = NULL);
virtual status_t stop();
@@ -72,11 +72,11 @@
int32_t mSampleRate;
int32_t mNumChannels;
int32_t mBitsPerSample;
- off_t mOffset;
+ off64_t mOffset;
size_t mSize;
bool mStarted;
MediaBufferGroup *mGroup;
- off_t mCurrentPos;
+ off64_t mCurrentPos;
WAVSource(const WAVSource &);
WAVSource &operator=(const WAVSource &);
@@ -139,7 +139,7 @@
size_t totalSize = U32_LE_AT(&header[4]);
- off_t offset = 12;
+ off64_t offset = 12;
size_t remainingSize = totalSize;
while (remainingSize >= 8) {
uint8_t chunkHeader[8];
@@ -251,7 +251,7 @@
const sp<MetaData> &meta,
uint16_t waveFormat,
int32_t bitsPerSample,
- off_t offset, size_t size)
+ off64_t offset, size_t size)
: mDataSource(dataSource),
mMeta(meta),
mWaveFormat(waveFormat),
@@ -335,7 +335,7 @@
mBitsPerSample == 8 ? kMaxFrameSize / 2 : kMaxFrameSize;
size_t maxBytesAvailable =
- (mCurrentPos - mOffset >= (off_t)mSize)
+ (mCurrentPos - mOffset >= (off64_t)mSize)
? 0 : mSize - (mCurrentPos - mOffset);
if (maxBytesToRead > maxBytesAvailable) {
diff --git a/media/libstagefright/WVMExtractor.cpp b/media/libstagefright/WVMExtractor.cpp
new file mode 100644
index 0000000..7c72852
--- /dev/null
+++ b/media/libstagefright/WVMExtractor.cpp
@@ -0,0 +1,104 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "WVMExtractor"
+#include <utils/Log.h>
+
+#include "include/WVMExtractor.h"
+
+#include <arpa/inet.h>
+#include <utils/String8.h>
+#include <media/stagefright/Utils.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDebug.h>
+#include <dlfcn.h>
+
+#include <utils/Errors.h>
+
+/* The extractor lifetime is short - just long enough to get
+ * the media sources constructed - so the shared lib needs to remain open
+ * beyond the lifetime of the extractor. So keep the handle as a global
+ * rather than a member of the extractor.
+ */
+void *gVendorLibHandle = NULL;
+
+namespace android {
+
+static Mutex gWVMutex;
+
+WVMExtractor::WVMExtractor(const sp<DataSource> &source)
+ : mDataSource(source) {
+ {
+ Mutex::Autolock autoLock(gWVMutex);
+ if (gVendorLibHandle == NULL) {
+ gVendorLibHandle = dlopen("libwvm.so", RTLD_NOW);
+ }
+
+ if (gVendorLibHandle == NULL) {
+ LOGE("Failed to open libwvm.so");
+ return;
+ }
+ }
+
+ typedef MediaExtractor *(*GetInstanceFunc)(sp<DataSource>);
+ GetInstanceFunc getInstanceFunc =
+ (GetInstanceFunc) dlsym(gVendorLibHandle,
+ "_ZN7android11GetInstanceENS_2spINS_10DataSourceEEE");
+
+ if (getInstanceFunc) {
+ LOGD("Calling GetInstanceFunc");
+ mImpl = (*getInstanceFunc)(source);
+ CHECK(mImpl != NULL);
+ } else {
+ LOGE("Failed to locate GetInstance in libwvm.so");
+ }
+}
+
+WVMExtractor::~WVMExtractor() {
+}
+
+size_t WVMExtractor::countTracks() {
+ return (mImpl != NULL) ? mImpl->countTracks() : 0;
+}
+
+sp<MediaSource> WVMExtractor::getTrack(size_t index) {
+ if (mImpl == NULL) {
+ return NULL;
+ }
+ return mImpl->getTrack(index);
+}
+
+sp<MetaData> WVMExtractor::getTrackMetaData(size_t index, uint32_t flags) {
+ if (mImpl == NULL) {
+ return NULL;
+ }
+ return mImpl->getTrackMetaData(index, flags);
+}
+
+sp<MetaData> WVMExtractor::getMetaData() {
+ if (mImpl == NULL) {
+ return NULL;
+ }
+ return mImpl->getMetaData();
+}
+
+} //namespace android
+
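
The WVMExtractor above resolves the vendor implementation at runtime: libwvm.so is opened once, kept open for the life of the process, and the factory function is looked up by its mangled C++ name. A generic, hedged sketch of the same dlopen/dlsym pattern follows; the library name, symbol name, and factory signature here are placeholders, not the real ones:

    #include <dlfcn.h>
    #include <cstdio>

    // Open a shared library once, resolve a factory symbol, and call through
    // it if present; the handle stays open so returned objects remain valid.
    typedef void *(*FactoryFunc)(void *cookie);

    void *loadPluginInstance(const char *libName, const char *symbolName, void *cookie) {
        static void *sHandle = NULL;  // kept open for the life of the process
        if (sHandle == NULL) {
            sHandle = dlopen(libName, RTLD_NOW);
        }
        if (sHandle == NULL) {
            fprintf(stderr, "failed to open %s: %s\n", libName, dlerror());
            return NULL;
        }

        FactoryFunc factory = (FactoryFunc)dlsym(sHandle, symbolName);
        if (factory == NULL) {
            fprintf(stderr, "failed to locate %s\n", symbolName);
            return NULL;
        }
        return factory(cookie);
    }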
diff --git a/media/libstagefright/XINGSeeker.cpp b/media/libstagefright/XINGSeeker.cpp
new file mode 100644
index 0000000..616836c
--- /dev/null
+++ b/media/libstagefright/XINGSeeker.cpp
@@ -0,0 +1,223 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "include/XINGSeeker.h"
+
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/Utils.h>
+
+namespace android {
+
+static bool parse_xing_header(
+ const sp<DataSource> &source, off64_t first_frame_pos,
+ int32_t *frame_number = NULL, int32_t *byte_number = NULL,
+ char *table_of_contents = NULL, int32_t *quality_indicator = NULL,
+ int64_t *duration = NULL);
+
+// static
+sp<XINGSeeker> XINGSeeker::CreateFromSource(
+ const sp<DataSource> &source, off64_t first_frame_pos) {
+ sp<XINGSeeker> seeker = new XINGSeeker;
+
+ seeker->mFirstFramePos = first_frame_pos;
+
+ if (!parse_xing_header(
+ source, first_frame_pos,
+ NULL, &seeker->mSizeBytes, seeker->mTableOfContents,
+ NULL, &seeker->mDurationUs)) {
+ return NULL;
+ }
+
+ LOGI("Found XING header.");
+
+ return seeker;
+}
+
+XINGSeeker::XINGSeeker()
+ : mDurationUs(-1),
+ mSizeBytes(0) {
+}
+
+bool XINGSeeker::getDuration(int64_t *durationUs) {
+ if (mDurationUs < 0) {
+ return false;
+ }
+
+ *durationUs = mDurationUs;
+
+ return true;
+}
+
+bool XINGSeeker::getOffsetForTime(int64_t *timeUs, off64_t *pos) {
+ if (mSizeBytes == 0 || mTableOfContents[0] <= 0 || mDurationUs < 0) {
+ return false;
+ }
+
+ float percent = (float)(*timeUs) * 100 / mDurationUs;
+ float fx;
+ if( percent <= 0.0f ) {
+ fx = 0.0f;
+ } else if( percent >= 100.0f ) {
+ fx = 256.0f;
+ } else {
+ int a = (int)percent;
+ float fa, fb;
+ if ( a == 0 ) {
+ fa = 0.0f;
+ } else {
+ fa = (float)mTableOfContents[a-1];
+ }
+ if ( a < 99 ) {
+ fb = (float)mTableOfContents[a];
+ } else {
+ fb = 256.0f;
+ }
+ fx = fa + (fb-fa)*(percent-a);
+ }
+
+ *pos = (int)((1.0f/256.0f)*fx*mSizeBytes) + mFirstFramePos;
+
+ return true;
+}
+
+static bool parse_xing_header(
+ const sp<DataSource> &source, off64_t first_frame_pos,
+ int32_t *frame_number, int32_t *byte_number,
+ char *table_of_contents, int32_t *quality_indicator,
+ int64_t *duration) {
+ if (frame_number) {
+ *frame_number = 0;
+ }
+ if (byte_number) {
+ *byte_number = 0;
+ }
+ if (table_of_contents) {
+ table_of_contents[0] = 0;
+ }
+ if (quality_indicator) {
+ *quality_indicator = 0;
+ }
+ if (duration) {
+ *duration = 0;
+ }
+
+ uint8_t buffer[4];
+ int offset = first_frame_pos;
+ if (source->readAt(offset, &buffer, 4) < 4) { // get header
+ return false;
+ }
+ offset += 4;
+
+ uint8_t id, layer, sr_index, mode;
+ layer = (buffer[1] >> 1) & 3;
+ id = (buffer[1] >> 3) & 3;
+ sr_index = (buffer[2] >> 2) & 3;
+ mode = (buffer[3] >> 6) & 3;
+ if (layer == 0) {
+ return false;
+ }
+ if (id == 1) {
+ return false;
+ }
+ if (sr_index == 3) {
+ return false;
+ }
+ // determine offset of XING header
+ if(id&1) { // mpeg1
+ if (mode != 3) offset += 32;
+ else offset += 17;
+ } else { // mpeg2
+ if (mode != 3) offset += 17;
+ else offset += 9;
+ }
+
+ if (source->readAt(offset, &buffer, 4) < 4) { // XING header ID
+ return false;
+ }
+ offset += 4;
+ // Check XING ID
+ if ((buffer[0] != 'X') || (buffer[1] != 'i')
+ || (buffer[2] != 'n') || (buffer[3] != 'g')) {
+ if ((buffer[0] != 'I') || (buffer[1] != 'n')
+ || (buffer[2] != 'f') || (buffer[3] != 'o')) {
+ return false;
+ }
+ }
+
+ if (source->readAt(offset, &buffer, 4) < 4) { // flags
+ return false;
+ }
+ offset += 4;
+ uint32_t flags = U32_AT(buffer);
+
+ if (flags & 0x0001) { // Frames field is present
+ if (source->readAt(offset, buffer, 4) < 4) {
+ return false;
+ }
+ if (frame_number) {
+ *frame_number = U32_AT(buffer);
+ }
+ int32_t frame = U32_AT(buffer);
+ // Samples per frame: first index = MPEG version ID, second index = layer
+ const int samplesPerFrames[2][3] =
+ {
+ { 384, 1152, 576 }, // MPEG 2, 2.5: layer1, layer2, layer3
+ { 384, 1152, 1152 }, // MPEG 1: layer1, layer2, layer3
+ };
+ // sampling rates in hertz: first index = MPEG version ID, second index = sampling rate index
+ const int samplingRates[4][3] =
+ {
+ { 11025, 12000, 8000, }, // MPEG 2.5
+ { 0, 0, 0, }, // reserved
+ { 22050, 24000, 16000, }, // MPEG 2
+ { 44100, 48000, 32000, } // MPEG 1
+ };
+ if (duration) {
+ *duration = (int64_t)frame * samplesPerFrames[id&1][3-layer] * 1000000LL
+ / samplingRates[id][sr_index];
+ }
+ offset += 4;
+ }
+ if (flags & 0x0002) { // Bytes field is present
+ if (byte_number) {
+ if (source->readAt(offset, buffer, 4) < 4) {
+ return false;
+ }
+ *byte_number = U32_AT(buffer);
+ }
+ offset += 4;
+ }
+ if (flags & 0x0004) { // TOC field is present
+ if (table_of_contents) {
+ if (source->readAt(offset + 1, table_of_contents, 99) < 99) {
+ return false;
+ }
+ }
+ offset += 100;
+ }
+ if (flags & 0x0008) { // Quality indicator field is present
+ if (quality_indicator) {
+ if (source->readAt(offset, buffer, 4) < 4) {
+ return false;
+ }
+ *quality_indicator = U32_AT(buffer);
+ }
+ }
+ return true;
+}
+
+} // namespace android
+
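
The XINGSeeker's getOffsetForTime() interpolates linearly between the two TOC entries that bracket the requested percentage; the 99-entry TOC maps each whole percent of the duration to a byte position scaled to the range 0..255. A standalone re-statement of that interpolation (the function and parameter names are mine):

    #include <cstdint>

    // A fractional percent is linearly interpolated between neighbouring TOC
    // entries, and the resulting fraction of 256 is mapped onto the file size.
    int64_t xingOffsetForPercent(float percent, const unsigned char toc[99],
                                 int64_t sizeBytes, int64_t firstFramePos) {
        float fx;
        if (percent <= 0.0f) {
            fx = 0.0f;
        } else if (percent >= 100.0f) {
            fx = 256.0f;
        } else {
            int a = (int)percent;
            float fa = (a == 0) ? 0.0f : (float)toc[a - 1];
            float fb = (a < 99) ? (float)toc[a] : 256.0f;
            fx = fa + (fb - fa) * (percent - a);  // interpolate within the bucket
        }
        return (int64_t)((1.0f / 256.0f) * fx * sizeBytes) + firstFramePos;
    }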
diff --git a/media/libstagefright/avc_utils.cpp b/media/libstagefright/avc_utils.cpp
index 478e40c..2fe5e18 100644
--- a/media/libstagefright/avc_utils.cpp
+++ b/media/libstagefright/avc_utils.cpp
@@ -218,6 +218,28 @@
return NULL;
}
+const char *AVCProfileToString(uint8_t profile) {
+ switch (profile) {
+ case kAVCProfileBaseline:
+ return "Baseline";
+ case kAVCProfileMain:
+ return "Main";
+ case kAVCProfileExtended:
+ return "Extended";
+ case kAVCProfileHigh:
+ return "High";
+ case kAVCProfileHigh10:
+ return "High 10";
+ case kAVCProfileHigh422:
+ return "High 422";
+ case kAVCProfileHigh444:
+ return "High 444";
+ case kAVCProfileCAVLC444Intra:
+ return "CAVLC 444 Intra";
+ default: return "Unknown";
+ }
+}
+
sp<MetaData> MakeAVCCodecSpecificData(const sp<ABuffer> &accessUnit) {
const uint8_t *data = accessUnit->data();
size_t size = accessUnit->size();
@@ -244,6 +266,10 @@
*out++ = 0x01; // configurationVersion
memcpy(out, seqParamSet->data() + 1, 3); // profile/level...
+
+ uint8_t profile = out[0];
+ uint8_t level = out[2];
+
out += 3;
*out++ = (0x3f << 2) | 1; // lengthSize == 2 bytes
*out++ = 0xe0 | 1;
@@ -271,7 +297,8 @@
meta->setInt32(kKeyWidth, width);
meta->setInt32(kKeyHeight, height);
- LOGI("found AVC codec config (%d x %d)", width, height);
+ LOGI("found AVC codec config (%d x %d, %s-profile level %d.%d)",
+ width, height, AVCProfileToString(profile), level / 10, level % 10);
return meta;
}
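
The avc_utils change reads profile_idc from the first copied SPS byte and level_idc from the third; level_idc is ten times the nominal AVC level, which is why the log prints level / 10 and level % 10. A tiny illustration with example values (the byte values here are assumptions):

    #include <cstdint>
    #include <cstdio>

    // level_idc is ten times the nominal AVC level: 31 -> "3.1", 40 -> "4.0".
    int main() {
        uint8_t profile = 66;  // example profile_idc; 66 corresponds to Baseline
        uint8_t level = 31;    // example level_idc

        printf("%s-profile level %d.%d\n",
               profile == 66 ? "Baseline" : "Other", level / 10, level % 10);
        return 0;
    }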
diff --git a/media/libstagefright/codecs/aacenc/AACEncoder.cpp b/media/libstagefright/codecs/aacenc/AACEncoder.cpp
index df9f107..9524884 100644
--- a/media/libstagefright/codecs/aacenc/AACEncoder.cpp
+++ b/media/libstagefright/codecs/aacenc/AACEncoder.cpp
@@ -36,6 +36,7 @@
mStarted(false),
mBufferGroup(NULL),
mInputBuffer(NULL),
+ mInputFrame(NULL),
mEncoderHandle(NULL),
mApiHandle(NULL),
mMemOperator(NULL) {
@@ -45,6 +46,7 @@
CHECK(mApiHandle == NULL && mEncoderHandle == NULL);
CHECK(mMeta->findInt32(kKeySampleRate, &mSampleRate));
CHECK(mMeta->findInt32(kKeyChannelCount, &mChannels));
+ CHECK(mChannels <= 2 && mChannels >= 1);
CHECK(mMeta->findInt32(kKeyBitRate, &mBitRate));
mApiHandle = new VO_AUDIO_CODECAPI;
@@ -145,6 +147,10 @@
mNumInputSamples = 0;
mAnchorTimeUs = 0;
mFrameCount = 0;
+
+ mInputFrame = new int16_t[mChannels * kNumSamplesPerFrame];
+ CHECK(mInputFrame != NULL);
+
mSource->start(params);
mStarted = true;
@@ -176,6 +182,10 @@
mApiHandle = NULL;
mStarted = false;
+ if (mInputFrame) {
+ delete[] mInputFrame;
+ mInputFrame = NULL;
+ }
return OK;
}
@@ -222,7 +232,8 @@
buffer->meta_data()->setInt32(kKeyIsCodecConfig, false);
}
- while (mNumInputSamples < kNumSamplesPerFrame) {
+ const int32_t nSamples = mChannels * kNumSamplesPerFrame;
+ while (mNumInputSamples < nSamples) {
if (mInputBuffer == NULL) {
if (mSource->read(&mInputBuffer, options) != OK) {
if (mNumInputSamples == 0) {
@@ -231,7 +242,7 @@
}
memset(&mInputFrame[mNumInputSamples],
0,
- sizeof(int16_t) * (kNumSamplesPerFrame - mNumInputSamples));
+ sizeof(int16_t) * (nSamples - mNumInputSamples));
mNumInputSamples = 0;
break;
}
@@ -250,8 +261,7 @@
} else {
readFromSource = false;
}
- size_t copy =
- (kNumSamplesPerFrame - mNumInputSamples) * sizeof(int16_t);
+ size_t copy = (nSamples - mNumInputSamples) * sizeof(int16_t);
if (copy > mInputBuffer->range_length()) {
copy = mInputBuffer->range_length();
@@ -271,8 +281,8 @@
mInputBuffer = NULL;
}
mNumInputSamples += copy / sizeof(int16_t);
- if (mNumInputSamples >= kNumSamplesPerFrame) {
- mNumInputSamples %= kNumSamplesPerFrame;
+ if (mNumInputSamples >= nSamples) {
+ mNumInputSamples %= nSamples;
break;
}
}
@@ -280,7 +290,7 @@
VO_CODECBUFFER inputData;
memset(&inputData, 0, sizeof(inputData));
inputData.Buffer = (unsigned char*) mInputFrame;
- inputData.Length = kNumSamplesPerFrame * sizeof(int16_t);
+ inputData.Length = nSamples * sizeof(int16_t);
CHECK(VO_ERR_NONE == mApiHandle->SetInputData(mEncoderHandle,&inputData));
VO_CODECBUFFER outputData;
@@ -289,15 +299,21 @@
memset(&outputInfo, 0, sizeof(outputInfo));
VO_U32 ret = VO_ERR_NONE;
- outputData.Buffer = outPtr;
- outputData.Length = buffer->size();
- ret = mApiHandle->GetOutputData(mEncoderHandle, &outputData, &outputInfo);
- CHECK(ret == VO_ERR_NONE || ret == VO_ERR_INPUT_BUFFER_SMALL);
- CHECK(outputData.Length != 0);
- buffer->set_range(0, outputData.Length);
+ size_t nOutputBytes = 0;
+ do {
+ outputData.Buffer = outPtr;
+ outputData.Length = buffer->size() - nOutputBytes;
+ ret = mApiHandle->GetOutputData(mEncoderHandle, &outputData, &outputInfo);
+ if (ret == VO_ERR_NONE) {
+ outPtr += outputData.Length;
+ nOutputBytes += outputData.Length;
+ }
+ } while (ret != VO_ERR_INPUT_BUFFER_SMALL);
+ buffer->set_range(0, nOutputBytes);
int64_t mediaTimeUs =
((mFrameCount - 1) * 1000000LL * kNumSamplesPerFrame) / mSampleRate;
+
buffer->meta_data()->setInt64(kKeyTime, mAnchorTimeUs + mediaTimeUs);
if (readFromSource && wallClockTimeUs != -1) {
buffer->meta_data()->setInt64(kKeyDriftTime, mediaTimeUs - wallClockTimeUs);
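
The AACEncoder fix above sizes the PCM staging buffer by channel count and drains the encoder until it reports VO_ERR_INPUT_BUFFER_SMALL. A small sizing sketch for the input side; the per-channel frame length is assumed to be 1024 samples, which is what kNumSamplesPerFrame is expected to be for AAC:

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    // One AAC frame consumes kNumSamplesPerFrame samples per channel.
    int main() {
        const int32_t kNumSamplesPerFrame = 1024;  // assumption
        int32_t channels = 2;                      // example: stereo input
        int32_t sampleRate = 44100;                // example sample rate

        int32_t nSamples = channels * kNumSamplesPerFrame;   // 2048 int16 samples
        size_t bufferBytes = nSamples * sizeof(int16_t);     // 4096 bytes
        int64_t frameDurationUs =
            1000000LL * kNumSamplesPerFrame / sampleRate;    // about 23.2 ms

        printf("%d samples, %zu bytes, %lld us per frame\n",
               nSamples, bufferBytes, (long long)frameDurationUs);
        return 0;
    }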
diff --git a/media/libstagefright/codecs/avc/dec/AVCDecoder.cpp b/media/libstagefright/codecs/avc/dec/AVCDecoder.cpp
index 868c514..5bbba35 100644
--- a/media/libstagefright/codecs/avc/dec/AVCDecoder.cpp
+++ b/media/libstagefright/codecs/avc/dec/AVCDecoder.cpp
@@ -73,6 +73,7 @@
CHECK(mSource->getFormat()->findInt32(kKeyHeight, &height));
mFormat->setInt32(kKeyWidth, width);
mFormat->setInt32(kKeyHeight, height);
+ mFormat->setRect(kKeyCropRect, 0, 0, width - 1, height - 1);
mFormat->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420Planar);
mFormat->setCString(kKeyDecoderComponent, "AVCDecoder");
@@ -418,16 +419,32 @@
crop_top = crop_left = 0;
}
- int32_t aligned_width = (crop_right - crop_left + 1 + 15) & ~15;
- int32_t aligned_height = (crop_bottom - crop_top + 1 + 15) & ~15;
+ int32_t prevCropLeft, prevCropTop;
+ int32_t prevCropRight, prevCropBottom;
+ if (!mFormat->findRect(
+ kKeyCropRect,
+ &prevCropLeft, &prevCropTop,
+ &prevCropRight, &prevCropBottom)) {
+ prevCropLeft = prevCropTop = 0;
+ prevCropRight = width - 1;
+ prevCropBottom = height - 1;
+ }
int32_t oldWidth, oldHeight;
CHECK(mFormat->findInt32(kKeyWidth, &oldWidth));
CHECK(mFormat->findInt32(kKeyHeight, &oldHeight));
- if (oldWidth != aligned_width || oldHeight != aligned_height) {
- mFormat->setInt32(kKeyWidth, aligned_width);
- mFormat->setInt32(kKeyHeight, aligned_height);
+ if (oldWidth != width || oldHeight != height
+ || prevCropLeft != crop_left
+ || prevCropTop != crop_top
+ || prevCropRight != crop_right
+ || prevCropBottom != crop_bottom) {
+ mFormat->setRect(
+ kKeyCropRect,
+ crop_left, crop_top, crop_right, crop_bottom);
+
+ mFormat->setInt32(kKeyWidth, width);
+ mFormat->setInt32(kKeyHeight, height);
err = INFO_FORMAT_CHANGED;
} else {
diff --git a/media/libstagefright/codecs/avc/enc/AVCEncoder.cpp b/media/libstagefright/codecs/avc/enc/AVCEncoder.cpp
index 52a391f..e6a0976 100644
--- a/media/libstagefright/codecs/avc/enc/AVCEncoder.cpp
+++ b/media/libstagefright/codecs/avc/enc/AVCEncoder.cpp
@@ -33,6 +33,80 @@
namespace android {
+static status_t ConvertOmxAvcProfileToAvcSpecProfile(
+ int32_t omxProfile, AVCProfile* pvProfile) {
+ LOGV("ConvertOmxAvcProfileToAvcSpecProfile: %d", omxProfile);
+ switch (omxProfile) {
+ case OMX_VIDEO_AVCProfileBaseline:
+ *pvProfile = AVC_BASELINE;
+ return OK;
+ default:
+ LOGE("Unsupported omx profile: %d", omxProfile);
+ }
+ return BAD_VALUE;
+}
+
+static status_t ConvertOmxAvcLevelToAvcSpecLevel(
+ int32_t omxLevel, AVCLevel *pvLevel) {
+ LOGV("ConvertOmxAvcLevelToAvcSpecLevel: %d", omxLevel);
+ AVCLevel level = AVC_LEVEL5_1;
+ switch (omxLevel) {
+ case OMX_VIDEO_AVCLevel1:
+ level = AVC_LEVEL1_B;
+ break;
+ case OMX_VIDEO_AVCLevel1b:
+ level = AVC_LEVEL1;
+ break;
+ case OMX_VIDEO_AVCLevel11:
+ level = AVC_LEVEL1_1;
+ break;
+ case OMX_VIDEO_AVCLevel12:
+ level = AVC_LEVEL1_2;
+ break;
+ case OMX_VIDEO_AVCLevel13:
+ level = AVC_LEVEL1_3;
+ break;
+ case OMX_VIDEO_AVCLevel2:
+ level = AVC_LEVEL2;
+ break;
+ case OMX_VIDEO_AVCLevel21:
+ level = AVC_LEVEL2_1;
+ break;
+ case OMX_VIDEO_AVCLevel22:
+ level = AVC_LEVEL2_2;
+ break;
+ case OMX_VIDEO_AVCLevel3:
+ level = AVC_LEVEL3;
+ break;
+ case OMX_VIDEO_AVCLevel31:
+ level = AVC_LEVEL3_1;
+ break;
+ case OMX_VIDEO_AVCLevel32:
+ level = AVC_LEVEL3_2;
+ break;
+ case OMX_VIDEO_AVCLevel4:
+ level = AVC_LEVEL4;
+ break;
+ case OMX_VIDEO_AVCLevel41:
+ level = AVC_LEVEL4_1;
+ break;
+ case OMX_VIDEO_AVCLevel42:
+ level = AVC_LEVEL4_2;
+ break;
+ case OMX_VIDEO_AVCLevel5:
+ level = AVC_LEVEL5;
+ break;
+ case OMX_VIDEO_AVCLevel51:
+ level = AVC_LEVEL5_1;
+ break;
+ default:
+ LOGE("Unknown omx level: %d", omxLevel);
+ return BAD_VALUE;
+ }
+ *pvLevel = level;
+ return OK;
+}
+
inline static void ConvertYUV420SemiPlanarToYUV420Planar(
uint8_t *inyuv, uint8_t* outyuv,
int32_t width, int32_t height) {
@@ -133,7 +207,7 @@
LOGV("initCheck");
CHECK(meta->findInt32(kKeyWidth, &mVideoWidth));
CHECK(meta->findInt32(kKeyHeight, &mVideoHeight));
- CHECK(meta->findInt32(kKeySampleRate, &mVideoFrameRate));
+ CHECK(meta->findInt32(kKeyFrameRate, &mVideoFrameRate));
CHECK(meta->findInt32(kKeyBitRate, &mVideoBitRate));
// XXX: Add more color format support
@@ -231,10 +305,16 @@
mEncParams->level = AVC_LEVEL3_2;
int32_t profile, level;
if (meta->findInt32(kKeyVideoProfile, &profile)) {
- mEncParams->profile = (AVCProfile) profile;
+ if (OK != ConvertOmxAvcProfileToAvcSpecProfile(
+ profile, &mEncParams->profile)) {
+ return BAD_VALUE;
+ }
}
if (meta->findInt32(kKeyVideoLevel, &level)) {
- mEncParams->level = (AVCLevel) level;
+ if (OK != ConvertOmxAvcLevelToAvcSpecLevel(
+ level, &mEncParams->level)) {
+ return BAD_VALUE;
+ }
}
@@ -242,7 +322,7 @@
mFormat->setInt32(kKeyWidth, mVideoWidth);
mFormat->setInt32(kKeyHeight, mVideoHeight);
mFormat->setInt32(kKeyBitRate, mVideoBitRate);
- mFormat->setInt32(kKeySampleRate, mVideoFrameRate);
+ mFormat->setInt32(kKeyFrameRate, mVideoFrameRate);
mFormat->setInt32(kKeyColorFormat, mVideoColorFormat);
mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
mFormat->setCString(kKeyDecoderComponent, "AVCEncoder");
diff --git a/media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp b/media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp
index a011137..c7a475b 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp
+++ b/media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp
@@ -32,6 +32,109 @@
namespace android {
+static status_t ConvertOmxProfileLevel(
+ MP4EncodingMode mode,
+ int32_t omxProfile,
+ int32_t omxLevel,
+ ProfileLevelType* pvProfileLevel) {
+ LOGV("ConvertOmxProfileLevel: %d/%d/%d", mode, omxProfile, omxLevel);
+ ProfileLevelType profileLevel;
+ if (mode == H263_MODE) {
+ switch (omxProfile) {
+ case OMX_VIDEO_H263ProfileBaseline:
+ if (omxLevel > OMX_VIDEO_H263Level45) {
+ LOGE("Unsupported level (%d) for H263", omxLevel);
+ return BAD_VALUE;
+ } else {
+ LOGW("PV does not support level configuration for H263");
+ profileLevel = CORE_PROFILE_LEVEL2;
+ break;
+ }
+ break;
+ default:
+ LOGE("Unsupported profile (%d) for H263", omxProfile);
+ return BAD_VALUE;
+ }
+ } else { // MPEG4
+ switch (omxProfile) {
+ case OMX_VIDEO_MPEG4ProfileSimple:
+ switch (omxLevel) {
+ case OMX_VIDEO_MPEG4Level0b:
+ profileLevel = SIMPLE_PROFILE_LEVEL0;
+ break;
+ case OMX_VIDEO_MPEG4Level1:
+ profileLevel = SIMPLE_PROFILE_LEVEL1;
+ break;
+ case OMX_VIDEO_MPEG4Level2:
+ profileLevel = SIMPLE_PROFILE_LEVEL2;
+ break;
+ case OMX_VIDEO_MPEG4Level3:
+ profileLevel = SIMPLE_PROFILE_LEVEL3;
+ break;
+ default:
+ LOGE("Unsupported level (%d) for MPEG4 simple profile",
+ omxLevel);
+ return BAD_VALUE;
+ }
+ break;
+ case OMX_VIDEO_MPEG4ProfileSimpleScalable:
+ switch (omxLevel) {
+ case OMX_VIDEO_MPEG4Level0b:
+ profileLevel = SIMPLE_SCALABLE_PROFILE_LEVEL0;
+ break;
+ case OMX_VIDEO_MPEG4Level1:
+ profileLevel = SIMPLE_SCALABLE_PROFILE_LEVEL1;
+ break;
+ case OMX_VIDEO_MPEG4Level2:
+ profileLevel = SIMPLE_SCALABLE_PROFILE_LEVEL2;
+ break;
+ default:
+ LOGE("Unsupported level (%d) for MPEG4 simple "
+ "scalable profile", omxLevel);
+ return BAD_VALUE;
+ }
+ break;
+ case OMX_VIDEO_MPEG4ProfileCore:
+ switch (omxLevel) {
+ case OMX_VIDEO_MPEG4Level1:
+ profileLevel = CORE_PROFILE_LEVEL1;
+ break;
+ case OMX_VIDEO_MPEG4Level2:
+ profileLevel = CORE_PROFILE_LEVEL2;
+ break;
+ default:
+ LOGE("Unsupported level (%d) for MPEG4 core "
+ "profile", omxLevel);
+ return BAD_VALUE;
+ }
+ break;
+ case OMX_VIDEO_MPEG4ProfileCoreScalable:
+ switch (omxLevel) {
+ case OMX_VIDEO_MPEG4Level1:
+ profileLevel = CORE_SCALABLE_PROFILE_LEVEL1;
+ break;
+ case OMX_VIDEO_MPEG4Level2:
+ profileLevel = CORE_SCALABLE_PROFILE_LEVEL2;
+ break;
+ case OMX_VIDEO_MPEG4Level3:
+ profileLevel = CORE_SCALABLE_PROFILE_LEVEL3;
+ break;
+ default:
+ LOGE("Unsupported level (%d) for MPEG4 core "
+ "scalable profile", omxLevel);
+ return BAD_VALUE;
+ }
+ break;
+ default:
+ LOGE("Unsupported MPEG4 profile (%d)", omxProfile);
+ return BAD_VALUE;
+ }
+ }
+
+ *pvProfileLevel = profileLevel;
+ return OK;
+}
+
inline static void ConvertYUV420SemiPlanarToYUV420Planar(
uint8_t *inyuv, uint8_t* outyuv,
int32_t width, int32_t height) {
@@ -97,7 +200,7 @@
LOGV("initCheck");
CHECK(meta->findInt32(kKeyWidth, &mVideoWidth));
CHECK(meta->findInt32(kKeyHeight, &mVideoHeight));
- CHECK(meta->findInt32(kKeySampleRate, &mVideoFrameRate));
+ CHECK(meta->findInt32(kKeyFrameRate, &mVideoFrameRate));
CHECK(meta->findInt32(kKeyBitRate, &mVideoBitRate));
// XXX: Add more color format support
@@ -150,9 +253,14 @@
// If profile and level setting is not correct, failure
// is reported when the encoder is initialized.
mEncParams->profile_level = CORE_PROFILE_LEVEL2;
- int32_t profileLevel;
- if (meta->findInt32(kKeyVideoLevel, &profileLevel)) {
- mEncParams->profile_level = (ProfileLevelType)profileLevel;
+ int32_t profile, level;
+ if (meta->findInt32(kKeyVideoProfile, &profile) &&
+ meta->findInt32(kKeyVideoLevel, &level)) {
+ if (OK != ConvertOmxProfileLevel(
+ mEncParams->encMode, profile, level,
+ &mEncParams->profile_level)) {
+ return BAD_VALUE;
+ }
}
mEncParams->packetSize = 32;
@@ -191,7 +299,7 @@
mFormat->setInt32(kKeyWidth, mVideoWidth);
mFormat->setInt32(kKeyHeight, mVideoHeight);
mFormat->setInt32(kKeyBitRate, mVideoBitRate);
- mFormat->setInt32(kKeySampleRate, mVideoFrameRate);
+ mFormat->setInt32(kKeyFrameRate, mVideoFrameRate);
mFormat->setInt32(kKeyColorFormat, mVideoColorFormat);
mFormat->setCString(kKeyMIMEType, mime);
diff --git a/media/libstagefright/colorconversion/Android.mk b/media/libstagefright/colorconversion/Android.mk
index 0dcbd73..62ba40f 100644
--- a/media/libstagefright/colorconversion/Android.mk
+++ b/media/libstagefright/colorconversion/Android.mk
@@ -6,17 +6,9 @@
SoftwareRenderer.cpp
LOCAL_C_INCLUDES := \
- $(TOP)/frameworks/base/include/media/stagefright/openmax
-
-LOCAL_SHARED_LIBRARIES := \
- libbinder \
- libmedia \
- libutils \
- libui \
- libcutils \
- libsurfaceflinger_client\
- libcamera_client
+ $(TOP)/frameworks/base/include/media/stagefright/openmax \
+ $(TOP)/hardware/msm7k
LOCAL_MODULE:= libstagefright_color_conversion
-include $(BUILD_SHARED_LIBRARY)
+include $(BUILD_STATIC_LIBRARY)
diff --git a/media/libstagefright/colorconversion/ColorConverter.cpp b/media/libstagefright/colorconversion/ColorConverter.cpp
index 5b16997..600f040 100644
--- a/media/libstagefright/colorconversion/ColorConverter.cpp
+++ b/media/libstagefright/colorconversion/ColorConverter.cpp
@@ -50,31 +50,64 @@
}
}
-void ColorConverter::convert(
+ColorConverter::BitmapParams::BitmapParams(
+ void *bits,
size_t width, size_t height,
- const void *srcBits, size_t srcSkip,
- void *dstBits, size_t dstSkip) {
+ size_t cropLeft, size_t cropTop,
+ size_t cropRight, size_t cropBottom)
+ : mBits(bits),
+ mWidth(width),
+ mHeight(height),
+ mCropLeft(cropLeft),
+ mCropTop(cropTop),
+ mCropRight(cropRight),
+ mCropBottom(cropBottom) {
+}
+
+size_t ColorConverter::BitmapParams::cropWidth() const {
+ return mCropRight - mCropLeft + 1;
+}
+
+size_t ColorConverter::BitmapParams::cropHeight() const {
+ return mCropBottom - mCropTop + 1;
+}
+
+void ColorConverter::convert(
+ const void *srcBits,
+ size_t srcWidth, size_t srcHeight,
+ size_t srcCropLeft, size_t srcCropTop,
+ size_t srcCropRight, size_t srcCropBottom,
+ void *dstBits,
+ size_t dstWidth, size_t dstHeight,
+ size_t dstCropLeft, size_t dstCropTop,
+ size_t dstCropRight, size_t dstCropBottom) {
CHECK_EQ(mDstFormat, OMX_COLOR_Format16bitRGB565);
+ BitmapParams src(
+ const_cast<void *>(srcBits),
+ srcWidth, srcHeight,
+ srcCropLeft, srcCropTop, srcCropRight, srcCropBottom);
+
+ BitmapParams dst(
+ dstBits,
+ dstWidth, dstHeight,
+ dstCropLeft, dstCropTop, dstCropRight, dstCropBottom);
+
switch (mSrcFormat) {
case OMX_COLOR_FormatYUV420Planar:
- convertYUV420Planar(
- width, height, srcBits, srcSkip, dstBits, dstSkip);
+ convertYUV420Planar(src, dst);
break;
case OMX_COLOR_FormatCbYCrY:
- convertCbYCrY(
- width, height, srcBits, srcSkip, dstBits, dstSkip);
+ convertCbYCrY(src, dst);
break;
case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
- convertQCOMYUV420SemiPlanar(
- width, height, srcBits, srcSkip, dstBits, dstSkip);
+ convertQCOMYUV420SemiPlanar(src, dst);
break;
case OMX_COLOR_FormatYUV420SemiPlanar:
- convertYUV420SemiPlanar(
- width, height, srcBits, srcSkip, dstBits, dstSkip);
+ convertYUV420SemiPlanar(src, dst);
break;
default:
@@ -86,25 +119,27 @@
}
void ColorConverter::convertCbYCrY(
- size_t width, size_t height,
- const void *srcBits, size_t srcSkip,
- void *dstBits, size_t dstSkip) {
- CHECK_EQ(srcSkip, 0); // Doesn't really make sense for YUV formats.
- CHECK(dstSkip >= width * 2);
- CHECK((dstSkip & 3) == 0);
+ const BitmapParams &src, const BitmapParams &dst) {
+ // XXX Untested
uint8_t *kAdjustedClip = initClip();
- uint32_t *dst_ptr = (uint32_t *)dstBits;
+ CHECK((src.mCropLeft & 1) == 0);
+ CHECK_EQ(src.cropWidth(), dst.cropWidth());
+ CHECK_EQ(src.cropHeight(), dst.cropHeight());
- const uint8_t *src = (const uint8_t *)srcBits;
+ uint32_t *dst_ptr = (uint32_t *)dst.mBits
+ + (dst.mCropTop * dst.mWidth + dst.mCropLeft) / 2;
- for (size_t y = 0; y < height; ++y) {
- for (size_t x = 0; x < width; x += 2) {
- signed y1 = (signed)src[2 * x + 1] - 16;
- signed y2 = (signed)src[2 * x + 3] - 16;
- signed u = (signed)src[2 * x] - 128;
- signed v = (signed)src[2 * x + 2] - 128;
+ const uint8_t *src_ptr = (const uint8_t *)src.mBits
+ + (src.mCropTop * dst.mWidth + src.mCropLeft) * 2;
+
+ for (size_t y = 0; y < src.cropHeight(); ++y) {
+ for (size_t x = 0; x < src.cropWidth(); x += 2) {
+ signed y1 = (signed)src_ptr[2 * x + 1] - 16;
+ signed y2 = (signed)src_ptr[2 * x + 3] - 16;
+ signed u = (signed)src_ptr[2 * x] - 128;
+ signed v = (signed)src_ptr[2 * x + 2] - 128;
signed u_b = u * 517;
signed u_g = -u * 100;
@@ -134,32 +169,35 @@
dst_ptr[x / 2] = (rgb2 << 16) | rgb1;
}
- src += width * 2;
- dst_ptr += dstSkip / 4;
+ src_ptr += src.mWidth * 2;
+ dst_ptr += dst.mWidth / 2;
}
}
void ColorConverter::convertYUV420Planar(
- size_t width, size_t height,
- const void *srcBits, size_t srcSkip,
- void *dstBits, size_t dstSkip) {
- CHECK_EQ(srcSkip, 0); // Doesn't really make sense for YUV formats.
- CHECK(dstSkip >= width * 2);
- CHECK((dstSkip & 3) == 0);
-
+ const BitmapParams &src, const BitmapParams &dst) {
uint8_t *kAdjustedClip = initClip();
- uint32_t *dst_ptr = (uint32_t *)dstBits;
- const uint8_t *src_y = (const uint8_t *)srcBits;
+ CHECK((dst.mWidth & 3) == 0);
+ CHECK((src.mCropLeft & 1) == 0);
+ CHECK_EQ(src.cropWidth(), dst.cropWidth());
+ CHECK_EQ(src.cropHeight(), dst.cropHeight());
+
+ uint32_t *dst_ptr = (uint32_t *)dst.mBits
+ + (dst.mCropTop * dst.mWidth + dst.mCropLeft) / 2;
+
+ const uint8_t *src_y =
+ (const uint8_t *)src.mBits + src.mCropTop * src.mWidth + src.mCropLeft;
const uint8_t *src_u =
- (const uint8_t *)src_y + width * height;
+ (const uint8_t *)src_y + src.mWidth * src.mHeight
+ + src.mCropTop * (src.mWidth / 2) + src.mCropLeft / 2;
const uint8_t *src_v =
- (const uint8_t *)src_u + (width / 2) * (height / 2);
+ src_u + (src.mWidth / 2) * (src.mHeight / 2);
- for (size_t y = 0; y < height; ++y) {
- for (size_t x = 0; x < width; x += 2) {
+ for (size_t y = 0; y < src.cropHeight(); ++y) {
+ for (size_t x = 0; x < src.cropWidth(); x += 2) {
// B = 1.164 * (Y - 16) + 2.018 * (U - 128)
// G = 1.164 * (Y - 16) - 0.813 * (V - 128) - 0.391 * (U - 128)
// R = 1.164 * (Y - 16) + 1.596 * (V - 128)
@@ -212,35 +250,38 @@
dst_ptr[x / 2] = (rgb2 << 16) | rgb1;
}
- src_y += width;
+ src_y += src.mWidth;
if (y & 1) {
- src_u += width / 2;
- src_v += width / 2;
+ src_u += src.mWidth / 2;
+ src_v += src.mWidth / 2;
}
- dst_ptr += dstSkip / 4;
+ dst_ptr += dst.mWidth / 2;
}
}
void ColorConverter::convertQCOMYUV420SemiPlanar(
- size_t width, size_t height,
- const void *srcBits, size_t srcSkip,
- void *dstBits, size_t dstSkip) {
- CHECK_EQ(srcSkip, 0); // Doesn't really make sense for YUV formats.
- CHECK(dstSkip >= width * 2);
- CHECK((dstSkip & 3) == 0);
-
+ const BitmapParams &src, const BitmapParams &dst) {
uint8_t *kAdjustedClip = initClip();
- uint32_t *dst_ptr = (uint32_t *)dstBits;
- const uint8_t *src_y = (const uint8_t *)srcBits;
+ CHECK((dst.mWidth & 3) == 0);
+ CHECK((src.mCropLeft & 1) == 0);
+ CHECK_EQ(src.cropWidth(), dst.cropWidth());
+ CHECK_EQ(src.cropHeight(), dst.cropHeight());
+
+ uint32_t *dst_ptr = (uint32_t *)dst.mBits
+ + (dst.mCropTop * dst.mWidth + dst.mCropLeft) / 2;
+
+ const uint8_t *src_y =
+ (const uint8_t *)src.mBits + src.mCropTop * src.mWidth + src.mCropLeft;
const uint8_t *src_u =
- (const uint8_t *)src_y + width * height;
+ (const uint8_t *)src_y + src.mWidth * src.mHeight
+ + src.mCropTop * src.mWidth + src.mCropLeft;
- for (size_t y = 0; y < height; ++y) {
- for (size_t x = 0; x < width; x += 2) {
+ for (size_t y = 0; y < src.cropHeight(); ++y) {
+ for (size_t x = 0; x < src.cropWidth(); x += 2) {
signed y1 = (signed)src_y[x] - 16;
signed y2 = (signed)src_y[x + 1] - 16;
@@ -275,34 +316,39 @@
dst_ptr[x / 2] = (rgb2 << 16) | rgb1;
}
- src_y += width;
+ src_y += src.mWidth;
if (y & 1) {
- src_u += width;
+ src_u += src.mWidth;
}
- dst_ptr += dstSkip / 4;
+ dst_ptr += dst.mWidth / 2;
}
}
void ColorConverter::convertYUV420SemiPlanar(
- size_t width, size_t height,
- const void *srcBits, size_t srcSkip,
- void *dstBits, size_t dstSkip) {
- CHECK_EQ(srcSkip, 0); // Doesn't really make sense for YUV formats.
- CHECK(dstSkip >= width * 2);
- CHECK((dstSkip & 3) == 0);
+ const BitmapParams &src, const BitmapParams &dst) {
+ // XXX Untested
uint8_t *kAdjustedClip = initClip();
- uint32_t *dst_ptr = (uint32_t *)dstBits;
- const uint8_t *src_y = (const uint8_t *)srcBits;
+ CHECK((dst.mWidth & 3) == 0);
+ CHECK((src.mCropLeft & 1) == 0);
+ CHECK_EQ(src.cropWidth(), dst.cropWidth());
+ CHECK_EQ(src.cropHeight(), dst.cropHeight());
+
+ uint32_t *dst_ptr = (uint32_t *)dst.mBits
+ + (dst.mCropTop * dst.mWidth + dst.mCropLeft) / 2;
+
+ const uint8_t *src_y =
+ (const uint8_t *)src.mBits + src.mCropTop * src.mWidth + src.mCropLeft;
const uint8_t *src_u =
- (const uint8_t *)src_y + width * height;
+ (const uint8_t *)src_y + src.mWidth * src.mHeight
+ + src.mCropTop * src.mWidth + src.mCropLeft;
- for (size_t y = 0; y < height; ++y) {
- for (size_t x = 0; x < width; x += 2) {
+ for (size_t y = 0; y < src.cropHeight(); ++y) {
+ for (size_t x = 0; x < src.cropWidth(); x += 2) {
signed y1 = (signed)src_y[x] - 16;
signed y2 = (signed)src_y[x + 1] - 16;
@@ -337,13 +383,13 @@
dst_ptr[x / 2] = (rgb2 << 16) | rgb1;
}
- src_y += width;
+ src_y += src.mWidth;
if (y & 1) {
- src_u += width;
+ src_u += src.mWidth;
}
- dst_ptr += dstSkip / 4;
+ dst_ptr += dst.mWidth / 2;
}
}
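
The reworked ColorConverter describes source and destination as full bitmaps plus inclusive crop rectangles, and the RGB565 output path writes two pixels at a time as a uint32_t, which is why the crop left edge has to be even. A minimal sketch of those conventions with example numbers (the sizes are assumptions):

    #include <cstddef>
    #include <cstdio>

    // Inclusive crop coordinates and the packed-pixel start index.
    int main() {
        size_t width = 640;                      // example destination bitmap width
        size_t cropLeft = 0, cropTop = 120;      // example crop (assumption)
        size_t cropRight = 639, cropBottom = 359;

        size_t cropWidth  = cropRight - cropLeft + 1;     // 640
        size_t cropHeight = cropBottom - cropTop + 1;     // 240

        // Index of the first uint32_t (pixel pair) written into the destination.
        size_t startPair = (cropTop * width + cropLeft) / 2;  // 38400

        printf("crop %zu x %zu, first pixel pair at index %zu\n",
               cropWidth, cropHeight, startPair);
        return 0;
    }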
diff --git a/media/libstagefright/colorconversion/SoftwareRenderer.cpp b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
index 93ec79d..70408d7 100644
--- a/media/libstagefright/colorconversion/SoftwareRenderer.cpp
+++ b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
@@ -21,91 +21,129 @@
#include <binder/MemoryHeapBase.h>
#include <binder/MemoryHeapPmem.h>
-#include <media/stagefright/MediaDebug.h>
-#include <surfaceflinger/ISurface.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MetaData.h>
+#include <surfaceflinger/Surface.h>
+#include <ui/android_native_buffer.h>
+#include <ui/GraphicBufferMapper.h>
namespace android {
SoftwareRenderer::SoftwareRenderer(
- OMX_COLOR_FORMATTYPE colorFormat,
- const sp<ISurface> &surface,
- size_t displayWidth, size_t displayHeight,
- size_t decodedWidth, size_t decodedHeight,
- int32_t rotationDegrees)
- : mInitCheck(NO_INIT),
- mColorFormat(colorFormat),
- mConverter(colorFormat, OMX_COLOR_Format16bitRGB565),
- mISurface(surface),
- mDisplayWidth(displayWidth),
- mDisplayHeight(displayHeight),
- mDecodedWidth(decodedWidth),
- mDecodedHeight(decodedHeight),
- mFrameSize(mDecodedWidth * mDecodedHeight * 2), // RGB565
- mIndex(0) {
- mMemoryHeap = new MemoryHeapBase("/dev/pmem_adsp", 2 * mFrameSize);
- if (mMemoryHeap->heapID() < 0) {
- LOGI("Creating physical memory heap failed, reverting to regular heap.");
- mMemoryHeap = new MemoryHeapBase(2 * mFrameSize);
- } else {
- sp<MemoryHeapPmem> pmemHeap = new MemoryHeapPmem(mMemoryHeap);
- pmemHeap->slap();
- mMemoryHeap = pmemHeap;
+ const sp<Surface> &surface, const sp<MetaData> &meta)
+ : mConverter(NULL),
+ mYUVMode(None),
+ mSurface(surface) {
+ int32_t tmp;
+ CHECK(meta->findInt32(kKeyColorFormat, &tmp));
+ mColorFormat = (OMX_COLOR_FORMATTYPE)tmp;
+
+ CHECK(meta->findInt32(kKeyWidth, &mWidth));
+ CHECK(meta->findInt32(kKeyHeight, &mHeight));
+
+ if (!meta->findRect(
+ kKeyCropRect,
+ &mCropLeft, &mCropTop, &mCropRight, &mCropBottom)) {
+ mCropLeft = mCropTop = 0;
+ mCropRight = mWidth - 1;
+ mCropBottom = mHeight - 1;
}
- CHECK(mISurface.get() != NULL);
- CHECK(mDecodedWidth > 0);
- CHECK(mDecodedHeight > 0);
- CHECK(mMemoryHeap->heapID() >= 0);
- CHECK(mConverter.isValid());
+ int32_t rotationDegrees;
+ if (!meta->findInt32(kKeyRotation, &rotationDegrees)) {
+ rotationDegrees = 0;
+ }
- uint32_t orientation;
+ int halFormat;
+ switch (mColorFormat) {
+ default:
+ halFormat = HAL_PIXEL_FORMAT_RGB_565;
+
+ mConverter = new ColorConverter(
+ mColorFormat, OMX_COLOR_Format16bitRGB565);
+ CHECK(mConverter->isValid());
+ break;
+ }
+
+ CHECK(mSurface.get() != NULL);
+ CHECK(mWidth > 0);
+ CHECK(mHeight > 0);
+ CHECK(mConverter == NULL || mConverter->isValid());
+
+ CHECK_EQ(0,
+ native_window_set_usage(
+ mSurface.get(),
+ GRALLOC_USAGE_SW_READ_NEVER | GRALLOC_USAGE_SW_WRITE_OFTEN
+ | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP));
+
+ CHECK_EQ(0, native_window_set_buffer_count(mSurface.get(), 2));
+
+ // Width must be multiple of 32???
+ CHECK_EQ(0, native_window_set_buffers_geometry(
+ mSurface.get(),
+ mCropRight - mCropLeft + 1,
+ mCropBottom - mCropTop + 1,
+ halFormat));
+
+ uint32_t transform;
switch (rotationDegrees) {
- case 0: orientation = ISurface::BufferHeap::ROT_0; break;
- case 90: orientation = ISurface::BufferHeap::ROT_90; break;
- case 180: orientation = ISurface::BufferHeap::ROT_180; break;
- case 270: orientation = ISurface::BufferHeap::ROT_270; break;
- default: orientation = ISurface::BufferHeap::ROT_0; break;
+ case 0: transform = 0; break;
+ case 90: transform = HAL_TRANSFORM_ROT_90; break;
+ case 180: transform = HAL_TRANSFORM_ROT_180; break;
+ case 270: transform = HAL_TRANSFORM_ROT_270; break;
+ default: transform = 0; break;
}
- ISurface::BufferHeap bufferHeap(
- mDisplayWidth, mDisplayHeight,
- mDecodedWidth, mDecodedHeight,
- PIXEL_FORMAT_RGB_565,
- orientation, 0,
- mMemoryHeap);
-
- status_t err = mISurface->registerBuffers(bufferHeap);
-
- if (err != OK) {
- LOGW("ISurface failed to register buffers (0x%08x)", err);
+ if (transform) {
+ CHECK_EQ(0, native_window_set_buffers_transform(
+ mSurface.get(), transform));
}
-
- mInitCheck = err;
}
SoftwareRenderer::~SoftwareRenderer() {
- mISurface->unregisterBuffers();
-}
-
-status_t SoftwareRenderer::initCheck() const {
- return mInitCheck;
+ delete mConverter;
+ mConverter = NULL;
}
void SoftwareRenderer::render(
const void *data, size_t size, void *platformPrivate) {
- if (mInitCheck != OK) {
+ android_native_buffer_t *buf;
+ int err;
+ if ((err = mSurface->dequeueBuffer(mSurface.get(), &buf)) != 0) {
+ LOGW("Surface::dequeueBuffer returned error %d", err);
return;
}
- size_t offset = mIndex * mFrameSize;
- void *dst = (uint8_t *)mMemoryHeap->getBase() + offset;
+ CHECK_EQ(0, mSurface->lockBuffer(mSurface.get(), buf));
- mConverter.convert(
- mDecodedWidth, mDecodedHeight,
- data, 0, dst, 2 * mDecodedWidth);
+ GraphicBufferMapper &mapper = GraphicBufferMapper::get();
- mISurface->postBuffer(offset);
- mIndex = 1 - mIndex;
+ Rect bounds(mWidth, mHeight);
+
+ void *dst;
+ CHECK_EQ(0, mapper.lock(
+ buf->handle, GRALLOC_USAGE_SW_WRITE_OFTEN, bounds, &dst));
+
+ if (mConverter) {
+ mConverter->convert(
+ data,
+ mWidth, mHeight,
+ mCropLeft, mCropTop, mCropRight, mCropBottom,
+ dst,
+ buf->stride, buf->height,
+ 0, 0,
+ mCropRight - mCropLeft,
+ mCropBottom - mCropTop);
+ } else {
+ TRESPASS();
+ }
+
+ CHECK_EQ(0, mapper.unlock(buf->handle));
+
+ if ((err = mSurface->queueBuffer(mSurface.get(), buf)) != 0) {
+ LOGW("Surface::queueBuffer returned error %d", err);
+ }
+ buf = NULL;
}
} // namespace android
diff --git a/media/libstagefright/httplive/Android.mk b/media/libstagefright/httplive/Android.mk
index cc7dd4f..3aabf5f 100644
--- a/media/libstagefright/httplive/Android.mk
+++ b/media/libstagefright/httplive/Android.mk
@@ -9,7 +9,8 @@
LOCAL_C_INCLUDES:= \
$(JNI_H_INCLUDE) \
$(TOP)/frameworks/base/include/media/stagefright/openmax \
- $(TOP)/frameworks/base/media/libstagefright
+ $(TOP)/frameworks/base/media/libstagefright \
+ $(TOP)/external/openssl/include
LOCAL_MODULE:= libstagefright_httplive
diff --git a/media/libstagefright/httplive/LiveSource.cpp b/media/libstagefright/httplive/LiveSource.cpp
index 29c7b04..4b4c3d2 100644
--- a/media/libstagefright/httplive/LiveSource.cpp
+++ b/media/libstagefright/httplive/LiveSource.cpp
@@ -22,9 +22,14 @@
#include "include/M3UParser.h"
#include "include/NuHTTPDataSource.h"
+#include <cutils/properties.h>
+#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/FileSource.h>
-#include <media/stagefright/MediaDebug.h>
+
+#include <ctype.h>
+#include <openssl/aes.h>
namespace android {
@@ -38,7 +43,9 @@
mSourceSize(0),
mOffsetBias(0),
mSignalDiscontinuity(false),
- mPrevBandwidthIndex(-1) {
+ mPrevBandwidthIndex(-1),
+ mAESKey((AES_KEY *)malloc(sizeof(AES_KEY))),
+ mStreamEncrypted(false) {
if (switchToNext()) {
mInitCheck = OK;
@@ -47,6 +54,8 @@
}
LiveSource::~LiveSource() {
+ free(mAESKey);
+ mAESKey = NULL;
}
status_t LiveSource::initCheck() const {
@@ -68,7 +77,77 @@
return (double)rand() / RAND_MAX;
}
-bool LiveSource::loadPlaylist(bool fetchMaster) {
+size_t LiveSource::getBandwidthIndex() {
+ if (mBandwidthItems.size() == 0) {
+ return 0;
+ }
+
+#if 1
+ int32_t bandwidthBps;
+ if (mSource != NULL && mSource->estimateBandwidth(&bandwidthBps)) {
+ LOGI("bandwidth estimated at %.2f kbps", bandwidthBps / 1024.0f);
+ } else {
+ LOGI("no bandwidth estimate.");
+ return 0; // Pick the lowest bandwidth stream by default.
+ }
+
+ char value[PROPERTY_VALUE_MAX];
+ if (property_get("media.httplive.max-bw", value, NULL)) {
+ char *end;
+ long maxBw = strtoul(value, &end, 10);
+ if (end > value && *end == '\0') {
+ if (maxBw > 0 && bandwidthBps > maxBw) {
+ LOGV("bandwidth capped to %ld bps", maxBw);
+ bandwidthBps = maxBw;
+ }
+ }
+ }
+
+ // Consider only 80% of the available bandwidth usable.
+ bandwidthBps = (bandwidthBps * 8) / 10;
+
+ // Pick the highest bandwidth stream below or equal to estimated bandwidth.
+
+ size_t index = mBandwidthItems.size() - 1;
+ while (index > 0 && mBandwidthItems.itemAt(index).mBandwidth
+ > (size_t)bandwidthBps) {
+ --index;
+ }
+#elif 0
+ // Change bandwidth at random()
+ size_t index = uniformRand() * mBandwidthItems.size();
+#elif 0
+ // There's a 50% chance to stay on the current bandwidth and
+ // a 50% chance to switch to the next higher bandwidth (wrapping around
+ // to lowest)
+ const size_t kMinIndex = 0;
+
+ size_t index;
+ if (mPrevBandwidthIndex < 0) {
+ index = kMinIndex;
+ } else if (uniformRand() < 0.5) {
+ index = (size_t)mPrevBandwidthIndex;
+ } else {
+ index = mPrevBandwidthIndex + 1;
+ if (index == mBandwidthItems.size()) {
+ index = kMinIndex;
+ }
+ }
+#elif 0
+ // Pick the highest bandwidth stream below or equal to 1.2 Mbit/sec
+
+ size_t index = mBandwidthItems.size() - 1;
+ while (index > 0 && mBandwidthItems.itemAt(index).mBandwidth > 1200000) {
+ --index;
+ }
+#else
+ size_t index = mBandwidthItems.size() - 1; // Highest bandwidth stream
+#endif
+
+ return index;
+}
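
A small sketch of the selection rule getBandwidthIndex() implements above: take 80% of the estimated bandwidth as the usable budget and pick the highest-bandwidth variant that fits, falling back to the lowest one. The standalone function, its std::vector input and the name estimatedBps are assumptions for illustration; mBandwidthItems in the patch plays the role of itemsBps and is likewise sorted by ascending bandwidth.

#include <cstdint>
#include <cstddef>
#include <vector>

size_t pickBandwidthIndex(const std::vector<size_t> &itemsBps, int32_t estimatedBps) {
    if (itemsBps.empty()) {
        return 0;
    }
    // Consider only 80% of the measured bandwidth usable (same margin as the patch).
    int32_t budget = (estimatedBps * 8) / 10;

    // Walk down from the highest-bandwidth variant until one fits the budget.
    size_t index = itemsBps.size() - 1;
    while (index > 0 && itemsBps[index] > (size_t)budget) {
        --index;
    }
    return index;   // falls back to the lowest variant if nothing fits
}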
+
+bool LiveSource::loadPlaylist(bool fetchMaster, size_t bandwidthIndex) {
mSignalDiscontinuity = false;
mPlaylist.clear();
@@ -112,49 +191,35 @@
mBandwidthItems.sort(SortByBandwidth);
+#if 1 // XXX
+ if (mBandwidthItems.size() > 1) {
+ // Remove the lowest bandwidth stream, this is sometimes
+ // an AAC program stream, which we don't support at this point.
+ mBandwidthItems.removeItemsAt(0);
+ }
+#endif
+
for (size_t i = 0; i < mBandwidthItems.size(); ++i) {
const BandwidthItem &item = mBandwidthItems.itemAt(i);
LOGV("item #%d: %s", i, item.mURI.c_str());
}
+
+ bandwidthIndex = getBandwidthIndex();
}
}
if (mBandwidthItems.size() > 0) {
-#if 0
- // Change bandwidth at random()
- size_t index = uniformRand() * mBandwidthItems.size();
-#elif 0
- // There's a 50% chance to stay on the current bandwidth and
- // a 50% chance to switch to the next higher bandwidth (wrapping around
- // to lowest)
- size_t index;
- if (uniformRand() < 0.5) {
- index = mPrevBandwidthIndex < 0 ? 0 : (size_t)mPrevBandwidthIndex;
- } else {
- if (mPrevBandwidthIndex < 0) {
- index = 0;
- } else {
- index = mPrevBandwidthIndex + 1;
- if (index == mBandwidthItems.size()) {
- index = 0;
- }
- }
- }
-#else
- // Stay on the lowest bandwidth available.
- size_t index = mBandwidthItems.size() - 1; // Highest bandwidth stream
-#endif
+ mURL = mBandwidthItems.editItemAt(bandwidthIndex).mURI;
- mURL = mBandwidthItems.editItemAt(index).mURI;
-
- if (mPrevBandwidthIndex >= 0 && (size_t)mPrevBandwidthIndex != index) {
+ if (mPrevBandwidthIndex >= 0
+ && (size_t)mPrevBandwidthIndex != bandwidthIndex) {
// If we switched streams because of bandwidth changes,
// we'll signal this discontinuity by inserting a
// special transport stream packet into the stream.
mSignalDiscontinuity = true;
}
- mPrevBandwidthIndex = index;
+ mPrevBandwidthIndex = bandwidthIndex;
} else {
mURL = mMasterURL;
}
@@ -199,18 +264,33 @@
mOffsetBias += mSourceSize;
mSourceSize = 0;
+ size_t bandwidthIndex = getBandwidthIndex();
+
if (mLastFetchTimeUs < 0 || getNowUs() >= mLastFetchTimeUs + 15000000ll
- || mPlaylistIndex == mPlaylist->size()) {
+ || mPlaylistIndex == mPlaylist->size()
+ || (ssize_t)bandwidthIndex != mPrevBandwidthIndex) {
int32_t nextSequenceNumber =
mPlaylistIndex + mFirstItemSequenceNumber;
- if (!loadPlaylist(mLastFetchTimeUs < 0)) {
+ if (!loadPlaylist(mLastFetchTimeUs < 0, bandwidthIndex)) {
LOGE("failed to reload playlist");
return false;
}
if (mLastFetchTimeUs < 0) {
- mPlaylistIndex = 0;
+ if (isSeekable()) {
+ mPlaylistIndex = 0;
+ } else {
+ // This is live streamed content; the first seqnum in the
+ // streams for the various bandwidths may be slightly off, so don't
+ // start at the very first entry.

+ // With a segment duration of 6-10secs, this really only
+ // delays playback up to 30secs compared to real time.
+ mPlaylistIndex = 3;
+ if (mPlaylistIndex >= mPlaylist->size()) {
+ mPlaylistIndex = mPlaylist->size() - 1;
+ }
+ }
} else {
if (nextSequenceNumber < mFirstItemSequenceNumber
|| nextSequenceNumber
@@ -227,6 +307,10 @@
mLastFetchTimeUs = getNowUs();
}
+ if (!setupCipher()) {
+ return false;
+ }
+
AString uri;
sp<AMessage> itemMeta;
CHECK(mPlaylist->itemAt(mPlaylistIndex, &uri, &itemMeta));
@@ -243,16 +327,131 @@
}
mPlaylistIndex++;
+
+ return true;
+}
+
+bool LiveSource::setupCipher() {
+ sp<AMessage> itemMeta;
+ bool found = false;
+ AString method;
+
+ for (ssize_t i = mPlaylistIndex; i >= 0; --i) {
+ AString uri;
+ CHECK(mPlaylist->itemAt(i, &uri, &itemMeta));
+
+ if (itemMeta->findString("cipher-method", &method)) {
+ found = true;
+ break;
+ }
+ }
+
+ if (!found) {
+ method = "NONE";
+ }
+
+ mStreamEncrypted = false;
+
+ if (method == "AES-128") {
+ AString keyURI;
+ if (!itemMeta->findString("cipher-uri", &keyURI)) {
+ LOGE("Missing key uri");
+ return false;
+ }
+
+ if (keyURI.size() >= 2
+ && keyURI.c_str()[0] == '"'
+ && keyURI.c_str()[keyURI.size() - 1] == '"') {
+ // Remove surrounding quotes.
+ AString tmp(keyURI, 1, keyURI.size() - 2);
+ keyURI = tmp;
+ }
+
+ ssize_t index = mAESKeyForURI.indexOfKey(keyURI);
+
+ sp<ABuffer> key;
+ if (index >= 0) {
+ key = mAESKeyForURI.valueAt(index);
+ } else {
+ key = new ABuffer(16);
+
+ sp<NuHTTPDataSource> keySource = new NuHTTPDataSource;
+ status_t err = keySource->connect(keyURI.c_str());
+
+ if (err == OK) {
+ size_t offset = 0;
+ while (offset < 16) {
+ ssize_t n = keySource->readAt(
+ offset, key->data() + offset, 16 - offset);
+ if (n <= 0) {
+ err = ERROR_IO;
+ break;
+ }
+
+ offset += n;
+ }
+ }
+
+ if (err != OK) {
+ LOGE("failed to fetch cipher key from '%s'.", keyURI.c_str());
+ return false;
+ }
+
+ mAESKeyForURI.add(keyURI, key);
+ }
+
+ if (AES_set_decrypt_key(key->data(), 128, (AES_KEY *)mAESKey) != 0) {
+ LOGE("failed to set AES decryption key.");
+ return false;
+ }
+
+ AString iv;
+ if (itemMeta->findString("cipher-iv", &iv)) {
+ if ((!iv.startsWith("0x") && !iv.startsWith("0X"))
+ || iv.size() != 16 * 2 + 2) {
+ LOGE("malformed cipher IV '%s'.", iv.c_str());
+ return false;
+ }
+
+ memset(mAESIVec, 0, sizeof(mAESIVec));
+ for (size_t i = 0; i < 16; ++i) {
+ char c1 = tolower(iv.c_str()[2 + 2 * i]);
+ char c2 = tolower(iv.c_str()[3 + 2 * i]);
+ if (!isxdigit(c1) || !isxdigit(c2)) {
+ LOGE("malformed cipher IV '%s'.", iv.c_str());
+ return false;
+ }
+ uint8_t nibble1 = isdigit(c1) ? c1 - '0' : c1 - 'a' + 10;
+ uint8_t nibble2 = isdigit(c2) ? c2 - '0' : c2 - 'a' + 10;
+
+ mAESIVec[i] = nibble1 << 4 | nibble2;
+ }
+ } else {
+ size_t seqNum = mPlaylistIndex + mFirstItemSequenceNumber;
+
+ memset(mAESIVec, 0, sizeof(mAESIVec));
+ mAESIVec[15] = seqNum & 0xff;
+ mAESIVec[14] = (seqNum >> 8) & 0xff;
+ mAESIVec[13] = (seqNum >> 16) & 0xff;
+ mAESIVec[12] = (seqNum >> 24) & 0xff;
+ }
+
+ mStreamEncrypted = true;
+ } else if (!(method == "NONE")) {
+ LOGE("Unsupported cipher method '%s'", method.c_str());
+ return false;
+ }
+
return true;
}
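
A brief sketch of the default IV derivation used by setupCipher() above when the #EXT-X-KEY line carries no IV attribute: the segment's media sequence number, stored big-endian in the last four bytes of an otherwise zeroed 16-byte IV. The free function and the seqNum parameter are hypothetical; the byte layout matches the patch.

#include <cstdint>
#include <cstring>

void deriveDefaultIV(uint32_t seqNum, unsigned char iv[16]) {
    memset(iv, 0, 16);
    iv[15] = seqNum & 0xff;          // sequence number goes into iv[12..15],
    iv[14] = (seqNum >> 8) & 0xff;   // most significant byte first
    iv[13] = (seqNum >> 16) & 0xff;
    iv[12] = (seqNum >> 24) & 0xff;
}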
static const ssize_t kHeaderSize = 188;
-ssize_t LiveSource::readAt(off_t offset, void *data, size_t size) {
+ssize_t LiveSource::readAt(off64_t offset, void *data, size_t size) {
CHECK(offset >= mOffsetBias);
offset -= mOffsetBias;
- off_t delta = mSignalDiscontinuity ? kHeaderSize : 0;
+ off64_t delta = mSignalDiscontinuity ? kHeaderSize : 0;
if (offset >= mSourceSize + delta) {
CHECK_EQ(offset, mSourceSize + delta);
@@ -279,6 +478,7 @@
return avail;
}
+ bool done = false;
size_t numRead = 0;
while (numRead < size) {
ssize_t n = mSource->readAt(
@@ -289,7 +489,44 @@
break;
}
+ if (mStreamEncrypted) {
+ size_t nmod = n % 16;
+ CHECK(nmod == 0);
+
+ sp<ABuffer> tmp = new ABuffer(n);
+
+ AES_cbc_encrypt((const unsigned char *)data + numRead,
+ tmp->data(),
+ n,
+ (const AES_KEY *)mAESKey,
+ mAESIVec,
+ AES_DECRYPT);
+
+ if (mSourceSize == (off64_t)(offset + numRead - delta + n)) {
+ // check for padding at the end of the file.
+
+ size_t pad = tmp->data()[n - 1];
+ CHECK_GT(pad, 0u);
+ CHECK_LE(pad, 16u);
+ CHECK_GE((size_t)n, pad);
+ for (size_t i = 0; i < pad; ++i) {
+ CHECK_EQ((unsigned)tmp->data()[n - 1 - i], pad);
+ }
+
+ n -= pad;
+ mSourceSize -= pad;
+
+ done = true;
+ }
+
+ memcpy((uint8_t *)data + numRead, tmp->data(), n);
+ }
+
numRead += n;
+
+ if (done) {
+ break;
+ }
}
return numRead;
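
A sketch, under stated assumptions, of the decrypt-and-unpad step readAt() performs per chunk in the hunk above, using the same OpenSSL calls the patch links against. isLastChunk is a hypothetical flag standing in for the mSourceSize comparison; the patch decrypts into a temporary ABuffer and copies back, which is elided here. The IV is updated in place by AES_cbc_encrypt, so consecutive chunks chain correctly.

#include <openssl/aes.h>
#include <sys/types.h>

// Returns the number of plaintext bytes, or -1 if the input is misaligned
// or the trailing padding is malformed.
ssize_t decryptChunk(const AES_KEY *key, unsigned char iv[16],
                     const unsigned char *in, unsigned char *out,
                     size_t n, bool isLastChunk) {
    if (n % 16 != 0) {
        return -1;                       // CBC input must be block aligned
    }
    AES_cbc_encrypt(in, out, n, key, iv, AES_DECRYPT);

    if (isLastChunk) {
        size_t pad = out[n - 1];         // PKCS#7-style trailing padding
        if (pad == 0 || pad > 16 || pad > n) {
            return -1;
        }
        for (size_t i = 0; i < pad; ++i) {
            if (out[n - 1 - i] != pad) {
                return -1;
            }
        }
        n -= pad;
    }
    return (ssize_t)n;
}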
@@ -314,7 +551,7 @@
source = mSource;
}
- off_t size;
+ off64_t size;
status_t err = source->getSize(&size);
if (err != OK) {
@@ -380,19 +617,17 @@
return false;
}
- size_t newPlaylistIndex = mFirstItemSequenceNumber + index;
-
- if (newPlaylistIndex == mPlaylistIndex) {
+ if (index == mPlaylistIndex) {
return false;
}
- mPlaylistIndex = newPlaylistIndex;
+ mPlaylistIndex = index;
+
+ LOGV("seeking to index %lld", index);
switchToNext();
mOffsetBias = 0;
- LOGV("seeking to index %lld", index);
-
return true;
}
diff --git a/media/libstagefright/httplive/M3UParser.cpp b/media/libstagefright/httplive/M3UParser.cpp
index 90f3d6d..b166cc3 100644
--- a/media/libstagefright/httplive/M3UParser.cpp
+++ b/media/libstagefright/httplive/M3UParser.cpp
@@ -158,6 +158,11 @@
return ERROR_MALFORMED;
}
err = parseMetaData(line, &mMeta, "media-sequence");
+ } else if (line.startsWith("#EXT-X-KEY")) {
+ if (mIsVariantPlaylist) {
+ return ERROR_MALFORMED;
+ }
+ err = parseCipherInfo(line, &itemMeta);
} else if (line.startsWith("#EXT-X-ENDLIST")) {
mIsComplete = true;
} else if (line.startsWith("#EXTINF")) {
@@ -292,6 +297,57 @@
}
// static
+status_t M3UParser::parseCipherInfo(
+ const AString &line, sp<AMessage> *meta) {
+ ssize_t colonPos = line.find(":");
+
+ if (colonPos < 0) {
+ return ERROR_MALFORMED;
+ }
+
+ size_t offset = colonPos + 1;
+
+ while (offset < line.size()) {
+ ssize_t end = line.find(",", offset);
+ if (end < 0) {
+ end = line.size();
+ }
+
+ AString attr(line, offset, end - offset);
+ attr.trim();
+
+ offset = end + 1;
+
+ ssize_t equalPos = attr.find("=");
+ if (equalPos < 0) {
+ continue;
+ }
+
+ AString key(attr, 0, equalPos);
+ key.trim();
+
+ AString val(attr, equalPos + 1, attr.size() - equalPos - 1);
+ val.trim();
+
+ LOGV("key=%s value=%s", key.c_str(), val.c_str());
+
+ key.tolower();
+
+ if (key == "method" || key == "uri" || key == "iv") {
+ if (meta->get() == NULL) {
+ *meta = new AMessage;
+ }
+
+ key.insert(AString("cipher-"), 0);
+
+ (*meta)->setString(key.c_str(), val.c_str(), val.size());
+ }
+ }
+
+ return OK;
+}
+
+// static
status_t M3UParser::ParseInt32(const char *s, int32_t *x) {
char *end;
long lval = strtol(s, &end, 10);
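
A sketch of the attribute splitting parseCipherInfo() above applies to an "#EXT-X-KEY:METHOD=...,URI=...,IV=..." line, restated with std::string so it can be tried outside the framework; whitespace trimming is omitted for brevity. Keys are lower-cased and prefixed with "cipher-", which is what LiveSource::setupCipher() later looks up. The function name and the std::map return type are assumptions.

#include <cctype>
#include <map>
#include <string>

std::map<std::string, std::string> parseKeyLine(const std::string &line) {
    std::map<std::string, std::string> attrs;
    size_t colon = line.find(':');
    if (colon == std::string::npos) {
        return attrs;                            // malformed: no attribute list
    }
    size_t offset = colon + 1;
    while (offset < line.size()) {
        size_t end = line.find(',', offset);
        if (end == std::string::npos) {
            end = line.size();
        }
        std::string attr = line.substr(offset, end - offset);
        offset = end + 1;

        size_t eq = attr.find('=');
        if (eq == std::string::npos) {
            continue;                            // skip attributes without '='
        }
        std::string key = attr.substr(0, eq);
        std::string val = attr.substr(eq + 1);
        for (size_t i = 0; i < key.size(); ++i) {
            key[i] = (char)tolower((unsigned char)key[i]);
        }

        if (key == "method" || key == "uri" || key == "iv") {
            attrs["cipher-" + key] = val;        // e.g. cipher-method, cipher-uri
        }
    }
    return attrs;
}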
diff --git a/media/libstagefright/id3/ID3.cpp b/media/libstagefright/id3/ID3.cpp
index da340f7..e9131a6 100644
--- a/media/libstagefright/id3/ID3.cpp
+++ b/media/libstagefright/id3/ID3.cpp
@@ -769,8 +769,8 @@
bool ID3::parseV1(const sp<DataSource> &source) {
const size_t V1_TAG_SIZE = 128;
- off_t size;
- if (source->getSize(&size) != OK || size < (off_t)V1_TAG_SIZE) {
+ off64_t size;
+ if (source->getSize(&size) != OK || size < (off64_t)V1_TAG_SIZE) {
return false;
}
diff --git a/media/libstagefright/include/AACEncoder.h b/media/libstagefright/include/AACEncoder.h
index ecc533f..3d5fc60 100644
--- a/media/libstagefright/include/AACEncoder.h
+++ b/media/libstagefright/include/AACEncoder.h
@@ -60,7 +60,7 @@
kNumSamplesPerFrame = 1024,
};
- int16_t mInputFrame[kNumSamplesPerFrame];
+ int16_t *mInputFrame;
uint8_t mAudioSpecificConfigData[2]; // audio specific data
void *mEncoderHandle;
diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h
index e352928..46f4a35 100644
--- a/media/libstagefright/include/AwesomePlayer.h
+++ b/media/libstagefright/include/AwesomePlayer.h
@@ -48,7 +48,6 @@
struct AwesomeRenderer : public RefBase {
AwesomeRenderer() {}
- virtual status_t initCheck() const = 0;
virtual void render(MediaBuffer *buffer) = 0;
private:
@@ -68,6 +67,8 @@
status_t setDataSource(int fd, int64_t offset, int64_t length);
+ status_t setDataSource(const sp<IStreamSource> &source);
+
void reset();
status_t prepare();
@@ -80,7 +81,7 @@
bool isPlaying() const;
- void setISurface(const sp<ISurface> &isurface);
+ void setSurface(const sp<Surface> &surface);
void setAudioSink(const sp<MediaPlayerBase::AudioSink> &audioSink);
status_t setLooping(bool shouldLoop);
@@ -89,11 +90,6 @@
status_t seekTo(int64_t timeUs);
- status_t getVideoDimensions(int32_t *width, int32_t *height) const;
-
- status_t suspend();
- status_t resume();
-
// This is a mask of MediaExtractor::Flags.
uint32_t flags() const;
@@ -115,6 +111,11 @@
AUDIO_AT_EOS = 256,
VIDEO_AT_EOS = 512,
AUTO_LOOPING = 1024,
+
+ // We are basically done preparing but are currently buffering
+ // sufficient data to begin playback and finish the preparation phase
+ // for good.
+ PREPARING_CONNECTED = 2048,
};
mutable Mutex mLock;
@@ -125,7 +126,7 @@
bool mQueueStarted;
wp<MediaPlayerBase> mListener;
- sp<ISurface> mISurface;
+ sp<Surface> mSurface;
sp<MediaPlayerBase::AudioSink> mAudioSink;
SystemTimeSource mSystemTimeSource;
@@ -149,7 +150,6 @@
uint32_t mFlags;
uint32_t mExtractorFlags;
- int32_t mVideoWidth, mVideoHeight;
int64_t mTimeSourceDeltaUs;
int64_t mVideoTimeUs;
@@ -183,7 +183,6 @@
void postCheckAudioStatusEvent_l();
status_t play_l();
- MediaBuffer *mLastVideoBuffer;
MediaBuffer *mVideoBuffer;
sp<NuHTTPDataSource> mConnectingDataSource;
@@ -194,32 +193,6 @@
sp<ARTPSession> mRTPSession;
sp<UDPPusher> mRTPPusher, mRTCPPusher;
- struct SuspensionState {
- String8 mUri;
- KeyedVector<String8, String8> mUriHeaders;
- sp<DataSource> mFileSource;
-
- uint32_t mFlags;
- int64_t mPositionUs;
-
- void *mLastVideoFrame;
- size_t mLastVideoFrameSize;
- int32_t mColorFormat;
- int32_t mVideoWidth, mVideoHeight;
- int32_t mDecodedWidth, mDecodedHeight;
-
- SuspensionState()
- : mLastVideoFrame(NULL) {
- }
-
- ~SuspensionState() {
- if (mLastVideoFrame) {
- free(mLastVideoFrame);
- mLastVideoFrame = NULL;
- }
- }
- } *mSuspensionState;
-
DrmManagerClient *mDrmManagerClient;
DecryptHandle *mDecryptHandle;
@@ -233,7 +206,8 @@
void partial_reset_l();
status_t seekTo_l(int64_t timeUs);
status_t pause_l(bool at_eos = false);
- status_t initRenderer_l();
+ void initRenderer_l();
+ void notifyVideoSize_l();
void seekAudioIfNecessary_l();
void cancelPlayerEvents(bool keepBufferingGoing = false);
@@ -267,6 +241,7 @@
bool getBitrate(int64_t *bitrate);
void finishSeekIfNecessary(int64_t videoTimeUs);
+ void ensureCacheIsFetching_l();
AwesomePlayer(const AwesomePlayer &);
AwesomePlayer &operator=(const AwesomePlayer &);
diff --git a/media/libstagefright/include/HTTPStream.h b/media/libstagefright/include/HTTPStream.h
index 793798f..545cd0c 100644
--- a/media/libstagefright/include/HTTPStream.h
+++ b/media/libstagefright/include/HTTPStream.h
@@ -18,10 +18,9 @@
#define HTTP_STREAM_H_
-#include "stagefright_string.h"
-
#include <sys/types.h>
+#include <media/stagefright/foundation/AString.h>
#include <media/stagefright/MediaErrors.h>
#include <utils/KeyedVector.h>
#include <utils/threads.h>
@@ -50,7 +49,7 @@
static const char *kStatusKey;
bool find_header_value(
- const string &key, string *value) const;
+ const AString &key, AString *value) const;
// Pass a negative value to disable the timeout.
void setReceiveTimeout(int seconds);
@@ -70,7 +69,7 @@
Mutex mLock;
int mSocket;
- KeyedVector<string, string> mHeaders;
+ KeyedVector<AString, AString> mHeaders;
HTTPStream(const HTTPStream &);
HTTPStream &operator=(const HTTPStream &);
diff --git a/media/libstagefright/include/LiveSource.h b/media/libstagefright/include/LiveSource.h
index 55dd45e..38fe328 100644
--- a/media/libstagefright/include/LiveSource.h
+++ b/media/libstagefright/include/LiveSource.h
@@ -21,6 +21,7 @@
#include <media/stagefright/foundation/ABase.h>
#include <media/stagefright/foundation/AString.h>
#include <media/stagefright/DataSource.h>
+#include <utils/KeyedVector.h>
#include <utils/Vector.h>
namespace android {
@@ -34,7 +35,7 @@
virtual status_t initCheck() const;
- virtual ssize_t readAt(off_t offset, void *data, size_t size);
+ virtual ssize_t readAt(off64_t offset, void *data, size_t size);
virtual uint32_t flags() {
return kWantsPrefetching;
@@ -66,20 +67,29 @@
int64_t mLastFetchTimeUs;
sp<NuHTTPDataSource> mSource;
- off_t mSourceSize;
- off_t mOffsetBias;
+ off64_t mSourceSize;
+ off64_t mOffsetBias;
bool mSignalDiscontinuity;
ssize_t mPrevBandwidthIndex;
+ void *mAESKey;
+ unsigned char mAESIVec[16];
+ bool mStreamEncrypted;
+
+ KeyedVector<AString, sp<ABuffer> > mAESKeyForURI;
+
status_t fetchM3U(const char *url, sp<ABuffer> *buffer);
static int SortByBandwidth(const BandwidthItem *a, const BandwidthItem *b);
bool switchToNext();
- bool loadPlaylist(bool fetchMaster);
+ bool loadPlaylist(bool fetchMaster, size_t bandwidthIndex);
void determineSeekability();
+ size_t getBandwidthIndex();
+ bool setupCipher();
+
DISALLOW_EVIL_CONSTRUCTORS(LiveSource);
};
diff --git a/media/libstagefright/include/M3UParser.h b/media/libstagefright/include/M3UParser.h
index bd9eebe..531d184 100644
--- a/media/libstagefright/include/M3UParser.h
+++ b/media/libstagefright/include/M3UParser.h
@@ -66,6 +66,9 @@
static status_t parseStreamInf(
const AString &line, sp<AMessage> *meta);
+ static status_t parseCipherInfo(
+ const AString &line, sp<AMessage> *meta);
+
static status_t ParseInt32(const char *s, int32_t *x);
DISALLOW_EVIL_CONSTRUCTORS(M3UParser);
diff --git a/media/libstagefright/include/MP3Extractor.h b/media/libstagefright/include/MP3Extractor.h
index 30136e7..728980e 100644
--- a/media/libstagefright/include/MP3Extractor.h
+++ b/media/libstagefright/include/MP3Extractor.h
@@ -24,6 +24,7 @@
struct AMessage;
class DataSource;
+struct MP3Seeker;
class String8;
class MP3Extractor : public MediaExtractor {
@@ -37,15 +38,19 @@
virtual sp<MetaData> getMetaData();
+ static bool get_mp3_frame_size(
+ uint32_t header, size_t *frame_size,
+ int *out_sampling_rate = NULL, int *out_channels = NULL,
+ int *out_bitrate = NULL);
+
private:
status_t mInitCheck;
sp<DataSource> mDataSource;
- off_t mFirstFramePos;
+ off64_t mFirstFramePos;
sp<MetaData> mMeta;
uint32_t mFixedHeader;
- int32_t mByteNumber; // total number of bytes in this MP3
- char mTableOfContents[99]; // TOC entries in XING header
+ sp<MP3Seeker> mSeeker;
MP3Extractor(const MP3Extractor &);
MP3Extractor &operator=(const MP3Extractor &);
diff --git a/media/libstagefright/include/MP3Seeker.h b/media/libstagefright/include/MP3Seeker.h
new file mode 100644
index 0000000..599542e
--- /dev/null
+++ b/media/libstagefright/include/MP3Seeker.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MP3_SEEKER_H_
+
+#define MP3_SEEKER_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+struct MP3Seeker : public RefBase {
+ MP3Seeker() {}
+
+ virtual bool getDuration(int64_t *durationUs) = 0;
+
+ // Given a request seek time in "*timeUs", find the byte offset closest
+ // to that position and return it in "*pos". Update "*timeUs" to reflect
+ // the actual time that seekpoint represents.
+ virtual bool getOffsetForTime(int64_t *timeUs, off64_t *pos) = 0;
+
+protected:
+ virtual ~MP3Seeker() {}
+
+private:
+ DISALLOW_EVIL_CONSTRUCTORS(MP3Seeker);
+};
+
+} // namespace android
+
+#endif // MP3_SEEKER_H_
+
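
A short sketch of how a caller (hypothetically, the MP3Extractor read path) would use the MP3Seeker interface declared above to turn a requested seek time into a byte offset; any concrete implementation such as the XINGSeeker or VBRISeeker added later in this patch can be passed in. The helper name and fallback behaviour are assumptions.

#include "include/MP3Seeker.h"

void resolveSeek(const sp<MP3Seeker> &seeker, int64_t requestedTimeUs,
                 int64_t *actualTimeUs, off64_t *pos) {
    *actualTimeUs = requestedTimeUs;
    if (seeker != NULL && seeker->getOffsetForTime(actualTimeUs, pos)) {
        // *actualTimeUs now reflects the time of the chosen seek point,
        // *pos the byte offset to resume reading from.
        return;
    }
    *pos = 0;                 // no seek table: fall back to the start of the stream
    *actualTimeUs = 0;
}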
diff --git a/media/libstagefright/include/MPEG2TSExtractor.h b/media/libstagefright/include/MPEG2TSExtractor.h
index d83b538..58401d4 100644
--- a/media/libstagefright/include/MPEG2TSExtractor.h
+++ b/media/libstagefright/include/MPEG2TSExtractor.h
@@ -43,7 +43,7 @@
Vector<sp<AnotherPacketSource> > mSourceImpls;
- off_t mOffset;
+ off64_t mOffset;
void init();
status_t feedMore();
diff --git a/media/libstagefright/include/MPEG4Extractor.h b/media/libstagefright/include/MPEG4Extractor.h
index bc2e4dc..04e8a6a 100644
--- a/media/libstagefright/include/MPEG4Extractor.h
+++ b/media/libstagefright/include/MPEG4Extractor.h
@@ -67,8 +67,8 @@
Vector<uint32_t> mPath;
status_t readMetaData();
- status_t parseChunk(off_t *offset, int depth);
- status_t parseMetaData(off_t offset, size_t size);
+ status_t parseChunk(off64_t *offset, int depth);
+ status_t parseMetaData(off64_t offset, size_t size);
status_t updateAudioTrackInfoFromESDS_MPEG4Audio(
const void *esds_data, size_t esds_size);
@@ -86,9 +86,9 @@
SINF *mFirstSINF;
bool mIsDrm;
- status_t parseDrmSINF(off_t *offset, off_t data_offset);
+ status_t parseDrmSINF(off64_t *offset, off64_t data_offset);
- status_t parseTrackHeader(off_t data_offset, off_t data_size);
+ status_t parseTrackHeader(off64_t data_offset, off64_t data_size);
MPEG4Extractor(const MPEG4Extractor &);
MPEG4Extractor &operator=(const MPEG4Extractor &);
diff --git a/media/libstagefright/include/NuCachedSource2.h b/media/libstagefright/include/NuCachedSource2.h
index 1fb2088..f0f7daf 100644
--- a/media/libstagefright/include/NuCachedSource2.h
+++ b/media/libstagefright/include/NuCachedSource2.h
@@ -32,11 +32,14 @@
virtual status_t initCheck() const;
- virtual ssize_t readAt(off_t offset, void *data, size_t size);
+ virtual ssize_t readAt(off64_t offset, void *data, size_t size);
- virtual status_t getSize(off_t *size);
+ virtual status_t getSize(off64_t *size);
virtual uint32_t flags();
+ virtual DecryptHandle* DrmInitialization(DrmManagerClient *client);
+ virtual void getDrmInfo(DecryptHandle **handle, DrmManagerClient **client);
+ virtual String8 getUri();
////////////////////////////////////////////////////////////////////////////
size_t cachedSize();
@@ -45,6 +48,8 @@
void suspend();
void clearCacheAndResume();
+ void resumeFetchingIfNecessary();
+
protected:
virtual ~NuCachedSource2();
@@ -54,7 +59,7 @@
enum {
kPageSize = 65536,
kHighWaterThreshold = 5 * 1024 * 1024,
- kLowWaterThreshold = 512 * 1024,
+ kLowWaterThreshold = 1024 * 1024,
// Read data after a 15 sec timeout whether we're actively
// fetching or not.
@@ -76,9 +81,9 @@
Condition mCondition;
PageCache *mCache;
- off_t mCacheOffset;
+ off64_t mCacheOffset;
status_t mFinalStatus;
- off_t mLastAccessPos;
+ off64_t mLastAccessPos;
sp<AMessage> mAsyncResult;
bool mFetching;
int64_t mLastFetchTimeUs;
@@ -90,11 +95,11 @@
void onSuspend();
void fetchInternal();
- ssize_t readInternal(off_t offset, void *data, size_t size);
- status_t seekInternal_l(off_t offset);
+ ssize_t readInternal(off64_t offset, void *data, size_t size);
+ status_t seekInternal_l(off64_t offset);
size_t approxDataRemaining_l(bool *eos);
- void restartPrefetcherIfNecessary_l();
+ void restartPrefetcherIfNecessary_l(bool ignoreLowWaterThreshold = false);
DISALLOW_EVIL_CONSTRUCTORS(NuCachedSource2);
};
diff --git a/media/libstagefright/include/NuHTTPDataSource.h b/media/libstagefright/include/NuHTTPDataSource.h
index 3c88cc2..c8e93be 100644
--- a/media/libstagefright/include/NuHTTPDataSource.h
+++ b/media/libstagefright/include/NuHTTPDataSource.h
@@ -3,6 +3,7 @@
#define NU_HTTP_DATA_SOURCE_H_
#include <media/stagefright/DataSource.h>
+#include <utils/List.h>
#include <utils/String8.h>
#include <utils/threads.h>
@@ -16,16 +17,24 @@
status_t connect(
const char *uri,
const KeyedVector<String8, String8> *headers = NULL,
- off_t offset = 0);
+ off64_t offset = 0);
void disconnect();
virtual status_t initCheck() const;
- virtual ssize_t readAt(off_t offset, void *data, size_t size);
- virtual status_t getSize(off_t *size);
+ virtual ssize_t readAt(off64_t offset, void *data, size_t size);
+ virtual status_t getSize(off64_t *size);
virtual uint32_t flags();
+ // Returns true if bandwidth could successfully be estimated,
+ // false otherwise.
+ bool estimateBandwidth(int32_t *bandwidth_bps);
+
+ virtual DecryptHandle* DrmInitialization(DrmManagerClient *client);
+ virtual void getDrmInfo(DecryptHandle **handle, DrmManagerClient **client);
+ virtual String8 getUri();
+
protected:
virtual ~NuHTTPDataSource();
@@ -36,6 +45,11 @@
CONNECTED
};
+ struct BandwidthEntry {
+ int64_t mDelayUs;
+ size_t mNumBytes;
+ };
+
Mutex mLock;
State mState;
@@ -44,10 +58,11 @@
unsigned mPort;
String8 mPath;
String8 mHeaders;
+ String8 mUri;
HTTPStream mHTTP;
- off_t mOffset;
- off_t mContentLength;
+ off64_t mOffset;
+ off64_t mContentLength;
bool mContentLengthValid;
bool mHasChunkedTransferEncoding;
@@ -55,18 +70,27 @@
// chunk header (or -1 if no more chunks).
ssize_t mChunkDataBytesLeft;
+ List<BandwidthEntry> mBandwidthHistory;
+ size_t mNumBandwidthHistoryItems;
+ int64_t mTotalTransferTimeUs;
+ size_t mTotalTransferBytes;
+
+ DecryptHandle *mDecryptHandle;
+ DrmManagerClient *mDrmManagerClient;
+
status_t connect(
- const char *uri, const String8 &headers, off_t offset);
+ const char *uri, const String8 &headers, off64_t offset);
status_t connect(
const char *host, unsigned port, const char *path,
const String8 &headers,
- off_t offset);
+ off64_t offset);
// Read up to "size" bytes of data, respect transfer encoding.
ssize_t internalRead(void *data, size_t size);
void applyTimeoutResponse();
+ void addBandwidthMeasurement_l(size_t numBytes, int64_t delayUs);
static void MakeFullHeaders(
const KeyedVector<String8, String8> *overrides,
diff --git a/media/libstagefright/include/OMX.h b/media/libstagefright/include/OMX.h
index 72ab5aa..5fed98a 100644
--- a/media/libstagefright/include/OMX.h
+++ b/media/libstagefright/include/OMX.h
@@ -59,10 +59,20 @@
node_id node, OMX_INDEXTYPE index,
const void *params, size_t size);
+ virtual status_t enableGraphicBuffers(
+ node_id node, OMX_U32 port_index, OMX_BOOL enable);
+
+ virtual status_t storeMetaDataInBuffers(
+ node_id node, OMX_U32 port_index, OMX_BOOL enable);
+
virtual status_t useBuffer(
node_id node, OMX_U32 port_index, const sp<IMemory> &params,
buffer_id *buffer);
+ virtual status_t useGraphicBuffer(
+ node_id node, OMX_U32 port_index,
+ const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer);
+
virtual status_t allocateBuffer(
node_id node, OMX_U32 port_index, size_t size,
buffer_id *buffer, void **buffer_data);
@@ -87,14 +97,6 @@
const char *parameter_name,
OMX_INDEXTYPE *index);
- virtual sp<IOMXRenderer> createRenderer(
- const sp<ISurface> &surface,
- const char *componentName,
- OMX_COLOR_FORMATTYPE colorFormat,
- size_t encodedWidth, size_t encodedHeight,
- size_t displayWidth, size_t displayHeight,
- int32_t rotationDegrees);
-
virtual void binderDied(const wp<IBinder> &the_late_who);
OMX_ERRORTYPE OnEvent(
diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/include/OMXNodeInstance.h
index b5b31ac..86c102c 100644
--- a/media/libstagefright/include/OMXNodeInstance.h
+++ b/media/libstagefright/include/OMXNodeInstance.h
@@ -49,10 +49,17 @@
status_t getConfig(OMX_INDEXTYPE index, void *params, size_t size);
status_t setConfig(OMX_INDEXTYPE index, const void *params, size_t size);
+ status_t enableGraphicBuffers(OMX_U32 portIndex, OMX_BOOL enable);
+ status_t storeMetaDataInBuffers(OMX_U32 portIndex, OMX_BOOL enable);
+
status_t useBuffer(
OMX_U32 portIndex, const sp<IMemory> &params,
OMX::buffer_id *buffer);
+ status_t useGraphicBuffer(
+ OMX_U32 portIndex, const sp<GraphicBuffer> &graphicBuffer,
+ OMX::buffer_id *buffer);
+
status_t allocateBuffer(
OMX_U32 portIndex, size_t size, OMX::buffer_id *buffer,
void **buffer_data);
@@ -125,4 +132,3 @@
} // namespace android
#endif // OMX_NODE_INSTANCE_H_
-
diff --git a/media/libstagefright/include/SampleIterator.h b/media/libstagefright/include/SampleIterator.h
index a5eaed9..b5a043c 100644
--- a/media/libstagefright/include/SampleIterator.h
+++ b/media/libstagefright/include/SampleIterator.h
@@ -27,7 +27,7 @@
uint32_t getChunkIndex() const { return mCurrentChunkIndex; }
uint32_t getDescIndex() const { return mChunkDesc; }
- off_t getSampleOffset() const { return mCurrentSampleOffset; }
+ off64_t getSampleOffset() const { return mCurrentSampleOffset; }
size_t getSampleSize() const { return mCurrentSampleSize; }
uint32_t getSampleTime() const { return mCurrentSampleTime; }
@@ -48,7 +48,7 @@
uint32_t mChunkDesc;
uint32_t mCurrentChunkIndex;
- off_t mCurrentChunkOffset;
+ off64_t mCurrentChunkOffset;
Vector<size_t> mCurrentChunkSampleSizes;
uint32_t mTimeToSampleIndex;
@@ -58,13 +58,13 @@
uint32_t mTTSDuration;
uint32_t mCurrentSampleIndex;
- off_t mCurrentSampleOffset;
+ off64_t mCurrentSampleOffset;
size_t mCurrentSampleSize;
uint32_t mCurrentSampleTime;
void reset();
status_t findChunkRange(uint32_t sampleIndex);
- status_t getChunkOffset(uint32_t chunk, off_t *offset);
+ status_t getChunkOffset(uint32_t chunk, off64_t *offset);
status_t findSampleTime(uint32_t sampleIndex, uint32_t *time);
SampleIterator(const SampleIterator &);
diff --git a/media/libstagefright/include/SampleTable.h b/media/libstagefright/include/SampleTable.h
index f830690..c5e8136 100644
--- a/media/libstagefright/include/SampleTable.h
+++ b/media/libstagefright/include/SampleTable.h
@@ -36,17 +36,17 @@
// type can be 'stco' or 'co64'.
status_t setChunkOffsetParams(
- uint32_t type, off_t data_offset, size_t data_size);
+ uint32_t type, off64_t data_offset, size_t data_size);
- status_t setSampleToChunkParams(off_t data_offset, size_t data_size);
+ status_t setSampleToChunkParams(off64_t data_offset, size_t data_size);
// type can be 'stsz' or 'stz2'.
status_t setSampleSizeParams(
- uint32_t type, off_t data_offset, size_t data_size);
+ uint32_t type, off64_t data_offset, size_t data_size);
- status_t setTimeToSampleParams(off_t data_offset, size_t data_size);
+ status_t setTimeToSampleParams(off64_t data_offset, size_t data_size);
- status_t setSyncSampleParams(off_t data_offset, size_t data_size);
+ status_t setSyncSampleParams(off64_t data_offset, size_t data_size);
////////////////////////////////////////////////////////////////////////////
@@ -58,7 +58,7 @@
status_t getMetaDataForSample(
uint32_t sampleIndex,
- off_t *offset,
+ off64_t *offset,
size_t *size,
uint32_t *decodingTime,
bool *isSyncSample = NULL);
@@ -89,14 +89,14 @@
sp<DataSource> mDataSource;
Mutex mLock;
- off_t mChunkOffsetOffset;
+ off64_t mChunkOffsetOffset;
uint32_t mChunkOffsetType;
uint32_t mNumChunkOffsets;
- off_t mSampleToChunkOffset;
+ off64_t mSampleToChunkOffset;
uint32_t mNumSampleToChunkOffsets;
- off_t mSampleSizeOffset;
+ off64_t mSampleSizeOffset;
uint32_t mSampleSizeFieldSize;
uint32_t mDefaultSampleSize;
uint32_t mNumSampleSizes;
@@ -104,7 +104,7 @@
uint32_t mTimeToSampleCount;
uint32_t *mTimeToSample;
- off_t mSyncSampleOffset;
+ off64_t mSyncSampleOffset;
uint32_t mNumSyncSamples;
uint32_t *mSyncSamples;
size_t mLastSyncSampleIndex;
diff --git a/media/libstagefright/include/SoftwareRenderer.h b/media/libstagefright/include/SoftwareRenderer.h
index 89d7cc4..90d3fe1 100644
--- a/media/libstagefright/include/SoftwareRenderer.h
+++ b/media/libstagefright/include/SoftwareRenderer.h
@@ -19,40 +19,34 @@
#define SOFTWARE_RENDERER_H_
#include <media/stagefright/ColorConverter.h>
-#include <media/stagefright/VideoRenderer.h>
#include <utils/RefBase.h>
namespace android {
-class ISurface;
-class MemoryHeapBase;
+struct MetaData;
+class Surface;
-class SoftwareRenderer : public VideoRenderer {
+class SoftwareRenderer {
public:
SoftwareRenderer(
- OMX_COLOR_FORMATTYPE colorFormat,
- const sp<ISurface> &surface,
- size_t displayWidth, size_t displayHeight,
- size_t decodedWidth, size_t decodedHeight,
- int32_t rotationDegrees = 0);
+ const sp<Surface> &surface, const sp<MetaData> &meta);
- virtual ~SoftwareRenderer();
+ ~SoftwareRenderer();
- status_t initCheck() const;
-
- virtual void render(
+ void render(
const void *data, size_t size, void *platformPrivate);
private:
- status_t mInitCheck;
+ enum YUVMode {
+ None,
+ };
+
OMX_COLOR_FORMATTYPE mColorFormat;
- ColorConverter mConverter;
- sp<ISurface> mISurface;
- size_t mDisplayWidth, mDisplayHeight;
- size_t mDecodedWidth, mDecodedHeight;
- size_t mFrameSize;
- sp<MemoryHeapBase> mMemoryHeap;
- int mIndex;
+ ColorConverter *mConverter;
+ YUVMode mYUVMode;
+ sp<Surface> mSurface;
+ int32_t mWidth, mHeight;
+ int32_t mCropLeft, mCropTop, mCropRight, mCropBottom;
SoftwareRenderer(const SoftwareRenderer &);
SoftwareRenderer &operator=(const SoftwareRenderer &);
diff --git a/media/libstagefright/include/ThrottledSource.h b/media/libstagefright/include/ThrottledSource.h
index 88164b3..8928a4a 100644
--- a/media/libstagefright/include/ThrottledSource.h
+++ b/media/libstagefright/include/ThrottledSource.h
@@ -30,9 +30,9 @@
virtual status_t initCheck() const;
- virtual ssize_t readAt(off_t offset, void *data, size_t size);
+ virtual ssize_t readAt(off64_t offset, void *data, size_t size);
- virtual status_t getSize(off_t *size);
+ virtual status_t getSize(off64_t *size);
virtual uint32_t flags();
private:
diff --git a/media/libstagefright/include/VBRISeeker.h b/media/libstagefright/include/VBRISeeker.h
new file mode 100644
index 0000000..1a2bf9f
--- /dev/null
+++ b/media/libstagefright/include/VBRISeeker.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VBRI_SEEKER_H_
+
+#define VBRI_SEEKER_H_
+
+#include "include/MP3Seeker.h"
+
+#include <utils/Vector.h>
+
+namespace android {
+
+struct DataSource;
+
+struct VBRISeeker : public MP3Seeker {
+ static sp<VBRISeeker> CreateFromSource(
+ const sp<DataSource> &source, off64_t post_id3_pos);
+
+ virtual bool getDuration(int64_t *durationUs);
+ virtual bool getOffsetForTime(int64_t *timeUs, off64_t *pos);
+
+private:
+ off64_t mBasePos;
+ int64_t mDurationUs;
+ Vector<uint32_t> mSegments;
+
+ VBRISeeker();
+
+ DISALLOW_EVIL_CONSTRUCTORS(VBRISeeker);
+};
+
+} // namespace android
+
+#endif // VBRI_SEEKER_H_
+
+
diff --git a/media/libstagefright/include/WAVExtractor.h b/media/libstagefright/include/WAVExtractor.h
index df6d3e7..9de197f 100644
--- a/media/libstagefright/include/WAVExtractor.h
+++ b/media/libstagefright/include/WAVExtractor.h
@@ -48,7 +48,7 @@
uint16_t mNumChannels;
uint32_t mSampleRate;
uint16_t mBitsPerSample;
- off_t mDataOffset;
+ off64_t mDataOffset;
size_t mDataSize;
sp<MetaData> mTrackMeta;
diff --git a/media/libstagefright/include/WVMExtractor.h b/media/libstagefright/include/WVMExtractor.h
new file mode 100644
index 0000000..0da45a8
--- /dev/null
+++ b/media/libstagefright/include/WVMExtractor.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef WVM_EXTRACTOR_H_
+
+#define WVM_EXTRACTOR_H_
+
+#include <media/stagefright/MediaExtractor.h>
+
+namespace android {
+
+class DataSource;
+
+class WVMExtractor : public MediaExtractor {
+public:
+ WVMExtractor(const sp<DataSource> &source);
+
+ virtual size_t countTracks();
+ virtual sp<MediaSource> getTrack(size_t index);
+ virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags);
+ virtual sp<MetaData> getMetaData();
+
+protected:
+ virtual ~WVMExtractor();
+
+private:
+ sp<DataSource> mDataSource;
+ sp<MediaExtractor> mImpl;
+
+ WVMExtractor(const WVMExtractor &);
+ WVMExtractor &operator=(const WVMExtractor &);
+
+};
+
+} // namespace android
+
+#endif // WVM_EXTRACTOR_H_
+
diff --git a/media/libstagefright/include/XINGSeeker.h b/media/libstagefright/include/XINGSeeker.h
new file mode 100644
index 0000000..d5a484e
--- /dev/null
+++ b/media/libstagefright/include/XINGSeeker.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef XING_SEEKER_H_
+
+#define XING_SEEKER_H_
+
+#include "include/MP3Seeker.h"
+
+namespace android {
+
+struct DataSource;
+
+struct XINGSeeker : public MP3Seeker {
+ static sp<XINGSeeker> CreateFromSource(
+ const sp<DataSource> &source, off64_t first_frame_pos);
+
+ virtual bool getDuration(int64_t *durationUs);
+ virtual bool getOffsetForTime(int64_t *timeUs, off64_t *pos);
+
+private:
+ int64_t mFirstFramePos;
+ int64_t mDurationUs;
+ int32_t mSizeBytes;
+
+ // TOC entries in XING header. Skip the first one since it's always 0.
+ char mTableOfContents[99];
+
+ XINGSeeker();
+
+ DISALLOW_EVIL_CONSTRUCTORS(XINGSeeker);
+};
+
+} // namespace android
+
+#endif // XING_SEEKER_H_
+
diff --git a/media/libstagefright/include/avc_utils.h b/media/libstagefright/include/avc_utils.h
index 62cfc36..3aeb07f 100644
--- a/media/libstagefright/include/avc_utils.h
+++ b/media/libstagefright/include/avc_utils.h
@@ -24,6 +24,17 @@
struct ABitReader;
+enum {
+ kAVCProfileBaseline = 0x42,
+ kAVCProfileMain = 0x4d,
+ kAVCProfileExtended = 0x58,
+ kAVCProfileHigh = 0x64,
+ kAVCProfileHigh10 = 0x6e,
+ kAVCProfileHigh422 = 0x7a,
+ kAVCProfileHigh444 = 0xf4,
+ kAVCProfileCAVLC444Intra = 0x2c
+};
+
void FindAVCDimensions(
const sp<ABuffer> &seqParamSet, int32_t *width, int32_t *height);
@@ -39,6 +50,8 @@
bool IsIDR(const sp<ABuffer> &accessUnit);
+const char *AVCProfileToString(uint8_t profile);
+
} // namespace android
#endif // AVC_UTILS_H_
diff --git a/media/libstagefright/include/stagefright_string.h b/media/libstagefright/include/stagefright_string.h
deleted file mode 100644
index 5dc7116..0000000
--- a/media/libstagefright/include/stagefright_string.h
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef STRING_H_
-
-#define STRING_H_
-
-#include <utils/String8.h>
-
-namespace android {
-
-class string {
-public:
- typedef size_t size_type;
- static size_type npos;
-
- string();
- string(const char *s);
- string(const char *s, size_t length);
- string(const string &from, size_type start, size_type length = npos);
-
- const char *c_str() const;
- size_type size() const;
-
- void clear();
- void erase(size_type from, size_type length);
-
- size_type find(char c) const;
-
- bool operator<(const string &other) const;
- bool operator==(const string &other) const;
-
- string &operator+=(char c);
-
-private:
- String8 mString;
-};
-
-} // namespace android
-
-#endif // STRING_H_
diff --git a/media/libstagefright/matroska/MatroskaExtractor.cpp b/media/libstagefright/matroska/MatroskaExtractor.cpp
index d16476d..e0ac49f 100644
--- a/media/libstagefright/matroska/MatroskaExtractor.cpp
+++ b/media/libstagefright/matroska/MatroskaExtractor.cpp
@@ -22,13 +22,15 @@
#include "mkvparser.hpp"
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/MediaBuffer.h>
-#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
#include <utils/String8.h>
namespace android {
@@ -56,7 +58,7 @@
}
virtual int Length(long long* total, long long* available) {
- off_t size;
+ off64_t size;
if (mSource->getSize(&size) != OK) {
return -1;
}
@@ -81,46 +83,6 @@
////////////////////////////////////////////////////////////////////////////////
-#include <ctype.h>
-static void hexdump(const void *_data, size_t size) {
- const uint8_t *data = (const uint8_t *)_data;
- size_t offset = 0;
- while (offset < size) {
- printf("0x%04x ", offset);
-
- size_t n = size - offset;
- if (n > 16) {
- n = 16;
- }
-
- for (size_t i = 0; i < 16; ++i) {
- if (i == 8) {
- printf(" ");
- }
-
- if (offset + i < size) {
- printf("%02x ", data[offset + i]);
- } else {
- printf(" ");
- }
- }
-
- printf(" ");
-
- for (size_t i = 0; i < n; ++i) {
- if (isprint(data[offset + i])) {
- printf("%c", data[offset + i]);
- } else {
- printf(".");
- }
- }
-
- printf("\n");
-
- offset += 16;
- }
-}
-
struct BlockIterator {
BlockIterator(mkvparser::Segment *segment, unsigned long trackNum);
@@ -167,6 +129,7 @@
size_t mTrackIndex;
Type mType;
BlockIterator mBlockIter;
+ size_t mNALSizeLen; // for type AVC
status_t advance();
@@ -180,13 +143,26 @@
mTrackIndex(index),
mType(OTHER),
mBlockIter(mExtractor->mSegment,
- mExtractor->mTracks.itemAt(index).mTrackNum) {
+ mExtractor->mTracks.itemAt(index).mTrackNum),
+ mNALSizeLen(0) {
+ sp<MetaData> meta = mExtractor->mTracks.itemAt(index).mMeta;
+
const char *mime;
- CHECK(mExtractor->mTracks.itemAt(index).mMeta->
- findCString(kKeyMIMEType, &mime));
+ CHECK(meta->findCString(kKeyMIMEType, &mime));
if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
mType = AVC;
+
+ uint32_t dummy;
+ const uint8_t *avcc;
+ size_t avccSize;
+ CHECK(meta->findData(
+ kKeyAVCC, &dummy, (const void **)&avcc, &avccSize));
+
+ CHECK_GE(avccSize, 5u);
+
+ mNALSizeLen = 1 + (avcc[4] & 3);
+ LOGV("mNALSizeLen = %d", mNALSizeLen);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
mType = AAC;
}
@@ -276,6 +252,10 @@
////////////////////////////////////////////////////////////////////////////////
+static unsigned U24_AT(const uint8_t *ptr) {
+ return ptr[0] << 16 | ptr[1] << 8 | ptr[2];
+}
+
status_t MatroskaSource::read(
MediaBuffer **out, const ReadOptions *options) {
*out = NULL;
@@ -286,6 +266,7 @@
mBlockIter.seek(seekTimeUs);
}
+again:
if (mBlockIter.eos()) {
return ERROR_END_OF_STREAM;
}
@@ -294,38 +275,70 @@
size_t size = block->GetSize();
int64_t timeUs = mBlockIter.blockTimeUs();
- MediaBuffer *buffer = new MediaBuffer(size + 2);
+ // In the case of AVC content, each NAL unit is prefixed by
+ // mNALSizeLen bytes of length. We want to prefix the data with
+ // a four-byte 0x00000001 startcode instead of the length prefix.
+ // mNALSizeLen ranges from 1 through 4 bytes, so add an extra
+ // 3 bytes of padding to the buffer start.
+ static const size_t kPadding = 3;
+
+ MediaBuffer *buffer = new MediaBuffer(size + kPadding);
buffer->meta_data()->setInt64(kKeyTime, timeUs);
buffer->meta_data()->setInt32(kKeyIsSyncFrame, block->IsKey());
long res = block->Read(
- mExtractor->mReader, (unsigned char *)buffer->data() + 2);
+ mExtractor->mReader, (unsigned char *)buffer->data() + kPadding);
if (res != 0) {
return ERROR_END_OF_STREAM;
}
- buffer->set_range(2, size);
+ buffer->set_range(kPadding, size);
if (mType == AVC) {
- CHECK(size >= 2);
+ CHECK_GE(size, mNALSizeLen);
uint8_t *data = (uint8_t *)buffer->data();
- unsigned NALsize = data[2] << 8 | data[3];
- CHECK_EQ(size, NALsize + 2);
+ size_t NALsize;
+ switch (mNALSizeLen) {
+ case 1: NALsize = data[kPadding]; break;
+ case 2: NALsize = U16_AT(&data[kPadding]); break;
+ case 3: NALsize = U24_AT(&data[kPadding]); break;
+ case 4: NALsize = U32_AT(&data[kPadding]); break;
+ default:
+ TRESPASS();
+ }
- memcpy(data, "\x00\x00\x00\x01", 4);
- buffer->set_range(0, size + 2);
+ CHECK_GE(size, NALsize + mNALSizeLen);
+ if (size > NALsize + mNALSizeLen) {
+ LOGW("discarding %d bytes of data.", size - NALsize - mNALSizeLen);
+ }
+
+ // actual data starts at &data[kPadding + mNALSizeLen]
+
+ memcpy(&data[mNALSizeLen - 1], "\x00\x00\x00\x01", 4);
+ buffer->set_range(mNALSizeLen - 1, NALsize + 4);
} else if (mType == AAC) {
// There's strange junk at the beginning...
- const uint8_t *data = (const uint8_t *)buffer->data() + 2;
+ const uint8_t *data = (const uint8_t *)buffer->data() + kPadding;
+
+ // hexdump(data, size);
+
size_t offset = 0;
while (offset < size && data[offset] != 0x21) {
++offset;
}
- buffer->set_range(2 + offset, size - offset);
+
+ if (size == offset) {
+ buffer->release();
+
+ mBlockIter.advance();
+ goto again;
+ }
+
+ buffer->set_range(kPadding + offset, size - offset);
}
*out = buffer;
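
A tiny sketch of the length-field read that the switch on mNALSizeLen above performs: Matroska prefixes each AVC NAL unit with a 1- to 4-byte big-endian length, which the patch then replaces with a four-byte 00 00 00 01 startcode. The helper name is hypothetical; it generalizes the U16_AT/U24_AT/U32_AT cases in the hunk.

#include <cstdint>
#include <cstddef>

static size_t readNALSize(const uint8_t *p, size_t nalSizeLen) {
    size_t size = 0;
    for (size_t i = 0; i < nalSizeLen; ++i) {
        size = (size << 8) | p[i];        // most significant byte first
    }
    return size;
}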
diff --git a/media/libstagefright/mpeg2ts/ATSParser.cpp b/media/libstagefright/mpeg2ts/ATSParser.cpp
index c88c6c1..f06a1bb 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.cpp
+++ b/media/libstagefright/mpeg2ts/ATSParser.cpp
@@ -274,6 +274,8 @@
mQueue(streamType == 0x1b
? ElementaryStreamQueue::H264 : ElementaryStreamQueue::AAC) {
mBuffer->setRange(0, 0);
+
+ LOGV("new stream PID 0x%02x, type 0x%02x", elementaryPID, streamType);
}
ATSParser::Stream::~Stream() {
@@ -307,7 +309,8 @@
}
void ATSParser::Stream::signalDiscontinuity(bool isASeek) {
- LOGV("Stream discontinuity");
+ isASeek = false; // Always signal a "real" discontinuity
+
mPayloadStarted = false;
mBuffer->setRange(0, 0);
@@ -317,7 +320,9 @@
// This is only a "minor" discontinuity, we stay within the same
// bitstream.
- mSource->clear();
+ if (mSource != NULL) {
+ mSource->clear();
+ }
return;
}
diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/libstagefright/mpeg2ts/ESQueue.cpp
index b0b9e66..f11b3c3 100644
--- a/media/libstagefright/mpeg2ts/ESQueue.cpp
+++ b/media/libstagefright/mpeg2ts/ESQueue.cpp
@@ -41,7 +41,10 @@
}
void ElementaryStreamQueue::clear() {
- mBuffer->setRange(0, 0);
+ if (mBuffer != NULL) {
+ mBuffer->setRange(0, 0);
+ }
+
mTimestamps.clear();
mFormat.clear();
}
diff --git a/media/libstagefright/omx/Android.mk b/media/libstagefright/omx/Android.mk
index ead1675..6e069c8 100644
--- a/media/libstagefright/omx/Android.mk
+++ b/media/libstagefright/omx/Android.mk
@@ -31,7 +31,6 @@
libutils \
libui \
libcutils \
- libstagefright_color_conversion
ifneq ($(BUILD_WITHOUT_PV),true)
LOCAL_SHARED_LIBRARIES += \
diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp
index f19c16a..4e9920b 100644
--- a/media/libstagefright/omx/OMX.cpp
+++ b/media/libstagefright/omx/OMX.cpp
@@ -24,14 +24,11 @@
#include <sys/resource.h>
#include "../include/OMX.h"
-#include "OMXRenderer.h"
#include "../include/OMXNodeInstance.h"
-#include "../include/SoftwareRenderer.h"
#include <binder/IMemory.h>
#include <media/stagefright/MediaDebug.h>
-#include <media/stagefright/VideoRenderer.h>
#include <utils/threads.h>
#include "OMXMaster.h"
@@ -289,6 +286,16 @@
index, params, size);
}
+status_t OMX::enableGraphicBuffers(
+ node_id node, OMX_U32 port_index, OMX_BOOL enable) {
+ return findInstance(node)->enableGraphicBuffers(port_index, enable);
+}
+
+status_t OMX::storeMetaDataInBuffers(
+ node_id node, OMX_U32 port_index, OMX_BOOL enable) {
+ return findInstance(node)->storeMetaDataInBuffers(port_index, enable);
+}
+
status_t OMX::useBuffer(
node_id node, OMX_U32 port_index, const sp<IMemory> &params,
buffer_id *buffer) {
@@ -296,6 +303,13 @@
port_index, params, buffer);
}
+status_t OMX::useGraphicBuffer(
+ node_id node, OMX_U32 port_index,
+ const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer) {
+ return findInstance(node)->useGraphicBuffer(
+ port_index, graphicBuffer, buffer);
+}
+
status_t OMX::allocateBuffer(
node_id node, OMX_U32 port_index, size_t size,
buffer_id *buffer, void **buffer_data) {
@@ -425,135 +439,4 @@
mNodeIDToInstance.removeItem(node);
}
-////////////////////////////////////////////////////////////////////////////////
-
-struct SharedVideoRenderer : public VideoRenderer {
- SharedVideoRenderer(void *libHandle, VideoRenderer *obj)
- : mLibHandle(libHandle),
- mObj(obj) {
- }
-
- virtual ~SharedVideoRenderer() {
- delete mObj;
- mObj = NULL;
-
- dlclose(mLibHandle);
- mLibHandle = NULL;
- }
-
- virtual void render(
- const void *data, size_t size, void *platformPrivate) {
- return mObj->render(data, size, platformPrivate);
- }
-
-private:
- void *mLibHandle;
- VideoRenderer *mObj;
-
- SharedVideoRenderer(const SharedVideoRenderer &);
- SharedVideoRenderer &operator=(const SharedVideoRenderer &);
-};
-
-sp<IOMXRenderer> OMX::createRenderer(
- const sp<ISurface> &surface,
- const char *componentName,
- OMX_COLOR_FORMATTYPE colorFormat,
- size_t encodedWidth, size_t encodedHeight,
- size_t displayWidth, size_t displayHeight,
- int32_t rotationDegrees) {
- Mutex::Autolock autoLock(mLock);
-
- VideoRenderer *impl = NULL;
-
- void *libHandle = dlopen("libstagefrighthw.so", RTLD_NOW);
-
- if (libHandle) {
- typedef VideoRenderer *(*CreateRendererWithRotationFunc)(
- const sp<ISurface> &surface,
- const char *componentName,
- OMX_COLOR_FORMATTYPE colorFormat,
- size_t displayWidth, size_t displayHeight,
- size_t decodedWidth, size_t decodedHeight,
- int32_t rotationDegrees);
-
- typedef VideoRenderer *(*CreateRendererFunc)(
- const sp<ISurface> &surface,
- const char *componentName,
- OMX_COLOR_FORMATTYPE colorFormat,
- size_t displayWidth, size_t displayHeight,
- size_t decodedWidth, size_t decodedHeight);
-
- CreateRendererWithRotationFunc funcWithRotation =
- (CreateRendererWithRotationFunc)dlsym(
- libHandle,
- "_Z26createRendererWithRotationRKN7android2spINS_8"
- "ISurfaceEEEPKc20OMX_COLOR_FORMATTYPEjjjji");
-
- if (funcWithRotation) {
- impl = (*funcWithRotation)(
- surface, componentName, colorFormat,
- displayWidth, displayHeight, encodedWidth, encodedHeight,
- rotationDegrees);
- } else {
- CreateRendererFunc func =
- (CreateRendererFunc)dlsym(
- libHandle,
- "_Z14createRendererRKN7android2spINS_8ISurfaceEEEPKc20"
- "OMX_COLOR_FORMATTYPEjjjj");
-
- if (func) {
- impl = (*func)(surface, componentName, colorFormat,
- displayWidth, displayHeight, encodedWidth, encodedHeight);
- }
- }
-
- if (impl) {
- impl = new SharedVideoRenderer(libHandle, impl);
- libHandle = NULL;
- }
-
- if (libHandle) {
- dlclose(libHandle);
- libHandle = NULL;
- }
- }
-
- if (!impl) {
- LOGW("Using software renderer.");
- impl = new SoftwareRenderer(
- colorFormat,
- surface,
- displayWidth, displayHeight,
- encodedWidth, encodedHeight);
-
- if (((SoftwareRenderer *)impl)->initCheck() != OK) {
- delete impl;
- impl = NULL;
-
- return NULL;
- }
- }
-
- return new OMXRenderer(impl);
-}
-
-OMXRenderer::OMXRenderer(VideoRenderer *impl)
- : mImpl(impl) {
-}
-
-OMXRenderer::~OMXRenderer() {
- delete mImpl;
- mImpl = NULL;
-}
-
-void OMXRenderer::render(IOMX::buffer_id buffer) {
- OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *)buffer;
-
- mImpl->render(
- header->pBuffer + header->nOffset,
- header->nFilledLen,
- header->pPlatformPrivate);
-}
-
} // namespace android
-
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index 5db516e..9b6d441 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -24,6 +24,7 @@
#include <OMX_Component.h>
#include <binder/IMemory.h>
+#include <media/stagefright/HardwareAPI.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaErrors.h>
@@ -40,6 +41,11 @@
mIsBackup(false) {
}
+ BufferMeta(const sp<GraphicBuffer> &graphicBuffer)
+ : mGraphicBuffer(graphicBuffer),
+ mIsBackup(false) {
+ }
+
void CopyFromOMX(const OMX_BUFFERHEADERTYPE *header) {
if (!mIsBackup) {
return;
@@ -61,6 +67,7 @@
}
private:
+ sp<GraphicBuffer> mGraphicBuffer;
sp<IMemory> mMem;
size_t mSize;
bool mIsBackup;
@@ -240,6 +247,74 @@
return StatusFromOMXError(err);
}
+status_t OMXNodeInstance::enableGraphicBuffers(
+ OMX_U32 portIndex, OMX_BOOL enable) {
+ Mutex::Autolock autoLock(mLock);
+
+ OMX_INDEXTYPE index;
+ OMX_ERRORTYPE err = OMX_GetExtensionIndex(
+ mHandle,
+ const_cast<OMX_STRING>("OMX.google.android.index.enableAndroidNativeBuffers"),
+ &index);
+
+ if (err != OMX_ErrorNone) {
+ LOGE("OMX_GetExtensionIndex failed");
+
+ return StatusFromOMXError(err);
+ }
+
+ OMX_VERSIONTYPE ver;
+ ver.s.nVersionMajor = 1;
+ ver.s.nVersionMinor = 0;
+ ver.s.nRevision = 0;
+ ver.s.nStep = 0;
+ EnableAndroidNativeBuffersParams params = {
+ sizeof(EnableAndroidNativeBuffersParams), ver, portIndex, enable,
+ };
+
+ err = OMX_SetParameter(mHandle, index, &params);
+
+ if (err != OMX_ErrorNone) {
+ LOGE("OMX_EnableAndroidNativeBuffers failed with error %d (0x%08x)",
+ err, err);
+
+ return UNKNOWN_ERROR;
+ }
+
+ return OK;
+}
+
+status_t OMXNodeInstance::storeMetaDataInBuffers(
+ OMX_U32 portIndex,
+ OMX_BOOL enable) {
+ Mutex::Autolock autolock(mLock);
+
+ OMX_INDEXTYPE index;
+ OMX_STRING name = const_cast<OMX_STRING>(
+ "OMX.google.android.index.storeMetaDataInBuffers");
+
+ OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
+ if (err != OMX_ErrorNone) {
+ LOGE("OMX_GetExtensionIndex %s failed", name);
+ return StatusFromOMXError(err);
+ }
+
+ StoreMetaDataInBuffersParams params;
+ memset(&params, 0, sizeof(params));
+ params.nSize = sizeof(params);
+
+ // Version: 1.0.0.0
+ params.nVersion.s.nVersionMajor = 1;
+
+ params.nPortIndex = portIndex;
+ params.bStoreMetaData = enable;
+ if ((err = OMX_SetParameter(mHandle, index, &params)) != OMX_ErrorNone) {
+ LOGE("OMX_SetParameter() failed for StoreMetaDataInBuffers: 0x%08x", err);
+ return UNKNOWN_ERROR;
+ }
+ return err;
+}
+
status_t OMXNodeInstance::useBuffer(
OMX_U32 portIndex, const sp<IMemory> &params,
OMX::buffer_id *buffer) {
@@ -273,6 +348,60 @@
return OK;
}
+status_t OMXNodeInstance::useGraphicBuffer(
+ OMX_U32 portIndex, const sp<GraphicBuffer>& graphicBuffer,
+ OMX::buffer_id *buffer) {
+ Mutex::Autolock autoLock(mLock);
+
+ OMX_INDEXTYPE index;
+ OMX_ERRORTYPE err = OMX_GetExtensionIndex(
+ mHandle,
+ const_cast<OMX_STRING>("OMX.google.android.index.useAndroidNativeBuffer"),
+ &index);
+
+ if (err != OMX_ErrorNone) {
+ LOGE("OMX_GetExtensionIndex failed");
+
+ return StatusFromOMXError(err);
+ }
+
+ BufferMeta *bufferMeta = new BufferMeta(graphicBuffer);
+
+ OMX_BUFFERHEADERTYPE *header;
+
+ OMX_VERSIONTYPE ver;
+ ver.s.nVersionMajor = 1;
+ ver.s.nVersionMinor = 0;
+ ver.s.nRevision = 0;
+ ver.s.nStep = 0;
+ UseAndroidNativeBufferParams params = {
+ sizeof(UseAndroidNativeBufferParams), ver, portIndex, bufferMeta,
+ &header, graphicBuffer,
+ };
+
+ err = OMX_SetParameter(mHandle, index, &params);
+
+ if (err != OMX_ErrorNone) {
+ LOGE("OMX_UseAndroidNativeBuffer failed with error %d (0x%08x)", err,
+ err);
+
+ delete bufferMeta;
+ bufferMeta = NULL;
+
+ *buffer = 0;
+
+ return UNKNOWN_ERROR;
+ }
+
+ CHECK_EQ(header->pAppPrivate, bufferMeta);
+
+ *buffer = header;
+
+ addActiveBuffer(portIndex, *buffer);
+
+ return OK;
+}
+
status_t OMXNodeInstance::allocateBuffer(
OMX_U32 portIndex, size_t size, OMX::buffer_id *buffer,
void **buffer_data) {
@@ -498,4 +627,3 @@
}
} // namespace android
-
diff --git a/media/libstagefright/omx/OMXRenderer.h b/media/libstagefright/omx/OMXRenderer.h
deleted file mode 100644
index 4d194ce..0000000
--- a/media/libstagefright/omx/OMXRenderer.h
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef OMX_RENDERER_H_
-
-#define OMX_RENDERER_H_
-
-#include <media/IOMX.h>
-
-namespace android {
-
-class VideoRenderer;
-
-class OMXRenderer : public BnOMXRenderer {
-public:
- // Assumes ownership of "impl".
- OMXRenderer(VideoRenderer *impl);
- virtual ~OMXRenderer();
-
- virtual void render(IOMX::buffer_id buffer);
-
-private:
- VideoRenderer *mImpl;
-
- OMXRenderer(const OMXRenderer &);
- OMXRenderer &operator=(const OMXRenderer &);
-};
-
-} // namespace android
-
-#endif // OMX_RENDERER_H_
diff --git a/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp b/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp
index b0d2c64..bbde516 100644
--- a/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp
+++ b/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp
@@ -18,18 +18,381 @@
#include "ARTPSource.h"
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/foundation/ABitReader.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaErrors.h>
+
+#include <ctype.h>
namespace android {
-AMPEG4AudioAssembler::AMPEG4AudioAssembler(const sp<AMessage> &notify)
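+// Looks up |key| in a semicolon-separated list of "key=value" attributes and
+// copies its value into |value|; returns false if the key is not present.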
+static bool GetAttribute(const char *s, const char *key, AString *value) {
+ value->clear();
+
+ size_t keyLen = strlen(key);
+
+ for (;;) {
+ while (isspace(*s)) {
+ ++s;
+ }
+
+ const char *colonPos = strchr(s, ';');
+
+ size_t len =
+ (colonPos == NULL) ? strlen(s) : colonPos - s;
+
+ if (len >= keyLen + 1 && s[keyLen] == '=' && !strncmp(s, key, keyLen)) {
+ value->setTo(&s[keyLen + 1], len - keyLen - 1);
+ return true;
+ }
+
+ if (colonPos == NULL) {
+ return false;
+ }
+
+ s = colonPos + 1;
+ }
+}
+
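+// Decodes a hexadecimal string into a freshly allocated buffer; returns NULL
+// if the string has odd length or contains non-hex characters.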
+static sp<ABuffer> decodeHex(const AString &s) {
+ if ((s.size() % 2) != 0) {
+ return NULL;
+ }
+
+ size_t outLen = s.size() / 2;
+ sp<ABuffer> buffer = new ABuffer(outLen);
+ uint8_t *out = buffer->data();
+
+ uint8_t accum = 0;
+ for (size_t i = 0; i < s.size(); ++i) {
+ char c = s.c_str()[i];
+ unsigned value;
+ if (c >= '0' && c <= '9') {
+ value = c - '0';
+ } else if (c >= 'a' && c <= 'f') {
+ value = c - 'a' + 10;
+ } else if (c >= 'A' && c <= 'F') {
+ value = c - 'A' + 10;
+ } else {
+ return NULL;
+ }
+
+ accum = (accum << 4) | value;
+
+ if (i & 1) {
+ *out++ = accum;
+
+ accum = 0;
+ }
+ }
+
+ return buffer;
+}
+
+static status_t parseAudioObjectType(
+ ABitReader *bits, unsigned *audioObjectType) {
+ *audioObjectType = bits->getBits(5);
+ if ((*audioObjectType) == 31) {
+ *audioObjectType = 32 + bits->getBits(6);
+ }
+
+ return OK;
+}
+
+static status_t parseGASpecificConfig(
+ ABitReader *bits,
+ unsigned audioObjectType, unsigned channelConfiguration) {
+ unsigned frameLengthFlag = bits->getBits(1);
+ unsigned dependsOnCoreCoder = bits->getBits(1);
+ if (dependsOnCoreCoder) {
+ /* unsigned coreCoderDelay = */bits->getBits(1);
+ }
+ unsigned extensionFlag = bits->getBits(1);
+
+ if (!channelConfiguration) {
+ // program_config_element
+ return ERROR_UNSUPPORTED; // XXX to be implemented
+ }
+
+ if (audioObjectType == 6 || audioObjectType == 20) {
+ /* unsigned layerNr = */bits->getBits(3);
+ }
+
+ if (extensionFlag) {
+ if (audioObjectType == 22) {
+ /* unsigned numOfSubFrame = */bits->getBits(5);
+ /* unsigned layerLength = */bits->getBits(11);
+ } else if (audioObjectType == 17 || audioObjectType == 19
+ || audioObjectType == 20 || audioObjectType == 23) {
+ /* unsigned aacSectionDataResilienceFlag = */bits->getBits(1);
+ /* unsigned aacScalefactorDataResilienceFlag = */bits->getBits(1);
+ /* unsigned aacSpectralDataResilienceFlag = */bits->getBits(1);
+ }
+
+ unsigned extensionFlag3 = bits->getBits(1);
+ CHECK_EQ(extensionFlag3, 0u); // TBD in version 3
+ }
+
+ return OK;
+}
+
+static status_t parseAudioSpecificConfig(ABitReader *bits) {
+ unsigned audioObjectType;
+ CHECK_EQ(parseAudioObjectType(bits, &audioObjectType), (status_t)OK);
+
+ unsigned samplingFreqIndex = bits->getBits(4);
+ if (samplingFreqIndex == 0x0f) {
+ /* unsigned samplingFrequency = */bits->getBits(24);
+ }
+
+ unsigned channelConfiguration = bits->getBits(4);
+
+ unsigned extensionAudioObjectType = 0;
+ unsigned sbrPresent = 0;
+
+ if (audioObjectType == 5) {
+ extensionAudioObjectType = audioObjectType;
+ sbrPresent = 1;
+ unsigned extensionSamplingFreqIndex = bits->getBits(4);
+ if (extensionSamplingFreqIndex == 0x0f) {
+ /* unsigned extensionSamplingFrequency = */bits->getBits(24);
+ }
+ CHECK_EQ(parseAudioObjectType(bits, &audioObjectType), (status_t)OK);
+ }
+
+ CHECK((audioObjectType >= 1 && audioObjectType <= 4)
+ || (audioObjectType >= 6 && audioObjectType <= 7)
+ || audioObjectType == 17
+ || (audioObjectType >= 19 && audioObjectType <= 23));
+
+ CHECK_EQ(parseGASpecificConfig(
+ bits, audioObjectType, channelConfiguration), (status_t)OK);
+
+ if (audioObjectType == 17
+ || (audioObjectType >= 19 && audioObjectType <= 27)) {
+ unsigned epConfig = bits->getBits(2);
+ if (epConfig == 2 || epConfig == 3) {
+ // ErrorProtectionSpecificConfig
+ return ERROR_UNSUPPORTED; // XXX to be implemented
+
+ if (epConfig == 3) {
+ unsigned directMapping = bits->getBits(1);
+ CHECK_EQ(directMapping, 1u);
+ }
+ }
+ }
+
+#if 0
+ // This is not supported here as the upper layers did not explicitly
+ // signal the length of AudioSpecificConfig.
+
+ if (extensionAudioObjectType != 5 && bits->numBitsLeft() >= 16) {
+ unsigned syncExtensionType = bits->getBits(11);
+ if (syncExtensionType == 0x2b7) {
+ CHECK_EQ(parseAudioObjectType(bits, &extensionAudioObjectType),
+ (status_t)OK);
+
+ sbrPresent = bits->getBits(1);
+
+ if (sbrPresent == 1) {
+ unsigned extensionSamplingFreqIndex = bits->getBits(4);
+ if (extensionSamplingFreqIndex == 0x0f) {
+ /* unsigned extensionSamplingFrequency = */bits->getBits(24);
+ }
+ }
+ }
+ }
+#endif
+
+ return OK;
+}
+
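+// Parses the LATM StreamMuxConfig carried (hex-encoded) in the "config" SDP
+// parameter of an MP4A-LATM stream, extracting the fields needed later to
+// strip the LATM framing from each RTP payload.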
+static status_t parseStreamMuxConfig(
+ ABitReader *bits,
+ unsigned *numSubFrames,
+ unsigned *frameLengthType,
+ bool *otherDataPresent,
+ unsigned *otherDataLenBits) {
+ unsigned audioMuxVersion = bits->getBits(1);
+
+ unsigned audioMuxVersionA = 0;
+ if (audioMuxVersion == 1) {
+ audioMuxVersionA = bits->getBits(1);
+ }
+
+ CHECK_EQ(audioMuxVersionA, 0u); // otherwise future spec
+
+ if (audioMuxVersion != 0) {
+ return ERROR_UNSUPPORTED; // XXX to be implemented;
+ }
+ CHECK_EQ(audioMuxVersion, 0u); // XXX to be implemented
+
+ unsigned allStreamsSameTimeFraming = bits->getBits(1);
+ CHECK_EQ(allStreamsSameTimeFraming, 1u); // There's only one stream.
+
+ *numSubFrames = bits->getBits(6);
+ unsigned numProgram = bits->getBits(4);
+ CHECK_EQ(numProgram, 0u); // disabled in RTP LATM
+
+ unsigned numLayer = bits->getBits(3);
+ CHECK_EQ(numLayer, 0u); // disabled in RTP LATM
+
+ if (audioMuxVersion == 0) {
+ // AudioSpecificConfig
+ CHECK_EQ(parseAudioSpecificConfig(bits), (status_t)OK);
+ } else {
+ TRESPASS(); // XXX to be implemented
+ }
+
+ *frameLengthType = bits->getBits(3);
+ switch (*frameLengthType) {
+ case 0:
+ {
+ /* unsigned bufferFullness = */bits->getBits(8);
+
+ // The "coreFrameOffset" does not apply since there's only
+ // a single layer.
+ break;
+ }
+
+ case 1:
+ {
+ /* unsigned frameLength = */bits->getBits(9);
+ break;
+ }
+
+ case 3:
+ case 4:
+ case 5:
+ {
+ /* unsigned CELPframeLengthTableIndex = */bits->getBits(6);
+ break;
+ }
+
+ case 6:
+ case 7:
+ {
+ /* unsigned HVXCframeLengthTableIndex = */bits->getBits(1);
+ break;
+ }
+
+ default:
+ break;
+ }
+
+ *otherDataPresent = bits->getBits(1);
+ *otherDataLenBits = 0;
+ if (*otherDataPresent) {
+ if (audioMuxVersion == 1) {
+ TRESPASS(); // XXX to be implemented
+ } else {
+ *otherDataLenBits = 0;
+
+ unsigned otherDataLenEsc;
+ do {
+ (*otherDataLenBits) <<= 8;
+ otherDataLenEsc = bits->getBits(1);
+ unsigned otherDataLenTmp = bits->getBits(8);
+ (*otherDataLenBits) += otherDataLenTmp;
+ } while (otherDataLenEsc);
+ }
+ }
+
+ unsigned crcCheckPresent = bits->getBits(1);
+ if (crcCheckPresent) {
+ /* unsigned crcCheckSum = */bits->getBits(8);
+ }
+
+ return OK;
+}
+
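+// Strips the LATM framing (PayloadLengthInfo plus any trailing other-data)
+// from a reassembled RTP payload, returning just the raw audio payload.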
+sp<ABuffer> AMPEG4AudioAssembler::removeLATMFraming(const sp<ABuffer> &buffer) {
+ CHECK(!mMuxConfigPresent); // XXX to be implemented
+
+ sp<ABuffer> out = new ABuffer(buffer->size());
+ out->setRange(0, 0);
+
+ size_t offset = 0;
+ uint8_t *ptr = buffer->data();
+
+ for (size_t i = 0; i <= mNumSubFrames; ++i) {
+ // parse PayloadLengthInfo
+
+ unsigned payloadLength = 0;
+
+ switch (mFrameLengthType) {
+ case 0:
+ {
+ unsigned muxSlotLengthBytes = 0;
+ unsigned tmp;
+ do {
+ CHECK_LT(offset, buffer->size());
+ tmp = ptr[offset++];
+ muxSlotLengthBytes += tmp;
+ } while (tmp == 0xff);
+
+ payloadLength = muxSlotLengthBytes;
+ break;
+ }
+
+ default:
+ TRESPASS(); // XXX to be implemented
+ break;
+ }
+
+ CHECK_LE(offset + payloadLength, buffer->size());
+
+ memcpy(out->data() + out->size(), &ptr[offset], payloadLength);
+ out->setRange(0, out->size() + payloadLength);
+
+ offset += payloadLength;
+
+ if (mOtherDataPresent) {
+ // We want to stay byte-aligned.
+
+ CHECK((mOtherDataLenBits % 8) == 0);
+ CHECK_LE(offset + (mOtherDataLenBits / 8), buffer->size());
+ offset += mOtherDataLenBits / 8;
+ }
+ }
+
+ CHECK_EQ(offset, buffer->size());
+
+ return out;
+}
+
+AMPEG4AudioAssembler::AMPEG4AudioAssembler(
+ const sp<AMessage> &notify, const AString &params)
: mNotifyMsg(notify),
+ mMuxConfigPresent(false),
mAccessUnitRTPTime(0),
mNextExpectedSeqNoValid(false),
mNextExpectedSeqNo(0),
mAccessUnitDamaged(false) {
+ AString val;
+ if (!GetAttribute(params.c_str(), "cpresent", &val)) {
+ mMuxConfigPresent = true;
+ } else if (val == "0") {
+ mMuxConfigPresent = false;
+ } else {
+ CHECK(val == "1");
+ mMuxConfigPresent = true;
+ }
+
+ CHECK(GetAttribute(params.c_str(), "config", &val));
+
+ sp<ABuffer> config = decodeHex(val);
+ CHECK(config != NULL);
+
+ ABitReader bits(config->data(), config->size());
+ status_t err = parseStreamMuxConfig(
+ &bits, &mNumSubFrames, &mFrameLengthType,
+ &mOtherDataPresent, &mOtherDataLenBits);
+
+ CHECK_EQ(err, (status_t)NO_ERROR);
}
AMPEG4AudioAssembler::~AMPEG4AudioAssembler() {
@@ -108,13 +471,7 @@
while (it != mPackets.end()) {
const sp<ABuffer> &unit = *it;
- size_t n = 0;
- while (unit->data()[n] == 0xff) {
- ++n;
- }
- ++n;
-
- totalSize += unit->size() - n;
+ totalSize += unit->size();
++it;
}
@@ -124,20 +481,13 @@
while (it != mPackets.end()) {
const sp<ABuffer> &unit = *it;
- size_t n = 0;
- while (unit->data()[n] == 0xff) {
- ++n;
- }
- ++n;
-
memcpy((uint8_t *)accessUnit->data() + offset,
- unit->data() + n, unit->size() - n);
-
- offset += unit->size() - n;
+ unit->data(), unit->size());
++it;
}
+ accessUnit = removeLATMFraming(accessUnit);
CopyTimes(accessUnit, *mPackets.begin());
#if 0
diff --git a/media/libstagefright/rtsp/AMPEG4AudioAssembler.h b/media/libstagefright/rtsp/AMPEG4AudioAssembler.h
index bf9f204..9cef94c 100644
--- a/media/libstagefright/rtsp/AMPEG4AudioAssembler.h
+++ b/media/libstagefright/rtsp/AMPEG4AudioAssembler.h
@@ -27,9 +27,11 @@
namespace android {
struct AMessage;
+struct AString;
struct AMPEG4AudioAssembler : public ARTPAssembler {
- AMPEG4AudioAssembler(const sp<AMessage> &notify);
+ AMPEG4AudioAssembler(
+ const sp<AMessage> &notify, const AString &params);
protected:
virtual ~AMPEG4AudioAssembler();
@@ -40,6 +42,13 @@
private:
sp<AMessage> mNotifyMsg;
+
+ bool mMuxConfigPresent;
+ unsigned mNumSubFrames;
+ unsigned mFrameLengthType;
+ bool mOtherDataPresent;
+ unsigned mOtherDataLenBits;
+
uint32_t mAccessUnitRTPTime;
bool mNextExpectedSeqNoValid;
uint32_t mNextExpectedSeqNo;
@@ -49,6 +58,8 @@
AssemblyStatus addPacket(const sp<ARTPSource> &source);
void submitAccessUnit();
+ sp<ABuffer> removeLATMFraming(const sp<ABuffer> &buffer);
+
DISALLOW_EVIL_CONSTRUCTORS(AMPEG4AudioAssembler);
};
diff --git a/media/libstagefright/rtsp/ARTPSource.cpp b/media/libstagefright/rtsp/ARTPSource.cpp
index 2518264..5aae4e7 100644
--- a/media/libstagefright/rtsp/ARTPSource.cpp
+++ b/media/libstagefright/rtsp/ARTPSource.cpp
@@ -57,7 +57,7 @@
mAssembler = new AAVCAssembler(notify);
mIssueFIRRequests = true;
} else if (!strncmp(desc.c_str(), "MP4A-LATM/", 10)) {
- mAssembler = new AMPEG4AudioAssembler(notify);
+ mAssembler = new AMPEG4AudioAssembler(notify, params);
} else if (!strncmp(desc.c_str(), "H263-1998/", 10)
|| !strncmp(desc.c_str(), "H263-2000/", 10)) {
mAssembler = new AH263Assembler(notify);
diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp
index 155fd96..5a033e1 100644
--- a/media/libstagefright/rtsp/ARTPWriter.cpp
+++ b/media/libstagefright/rtsp/ARTPWriter.cpp
@@ -46,7 +46,7 @@
ARTPWriter::ARTPWriter(int fd)
: mFlags(0),
- mFd(fd),
+ mFd(dup(fd)),
mLooper(new ALooper),
mReflector(new AHandlerReflector<ARTPWriter>(this)) {
CHECK_GE(fd, 0);
diff --git a/media/libstagefright/rtsp/ARTSPConnection.cpp b/media/libstagefright/rtsp/ARTSPConnection.cpp
index f928c06..e936923 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTSPConnection.cpp
@@ -23,11 +23,13 @@
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/base64.h>
#include <media/stagefright/MediaErrors.h>
#include <arpa/inet.h>
#include <fcntl.h>
#include <netdb.h>
+#include <openssl/md5.h>
#include <sys/socket.h>
namespace android {
@@ -37,6 +39,7 @@
ARTSPConnection::ARTSPConnection()
: mState(DISCONNECTED),
+ mAuthType(NONE),
mSocket(-1),
mConnectionID(0),
mNextCSeq(0),
@@ -114,10 +117,13 @@
// static
bool ARTSPConnection::ParseURL(
- const char *url, AString *host, unsigned *port, AString *path) {
+ const char *url, AString *host, unsigned *port, AString *path,
+ AString *user, AString *pass) {
host->clear();
*port = 0;
path->clear();
+ user->clear();
+ pass->clear();
if (strncasecmp("rtsp://", url, 7)) {
return false;
@@ -133,6 +139,24 @@
path->setTo(slashPos);
}
+ ssize_t atPos = host->find("@");
+
+ if (atPos >= 0) {
+ // Split off the user:pass@ prefix from the hostname.
+
+ AString userPass(*host, 0, atPos);
+ host->erase(0, atPos + 1);
+
+ ssize_t colonPos = userPass.find(":");
+
+ if (colonPos < 0) {
+ *user = userPass;
+ } else {
+ user->setTo(userPass, 0, colonPos);
+ pass->setTo(userPass, colonPos + 1, userPass.size() - colonPos - 1);
+ }
+ }
+
const char *colonPos = strchr(host->c_str(), ':');
if (colonPos != NULL) {
@@ -187,7 +211,12 @@
AString host, path;
unsigned port;
- if (!ParseURL(url.c_str(), &host, &port, &path)) {
+ if (!ParseURL(url.c_str(), &host, &port, &path, &mUser, &mPass)
+ || (mUser.size() > 0 && mPass.size() == 0)) {
+ // If we have a user name but no password we have to give up
+ // right here, since we currently have no way of asking the user
+ // for this information.
+
LOGE("Malformed rtsp url %s", url.c_str());
reply->setInt32("result", ERROR_MALFORMED);
@@ -197,6 +226,10 @@
return;
}
+ if (mUser.size() > 0) {
+ LOGV("user = '%s', pass = '%s'", mUser.c_str(), mPass.c_str());
+ }
+
struct hostent *ent = gethostbyname(host.c_str());
if (ent == NULL) {
LOGE("Unknown host %s", host.c_str());
@@ -262,6 +295,11 @@
reply->setInt32("result", OK);
mState = DISCONNECTED;
+ mUser.clear();
+ mPass.clear();
+ mAuthType = NONE;
+ mNonce.clear();
+
reply->post();
}
@@ -335,6 +373,12 @@
AString request;
CHECK(msg->findString("request", &request));
+ // Just in case we need to re-issue the request with proper authentication
+ // later, stash it away.
+ reply->setString("original-request", request.c_str(), request.size());
+
+ addAuthentication(&request);
+
// Find the boundary between headers and the body.
ssize_t i = request.find("\r\n\r\n");
CHECK_GE(i, 0);
@@ -347,7 +391,7 @@
request.insert(cseqHeader, i + 2);
- LOGV("%s", request.c_str());
+ LOGV("request: '%s'", request.c_str());
size_t numBytesSent = 0;
while (numBytesSent < request.size()) {
@@ -612,6 +656,30 @@
}
}
+ if (response->mStatusCode == 401) {
+ if (mAuthType == NONE && mUser.size() > 0
+ && parseAuthMethod(response)) {
+ ssize_t i;
+ CHECK_EQ((status_t)OK, findPendingRequest(response, &i));
+ CHECK_GE(i, 0);
+
+ sp<AMessage> reply = mPendingRequests.valueAt(i);
+ mPendingRequests.removeItemsAt(i);
+
+ AString request;
+ CHECK(reply->findString("original-request", &request));
+
+ sp<AMessage> msg = new AMessage(kWhatSendRequest, id());
+ msg->setMessage("reply", reply);
+ msg->setString("request", request.c_str(), request.size());
+
+ LOGI("re-sending request with authentication headers...");
+ onSendRequest(msg);
+
+ return true;
+ }
+ }
+
return notifyResponseListener(response);
}
@@ -628,26 +696,47 @@
return true;
}
-bool ARTSPConnection::notifyResponseListener(
- const sp<ARTSPResponse> &response) {
+status_t ARTSPConnection::findPendingRequest(
+ const sp<ARTSPResponse> &response, ssize_t *index) const {
+ *index = 0;
+
ssize_t i = response->mHeaders.indexOfKey("cseq");
if (i < 0) {
- return true;
+ // This is an unsolicited server->client message.
+ return OK;
}
AString value = response->mHeaders.valueAt(i);
unsigned long cseq;
if (!ParseSingleUnsignedLong(value.c_str(), &cseq)) {
- return false;
+ return ERROR_MALFORMED;
}
i = mPendingRequests.indexOfKey(cseq);
if (i < 0) {
- // Unsolicited response?
- TRESPASS();
+ return -ENOENT;
+ }
+
+ *index = i;
+
+ return OK;
+}
+
+bool ARTSPConnection::notifyResponseListener(
+ const sp<ARTSPResponse> &response) {
+ ssize_t i;
+ status_t err = findPendingRequest(response, &i);
+
+ if (err == OK && i < 0) {
+ // An unsolicited server response is not a problem.
+ return true;
+ }
+
+ if (err != OK) {
+ return false;
}
sp<AMessage> reply = mPendingRequests.valueAt(i);
@@ -660,4 +749,160 @@
return true;
}
+bool ARTSPConnection::parseAuthMethod(const sp<ARTSPResponse> &response) {
+ ssize_t i = response->mHeaders.indexOfKey("www-authenticate");
+
+ if (i < 0) {
+ return false;
+ }
+
+ AString value = response->mHeaders.valueAt(i);
+
+ if (!strncmp(value.c_str(), "Basic", 5)) {
+ mAuthType = BASIC;
+ } else {
+#if !defined(HAVE_ANDROID_OS)
+ // We don't have access to the MD5 implementation on the simulator,
+ // so we won't support digest authentication.
+ return false;
+#endif
+
+ CHECK(!strncmp(value.c_str(), "Digest", 6));
+ mAuthType = DIGEST;
+
+ i = value.find("nonce=");
+ CHECK_GE(i, 0);
+ CHECK_EQ(value.c_str()[i + 6], '\"');
+ ssize_t j = value.find("\"", i + 7);
+ CHECK_GE(j, 0);
+
+ mNonce.setTo(value, i + 7, j - i - 7);
+ }
+
+ return true;
+}
+
+#if defined(HAVE_ANDROID_OS)
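+// Computes the MD5 digest of |s| and appends it to |out| as lowercase hex.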
+static void H(const AString &s, AString *out) {
+ out->clear();
+
+ MD5_CTX m;
+ MD5_Init(&m);
+ MD5_Update(&m, s.c_str(), s.size());
+
+ uint8_t key[16];
+ MD5_Final(key, &m);
+
+ for (size_t i = 0; i < 16; ++i) {
+ char nibble = key[i] >> 4;
+ if (nibble <= 9) {
+ nibble += '0';
+ } else {
+ nibble += 'a' - 10;
+ }
+ out->append(&nibble, 1);
+
+ nibble = key[i] & 0x0f;
+ if (nibble <= 9) {
+ nibble += '0';
+ } else {
+ nibble += 'a' - 10;
+ }
+ out->append(&nibble, 1);
+ }
+}
+#endif
+
+static void GetMethodAndURL(
+ const AString &request, AString *method, AString *url) {
+ ssize_t space1 = request.find(" ");
+ CHECK_GE(space1, 0);
+
+ ssize_t space2 = request.find(" ", space1 + 1);
+ CHECK_GE(space2, 0);
+
+ method->setTo(request, 0, space1);
+ url->setTo(request, space1 + 1, space2 - space1);
+}
+
+void ARTSPConnection::addAuthentication(AString *request) {
+ if (mAuthType == NONE) {
+ return;
+ }
+
+ // Find the boundary between headers and the body.
+ ssize_t i = request->find("\r\n\r\n");
+ CHECK_GE(i, 0);
+
+ if (mAuthType == BASIC) {
+ AString tmp;
+ tmp.append(mUser);
+ tmp.append(":");
+ tmp.append(mPass);
+
+ AString out;
+ encodeBase64(tmp.c_str(), tmp.size(), &out);
+
+ AString fragment;
+ fragment.append("Authorization: Basic ");
+ fragment.append(out);
+ fragment.append("\r\n");
+
+ request->insert(fragment, i + 2);
+
+ return;
+ }
+
+#if defined(HAVE_ANDROID_OS)
+ CHECK_EQ((int)mAuthType, (int)DIGEST);
+
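+ // RFC 2617 digest without qop: response = H(H(A1) ":" nonce ":" H(A2)),
+ // where A1 = "user:realm:password" (realm hardcoded below) and
+ // A2 = "method:uri".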
+ AString method, url;
+ GetMethodAndURL(*request, &method, &url);
+
+ AString A1;
+ A1.append(mUser);
+ A1.append(":");
+ A1.append("Streaming Server");
+ A1.append(":");
+ A1.append(mPass);
+
+ AString A2;
+ A2.append(method);
+ A2.append(":");
+ A2.append(url);
+
+ AString HA1, HA2;
+ H(A1, &HA1);
+ H(A2, &HA2);
+
+ AString tmp;
+ tmp.append(HA1);
+ tmp.append(":");
+ tmp.append(mNonce);
+ tmp.append(":");
+ tmp.append(HA2);
+
+ AString digest;
+ H(tmp, &digest);
+
+ AString fragment;
+ fragment.append("Authorization: Digest ");
+ fragment.append("nonce=\"");
+ fragment.append(mNonce);
+ fragment.append("\", ");
+ fragment.append("username=\"");
+ fragment.append(mUser);
+ fragment.append("\", ");
+ fragment.append("uri=\"");
+ fragment.append(url);
+ fragment.append("\", ");
+ fragment.append("response=\"");
+ fragment.append(digest);
+ fragment.append("\"");
+ fragment.append("\r\n");
+
+ request->insert(fragment, i + 2);
+#endif
+}
+
} // namespace android
diff --git a/media/libstagefright/rtsp/ARTSPConnection.h b/media/libstagefright/rtsp/ARTSPConnection.h
index 96e0d5b..19be2a6 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.h
+++ b/media/libstagefright/rtsp/ARTSPConnection.h
@@ -42,6 +42,10 @@
void observeBinaryData(const sp<AMessage> &reply);
+ static bool ParseURL(
+ const char *url, AString *host, unsigned *port, AString *path,
+ AString *user, AString *pass);
+
protected:
virtual ~ARTSPConnection();
virtual void onMessageReceived(const sp<AMessage> &msg);
@@ -62,9 +66,18 @@
kWhatObserveBinaryData = 'obin',
};
+ enum AuthType {
+ NONE,
+ BASIC,
+ DIGEST
+ };
+
static const int64_t kSelectTimeoutUs;
State mState;
+ AString mUser, mPass;
+ AuthType mAuthType;
+ AString mNonce;
int mSocket;
int32_t mConnectionID;
int32_t mNextCSeq;
@@ -90,8 +103,11 @@
sp<ABuffer> receiveBinaryData();
bool notifyResponseListener(const sp<ARTSPResponse> &response);
- static bool ParseURL(
- const char *url, AString *host, unsigned *port, AString *path);
+ bool parseAuthMethod(const sp<ARTSPResponse> &response);
+ void addAuthentication(AString *request);
+
+ status_t findPendingRequest(
+ const sp<ARTSPResponse> &response, ssize_t *index) const;
static bool ParseSingleUnsignedLong(
const char *from, unsigned long *x);
diff --git a/media/libstagefright/rtsp/ASessionDescription.cpp b/media/libstagefright/rtsp/ASessionDescription.cpp
index 612caff..547fbab 100644
--- a/media/libstagefright/rtsp/ASessionDescription.cpp
+++ b/media/libstagefright/rtsp/ASessionDescription.cpp
@@ -53,21 +53,30 @@
mFormats.push(AString("[root]"));
AString desc((const char *)data, size);
- LOGI("%s", desc.c_str());
size_t i = 0;
for (;;) {
- ssize_t eolPos = desc.find("\r\n", i);
+ ssize_t eolPos = desc.find("\n", i);
+
if (eolPos < 0) {
break;
}
- AString line(desc, i, eolPos - i);
+ AString line;
+ if ((size_t)eolPos > i && desc.c_str()[eolPos - 1] == '\r') {
+ // We accept both '\n' and '\r\n' line endings; if it's
+ // the latter, strip the '\r' as well.
+ line.setTo(desc, i, eolPos - i - 1);
+ } else {
+ line.setTo(desc, i, eolPos - i);
+ }
if (line.size() < 2 || line.c_str()[1] != '=') {
return false;
}
+ LOGI("%s", line.c_str());
+
switch (line.c_str()[0]) {
case 'v':
{
@@ -141,7 +150,7 @@
}
}
- i = eolPos + 2;
+ i = eolPos + 1;
}
return true;
@@ -245,7 +254,7 @@
return false;
}
- if (value == "npt=now-") {
+ if (value == "npt=now-" || value == "npt=0-") {
return false;
}
diff --git a/media/libstagefright/rtsp/Android.mk b/media/libstagefright/rtsp/Android.mk
index 081ae32..0bbadc1 100644
--- a/media/libstagefright/rtsp/Android.mk
+++ b/media/libstagefright/rtsp/Android.mk
@@ -23,6 +23,7 @@
$(JNI_H_INCLUDE) \
$(TOP)/frameworks/base/include/media/stagefright/openmax \
$(TOP)/frameworks/base/media/libstagefright/include \
+ $(TOP)/external/openssl/include
LOCAL_MODULE:= libstagefright_rtsp
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index 6943608..9bb8c46 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -96,6 +96,7 @@
mNetLooper(new ALooper),
mConn(new ARTSPConnection),
mRTPConn(new ARTPConnection),
+ mOriginalSessionURL(url),
mSessionURL(url),
mSetupTracksSuccessful(false),
mSeekPending(false),
@@ -113,6 +114,23 @@
mNetLooper->start(false /* runOnCallingThread */,
false /* canCallJava */,
PRIORITY_HIGHEST);
+
+ // Strip any authentication info from the session url, we don't
+ // want to transmit user/pass in cleartext.
+ AString host, path, user, pass;
+ unsigned port;
+ if (ARTSPConnection::ParseURL(
+ mSessionURL.c_str(), &host, &port, &path, &user, &pass)
+ && user.size() > 0) {
+ mSessionURL.clear();
+ mSessionURL.append("rtsp://");
+ mSessionURL.append(host);
+ mSessionURL.append(":");
+ mSessionURL.append(StringPrintf("%u", port));
+ mSessionURL.append(path);
+
+ LOGI("rewritten session url: '%s'", mSessionURL.c_str());
+ }
}
void connect(const sp<AMessage> &doneMsg) {
@@ -126,7 +144,7 @@
mConn->observeBinaryData(notify);
sp<AMessage> reply = new AMessage('conn', id());
- mConn->connect(mSessionURL.c_str(), reply);
+ mConn->connect(mOriginalSessionURL.c_str(), reply);
}
void disconnect(const sp<AMessage> &doneMsg) {
@@ -312,7 +330,7 @@
int32_t reconnect;
if (msg->findInt32("reconnect", &reconnect) && reconnect) {
sp<AMessage> reply = new AMessage('conn', id());
- mConn->connect(mSessionURL.c_str(), reply);
+ mConn->connect(mOriginalSessionURL.c_str(), reply);
} else {
(new AMessage('quit', id()))->post();
}
@@ -922,7 +940,7 @@
CHECK(GetAttribute(range.c_str(), "npt", &val));
float npt1, npt2;
- if (val == "now-") {
+ if (val == "now-" || val == "0-") {
// This is a live stream and therefore not seekable.
return;
} else {
@@ -992,6 +1010,7 @@
sp<ARTSPConnection> mConn;
sp<ARTPConnection> mRTPConn;
sp<ASessionDescription> mSessionDesc;
+ AString mOriginalSessionURL; // This one still has user:pass@
AString mSessionURL;
AString mBaseURL;
AString mSessionID;
diff --git a/media/libstagefright/string.cpp b/media/libstagefright/string.cpp
deleted file mode 100644
index 8b2c36c..0000000
--- a/media/libstagefright/string.cpp
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "include/stagefright_string.h"
-
-#include <media/stagefright/MediaDebug.h>
-
-namespace android {
-
-// static
-string::size_type string::npos = (string::size_type)-1;
-
-string::string() {
-}
-
-string::string(const char *s, size_t length)
- : mString(s, length) {
-}
-
-string::string(const string &from, size_type start, size_type length) {
- CHECK(start <= from.size());
- if (length == npos) {
- length = from.size() - start;
- } else {
- CHECK(start + length <= from.size());
- }
-
- mString.setTo(from.c_str() + start, length);
-}
-
-string::string(const char *s)
- : mString(s) {
-}
-
-const char *string::c_str() const {
- return mString.string();
-}
-
-string::size_type string::size() const {
- return mString.length();
-}
-
-void string::clear() {
- mString = String8();
-}
-
-string::size_type string::find(char c) const {
- char s[2];
- s[0] = c;
- s[1] = '\0';
-
- ssize_t index = mString.find(s);
-
- return index < 0 ? npos : (size_type)index;
-}
-
-bool string::operator<(const string &other) const {
- return mString < other.mString;
-}
-
-bool string::operator==(const string &other) const {
- return mString == other.mString;
-}
-
-string &string::operator+=(char c) {
- mString.append(&c, 1);
-
- return *this;
-}
-
-void string::erase(size_t from, size_t length) {
- String8 s(mString.string(), from);
- s.append(mString.string() + from + length);
-
- mString = s;
-}
-
-} // namespace android
-
diff --git a/media/libstagefright/yuv/Android.mk b/media/libstagefright/yuv/Android.mk
new file mode 100644
index 0000000..0794ad1
--- /dev/null
+++ b/media/libstagefright/yuv/Android.mk
@@ -0,0 +1,13 @@
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ YUVImage.cpp \
+ YUVCanvas.cpp
+
+LOCAL_SHARED_LIBRARIES := \
+ libcutils
+
+LOCAL_MODULE:= libstagefright_yuv
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/yuv/YUVCanvas.cpp b/media/libstagefright/yuv/YUVCanvas.cpp
new file mode 100644
index 0000000..38aa779
--- /dev/null
+++ b/media/libstagefright/yuv/YUVCanvas.cpp
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "YUVCanvas"
+
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/YUVCanvas.h>
+#include <media/stagefright/YUVImage.h>
+#include <ui/Rect.h>
+
+namespace android {
+
+YUVCanvas::YUVCanvas(YUVImage &yuvImage)
+ : mYUVImage(yuvImage) {
+}
+
+YUVCanvas::~YUVCanvas() {
+}
+
+void YUVCanvas::FillYUV(uint8_t yValue, uint8_t uValue, uint8_t vValue) {
+ for (int32_t y = 0; y < mYUVImage.height(); ++y) {
+ for (int32_t x = 0; x < mYUVImage.width(); ++x) {
+ mYUVImage.setPixelValue(x, y, yValue, uValue, vValue);
+ }
+ }
+}
+
+void YUVCanvas::FillYUVRectangle(const Rect& rect,
+ uint8_t yValue, uint8_t uValue, uint8_t vValue) {
+ for (int32_t y = rect.top; y < rect.bottom; ++y) {
+ for (int32_t x = rect.left; x < rect.right; ++x) {
+ mYUVImage.setPixelValue(x, y, yValue, uValue, vValue);
+ }
+ }
+}
+
+void YUVCanvas::CopyImageRect(
+ const Rect& srcRect,
+ int32_t destStartX, int32_t destStartY,
+ const YUVImage &srcImage) {
+
+ // Try fast copy first
+ if (YUVImage::fastCopyRectangle(
+ srcRect,
+ destStartX, destStartY,
+ srcImage, mYUVImage)) {
+ return;
+ }
+
+ int32_t srcStartX = srcRect.left;
+ int32_t srcStartY = srcRect.top;
+ for (int32_t offsetY = 0; offsetY < srcRect.height(); ++offsetY) {
+ for (int32_t offsetX = 0; offsetX < srcRect.width(); ++offsetX) {
+ int32_t srcX = srcStartX + offsetX;
+ int32_t srcY = srcStartY + offsetY;
+
+ int32_t destX = destStartX + offsetX;
+ int32_t destY = destStartY + offsetY;
+
+ uint8_t yValue;
+ uint8_t uValue;
+ uint8_t vValue;
+
+ srcImage.getPixelValue(srcX, srcY, &yValue, &uValue, &vValue);
+ mYUVImage.setPixelValue(destX, destY, yValue, uValue, vValue);
+ }
+ }
+}
+
+void YUVCanvas::downsample(
+ int32_t srcOffsetX, int32_t srcOffsetY,
+ int32_t skipX, int32_t skipY,
+ const YUVImage &srcImage) {
+ // TODO: Add a low pass filter for downsampling.
+
+ // Check that srcImage is big enough to fill mYUVImage.
+ CHECK((srcOffsetX + (mYUVImage.width() - 1) * skipX) < srcImage.width());
+ CHECK((srcOffsetY + (mYUVImage.height() - 1) * skipY) < srcImage.height());
+
+ uint8_t yValue;
+ uint8_t uValue;
+ uint8_t vValue;
+
+ int32_t srcY = srcOffsetY;
+ for (int32_t y = 0; y < mYUVImage.height(); ++y) {
+ int32_t srcX = srcOffsetX;
+ for (int32_t x = 0; x < mYUVImage.width(); ++x) {
+ srcImage.getPixelValue(srcX, srcY, &yValue, &uValue, &vValue);
+ mYUVImage.setPixelValue(x, y, yValue, uValue, vValue);
+
+ srcX += skipX;
+ }
+ srcY += skipY;
+ }
+}
+
+} // namespace android
diff --git a/media/libstagefright/yuv/YUVImage.cpp b/media/libstagefright/yuv/YUVImage.cpp
new file mode 100644
index 0000000..b712062
--- /dev/null
+++ b/media/libstagefright/yuv/YUVImage.cpp
@@ -0,0 +1,413 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "YUVImage"
+
+#include <media/stagefright/YUVImage.h>
+#include <ui/Rect.h>
+#include <media/stagefright/MediaDebug.h>
+
+namespace android {
+
+YUVImage::YUVImage(YUVFormat yuvFormat, int32_t width, int32_t height) {
+ mYUVFormat = yuvFormat;
+ mWidth = width;
+ mHeight = height;
+
+ size_t numberOfBytes = bufferSize(yuvFormat, width, height);
+ uint8_t *buffer = new uint8_t[numberOfBytes];
+ mBuffer = buffer;
+ mOwnBuffer = true;
+
+ initializeYUVPointers();
+}
+
+YUVImage::YUVImage(YUVFormat yuvFormat, int32_t width, int32_t height, uint8_t *buffer) {
+ mYUVFormat = yuvFormat;
+ mWidth = width;
+ mHeight = height;
+ mBuffer = buffer;
+ mOwnBuffer = false;
+
+ initializeYUVPointers();
+}
+
+// static
+size_t YUVImage::bufferSize(YUVFormat yuvFormat, int32_t width, int32_t height) {
+ int32_t numberOfPixels = width*height;
+ size_t numberOfBytes = 0;
+ if (yuvFormat == YUV420Planar || yuvFormat == YUV420SemiPlanar) {
+ // Y takes numberOfPixels bytes and U/V take numberOfPixels/4 bytes each.
+ numberOfBytes = (size_t)(numberOfPixels + (numberOfPixels >> 1));
+ } else {
+ LOGE("Format not supported");
+ }
+ return numberOfBytes;
+}
+
+bool YUVImage::initializeYUVPointers() {
+ int32_t numberOfPixels = mWidth * mHeight;
+
+ if (mYUVFormat == YUV420Planar) {
+ mYdata = (uint8_t *)mBuffer;
+ mUdata = mYdata + numberOfPixels;
+ mVdata = mUdata + (numberOfPixels >> 2);
+ } else if (mYUVFormat == YUV420SemiPlanar) {
+ // U and V channels are interleaved as VUVUVU.
+ // So V data starts at the end of Y channel and
+ // U data starts right after V's start.
+ mYdata = (uint8_t *)mBuffer;
+ mVdata = mYdata + numberOfPixels;
+ mUdata = mVdata + 1;
+ } else {
+ LOGE("Format not supported");
+ return false;
+ }
+ return true;
+}
+
+YUVImage::~YUVImage() {
+ if (mOwnBuffer) delete[] mBuffer;
+}
+
+bool YUVImage::getOffsets(int32_t x, int32_t y,
+ int32_t *yOffset, int32_t *uOffset, int32_t *vOffset) const {
+ *yOffset = y*mWidth + x;
+
+ int32_t uvOffset = (y >> 1) * (mWidth >> 1) + (x >> 1);
+ if (mYUVFormat == YUV420Planar) {
+ *uOffset = uvOffset;
+ *vOffset = uvOffset;
+ } else if (mYUVFormat == YUV420SemiPlanar) {
+ // Since U and V channels are interleaved, offsets need
+ // to be doubled.
+ *uOffset = 2*uvOffset;
+ *vOffset = 2*uvOffset;
+ } else {
+ LOGE("Format not supported");
+ return false;
+ }
+
+ return true;
+}
+
+bool YUVImage::getOffsetIncrementsPerDataRow(
+ int32_t *yDataOffsetIncrement,
+ int32_t *uDataOffsetIncrement,
+ int32_t *vDataOffsetIncrement) const {
+ *yDataOffsetIncrement = mWidth;
+
+ int32_t uvDataOffsetIncrement = mWidth >> 1;
+
+ if (mYUVFormat == YUV420Planar) {
+ *uDataOffsetIncrement = uvDataOffsetIncrement;
+ *vDataOffsetIncrement = uvDataOffsetIncrement;
+ } else if (mYUVFormat == YUV420SemiPlanar) {
+ // Since U and V channels are interleaved, offsets need
+ // to be doubled.
+ *uDataOffsetIncrement = 2*uvDataOffsetIncrement;
+ *vDataOffsetIncrement = 2*uvDataOffsetIncrement;
+ } else {
+ LOGE("Format not supported");
+ return false;
+ }
+
+ return true;
+}
+
+uint8_t* YUVImage::getYAddress(int32_t offset) const {
+ return mYdata + offset;
+}
+
+uint8_t* YUVImage::getUAddress(int32_t offset) const {
+ return mUdata + offset;
+}
+
+uint8_t* YUVImage::getVAddress(int32_t offset) const {
+ return mVdata + offset;
+}
+
+bool YUVImage::getYUVAddresses(int32_t x, int32_t y,
+ uint8_t **yAddr, uint8_t **uAddr, uint8_t **vAddr) const {
+ int32_t yOffset;
+ int32_t uOffset;
+ int32_t vOffset;
+ if (!getOffsets(x, y, &yOffset, &uOffset, &vOffset)) return false;
+
+ *yAddr = getYAddress(yOffset);
+ *uAddr = getUAddress(uOffset);
+ *vAddr = getVAddress(vOffset);
+
+ return true;
+}
+
+bool YUVImage::validPixel(int32_t x, int32_t y) const {
+ return (x >= 0 && x < mWidth &&
+ y >= 0 && y < mHeight);
+}
+
+bool YUVImage::getPixelValue(int32_t x, int32_t y,
+ uint8_t *yPtr, uint8_t *uPtr, uint8_t *vPtr) const {
+ CHECK(validPixel(x, y));
+
+ uint8_t *yAddr;
+ uint8_t *uAddr;
+ uint8_t *vAddr;
+ if (!getYUVAddresses(x, y, &yAddr, &uAddr, &vAddr)) return false;
+
+ *yPtr = *yAddr;
+ *uPtr = *uAddr;
+ *vPtr = *vAddr;
+
+ return true;
+}
+
+bool YUVImage::setPixelValue(int32_t x, int32_t y,
+ uint8_t yValue, uint8_t uValue, uint8_t vValue) {
+ CHECK(validPixel(x, y));
+
+ uint8_t *yAddr;
+ uint8_t *uAddr;
+ uint8_t *vAddr;
+ if (!getYUVAddresses(x, y, &yAddr, &uAddr, &vAddr)) return false;
+
+ *yAddr = yValue;
+ *uAddr = uValue;
+ *vAddr = vValue;
+
+ return true;
+}
+
+void YUVImage::fastCopyRectangle420Planar(
+ const Rect& srcRect,
+ int32_t destStartX, int32_t destStartY,
+ const YUVImage &srcImage, YUVImage &destImage) {
+ CHECK(srcImage.mYUVFormat == YUV420Planar);
+ CHECK(destImage.mYUVFormat == YUV420Planar);
+
+ int32_t srcStartX = srcRect.left;
+ int32_t srcStartY = srcRect.top;
+ int32_t width = srcRect.width();
+ int32_t height = srcRect.height();
+
+ // Get source and destination start addresses
+ uint8_t *ySrcAddrBase;
+ uint8_t *uSrcAddrBase;
+ uint8_t *vSrcAddrBase;
+ srcImage.getYUVAddresses(srcStartX, srcStartY,
+ &ySrcAddrBase, &uSrcAddrBase, &vSrcAddrBase);
+
+ uint8_t *yDestAddrBase;
+ uint8_t *uDestAddrBase;
+ uint8_t *vDestAddrBase;
+ destImage.getYUVAddresses(destStartX, destStartY,
+ &yDestAddrBase, &uDestAddrBase, &vDestAddrBase);
+
+ // Get source and destination offset increments incurred in going
+ // from one data row to the next.
+ int32_t ySrcOffsetIncrement;
+ int32_t uSrcOffsetIncrement;
+ int32_t vSrcOffsetIncrement;
+ srcImage.getOffsetIncrementsPerDataRow(
+ &ySrcOffsetIncrement, &uSrcOffsetIncrement, &vSrcOffsetIncrement);
+
+ int32_t yDestOffsetIncrement;
+ int32_t uDestOffsetIncrement;
+ int32_t vDestOffsetIncrement;
+ destImage.getOffsetIncrementsPerDataRow(
+ &yDestOffsetIncrement, &uDestOffsetIncrement, &vDestOffsetIncrement);
+
+ // Copy Y
+ {
+ size_t numberOfYBytesPerRow = (size_t) width;
+ uint8_t *ySrcAddr = ySrcAddrBase;
+ uint8_t *yDestAddr = yDestAddrBase;
+ for (int32_t offsetY = 0; offsetY < height; ++offsetY) {
+ memcpy(yDestAddr, ySrcAddr, numberOfYBytesPerRow);
+
+ ySrcAddr += ySrcOffsetIncrement;
+ yDestAddr += yDestOffsetIncrement;
+ }
+ }
+
+ // Copy U
+ {
+ size_t numberOfUBytesPerRow = (size_t) (width >> 1);
+ uint8_t *uSrcAddr = uSrcAddrBase;
+ uint8_t *uDestAddr = uDestAddrBase;
+ // Every other row has an entry for U/V channel values. Hence only
+ // go half the height.
+ for (int32_t offsetY = 0; offsetY < (height >> 1); ++offsetY) {
+ memcpy(uDestAddr, uSrcAddr, numberOfUBytesPerRow);
+
+ uSrcAddr += uSrcOffsetIncrement;
+ uDestAddr += uDestOffsetIncrement;
+ }
+ }
+
+ // Copy V
+ {
+ size_t numberOfVBytesPerRow = (size_t) (width >> 1);
+ uint8_t *vSrcAddr = vSrcAddrBase;
+ uint8_t *vDestAddr = vDestAddrBase;
+ // Every other pixel row has a U/V data row. Hence only go half the height.
+ for (int32_t offsetY = 0; offsetY < (height >> 1); ++offsetY) {
+ memcpy(vDestAddr, vSrcAddr, numberOfVBytesPerRow);
+
+ vSrcAddr += vSrcOffsetIncrement;
+ vDestAddr += vDestOffsetIncrement;
+ }
+ }
+}
+
+void YUVImage::fastCopyRectangle420SemiPlanar(
+ const Rect& srcRect,
+ int32_t destStartX, int32_t destStartY,
+ const YUVImage &srcImage, YUVImage &destImage) {
+ CHECK(srcImage.mYUVFormat == YUV420SemiPlanar);
+ CHECK(destImage.mYUVFormat == YUV420SemiPlanar);
+
+ int32_t srcStartX = srcRect.left;
+ int32_t srcStartY = srcRect.top;
+ int32_t width = srcRect.width();
+ int32_t height = srcRect.height();
+
+ // Get source and destination start addresses
+ uint8_t *ySrcAddrBase;
+ uint8_t *uSrcAddrBase;
+ uint8_t *vSrcAddrBase;
+ srcImage.getYUVAddresses(srcStartX, srcStartY,
+ &ySrcAddrBase, &uSrcAddrBase, &vSrcAddrBase);
+
+ uint8_t *yDestAddrBase;
+ uint8_t *uDestAddrBase;
+ uint8_t *vDestAddrBase;
+ destImage.getYUVAddresses(destStartX, destStartY,
+ &yDestAddrBase, &uDestAddrBase, &vDestAddrBase);
+
+ // Get source and destination offset increments incurred in going
+ // from one data row to the next.
+ int32_t ySrcOffsetIncrement;
+ int32_t uSrcOffsetIncrement;
+ int32_t vSrcOffsetIncrement;
+ srcImage.getOffsetIncrementsPerDataRow(
+ &ySrcOffsetIncrement, &uSrcOffsetIncrement, &vSrcOffsetIncrement);
+
+ int32_t yDestOffsetIncrement;
+ int32_t uDestOffsetIncrement;
+ int32_t vDestOffsetIncrement;
+ destImage.getOffsetIncrementsPerDataRow(
+ &yDestOffsetIncrement, &uDestOffsetIncrement, &vDestOffsetIncrement);
+
+ // Copy Y
+ {
+ size_t numberOfYBytesPerRow = (size_t) width;
+ uint8_t *ySrcAddr = ySrcAddrBase;
+ uint8_t *yDestAddr = yDestAddrBase;
+ for (int32_t offsetY = 0; offsetY < height; ++offsetY) {
+ memcpy(yDestAddr, ySrcAddr, numberOfYBytesPerRow);
+
+ ySrcAddr = ySrcAddr + ySrcOffsetIncrement;
+ yDestAddr = yDestAddr + yDestOffsetIncrement;
+ }
+ }
+
+ // Copy UV
+ {
+ // UV are interleaved. So number of UV bytes per row is 2*(width/2).
+ size_t numberOfUVBytesPerRow = (size_t) width;
+ uint8_t *vSrcAddr = vSrcAddrBase;
+ uint8_t *vDestAddr = vDestAddrBase;
+ // Every other pixel row has a U/V data row. Hence only go half the height.
+ for (int32_t offsetY = 0; offsetY < (height >> 1); ++offsetY) {
+ memcpy(vDestAddr, vSrcAddr, numberOfUVBytesPerRow);
+
+ vSrcAddr += vSrcOffsetIncrement;
+ vDestAddr += vDestOffsetIncrement;
+ }
+ }
+}
+
+// static
+bool YUVImage::fastCopyRectangle(
+ const Rect& srcRect,
+ int32_t destStartX, int32_t destStartY,
+ const YUVImage &srcImage, YUVImage &destImage) {
+ if (srcImage.mYUVFormat == destImage.mYUVFormat) {
+ if (srcImage.mYUVFormat == YUV420Planar) {
+ fastCopyRectangle420Planar(
+ srcRect,
+ destStartX, destStartY,
+ srcImage, destImage);
+ } else if (srcImage.mYUVFormat == YUV420SemiPlanar) {
+ fastCopyRectangle420SemiPlanar(
+ srcRect,
+ destStartX, destStartY,
+ srcImage, destImage);
+ }
+ return true;
+ }
+ return false;
+}
+
+uint8_t clamp(uint8_t v, uint8_t minValue, uint8_t maxValue) {
+ CHECK(maxValue >= minValue);
+
+ if (v < minValue) return minValue;
+ else if (v > maxValue) return maxValue;
+ else return v;
+}
+
+void YUVImage::yuv2rgb(uint8_t yValue, uint8_t uValue, uint8_t vValue,
+ uint8_t *r, uint8_t *g, uint8_t *b) const {
+ // Compute in floating point and clamp to [0, 255] before narrowing to
+ // uint8_t; clamping after the assignment would be too late, since
+ // out-of-range intermediate values would already have wrapped around.
+ float rf = yValue + (1.370705 * (vValue-128));
+ float gf = yValue - (0.698001 * (vValue-128)) - (0.337633 * (uValue-128));
+ float bf = yValue + (1.732446 * (uValue-128));
+
+ *r = (uint8_t)(rf < 0 ? 0 : (rf > 255 ? 255 : rf));
+ *g = (uint8_t)(gf < 0 ? 0 : (gf > 255 ? 255 : gf));
+ *b = (uint8_t)(bf < 0 ? 0 : (bf > 255 ? 255 : bf));
+}
+
+bool YUVImage::writeToPPM(const char *filename) const {
+ FILE *fp = fopen(filename, "w");
+ if (fp == NULL) {
+ return false;
+ }
+ fprintf(fp, "P3\n");
+ fprintf(fp, "%d %d\n", mWidth, mHeight);
+ fprintf(fp, "255\n");
+ for (int32_t y = 0; y < mHeight; ++y) {
+ for (int32_t x = 0; x < mWidth; ++x) {
+ uint8_t yValue;
+ uint8_t uValue;
+ uint8_t vValue;
+ getPixelValue(x, y, &yValue, &uValue, &vValue);
+
+ uint8_t rValue;
+ uint8_t gValue;
+ uint8_t bValue;
+ yuv2rgb(yValue, uValue, vValue, &rValue, &gValue, &bValue);
+
+ fprintf(fp, "%d %d %d\n", (int32_t)rValue, (int32_t)gValue, (int32_t)bValue);
+ }
+ }
+ fclose(fp);
+ return true;
+}
+
+} // namespace android
diff --git a/media/mtp/Android.mk b/media/mtp/Android.mk
new file mode 100644
index 0000000..b7e1a2a
--- /dev/null
+++ b/media/mtp/Android.mk
@@ -0,0 +1,49 @@
+#
+# Copyright (C) 2010 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+LOCAL_PATH:= $(call my-dir)
+
+ifneq ($(TARGET_SIMULATOR),true)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ MtpClient.cpp \
+ MtpDataPacket.cpp \
+ MtpDebug.cpp \
+ MtpDevice.cpp \
+ MtpEventPacket.cpp \
+ MtpDeviceInfo.cpp \
+ MtpObjectInfo.cpp \
+ MtpPacket.cpp \
+ MtpProperty.cpp \
+ MtpRequestPacket.cpp \
+ MtpResponsePacket.cpp \
+ MtpServer.cpp \
+ MtpStorageInfo.cpp \
+ MtpStringBuffer.cpp \
+ MtpStorage.cpp \
+ MtpUtils.cpp \
+ PtpCursor.cpp \
+
+LOCAL_MODULE:= libmtp
+
+LOCAL_CFLAGS := -DMTP_DEVICE -DMTP_HOST
+
+include $(BUILD_STATIC_LIBRARY)
+
+endif
+
diff --git a/media/mtp/MtpClient.cpp b/media/mtp/MtpClient.cpp
new file mode 100644
index 0000000..ceb6a43
--- /dev/null
+++ b/media/mtp/MtpClient.cpp
@@ -0,0 +1,262 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpClient"
+
+#include "MtpDebug.h"
+#include "MtpClient.h"
+#include "MtpDevice.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <sys/types.h>
+#include <sys/ioctl.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <errno.h>
+
+#include <usbhost/usbhost.h>
+#include <linux/version.h>
+#if LINUX_VERSION_CODE > KERNEL_VERSION(2, 6, 20)
+#include <linux/usb/ch9.h>
+#else
+#include <linux/usb_ch9.h>
+#endif
+
+namespace android {
+
+static bool isMtpDevice(uint16_t vendor, uint16_t product) {
+ // Sandisk Sansa Fuze
+ if (vendor == 0x0781 && product == 0x74c2)
+ return true;
+ // Samsung YP-Z5
+ if (vendor == 0x04e8 && product == 0x503c)
+ return true;
+ return false;
+}
+
+class MtpClientThread : public Thread {
+private:
+ MtpClient* mClient;
+
+public:
+ MtpClientThread(MtpClient* client)
+ : mClient(client)
+ {
+ }
+
+ virtual bool threadLoop() {
+ return mClient->threadLoop();
+ }
+};
+
+
+MtpClient::MtpClient()
+ : mThread(NULL),
+ mUsbHostContext(NULL),
+ mDone(false)
+{
+}
+
+MtpClient::~MtpClient() {
+ usb_host_cleanup(mUsbHostContext);
+}
+
+bool MtpClient::start() {
+ Mutex::Autolock autoLock(mMutex);
+
+ if (mThread)
+ return true;
+
+ mUsbHostContext = usb_host_init();
+ if (!mUsbHostContext)
+ return false;
+
+ mThread = new MtpClientThread(this);
+ mThread->run("MtpClientThread");
+ // wait for the thread to do initial device discovery before returning
+ mThreadStartCondition.wait(mMutex);
+
+ return true;
+}
+
+void MtpClient::stop() {
+ mDone = true;
+}
+
+MtpDevice* MtpClient::getDevice(int id) {
+ for (int i = 0; i < mDeviceList.size(); i++) {
+ MtpDevice* device = mDeviceList[i];
+ if (device->getID() == id)
+ return device;
+ }
+ return NULL;
+}
+
+bool MtpClient::usbDeviceAdded(const char *devname) {
+ struct usb_descriptor_header* desc;
+ struct usb_descriptor_iter iter;
+
+ struct usb_device *device = usb_device_open(devname);
+ if (!device) {
+ LOGE("usb_device_open failed\n");
+ return mDone;
+ }
+
+ usb_descriptor_iter_init(device, &iter);
+
+ while ((desc = usb_descriptor_iter_next(&iter)) != NULL) {
+ if (desc->bDescriptorType == USB_DT_INTERFACE) {
+ struct usb_interface_descriptor *interface = (struct usb_interface_descriptor *)desc;
+
+ if (interface->bInterfaceClass == USB_CLASS_STILL_IMAGE &&
+ interface->bInterfaceSubClass == 1 && // Still Image Capture
+ interface->bInterfaceProtocol == 1) // Picture Transfer Protocol (PIMA 15470)
+ {
+ LOGD("Found camera: \"%s\" \"%s\"\n", usb_device_get_manufacturer_name(device),
+ usb_device_get_product_name(device));
+ } else if (interface->bInterfaceClass == 0xFF &&
+ interface->bInterfaceSubClass == 0xFF &&
+ interface->bInterfaceProtocol == 0) {
+ char* interfaceName = usb_device_get_string(device, interface->iInterface);
+ if (!interfaceName || strcmp(interfaceName, "MTP"))
+ continue;
+ // Looks like an android style MTP device
+ LOGD("Found MTP device: \"%s\" \"%s\"\n", usb_device_get_manufacturer_name(device),
+ usb_device_get_product_name(device));
+ } else {
+ // look for special cased devices based on vendor/product ID
+ // we are doing this mainly for testing purposes
+ uint16_t vendor = usb_device_get_vendor_id(device);
+ uint16_t product = usb_device_get_product_id(device);
+ if (!isMtpDevice(vendor, product)) {
+ // not an MTP or PTP device
+ continue;
+ }
+ // request MTP OS string and descriptor
+ // some music players need to see this before entering MTP mode.
+ char buffer[256];
+ memset(buffer, 0, sizeof(buffer));
+ int ret = usb_device_send_control(device,
+ USB_DIR_IN|USB_RECIP_DEVICE|USB_TYPE_STANDARD,
+ USB_REQ_GET_DESCRIPTOR, (USB_DT_STRING << 8) | 0xEE,
+ 0, sizeof(buffer), buffer);
+ printf("usb_device_send_control returned %d errno: %d\n", ret, errno);
+ if (ret > 0) {
+ printf("got MTP string %s\n", buffer);
+ ret = usb_device_send_control(device,
+ USB_DIR_IN|USB_RECIP_DEVICE|USB_TYPE_VENDOR, 1,
+ 0, 4, sizeof(buffer), buffer);
+ printf("OS descriptor got %d\n", ret);
+ } else {
+ printf("no MTP string\n");
+ }
+ }
+
+ // if we got here, then we have a likely MTP or PTP device
+
+ // interface should be followed by three endpoints
+ struct usb_endpoint_descriptor *ep;
+ struct usb_endpoint_descriptor *ep_in_desc = NULL;
+ struct usb_endpoint_descriptor *ep_out_desc = NULL;
+ struct usb_endpoint_descriptor *ep_intr_desc = NULL;
+ for (int i = 0; i < 3; i++) {
+ ep = (struct usb_endpoint_descriptor *)usb_descriptor_iter_next(&iter);
+ if (!ep || ep->bDescriptorType != USB_DT_ENDPOINT) {
+ LOGE("endpoints not found\n");
+ return mDone;
+ }
+ if (ep->bmAttributes == USB_ENDPOINT_XFER_BULK) {
+ if (ep->bEndpointAddress & USB_ENDPOINT_DIR_MASK)
+ ep_in_desc = ep;
+ else
+ ep_out_desc = ep;
+ } else if (ep->bmAttributes == USB_ENDPOINT_XFER_INT &&
+ ep->bEndpointAddress & USB_ENDPOINT_DIR_MASK) {
+ ep_intr_desc = ep;
+ }
+ }
+ if (!ep_in_desc || !ep_out_desc || !ep_intr_desc) {
+ LOGE("endpoints not found\n");
+ return mDone;
+ }
+
+ struct usb_endpoint *ep_in = usb_endpoint_open(device, ep_in_desc);
+ struct usb_endpoint *ep_out = usb_endpoint_open(device, ep_out_desc);
+ struct usb_endpoint *ep_intr = usb_endpoint_open(device, ep_intr_desc);
+
+ if (usb_device_claim_interface(device, interface->bInterfaceNumber)) {
+ LOGE("usb_device_claim_interface failed errno: %d\n", errno);
+ usb_endpoint_close(ep_in);
+ usb_endpoint_close(ep_out);
+ usb_endpoint_close(ep_intr);
+ return mDone;
+ }
+
+ MtpDevice* mtpDevice = new MtpDevice(device, interface->bInterfaceNumber,
+ ep_in, ep_out, ep_intr);
+ mDeviceList.add(mtpDevice);
+ mtpDevice->initialize();
+ deviceAdded(mtpDevice);
+ return mDone;
+ }
+ }
+
+ usb_device_close(device);
+ return mDone;
+}
+
+bool MtpClient::usbDeviceRemoved(const char *devname) {
+ for (int i = 0; i < mDeviceList.size(); i++) {
+ MtpDevice* device = mDeviceList[i];
+ if (!strcmp(devname, device->getDeviceName())) {
+ deviceRemoved(device);
+ mDeviceList.removeAt(i);
+ delete device;
+ LOGD("Camera removed!\n");
+ break;
+ }
+ }
+ return mDone;
+}
+
+bool MtpClient::usbDiscoveryDone() {
+ Mutex::Autolock autoLock(mMutex);
+ mThreadStartCondition.signal();
+ return mDone;
+}
+
+bool MtpClient::threadLoop() {
+ usb_host_run(mUsbHostContext, usb_device_added, usb_device_removed, usb_discovery_done, this);
+ return false;
+}
+
+int MtpClient::usb_device_added(const char *devname, void* client_data) {
+ LOGD("usb_device_added %s\n", devname);
+ return ((MtpClient *)client_data)->usbDeviceAdded(devname);
+}
+
+int MtpClient::usb_device_removed(const char *devname, void* client_data) {
+ LOGD("usb_device_removed %s\n", devname);
+ return ((MtpClient *)client_data)->usbDeviceRemoved(devname);
+}
+
+int MtpClient::usb_discovery_done(void* client_data) {
+ LOGD("usb_discovery_done\n");
+ return ((MtpClient *)client_data)->usbDiscoveryDone();
+}
+
+} // namespace android
diff --git a/media/mtp/MtpClient.h b/media/mtp/MtpClient.h
new file mode 100644
index 0000000..fa5c527
--- /dev/null
+++ b/media/mtp/MtpClient.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_CLIENT_H
+#define _MTP_CLIENT_H
+
+#include "MtpTypes.h"
+
+#include <utils/threads.h>
+
+struct usb_host_context;
+
+namespace android {
+
+class MtpClientThread;
+
+class MtpClient {
+private:
+ MtpDeviceList mDeviceList;
+ MtpClientThread* mThread;
+ Condition mThreadStartCondition;
+ Mutex mMutex;
+ struct usb_host_context* mUsbHostContext;
+ bool mDone;
+
+public:
+ MtpClient();
+ virtual ~MtpClient();
+
+ bool start();
+ void stop();
+
+ inline MtpDeviceList& getDeviceList() { return mDeviceList; }
+ MtpDevice* getDevice(int id);
+
+
+ virtual void deviceAdded(MtpDevice *device) = 0;
+ virtual void deviceRemoved(MtpDevice *device) = 0;
+
+private:
+ // these return true if we should stop monitoring USB and clean up
+ bool usbDeviceAdded(const char *devname);
+ bool usbDeviceRemoved(const char *devname);
+ bool usbDiscoveryDone();
+
+ friend class MtpClientThread;
+ bool threadLoop();
+ static int usb_device_added(const char *devname, void* client_data);
+ static int usb_device_removed(const char *devname, void* client_data);
+ static int usb_discovery_done(void* client_data);
+};
+
+}; // namespace android
+
+#endif // _MTP_CLIENT_H
diff --git a/media/mtp/MtpDataPacket.cpp b/media/mtp/MtpDataPacket.cpp
new file mode 100644
index 0000000..eac1f7e
--- /dev/null
+++ b/media/mtp/MtpDataPacket.cpp
@@ -0,0 +1,490 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpDataPacket"
+
+#include <stdio.h>
+#include <sys/types.h>
+#include <fcntl.h>
+
+#include <usbhost/usbhost.h>
+
+#include "MtpDataPacket.h"
+#include "MtpStringBuffer.h"
+
+namespace android {
+
+MtpDataPacket::MtpDataPacket()
+ : MtpPacket(512),
+ mOffset(MTP_CONTAINER_HEADER_SIZE)
+{
+}
+
+MtpDataPacket::~MtpDataPacket() {
+}
+
+void MtpDataPacket::reset() {
+ MtpPacket::reset();
+ mOffset = MTP_CONTAINER_HEADER_SIZE;
+}
+
+void MtpDataPacket::setOperationCode(MtpOperationCode code) {
+ MtpPacket::putUInt16(MTP_CONTAINER_CODE_OFFSET, code);
+}
+
+void MtpDataPacket::setTransactionID(MtpTransactionID id) {
+ MtpPacket::putUInt32(MTP_CONTAINER_TRANSACTION_ID_OFFSET, id);
+}
+
+uint16_t MtpDataPacket::getUInt16() {
+ int offset = mOffset;
+ uint16_t result = (uint16_t)mBuffer[offset] | ((uint16_t)mBuffer[offset + 1] << 8);
+ mOffset += 2;
+ return result;
+}
+
+uint32_t MtpDataPacket::getUInt32() {
+ int offset = mOffset;
+ uint32_t result = (uint32_t)mBuffer[offset] | ((uint32_t)mBuffer[offset + 1] << 8) |
+ ((uint32_t)mBuffer[offset + 2] << 16) | ((uint32_t)mBuffer[offset + 3] << 24);
+ mOffset += 4;
+ return result;
+}
+
+uint64_t MtpDataPacket::getUInt64() {
+ int offset = mOffset;
+ uint64_t result = (uint64_t)mBuffer[offset] | ((uint64_t)mBuffer[offset + 1] << 8) |
+ ((uint64_t)mBuffer[offset + 2] << 16) | ((uint64_t)mBuffer[offset + 3] << 24) |
+ ((uint64_t)mBuffer[offset + 4] << 32) | ((uint64_t)mBuffer[offset + 5] << 40) |
+ ((uint64_t)mBuffer[offset + 6] << 48) | ((uint64_t)mBuffer[offset + 7] << 56);
+ mOffset += 8;
+ return result;
+}
+
+void MtpDataPacket::getUInt128(uint128_t& value) {
+ value[0] = getUInt32();
+ value[1] = getUInt32();
+ value[2] = getUInt32();
+ value[3] = getUInt32();
+}
+
+void MtpDataPacket::getString(MtpStringBuffer& string)
+{
+ string.readFromPacket(this);
+}
+
+Int8List* MtpDataPacket::getAInt8() {
+ Int8List* result = new Int8List;
+ int count = getUInt32();
+ for (int i = 0; i < count; i++)
+ result->push(getInt8());
+ return result;
+}
+
+UInt8List* MtpDataPacket::getAUInt8() {
+ UInt8List* result = new UInt8List;
+ int count = getUInt32();
+ for (int i = 0; i < count; i++)
+ result->push(getUInt8());
+ return result;
+}
+
+Int16List* MtpDataPacket::getAInt16() {
+ Int16List* result = new Int16List;
+ int count = getUInt32();
+ for (int i = 0; i < count; i++)
+ result->push(getInt16());
+ return result;
+}
+
+UInt16List* MtpDataPacket::getAUInt16() {
+ UInt16List* result = new UInt16List;
+ int count = getUInt32();
+ for (int i = 0; i < count; i++)
+ result->push(getUInt16());
+ return result;
+}
+
+Int32List* MtpDataPacket::getAInt32() {
+ Int32List* result = new Int32List;
+ int count = getUInt32();
+ for (int i = 0; i < count; i++)
+ result->push(getInt32());
+ return result;
+}
+
+UInt32List* MtpDataPacket::getAUInt32() {
+ UInt32List* result = new UInt32List;
+ int count = getUInt32();
+ for (int i = 0; i < count; i++)
+ result->push(getUInt32());
+ return result;
+}
+
+Int64List* MtpDataPacket::getAInt64() {
+ Int64List* result = new Int64List;
+ int count = getUInt32();
+ for (int i = 0; i < count; i++)
+ result->push(getInt64());
+ return result;
+}
+
+UInt64List* MtpDataPacket::getAUInt64() {
+ UInt64List* result = new UInt64List;
+ int count = getUInt32();
+ for (int i = 0; i < count; i++)
+ result->push(getUInt64());
+ return result;
+}
+
+void MtpDataPacket::putInt8(int8_t value) {
+ allocate(mOffset + 1);
+ mBuffer[mOffset++] = (uint8_t)value;
+ if (mPacketSize < mOffset)
+ mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putUInt8(uint8_t value) {
+ allocate(mOffset + 1);
+ mBuffer[mOffset++] = (uint8_t)value;
+ if (mPacketSize < mOffset)
+ mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putInt16(int16_t value) {
+ allocate(mOffset + 2);
+ mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+ if (mPacketSize < mOffset)
+ mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putUInt16(uint16_t value) {
+ allocate(mOffset + 2);
+ mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+ if (mPacketSize < mOffset)
+ mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putInt32(int32_t value) {
+ allocate(mOffset + 4);
+ mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 16) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 24) & 0xFF);
+ if (mPacketSize < mOffset)
+ mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putUInt32(uint32_t value) {
+ allocate(mOffset + 4);
+ mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 16) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 24) & 0xFF);
+ if (mPacketSize < mOffset)
+ mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putInt64(int64_t value) {
+ allocate(mOffset + 8);
+ mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 16) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 24) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 32) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 40) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 48) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 56) & 0xFF);
+ if (mPacketSize < mOffset)
+ mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putUInt64(uint64_t value) {
+ allocate(mOffset + 8);
+ mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 16) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 24) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 32) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 40) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 48) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 56) & 0xFF);
+ if (mPacketSize < mOffset)
+ mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putInt128(const int128_t& value) {
+ putInt32(value[0]);
+ putInt32(value[1]);
+ putInt32(value[2]);
+ putInt32(value[3]);
+}
+
+void MtpDataPacket::putUInt128(const uint128_t& value) {
+ putUInt32(value[0]);
+ putUInt32(value[1]);
+ putUInt32(value[2]);
+ putUInt32(value[3]);
+}
+
+void MtpDataPacket::putInt128(int64_t value) {
+ putInt64(value);
+ putInt64(value < 0 ? -1 : 0);
+}
+
+void MtpDataPacket::putUInt128(uint64_t value) {
+ putUInt64(value);
+ putUInt64(0);
+}
+
+void MtpDataPacket::putAInt8(const int8_t* values, int count) {
+ putUInt32(count);
+ for (int i = 0; i < count; i++)
+ putInt8(*values++);
+}
+
+void MtpDataPacket::putAUInt8(const uint8_t* values, int count) {
+ putUInt32(count);
+ for (int i = 0; i < count; i++)
+ putUInt8(*values++);
+}
+
+void MtpDataPacket::putAInt16(const int16_t* values, int count) {
+ putUInt32(count);
+ for (int i = 0; i < count; i++)
+ putInt16(*values++);
+}
+
+void MtpDataPacket::putAUInt16(const uint16_t* values, int count) {
+ putUInt32(count);
+ for (int i = 0; i < count; i++)
+ putUInt16(*values++);
+}
+
+void MtpDataPacket::putAUInt16(const UInt16List* values) {
+ size_t count = (values ? values->size() : 0);
+ putUInt32(count);
+ for (size_t i = 0; i < count; i++)
+ putUInt16((*values)[i]);
+}
+
+void MtpDataPacket::putAInt32(const int32_t* values, int count) {
+ putUInt32(count);
+ for (int i = 0; i < count; i++)
+ putInt32(*values++);
+}
+
+void MtpDataPacket::putAUInt32(const uint32_t* values, int count) {
+ putUInt32(count);
+ for (int i = 0; i < count; i++)
+ putUInt32(*values++);
+}
+
+void MtpDataPacket::putAUInt32(const UInt32List* list) {
+ if (!list) {
+ putEmptyArray();
+ } else {
+ size_t size = list->size();
+ putUInt32(size);
+ for (size_t i = 0; i < size; i++)
+ putUInt32((*list)[i]);
+ }
+}
+
+void MtpDataPacket::putAInt64(const int64_t* values, int count) {
+ putUInt32(count);
+ for (int i = 0; i < count; i++)
+ putInt64(*values++);
+}
+
+void MtpDataPacket::putAUInt64(const uint64_t* values, int count) {
+ putUInt32(count);
+ for (int i = 0; i < count; i++)
+ putUInt64(*values++);
+}
+
+void MtpDataPacket::putString(const MtpStringBuffer& string) {
+ string.writeToPacket(this);
+}
+
+void MtpDataPacket::putString(const char* s) {
+ MtpStringBuffer string(s);
+ string.writeToPacket(this);
+}
+
+void MtpDataPacket::putString(const uint16_t* string) {
+ int count = 0;
+ for (int i = 0; i < 256; i++) {
+ if (string[i])
+ count++;
+ else
+ break;
+ }
+ putUInt8(count > 0 ? count + 1 : 0);
+ for (int i = 0; i < count; i++)
+ putUInt16(string[i]);
+ // only terminate with zero if string is not empty
+ if (count > 0)
+ putUInt16(0);
+}
+
+#ifdef MTP_DEVICE
+int MtpDataPacket::read(int fd) {
+ // first read the header
+ int ret = ::read(fd, mBuffer, MTP_CONTAINER_HEADER_SIZE);
+ if (ret != MTP_CONTAINER_HEADER_SIZE)
+ return -1;
+ // then the following data
+ int total = MtpPacket::getUInt32(MTP_CONTAINER_LENGTH_OFFSET);
+ allocate(total);
+ int remaining = total - MTP_CONTAINER_HEADER_SIZE;
+ ret = ::read(fd, &mBuffer[0] + MTP_CONTAINER_HEADER_SIZE, remaining);
+ if (ret != remaining)
+ return -1;
+
+ mPacketSize = total;
+ mOffset = MTP_CONTAINER_HEADER_SIZE;
+ return total;
+}
+
+int MtpDataPacket::readDataHeader(int fd) {
+ int ret = ::read(fd, mBuffer, MTP_CONTAINER_HEADER_SIZE);
+ if (ret > 0)
+ mPacketSize = ret;
+ else
+ mPacketSize = 0;
+ return ret;
+}
+
+int MtpDataPacket::write(int fd) {
+ MtpPacket::putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize);
+ MtpPacket::putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_DATA);
+ // send header separately from data
+ int ret = ::write(fd, mBuffer, MTP_CONTAINER_HEADER_SIZE);
+ if (ret == MTP_CONTAINER_HEADER_SIZE)
+ ret = ::write(fd, mBuffer + MTP_CONTAINER_HEADER_SIZE,
+ mPacketSize - MTP_CONTAINER_HEADER_SIZE);
+ return (ret < 0 ? ret : 0);
+}
+
+int MtpDataPacket::writeDataHeader(int fd, uint32_t length) {
+ MtpPacket::putUInt32(MTP_CONTAINER_LENGTH_OFFSET, length);
+ MtpPacket::putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_DATA);
+ int ret = ::write(fd, mBuffer, MTP_CONTAINER_HEADER_SIZE);
+ return (ret < 0 ? ret : 0);
+}
+#endif // MTP_DEVICE
+
+#ifdef MTP_HOST
+int MtpDataPacket::read(struct usb_endpoint *ep) {
+ // first read the header
+ int length = transfer(ep, mBuffer, mBufferSize);
+ if (length >= MTP_CONTAINER_HEADER_SIZE) {
+ // look at the length field to see if the data spans multiple packets
+ uint32_t totalLength = MtpPacket::getUInt32(MTP_CONTAINER_LENGTH_OFFSET);
+ while (totalLength > length) {
+ allocate(length + mAllocationIncrement);
+ int ret = transfer(ep, mBuffer + length, mAllocationIncrement);
+ if (ret >= 0)
+ length += ret;
+ else {
+ length = ret;
+ break;
+ }
+ }
+ }
+ if (length >= 0)
+ mPacketSize = length;
+ return length;
+}
+
+int MtpDataPacket::readData(struct usb_endpoint *ep, void* buffer, int length) {
+ int read = 0;
+ while (read < length) {
+ int ret = transfer(ep, (char *)buffer + read, length - read);
+ if (ret < 0) {
+ return ret;
+ }
+ read += ret;
+ }
+ return read;
+}
+
+// Queue a read request. Call readDataWait to wait for result
+int MtpDataPacket::readDataAsync(struct usb_endpoint *ep, void* buffer, int length) {
+ if (usb_endpoint_queue(ep, buffer, length)) {
+ LOGE("usb_endpoint_queue failed, errno: %d", errno);
+ return -1;
+ }
+ return 0;
+}
+
+// Wait for result of readDataAsync
+int MtpDataPacket::readDataWait(struct usb_endpoint *ep) {
+ int ep_num;
+ return usb_endpoint_wait(usb_endpoint_get_device(ep), &ep_num);
+}
+
+int MtpDataPacket::readDataHeader(struct usb_endpoint *ep) {
+ int length = transfer(ep, mBuffer, usb_endpoint_max_packet(ep));
+ if (length >= 0)
+ mPacketSize = length;
+ return length;
+}
+
+int MtpDataPacket::writeDataHeader(struct usb_endpoint *ep, uint32_t length) {
+ MtpPacket::putUInt32(MTP_CONTAINER_LENGTH_OFFSET, length);
+ MtpPacket::putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_DATA);
+ int ret = transfer(ep, mBuffer, MTP_CONTAINER_HEADER_SIZE);
+ return (ret < 0 ? ret : 0);
+}
+
+int MtpDataPacket::write(struct usb_endpoint *ep) {
+ MtpPacket::putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize);
+ MtpPacket::putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_DATA);
+
+ // send header separately from data
+ int ret = transfer(ep, mBuffer, MTP_CONTAINER_HEADER_SIZE);
+ if (ret == MTP_CONTAINER_HEADER_SIZE)
+ ret = transfer(ep, mBuffer + MTP_CONTAINER_HEADER_SIZE,
+ mPacketSize - MTP_CONTAINER_HEADER_SIZE);
+ return (ret < 0 ? ret : 0);
+}
+
+int MtpDataPacket::write(struct usb_endpoint *ep, void* buffer, uint32_t length) {
+ int ret = transfer(ep, buffer, length);
+ return (ret < 0 ? ret : 0);
+}
+
+#endif // MTP_HOST
+
+void* MtpDataPacket::getData(int& outLength) const {
+ int length = mPacketSize - MTP_CONTAINER_HEADER_SIZE;
+ if (length > 0) {
+ void* result = malloc(length);
+ if (result) {
+ memcpy(result, mBuffer + MTP_CONTAINER_HEADER_SIZE, length);
+ outLength = length;
+ return result;
+ }
+ }
+ outLength = 0;
+ return NULL;
+}
+
+} // namespace android
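All of the integer accessors above implement one convention: MTP transfers integers least-significant byte first regardless of host endianness. A standalone illustration of that byte order (not part of the change):

    #include <stdint.h>

    // encode_u32/decode_u32 mirror putUInt32()/getUInt32() above:
    // 0x11223344 is written as the byte sequence 44 33 22 11.
    static void encode_u32(uint8_t* out, uint32_t v) {
        out[0] = (uint8_t)(v & 0xFF);
        out[1] = (uint8_t)((v >> 8) & 0xFF);
        out[2] = (uint8_t)((v >> 16) & 0xFF);
        out[3] = (uint8_t)((v >> 24) & 0xFF);
    }

    static uint32_t decode_u32(const uint8_t* in) {
        return (uint32_t)in[0] | ((uint32_t)in[1] << 8) |
               ((uint32_t)in[2] << 16) | ((uint32_t)in[3] << 24);
    }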
diff --git a/media/mtp/MtpDataPacket.h b/media/mtp/MtpDataPacket.h
new file mode 100644
index 0000000..3ae6226
--- /dev/null
+++ b/media/mtp/MtpDataPacket.h
@@ -0,0 +1,121 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_DATA_PACKET_H
+#define _MTP_DATA_PACKET_H
+
+#include "MtpPacket.h"
+#include "mtp.h"
+
+namespace android {
+
+class MtpStringBuffer;
+
+class MtpDataPacket : public MtpPacket {
+private:
+ // current offset for get/put methods
+ int mOffset;
+
+public:
+ MtpDataPacket();
+ virtual ~MtpDataPacket();
+
+ virtual void reset();
+
+ void setOperationCode(MtpOperationCode code);
+ void setTransactionID(MtpTransactionID id);
+
+ inline uint8_t getUInt8() { return (uint8_t)mBuffer[mOffset++]; }
+ inline int8_t getInt8() { return (int8_t)mBuffer[mOffset++]; }
+ uint16_t getUInt16();
+ inline int16_t getInt16() { return (int16_t)getUInt16(); }
+ uint32_t getUInt32();
+ inline int32_t getInt32() { return (int32_t)getUInt32(); }
+ uint64_t getUInt64();
+ inline int64_t getInt64() { return (int64_t)getUInt64(); }
+ void getUInt128(uint128_t& value);
+ inline void getInt128(int128_t& value) { getUInt128((uint128_t&)value); }
+ void getString(MtpStringBuffer& string);
+
+ Int8List* getAInt8();
+ UInt8List* getAUInt8();
+ Int16List* getAInt16();
+ UInt16List* getAUInt16();
+ Int32List* getAInt32();
+ UInt32List* getAUInt32();
+ Int64List* getAInt64();
+ UInt64List* getAUInt64();
+
+ void putInt8(int8_t value);
+ void putUInt8(uint8_t value);
+ void putInt16(int16_t value);
+ void putUInt16(uint16_t value);
+ void putInt32(int32_t value);
+ void putUInt32(uint32_t value);
+ void putInt64(int64_t value);
+ void putUInt64(uint64_t value);
+ void putInt128(const int128_t& value);
+ void putUInt128(const uint128_t& value);
+ void putInt128(int64_t value);
+ void putUInt128(uint64_t value);
+
+ void putAInt8(const int8_t* values, int count);
+ void putAUInt8(const uint8_t* values, int count);
+ void putAInt16(const int16_t* values, int count);
+ void putAUInt16(const uint16_t* values, int count);
+ void putAUInt16(const UInt16List* values);
+ void putAInt32(const int32_t* values, int count);
+ void putAUInt32(const uint32_t* values, int count);
+ void putAUInt32(const UInt32List* list);
+ void putAInt64(const int64_t* values, int count);
+ void putAUInt64(const uint64_t* values, int count);
+ void putString(const MtpStringBuffer& string);
+ void putString(const char* string);
+ void putString(const uint16_t* string);
+ inline void putEmptyString() { putUInt8(0); }
+ inline void putEmptyArray() { putUInt32(0); }
+
+#ifdef MTP_DEVICE
+ // fill our buffer with data from the given file descriptor
+ int read(int fd);
+ int readDataHeader(int fd);
+
+ // write our data to the given file descriptor
+ int write(int fd);
+ int writeDataHeader(int fd, uint32_t length);
+#endif
+
+#ifdef MTP_HOST
+ int read(struct usb_endpoint *ep);
+ int readData(struct usb_endpoint *ep, void* buffer, int length);
+ int readDataAsync(struct usb_endpoint *ep, void* buffer, int length);
+ int readDataWait(struct usb_endpoint *ep);
+ int readDataHeader(struct usb_endpoint *ep);
+
+ int writeDataHeader(struct usb_endpoint *ep, uint32_t length);
+ int write(struct usb_endpoint *ep);
+ int write(struct usb_endpoint *ep, void* buffer, uint32_t length);
+#endif
+
+ inline bool hasData() const { return mPacketSize > MTP_CONTAINER_HEADER_SIZE; }
+ inline uint32_t getContainerLength() const { return MtpPacket::getUInt32(MTP_CONTAINER_LENGTH_OFFSET); }
+ void* getData(int& outLength) const;
+};
+
+}; // namespace android
+
+#endif // _MTP_DATA_PACKET_H
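The put/get accessors declared above are cursor-based: reset() rewinds the cursor to just past the container header, and each put advances it while growing the packet. A hedged usage sketch (the values and function name are made up):

    #include "MtpDataPacket.h"

    void fillExamplePayload(android::MtpDataPacket& data) {
        data.reset();                   // cursor back to the start of the payload
        data.putUInt32(0x00010001);     // e.g. a storage ID
        data.putString("example.jpg");  // 8-bit string encoded as an MTP string
        data.putEmptyArray();           // zero-length array placeholder
    }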
diff --git a/media/mtp/MtpDatabase.h b/media/mtp/MtpDatabase.h
new file mode 100644
index 0000000..900b517
--- /dev/null
+++ b/media/mtp/MtpDatabase.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_DATABASE_H
+#define _MTP_DATABASE_H
+
+#include "MtpTypes.h"
+
+namespace android {
+
+class MtpDataPacket;
+class MtpProperty;
+
+class MtpDatabase {
+public:
+ virtual ~MtpDatabase() {}
+
+ // called from SendObjectInfo to reserve a database entry for the incoming file
+ virtual MtpObjectHandle beginSendObject(const char* path,
+ MtpObjectFormat format,
+ MtpObjectHandle parent,
+ MtpStorageID storage,
+ uint64_t size,
+ time_t modified) = 0;
+
+ // called to report success or failure of the SendObject file transfer
+ // success should signal a notification of the new object's creation,
+ // failure should remove the database entry created in beginSendObject
+ virtual void endSendObject(const char* path,
+ MtpObjectHandle handle,
+ MtpObjectFormat format,
+ int64_t size,
+ bool succeeded) = 0;
+
+ virtual MtpObjectHandleList* getObjectList(MtpStorageID storageID,
+ MtpObjectFormat format,
+ MtpObjectHandle parent) = 0;
+
+ virtual int getNumObjects(MtpStorageID storageID,
+ MtpObjectFormat format,
+ MtpObjectHandle parent) = 0;
+
+ // callee should delete[] the results from these
+ // results can be NULL
+ virtual MtpObjectFormatList* getSupportedPlaybackFormats() = 0;
+ virtual MtpObjectFormatList* getSupportedCaptureFormats() = 0;
+ virtual MtpObjectPropertyList* getSupportedObjectProperties(MtpObjectFormat format) = 0;
+ virtual MtpDevicePropertyList* getSupportedDeviceProperties() = 0;
+
+ virtual MtpResponseCode getObjectPropertyValue(MtpObjectHandle handle,
+ MtpObjectProperty property,
+ MtpDataPacket& packet) = 0;
+
+ virtual MtpResponseCode setObjectPropertyValue(MtpObjectHandle handle,
+ MtpObjectProperty property,
+ MtpDataPacket& packet) = 0;
+
+ virtual MtpResponseCode getDevicePropertyValue(MtpDeviceProperty property,
+ MtpDataPacket& packet) = 0;
+
+ virtual MtpResponseCode setDevicePropertyValue(MtpDeviceProperty property,
+ MtpDataPacket& packet) = 0;
+
+ virtual MtpResponseCode resetDeviceProperty(MtpDeviceProperty property) = 0;
+
+ virtual MtpResponseCode getObjectPropertyList(MtpObjectHandle handle,
+ MtpObjectFormat format,
+ MtpObjectProperty property,
+ int groupCode, int depth,
+ MtpDataPacket& packet) = 0;
+
+ virtual MtpResponseCode getObjectInfo(MtpObjectHandle handle,
+ MtpDataPacket& packet) = 0;
+
+ virtual MtpResponseCode getObjectFilePath(MtpObjectHandle handle,
+ MtpString& filePath,
+ int64_t& fileLength) = 0;
+
+ virtual MtpResponseCode deleteFile(MtpObjectHandle handle) = 0;
+
+ virtual MtpObjectHandleList* getObjectReferences(MtpObjectHandle handle) = 0;
+
+ virtual MtpResponseCode setObjectReferences(MtpObjectHandle handle,
+ MtpObjectHandleList* references) = 0;
+
+ virtual MtpProperty* getObjectPropertyDesc(MtpObjectProperty property,
+ MtpObjectFormat format) = 0;
+
+ virtual MtpProperty* getDevicePropertyDesc(MtpDeviceProperty property) = 0;
+
+ virtual void sessionStarted() = 0;
+
+ virtual void sessionEnded() = 0;
+};
+
+}; // namespace android
+
+#endif // _MTP_DATABASE_H
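The beginSendObject/endSendObject pair above defines a reserve-then-commit contract around the SendObject file transfer. A sketch of a caller honoring that contract (the function and the succeeded stand-in are illustrative):

    #include <time.h>
    #include "MtpDatabase.h"

    using namespace android;

    MtpObjectHandle receiveObject(MtpDatabase* db, const char* path,
                                  MtpObjectFormat format, MtpObjectHandle parent,
                                  MtpStorageID storage, uint64_t size, time_t modified) {
        // reserve the database entry before any file data arrives
        MtpObjectHandle handle = db->beginSendObject(path, format, parent,
                                                     storage, size, modified);
        bool succeeded = true;  // stand-in for the result of the actual file transfer
        // confirm the entry on success, or let the database remove it on failure
        db->endSendObject(path, handle, format, (int64_t)size, succeeded);
        return handle;
    }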
diff --git a/media/mtp/MtpDebug.cpp b/media/mtp/MtpDebug.cpp
new file mode 100644
index 0000000..1668ecf
--- /dev/null
+++ b/media/mtp/MtpDebug.cpp
@@ -0,0 +1,396 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "MtpDebug.h"
+
+namespace android {
+
+struct CodeEntry {
+ const char* name;
+ uint16_t code;
+};
+
+static const CodeEntry sOperationCodes[] = {
+ { "MTP_OPERATION_GET_DEVICE_INFO", 0x1001 },
+ { "MTP_OPERATION_OPEN_SESSION", 0x1002 },
+ { "MTP_OPERATION_CLOSE_SESSION", 0x1003 },
+ { "MTP_OPERATION_GET_STORAGE_IDS", 0x1004 },
+ { "MTP_OPERATION_GET_STORAGE_INFO", 0x1005 },
+ { "MTP_OPERATION_GET_NUM_OBJECTS", 0x1006 },
+ { "MTP_OPERATION_GET_OBJECT_HANDLES", 0x1007 },
+ { "MTP_OPERATION_GET_OBJECT_INFO", 0x1008 },
+ { "MTP_OPERATION_GET_OBJECT", 0x1009 },
+ { "MTP_OPERATION_GET_THUMB", 0x100A },
+ { "MTP_OPERATION_DELETE_OBJECT", 0x100B },
+ { "MTP_OPERATION_SEND_OBJECT_INFO", 0x100C },
+ { "MTP_OPERATION_SEND_OBJECT", 0x100D },
+ { "MTP_OPERATION_INITIATE_CAPTURE", 0x100E },
+ { "MTP_OPERATION_FORMAT_STORE", 0x100F },
+ { "MTP_OPERATION_RESET_DEVICE", 0x1010 },
+ { "MTP_OPERATION_SELF_TEST", 0x1011 },
+ { "MTP_OPERATION_SET_OBJECT_PROTECTION", 0x1012 },
+ { "MTP_OPERATION_POWER_DOWN", 0x1013 },
+ { "MTP_OPERATION_GET_DEVICE_PROP_DESC", 0x1014 },
+ { "MTP_OPERATION_GET_DEVICE_PROP_VALUE", 0x1015 },
+ { "MTP_OPERATION_SET_DEVICE_PROP_VALUE", 0x1016 },
+ { "MTP_OPERATION_RESET_DEVICE_PROP_VALUE", 0x1017 },
+ { "MTP_OPERATION_TERMINATE_OPEN_CAPTURE", 0x1018 },
+ { "MTP_OPERATION_MOVE_OBJECT", 0x1019 },
+ { "MTP_OPERATION_COPY_OBJECT", 0x101A },
+ { "MTP_OPERATION_GET_PARTIAL_OBJECT", 0x101B },
+ { "MTP_OPERATION_INITIATE_OPEN_CAPTURE", 0x101C },
+ { "MTP_OPERATION_GET_OBJECT_PROPS_SUPPORTED", 0x9801 },
+ { "MTP_OPERATION_GET_OBJECT_PROP_DESC", 0x9802 },
+ { "MTP_OPERATION_GET_OBJECT_PROP_VALUE", 0x9803 },
+ { "MTP_OPERATION_SET_OBJECT_PROP_VALUE", 0x9804 },
+ { "MTP_OPERATION_GET_OBJECT_PROP_LIST", 0x9805 },
+ { "MTP_OPERATION_SET_OBJECT_PROP_LIST", 0x9806 },
+ { "MTP_OPERATION_GET_INTERDEPENDENT_PROP_DESC", 0x9807 },
+ { "MTP_OPERATION_SEND_OBJECT_PROP_LIST", 0x9808 },
+ { "MTP_OPERATION_GET_OBJECT_REFERENCES", 0x9810 },
+ { "MTP_OPERATION_SET_OBJECT_REFERENCES", 0x9811 },
+ { "MTP_OPERATION_SKIP", 0x9820 },
+ { 0, 0 },
+};
+
+static const CodeEntry sFormatCodes[] = {
+ { "MTP_FORMAT_UNDEFINED", 0x3000 },
+ { "MTP_FORMAT_ASSOCIATION", 0x3001 },
+ { "MTP_FORMAT_SCRIPT", 0x3002 },
+ { "MTP_FORMAT_EXECUTABLE", 0x3003 },
+ { "MTP_FORMAT_TEXT", 0x3004 },
+ { "MTP_FORMAT_HTML", 0x3005 },
+ { "MTP_FORMAT_DPOF", 0x3006 },
+ { "MTP_FORMAT_AIFF", 0x3007 },
+ { "MTP_FORMAT_WAV", 0x3008 },
+ { "MTP_FORMAT_MP3", 0x3009 },
+ { "MTP_FORMAT_AVI", 0x300A },
+ { "MTP_FORMAT_MPEG", 0x300B },
+ { "MTP_FORMAT_ASF", 0x300C },
+ { "MTP_FORMAT_DEFINED", 0x3800 },
+ { "MTP_FORMAT_EXIF_JPEG", 0x3801 },
+ { "MTP_FORMAT_TIFF_EP", 0x3802 },
+ { "MTP_FORMAT_FLASHPIX", 0x3803 },
+ { "MTP_FORMAT_BMP", 0x3804 },
+ { "MTP_FORMAT_CIFF", 0x3805 },
+ { "MTP_FORMAT_GIF", 0x3807 },
+ { "MTP_FORMAT_JFIF", 0x3808 },
+ { "MTP_FORMAT_CD", 0x3809 },
+ { "MTP_FORMAT_PICT", 0x380A },
+ { "MTP_FORMAT_PNG", 0x380B },
+ { "MTP_FORMAT_TIFF", 0x380D },
+ { "MTP_FORMAT_TIFF_IT", 0x380E },
+ { "MTP_FORMAT_JP2", 0x380F },
+ { "MTP_FORMAT_JPX", 0x3810 },
+ { "MTP_FORMAT_UNDEFINED_FIRMWARE", 0xB802 },
+ { "MTP_FORMAT_WINDOWS_IMAGE_FORMAT", 0xB881 },
+ { "MTP_FORMAT_UNDEFINED_AUDIO", 0xB900 },
+ { "MTP_FORMAT_WMA", 0xB901 },
+ { "MTP_FORMAT_OGG", 0xB902 },
+ { "MTP_FORMAT_AAC", 0xB903 },
+ { "MTP_FORMAT_AUDIBLE", 0xB904 },
+ { "MTP_FORMAT_FLAC", 0xB906 },
+ { "MTP_FORMAT_UNDEFINED_VIDEO", 0xB980 },
+ { "MTP_FORMAT_WMV", 0xB981 },
+ { "MTP_FORMAT_MP4_CONTAINER", 0xB982 },
+ { "MTP_FORMAT_MP2", 0xB983 },
+ { "MTP_FORMAT_3GP_CONTAINER", 0xB984 },
+ { "MTP_FORMAT_UNDEFINED_COLLECTION", 0xBA00 },
+ { "MTP_FORMAT_ABSTRACT_MULTIMEDIA_ALBUM", 0xBA01 },
+ { "MTP_FORMAT_ABSTRACT_IMAGE_ALBUM", 0xBA02 },
+ { "MTP_FORMAT_ABSTRACT_AUDIO_ALBUM", 0xBA03 },
+ { "MTP_FORMAT_ABSTRACT_VIDEO_ALBUM", 0xBA04 },
+ { "MTP_FORMAT_ABSTRACT_AV_PLAYLIST", 0xBA05 },
+ { "MTP_FORMAT_ABSTRACT_CONTACT_GROUP", 0xBA06 },
+ { "MTP_FORMAT_ABSTRACT_MESSAGE_FOLDER", 0xBA07 },
+ { "MTP_FORMAT_ABSTRACT_CHAPTERED_PRODUCTION", 0xBA08 },
+ { "MTP_FORMAT_ABSTRACT_AUDIO_PLAYLIST", 0xBA09 },
+ { "MTP_FORMAT_ABSTRACT_VIDEO_PLAYLIST", 0xBA0A },
+ { "MTP_FORMAT_ABSTRACT_MEDIACAST", 0xBA0B },
+ { "MTP_FORMAT_WPL_PLAYLIST", 0xBA10 },
+ { "MTP_FORMAT_M3U_PLAYLIST", 0xBA11 },
+ { "MTP_FORMAT_MPL_PLAYLIST", 0xBA12 },
+ { "MTP_FORMAT_ASX_PLAYLIST", 0xBA13 },
+ { "MTP_FORMAT_PLS_PLAYLIST", 0xBA14 },
+ { "MTP_FORMAT_UNDEFINED_DOCUMENT", 0xBA80 },
+ { "MTP_FORMAT_ABSTRACT_DOCUMENT", 0xBA81 },
+ { "MTP_FORMAT_XML_DOCUMENT", 0xBA82 },
+ { "MTP_FORMAT_MS_WORD_DOCUMENT", 0xBA83 },
+ { "MTP_FORMAT_MHT_COMPILED_HTML_DOCUMENT", 0xBA84 },
+ { "MTP_FORMAT_MS_EXCEL_SPREADSHEET", 0xBA85 },
+ { "MTP_FORMAT_MS_POWERPOINT_PRESENTATION", 0xBA86 },
+ { "MTP_FORMAT_UNDEFINED_MESSAGE", 0xBB00 },
+ { "MTP_FORMAT_ABSTRACT_MESSSAGE", 0xBB01 },
+ { "MTP_FORMAT_UNDEFINED_CONTACT", 0xBB80 },
+ { "MTP_FORMAT_ABSTRACT_CONTACT", 0xBB81 },
+ { "MTP_FORMAT_VCARD_2", 0xBB82 },
+ { 0, 0 },
+};
+
+static const CodeEntry sObjectPropCodes[] = {
+ { "MTP_PROPERTY_STORAGE_ID", 0xDC01 },
+ { "MTP_PROPERTY_OBJECT_FORMAT", 0xDC02 },
+ { "MTP_PROPERTY_PROTECTION_STATUS", 0xDC03 },
+ { "MTP_PROPERTY_OBJECT_SIZE", 0xDC04 },
+ { "MTP_PROPERTY_ASSOCIATION_TYPE", 0xDC05 },
+ { "MTP_PROPERTY_ASSOCIATION_DESC", 0xDC06 },
+ { "MTP_PROPERTY_OBJECT_FILE_NAME", 0xDC07 },
+ { "MTP_PROPERTY_DATE_CREATED", 0xDC08 },
+ { "MTP_PROPERTY_DATE_MODIFIED", 0xDC09 },
+ { "MTP_PROPERTY_KEYWORDS", 0xDC0A },
+ { "MTP_PROPERTY_PARENT_OBJECT", 0xDC0B },
+ { "MTP_PROPERTY_ALLOWED_FOLDER_CONTENTS", 0xDC0C },
+ { "MTP_PROPERTY_HIDDEN", 0xDC0D },
+ { "MTP_PROPERTY_SYSTEM_OBJECT", 0xDC0E },
+ { "MTP_PROPERTY_PERSISTENT_UID", 0xDC41 },
+ { "MTP_PROPERTY_SYNC_ID", 0xDC42 },
+ { "MTP_PROPERTY_PROPERTY_BAG", 0xDC43 },
+ { "MTP_PROPERTY_NAME", 0xDC44 },
+ { "MTP_PROPERTY_CREATED_BY", 0xDC45 },
+ { "MTP_PROPERTY_ARTIST", 0xDC46 },
+ { "MTP_PROPERTY_DATE_AUTHORED", 0xDC47 },
+ { "MTP_PROPERTY_DESCRIPTION", 0xDC48 },
+ { "MTP_PROPERTY_URL_REFERENCE", 0xDC49 },
+ { "MTP_PROPERTY_LANGUAGE_LOCALE", 0xDC4A },
+ { "MTP_PROPERTY_COPYRIGHT_INFORMATION", 0xDC4B },
+ { "MTP_PROPERTY_SOURCE", 0xDC4C },
+ { "MTP_PROPERTY_ORIGIN_LOCATION", 0xDC4D },
+ { "MTP_PROPERTY_DATE_ADDED", 0xDC4E },
+ { "MTP_PROPERTY_NON_CONSUMABLE", 0xDC4F },
+ { "MTP_PROPERTY_CORRUPT_UNPLAYABLE", 0xDC50 },
+ { "MTP_PROPERTY_PRODUCER_SERIAL_NUMBER", 0xDC51 },
+ { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_FORMAT", 0xDC81 },
+ { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_SIZE", 0xDC82 },
+ { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_HEIGHT", 0xDC83 },
+ { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_WIDTH", 0xDC84 },
+ { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_DURATION", 0xDC85 },
+ { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_DATA", 0xDC86 },
+ { "MTP_PROPERTY_WIDTH", 0xDC87 },
+ { "MTP_PROPERTY_HEIGHT", 0xDC88 },
+ { "MTP_PROPERTY_DURATION", 0xDC89 },
+ { "MTP_PROPERTY_RATING", 0xDC8A },
+ { "MTP_PROPERTY_TRACK", 0xDC8B },
+ { "MTP_PROPERTY_GENRE", 0xDC8C },
+ { "MTP_PROPERTY_CREDITS", 0xDC8D },
+ { "MTP_PROPERTY_LYRICS", 0xDC8E },
+ { "MTP_PROPERTY_SUBSCRIPTION_CONTENT_ID", 0xDC8F },
+ { "MTP_PROPERTY_PRODUCED_BY", 0xDC90 },
+ { "MTP_PROPERTY_USE_COUNT", 0xDC91 },
+ { "MTP_PROPERTY_SKIP_COUNT", 0xDC92 },
+ { "MTP_PROPERTY_LAST_ACCESSED", 0xDC93 },
+ { "MTP_PROPERTY_PARENTAL_RATING", 0xDC94 },
+ { "MTP_PROPERTY_META_GENRE", 0xDC95 },
+ { "MTP_PROPERTY_COMPOSER", 0xDC96 },
+ { "MTP_PROPERTY_EFFECTIVE_RATING", 0xDC97 },
+ { "MTP_PROPERTY_SUBTITLE", 0xDC98 },
+ { "MTP_PROPERTY_ORIGINAL_RELEASE_DATE", 0xDC99 },
+ { "MTP_PROPERTY_ALBUM_NAME", 0xDC9A },
+ { "MTP_PROPERTY_ALBUM_ARTIST", 0xDC9B },
+ { "MTP_PROPERTY_MOOD", 0xDC9C },
+ { "MTP_PROPERTY_DRM_STATUS", 0xDC9D },
+ { "MTP_PROPERTY_SUB_DESCRIPTION", 0xDC9E },
+ { "MTP_PROPERTY_IS_CROPPED", 0xDCD1 },
+ { "MTP_PROPERTY_IS_COLOUR_CORRECTED", 0xDCD2 },
+ { "MTP_PROPERTY_IMAGE_BIT_DEPTH", 0xDCD3 },
+ { "MTP_PROPERTY_F_NUMBER", 0xDCD4 },
+ { "MTP_PROPERTY_EXPOSURE_TIME", 0xDCD5 },
+ { "MTP_PROPERTY_EXPOSURE_INDEX", 0xDCD6 },
+ { "MTP_PROPERTY_TOTAL_BITRATE", 0xDE91 },
+ { "MTP_PROPERTY_BITRATE_TYPE", 0xDE92 },
+ { "MTP_PROPERTY_SAMPLE_RATE", 0xDE93 },
+ { "MTP_PROPERTY_NUMBER_OF_CHANNELS", 0xDE94 },
+ { "MTP_PROPERTY_AUDIO_BIT_DEPTH", 0xDE95 },
+ { "MTP_PROPERTY_SCAN_TYPE", 0xDE97 },
+ { "MTP_PROPERTY_AUDIO_WAVE_CODEC", 0xDE99 },
+ { "MTP_PROPERTY_AUDIO_BITRATE", 0xDE9A },
+ { "MTP_PROPERTY_VIDEO_FOURCC_CODEC", 0xDE9B },
+ { "MTP_PROPERTY_VIDEO_BITRATE", 0xDE9C },
+ { "MTP_PROPERTY_FRAMES_PER_THOUSAND_SECONDS", 0xDE9D },
+ { "MTP_PROPERTY_KEYFRAME_DISTANCE", 0xDE9E },
+ { "MTP_PROPERTY_BUFFER_SIZE", 0xDE9F },
+ { "MTP_PROPERTY_ENCODING_QUALITY", 0xDEA0 },
+ { "MTP_PROPERTY_ENCODING_PROFILE", 0xDEA1 },
+ { "MTP_PROPERTY_DISPLAY_NAME", 0xDCE0 },
+ { "MTP_PROPERTY_BODY_TEXT", 0xDCE1 },
+ { "MTP_PROPERTY_SUBJECT", 0xDCE2 },
+ { "MTP_PROPERTY_PRIORITY", 0xDCE3 },
+ { "MTP_PROPERTY_GIVEN_NAME", 0xDD00 },
+ { "MTP_PROPERTY_MIDDLE_NAMES", 0xDD01 },
+ { "MTP_PROPERTY_FAMILY_NAME", 0xDD02 },
+ { "MTP_PROPERTY_PREFIX", 0xDD03 },
+ { "MTP_PROPERTY_SUFFIX", 0xDD04 },
+ { "MTP_PROPERTY_PHONETIC_GIVEN_NAME", 0xDD05 },
+ { "MTP_PROPERTY_PHONETIC_FAMILY_NAME", 0xDD06 },
+ { "MTP_PROPERTY_EMAIL_PRIMARY", 0xDD07 },
+ { "MTP_PROPERTY_EMAIL_PERSONAL_1", 0xDD08 },
+ { "MTP_PROPERTY_EMAIL_PERSONAL_2", 0xDD09 },
+ { "MTP_PROPERTY_EMAIL_BUSINESS_1", 0xDD0A },
+ { "MTP_PROPERTY_EMAIL_BUSINESS_2", 0xDD0B },
+ { "MTP_PROPERTY_EMAIL_OTHERS", 0xDD0C },
+ { "MTP_PROPERTY_PHONE_NUMBER_PRIMARY", 0xDD0D },
+ { "MTP_PROPERTY_PHONE_NUMBER_PERSONAL", 0xDD0E },
+ { "MTP_PROPERTY_PHONE_NUMBER_PERSONAL_2", 0xDD0F },
+ { "MTP_PROPERTY_PHONE_NUMBER_BUSINESS", 0xDD10 },
+ { "MTP_PROPERTY_PHONE_NUMBER_BUSINESS_2", 0xDD11 },
+ { "MTP_PROPERTY_PHONE_NUMBER_MOBILE", 0xDD12 },
+ { "MTP_PROPERTY_PHONE_NUMBER_MOBILE_2", 0xDD13 },
+ { "MTP_PROPERTY_FAX_NUMBER_PRIMARY", 0xDD14 },
+ { "MTP_PROPERTY_FAX_NUMBER_PERSONAL", 0xDD15 },
+ { "MTP_PROPERTY_FAX_NUMBER_BUSINESS", 0xDD16 },
+ { "MTP_PROPERTY_PAGER_NUMBER", 0xDD17 },
+ { "MTP_PROPERTY_PHONE_NUMBER_OTHERS", 0xDD18 },
+ { "MTP_PROPERTY_PRIMARY_WEB_ADDRESS", 0xDD19 },
+ { "MTP_PROPERTY_PERSONAL_WEB_ADDRESS", 0xDD1A },
+ { "MTP_PROPERTY_BUSINESS_WEB_ADDRESS", 0xDD1B },
+ { "MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS", 0xDD1C },
+ { "MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS_2", 0xDD1D },
+ { "MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS_3", 0xDD1E },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_FULL", 0xDD1F },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_LINE_1", 0xDD20 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_LINE_2", 0xDD21 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_CITY", 0xDD22 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_REGION", 0xDD23 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_POSTAL_CODE", 0xDD24 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_COUNTRY", 0xDD25 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_FULL", 0xDD26 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_LINE_1", 0xDD27 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_LINE_2", 0xDD28 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_CITY", 0xDD29 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_REGION", 0xDD2A },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_POSTAL_CODE", 0xDD2B },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_COUNTRY", 0xDD2C },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_FULL", 0xDD2D },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_LINE_1", 0xDD2E },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_LINE_2", 0xDD2F },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_CITY", 0xDD30 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_REGION", 0xDD31 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_POSTAL_CODE", 0xDD32 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_COUNTRY", 0xDD33 },
+ { "MTP_PROPERTY_ORGANIZATION_NAME", 0xDD34 },
+ { "MTP_PROPERTY_PHONETIC_ORGANIZATION_NAME", 0xDD35 },
+ { "MTP_PROPERTY_ROLE", 0xDD36 },
+ { "MTP_PROPERTY_BIRTHDATE", 0xDD37 },
+ { "MTP_PROPERTY_MESSAGE_TO", 0xDD40 },
+ { "MTP_PROPERTY_MESSAGE_CC", 0xDD41 },
+ { "MTP_PROPERTY_MESSAGE_BCC", 0xDD42 },
+ { "MTP_PROPERTY_MESSAGE_READ", 0xDD43 },
+ { "MTP_PROPERTY_MESSAGE_RECEIVED_TIME", 0xDD44 },
+ { "MTP_PROPERTY_MESSAGE_SENDER", 0xDD45 },
+ { "MTP_PROPERTY_ACTIVITY_BEGIN_TIME", 0xDD50 },
+ { "MTP_PROPERTY_ACTIVITY_END_TIME", 0xDD51 },
+ { "MTP_PROPERTY_ACTIVITY_LOCATION", 0xDD52 },
+ { "MTP_PROPERTY_ACTIVITY_REQUIRED_ATTENDEES", 0xDD54 },
+ { "MTP_PROPERTY_ACTIVITY_OPTIONAL_ATTENDEES", 0xDD55 },
+ { "MTP_PROPERTY_ACTIVITY_RESOURCES", 0xDD56 },
+ { "MTP_PROPERTY_ACTIVITY_ACCEPTED", 0xDD57 },
+ { "MTP_PROPERTY_ACTIVITY_TENTATIVE", 0xDD58 },
+ { "MTP_PROPERTY_ACTIVITY_DECLINED", 0xDD59 },
+ { "MTP_PROPERTY_ACTIVITY_REMAINDER_TIME", 0xDD5A },
+ { "MTP_PROPERTY_ACTIVITY_OWNER", 0xDD5B },
+ { "MTP_PROPERTY_ACTIVITY_STATUS", 0xDD5C },
+ { "MTP_PROPERTY_OWNER", 0xDD5D },
+ { "MTP_PROPERTY_EDITOR", 0xDD5E },
+ { "MTP_PROPERTY_WEBMASTER", 0xDD5F },
+ { "MTP_PROPERTY_URL_SOURCE", 0xDD60 },
+ { "MTP_PROPERTY_URL_DESTINATION", 0xDD61 },
+ { "MTP_PROPERTY_TIME_BOOKMARK", 0xDD62 },
+ { "MTP_PROPERTY_OBJECT_BOOKMARK", 0xDD63 },
+ { "MTP_PROPERTY_BYTE_BOOKMARK", 0xDD64 },
+ { "MTP_PROPERTY_LAST_BUILD_DATE", 0xDD70 },
+ { "MTP_PROPERTY_TIME_TO_LIVE", 0xDD71 },
+ { "MTP_PROPERTY_MEDIA_GUID", 0xDD72 },
+ { 0, 0 },
+};
+
+static const CodeEntry sDevicePropCodes[] = {
+ { "MTP_DEVICE_PROPERTY_UNDEFINED", 0x5000 },
+ { "MTP_DEVICE_PROPERTY_BATTERY_LEVEL", 0x5001 },
+ { "MTP_DEVICE_PROPERTY_FUNCTIONAL_MODE", 0x5002 },
+ { "MTP_DEVICE_PROPERTY_IMAGE_SIZE", 0x5003 },
+ { "MTP_DEVICE_PROPERTY_COMPRESSION_SETTING", 0x5004 },
+ { "MTP_DEVICE_PROPERTY_WHITE_BALANCE", 0x5005 },
+ { "MTP_DEVICE_PROPERTY_RGB_GAIN", 0x5006 },
+ { "MTP_DEVICE_PROPERTY_F_NUMBER", 0x5007 },
+ { "MTP_DEVICE_PROPERTY_FOCAL_LENGTH", 0x5008 },
+ { "MTP_DEVICE_PROPERTY_FOCUS_DISTANCE", 0x5009 },
+ { "MTP_DEVICE_PROPERTY_FOCUS_MODE", 0x500A },
+ { "MTP_DEVICE_PROPERTY_EXPOSURE_METERING_MODE", 0x500B },
+ { "MTP_DEVICE_PROPERTY_FLASH_MODE", 0x500C },
+ { "MTP_DEVICE_PROPERTY_EXPOSURE_TIME", 0x500D },
+ { "MTP_DEVICE_PROPERTY_EXPOSURE_PROGRAM_MODE", 0x500E },
+ { "MTP_DEVICE_PROPERTY_EXPOSURE_INDEX", 0x500F },
+ { "MTP_DEVICE_PROPERTY_EXPOSURE_BIAS_COMPENSATION", 0x5010 },
+ { "MTP_DEVICE_PROPERTY_DATETIME", 0x5011 },
+ { "MTP_DEVICE_PROPERTY_CAPTURE_DELAY", 0x5012 },
+ { "MTP_DEVICE_PROPERTY_STILL_CAPTURE_MODE", 0x5013 },
+ { "MTP_DEVICE_PROPERTY_CONTRAST", 0x5014 },
+ { "MTP_DEVICE_PROPERTY_SHARPNESS", 0x5015 },
+ { "MTP_DEVICE_PROPERTY_DIGITAL_ZOOM", 0x5016 },
+ { "MTP_DEVICE_PROPERTY_EFFECT_MODE", 0x5017 },
+ { "MTP_DEVICE_PROPERTY_BURST_NUMBER", 0x5018 },
+ { "MTP_DEVICE_PROPERTY_BURST_INTERVAL", 0x5019 },
+ { "MTP_DEVICE_PROPERTY_TIMELAPSE_NUMBER", 0x501A },
+ { "MTP_DEVICE_PROPERTY_TIMELAPSE_INTERVAL", 0x501B },
+ { "MTP_DEVICE_PROPERTY_FOCUS_METERING_MODE", 0x501C },
+ { "MTP_DEVICE_PROPERTY_UPLOAD_URL", 0x501D },
+ { "MTP_DEVICE_PROPERTY_ARTIST", 0x501E },
+ { "MTP_DEVICE_PROPERTY_COPYRIGHT_INFO", 0x501F },
+ { "MTP_DEVICE_PROPERTY_SYNCHRONIZATION_PARTNER", 0xD401 },
+ { "MTP_DEVICE_PROPERTY_DEVICE_FRIENDLY_NAME", 0xD402 },
+ { "MTP_DEVICE_PROPERTY_VOLUME", 0xD403 },
+ { "MTP_DEVICE_PROPERTY_SUPPORTED_FORMATS_ORDERED", 0xD404 },
+ { "MTP_DEVICE_PROPERTY_DEVICE_ICON", 0xD405 },
+ { "MTP_DEVICE_PROPERTY_PLAYBACK_RATE", 0xD410 },
+ { "MTP_DEVICE_PROPERTY_PLAYBACK_OBJECT", 0xD411 },
+ { "MTP_DEVICE_PROPERTY_PLAYBACK_CONTAINER_INDEX", 0xD412 },
+ { "MTP_DEVICE_PROPERTY_SESSION_INITIATOR_VERSION_INFO", 0xD406 },
+ { "MTP_DEVICE_PROPERTY_PERCEIVED_DEVICE_TYPE", 0xD407 },
+ { 0, 0 },
+};
+
+static const char* getCodeName(uint16_t code, const CodeEntry* table) {
+ const CodeEntry* entry = table;
+ while (entry->name) {
+ if (entry->code == code)
+ return entry->name;
+ entry++;
+ }
+ return "UNKNOWN";
+}
+
+const char* MtpDebug::getOperationCodeName(MtpOperationCode code) {
+ return getCodeName(code, sOperationCodes);
+}
+
+const char* MtpDebug::getFormatCodeName(MtpObjectFormat code) {
+ if (code == 0)
+ return "NONE";
+ return getCodeName(code, sFormatCodes);
+}
+
+const char* MtpDebug::getObjectPropCodeName(MtpPropertyCode code) {
+ if (code == 0)
+ return "NONE";
+ return getCodeName(code, sObjectPropCodes);
+}
+
+const char* MtpDebug::getDevicePropCodeName(MtpPropertyCode code) {
+ if (code == 0)
+ return "NONE";
+ return getCodeName(code, sDevicePropCodes);
+}
+
+} // namespace android
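The tables above are linear, sentinel-terminated lookups keyed by code. Example use of the resulting helpers (illustrative):

    #include "MtpDebug.h"

    void logOperation(android::MtpOperationCode code) {
        // 0x1002 maps to "MTP_OPERATION_OPEN_SESSION"; unknown codes yield "UNKNOWN"
        LOGV("operation 0x%04x (%s)", (unsigned)code,
             android::MtpDebug::getOperationCodeName(code));
    }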
diff --git a/media/mtp/MtpDebug.h b/media/mtp/MtpDebug.h
new file mode 100644
index 0000000..5b53e31
--- /dev/null
+++ b/media/mtp/MtpDebug.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_DEBUG_H
+#define _MTP_DEBUG_H
+
+// #define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include "MtpTypes.h"
+
+namespace android {
+
+class MtpDebug {
+public:
+ static const char* getOperationCodeName(MtpOperationCode code);
+ static const char* getFormatCodeName(MtpObjectFormat code);
+ static const char* getObjectPropCodeName(MtpPropertyCode code);
+ static const char* getDevicePropCodeName(MtpPropertyCode code);
+};
+
+}; // namespace android
+
+#endif // _MTP_DEBUG_H
diff --git a/media/mtp/MtpDevice.cpp b/media/mtp/MtpDevice.cpp
new file mode 100644
index 0000000..416ebfe
--- /dev/null
+++ b/media/mtp/MtpDevice.cpp
@@ -0,0 +1,495 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpDevice"
+
+#include "MtpDebug.h"
+#include "MtpDevice.h"
+#include "MtpDeviceInfo.h"
+#include "MtpObjectInfo.h"
+#include "MtpProperty.h"
+#include "MtpStorageInfo.h"
+#include "MtpStringBuffer.h"
+#include "MtpUtils.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <sys/types.h>
+#include <sys/ioctl.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <errno.h>
+#include <endian.h>
+#include <unistd.h>
+
+#include <usbhost/usbhost.h>
+
+namespace android {
+
+MtpDevice::MtpDevice(struct usb_device* device, int interface,
+ struct usb_endpoint *ep_in, struct usb_endpoint *ep_out,
+ struct usb_endpoint *ep_intr)
+ : mDevice(device),
+ mInterface(interface),
+ mEndpointIn(ep_in),
+ mEndpointOut(ep_out),
+ mEndpointIntr(ep_intr),
+ mDeviceInfo(NULL),
+ mID(usb_device_get_unique_id(device)),
+ mSessionID(0),
+ mTransactionID(0)
+{
+}
+
+MtpDevice::~MtpDevice() {
+ close();
+ for (int i = 0; i < mDeviceProperties.size(); i++)
+ delete mDeviceProperties[i];
+}
+
+void MtpDevice::initialize() {
+ openSession();
+ mDeviceInfo = getDeviceInfo();
+ if (mDeviceInfo) {
+ mDeviceInfo->print();
+
+ if (mDeviceInfo->mDeviceProperties) {
+ int count = mDeviceInfo->mDeviceProperties->size();
+ for (int i = 0; i < count; i++) {
+ MtpDeviceProperty propCode = (*mDeviceInfo->mDeviceProperties)[i];
+ MtpProperty* property = getDevicePropDesc(propCode);
+ if (property) {
+ property->print();
+ mDeviceProperties.push(property);
+ }
+ }
+ }
+ }
+}
+
+void MtpDevice::close() {
+ if (mDevice) {
+ usb_device_release_interface(mDevice, mInterface);
+ usb_device_close(mDevice);
+ mDevice = NULL;
+ }
+}
+
+const char* MtpDevice::getDeviceName() {
+ if (mDevice)
+ return usb_device_get_name(mDevice);
+ else
+ return "???";
+}
+
+bool MtpDevice::openSession() {
+ Mutex::Autolock autoLock(mMutex);
+
+ mSessionID = 0;
+ mTransactionID = 0;
+ MtpSessionID newSession = 1;
+ mRequest.reset();
+ mRequest.setParameter(1, newSession);
+ if (!sendRequest(MTP_OPERATION_OPEN_SESSION))
+ return false;
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_SESSION_ALREADY_OPEN)
+ newSession = mResponse.getParameter(1);
+ else if (ret != MTP_RESPONSE_OK)
+ return false;
+
+ mSessionID = newSession;
+ mTransactionID = 1;
+ return true;
+}
+
+bool MtpDevice::closeSession() {
+ // FIXME
+ return true;
+}
+
+MtpDeviceInfo* MtpDevice::getDeviceInfo() {
+ Mutex::Autolock autoLock(mMutex);
+
+ mRequest.reset();
+ if (!sendRequest(MTP_OPERATION_GET_DEVICE_INFO))
+ return NULL;
+ if (!readData())
+ return NULL;
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_OK) {
+ MtpDeviceInfo* info = new MtpDeviceInfo;
+ info->read(mData);
+ return info;
+ }
+ return NULL;
+}
+
+MtpStorageIDList* MtpDevice::getStorageIDs() {
+ Mutex::Autolock autoLock(mMutex);
+
+ mRequest.reset();
+ if (!sendRequest(MTP_OPERATION_GET_STORAGE_IDS))
+ return NULL;
+ if (!readData())
+ return NULL;
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_OK) {
+ return mData.getAUInt32();
+ }
+ return NULL;
+}
+
+MtpStorageInfo* MtpDevice::getStorageInfo(MtpStorageID storageID) {
+ Mutex::Autolock autoLock(mMutex);
+
+ mRequest.reset();
+ mRequest.setParameter(1, storageID);
+ if (!sendRequest(MTP_OPERATION_GET_STORAGE_INFO))
+ return NULL;
+ if (!readData())
+ return NULL;
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_OK) {
+ MtpStorageInfo* info = new MtpStorageInfo(storageID);
+ info->read(mData);
+ return info;
+ }
+ return NULL;
+}
+
+MtpObjectHandleList* MtpDevice::getObjectHandles(MtpStorageID storageID,
+ MtpObjectFormat format, MtpObjectHandle parent) {
+ Mutex::Autolock autoLock(mMutex);
+
+ mRequest.reset();
+ mRequest.setParameter(1, storageID);
+ mRequest.setParameter(2, format);
+ mRequest.setParameter(3, parent);
+ if (!sendRequest(MTP_OPERATION_GET_OBJECT_HANDLES))
+ return NULL;
+ if (!readData())
+ return NULL;
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_OK) {
+ return mData.getAUInt32();
+ }
+ return NULL;
+}
+
+MtpObjectInfo* MtpDevice::getObjectInfo(MtpObjectHandle handle) {
+ Mutex::Autolock autoLock(mMutex);
+
+ // FIXME - we might want to add some caching here
+
+ mRequest.reset();
+ mRequest.setParameter(1, handle);
+ if (!sendRequest(MTP_OPERATION_GET_OBJECT_INFO))
+ return NULL;
+ if (!readData())
+ return NULL;
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_OK) {
+ MtpObjectInfo* info = new MtpObjectInfo(handle);
+ info->read(mData);
+ return info;
+ }
+ return NULL;
+}
+
+void* MtpDevice::getThumbnail(MtpObjectHandle handle, int& outLength) {
+ Mutex::Autolock autoLock(mMutex);
+
+ mRequest.reset();
+ mRequest.setParameter(1, handle);
+ if (sendRequest(MTP_OPERATION_GET_THUMB) && readData()) {
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_OK) {
+ return mData.getData(outLength);
+ }
+ }
+ outLength = 0;
+ return NULL;
+}
+
+MtpObjectHandle MtpDevice::sendObjectInfo(MtpObjectInfo* info) {
+ Mutex::Autolock autoLock(mMutex);
+
+ mRequest.reset();
+ MtpObjectHandle parent = info->mParent;
+ if (parent == 0)
+ parent = MTP_PARENT_ROOT;
+
+ mRequest.setParameter(1, info->mStorageID);
+ mRequest.setParameter(2, info->mParent);
+
+ mData.putUInt32(info->mStorageID);
+ mData.putUInt16(info->mFormat);
+ mData.putUInt16(info->mProtectionStatus);
+ mData.putUInt32(info->mCompressedSize);
+ mData.putUInt16(info->mThumbFormat);
+ mData.putUInt32(info->mThumbCompressedSize);
+ mData.putUInt32(info->mThumbPixWidth);
+ mData.putUInt32(info->mThumbPixHeight);
+ mData.putUInt32(info->mImagePixWidth);
+ mData.putUInt32(info->mImagePixHeight);
+ mData.putUInt32(info->mImagePixDepth);
+ mData.putUInt32(info->mParent);
+ mData.putUInt16(info->mAssociationType);
+ mData.putUInt32(info->mAssociationDesc);
+ mData.putUInt32(info->mSequenceNumber);
+ mData.putString(info->mName);
+
+ char created[100], modified[100];
+ formatDateTime(info->mDateCreated, created, sizeof(created));
+ formatDateTime(info->mDateModified, modified, sizeof(modified));
+
+ mData.putString(created);
+ mData.putString(modified);
+ if (info->mKeywords)
+ mData.putString(info->mKeywords);
+ else
+ mData.putEmptyString();
+
+ if (sendRequest(MTP_OPERATION_SEND_OBJECT_INFO) && sendData()) {
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_OK) {
+ info->mStorageID = mResponse.getParameter(1);
+ info->mParent = mResponse.getParameter(2);
+ info->mHandle = mResponse.getParameter(3);
+ return info->mHandle;
+ }
+ }
+ return (MtpObjectHandle)-1;
+}
+
+bool MtpDevice::sendObject(MtpObjectInfo* info, int srcFD) {
+ Mutex::Autolock autoLock(mMutex);
+
+ int remaining = info->mCompressedSize;
+ mRequest.reset();
+ mRequest.setParameter(1, info->mHandle);
+ if (sendRequest(MTP_OPERATION_SEND_OBJECT)) {
+ // send data header
+ writeDataHeader(MTP_OPERATION_SEND_OBJECT, remaining);
+
+ char buffer[65536];
+ while (remaining > 0) {
+ int count = read(srcFD, buffer, sizeof(buffer));
+ if (count > 0) {
+ int written = mData.write(mEndpointOut, buffer, count);
+ if (written < 0)
+ break;
+ remaining -= count;
+ } else {
+ break;
+ }
+ }
+ }
+ MtpResponseCode ret = readResponse();
+ return (remaining == 0 && ret == MTP_RESPONSE_OK);
+}
+
+bool MtpDevice::deleteObject(MtpObjectHandle handle) {
+ Mutex::Autolock autoLock(mMutex);
+
+ mRequest.reset();
+ mRequest.setParameter(1, handle);
+ if (sendRequest(MTP_OPERATION_DELETE_OBJECT)) {
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_OK)
+ return true;
+ }
+ return false;
+}
+
+MtpObjectHandle MtpDevice::getParent(MtpObjectHandle handle) {
+ MtpObjectInfo* info = getObjectInfo(handle);
+ if (info)
+ return info->mParent;
+ else
+ return -1;
+}
+
+MtpObjectHandle MtpDevice::getStorageID(MtpObjectHandle handle) {
+ MtpObjectInfo* info = getObjectInfo(handle);
+ if (info)
+ return info->mStorageID;
+ else
+ return -1;
+}
+
+MtpProperty* MtpDevice::getDevicePropDesc(MtpDeviceProperty code) {
+ Mutex::Autolock autoLock(mMutex);
+
+ mRequest.reset();
+ mRequest.setParameter(1, code);
+ if (!sendRequest(MTP_OPERATION_GET_DEVICE_PROP_DESC))
+ return NULL;
+ if (!readData())
+ return NULL;
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_OK) {
+ MtpProperty* property = new MtpProperty;
+ property->read(mData);
+ return property;
+ }
+ return NULL;
+}
+
+// reads the object's data and writes it to the specified file path
+bool MtpDevice::readObject(MtpObjectHandle handle, const char* destPath, int group, int perm) {
+ LOGD("readObject: %s", destPath);
+ // O_CREAT requires a mode argument; fchmod() below applies the final permissions
+ int fd = ::open(destPath, O_RDWR | O_CREAT | O_TRUNC, perm);
+ if (fd < 0) {
+ LOGE("open failed for %s", destPath);
+ return false;
+ }
+
+ fchown(fd, getuid(), group);
+ // set permissions
+ int mask = umask(0);
+ fchmod(fd, perm);
+ umask(mask);
+
+ Mutex::Autolock autoLock(mMutex);
+ bool result = false;
+
+ mRequest.reset();
+ mRequest.setParameter(1, handle);
+ if (sendRequest(MTP_OPERATION_GET_OBJECT)
+ && mData.readDataHeader(mEndpointIn)) {
+ uint32_t length = mData.getContainerLength();
+ if (length < MTP_CONTAINER_HEADER_SIZE)
+ goto fail;
+ length -= MTP_CONTAINER_HEADER_SIZE;
+ uint32_t remaining = length;
+
+ int initialDataLength = 0;
+ void* initialData = mData.getData(initialDataLength);
+ if (initialData) {
+ if (initialDataLength > 0) {
+ if (write(fd, initialData, initialDataLength) != initialDataLength)
+ goto fail;
+ remaining -= initialDataLength;
+ }
+ free(initialData);
+ }
+
+ // USB reads greater than 16K don't work
+ char buffer1[16384], buffer2[16384];
+ char* readBuffer = buffer1;
+ char* writeBuffer = NULL;
+ int writeLength = 0;
+
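+ // Ping-pong between buffer1 and buffer2: while the chunk just received is
+ // written to disk, the next USB read (at most 16K) is already queued into
+ // the other buffer, overlapping file I/O with the USB transfer.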
+ while (remaining > 0 || writeBuffer) {
+ if (remaining > 0) {
+ // queue up a read request
+ int readSize = (remaining > sizeof(buffer1) ? sizeof(buffer1) : remaining);
+ if (mData.readDataAsync(mEndpointIn, readBuffer, readSize)) {
+ LOGE("readDataAsync failed");
+ goto fail;
+ }
+ } else {
+ readBuffer = NULL;
+ }
+
+ if (writeBuffer) {
+ // write previous buffer
+ if (write(fd, writeBuffer, writeLength) != writeLength) {
+ LOGE("write failed");
+ // wait for pending read before failing
+ if (readBuffer)
+ mData.readDataWait(mEndpointIn);
+ goto fail;
+ }
+ writeBuffer = NULL;
+ }
+
+ // wait for read to complete
+ if (readBuffer) {
+ int read = mData.readDataWait(mEndpointIn);
+ if (read < 0)
+ goto fail;
+
+ writeBuffer = readBuffer;
+ writeLength = read;
+ remaining -= read;
+ readBuffer = (readBuffer == buffer1 ? buffer2 : buffer1);
+ }
+ }
+
+ MtpResponseCode response = readResponse();
+ if (response == MTP_RESPONSE_OK)
+ result = true;
+ }
+
+fail:
+ ::close(fd);
+ return result;
+}
+
+bool MtpDevice::sendRequest(MtpOperationCode operation) {
+ LOGV("sendRequest: %s\n", MtpDebug::getOperationCodeName(operation));
+ mRequest.setOperationCode(operation);
+ if (mTransactionID > 0)
+ mRequest.setTransactionID(mTransactionID++);
+ int ret = mRequest.write(mEndpointOut);
+ mRequest.dump();
+ return (ret > 0);
+}
+
+bool MtpDevice::sendData() {
+ LOGV("sendData\n");
+ mData.setOperationCode(mRequest.getOperationCode());
+ mData.setTransactionID(mRequest.getTransactionID());
+ int ret = mData.write(mEndpointOut);
+ mData.dump();
+ return (ret > 0);
+}
+
+bool MtpDevice::readData() {
+ mData.reset();
+ int ret = mData.read(mEndpointIn);
+ LOGV("readData returned %d\n", ret);
+ if (ret >= MTP_CONTAINER_HEADER_SIZE) {
+ mData.dump();
+ return true;
+ }
+ else {
+ LOGV("readResponse failed\n");
+ return false;
+ }
+}
+
+bool MtpDevice::writeDataHeader(MtpOperationCode operation, int dataLength) {
+ mData.setOperationCode(operation);
+ mData.setTransactionID(mRequest.getTransactionID());
+ return (!mData.writeDataHeader(mEndpointOut, dataLength));
+}
+
+MtpResponseCode MtpDevice::readResponse() {
+ LOGV("readResponse\n");
+ int ret = mResponse.read(mEndpointIn);
+ if (ret >= MTP_CONTAINER_HEADER_SIZE) {
+ mResponse.dump();
+ return mResponse.getResponseCode();
+ }
+ else {
+ LOGD("readResponse failed\n");
+ return -1;
+ }
+}
+
+} // namespace android
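Every helper above follows the same command/data/response sequence guarded by mMutex. A hedged host-side sketch of typical use, assuming the caller already holds an MtpDevice whose USB interface and endpoints were claimed elsewhere (the traversal below is illustrative):

    #include "MtpDevice.h"
    #include "MtpObjectInfo.h"

    using namespace android;

    void dumpDevice(MtpDevice* device) {
        device->initialize();   // opens the session and fetches DeviceInfo
        MtpStorageIDList* storages = device->getStorageIDs();
        if (!storages)
            return;
        for (size_t i = 0; i < storages->size(); i++) {
            // 0 = no format filter, MTP_PARENT_ROOT = objects in the root directory
            MtpObjectHandleList* handles = device->getObjectHandles(
                    (*storages)[i], 0, MTP_PARENT_ROOT);
            if (!handles)
                continue;
            for (size_t j = 0; j < handles->size(); j++) {
                MtpObjectInfo* info = device->getObjectInfo((*handles)[j]);
                if (info) {
                    // info->mName, info->mCompressedSize, etc. describe the object
                    delete info;
                }
            }
            delete handles;
        }
        delete storages;
    }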
diff --git a/media/mtp/MtpDevice.h b/media/mtp/MtpDevice.h
new file mode 100644
index 0000000..21c85d5
--- /dev/null
+++ b/media/mtp/MtpDevice.h
@@ -0,0 +1,104 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_DEVICE_H
+#define _MTP_DEVICE_H
+
+#include "MtpRequestPacket.h"
+#include "MtpDataPacket.h"
+#include "MtpResponsePacket.h"
+#include "MtpTypes.h"
+
+#include <utils/threads.h>
+
+struct usb_device;
+
+namespace android {
+
+class MtpDeviceInfo;
+class MtpObjectInfo;
+class MtpStorageInfo;
+
+class MtpDevice {
+private:
+ struct usb_device* mDevice;
+ int mInterface;
+ struct usb_endpoint* mEndpointIn;
+ struct usb_endpoint* mEndpointOut;
+ struct usb_endpoint* mEndpointIntr;
+ MtpDeviceInfo* mDeviceInfo;
+ MtpPropertyList mDeviceProperties;
+
+ // a unique ID for the device
+ int mID;
+
+ // current session ID
+ MtpSessionID mSessionID;
+ // current transaction ID
+ MtpTransactionID mTransactionID;
+
+ MtpRequestPacket mRequest;
+ MtpDataPacket mData;
+ MtpResponsePacket mResponse;
+
+ // to ensure only one MTP transaction at a time
+ Mutex mMutex;
+
+public:
+ MtpDevice(struct usb_device* device, int interface,
+ struct usb_endpoint *ep_in, struct usb_endpoint *ep_out,
+ struct usb_endpoint *ep_intr);
+ virtual ~MtpDevice();
+
+ inline int getID() const { return mID; }
+
+ void initialize();
+ void close();
+ const char* getDeviceName();
+
+ bool openSession();
+ bool closeSession();
+
+ MtpDeviceInfo* getDeviceInfo();
+ MtpStorageIDList* getStorageIDs();
+ MtpStorageInfo* getStorageInfo(MtpStorageID storageID);
+ MtpObjectHandleList* getObjectHandles(MtpStorageID storageID, MtpObjectFormat format,
+ MtpObjectHandle parent);
+ MtpObjectInfo* getObjectInfo(MtpObjectHandle handle);
+ void* getThumbnail(MtpObjectHandle handle, int& outLength);
+ MtpObjectHandle sendObjectInfo(MtpObjectInfo* info);
+ bool sendObject(MtpObjectInfo* info, int srcFD);
+ bool deleteObject(MtpObjectHandle handle);
+ MtpObjectHandle getParent(MtpObjectHandle handle);
+ MtpObjectHandle getStorageID(MtpObjectHandle handle);
+
+ MtpProperty* getDevicePropDesc(MtpDeviceProperty code);
+
+ bool readObject(MtpObjectHandle handle, const char* destPath, int group,
+ int perm);
+
+private:
+ bool sendRequest(MtpOperationCode operation);
+ bool sendData();
+ bool readData();
+ bool writeDataHeader(MtpOperationCode operation, int dataLength);
+ MtpResponseCode readResponse();
+
+};
+
+}; // namespace android
+
+#endif // _MTP_DEVICE_H
diff --git a/media/mtp/MtpDeviceInfo.cpp b/media/mtp/MtpDeviceInfo.cpp
new file mode 100644
index 0000000..5a9322e
--- /dev/null
+++ b/media/mtp/MtpDeviceInfo.cpp
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpDeviceInfo"
+
+#include "MtpDebug.h"
+#include "MtpDataPacket.h"
+#include "MtpDeviceInfo.h"
+#include "MtpStringBuffer.h"
+
+namespace android {
+
+MtpDeviceInfo::MtpDeviceInfo()
+ : mStandardVersion(0),
+ mVendorExtensionID(0),
+ mVendorExtensionVersion(0),
+ mVendorExtensionDesc(NULL),
+ mFunctionalCode(0),
+ mOperations(NULL),
+ mEvents(NULL),
+ mDeviceProperties(NULL),
+ mCaptureFormats(NULL),
+ mPlaybackFormats(NULL),
+ mManufacturer(NULL),
+ mModel(NULL),
+ mVersion(NULL),
+ mSerial(NULL)
+{
+}
+
+MtpDeviceInfo::~MtpDeviceInfo() {
+ if (mVendorExtensionDesc)
+ free(mVendorExtensionDesc);
+ delete mOperations;
+ delete mEvents;
+ delete mDeviceProperties;
+ delete mCaptureFormats;
+ delete mPlaybackFormats;
+ if (mManufacturer)
+ free(mManufacturer);
+ if (mModel)
+ free(mModel);
+ if (mVersion)
+ free(mVersion);
+ if (mSerial)
+ free(mSerial);
+}
+
+void MtpDeviceInfo::read(MtpDataPacket& packet) {
+ MtpStringBuffer string;
+
+ // read the device info
+ mStandardVersion = packet.getUInt16();
+ mVendorExtensionID = packet.getUInt32();
+ mVendorExtensionVersion = packet.getUInt16();
+
+ packet.getString(string);
+ mVendorExtensionDesc = strdup((const char *)string);
+
+ mFunctionalCode = packet.getUInt16();
+ mOperations = packet.getAUInt16();
+ mEvents = packet.getAUInt16();
+ mDeviceProperties = packet.getAUInt16();
+ mCaptureFormats = packet.getAUInt16();
+ mPlaybackFormats = packet.getAUInt16();
+
+ packet.getString(string);
+ mManufacturer = strdup((const char *)string);
+ packet.getString(string);
+ mModel = strdup((const char *)string);
+ packet.getString(string);
+ mVersion = strdup((const char *)string);
+ packet.getString(string);
+ mSerial = strdup((const char *)string);
+}
+
+void MtpDeviceInfo::print() {
+ LOGV("Device Info:\n\tmStandardVersion: %d\n\tmVendorExtensionID: %d\n\tmVendorExtensionVersiony: %d\n",
+ mStandardVersion, mVendorExtensionID, mVendorExtensionVersion);
+ LOGV("\tmVendorExtensionDesc: %s\n\tmFunctionalCode: %d\n\tmManufacturer: %s\n\tmModel: %s\n\tmVersion: %s\n\tmSerial: %s\n",
+ mVendorExtensionDesc, mFunctionalCode, mManufacturer, mModel, mVersion, mSerial);
+}
+
+} // namespace android
diff --git a/media/mtp/MtpDeviceInfo.h b/media/mtp/MtpDeviceInfo.h
new file mode 100644
index 0000000..2abaa10
--- /dev/null
+++ b/media/mtp/MtpDeviceInfo.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_DEVICE_INFO_H
+#define _MTP_DEVICE_INFO_H
+
+struct stat;
+
+namespace android {
+
+class MtpDataPacket;
+
+class MtpDeviceInfo {
+public:
+ uint16_t mStandardVersion;
+ uint32_t mVendorExtensionID;
+ uint16_t mVendorExtensionVersion;
+ char* mVendorExtensionDesc;
+ uint16_t mFunctionalCode;
+ UInt16List* mOperations;
+ UInt16List* mEvents;
+ MtpDevicePropertyList* mDeviceProperties;
+ MtpObjectFormatList* mCaptureFormats;
+ MtpObjectFormatList* mPlaybackFormats;
+ char* mManufacturer;
+ char* mModel;
+ char* mVersion;
+ char* mSerial;
+
+public:
+ MtpDeviceInfo();
+ virtual ~MtpDeviceInfo();
+
+ void read(MtpDataPacket& packet);
+
+ void print();
+};
+
+}; // namespace android
+
+#endif // _MTP_DEVICE_INFO_H
diff --git a/media/mtp/MtpEventPacket.cpp b/media/mtp/MtpEventPacket.cpp
new file mode 100644
index 0000000..fc74542
--- /dev/null
+++ b/media/mtp/MtpEventPacket.cpp
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpEventPacket"
+
+#include <stdio.h>
+#include <sys/types.h>
+#include <fcntl.h>
+#include <sys/ioctl.h>
+
+#ifdef MTP_DEVICE
+#include <linux/usb/f_mtp.h>
+#endif
+
+#include "MtpEventPacket.h"
+
+namespace android {
+
+MtpEventPacket::MtpEventPacket()
+ : MtpPacket(512)
+{
+}
+
+MtpEventPacket::~MtpEventPacket() {
+}
+
+#ifdef MTP_DEVICE
+int MtpEventPacket::write(int fd) {
+ struct mtp_event event;
+
+ putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize);
+ putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_EVENT);
+
+ event.data = mBuffer;
+ event.length = mPacketSize;
+ int ret = ::ioctl(fd, MTP_SEND_EVENT, (unsigned long)&event);
+ return (ret < 0 ? ret : 0);
+}
+#endif
+
+#ifdef MTP_HOST
+ // read our buffer from the given endpoint
+int MtpEventPacket::read(struct usb_endpoint *ep) {
+ int ret = transfer(ep, mBuffer, mBufferSize);
+ if (ret >= 0)
+ mPacketSize = ret;
+ else
+ mPacketSize = 0;
+ return ret;
+}
+#endif
+
+} // namespace android
+
diff --git a/media/mtp/MtpEventPacket.h b/media/mtp/MtpEventPacket.h
new file mode 100644
index 0000000..30ae869
--- /dev/null
+++ b/media/mtp/MtpEventPacket.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_EVENT_PACKET_H
+#define _MTP_EVENT_PACKET_H
+
+#include "MtpPacket.h"
+#include "mtp.h"
+
+namespace android {
+
+class MtpEventPacket : public MtpPacket {
+
+public:
+ MtpEventPacket();
+ virtual ~MtpEventPacket();
+
+#ifdef MTP_DEVICE
+ // write our data to the given file descriptor
+ int write(int fd);
+#endif
+
+#ifdef MTP_HOST
+ // read our buffer from the given endpoint
+ int read(struct usb_endpoint *ep);
+#endif
+
+ inline MtpEventCode getEventCode() const { return getContainerCode(); }
+ inline void setEventCode(MtpEventCode code)
+ { return setContainerCode(code); }
+};
+
+}; // namespace android
+
+#endif // _MTP_EVENT_PACKET_H
diff --git a/media/mtp/MtpObjectInfo.cpp b/media/mtp/MtpObjectInfo.cpp
new file mode 100644
index 0000000..ea68c3b
--- /dev/null
+++ b/media/mtp/MtpObjectInfo.cpp
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpObjectInfo"
+
+#include "MtpDebug.h"
+#include "MtpDataPacket.h"
+#include "MtpObjectInfo.h"
+#include "MtpStringBuffer.h"
+#include "MtpUtils.h"
+
+namespace android {
+
+MtpObjectInfo::MtpObjectInfo(MtpObjectHandle handle)
+ : mHandle(handle),
+ mStorageID(0),
+ mFormat(0),
+ mProtectionStatus(0),
+ mCompressedSize(0),
+ mThumbFormat(0),
+ mThumbCompressedSize(0),
+ mThumbPixWidth(0),
+ mThumbPixHeight(0),
+ mImagePixWidth(0),
+ mImagePixHeight(0),
+ mImagePixDepth(0),
+ mParent(0),
+ mAssociationType(0),
+ mAssociationDesc(0),
+ mSequenceNumber(0),
+ mName(NULL),
+ mDateCreated(0),
+ mDateModified(0),
+ mKeywords(NULL)
+{
+}
+
+MtpObjectInfo::~MtpObjectInfo() {
+ if (mName)
+ free(mName);
+ if (mKeywords)
+ free(mKeywords);
+}
+
+void MtpObjectInfo::read(MtpDataPacket& packet) {
+ MtpStringBuffer string;
+ time_t time;
+
+ mStorageID = packet.getUInt32();
+ mFormat = packet.getUInt16();
+ mProtectionStatus = packet.getUInt16();
+ mCompressedSize = packet.getUInt32();
+ mThumbFormat = packet.getUInt16();
+ mThumbCompressedSize = packet.getUInt32();
+ mThumbPixWidth = packet.getUInt32();
+ mThumbPixHeight = packet.getUInt32();
+ mImagePixWidth = packet.getUInt32();
+ mImagePixHeight = packet.getUInt32();
+ mImagePixDepth = packet.getUInt32();
+ mParent = packet.getUInt32();
+ mAssociationType = packet.getUInt16();
+ mAssociationDesc = packet.getUInt32();
+ mSequenceNumber = packet.getUInt32();
+
+ packet.getString(string);
+ mName = strdup((const char *)string);
+
+ packet.getString(string);
+ if (parseDateTime((const char*)string, time))
+ mDateCreated = time;
+
+ packet.getString(string);
+ if (parseDateTime((const char*)string, time))
+ mDateModified = time;
+
+ packet.getString(string);
+ mKeywords = strdup((const char *)string);
+}
+
+void MtpObjectInfo::print() {
+ LOGD("MtpObject Info %08X: %s\n", mHandle, mName);
+ LOGD(" mStorageID: %08X mFormat: %04X mProtectionStatus: %d\n",
+ mStorageID, mFormat, mProtectionStatus);
+ LOGD(" mCompressedSize: %d mThumbFormat: %04X mThumbCompressedSize: %d\n",
+ mCompressedSize, mThumbFormat, mThumbCompressedSize);
+ LOGD(" mThumbPixWidth: %d mThumbPixHeight: %d\n", mThumbPixWidth, mThumbPixHeight);
+ LOGD(" mImagePixWidth: %d mImagePixHeight: %d mImagePixDepth: %d\n",
+ mImagePixWidth, mImagePixHeight, mImagePixDepth);
+ LOGD(" mParent: %08X mAssociationType: %04X mAssociationDesc: %04X\n",
+ mParent, mAssociationType, mAssociationDesc);
+ LOGD(" mSequenceNumber: %d mDateCreated: %ld mDateModified: %ld mKeywords: %s\n",
+ mSequenceNumber, mDateCreated, mDateModified, mKeywords);
+}
+
+} // namespace android
diff --git a/media/mtp/MtpObjectInfo.h b/media/mtp/MtpObjectInfo.h
new file mode 100644
index 0000000..c7a449c
--- /dev/null
+++ b/media/mtp/MtpObjectInfo.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_OBJECT_INFO_H
+#define _MTP_OBJECT_INFO_H
+
+#include "MtpTypes.h"
+
+namespace android {
+
+class MtpDataPacket;
+
+class MtpObjectInfo {
+public:
+ MtpObjectHandle mHandle;
+ MtpStorageID mStorageID;
+ MtpObjectFormat mFormat;
+ uint16_t mProtectionStatus;
+ uint32_t mCompressedSize;
+ MtpObjectFormat mThumbFormat;
+ uint32_t mThumbCompressedSize;
+ uint32_t mThumbPixWidth;
+ uint32_t mThumbPixHeight;
+ uint32_t mImagePixWidth;
+ uint32_t mImagePixHeight;
+ uint32_t mImagePixDepth;
+ MtpObjectHandle mParent;
+ uint16_t mAssociationType;
+ uint32_t mAssociationDesc;
+ uint32_t mSequenceNumber;
+ char* mName;
+ time_t mDateCreated;
+ time_t mDateModified;
+ char* mKeywords;
+
+public:
+ MtpObjectInfo(MtpObjectHandle handle);
+ virtual ~MtpObjectInfo();
+
+ void read(MtpDataPacket& packet);
+
+ void print();
+};
+
+}; // namespace android
+
+#endif // _MTP_OBJECT_INFO_H
diff --git a/media/mtp/MtpPacket.cpp b/media/mtp/MtpPacket.cpp
new file mode 100644
index 0000000..bd6196f
--- /dev/null
+++ b/media/mtp/MtpPacket.cpp
@@ -0,0 +1,154 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpPacket"
+
+#include "MtpDebug.h"
+#include "MtpPacket.h"
+#include "mtp.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <errno.h>
+
+#include <usbhost/usbhost.h>
+
+namespace android {
+
+MtpPacket::MtpPacket(int bufferSize)
+ : mBuffer(NULL),
+ mBufferSize(bufferSize),
+ mAllocationIncrement(bufferSize),
+ mPacketSize(0)
+{
+ mBuffer = (uint8_t *)malloc(bufferSize);
+ if (!mBuffer) {
+ LOGE("out of memory!");
+ abort();
+ }
+}
+
+MtpPacket::~MtpPacket() {
+ if (mBuffer)
+ free(mBuffer);
+}
+
+void MtpPacket::reset() {
+ allocate(MTP_CONTAINER_HEADER_SIZE);
+ mPacketSize = MTP_CONTAINER_HEADER_SIZE;
+ memset(mBuffer, 0, mBufferSize);
+}
+
+void MtpPacket::allocate(int length) {
+ if (length > mBufferSize) {
+ int newLength = length + mAllocationIncrement;
+ mBuffer = (uint8_t *)realloc(mBuffer, newLength);
+ if (!mBuffer) {
+ LOGE("out of memory!");
+ abort();
+ }
+ mBufferSize = newLength;
+ }
+}
+
+void MtpPacket::dump() {
+#define DUMP_BYTES_PER_ROW 16
+ char buffer[500];
+ char* bufptr = buffer;
+
+ for (int i = 0; i < mPacketSize; i++) {
+ sprintf(bufptr, "%02X ", mBuffer[i]);
+ bufptr += strlen(bufptr);
+ if (i % DUMP_BYTES_PER_ROW == (DUMP_BYTES_PER_ROW - 1)) {
+ LOGV("%s", buffer);
+ bufptr = buffer;
+ }
+ }
+ if (bufptr != buffer) {
+ // print last line
+ LOGV("%s", buffer);
+ }
+ LOGV("\n");
+}
+
+uint16_t MtpPacket::getUInt16(int offset) const {
+ return ((uint16_t)mBuffer[offset + 1] << 8) | (uint16_t)mBuffer[offset];
+}
+
+uint32_t MtpPacket::getUInt32(int offset) const {
+ return ((uint32_t)mBuffer[offset + 3] << 24) | ((uint32_t)mBuffer[offset + 2] << 16) |
+ ((uint32_t)mBuffer[offset + 1] << 8) | (uint32_t)mBuffer[offset];
+}
+
+void MtpPacket::putUInt16(int offset, uint16_t value) {
+ mBuffer[offset++] = (uint8_t)(value & 0xFF);
+ mBuffer[offset++] = (uint8_t)((value >> 8) & 0xFF);
+}
+
+void MtpPacket::putUInt32(int offset, uint32_t value) {
+ mBuffer[offset++] = (uint8_t)(value & 0xFF);
+ mBuffer[offset++] = (uint8_t)((value >> 8) & 0xFF);
+ mBuffer[offset++] = (uint8_t)((value >> 16) & 0xFF);
+ mBuffer[offset++] = (uint8_t)((value >> 24) & 0xFF);
+}
+
+uint16_t MtpPacket::getContainerCode() const {
+ return getUInt16(MTP_CONTAINER_CODE_OFFSET);
+}
+
+void MtpPacket::setContainerCode(uint16_t code) {
+ putUInt16(MTP_CONTAINER_CODE_OFFSET, code);
+}
+
+MtpTransactionID MtpPacket::getTransactionID() const {
+ return getUInt32(MTP_CONTAINER_TRANSACTION_ID_OFFSET);
+}
+
+void MtpPacket::setTransactionID(MtpTransactionID id) {
+ putUInt32(MTP_CONTAINER_TRANSACTION_ID_OFFSET, id);
+}
+
+uint32_t MtpPacket::getParameter(int index) const {
+ if (index < 1 || index > 5) {
+ LOGE("index %d out of range in MtpPacket::getParameter", index);
+ return 0;
+ }
+ return getUInt32(MTP_CONTAINER_PARAMETER_OFFSET + (index - 1) * sizeof(uint32_t));
+}
+
+void MtpPacket::setParameter(int index, uint32_t value) {
+ if (index < 1 || index > 5) {
+ LOGE("index %d out of range in MtpPacket::setParameter", index);
+ return;
+ }
+ int offset = MTP_CONTAINER_PARAMETER_OFFSET + (index - 1) * sizeof(uint32_t);
+ if (mPacketSize < offset + sizeof(uint32_t))
+ mPacketSize = offset + sizeof(uint32_t);
+ putUInt32(offset, value);
+}
+
+#ifdef MTP_HOST
+int MtpPacket::transfer(struct usb_endpoint *ep, void* buffer, int length) {
+ if (usb_endpoint_queue(ep, buffer, length)) {
+ LOGE("usb_endpoint_queue failed, errno: %d", errno);
+ return -1;
+ }
+ int ep_num;
+ return usb_endpoint_wait(usb_endpoint_get_device(ep), &ep_num);
+}
+#endif
+
+} // namespace android
diff --git a/media/mtp/MtpPacket.h b/media/mtp/MtpPacket.h
new file mode 100644
index 0000000..9c8d6da
--- /dev/null
+++ b/media/mtp/MtpPacket.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_PACKET_H
+#define _MTP_PACKET_H
+
+#include "MtpTypes.h"
+
+struct usb_endpoint;
+
+namespace android {
+
+class MtpPacket {
+
+protected:
+ uint8_t* mBuffer;
+ // current size of the buffer
+ int mBufferSize;
+ // number of bytes to add when resizing the buffer
+ int mAllocationIncrement;
+ // size of the data in the packet
+ int mPacketSize;
+
+public:
+ MtpPacket(int bufferSize);
+ virtual ~MtpPacket();
+
+ // sets packet size to the default container size and sets buffer to zero
+ virtual void reset();
+
+ void allocate(int length);
+ void dump();
+
+ uint16_t getContainerCode() const;
+ void setContainerCode(uint16_t code);
+
+ MtpTransactionID getTransactionID() const;
+ void setTransactionID(MtpTransactionID id);
+
+ uint32_t getParameter(int index) const;
+ void setParameter(int index, uint32_t value);
+
+#ifdef MTP_HOST
+ int transfer(struct usb_endpoint *ep, void* buffer, int length);
+#endif
+
+protected:
+ uint16_t getUInt16(int offset) const;
+ uint32_t getUInt32(int offset) const;
+ void putUInt16(int offset, uint16_t value);
+ void putUInt32(int offset, uint32_t value);
+};
+
+}; // namespace android
+
+#endif // _MTP_PACKET_H
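
The protected getUInt16/getUInt32/putUInt16/putUInt32 helpers above store every multi-byte field least-significant byte first, matching MTP's (and PTP's) little-endian wire format. A self-contained round-trip sketch of that layout, mirroring the putUInt32()/getUInt32() implementations in MtpPacket.cpp:

#include <assert.h>
#include <stdint.h>
#include <string.h>

// Little-endian round trip, independent of the MtpPacket class.
int main() {
    uint8_t buf[8];
    memset(buf, 0, sizeof(buf));

    uint32_t value = 0x12345678;
    // putUInt32-style encoding: low byte first
    buf[0] = (uint8_t)(value & 0xFF);
    buf[1] = (uint8_t)((value >> 8) & 0xFF);
    buf[2] = (uint8_t)((value >> 16) & 0xFF);
    buf[3] = (uint8_t)((value >> 24) & 0xFF);

    // getUInt32-style decoding
    uint32_t decoded = ((uint32_t)buf[3] << 24) | ((uint32_t)buf[2] << 16) |
                       ((uint32_t)buf[1] << 8)  |  (uint32_t)buf[0];

    assert(decoded == value);
    assert(buf[0] == 0x78 && buf[3] == 0x12);  // least-significant byte is stored first
    return 0;
}
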
diff --git a/media/mtp/MtpProperty.cpp b/media/mtp/MtpProperty.cpp
new file mode 100644
index 0000000..42945f5
--- /dev/null
+++ b/media/mtp/MtpProperty.cpp
@@ -0,0 +1,449 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpProperty"
+
+#include "MtpDataPacket.h"
+#include "MtpProperty.h"
+#include "MtpStringBuffer.h"
+#include "MtpUtils.h"
+
+namespace android {
+
+MtpProperty::MtpProperty()
+ : mCode(0),
+ mType(0),
+ mWriteable(false),
+ mDefaultArrayLength(0),
+ mDefaultArrayValues(NULL),
+ mCurrentArrayLength(0),
+ mCurrentArrayValues(NULL),
+ mGroupCode(0),
+ mFormFlag(kFormNone),
+ mEnumLength(0),
+ mEnumValues(NULL)
+{
+ memset(&mDefaultValue, 0, sizeof(mDefaultValue));
+ memset(&mCurrentValue, 0, sizeof(mCurrentValue));
+ memset(&mMinimumValue, 0, sizeof(mMinimumValue));
+ memset(&mMaximumValue, 0, sizeof(mMaximumValue));
+}
+
+MtpProperty::MtpProperty(MtpPropertyCode propCode,
+ MtpDataType type,
+ bool writeable,
+ int defaultValue)
+ : mCode(propCode),
+ mType(type),
+ mWriteable(writeable),
+ mDefaultArrayLength(0),
+ mDefaultArrayValues(NULL),
+ mCurrentArrayLength(0),
+ mCurrentArrayValues(NULL),
+ mGroupCode(-1), // disable multiple properties in GetObjectPropList for now
+ mFormFlag(kFormNone),
+ mEnumLength(0),
+ mEnumValues(NULL)
+{
+ memset(&mDefaultValue, 0, sizeof(mDefaultValue));
+ memset(&mCurrentValue, 0, sizeof(mCurrentValue));
+ memset(&mMinimumValue, 0, sizeof(mMinimumValue));
+ memset(&mMaximumValue, 0, sizeof(mMaximumValue));
+
+ if (defaultValue) {
+ switch (type) {
+ case MTP_TYPE_INT8:
+ mDefaultValue.u.i8 = defaultValue;
+ break;
+ case MTP_TYPE_UINT8:
+ mDefaultValue.u.u8 = defaultValue;
+ break;
+ case MTP_TYPE_INT16:
+ mDefaultValue.u.i16 = defaultValue;
+ break;
+ case MTP_TYPE_UINT16:
+ mDefaultValue.u.u16 = defaultValue;
+ break;
+ case MTP_TYPE_INT32:
+ mDefaultValue.u.i32 = defaultValue;
+ break;
+ case MTP_TYPE_UINT32:
+ mDefaultValue.u.u32 = defaultValue;
+ break;
+ case MTP_TYPE_INT64:
+ mDefaultValue.u.i64 = defaultValue;
+ break;
+ case MTP_TYPE_UINT64:
+ mDefaultValue.u.u64 = defaultValue;
+ break;
+ default:
+ LOGE("unknown type %04X in MtpProperty::MtpProperty", type);
+ }
+ }
+}
+
+MtpProperty::~MtpProperty() {
+ if (mType == MTP_TYPE_STR) {
+ // free all strings
+ free(mDefaultValue.str);
+ free(mCurrentValue.str);
+ free(mMinimumValue.str);
+ free(mMaximumValue.str);
+ if (mDefaultArrayValues) {
+ for (int i = 0; i < mDefaultArrayLength; i++)
+ free(mDefaultArrayValues[i].str);
+ }
+ if (mCurrentArrayValues) {
+ for (int i = 0; i < mCurrentArrayLength; i++)
+ free(mCurrentArrayValues[i].str);
+ }
+ if (mEnumValues) {
+ for (int i = 0; i < mEnumLength; i++)
+ free(mEnumValues[i].str);
+ }
+ }
+ delete[] mDefaultArrayValues;
+ delete[] mCurrentArrayValues;
+ delete[] mEnumValues;
+}
+
+void MtpProperty::read(MtpDataPacket& packet) {
+ bool deviceProp = isDeviceProperty();
+
+ mCode = packet.getUInt16();
+ mType = packet.getUInt16();
+ mWriteable = (packet.getUInt8() == 1);
+ switch (mType) {
+ case MTP_TYPE_AINT8:
+ case MTP_TYPE_AUINT8:
+ case MTP_TYPE_AINT16:
+ case MTP_TYPE_AUINT16:
+ case MTP_TYPE_AINT32:
+ case MTP_TYPE_AUINT32:
+ case MTP_TYPE_AINT64:
+ case MTP_TYPE_AUINT64:
+ case MTP_TYPE_AINT128:
+ case MTP_TYPE_AUINT128:
+ mDefaultArrayValues = readArrayValues(packet, mDefaultArrayLength);
+ if (deviceProp)
+ mCurrentArrayValues = readArrayValues(packet, mCurrentArrayLength);
+ break;
+ default:
+ readValue(packet, mDefaultValue);
+ if (deviceProp)
+ readValue(packet, mCurrentValue);
+ }
+ if (!deviceProp)
+ mGroupCode = packet.getUInt32();
+ mFormFlag = packet.getUInt8();
+
+ if (mFormFlag == kFormRange) {
+ readValue(packet, mMinimumValue);
+ readValue(packet, mMaximumValue);
+ readValue(packet, mStepSize);
+ } else if (mFormFlag == kFormEnum) {
+ mEnumLength = packet.getUInt16();
+ mEnumValues = new MtpPropertyValue[mEnumLength];
+ for (int i = 0; i < mEnumLength; i++)
+ readValue(packet, mEnumValues[i]);
+ }
+}
+
+void MtpProperty::write(MtpDataPacket& packet) {
+ bool deviceProp = isDeviceProperty();
+
+ packet.putUInt16(mCode);
+ packet.putUInt16(mType);
+ packet.putUInt8(mWriteable ? 1 : 0);
+
+ switch (mType) {
+ case MTP_TYPE_AINT8:
+ case MTP_TYPE_AUINT8:
+ case MTP_TYPE_AINT16:
+ case MTP_TYPE_AUINT16:
+ case MTP_TYPE_AINT32:
+ case MTP_TYPE_AUINT32:
+ case MTP_TYPE_AINT64:
+ case MTP_TYPE_AUINT64:
+ case MTP_TYPE_AINT128:
+ case MTP_TYPE_AUINT128:
+ writeArrayValues(packet, mDefaultArrayValues, mDefaultArrayLength);
+ if (deviceProp)
+ writeArrayValues(packet, mCurrentArrayValues, mCurrentArrayLength);
+ break;
+ default:
+ writeValue(packet, mDefaultValue);
+ if (deviceProp)
+ writeValue(packet, mCurrentValue);
+ }
+ // ObjectPropDesc datasets carry a group code; DevicePropDesc datasets do not.
+ // Both carry the form flag, mirroring read() above.
+ if (!deviceProp)
+ packet.putUInt32(mGroupCode);
+ packet.putUInt8(mFormFlag);
+ if (mFormFlag == kFormRange) {
+ writeValue(packet, mMinimumValue);
+ writeValue(packet, mMaximumValue);
+ writeValue(packet, mStepSize);
+ } else if (mFormFlag == kFormEnum) {
+ packet.putUInt16(mEnumLength);
+ for (int i = 0; i < mEnumLength; i++)
+ writeValue(packet, mEnumValues[i]);
+ }
+}
+
+void MtpProperty::setDefaultValue(const uint16_t* string) {
+ free(mDefaultValue.str);
+ if (string) {
+ MtpStringBuffer buffer(string);
+ mDefaultValue.str = strdup(buffer);
+ }
+ else
+ mDefaultValue.str = NULL;
+}
+
+void MtpProperty::setCurrentValue(const uint16_t* string) {
+ free(mCurrentValue.str);
+ if (string) {
+ MtpStringBuffer buffer(string);
+ mCurrentValue.str = strdup(buffer);
+ }
+ else
+ mCurrentValue.str = NULL;
+}
+
+void MtpProperty::setFormRange(int min, int max, int step) {
+ mFormFlag = kFormRange;
+ switch (mType) {
+ case MTP_TYPE_INT8:
+ mMinimumValue.u.i8 = min;
+ mMaximumValue.u.i8 = max;
+ mStepSize.u.i8 = step;
+ break;
+ case MTP_TYPE_UINT8:
+ mMinimumValue.u.u8 = min;
+ mMaximumValue.u.u8 = max;
+ mStepSize.u.u8 = step;
+ break;
+ case MTP_TYPE_INT16:
+ mMinimumValue.u.i16 = min;
+ mMaximumValue.u.i16 = max;
+ mStepSize.u.i16 = step;
+ break;
+ case MTP_TYPE_UINT16:
+ mMinimumValue.u.u16 = min;
+ mMaximumValue.u.u16 = max;
+ mStepSize.u.u16 = step;
+ break;
+ case MTP_TYPE_INT32:
+ mMinimumValue.u.i32 = min;
+ mMaximumValue.u.i32 = max;
+ mStepSize.u.i32 = step;
+ break;
+ case MTP_TYPE_UINT32:
+ mMinimumValue.u.u32 = min;
+ mMaximumValue.u.u32 = max;
+ mStepSize.u.u32 = step;
+ break;
+ case MTP_TYPE_INT64:
+ mMinimumValue.u.i64 = min;
+ mMaximumValue.u.i64 = max;
+ mStepSize.u.i64 = step;
+ break;
+ case MTP_TYPE_UINT64:
+ mMinimumValue.u.u64 = min;
+ mMaximumValue.u.u64 = max;
+ mStepSize.u.u64 = step;
+ break;
+ default:
+ LOGE("unsupported type for MtpProperty::setRange");
+ break;
+ }
+}
+
+void MtpProperty::setFormEnum(const int* values, int count) {
+ mFormFlag = kFormEnum;
+ delete[] mEnumValues;
+ mEnumValues = new MtpPropertyValue[count];
+ mEnumLength = count;
+
+ for (int i = 0; i < count; i++) {
+ int value = *values++;
+ switch (mType) {
+ case MTP_TYPE_INT8:
+ mEnumValues[i].u.i8 = value;
+ break;
+ case MTP_TYPE_UINT8:
+ mEnumValues[i].u.u8 = value;
+ break;
+ case MTP_TYPE_INT16:
+ mEnumValues[i].u.i16 = value;
+ break;
+ case MTP_TYPE_UINT16:
+ mEnumValues[i].u.u16 = value;
+ break;
+ case MTP_TYPE_INT32:
+ mEnumValues[i].u.i32 = value;
+ break;
+ case MTP_TYPE_UINT32:
+ mEnumValues[i].u.u32 = value;
+ break;
+ case MTP_TYPE_INT64:
+ mEnumValues[i].u.i64 = value;
+ break;
+ case MTP_TYPE_UINT64:
+ mEnumValues[i].u.u64 = value;
+ break;
+ default:
+ LOGE("unsupported type for MtpProperty::setEnum");
+ break;
+ }
+ }
+}
+
+void MtpProperty::setFormDateTime() {
+ mFormFlag = kFormDateTime;
+}
+
+void MtpProperty::print() {
+ LOGV("MtpProperty %04X\n", mCode);
+ LOGV(" type %04X\n", mType);
+ LOGV(" writeable %s\n", (mWriteable ? "true" : "false"));
+}
+
+void MtpProperty::readValue(MtpDataPacket& packet, MtpPropertyValue& value) {
+ MtpStringBuffer stringBuffer;
+
+ switch (mType) {
+ case MTP_TYPE_INT8:
+ case MTP_TYPE_AINT8:
+ value.u.i8 = packet.getInt8();
+ break;
+ case MTP_TYPE_UINT8:
+ case MTP_TYPE_AUINT8:
+ value.u.u8 = packet.getUInt8();
+ break;
+ case MTP_TYPE_INT16:
+ case MTP_TYPE_AINT16:
+ value.u.i16 = packet.getInt16();
+ break;
+ case MTP_TYPE_UINT16:
+ case MTP_TYPE_AUINT16:
+ value.u.u16 = packet.getUInt16();
+ break;
+ case MTP_TYPE_INT32:
+ case MTP_TYPE_AINT32:
+ value.u.i32 = packet.getInt32();
+ break;
+ case MTP_TYPE_UINT32:
+ case MTP_TYPE_AUINT32:
+ value.u.u32 = packet.getUInt32();
+ break;
+ case MTP_TYPE_INT64:
+ case MTP_TYPE_AINT64:
+ value.u.i64 = packet.getInt64();
+ break;
+ case MTP_TYPE_UINT64:
+ case MTP_TYPE_AUINT64:
+ value.u.u64 = packet.getUInt64();
+ break;
+ case MTP_TYPE_INT128:
+ case MTP_TYPE_AINT128:
+ packet.getInt128(value.u.i128);
+ break;
+ case MTP_TYPE_UINT128:
+ case MTP_TYPE_AUINT128:
+ packet.getUInt128(value.u.u128);
+ break;
+ case MTP_TYPE_STR:
+ packet.getString(stringBuffer);
+ value.str = strdup(stringBuffer);
+ break;
+ default:
+ LOGE("unknown type %04X in MtpProperty::readValue", mType);
+ }
+}
+
+void MtpProperty::writeValue(MtpDataPacket& packet, MtpPropertyValue& value) {
+ MtpStringBuffer stringBuffer;
+
+ switch (mType) {
+ case MTP_TYPE_INT8:
+ case MTP_TYPE_AINT8:
+ packet.putInt8(value.u.i8);
+ break;
+ case MTP_TYPE_UINT8:
+ case MTP_TYPE_AUINT8:
+ packet.putUInt8(value.u.u8);
+ break;
+ case MTP_TYPE_INT16:
+ case MTP_TYPE_AINT16:
+ packet.putInt16(value.u.i16);
+ break;
+ case MTP_TYPE_UINT16:
+ case MTP_TYPE_AUINT16:
+ packet.putUInt16(value.u.u16);
+ break;
+ case MTP_TYPE_INT32:
+ case MTP_TYPE_AINT32:
+ packet.putInt32(value.u.i32);
+ break;
+ case MTP_TYPE_UINT32:
+ case MTP_TYPE_AUINT32:
+ packet.putUInt32(value.u.u32);
+ break;
+ case MTP_TYPE_INT64:
+ case MTP_TYPE_AINT64:
+ packet.putInt64(value.u.i64);
+ break;
+ case MTP_TYPE_UINT64:
+ case MTP_TYPE_AUINT64:
+ packet.putUInt64(value.u.u64);
+ break;
+ case MTP_TYPE_INT128:
+ case MTP_TYPE_AINT128:
+ packet.putInt128(value.u.i128);
+ break;
+ case MTP_TYPE_UINT128:
+ case MTP_TYPE_AUINT128:
+ packet.putUInt128(value.u.u128);
+ break;
+ case MTP_TYPE_STR:
+ if (value.str)
+ packet.putString(value.str);
+ else
+ packet.putEmptyString();
+ break;
+ default:
+ LOGE("unknown type %04X in MtpProperty::writeValue", mType);
+ }
+}
+
+MtpPropertyValue* MtpProperty::readArrayValues(MtpDataPacket& packet, int& length) {
+ length = packet.getUInt32();
+ if (length == 0)
+ return NULL;
+ MtpPropertyValue* result = new MtpPropertyValue[length];
+ for (int i = 0; i < length; i++)
+ readValue(packet, result[i]);
+ return result;
+}
+
+void MtpProperty::writeArrayValues(MtpDataPacket& packet, MtpPropertyValue* values, int length) {
+ packet.putUInt32(length);
+ for (int i = 0; i < length; i++)
+ writeValue(packet, values[i]);
+}
+
+} // namespace android
diff --git a/media/mtp/MtpProperty.h b/media/mtp/MtpProperty.h
new file mode 100644
index 0000000..f783a87
--- /dev/null
+++ b/media/mtp/MtpProperty.h
@@ -0,0 +1,113 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_PROPERTY_H
+#define _MTP_PROPERTY_H
+
+#include "MtpTypes.h"
+
+namespace android {
+
+class MtpDataPacket;
+
+struct MtpPropertyValue {
+ union {
+ int8_t i8;
+ uint8_t u8;
+ int16_t i16;
+ uint16_t u16;
+ int32_t i32;
+ uint32_t u32;
+ int64_t i64;
+ uint64_t u64;
+ int128_t i128;
+ uint128_t u128;
+ } u;
+ // string in UTF8 format
+ char* str;
+};
+
+class MtpProperty {
+public:
+ MtpPropertyCode mCode;
+ MtpDataType mType;
+ bool mWriteable;
+ MtpPropertyValue mDefaultValue;
+ MtpPropertyValue mCurrentValue;
+
+ // for array types
+ int mDefaultArrayLength;
+ MtpPropertyValue* mDefaultArrayValues;
+ int mCurrentArrayLength;
+ MtpPropertyValue* mCurrentArrayValues;
+
+ enum {
+ kFormNone = 0,
+ kFormRange = 1,
+ kFormEnum = 2,
+ kFormDateTime = 3,
+ };
+
+ uint32_t mGroupCode;
+ uint8_t mFormFlag;
+
+ // for range form
+ MtpPropertyValue mMinimumValue;
+ MtpPropertyValue mMaximumValue;
+ MtpPropertyValue mStepSize;
+
+ // for enum form
+ int mEnumLength;
+ MtpPropertyValue* mEnumValues;
+
+public:
+ MtpProperty();
+ MtpProperty(MtpPropertyCode propCode,
+ MtpDataType type,
+ bool writeable = false,
+ int defaultValue = 0);
+ virtual ~MtpProperty();
+
+ inline MtpPropertyCode getPropertyCode() const { return mCode; }
+
+ void read(MtpDataPacket& packet);
+ void write(MtpDataPacket& packet);
+
+ void setDefaultValue(const uint16_t* string);
+ void setCurrentValue(const uint16_t* string);
+
+ void setFormRange(int min, int max, int step);
+ void setFormEnum(const int* values, int count);
+ void setFormDateTime();
+
+ void print();
+
+ inline bool isDeviceProperty() const {
+ return ( ((mCode & 0xF000) == 0x5000)
+ || ((mCode & 0xF800) == 0xD000));
+ }
+
+private:
+ void readValue(MtpDataPacket& packet, MtpPropertyValue& value);
+ void writeValue(MtpDataPacket& packet, MtpPropertyValue& value);
+ MtpPropertyValue* readArrayValues(MtpDataPacket& packet, int& length);
+ void writeArrayValues(MtpDataPacket& packet,
+ MtpPropertyValue* values, int length);
+};
+
+}; // namespace android
+
+#endif // _MTP_PROPERTY_H
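
As a usage sketch of the public API above: a device-side GetDevicePropDesc handler could describe a read-only battery-level property roughly as follows. This is illustrative only; MTP_DEVICE_PROPERTY_BATTERY_LEVEL is assumed to be defined in mtp.h (MTP_TYPE_UINT8 already appears in MtpProperty.cpp), MtpDataPacket comes from MtpDataPacket.h as used throughout this change, and the 0..100/step-1 range is just an example.

// Hypothetical helper: serialize a DevicePropDesc dataset for battery level.
static void describeBatteryLevel(android::MtpDataPacket& packet) {
    android::MtpProperty property(MTP_DEVICE_PROPERTY_BATTERY_LEVEL,   // assumed constant from mtp.h
                                  MTP_TYPE_UINT8,
                                  false,                               // read-only
                                  0);                                  // default value
    property.setFormRange(0, 100, 1);   // percentage, step 1 (example values)
    property.write(packet);             // emits default/current values plus the range form
}
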
diff --git a/media/mtp/MtpRequestPacket.cpp b/media/mtp/MtpRequestPacket.cpp
new file mode 100644
index 0000000..8ece580
--- /dev/null
+++ b/media/mtp/MtpRequestPacket.cpp
@@ -0,0 +1,56 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpRequestPacket"
+
+#include <stdio.h>
+#include <sys/types.h>
+#include <unistd.h>
+#include <fcntl.h>
+
+#include "MtpRequestPacket.h"
+
+namespace android {
+
+MtpRequestPacket::MtpRequestPacket()
+ : MtpPacket(512)
+{
+}
+
+MtpRequestPacket::~MtpRequestPacket() {
+}
+
+#ifdef MTP_DEVICE
+int MtpRequestPacket::read(int fd) {
+ int ret = ::read(fd, mBuffer, mBufferSize);
+ if (ret >= 0)
+ mPacketSize = ret;
+ else
+ mPacketSize = 0;
+ return ret;
+}
+#endif
+
+#ifdef MTP_HOST
+ // write our buffer to the given endpoint (host mode)
+int MtpRequestPacket::write(struct usb_endpoint *ep)
+{
+ putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize);
+ putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_COMMAND);
+ return transfer(ep, mBuffer, mPacketSize);
+}
+#endif
+
+} // namespace android
diff --git a/media/mtp/MtpRequestPacket.h b/media/mtp/MtpRequestPacket.h
new file mode 100644
index 0000000..df518f2
--- /dev/null
+++ b/media/mtp/MtpRequestPacket.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_REQUEST_PACKET_H
+#define _MTP_REQUEST_PACKET_H
+
+#include "MtpPacket.h"
+#include "mtp.h"
+
+namespace android {
+
+class MtpRequestPacket : public MtpPacket {
+
+public:
+ MtpRequestPacket();
+ virtual ~MtpRequestPacket();
+
+#ifdef MTP_DEVICE
+ // fill our buffer with data from the given file descriptor
+ int read(int fd);
+#endif
+
+#ifdef MTP_HOST
+ // write our buffer to the given endpoint
+ int write(struct usb_endpoint *ep);
+#endif
+
+ inline MtpOperationCode getOperationCode() const { return getContainerCode(); }
+ inline void setOperationCode(MtpOperationCode code)
+ { return setContainerCode(code); }
+};
+
+}; // namespace android
+
+#endif // _MTP_REQUEST_PACKET_H
diff --git a/media/mtp/MtpResponsePacket.cpp b/media/mtp/MtpResponsePacket.cpp
new file mode 100644
index 0000000..3ef714e
--- /dev/null
+++ b/media/mtp/MtpResponsePacket.cpp
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpResponsePacket"
+
+#include <stdio.h>
+#include <sys/types.h>
+#include <unistd.h>
+#include <fcntl.h>
+
+#include "MtpResponsePacket.h"
+
+namespace android {
+
+MtpResponsePacket::MtpResponsePacket()
+ : MtpPacket(512)
+{
+}
+
+MtpResponsePacket::~MtpResponsePacket() {
+}
+
+#ifdef MTP_DEVICE
+int MtpResponsePacket::write(int fd) {
+ putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize);
+ putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_RESPONSE);
+ int ret = ::write(fd, mBuffer, mPacketSize);
+ return (ret < 0 ? ret : 0);
+}
+#endif
+
+#ifdef MTP_HOST
+ // read our buffer from the given endpoint
+int MtpResponsePacket::read(struct usb_endpoint *ep) {
+ int ret = transfer(ep, mBuffer, mBufferSize);
+ if (ret >= 0)
+ mPacketSize = ret;
+ else
+ mPacketSize = 0;
+ return ret;
+}
+#endif
+
+} // namespace android
+
diff --git a/media/mtp/MtpResponsePacket.h b/media/mtp/MtpResponsePacket.h
new file mode 100644
index 0000000..373f8f9
--- /dev/null
+++ b/media/mtp/MtpResponsePacket.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_RESPONSE_PACKET_H
+#define _MTP_RESPONSE_PACKET_H
+
+#include "MtpPacket.h"
+#include "mtp.h"
+
+namespace android {
+
+class MtpResponsePacket : public MtpPacket {
+
+public:
+ MtpResponsePacket();
+ virtual ~MtpResponsePacket();
+
+#ifdef MTP_DEVICE
+ // write our data to the given file descriptor
+ int write(int fd);
+#endif
+
+#ifdef MTP_HOST
+ // read our buffer from the given endpoint
+ int read(struct usb_endpoint *ep);
+#endif
+
+ inline MtpResponseCode getResponseCode() const { return getContainerCode(); }
+ inline void setResponseCode(MtpResponseCode code)
+ { return setContainerCode(code); }
+};
+
+}; // namespace android
+
+#endif // _MTP_RESPONSE_PACKET_H
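
Together with MtpRequestPacket and MtpDataPacket, the class above covers the response phase of an MTP transaction, much as MtpDevice's private sendRequest()/readResponse() helpers suggest. A hedged host-side sketch (MTP_HOST builds only), using just the methods declared in these headers; MTP_OPERATION_GET_STORAGE_IDS and MTP_RESPONSE_OK come from mtp.h as used elsewhere in this change, the bulk endpoints are supplied by the caller, and error handling is deliberately minimal.

// Hypothetical host-side transaction with no request parameters:
// send the command on the bulk-out endpoint, then read the response on bulk-in.
static int getStorageIDsTransaction(struct usb_endpoint* ep_out,
                                    struct usb_endpoint* ep_in,
                                    android::MtpTransactionID transactionID) {
    android::MtpRequestPacket request;
    request.reset();                                          // container header only
    request.setOperationCode(MTP_OPERATION_GET_STORAGE_IDS);  // from mtp.h
    request.setTransactionID(transactionID);
    if (request.write(ep_out) < 0)
        return -1;

    // A real caller would read the data phase here with an MtpDataPacket
    // before collecting the response.

    android::MtpResponsePacket response;
    if (response.read(ep_in) < 0)
        return -1;
    return response.getResponseCode();                        // e.g. MTP_RESPONSE_OK
}
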
diff --git a/media/mtp/MtpServer.cpp b/media/mtp/MtpServer.cpp
new file mode 100644
index 0000000..c3755f3
--- /dev/null
+++ b/media/mtp/MtpServer.cpp
@@ -0,0 +1,871 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <sys/types.h>
+#include <sys/ioctl.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <errno.h>
+#include <unistd.h>
+#include <dirent.h>
+
+#include <cutils/properties.h>
+
+#define LOG_TAG "MtpServer"
+
+#include "MtpDebug.h"
+#include "MtpDatabase.h"
+#include "MtpProperty.h"
+#include "MtpServer.h"
+#include "MtpStorage.h"
+#include "MtpStringBuffer.h"
+
+#include <linux/usb/f_mtp.h>
+
+namespace android {
+
+static const MtpOperationCode kSupportedOperationCodes[] = {
+ MTP_OPERATION_GET_DEVICE_INFO,
+ MTP_OPERATION_OPEN_SESSION,
+ MTP_OPERATION_CLOSE_SESSION,
+ MTP_OPERATION_GET_STORAGE_IDS,
+ MTP_OPERATION_GET_STORAGE_INFO,
+ MTP_OPERATION_GET_NUM_OBJECTS,
+ MTP_OPERATION_GET_OBJECT_HANDLES,
+ MTP_OPERATION_GET_OBJECT_INFO,
+ MTP_OPERATION_GET_OBJECT,
+// MTP_OPERATION_GET_THUMB,
+ MTP_OPERATION_DELETE_OBJECT,
+ MTP_OPERATION_SEND_OBJECT_INFO,
+ MTP_OPERATION_SEND_OBJECT,
+// MTP_OPERATION_INITIATE_CAPTURE,
+// MTP_OPERATION_FORMAT_STORE,
+// MTP_OPERATION_RESET_DEVICE,
+// MTP_OPERATION_SELF_TEST,
+// MTP_OPERATION_SET_OBJECT_PROTECTION,
+// MTP_OPERATION_POWER_DOWN,
+ MTP_OPERATION_GET_DEVICE_PROP_DESC,
+ MTP_OPERATION_GET_DEVICE_PROP_VALUE,
+ MTP_OPERATION_SET_DEVICE_PROP_VALUE,
+ MTP_OPERATION_RESET_DEVICE_PROP_VALUE,
+// MTP_OPERATION_TERMINATE_OPEN_CAPTURE,
+// MTP_OPERATION_MOVE_OBJECT,
+// MTP_OPERATION_COPY_OBJECT,
+ MTP_OPERATION_GET_PARTIAL_OBJECT,
+// MTP_OPERATION_INITIATE_OPEN_CAPTURE,
+ MTP_OPERATION_GET_OBJECT_PROPS_SUPPORTED,
+ MTP_OPERATION_GET_OBJECT_PROP_DESC,
+ MTP_OPERATION_GET_OBJECT_PROP_VALUE,
+ MTP_OPERATION_SET_OBJECT_PROP_VALUE,
+ MTP_OPERATION_GET_OBJECT_PROP_LIST,
+// MTP_OPERATION_SET_OBJECT_PROP_LIST,
+// MTP_OPERATION_GET_INTERDEPENDENT_PROP_DESC,
+// MTP_OPERATION_SEND_OBJECT_PROP_LIST,
+ MTP_OPERATION_GET_OBJECT_REFERENCES,
+ MTP_OPERATION_SET_OBJECT_REFERENCES,
+// MTP_OPERATION_SKIP,
+};
+
+static const MtpEventCode kSupportedEventCodes[] = {
+ MTP_EVENT_OBJECT_ADDED,
+ MTP_EVENT_OBJECT_REMOVED,
+};
+
+MtpServer::MtpServer(int fd, MtpDatabase* database,
+ int fileGroup, int filePerm, int directoryPerm)
+ : mFD(fd),
+ mDatabase(database),
+ mFileGroup(fileGroup),
+ mFilePermission(filePerm),
+ mDirectoryPermission(directoryPerm),
+ mSessionID(0),
+ mSessionOpen(false),
+ mSendObjectHandle(kInvalidObjectHandle),
+ mSendObjectFormat(0),
+ mSendObjectFileSize(0)
+{
+}
+
+MtpServer::~MtpServer() {
+}
+
+void MtpServer::addStorage(const char* filePath) {
+ int index = mStorages.size() + 1;
+ index |= index << 16; // set high and low part to our index
+ MtpStorage* storage = new MtpStorage(index, filePath, mDatabase);
+ addStorage(storage);
+}
+
+MtpStorage* MtpServer::getStorage(MtpStorageID id) {
+ for (int i = 0; i < mStorages.size(); i++) {
+ MtpStorage* storage = mStorages[i];
+ if (storage->getStorageID() == id)
+ return storage;
+ }
+ return NULL;
+}
+
+void MtpServer::run() {
+ int fd = mFD;
+
+ LOGV("MtpServer::run fd: %d\n", fd);
+
+ while (1) {
+ int ret = mRequest.read(fd);
+ if (ret < 0) {
+ LOGE("request read returned %d, errno: %d", ret, errno);
+ if (errno == ECANCELED) {
+ // return to top of loop and wait for next command
+ continue;
+ }
+ break;
+ }
+ MtpOperationCode operation = mRequest.getOperationCode();
+ MtpTransactionID transaction = mRequest.getTransactionID();
+
+ LOGV("operation: %s", MtpDebug::getOperationCodeName(operation));
+ mRequest.dump();
+
+ // FIXME need to generalize this
+ bool dataIn = (operation == MTP_OPERATION_SEND_OBJECT_INFO
+ || operation == MTP_OPERATION_SET_OBJECT_REFERENCES
+ || operation == MTP_OPERATION_SET_OBJECT_PROP_VALUE
+ || operation == MTP_OPERATION_SET_DEVICE_PROP_VALUE);
+ if (dataIn) {
+ int ret = mData.read(fd);
+ if (ret < 0) {
+ LOGE("data read returned %d, errno: %d", ret, errno);
+ if (errno == ECANCELED) {
+ // return to top of loop and wait for next command
+ continue;
+ }
+ break;
+ }
+ LOGV("received data:");
+ mData.dump();
+ } else {
+ mData.reset();
+ }
+
+ if (handleRequest()) {
+ if (!dataIn && mData.hasData()) {
+ mData.setOperationCode(operation);
+ mData.setTransactionID(transaction);
+ LOGV("sending data:");
+ mData.dump();
+ ret = mData.write(fd);
+ if (ret < 0) {
+ LOGE("request write returned %d, errno: %d", ret, errno);
+ if (errno == ECANCELED) {
+ // return to top of loop and wait for next command
+ continue;
+ }
+ break;
+ }
+ }
+
+ mResponse.setTransactionID(transaction);
+ LOGV("sending response %04X", mResponse.getResponseCode());
+ ret = mResponse.write(fd);
+ mResponse.dump();
+ if (ret < 0) {
+ LOGE("request write returned %d, errno: %d", ret, errno);
+ if (errno == ECANCELED) {
+ // return to top of loop and wait for next command
+ continue;
+ }
+ break;
+ }
+ } else {
+ LOGV("skipping response\n");
+ }
+ }
+
+ if (mSessionOpen)
+ mDatabase->sessionEnded();
+}
+
+void MtpServer::sendObjectAdded(MtpObjectHandle handle) {
+ if (mSessionOpen) {
+ LOGD("sendObjectAdded %d\n", handle);
+ mEvent.setEventCode(MTP_EVENT_OBJECT_ADDED);
+ mEvent.setTransactionID(mRequest.getTransactionID());
+ mEvent.setParameter(1, handle);
+ int ret = mEvent.write(mFD);
+ LOGD("mEvent.write returned %d\n", ret);
+ }
+}
+
+void MtpServer::sendObjectRemoved(MtpObjectHandle handle) {
+ if (mSessionOpen) {
+ LOGD("sendObjectRemoved %d\n", handle);
+ mEvent.setEventCode(MTP_EVENT_OBJECT_REMOVED);
+ mEvent.setTransactionID(mRequest.getTransactionID());
+ mEvent.setParameter(1, handle);
+ int ret = mEvent.write(mFD);
+ LOGD("mEvent.write returned %d\n", ret);
+ }
+}
+
+bool MtpServer::handleRequest() {
+ MtpOperationCode operation = mRequest.getOperationCode();
+ MtpResponseCode response;
+
+ mResponse.reset();
+
+ if (mSendObjectHandle != kInvalidObjectHandle && operation != MTP_OPERATION_SEND_OBJECT) {
+ // FIXME - need to delete mSendObjectHandle from the database
+ LOGE("expected SendObject after SendObjectInfo");
+ mSendObjectHandle = kInvalidObjectHandle;
+ }
+
+ switch (operation) {
+ case MTP_OPERATION_GET_DEVICE_INFO:
+ response = doGetDeviceInfo();
+ break;
+ case MTP_OPERATION_OPEN_SESSION:
+ response = doOpenSession();
+ break;
+ case MTP_OPERATION_CLOSE_SESSION:
+ response = doCloseSession();
+ break;
+ case MTP_OPERATION_GET_STORAGE_IDS:
+ response = doGetStorageIDs();
+ break;
+ case MTP_OPERATION_GET_STORAGE_INFO:
+ response = doGetStorageInfo();
+ break;
+ case MTP_OPERATION_GET_OBJECT_PROPS_SUPPORTED:
+ response = doGetObjectPropsSupported();
+ break;
+ case MTP_OPERATION_GET_OBJECT_HANDLES:
+ response = doGetObjectHandles();
+ break;
+ case MTP_OPERATION_GET_NUM_OBJECTS:
+ response = doGetNumObjects();
+ break;
+ case MTP_OPERATION_GET_OBJECT_REFERENCES:
+ response = doGetObjectReferences();
+ break;
+ case MTP_OPERATION_SET_OBJECT_REFERENCES:
+ response = doSetObjectReferences();
+ break;
+ case MTP_OPERATION_GET_OBJECT_PROP_VALUE:
+ response = doGetObjectPropValue();
+ break;
+ case MTP_OPERATION_SET_OBJECT_PROP_VALUE:
+ response = doSetObjectPropValue();
+ break;
+ case MTP_OPERATION_GET_DEVICE_PROP_VALUE:
+ response = doGetDevicePropValue();
+ break;
+ case MTP_OPERATION_SET_DEVICE_PROP_VALUE:
+ response = doSetDevicePropValue();
+ break;
+ case MTP_OPERATION_RESET_DEVICE_PROP_VALUE:
+ response = doResetDevicePropValue();
+ break;
+ case MTP_OPERATION_GET_OBJECT_PROP_LIST:
+ response = doGetObjectPropList();
+ break;
+ case MTP_OPERATION_GET_OBJECT_INFO:
+ response = doGetObjectInfo();
+ break;
+ case MTP_OPERATION_GET_OBJECT:
+ response = doGetObject();
+ break;
+ case MTP_OPERATION_GET_PARTIAL_OBJECT:
+ response = doGetPartialObject();
+ break;
+ case MTP_OPERATION_SEND_OBJECT_INFO:
+ response = doSendObjectInfo();
+ break;
+ case MTP_OPERATION_SEND_OBJECT:
+ response = doSendObject();
+ break;
+ case MTP_OPERATION_DELETE_OBJECT:
+ response = doDeleteObject();
+ break;
+ case MTP_OPERATION_GET_OBJECT_PROP_DESC:
+ response = doGetObjectPropDesc();
+ break;
+ case MTP_OPERATION_GET_DEVICE_PROP_DESC:
+ response = doGetDevicePropDesc();
+ break;
+ default:
+ LOGE("got unsupported command %s", MtpDebug::getOperationCodeName(operation));
+ response = MTP_RESPONSE_OPERATION_NOT_SUPPORTED;
+ break;
+ }
+
+ if (response == MTP_RESPONSE_TRANSACTION_CANCELLED)
+ return false;
+ mResponse.setResponseCode(response);
+ return true;
+}
+
+MtpResponseCode MtpServer::doGetDeviceInfo() {
+ MtpStringBuffer string;
+ char prop_value[PROPERTY_VALUE_MAX];
+
+ MtpObjectFormatList* playbackFormats = mDatabase->getSupportedPlaybackFormats();
+ MtpObjectFormatList* captureFormats = mDatabase->getSupportedCaptureFormats();
+ MtpDevicePropertyList* deviceProperties = mDatabase->getSupportedDeviceProperties();
+
+ // fill in device info
+ mData.putUInt16(MTP_STANDARD_VERSION);
+ mData.putUInt32(6); // MTP Vendor Extension ID
+ mData.putUInt16(MTP_STANDARD_VERSION);
+ string.set("microsoft.com: 1.0;");
+ mData.putString(string); // MTP Extensions
+ mData.putUInt16(0); //Functional Mode
+ mData.putAUInt16(kSupportedOperationCodes,
+ sizeof(kSupportedOperationCodes) / sizeof(uint16_t)); // Operations Supported
+ mData.putAUInt16(kSupportedEventCodes,
+ sizeof(kSupportedEventCodes) / sizeof(uint16_t)); // Events Supported
+ mData.putAUInt16(deviceProperties); // Device Properties Supported
+ mData.putAUInt16(captureFormats); // Capture Formats
+ mData.putAUInt16(playbackFormats); // Playback Formats
+ // FIXME
+ string.set("Google, Inc.");
+ mData.putString(string); // Manufacturer
+
+ property_get("ro.product.model", prop_value, "MTP Device");
+ string.set(prop_value);
+ mData.putString(string); // Model
+ string.set("1.0");
+ mData.putString(string); // Device Version
+
+ property_get("ro.serialno", prop_value, "????????");
+ string.set(prop_value);
+ mData.putString(string); // Serial Number
+
+ delete playbackFormats;
+ delete captureFormats;
+ delete deviceProperties;
+
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doOpenSession() {
+ if (mSessionOpen) {
+ mResponse.setParameter(1, mSessionID);
+ return MTP_RESPONSE_SESSION_ALREADY_OPEN;
+ }
+ mSessionID = mRequest.getParameter(1);
+ mSessionOpen = true;
+
+ mDatabase->sessionStarted();
+
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doCloseSession() {
+ if (!mSessionOpen)
+ return MTP_RESPONSE_SESSION_NOT_OPEN;
+ mSessionID = 0;
+ mSessionOpen = false;
+ mDatabase->sessionEnded();
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetStorageIDs() {
+ if (!mSessionOpen)
+ return MTP_RESPONSE_SESSION_NOT_OPEN;
+
+ int count = mStorages.size();
+ mData.putUInt32(count);
+ for (int i = 0; i < count; i++)
+ mData.putUInt32(mStorages[i]->getStorageID());
+
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetStorageInfo() {
+ MtpStringBuffer string;
+
+ if (!mSessionOpen)
+ return MTP_RESPONSE_SESSION_NOT_OPEN;
+ MtpStorageID id = mRequest.getParameter(1);
+ MtpStorage* storage = getStorage(id);
+ if (!storage)
+ return MTP_RESPONSE_INVALID_STORAGE_ID;
+
+ mData.putUInt16(storage->getType());
+ mData.putUInt16(storage->getFileSystemType());
+ mData.putUInt16(storage->getAccessCapability());
+ mData.putUInt64(storage->getMaxCapacity());
+ mData.putUInt64(storage->getFreeSpace());
+ mData.putUInt32(1024*1024*1024); // Free Space in Objects
+ string.set(storage->getDescription());
+ mData.putString(string);
+ mData.putEmptyString(); // Volume Identifier
+
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetObjectPropsSupported() {
+ if (!mSessionOpen)
+ return MTP_RESPONSE_SESSION_NOT_OPEN;
+ MtpObjectFormat format = mRequest.getParameter(1);
+ MtpDevicePropertyList* properties = mDatabase->getSupportedObjectProperties(format);
+ mData.putAUInt16(properties);
+ delete properties;
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetObjectHandles() {
+ if (!mSessionOpen)
+ return MTP_RESPONSE_SESSION_NOT_OPEN;
+ MtpStorageID storageID = mRequest.getParameter(1); // 0xFFFFFFFF for all storage
+ MtpObjectFormat format = mRequest.getParameter(2); // 0 for all formats
+ MtpObjectHandle parent = mRequest.getParameter(3); // 0xFFFFFFFF for objects with no parent
+ // 0x00000000 for all objects?
+ if (parent == 0xFFFFFFFF)
+ parent = 0;
+
+ MtpObjectHandleList* handles = mDatabase->getObjectList(storageID, format, parent);
+ mData.putAUInt32(handles);
+ delete handles;
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetNumObjects() {
+ if (!mSessionOpen)
+ return MTP_RESPONSE_SESSION_NOT_OPEN;
+ MtpStorageID storageID = mRequest.getParameter(1); // 0xFFFFFFFF for all storage
+ MtpObjectFormat format = mRequest.getParameter(2); // 0 for all formats
+ MtpObjectHandle parent = mRequest.getParameter(3); // 0xFFFFFFFF for objects with no parent
+ // 0x00000000 for all objects?
+ if (parent == 0xFFFFFFFF)
+ parent = 0;
+
+ int count = mDatabase->getNumObjects(storageID, format, parent);
+ if (count >= 0) {
+ mResponse.setParameter(1, count);
+ return MTP_RESPONSE_OK;
+ } else {
+ mResponse.setParameter(1, 0);
+ return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ }
+}
+
+MtpResponseCode MtpServer::doGetObjectReferences() {
+ if (!mSessionOpen)
+ return MTP_RESPONSE_SESSION_NOT_OPEN;
+ MtpStorageID handle = mRequest.getParameter(1);
+
+ // FIXME - check for invalid object handle
+ MtpObjectHandleList* handles = mDatabase->getObjectReferences(handle);
+ if (handles) {
+ mData.putAUInt32(handles);
+ delete handles;
+ } else {
+ mData.putEmptyArray();
+ }
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doSetObjectReferences() {
+ if (!mSessionOpen)
+ return MTP_RESPONSE_SESSION_NOT_OPEN;
+ MtpStorageID handle = mRequest.getParameter(1);
+ MtpObjectHandleList* references = mData.getAUInt32();
+ MtpResponseCode result = mDatabase->setObjectReferences(handle, references);
+ delete references;
+ return result;
+}
+
+MtpResponseCode MtpServer::doGetObjectPropValue() {
+ MtpObjectHandle handle = mRequest.getParameter(1);
+ MtpObjectProperty property = mRequest.getParameter(2);
+ LOGD("GetObjectPropValue %d %s\n", handle,
+ MtpDebug::getObjectPropCodeName(property));
+
+ return mDatabase->getObjectPropertyValue(handle, property, mData);
+}
+
+MtpResponseCode MtpServer::doSetObjectPropValue() {
+ MtpObjectHandle handle = mRequest.getParameter(1);
+ MtpObjectProperty property = mRequest.getParameter(2);
+ LOGD("SetObjectPropValue %d %s\n", handle,
+ MtpDebug::getObjectPropCodeName(property));
+
+ return mDatabase->setObjectPropertyValue(handle, property, mData);
+}
+
+MtpResponseCode MtpServer::doGetDevicePropValue() {
+ MtpDeviceProperty property = mRequest.getParameter(1);
+ LOGD("GetDevicePropValue %s\n",
+ MtpDebug::getDevicePropCodeName(property));
+
+ return mDatabase->getDevicePropertyValue(property, mData);
+}
+
+MtpResponseCode MtpServer::doSetDevicePropValue() {
+ MtpDeviceProperty property = mRequest.getParameter(1);
+ LOGD("SetDevicePropValue %s\n",
+ MtpDebug::getDevicePropCodeName(property));
+
+ return mDatabase->setDevicePropertyValue(property, mData);
+}
+
+MtpResponseCode MtpServer::doResetDevicePropValue() {
+ MtpDeviceProperty property = mRequest.getParameter(1);
+ LOGD("ResetDevicePropValue %s\n",
+ MtpDebug::getDevicePropCodeName(property));
+
+ return mDatabase->resetDeviceProperty(property);
+}
+
+MtpResponseCode MtpServer::doGetObjectPropList() {
+
+ MtpObjectHandle handle = mRequest.getParameter(1);
+ MtpObjectFormat format = mRequest.getParameter(2);
+ MtpDeviceProperty property = mRequest.getParameter(3);
+ int groupCode = mRequest.getParameter(4);
+ int depth = mRequest.getParameter(5);
+ LOGD("GetObjectPropList %d format: %s property: %s group: %d depth: %d\n",
+ handle, MtpDebug::getFormatCodeName(format),
+ MtpDebug::getObjectPropCodeName(property), groupCode, depth);
+
+ return mDatabase->getObjectPropertyList(handle, format, property, groupCode, depth, mData);
+}
+
+MtpResponseCode MtpServer::doGetObjectInfo() {
+ MtpObjectHandle handle = mRequest.getParameter(1);
+ return mDatabase->getObjectInfo(handle, mData);
+}
+
+MtpResponseCode MtpServer::doGetObject() {
+ MtpObjectHandle handle = mRequest.getParameter(1);
+ MtpString pathBuf;
+ int64_t fileLength;
+ int result = mDatabase->getObjectFilePath(handle, pathBuf, fileLength);
+ if (result != MTP_RESPONSE_OK)
+ return result;
+
+ const char* filePath = (const char *)pathBuf;
+ mtp_file_range mfr;
+ mfr.fd = open(filePath, O_RDONLY);
+ if (mfr.fd < 0) {
+ return MTP_RESPONSE_GENERAL_ERROR;
+ }
+ mfr.offset = 0;
+ mfr.length = fileLength;
+
+ // send data header
+ mData.setOperationCode(mRequest.getOperationCode());
+ mData.setTransactionID(mRequest.getTransactionID());
+ mData.writeDataHeader(mFD, fileLength + MTP_CONTAINER_HEADER_SIZE);
+
+ // then transfer the file
+ int ret = ioctl(mFD, MTP_SEND_FILE, (unsigned long)&mfr);
+ close(mfr.fd);
+ if (ret < 0) {
+ if (errno == ECANCELED)
+ return MTP_RESPONSE_TRANSACTION_CANCELLED;
+ else
+ return MTP_RESPONSE_GENERAL_ERROR;
+ }
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetPartialObject() {
+ MtpObjectHandle handle = mRequest.getParameter(1);
+ uint32_t offset = mRequest.getParameter(2);
+ uint32_t length = mRequest.getParameter(3);
+ MtpString pathBuf;
+ int64_t fileLength;
+ int result = mDatabase->getObjectFilePath(handle, pathBuf, fileLength);
+ if (result != MTP_RESPONSE_OK)
+ return result;
+ if (offset + length > fileLength)
+ length = fileLength - offset;
+
+ const char* filePath = (const char *)pathBuf;
+ mtp_file_range mfr;
+ mfr.fd = open(filePath, O_RDONLY);
+ if (mfr.fd < 0) {
+ return MTP_RESPONSE_GENERAL_ERROR;
+ }
+ mfr.offset = offset;
+ mfr.length = length;
+ mResponse.setParameter(1, length);
+
+ // send data header
+ mData.setOperationCode(mRequest.getOperationCode());
+ mData.setTransactionID(mRequest.getTransactionID());
+ mData.writeDataHeader(mFD, length + MTP_CONTAINER_HEADER_SIZE);
+
+ // then transfer the file
+ int ret = ioctl(mFD, MTP_SEND_FILE, (unsigned long)&mfr);
+ close(mfr.fd);
+ if (ret < 0) {
+ if (errno == ECANCELED)
+ return MTP_RESPONSE_TRANSACTION_CANCELLED;
+ else
+ return MTP_RESPONSE_GENERAL_ERROR;
+ }
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doSendObjectInfo() {
+ MtpString path;
+ MtpStorageID storageID = mRequest.getParameter(1);
+ MtpStorage* storage = getStorage(storageID);
+ MtpObjectHandle parent = mRequest.getParameter(2);
+ if (!storage)
+ return MTP_RESPONSE_INVALID_STORAGE_ID;
+
+ // special case the root
+ if (parent == MTP_PARENT_ROOT) {
+ path = storage->getPath();
+ parent = 0;
+ } else {
+ int64_t dummy;
+ int result = mDatabase->getObjectFilePath(parent, path, dummy);
+ if (result != MTP_RESPONSE_OK)
+ return result;
+ }
+
+ // read only the fields we need
+ mData.getUInt32(); // storage ID
+ MtpObjectFormat format = mData.getUInt16();
+ mData.getUInt16(); // protection status
+ mSendObjectFileSize = mData.getUInt32();
+ mData.getUInt16(); // thumb format
+ mData.getUInt32(); // thumb compressed size
+ mData.getUInt32(); // thumb pix width
+ mData.getUInt32(); // thumb pix height
+ mData.getUInt32(); // image pix width
+ mData.getUInt32(); // image pix height
+ mData.getUInt32(); // image bit depth
+ mData.getUInt32(); // parent
+ uint16_t associationType = mData.getUInt16();
+ uint32_t associationDesc = mData.getUInt32(); // association desc
+ mData.getUInt32(); // sequence number
+ MtpStringBuffer name, created, modified;
+ mData.getString(name); // file name
+ mData.getString(created); // date created
+ mData.getString(modified); // date modified
+ // keywords follow
+
+ LOGD("name: %s format: %04X\n", (const char *)name, format);
+ time_t modifiedTime;
+ if (!parseDateTime(modified, modifiedTime))
+ modifiedTime = 0;
+
+ if (path[path.size() - 1] != '/')
+ path += "/";
+ path += (const char *)name;
+
+ MtpObjectHandle handle = mDatabase->beginSendObject((const char*)path,
+ format, parent, storageID, mSendObjectFileSize, modifiedTime);
+ if (handle == kInvalidObjectHandle) {
+ return MTP_RESPONSE_GENERAL_ERROR;
+ }
+
+ if (format == MTP_FORMAT_ASSOCIATION) {
+ mode_t mask = umask(0);
+ int ret = mkdir((const char *)path, mDirectoryPermission);
+ umask(mask);
+ // mkdir() returns -1 on failure and reports EEXIST via errno
+ if (ret && errno != EEXIST)
+ return MTP_RESPONSE_GENERAL_ERROR;
+ chown((const char *)path, getuid(), mFileGroup);
+ } else {
+ mSendObjectFilePath = path;
+ // save the handle for the SendObject call, which should follow
+ mSendObjectHandle = handle;
+ mSendObjectFormat = format;
+ }
+
+ mResponse.setParameter(1, storageID);
+ mResponse.setParameter(2, parent);
+ mResponse.setParameter(3, handle);
+
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doSendObject() {
+ MtpResponseCode result = MTP_RESPONSE_OK;
+ mode_t mask;
+ int ret;
+ uint64_t actualSize = -1;
+
+ if (mSendObjectHandle == kInvalidObjectHandle) {
+ LOGE("Expected SendObjectInfo before SendObject");
+ result = MTP_RESPONSE_NO_VALID_OBJECT_INFO;
+ goto done;
+ }
+
+ // read the header
+ ret = mData.readDataHeader(mFD);
+ // FIXME - check for errors here.
+
+ // reset so we don't attempt to send this back
+ mData.reset();
+
+ mtp_file_range mfr;
+ // O_CREAT requires an explicit mode argument; permissions are adjusted again via fchmod() below
+ mfr.fd = open(mSendObjectFilePath, O_RDWR | O_CREAT | O_TRUNC, mFilePermission);
+ if (mfr.fd < 0) {
+ result = MTP_RESPONSE_GENERAL_ERROR;
+ goto done;
+ }
+ fchown(mfr.fd, getuid(), mFileGroup);
+ // set permissions
+ mask = umask(0);
+ fchmod(mfr.fd, mFilePermission);
+ umask(mask);
+
+ mfr.offset = 0;
+ mfr.length = mSendObjectFileSize;
+
+ LOGD("receiving %s\n", (const char *)mSendObjectFilePath);
+ // transfer the file
+ ret = ioctl(mFD, MTP_RECEIVE_FILE, (unsigned long)&mfr);
+ close(mfr.fd);
+
+ LOGV("MTP_RECEIVE_FILE returned %d", ret);
+
+ if (ret < 0) {
+ unlink(mSendObjectFilePath);
+ if (errno == ECANCELED)
+ result = MTP_RESPONSE_TRANSACTION_CANCELLED;
+ else
+ result = MTP_RESPONSE_GENERAL_ERROR;
+ } else if (mSendObjectFileSize == 0xFFFFFFFF) {
+ // actual size is likely > 4 gig so stat the file to compute actual length
+ struct stat s;
+ if (lstat(mSendObjectFilePath, &s) == 0) {
+ actualSize = s.st_size;
+ LOGD("actualSize: %lld\n", actualSize);
+ }
+ }
+
+done:
+ mDatabase->endSendObject(mSendObjectFilePath, mSendObjectHandle, mSendObjectFormat,
+ actualSize, result == MTP_RESPONSE_OK);
+ mSendObjectHandle = kInvalidObjectHandle;
+ mSendObjectFormat = 0;
+ return result;
+}
+
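+// recursively delete everything beneath the given directory;
+// the top-level directory itself is removed by the caller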
+static void deleteRecursive(const char* path) {
+ char pathbuf[PATH_MAX];
+ int pathLength = strlen(path);
+ if (pathLength >= sizeof(pathbuf) - 1) {
+ LOGE("path too long: %s\n", path);
+ return;
+ }
+ strcpy(pathbuf, path);
+ if (pathbuf[pathLength - 1] != '/') {
+ pathbuf[pathLength++] = '/';
+ }
+ char* fileSpot = pathbuf + pathLength;
+ int pathRemaining = sizeof(pathbuf) - pathLength - 1;
+
+ DIR* dir = opendir(path);
+ if (!dir) {
+ LOGE("opendir %s failed: %s", path, strerror(errno));
+ return;
+ }
+
+ struct dirent* entry;
+ while ((entry = readdir(dir))) {
+ const char* name = entry->d_name;
+
+ // ignore "." and ".."
+ if (name[0] == '.' && (name[1] == 0 || (name[1] == '.' && name[2] == 0))) {
+ continue;
+ }
+
+ int nameLength = strlen(name);
+ if (nameLength > pathRemaining) {
+ LOGE("path %s/%s too long\n", path, name);
+ continue;
+ }
+ strcpy(fileSpot, name);
+
+ if (entry->d_type == DT_DIR) {
+ deleteRecursive(pathbuf);
+ rmdir(pathbuf);
+ } else {
+ unlink(pathbuf);
+ }
+ }
+ closedir(dir);
+}
+
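+// delete a single file or an entire directory tree at the given path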
+static void deletePath(const char* path) {
+ struct stat statbuf;
+ if (stat(path, &statbuf) == 0) {
+ if (S_ISDIR(statbuf.st_mode)) {
+ deleteRecursive(path);
+ rmdir(path);
+ } else {
+ unlink(path);
+ }
+ } else {
+ LOGE("deletePath stat failed for %s: %s", path, strerror(errno));
+ }
+}
+
+MtpResponseCode MtpServer::doDeleteObject() {
+ MtpObjectHandle handle = mRequest.getParameter(1);
+ MtpObjectFormat format = mRequest.getParameter(2);
+ // FIXME - support deleting all objects if handle is 0xFFFFFFFF
+ // FIXME - implement deleting objects by format
+
+ MtpString filePath;
+ int64_t fileLength;
+ int result = mDatabase->getObjectFilePath(handle, filePath, fileLength);
+ if (result == MTP_RESPONSE_OK) {
+ LOGV("deleting %s", (const char *)filePath);
+ deletePath((const char *)filePath);
+ return mDatabase->deleteFile(handle);
+ } else {
+ return result;
+ }
+}
+
+MtpResponseCode MtpServer::doGetObjectPropDesc() {
+ MtpObjectProperty propCode = mRequest.getParameter(1);
+ MtpObjectFormat format = mRequest.getParameter(2);
+ LOGD("GetObjectPropDesc %s %s\n", MtpDebug::getObjectPropCodeName(propCode),
+ MtpDebug::getFormatCodeName(format));
+ MtpProperty* property = mDatabase->getObjectPropertyDesc(propCode, format);
+ if (!property)
+ return MTP_RESPONSE_OBJECT_PROP_NOT_SUPPORTED;
+ property->write(mData);
+ delete property;
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetDevicePropDesc() {
+ MtpDeviceProperty propCode = mRequest.getParameter(1);
+ LOGD("GetDevicePropDesc %s\n", MtpDebug::getDevicePropCodeName(propCode));
+ MtpProperty* property = mDatabase->getDevicePropertyDesc(propCode);
+ if (!property)
+ return MTP_RESPONSE_DEVICE_PROP_NOT_SUPPORTED;
+ property->write(mData);
+ delete property;
+ return MTP_RESPONSE_OK;
+}
+
+} // namespace android
diff --git a/media/mtp/MtpServer.h b/media/mtp/MtpServer.h
new file mode 100644
index 0000000..5aee4ea
--- /dev/null
+++ b/media/mtp/MtpServer.h
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_SERVER_H
+#define _MTP_SERVER_H
+
+#include "MtpRequestPacket.h"
+#include "MtpDataPacket.h"
+#include "MtpResponsePacket.h"
+#include "MtpEventPacket.h"
+#include "mtp.h"
+
+#include "MtpUtils.h"
+
+namespace android {
+
+class MtpDatabase;
+class MtpStorage;
+
+class MtpServer {
+
+private:
+ // file descriptor for MTP kernel driver
+ int mFD;
+
+ MtpDatabase* mDatabase;
+
+ // group to own new files and folders
+ int mFileGroup;
+ // permissions for new files and directories
+ int mFilePermission;
+ int mDirectoryPermission;
+
+ // current session ID
+ MtpSessionID mSessionID;
+ // true if we have an open session and mSessionID is valid
+ bool mSessionOpen;
+
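+ // packets for the request, data, response and event phases of the current transaction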
+ MtpRequestPacket mRequest;
+ MtpDataPacket mData;
+ MtpResponsePacket mResponse;
+ MtpEventPacket mEvent;
+
+ MtpStorageList mStorages;
+
+ // handle for new object, set by SendObjectInfo and used by SendObject
+ MtpObjectHandle mSendObjectHandle;
+ MtpObjectFormat mSendObjectFormat;
+ MtpString mSendObjectFilePath;
+ size_t mSendObjectFileSize;
+
+public:
+ MtpServer(int fd, MtpDatabase* database,
+ int fileGroup, int filePerm, int directoryPerm);
+ virtual ~MtpServer();
+
+ void addStorage(const char* filePath);
+ inline void addStorage(MtpStorage* storage) { mStorages.push(storage); }
+ MtpStorage* getStorage(MtpStorageID id);
+ void run();
+
+ void sendObjectAdded(MtpObjectHandle handle);
+ void sendObjectRemoved(MtpObjectHandle handle);
+
+private:
+ bool handleRequest();
+
+ MtpResponseCode doGetDeviceInfo();
+ MtpResponseCode doOpenSession();
+ MtpResponseCode doCloseSession();
+ MtpResponseCode doGetStorageIDs();
+ MtpResponseCode doGetStorageInfo();
+ MtpResponseCode doGetObjectPropsSupported();
+ MtpResponseCode doGetObjectHandles();
+ MtpResponseCode doGetNumObjects();
+ MtpResponseCode doGetObjectReferences();
+ MtpResponseCode doSetObjectReferences();
+ MtpResponseCode doGetObjectPropValue();
+ MtpResponseCode doSetObjectPropValue();
+ MtpResponseCode doGetDevicePropValue();
+ MtpResponseCode doSetDevicePropValue();
+ MtpResponseCode doResetDevicePropValue();
+ MtpResponseCode doGetObjectPropList();
+ MtpResponseCode doGetObjectInfo();
+ MtpResponseCode doGetObject();
+ MtpResponseCode doGetPartialObject();
+ MtpResponseCode doSendObjectInfo();
+ MtpResponseCode doSendObject();
+ MtpResponseCode doDeleteObject();
+ MtpResponseCode doGetObjectPropDesc();
+ MtpResponseCode doGetDevicePropDesc();
+};
+
+}; // namespace android
+
+#endif // _MTP_SERVER_H
diff --git a/media/mtp/MtpStorage.cpp b/media/mtp/MtpStorage.cpp
new file mode 100644
index 0000000..eccf186
--- /dev/null
+++ b/media/mtp/MtpStorage.cpp
@@ -0,0 +1,80 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpStorage"
+
+#include "MtpDebug.h"
+#include "MtpDatabase.h"
+#include "MtpStorage.h"
+
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <sys/statfs.h>
+#include <unistd.h>
+#include <dirent.h>
+#include <errno.h>
+#include <string.h>
+#include <stdio.h>
+#include <limits.h>
+
+namespace android {
+
+MtpStorage::MtpStorage(MtpStorageID id, const char* filePath, MtpDatabase* db)
+ : mStorageID(id),
+ mFilePath(filePath),
+ mDatabase(db),
+ mMaxCapacity(0)
+{
+ LOGD("MtpStorage id: %d path: %s\n", id, filePath);
+}
+
+MtpStorage::~MtpStorage() {
+}
+
+int MtpStorage::getType() const {
+ return MTP_STORAGE_FIXED_RAM;
+}
+
+int MtpStorage::getFileSystemType() const {
+ return MTP_STORAGE_FILESYSTEM_HIERARCHICAL;
+}
+
+int MtpStorage::getAccessCapability() const {
+ return MTP_STORAGE_READ_WRITE;
+}
+
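+// total capacity of the storage in bytes, computed lazily via statfs() and then cached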
+uint64_t MtpStorage::getMaxCapacity() {
+ if (mMaxCapacity == 0) {
+ struct statfs stat;
+ if (statfs(mFilePath, &stat))
+ return -1;
+ mMaxCapacity = (uint64_t)stat.f_blocks * (uint64_t)stat.f_bsize;
+ }
+ return mMaxCapacity;
+}
+
+uint64_t MtpStorage::getFreeSpace() {
+ struct statfs stat;
+ if (statfs(mFilePath, &stat))
+ return -1;
+ return (uint64_t)stat.f_bavail * (uint64_t)stat.f_bsize;
+}
+
+const char* MtpStorage::getDescription() const {
+ return "Device Storage";
+}
+
+} // namespace android
diff --git a/media/mtp/MtpStorage.h b/media/mtp/MtpStorage.h
new file mode 100644
index 0000000..b13b926
--- /dev/null
+++ b/media/mtp/MtpStorage.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_STORAGE_H
+#define _MTP_STORAGE_H
+
+#include "mtp.h"
+
+namespace android {
+
+class MtpDatabase;
+
+class MtpStorage {
+
+private:
+ MtpStorageID mStorageID;
+ const char* mFilePath;
+ MtpDatabase* mDatabase;
+ uint64_t mMaxCapacity;
+
+public:
+ MtpStorage(MtpStorageID id, const char* filePath, MtpDatabase* db);
+ virtual ~MtpStorage();
+
+ inline MtpStorageID getStorageID() const { return mStorageID; }
+ int getType() const;
+ int getFileSystemType() const;
+ int getAccessCapability() const;
+ uint64_t getMaxCapacity();
+ uint64_t getFreeSpace();
+ const char* getDescription() const;
+ inline const char* getPath() const { return mFilePath; }
+};
+
+}; // namespace android
+
+#endif // _MTP_STORAGE_H
diff --git a/media/mtp/MtpStorageInfo.cpp b/media/mtp/MtpStorageInfo.cpp
new file mode 100644
index 0000000..ca64ac0
--- /dev/null
+++ b/media/mtp/MtpStorageInfo.cpp
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpStorageInfo"
+
+#include "MtpDebug.h"
+#include "MtpDataPacket.h"
+#include "MtpStorageInfo.h"
+#include "MtpStringBuffer.h"
+
+namespace android {
+
+MtpStorageInfo::MtpStorageInfo(MtpStorageID id)
+ : mStorageID(id),
+ mStorageType(0),
+ mFileSystemType(0),
+ mAccessCapability(0),
+ mMaxCapacity(0),
+ mFreeSpaceBytes(0),
+ mFreeSpaceObjects(0),
+ mStorageDescription(NULL),
+ mVolumeIdentifier(NULL)
+{
+}
+
+MtpStorageInfo::~MtpStorageInfo() {
+ if (mStorageDescription)
+ free(mStorageDescription);
+ if (mVolumeIdentifier)
+ free(mVolumeIdentifier);
+}
+
+void MtpStorageInfo::read(MtpDataPacket& packet) {
+ MtpStringBuffer string;
+
+ // read the device info
+ mStorageType = packet.getUInt16();
+ mFileSystemType = packet.getUInt16();
+ mAccessCapability = packet.getUInt16();
+ mMaxCapacity = packet.getUInt64();
+ mFreeSpaceBytes = packet.getUInt64();
+ mFreeSpaceObjects = packet.getUInt32();
+
+ packet.getString(string);
+ mStorageDescription = strdup((const char *)string);
+ packet.getString(string);
+ mVolumeIdentifier = strdup((const char *)string);
+}
+
+void MtpStorageInfo::print() {
+ LOGD("Storage Info %08X:\n\tmStorageType: %d\n\tmFileSystemType: %d\n\tmAccessCapability: %d\n",
+ mStorageID, mStorageType, mFileSystemType, mAccessCapability);
+ LOGD("\tmMaxCapacity: %lld\n\tmFreeSpaceBytes: %lld\n\tmFreeSpaceObjects: %d\n",
+ mMaxCapacity, mFreeSpaceBytes, mFreeSpaceObjects);
+ LOGD("\tmStorageDescription: %s\n\tmVolumeIdentifier: %s\n",
+ mStorageDescription, mVolumeIdentifier);
+}
+
+} // namespace android
diff --git a/media/mtp/MtpStorageInfo.h b/media/mtp/MtpStorageInfo.h
new file mode 100644
index 0000000..2cb626e
--- /dev/null
+++ b/media/mtp/MtpStorageInfo.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_STORAGE_INFO_H
+#define _MTP_STORAGE_INFO_H
+
+#include "MtpTypes.h"
+
+namespace android {
+
+class MtpDataPacket;
+
+class MtpStorageInfo {
+public:
+ MtpStorageID mStorageID;
+ uint16_t mStorageType;
+ uint16_t mFileSystemType;
+ uint16_t mAccessCapability;
+ uint64_t mMaxCapacity;
+ uint64_t mFreeSpaceBytes;
+ uint32_t mFreeSpaceObjects;
+ char* mStorageDescription;
+ char* mVolumeIdentifier;
+
+public:
+ MtpStorageInfo(MtpStorageID id);
+ virtual ~MtpStorageInfo();
+
+ void read(MtpDataPacket& packet);
+
+ void print();
+};
+
+}; // namespace android
+
+#endif // _MTP_STORAGE_INFO_H
diff --git a/media/mtp/MtpStringBuffer.cpp b/media/mtp/MtpStringBuffer.cpp
new file mode 100644
index 0000000..fe8cf04
--- /dev/null
+++ b/media/mtp/MtpStringBuffer.cpp
@@ -0,0 +1,171 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpStringBuffer"
+
+#include <string.h>
+
+#include "MtpDataPacket.h"
+#include "MtpStringBuffer.h"
+
+namespace android {
+
+MtpStringBuffer::MtpStringBuffer()
+ : mCharCount(0),
+ mByteCount(1)
+{
+ mBuffer[0] = 0;
+}
+
+MtpStringBuffer::MtpStringBuffer(const char* src)
+ : mCharCount(0),
+ mByteCount(1)
+{
+ set(src);
+}
+
+MtpStringBuffer::MtpStringBuffer(const uint16_t* src)
+ : mCharCount(0),
+ mByteCount(1)
+{
+ set(src);
+}
+
+MtpStringBuffer::MtpStringBuffer(const MtpStringBuffer& src)
+ : mCharCount(src.mCharCount),
+ mByteCount(src.mByteCount)
+{
+ memcpy(mBuffer, src.mBuffer, mByteCount);
+}
+
+
+MtpStringBuffer::~MtpStringBuffer() {
+}
+
+void MtpStringBuffer::set(const char* src) {
+ int length = strlen(src);
+ if (length >= sizeof(mBuffer))
+ length = sizeof(mBuffer) - 1;
+ memcpy(mBuffer, src, length);
+
+ // count the characters
+ int count = 0;
+ char ch;
+ while ((ch = *src++) != 0) {
+ if ((ch & 0x80) == 0) {
+ // single byte character
+ } else if ((ch & 0xE0) == 0xC0) {
+ // two byte character
+ if (! *src++) {
+ // last character was truncated, so ignore last byte
+ length--;
+ break;
+ }
+ } else if ((ch & 0xF0) == 0xE0) {
+ // 3 byte char
+ if (! *src++) {
+ // last character was truncated, so ignore last byte
+ length--;
+ break;
+ }
+ if (! *src++) {
+ // last character was truncated, so ignore last two bytes
+ length -= 2;
+ break;
+ }
+ }
+ count++;
+ }
+
+ mByteCount = length + 1;
+ mBuffer[length] = 0;
+ mCharCount = count;
+}
+
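+// convert a UTF-16 source string to UTF-8, truncating at 255 characters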
+void MtpStringBuffer::set(const uint16_t* src) {
+ int count = 0;
+ uint16_t ch;
+ uint8_t* dest = mBuffer;
+
+ while ((ch = *src++) != 0 && count < 255) {
+ if (ch >= 0x0800) {
+ *dest++ = (uint8_t)(0xE0 | (ch >> 12));
+ *dest++ = (uint8_t)(0x80 | ((ch >> 6) & 0x3F));
+ *dest++ = (uint8_t)(0x80 | (ch & 0x3F));
+ } else if (ch >= 0x80) {
+ *dest++ = (uint8_t)(0xC0 | (ch >> 6));
+ *dest++ = (uint8_t)(0x80 | (ch & 0x3F));
+ } else {
+ *dest++ = ch;
+ }
+ count++;
+ }
+ *dest++ = 0;
+ mCharCount = count;
+ mByteCount = dest - mBuffer;
+}
+
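+// read an MTP string from the packet (an 8-bit character count followed by UTF-16 characters)
+// and store it internally as UTF-8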
+void MtpStringBuffer::readFromPacket(MtpDataPacket* packet) {
+ int count = packet->getUInt8();
+ uint8_t* dest = mBuffer;
+ for (int i = 0; i < count; i++) {
+ uint16_t ch = packet->getUInt16();
+ if (ch >= 0x0800) {
+ *dest++ = (uint8_t)(0xE0 | (ch >> 12));
+ *dest++ = (uint8_t)(0x80 | ((ch >> 6) & 0x3F));
+ *dest++ = (uint8_t)(0x80 | (ch & 0x3F));
+ } else if (ch >= 0x80) {
+ *dest++ = (uint8_t)(0xC0 | (ch >> 6));
+ *dest++ = (uint8_t)(0x80 | (ch & 0x3F));
+ } else {
+ *dest++ = ch;
+ }
+ }
+ *dest++ = 0;
+ mCharCount = count;
+ mByteCount = dest - mBuffer;
+}
+
+void MtpStringBuffer::writeToPacket(MtpDataPacket* packet) const {
+ int count = mCharCount;
+ const uint8_t* src = mBuffer;
+ packet->putUInt8(count > 0 ? count + 1 : 0);
+
+ // expand utf8 to 16 bit chars
+ for (int i = 0; i < count; i++) {
+ uint16_t ch;
+ uint16_t ch1 = *src++;
+ if ((ch1 & 0x80) == 0) {
+ // single byte character
+ ch = ch1;
+ } else if ((ch1 & 0xE0) == 0xC0) {
+ // two byte character
+ uint16_t ch2 = *src++;
+ ch = ((ch1 & 0x1F) << 6) | (ch2 & 0x3F);
+ } else {
+ // three byte character
+ uint16_t ch2 = *src++;
+ uint16_t ch3 = *src++;
+ ch = ((ch1 & 0x0F) << 12) | ((ch2 & 0x3F) << 6) | (ch3 & 0x3F);
+ }
+ packet->putUInt16(ch);
+ }
+ // only terminate with zero if string is not empty
+ if (count > 0)
+ packet->putUInt16(0);
+}
+
+} // namespace android
diff --git a/media/mtp/MtpStringBuffer.h b/media/mtp/MtpStringBuffer.h
new file mode 100644
index 0000000..cbc8307
--- /dev/null
+++ b/media/mtp/MtpStringBuffer.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_STRING_BUFFER_H
+#define _MTP_STRING_BUFFER_H
+
+#include <stdint.h>
+
+namespace android {
+
+class MtpDataPacket;
+
+// Represents a utf8 string, with a maximum of 255 characters
+class MtpStringBuffer {
+
+private:
+ // mBuffer contains string in UTF8 format
+ // maximum 3 bytes/character, with 1 extra for zero termination
+ uint8_t mBuffer[255 * 3 + 1];
+ int mCharCount;
+ int mByteCount;
+
+public:
+ MtpStringBuffer();
+ MtpStringBuffer(const char* src);
+ MtpStringBuffer(const uint16_t* src);
+ MtpStringBuffer(const MtpStringBuffer& src);
+ virtual ~MtpStringBuffer();
+
+ void set(const char* src);
+ void set(const uint16_t* src);
+
+ void readFromPacket(MtpDataPacket* packet);
+ void writeToPacket(MtpDataPacket* packet) const;
+
+ inline int getCharCount() const { return mCharCount; }
+ inline int getByteCount() const { return mByteCount; }
+
+ inline operator const char*() const { return (const char *)mBuffer; }
+};
+
+}; // namespace android
+
+#endif // _MTP_STRING_BUFFER_H
diff --git a/media/mtp/MtpTypes.h b/media/mtp/MtpTypes.h
new file mode 100644
index 0000000..720c854
--- /dev/null
+++ b/media/mtp/MtpTypes.h
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_TYPES_H
+#define _MTP_TYPES_H
+
+#include <stdint.h>
+#include "utils/String8.h"
+#include "utils/Vector.h"
+
+namespace android {
+
+typedef int32_t int128_t[4];
+typedef uint32_t uint128_t[4];
+
+typedef uint16_t MtpOperationCode;
+typedef uint16_t MtpResponseCode;
+typedef uint16_t MtpEventCode;
+typedef uint32_t MtpSessionID;
+typedef uint32_t MtpStorageID;
+typedef uint32_t MtpTransactionID;
+typedef uint16_t MtpPropertyCode;
+typedef uint16_t MtpDataType;
+typedef uint16_t MtpObjectFormat;
+typedef MtpPropertyCode MtpDeviceProperty;
+typedef MtpPropertyCode MtpObjectProperty;
+
+// object handles are unique across all storage but only within a single session.
+// object handles cannot be reused after an object is deleted.
+// values 0x00000000 and 0xFFFFFFFF are reserved for special purposes.
+typedef uint32_t MtpObjectHandle;
+
+// Special values
+#define MTP_PARENT_ROOT 0xFFFFFFFF // parent is root of the storage
+#define kInvalidObjectHandle 0xFFFFFFFF
+
+class MtpStorage;
+class MtpDevice;
+class MtpProperty;
+
+typedef Vector<MtpStorage *> MtpStorageList;
+typedef Vector<MtpDevice*> MtpDeviceList;
+typedef Vector<MtpProperty*> MtpPropertyList;
+
+typedef Vector<uint8_t> UInt8List;
+typedef Vector<uint16_t> UInt16List;
+typedef Vector<uint32_t> UInt32List;
+typedef Vector<uint64_t> UInt64List;
+typedef Vector<int8_t> Int8List;
+typedef Vector<int16_t> Int16List;
+typedef Vector<int32_t> Int32List;
+typedef Vector<int64_t> Int64List;
+
+typedef UInt16List MtpObjectPropertyList;
+typedef UInt16List MtpDevicePropertyList;
+typedef UInt16List MtpObjectFormatList;
+typedef UInt32List MtpObjectHandleList;
+typedef UInt32List MtpStorageIDList;
+
+typedef String8 MtpString;
+
+}; // namespace android
+
+#endif // _MTP_TYPES_H
diff --git a/media/mtp/MtpUtils.cpp b/media/mtp/MtpUtils.cpp
new file mode 100644
index 0000000..ab01ef5
--- /dev/null
+++ b/media/mtp/MtpUtils.cpp
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpUtils"
+
+#include <stdio.h>
+#include <time.h>
+
+#include <cutils/tztime.h>
+#include "MtpUtils.h"
+
+namespace android {
+
+/*
+DateTime strings follow a compatible subset of the definition found in ISO 8601, and
+take the form of a Unicode string formatted as: "YYYYMMDDThhmmss.s". In this
+representation, YYYY shall be replaced by the year, MM replaced by the month (01-12),
+DD replaced by the day (01-31), T is a constant character 'T' delimiting time from date,
+hh is replaced by the hour (00-23), mm is replaced by the minute (00-59), and ss by the
+second (00-59). The ".s" is optional, and represents tenths of a second.
+*/
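+// For example, "20100601T120000.0" represents noon on June 1, 2010; a trailing 'Z' indicates UTC.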
+
+bool parseDateTime(const char* dateTime, time_t& outSeconds) {
+ int year, month, day, hour, minute, second;
+ struct tm tm;
+
+ if (sscanf(dateTime, "%04d%02d%02dT%02d%02d%02d",
+ &year, &month, &day, &hour, &minute, &second) != 6)
+ return false;
+ const char* tail = dateTime + 15;
+ // skip optional tenth of second
+ if (tail[0] == '.' && tail[1])
+ tail += 2;
+ //FIXME - support +/-hhmm
+ bool useUTC = (tail[0] == 'Z');
+
+ // hack to compute timezone
+ time_t dummy = time(NULL);
+ localtime_r(&dummy, &tm);
+
+ tm.tm_sec = second;
+ tm.tm_min = minute;
+ tm.tm_hour = hour;
+ tm.tm_mday = day;
+ tm.tm_mon = month - 1; // struct tm months are 0-11, MTP DateTime months are 01-12
+ tm.tm_year = year - 1900;
+ tm.tm_wday = 0;
+ tm.tm_isdst = -1;
+ if (useUTC)
+ outSeconds = mktime(&tm);
+ else
+ outSeconds = mktime_tz(&tm, tm.tm_zone);
+
+ return true;
+}
+
+void formatDateTime(time_t seconds, char* buffer, int bufferLength) {
+ struct tm tm;
+
+ localtime_r(&seconds, &tm);
+ snprintf(buffer, bufferLength, "%04d%02d%02dT%02d%02d%02d",
+ tm.tm_year + 1900, tm.tm_mon + 1, tm.tm_mday, tm.tm_hour, tm.tm_min, tm.tm_sec);
+}
+
+} // namespace android
diff --git a/media/mtp/MtpUtils.h b/media/mtp/MtpUtils.h
new file mode 100644
index 0000000..61f9055
--- /dev/null
+++ b/media/mtp/MtpUtils.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_UTILS_H
+#define _MTP_UTILS_H
+
+#include <stdint.h>
+#include <time.h>
+
+namespace android {
+
+bool parseDateTime(const char* dateTime, time_t& outSeconds);
+void formatDateTime(time_t seconds, char* buffer, int bufferLength);
+
+}; // namespace android
+
+#endif // _MTP_UTILS_H
diff --git a/media/mtp/PtpCursor.cpp b/media/mtp/PtpCursor.cpp
new file mode 100644
index 0000000..7294cef
--- /dev/null
+++ b/media/mtp/PtpCursor.cpp
@@ -0,0 +1,461 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "PtpCursor"
+
+#include "MtpDebug.h"
+#include "MtpClient.h"
+#include "PtpCursor.h"
+#include "MtpDevice.h"
+#include "MtpDeviceInfo.h"
+#include "MtpObjectInfo.h"
+#include "MtpStorageInfo.h"
+
+
+#include "binder/CursorWindow.h"
+
+namespace android {
+
+/* Device Column IDs */
+/* These must match the values in PtpCursor.java */
+#define DEVICE_ROW_ID 1
+#define DEVICE_MANUFACTURER 2
+#define DEVICE_MODEL 3
+
+/* Storage Column IDs */
+/* These must match the values in PtpCursor.java */
+#define STORAGE_ROW_ID 101
+#define STORAGE_IDENTIFIER 102
+#define STORAGE_DESCRIPTION 103
+
+/* Object Column IDs */
+/* These must match the values in PtpCursor.java */
+#define OBJECT_ROW_ID 201
+#define OBJECT_STORAGE_ID 202
+#define OBJECT_FORMAT 203
+#define OBJECT_PROTECTION_STATUS 204
+#define OBJECT_SIZE 205
+#define OBJECT_THUMB_FORMAT 206
+#define OBJECT_THUMB_SIZE 207
+#define OBJECT_THUMB_WIDTH 208
+#define OBJECT_THUMB_HEIGHT 209
+#define OBJECT_IMAGE_WIDTH 210
+#define OBJECT_IMAGE_HEIGHT 211
+#define OBJECT_IMAGE_DEPTH 212
+#define OBJECT_PARENT 213
+#define OBJECT_ASSOCIATION_TYPE 214
+#define OBJECT_ASSOCIATION_DESC 215
+#define OBJECT_SEQUENCE_NUMBER 216
+#define OBJECT_NAME 217
+#define OBJECT_DATE_CREATED 218
+#define OBJECT_DATE_MODIFIED 219
+#define OBJECT_KEYWORDS 220
+#define OBJECT_THUMB 221
+
+PtpCursor::PtpCursor(MtpClient* client, int queryType, int deviceID,
+ MtpStorageID storageID, MtpObjectHandle objectID,
+ int columnCount, int* columns)
+ : mClient(client),
+ mQueryType(queryType),
+ mDeviceID(deviceID),
+ mStorageID(storageID),
+ mObjectID(objectID),
+ mColumnCount(columnCount),
+ mColumns(NULL)
+{
+ if (columns) {
+ mColumns = new int[columnCount];
+ memcpy(mColumns, columns, columnCount * sizeof(int));
+ }
+}
+
+PtpCursor::~PtpCursor() {
+ delete[] mColumns;
+}
+
+int PtpCursor::fillWindow(CursorWindow* window, int startPos) {
+ LOGD("PtpCursor::fillWindow mQueryType: %d\n", mQueryType);
+
+ switch (mQueryType) {
+ case DEVICE:
+ return fillDevices(window, startPos);
+ case DEVICE_ID:
+ return fillDevice(window, startPos);
+ case STORAGE:
+ return fillStorages(window, startPos);
+ case STORAGE_ID:
+ return fillStorage(window, startPos);
+ case OBJECT:
+ return fillObjects(window, 0, startPos);
+ case OBJECT_ID:
+ return fillObject(window, startPos);
+ case STORAGE_CHILDREN:
+ return fillObjects(window, -1, startPos);
+ case OBJECT_CHILDREN:
+ return fillObjects(window, mObjectID, startPos);
+ default:
+ LOGE("PtpCursor::fillWindow: unknown query type %d\n", mQueryType);
+ return 0;
+ }
+}
+
+int PtpCursor::fillDevices(CursorWindow* window, int startPos) {
+ int count = 0;
+ MtpDeviceList& deviceList = mClient->getDeviceList();
+ for (int i = 0; i < deviceList.size(); i++) {
+ MtpDevice* device = deviceList[i];
+ if (fillDevice(window, device, startPos)) {
+ count++;
+ startPos++;
+ } else {
+ break;
+ }
+ }
+ return count;
+}
+
+int PtpCursor::fillDevice(CursorWindow* window, int startPos) {
+ MtpDevice* device = mClient->getDevice(mDeviceID);
+ if (device && fillDevice(window, device, startPos))
+ return 1;
+ else
+ return 0;
+}
+
+int PtpCursor::fillStorages(CursorWindow* window, int startPos) {
+ int count = 0;
+ MtpDevice* device = mClient->getDevice(mDeviceID);
+ if (!device)
+ return 0;
+ MtpStorageIDList* storageIDs = device->getStorageIDs();
+ if (!storageIDs)
+ return 0;
+
+ for (int i = 0; i < storageIDs->size(); i++) {
+ MtpStorageID storageID = (*storageIDs)[i];
+ if (fillStorage(window, device, storageID, startPos)) {
+ count++;
+ startPos++;
+ } else {
+ break;
+ }
+ }
+ delete storageIDs;
+ return count;
+}
+
+int PtpCursor::fillStorage(CursorWindow* window, int startPos) {
+ MtpDevice* device = mClient->getDevice(mDeviceID);
+ if (device && fillStorage(window, device, mStorageID, startPos))
+ return 1;
+ else
+ return 0;
+}
+
+int PtpCursor::fillObjects(CursorWindow* window, int parent, int startPos) {
+ int count = 0;
+ MtpDevice* device = mClient->getDevice(mDeviceID);
+ if (!device)
+ return 0;
+ MtpObjectHandleList* handles = device->getObjectHandles(mStorageID, 0, parent);
+ if (!handles)
+ return 0;
+
+ for (int i = 0; i < handles->size(); i++) {
+ MtpObjectHandle handle = (*handles)[i];
+ if (fillObject(window, device, handle, startPos)) {
+ count++;
+ startPos++;
+ } else {
+ break;
+ }
+ }
+ delete handles;
+ return count;
+}
+
+int PtpCursor::fillObject(CursorWindow* window, int startPos) {
+ MtpDevice* device = mClient->getDevice(mDeviceID);
+ if (device && fillObject(window, device, mObjectID, startPos))
+ return 1;
+ else
+ return 0;
+}
+
+bool PtpCursor::fillDevice(CursorWindow* window, MtpDevice* device, int row) {
+ MtpDeviceInfo* deviceInfo = device->getDeviceInfo();
+ if (!deviceInfo)
+ return false;
+ if (!prepareRow(window))
+ return false;
+
+ for (int i = 0; i < mColumnCount; i++) {
+ switch (mColumns[i]) {
+ case DEVICE_ROW_ID:
+ if (!putLong(window, device->getID(), row, i))
+ return false;
+ break;
+ case DEVICE_MANUFACTURER:
+ if (!putString(window, deviceInfo->mManufacturer, row, i))
+ return false;
+ break;
+ case DEVICE_MODEL:
+ if (!putString(window, deviceInfo->mModel, row, i))
+ return false;
+ break;
+ default:
+ LOGE("fillDevice: unknown column %d\n", mColumns[i]);
+ return false;
+ }
+ }
+
+ return true;
+}
+
+bool PtpCursor::fillStorage(CursorWindow* window, MtpDevice* device,
+ MtpStorageID storageID, int row) {
+
+LOGD("fillStorage %d\n", storageID);
+
+ MtpStorageInfo* storageInfo = device->getStorageInfo(storageID);
+ if (!storageInfo)
+ return false;
+ if (!prepareRow(window)) {
+ delete storageInfo;
+ return false;
+ }
+
+ const char* text;
+ for (int i = 0; i < mColumnCount; i++) {
+ switch (mColumns[i]) {
+ case STORAGE_ROW_ID:
+ if (!putLong(window, storageID, row, i))
+ goto fail;
+ break;
+ case STORAGE_IDENTIFIER:
+ text = storageInfo->mVolumeIdentifier;
+ if (!text || !text[0])
+ text = "Camera Storage";
+ if (!putString(window, text, row, i))
+ goto fail;
+ break;
+ case STORAGE_DESCRIPTION:
+ text = storageInfo->mStorageDescription;
+ if (!text || !text[0])
+ text = "Storage Description";
+ if (!putString(window, text, row, i))
+ goto fail;
+ break;
+ default:
+ LOGE("fillStorage: unknown column %d\n", mColumns[i]);
+ goto fail;
+ }
+ }
+
+ delete storageInfo;
+ return true;
+
+fail:
+ delete storageInfo;
+ return false;
+}
+
+bool PtpCursor::fillObject(CursorWindow* window, MtpDevice* device,
+ MtpObjectHandle objectID, int row) {
+
+ MtpObjectInfo* objectInfo = device->getObjectInfo(objectID);
+ if (!objectInfo)
+ return false;
+ // objectInfo->print();
+ if (!prepareRow(window)) {
+ delete objectInfo;
+ return false;
+ }
+
+ for (int i = 0; i < mColumnCount; i++) {
+ switch (mColumns[i]) {
+ case OBJECT_ROW_ID:
+ if (!putLong(window, objectID, row, i))
+ goto fail;
+ break;
+ case OBJECT_STORAGE_ID:
+ if (!putLong(window, objectInfo->mStorageID, row, i))
+ goto fail;
+ break;
+ case OBJECT_FORMAT:
+ if (!putLong(window, objectInfo->mFormat, row, i))
+ goto fail;
+ break;
+ case OBJECT_PROTECTION_STATUS:
+ if (!putLong(window, objectInfo->mProtectionStatus, row, i))
+ goto fail;
+ break;
+ case OBJECT_SIZE:
+ if (!putLong(window, objectInfo->mCompressedSize, row, i))
+ goto fail;
+ break;
+ case OBJECT_THUMB_FORMAT:
+ if (!putLong(window, objectInfo->mThumbFormat, row, i))
+ goto fail;
+ break;
+ case OBJECT_THUMB_SIZE:
+ if (!putLong(window, objectInfo->mThumbCompressedSize, row, i))
+ goto fail;
+ break;
+ case OBJECT_THUMB_WIDTH:
+ if (!putLong(window, objectInfo->mThumbPixWidth, row, i))
+ goto fail;
+ break;
+ case OBJECT_THUMB_HEIGHT:
+ if (!putLong(window, objectInfo->mThumbPixHeight, row, i))
+ goto fail;
+ break;
+ case OBJECT_IMAGE_WIDTH:
+ if (!putLong(window, objectInfo->mImagePixWidth, row, i))
+ goto fail;
+ break;
+ case OBJECT_IMAGE_HEIGHT:
+ if (!putLong(window, objectInfo->mImagePixHeight, row, i))
+ goto fail;
+ break;
+ case OBJECT_IMAGE_DEPTH:
+ if (!putLong(window, objectInfo->mImagePixDepth, row, i))
+ goto fail;
+ break;
+ case OBJECT_PARENT:
+ if (!putLong(window, objectInfo->mParent, row, i))
+ goto fail;
+ break;
+ case OBJECT_ASSOCIATION_TYPE:
+ if (!putLong(window, objectInfo->mAssociationType, row, i))
+ goto fail;
+ break;
+ case OBJECT_ASSOCIATION_DESC:
+ if (!putLong(window, objectInfo->mAssociationDesc, row, i))
+ goto fail;
+ break;
+ case OBJECT_SEQUENCE_NUMBER:
+ if (!putLong(window, objectInfo->mSequenceNumber, row, i))
+ goto fail;
+ break;
+ case OBJECT_NAME:
+ if (!putString(window, objectInfo->mName, row, i))
+ goto fail;
+ break;
+ case OBJECT_DATE_CREATED:
+ if (!putLong(window, objectInfo->mDateCreated, row, i))
+ goto fail;
+ break;
+ case OBJECT_DATE_MODIFIED:
+ if (!putLong(window, objectInfo->mDateModified, row, i))
+ goto fail;
+ break;
+ case OBJECT_KEYWORDS:
+ if (!putString(window, objectInfo->mKeywords, row, i))
+ goto fail;
+ break;
+ case OBJECT_THUMB:
+ if (!putThumbnail(window, objectID, objectInfo->mFormat, row, i))
+ goto fail;
+ break;
+ default:
+ LOGE("fillObject: unknown column %d\n", mColumns[i]);
+ goto fail;
+ }
+ }
+
+ delete objectInfo;
+ return true;
+
+fail:
+ delete objectInfo;
+ return false;
+}
+
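+// set the window's column count and allocate a new row; returns false if either step fails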
+bool PtpCursor::prepareRow(CursorWindow* window) {
+ if (!window->setNumColumns(mColumnCount)) {
+ LOGE("Failed to change column count from %d to %d", window->getNumColumns(), mColumnCount);
+ return false;
+ }
+ field_slot_t * fieldDir = window->allocRow();
+ if (!fieldDir) {
+ LOGE("Failed allocating fieldDir");
+ return false;
+ }
+ return true;
+}
+
+
+bool PtpCursor::putLong(CursorWindow* window, int64_t value, int row, int column) {
+ if (!window->putLong(row, column, value)) {
+ window->freeLastRow();
+ LOGE("Failed allocating space for a long in column %d", column);
+ return false;
+ }
+ return true;
+}
+
+bool PtpCursor::putString(CursorWindow* window, const char* text, int row, int column) {
+ int size = strlen(text) + 1;
+ int offset = window->alloc(size);
+ if (!offset) {
+ window->freeLastRow();
+ LOGE("Failed allocating %u bytes for text/blob %s", size, text);
+ return false;
+ }
+ window->copyIn(offset, (const uint8_t*)text, size);
+
+ // This must be updated after the call to alloc(), since that
+ // may move the field around in the window
+ field_slot_t * fieldSlot = window->getFieldSlot(row, column);
+ fieldSlot->type = FIELD_TYPE_STRING;
+ fieldSlot->data.buffer.offset = offset;
+ fieldSlot->data.buffer.size = size;
+ return true;
+}
+
+bool PtpCursor::putThumbnail(CursorWindow* window, MtpObjectHandle objectID,
+ MtpObjectFormat format, int row, int column) {
+ MtpDevice* device = mClient->getDevice(mDeviceID);
+ void* thumbnail;
+ int size, offset;
+ if (format == MTP_FORMAT_ASSOCIATION) {
+ thumbnail = NULL;
+ size = offset = 0;
+ } else {
+ thumbnail = device->getThumbnail(objectID, size);
+
+ LOGV("putThumbnail: %p, size: %d\n", thumbnail, size);
+ offset = window->alloc(size);
+ if (!offset) {
+ window->freeLastRow();
+ LOGE("Failed allocating %u bytes for thumbnail", size);
+ return false;
+ }
+ }
+ if (thumbnail)
+ window->copyIn(offset, (const uint8_t*)thumbnail, size);
+
+ // This must be updated after the call to alloc(), since that
+ // may move the field around in the window
+ field_slot_t * fieldSlot = window->getFieldSlot(row, column);
+ fieldSlot->type = FIELD_TYPE_BLOB;
+ fieldSlot->data.buffer.offset = offset;
+ fieldSlot->data.buffer.size = size;
+ return true;
+}
+
+} // namespace android
diff --git a/media/mtp/PtpCursor.h b/media/mtp/PtpCursor.h
new file mode 100644
index 0000000..38a1d47
--- /dev/null
+++ b/media/mtp/PtpCursor.h
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _PTP_CURSOR_H
+#define _PTP_CURSOR_H
+
+#include "MtpTypes.h"
+
+namespace android {
+
+class CursorWindow;
+class MtpClient;
+
+class PtpCursor {
+private:
+ enum {
+ DEVICE = 1,
+ DEVICE_ID = 2,
+ STORAGE = 3,
+ STORAGE_ID = 4,
+ OBJECT = 5,
+ OBJECT_ID = 6,
+ STORAGE_CHILDREN = 7,
+ OBJECT_CHILDREN = 8,
+ };
+
+ MtpClient* mClient;
+ int mQueryType;
+ int mDeviceID;
+ MtpStorageID mStorageID;
+ MtpObjectHandle mObjectID;
+ int mColumnCount;
+ int* mColumns;
+
+public:
+ PtpCursor(MtpClient* client, int queryType, int deviceID,
+ MtpStorageID storageID, MtpObjectHandle objectID,
+ int columnCount, int* columns);
+ virtual ~PtpCursor();
+
+ int fillWindow(CursorWindow* window, int startPos);
+
+private:
+ int fillDevices(CursorWindow* window, int startPos);
+ int fillDevice(CursorWindow* window, int startPos);
+ int fillStorages(CursorWindow* window, int startPos);
+ int fillStorage(CursorWindow* window, int startPos);
+ int fillObjects(CursorWindow* window, int parent, int startPos);
+ int fillObject(CursorWindow* window, int startPos);
+
+ bool fillDevice(CursorWindow* window, MtpDevice* device, int startPos);
+ bool fillStorage(CursorWindow* window, MtpDevice* device,
+ MtpStorageID storageID, int row);
+ bool fillObject(CursorWindow* window, MtpDevice* device,
+ MtpObjectHandle objectID, int row);
+
+ bool prepareRow(CursorWindow* window);
+ bool putLong(CursorWindow* window, int64_t value, int row, int column);
+ bool putString(CursorWindow* window, const char* text, int row, int column);
+ bool putThumbnail(CursorWindow* window, MtpObjectHandle objectID,
+ MtpObjectFormat format, int row, int column);
+};
+
+}; // namespace android
+
+#endif // _PTP_CURSOR_H
diff --git a/media/mtp/mtp.h b/media/mtp/mtp.h
new file mode 100644
index 0000000..8bc2e22
--- /dev/null
+++ b/media/mtp/mtp.h
@@ -0,0 +1,479 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_H
+#define _MTP_H
+
+#include <stdint.h>
+#include <stdlib.h>
+
+#define MTP_STANDARD_VERSION 100
+
+// Container Types
+#define MTP_CONTAINER_TYPE_UNDEFINED 0
+#define MTP_CONTAINER_TYPE_COMMAND 1
+#define MTP_CONTAINER_TYPE_DATA 2
+#define MTP_CONTAINER_TYPE_RESPONSE 3
+#define MTP_CONTAINER_TYPE_EVENT 4
+
+// Container Offsets
+#define MTP_CONTAINER_LENGTH_OFFSET 0
+#define MTP_CONTAINER_TYPE_OFFSET 4
+#define MTP_CONTAINER_CODE_OFFSET 6
+#define MTP_CONTAINER_TRANSACTION_ID_OFFSET 8
+#define MTP_CONTAINER_PARAMETER_OFFSET 12
+#define MTP_CONTAINER_HEADER_SIZE 12
+
+// MTP Data Types
+#define MTP_TYPE_UNDEFINED 0x0000 // Undefined
+#define MTP_TYPE_INT8 0x0001 // Signed 8-bit integer
+#define MTP_TYPE_UINT8 0x0002 // Unsigned 8-bit integer
+#define MTP_TYPE_INT16 0x0003 // Signed 16-bit integer
+#define MTP_TYPE_UINT16 0x0004 // Unsigned 16-bit integer
+#define MTP_TYPE_INT32 0x0005 // Signed 32-bit integer
+#define MTP_TYPE_UINT32 0x0006 // Unsigned 32-bit integer
+#define MTP_TYPE_INT64 0x0007 // Signed 64-bit integer
+#define MTP_TYPE_UINT64 0x0008 // Unsigned 64-bit integer
+#define MTP_TYPE_INT128 0x0009 // Signed 128-bit integer
+#define MTP_TYPE_UINT128 0x000A // Unsigned 128-bit integer
+#define MTP_TYPE_AINT8 0x4001 // Array of signed 8-bit integers
+#define MTP_TYPE_AUINT8 0x4002 // Array of unsigned 8-bit integers
+#define MTP_TYPE_AINT16 0x4003 // Array of signed 16-bit integers
+#define MTP_TYPE_AUINT16 0x4004 // Array of unsigned 16-bit integers
+#define MTP_TYPE_AINT32 0x4005 // Array of signed 32-bit integers
+#define MTP_TYPE_AUINT32 0x4006 // Array of unsigned 32-bit integers
+#define MTP_TYPE_AINT64 0x4007 // Array of signed 64-bit integers
+#define MTP_TYPE_AUINT64 0x4008 // Array of unsigned 64-bit integers
+#define MTP_TYPE_AINT128 0x4009 // Array of signed 128-bit integers
+#define MTP_TYPE_AUINT128 0x400A // Array of unsigned 128-bit integers
+#define MTP_TYPE_STR 0xFFFF // Variable-length Unicode string
+
+// MTP Format Codes
+#define MTP_FORMAT_UNDEFINED 0x3000 // Undefined object
+#define MTP_FORMAT_ASSOCIATION 0x3001 // Association (for example, a folder)
+#define MTP_FORMAT_SCRIPT 0x3002 // Device model-specific script
+#define MTP_FORMAT_EXECUTABLE 0x3003 // Device model-specific binary executable
+#define MTP_FORMAT_TEXT 0x3004 // Text file
+#define MTP_FORMAT_HTML 0x3005 // Hypertext Markup Language file (text)
+#define MTP_FORMAT_DPOF 0x3006 // Digital Print Order Format file (text)
+#define MTP_FORMAT_AIFF 0x3007 // Audio clip
+#define MTP_FORMAT_WAV 0x3008 // Audio clip
+#define MTP_FORMAT_MP3 0x3009 // Audio clip
+#define MTP_FORMAT_AVI 0x300A // Video clip
+#define MTP_FORMAT_MPEG 0x300B // Video clip
+#define MTP_FORMAT_ASF 0x300C // Microsoft Advanced Streaming Format (video)
+#define MTP_FORMAT_DEFINED 0x3800 // Unknown image object
+#define MTP_FORMAT_EXIF_JPEG 0x3801 // Exchangeable File Format, JEIDA standard
+#define MTP_FORMAT_TIFF_EP 0x3802 // Tag Image File Format for Electronic Photography
+#define MTP_FORMAT_FLASHPIX 0x3803 // Structured Storage Image Format
+#define MTP_FORMAT_BMP 0x3804 // Microsoft Windows Bitmap file
+#define MTP_FORMAT_CIFF 0x3805 // Canon Camera Image File Format
+#define MTP_FORMAT_GIF 0x3807 // Graphics Interchange Format
+#define MTP_FORMAT_JFIF 0x3808 // JPEG File Interchange Format
+#define MTP_FORMAT_CD 0x3809 // PhotoCD Image Pac
+#define MTP_FORMAT_PICT 0x380A // Quickdraw Image Format
+#define MTP_FORMAT_PNG 0x380B // Portable Network Graphics
+#define MTP_FORMAT_TIFF 0x380D // Tag Image File Format
+#define MTP_FORMAT_TIFF_IT 0x380E // Tag Image File Format for Information Technology (graphic arts)
+#define MTP_FORMAT_JP2 0x380F // JPEG2000 Baseline File Format
+#define MTP_FORMAT_JPX 0x3810 // JPEG2000 Extended File Format
+#define MTP_FORMAT_UNDEFINED_FIRMWARE 0xB802
+#define MTP_FORMAT_WINDOWS_IMAGE_FORMAT 0xB881
+#define MTP_FORMAT_UNDEFINED_AUDIO 0xB900
+#define MTP_FORMAT_WMA 0xB901
+#define MTP_FORMAT_OGG 0xB902
+#define MTP_FORMAT_AAC 0xB903
+#define MTP_FORMAT_AUDIBLE 0xB904
+#define MTP_FORMAT_FLAC 0xB906
+#define MTP_FORMAT_UNDEFINED_VIDEO 0xB980
+#define MTP_FORMAT_WMV 0xB981
+#define MTP_FORMAT_MP4_CONTAINER 0xB982 // ISO 14496-1
+#define MTP_FORMAT_MP2 0xB983
+#define MTP_FORMAT_3GP_CONTAINER 0xB984 // 3GPP file format. Details: http://www.3gpp.org/ftp/Specs/html-info/26244.htm (page title - “Transparent end-to-end packet switched streaming service, 3GPP file format”).
+#define MTP_FORMAT_UNDEFINED_COLLECTION 0xBA00
+#define MTP_FORMAT_ABSTRACT_MULTIMEDIA_ALBUM 0xBA01
+#define MTP_FORMAT_ABSTRACT_IMAGE_ALBUM 0xBA02
+#define MTP_FORMAT_ABSTRACT_AUDIO_ALBUM 0xBA03
+#define MTP_FORMAT_ABSTRACT_VIDEO_ALBUM 0xBA04
+#define MTP_FORMAT_ABSTRACT_AV_PLAYLIST 0xBA05
+#define MTP_FORMAT_ABSTRACT_CONTACT_GROUP 0xBA06
+#define MTP_FORMAT_ABSTRACT_MESSAGE_FOLDER 0xBA07
+#define MTP_FORMAT_ABSTRACT_CHAPTERED_PRODUCTION 0xBA08
+#define MTP_FORMAT_ABSTRACT_AUDIO_PLAYLIST 0xBA09
+#define MTP_FORMAT_ABSTRACT_VIDEO_PLAYLIST 0xBA0A
+#define MTP_FORMAT_ABSTRACT_MEDIACAST 0xBA0B // For use with mediacasts; references multimedia enclosures of RSS feeds or episodic content
+#define MTP_FORMAT_WPL_PLAYLIST 0xBA10
+#define MTP_FORMAT_M3U_PLAYLIST 0xBA11
+#define MTP_FORMAT_MPL_PLAYLIST 0xBA12
+#define MTP_FORMAT_ASX_PLAYLIST 0xBA13
+#define MTP_FORMAT_PLS_PLAYLIST 0xBA14
+#define MTP_FORMAT_UNDEFINED_DOCUMENT 0xBA80
+#define MTP_FORMAT_ABSTRACT_DOCUMENT 0xBA81
+#define MTP_FORMAT_XML_DOCUMENT 0xBA82
+#define MTP_FORMAT_MS_WORD_DOCUMENT 0xBA83
+#define MTP_FORMAT_MHT_COMPILED_HTML_DOCUMENT 0xBA84
+#define MTP_FORMAT_MS_EXCEL_SPREADSHEET 0xBA85
+#define MTP_FORMAT_MS_POWERPOINT_PRESENTATION 0xBA86
+#define MTP_FORMAT_UNDEFINED_MESSAGE 0xBB00
+#define MTP_FORMAT_ABSTRACT_MESSSAGE 0xBB01
+#define MTP_FORMAT_UNDEFINED_CONTACT 0xBB80
+#define MTP_FORMAT_ABSTRACT_CONTACT 0xBB81
+#define MTP_FORMAT_VCARD_2 0xBB82
+
+// MTP Object Property Codes
+#define MTP_PROPERTY_STORAGE_ID 0xDC01
+#define MTP_PROPERTY_OBJECT_FORMAT 0xDC02
+#define MTP_PROPERTY_PROTECTION_STATUS 0xDC03
+#define MTP_PROPERTY_OBJECT_SIZE 0xDC04
+#define MTP_PROPERTY_ASSOCIATION_TYPE 0xDC05
+#define MTP_PROPERTY_ASSOCIATION_DESC 0xDC06
+#define MTP_PROPERTY_OBJECT_FILE_NAME 0xDC07
+#define MTP_PROPERTY_DATE_CREATED 0xDC08
+#define MTP_PROPERTY_DATE_MODIFIED 0xDC09
+#define MTP_PROPERTY_KEYWORDS 0xDC0A
+#define MTP_PROPERTY_PARENT_OBJECT 0xDC0B
+#define MTP_PROPERTY_ALLOWED_FOLDER_CONTENTS 0xDC0C
+#define MTP_PROPERTY_HIDDEN 0xDC0D
+#define MTP_PROPERTY_SYSTEM_OBJECT 0xDC0E
+#define MTP_PROPERTY_PERSISTENT_UID 0xDC41
+#define MTP_PROPERTY_SYNC_ID 0xDC42
+#define MTP_PROPERTY_PROPERTY_BAG 0xDC43
+#define MTP_PROPERTY_NAME 0xDC44
+#define MTP_PROPERTY_CREATED_BY 0xDC45
+#define MTP_PROPERTY_ARTIST 0xDC46
+#define MTP_PROPERTY_DATE_AUTHORED 0xDC47
+#define MTP_PROPERTY_DESCRIPTION 0xDC48
+#define MTP_PROPERTY_URL_REFERENCE 0xDC49
+#define MTP_PROPERTY_LANGUAGE_LOCALE 0xDC4A
+#define MTP_PROPERTY_COPYRIGHT_INFORMATION 0xDC4B
+#define MTP_PROPERTY_SOURCE 0xDC4C
+#define MTP_PROPERTY_ORIGIN_LOCATION 0xDC4D
+#define MTP_PROPERTY_DATE_ADDED 0xDC4E
+#define MTP_PROPERTY_NON_CONSUMABLE 0xDC4F
+#define MTP_PROPERTY_CORRUPT_UNPLAYABLE 0xDC50
+#define MTP_PROPERTY_PRODUCER_SERIAL_NUMBER 0xDC51
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_FORMAT 0xDC81
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_SIZE 0xDC82
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_HEIGHT 0xDC83
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_WIDTH 0xDC84
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_DURATION 0xDC85
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_DATA 0xDC86
+#define MTP_PROPERTY_WIDTH 0xDC87
+#define MTP_PROPERTY_HEIGHT 0xDC88
+#define MTP_PROPERTY_DURATION 0xDC89
+#define MTP_PROPERTY_RATING 0xDC8A
+#define MTP_PROPERTY_TRACK 0xDC8B
+#define MTP_PROPERTY_GENRE 0xDC8C
+#define MTP_PROPERTY_CREDITS 0xDC8D
+#define MTP_PROPERTY_LYRICS 0xDC8E
+#define MTP_PROPERTY_SUBSCRIPTION_CONTENT_ID 0xDC8F
+#define MTP_PROPERTY_PRODUCED_BY 0xDC90
+#define MTP_PROPERTY_USE_COUNT 0xDC91
+#define MTP_PROPERTY_SKIP_COUNT 0xDC92
+#define MTP_PROPERTY_LAST_ACCESSED 0xDC93
+#define MTP_PROPERTY_PARENTAL_RATING 0xDC94
+#define MTP_PROPERTY_META_GENRE 0xDC95
+#define MTP_PROPERTY_COMPOSER 0xDC96
+#define MTP_PROPERTY_EFFECTIVE_RATING 0xDC97
+#define MTP_PROPERTY_SUBTITLE 0xDC98
+#define MTP_PROPERTY_ORIGINAL_RELEASE_DATE 0xDC99
+#define MTP_PROPERTY_ALBUM_NAME 0xDC9A
+#define MTP_PROPERTY_ALBUM_ARTIST 0xDC9B
+#define MTP_PROPERTY_MOOD 0xDC9C
+#define MTP_PROPERTY_DRM_STATUS 0xDC9D
+#define MTP_PROPERTY_SUB_DESCRIPTION 0xDC9E
+#define MTP_PROPERTY_IS_CROPPED 0xDCD1
+#define MTP_PROPERTY_IS_COLOUR_CORRECTED 0xDCD2
+#define MTP_PROPERTY_IMAGE_BIT_DEPTH 0xDCD3
+#define MTP_PROPERTY_F_NUMBER 0xDCD4
+#define MTP_PROPERTY_EXPOSURE_TIME 0xDCD5
+#define MTP_PROPERTY_EXPOSURE_INDEX 0xDCD6
+#define MTP_PROPERTY_TOTAL_BITRATE 0xDE91
+#define MTP_PROPERTY_BITRATE_TYPE 0xDE92
+#define MTP_PROPERTY_SAMPLE_RATE 0xDE93
+#define MTP_PROPERTY_NUMBER_OF_CHANNELS 0xDE94
+#define MTP_PROPERTY_AUDIO_BIT_DEPTH 0xDE95
+#define MTP_PROPERTY_SCAN_TYPE 0xDE97
+#define MTP_PROPERTY_AUDIO_WAVE_CODEC 0xDE99
+#define MTP_PROPERTY_AUDIO_BITRATE 0xDE9A
+#define MTP_PROPERTY_VIDEO_FOURCC_CODEC 0xDE9B
+#define MTP_PROPERTY_VIDEO_BITRATE 0xDE9C
+#define MTP_PROPERTY_FRAMES_PER_THOUSAND_SECONDS 0xDE9D
+#define MTP_PROPERTY_KEYFRAME_DISTANCE 0xDE9E
+#define MTP_PROPERTY_BUFFER_SIZE 0xDE9F
+#define MTP_PROPERTY_ENCODING_QUALITY 0xDEA0
+#define MTP_PROPERTY_ENCODING_PROFILE 0xDEA1
+#define MTP_PROPERTY_DISPLAY_NAME 0xDCE0
+#define MTP_PROPERTY_BODY_TEXT 0xDCE1
+#define MTP_PROPERTY_SUBJECT 0xDCE2
+#define MTP_PROPERTY_PRIORITY 0xDCE3
+#define MTP_PROPERTY_GIVEN_NAME 0xDD00
+#define MTP_PROPERTY_MIDDLE_NAMES 0xDD01
+#define MTP_PROPERTY_FAMILY_NAME 0xDD02
+#define MTP_PROPERTY_PREFIX 0xDD03
+#define MTP_PROPERTY_SUFFIX 0xDD04
+#define MTP_PROPERTY_PHONETIC_GIVEN_NAME 0xDD05
+#define MTP_PROPERTY_PHONETIC_FAMILY_NAME 0xDD06
+#define MTP_PROPERTY_EMAIL_PRIMARY 0xDD07
+#define MTP_PROPERTY_EMAIL_PERSONAL_1 0xDD08
+#define MTP_PROPERTY_EMAIL_PERSONAL_2 0xDD09
+#define MTP_PROPERTY_EMAIL_BUSINESS_1 0xDD0A
+#define MTP_PROPERTY_EMAIL_BUSINESS_2 0xDD0B
+#define MTP_PROPERTY_EMAIL_OTHERS 0xDD0C
+#define MTP_PROPERTY_PHONE_NUMBER_PRIMARY 0xDD0D
+#define MTP_PROPERTY_PHONE_NUMBER_PERSONAL 0xDD0E
+#define MTP_PROPERTY_PHONE_NUMBER_PERSONAL_2 0xDD0F
+#define MTP_PROPERTY_PHONE_NUMBER_BUSINESS 0xDD10
+#define MTP_PROPERTY_PHONE_NUMBER_BUSINESS_2 0xDD11
+#define MTP_PROPERTY_PHONE_NUMBER_MOBILE 0xDD12
+#define MTP_PROPERTY_PHONE_NUMBER_MOBILE_2 0xDD13
+#define MTP_PROPERTY_FAX_NUMBER_PRIMARY 0xDD14
+#define MTP_PROPERTY_FAX_NUMBER_PERSONAL 0xDD15
+#define MTP_PROPERTY_FAX_NUMBER_BUSINESS 0xDD16
+#define MTP_PROPERTY_PAGER_NUMBER 0xDD17
+#define MTP_PROPERTY_PHONE_NUMBER_OTHERS 0xDD18
+#define MTP_PROPERTY_PRIMARY_WEB_ADDRESS 0xDD19
+#define MTP_PROPERTY_PERSONAL_WEB_ADDRESS 0xDD1A
+#define MTP_PROPERTY_BUSINESS_WEB_ADDRESS 0xDD1B
+#define MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS 0xDD1C
+#define MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS_2 0xDD1D
+#define MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS_3 0xDD1E
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_FULL 0xDD1F
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_LINE_1 0xDD20
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_LINE_2 0xDD21
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_CITY 0xDD22
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_REGION 0xDD23
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_POSTAL_CODE 0xDD24
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_COUNTRY 0xDD25
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_FULL 0xDD26
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_LINE_1 0xDD27
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_LINE_2 0xDD28
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_CITY 0xDD29
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_REGION 0xDD2A
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_POSTAL_CODE 0xDD2B
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_COUNTRY 0xDD2C
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_FULL 0xDD2D
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_LINE_1 0xDD2E
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_LINE_2 0xDD2F
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_CITY 0xDD30
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_REGION 0xDD31
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_POSTAL_CODE 0xDD32
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_COUNTRY 0xDD33
+#define MTP_PROPERTY_ORGANIZATION_NAME 0xDD34
+#define MTP_PROPERTY_PHONETIC_ORGANIZATION_NAME 0xDD35
+#define MTP_PROPERTY_ROLE 0xDD36
+#define MTP_PROPERTY_BIRTHDATE 0xDD37
+#define MTP_PROPERTY_MESSAGE_TO 0xDD40
+#define MTP_PROPERTY_MESSAGE_CC 0xDD41
+#define MTP_PROPERTY_MESSAGE_BCC 0xDD42
+#define MTP_PROPERTY_MESSAGE_READ 0xDD43
+#define MTP_PROPERTY_MESSAGE_RECEIVED_TIME 0xDD44
+#define MTP_PROPERTY_MESSAGE_SENDER 0xDD45
+#define MTP_PROPERTY_ACTIVITY_BEGIN_TIME 0xDD50
+#define MTP_PROPERTY_ACTIVITY_END_TIME 0xDD51
+#define MTP_PROPERTY_ACTIVITY_LOCATION 0xDD52
+#define MTP_PROPERTY_ACTIVITY_REQUIRED_ATTENDEES 0xDD54
+#define MTP_PROPERTY_ACTIVITY_OPTIONAL_ATTENDEES 0xDD55
+#define MTP_PROPERTY_ACTIVITY_RESOURCES 0xDD56
+#define MTP_PROPERTY_ACTIVITY_ACCEPTED 0xDD57
+#define MTP_PROPERTY_ACTIVITY_TENTATIVE 0xDD58
+#define MTP_PROPERTY_ACTIVITY_DECLINED 0xDD59
+#define MTP_PROPERTY_ACTIVITY_REMAINDER_TIME 0xDD5A
+#define MTP_PROPERTY_ACTIVITY_OWNER 0xDD5B
+#define MTP_PROPERTY_ACTIVITY_STATUS 0xDD5C
+#define MTP_PROPERTY_OWNER 0xDD5D
+#define MTP_PROPERTY_EDITOR 0xDD5E
+#define MTP_PROPERTY_WEBMASTER 0xDD5F
+#define MTP_PROPERTY_URL_SOURCE 0xDD60
+#define MTP_PROPERTY_URL_DESTINATION 0xDD61
+#define MTP_PROPERTY_TIME_BOOKMARK 0xDD62
+#define MTP_PROPERTY_OBJECT_BOOKMARK 0xDD63
+#define MTP_PROPERTY_BYTE_BOOKMARK 0xDD64
+#define MTP_PROPERTY_LAST_BUILD_DATE 0xDD70
+#define MTP_PROPERTY_TIME_TO_LIVE 0xDD71
+#define MTP_PROPERTY_MEDIA_GUID 0xDD72
+
+// MTP Device Property Codes
+#define MTP_DEVICE_PROPERTY_UNDEFINED 0x5000
+#define MTP_DEVICE_PROPERTY_BATTERY_LEVEL 0x5001
+#define MTP_DEVICE_PROPERTY_FUNCTIONAL_MODE 0x5002
+#define MTP_DEVICE_PROPERTY_IMAGE_SIZE 0x5003
+#define MTP_DEVICE_PROPERTY_COMPRESSION_SETTING 0x5004
+#define MTP_DEVICE_PROPERTY_WHITE_BALANCE 0x5005
+#define MTP_DEVICE_PROPERTY_RGB_GAIN 0x5006
+#define MTP_DEVICE_PROPERTY_F_NUMBER 0x5007
+#define MTP_DEVICE_PROPERTY_FOCAL_LENGTH 0x5008
+#define MTP_DEVICE_PROPERTY_FOCUS_DISTANCE 0x5009
+#define MTP_DEVICE_PROPERTY_FOCUS_MODE 0x500A
+#define MTP_DEVICE_PROPERTY_EXPOSURE_METERING_MODE 0x500B
+#define MTP_DEVICE_PROPERTY_FLASH_MODE 0x500C
+#define MTP_DEVICE_PROPERTY_EXPOSURE_TIME 0x500D
+#define MTP_DEVICE_PROPERTY_EXPOSURE_PROGRAM_MODE 0x500E
+#define MTP_DEVICE_PROPERTY_EXPOSURE_INDEX 0x500F
+#define MTP_DEVICE_PROPERTY_EXPOSURE_BIAS_COMPENSATION 0x5010
+#define MTP_DEVICE_PROPERTY_DATETIME 0x5011
+#define MTP_DEVICE_PROPERTY_CAPTURE_DELAY 0x5012
+#define MTP_DEVICE_PROPERTY_STILL_CAPTURE_MODE 0x5013
+#define MTP_DEVICE_PROPERTY_CONTRAST 0x5014
+#define MTP_DEVICE_PROPERTY_SHARPNESS 0x5015
+#define MTP_DEVICE_PROPERTY_DIGITAL_ZOOM 0x5016
+#define MTP_DEVICE_PROPERTY_EFFECT_MODE 0x5017
+#define MTP_DEVICE_PROPERTY_BURST_NUMBER 0x5018
+#define MTP_DEVICE_PROPERTY_BURST_INTERVAL 0x5019
+#define MTP_DEVICE_PROPERTY_TIMELAPSE_NUMBER 0x501A
+#define MTP_DEVICE_PROPERTY_TIMELAPSE_INTERVAL 0x501B
+#define MTP_DEVICE_PROPERTY_FOCUS_METERING_MODE 0x501C
+#define MTP_DEVICE_PROPERTY_UPLOAD_URL 0x501D
+#define MTP_DEVICE_PROPERTY_ARTIST 0x501E
+#define MTP_DEVICE_PROPERTY_COPYRIGHT_INFO 0x501F
+#define MTP_DEVICE_PROPERTY_SYNCHRONIZATION_PARTNER 0xD401
+#define MTP_DEVICE_PROPERTY_DEVICE_FRIENDLY_NAME 0xD402
+#define MTP_DEVICE_PROPERTY_VOLUME 0xD403
+#define MTP_DEVICE_PROPERTY_SUPPORTED_FORMATS_ORDERED 0xD404
+#define MTP_DEVICE_PROPERTY_DEVICE_ICON 0xD405
+#define MTP_DEVICE_PROPERTY_PLAYBACK_RATE 0xD410
+#define MTP_DEVICE_PROPERTY_PLAYBACK_OBJECT 0xD411
+#define MTP_DEVICE_PROPERTY_PLAYBACK_CONTAINER_INDEX 0xD412
+#define MTP_DEVICE_PROPERTY_SESSION_INITIATOR_VERSION_INFO 0xD406
+#define MTP_DEVICE_PROPERTY_PERCEIVED_DEVICE_TYPE 0xD407
+
+// MTP Operation Codes
+#define MTP_OPERATION_GET_DEVICE_INFO 0x1001
+#define MTP_OPERATION_OPEN_SESSION 0x1002
+#define MTP_OPERATION_CLOSE_SESSION 0x1003
+#define MTP_OPERATION_GET_STORAGE_IDS 0x1004
+#define MTP_OPERATION_GET_STORAGE_INFO 0x1005
+#define MTP_OPERATION_GET_NUM_OBJECTS 0x1006
+#define MTP_OPERATION_GET_OBJECT_HANDLES 0x1007
+#define MTP_OPERATION_GET_OBJECT_INFO 0x1008
+#define MTP_OPERATION_GET_OBJECT 0x1009
+#define MTP_OPERATION_GET_THUMB 0x100A
+#define MTP_OPERATION_DELETE_OBJECT 0x100B
+#define MTP_OPERATION_SEND_OBJECT_INFO 0x100C
+#define MTP_OPERATION_SEND_OBJECT 0x100D
+#define MTP_OPERATION_INITIATE_CAPTURE 0x100E
+#define MTP_OPERATION_FORMAT_STORE 0x100F
+#define MTP_OPERATION_RESET_DEVICE 0x1010
+#define MTP_OPERATION_SELF_TEST 0x1011
+#define MTP_OPERATION_SET_OBJECT_PROTECTION 0x1012
+#define MTP_OPERATION_POWER_DOWN 0x1013
+#define MTP_OPERATION_GET_DEVICE_PROP_DESC 0x1014
+#define MTP_OPERATION_GET_DEVICE_PROP_VALUE 0x1015
+#define MTP_OPERATION_SET_DEVICE_PROP_VALUE 0x1016
+#define MTP_OPERATION_RESET_DEVICE_PROP_VALUE 0x1017
+#define MTP_OPERATION_TERMINATE_OPEN_CAPTURE 0x1018
+#define MTP_OPERATION_MOVE_OBJECT 0x1019
+#define MTP_OPERATION_COPY_OBJECT 0x101A
+#define MTP_OPERATION_GET_PARTIAL_OBJECT 0x101B
+#define MTP_OPERATION_INITIATE_OPEN_CAPTURE 0x101C
+#define MTP_OPERATION_GET_OBJECT_PROPS_SUPPORTED 0x9801
+#define MTP_OPERATION_GET_OBJECT_PROP_DESC 0x9802
+#define MTP_OPERATION_GET_OBJECT_PROP_VALUE 0x9803
+#define MTP_OPERATION_SET_OBJECT_PROP_VALUE 0x9804
+#define MTP_OPERATION_GET_OBJECT_PROP_LIST 0x9805
+#define MTP_OPERATION_SET_OBJECT_PROP_LIST 0x9806
+#define MTP_OPERATION_GET_INTERDEPENDENT_PROP_DESC 0x9807
+#define MTP_OPERATION_SEND_OBJECT_PROP_LIST 0x9808
+#define MTP_OPERATION_GET_OBJECT_REFERENCES 0x9810
+#define MTP_OPERATION_SET_OBJECT_REFERENCES 0x9811
+#define MTP_OPERATION_SKIP 0x9820
+
+// MTP Response Codes
+#define MTP_RESPONSE_UNDEFINED 0x2000
+#define MTP_RESPONSE_OK 0x2001
+#define MTP_RESPONSE_GENERAL_ERROR 0x2002
+#define MTP_RESPONSE_SESSION_NOT_OPEN 0x2003
+#define MTP_RESPONSE_INVALID_TRANSACTION_ID 0x2004
+#define MTP_RESPONSE_OPERATION_NOT_SUPPORTED 0x2005
+#define MTP_RESPONSE_PARAMETER_NOT_SUPPORTED 0x2006
+#define MTP_RESPONSE_INCOMPLETE_TRANSFER 0x2007
+#define MTP_RESPONSE_INVALID_STORAGE_ID 0x2008
+#define MTP_RESPONSE_INVALID_OBJECT_HANDLE 0x2009
+#define MTP_RESPONSE_DEVICE_PROP_NOT_SUPPORTED 0x200A
+#define MTP_RESPONSE_INVALID_OBJECT_FORMAT_CODE 0x200B
+#define MTP_RESPONSE_STORAGE_FULL 0x200C
+#define MTP_RESPONSE_OBJECT_WRITE_PROTECTED 0x200D
+#define MTP_RESPONSE_STORE_READ_ONLY 0x200E
+#define MTP_RESPONSE_ACCESS_DENIED 0x200F
+#define MTP_RESPONSE_NO_THUMBNAIL_PRESENT 0x2010
+#define MTP_RESPONSE_SELF_TEST_FAILED 0x2011
+#define MTP_RESPONSE_PARTIAL_DELETION 0x2012
+#define MTP_RESPONSE_STORE_NOT_AVAILABLE 0x2013
+#define MTP_RESPONSE_SPECIFICATION_BY_FORMAT_UNSUPPORTED 0x2014
+#define MTP_RESPONSE_NO_VALID_OBJECT_INFO 0x2015
+#define MTP_RESPONSE_INVALID_CODE_FORMAT 0x2016
+#define MTP_RESPONSE_UNKNOWN_VENDOR_CODE 0x2017
+#define MTP_RESPONSE_CAPTURE_ALREADY_TERMINATED 0x2018
+#define MTP_RESPONSE_DEVICE_BUSY 0x2019
+#define MTP_RESPONSE_INVALID_PARENT_OBJECT 0x201A
+#define MTP_RESPONSE_INVALID_DEVICE_PROP_FORMAT 0x201B
+#define MTP_RESPONSE_INVALID_DEVICE_PROP_VALUE 0x201C
+#define MTP_RESPONSE_INVALID_PARAMETER 0x201D
+#define MTP_RESPONSE_SESSION_ALREADY_OPEN 0x201E
+#define MTP_RESPONSE_TRANSACTION_CANCELLED 0x201F
+#define MTP_RESPONSE_SPECIFICATION_OF_DESTINATION_UNSUPPORTED 0x2020
+#define MTP_RESPONSE_INVALID_OBJECT_PROP_CODE 0xA801
+#define MTP_RESPONSE_INVALID_OBJECT_PROP_FORMAT 0xA802
+#define MTP_RESPONSE_INVALID_OBJECT_PROP_VALUE 0xA803
+#define MTP_RESPONSE_INVALID_OBJECT_REFERENCE 0xA804
+#define MTP_RESPONSE_GROUP_NOT_SUPPORTED 0xA805
+#define MTP_RESPONSE_INVALID_DATASET 0xA806
+#define MTP_RESPONSE_SPECIFICATION_BY_GROUP_UNSUPPORTED 0xA807
+#define MTP_RESPONSE_SPECIFICATION_BY_DEPTH_UNSUPPORTED 0xA808
+#define MTP_RESPONSE_OBJECT_TOO_LARGE 0xA809
+#define MTP_RESPONSE_OBJECT_PROP_NOT_SUPPORTED 0xA80A
+
+// MTP Event Codes
+#define MTP_EVENT_UNDEFINED 0x4000
+#define MTP_EVENT_CANCEL_TRANSACTION 0x4001
+#define MTP_EVENT_OBJECT_ADDED 0x4002
+#define MTP_EVENT_OBJECT_REMOVED 0x4003
+#define MTP_EVENT_STORE_ADDED 0x4004
+#define MTP_EVENT_STORE_REMOVED 0x4005
+#define MTP_EVENT_DEVICE_PROP_CHANGED 0x4006
+#define MTP_EVENT_OBJECT_INFO_CHANGED 0x4007
+#define MTP_EVENT_DEVICE_INFO_CHANGED 0x4008
+#define MTP_EVENT_REQUEST_OBJECT_TRANSFER 0x4009
+#define MTP_EVENT_STORE_FULL 0x400A
+#define MTP_EVENT_DEVICE_RESET 0x400B
+#define MTP_EVENT_STORAGE_INFO_CHANGED 0x400C
+#define MTP_EVENT_CAPTURE_COMPLETE 0x400D
+#define MTP_EVENT_UNREPORTED_STATUS 0x400E
+#define MTP_EVENT_OBJECT_PROP_CHANGED 0xC801
+#define MTP_EVENT_OBJECT_PROP_DESC_CHANGED 0xC802
+#define MTP_EVENT_OBJECT_REFERENCES_CHANGED 0xC803
+
+// Storage Type
+#define MTP_STORAGE_FIXED_ROM 0x0001
+#define MTP_STORAGE_REMOVABLE_ROM 0x0002
+#define MTP_STORAGE_FIXED_RAM 0x0003
+#define MTP_STORAGE_REMOVABLE_RAM 0x0004
+
+// Storage File System
+#define MTP_STORAGE_FILESYSTEM_FLAT 0x0001
+#define MTP_STORAGE_FILESYSTEM_HIERARCHICAL 0x0002
+#define MTP_STORAGE_FILESYSTEM_DCF 0x0003
+
+// Storage Access Capability
+#define MTP_STORAGE_READ_WRITE 0x0000
+#define MTP_STORAGE_READ_ONLY_WITHOUT_DELETE 0x0001
+#define MTP_STORAGE_READ_ONLY_WITH_DELETE 0x0002
+
+// Association Type
+#define MTP_ASSOCIATION_TYPE_UNDEFINED 0x0000
+#define MTP_ASSOCIATION_TYPE_GENERIC_FOLDER 0x0001
+
+#endif // _MTP_H
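
The response codes above are plain #defines, so callers otherwise see only raw hex values in logs. Below is a minimal standalone sketch of turning a handful of them into readable names for log output; mtpResponseName() is a hypothetical helper, not part of this patch, and the constants are re-declared locally so the snippet compiles on its own.

// Standalone sketch: map a few of the MTP response codes defined above to
// readable names for logging. mtpResponseName() is hypothetical.
#include <cstdio>

#define MTP_RESPONSE_OK                      0x2001
#define MTP_RESPONSE_GENERAL_ERROR           0x2002
#define MTP_RESPONSE_SESSION_NOT_OPEN        0x2003
#define MTP_RESPONSE_OPERATION_NOT_SUPPORTED 0x2005

static const char* mtpResponseName(unsigned code) {
    switch (code) {
    case MTP_RESPONSE_OK:                      return "OK";
    case MTP_RESPONSE_GENERAL_ERROR:           return "GENERAL_ERROR";
    case MTP_RESPONSE_SESSION_NOT_OPEN:        return "SESSION_NOT_OPEN";
    case MTP_RESPONSE_OPERATION_NOT_SUPPORTED: return "OPERATION_NOT_SUPPORTED";
    default:                                   return "UNKNOWN";
    }
}

int main() {
    unsigned code = MTP_RESPONSE_SESSION_NOT_OPEN;
    std::printf("response 0x%04X (%s)\n", code, mtpResponseName(code));
    return 0;
}
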
diff --git a/services/audioflinger/A2dpAudioInterface.cpp b/services/audioflinger/A2dpAudioInterface.cpp
index 995e31c..aee01ab 100644
--- a/services/audioflinger/A2dpAudioInterface.cpp
+++ b/services/audioflinger/A2dpAudioInterface.cpp
@@ -23,10 +23,13 @@
#include "A2dpAudioInterface.h"
#include "audio/liba2dp.h"
-
+#include <hardware_legacy/power.h>
namespace android {
+static const char *sA2dpWakeLock = "A2dpOutputStream";
+#define MAX_WRITE_RETRIES 5
+
// ----------------------------------------------------------------------------
//AudioHardwareInterface* A2dpAudioInterface::createA2dpInterface()
@@ -263,44 +266,55 @@
A2dpAudioInterface::A2dpAudioStreamOut::~A2dpAudioStreamOut()
{
LOGV("A2dpAudioStreamOut destructor");
- standby();
close();
LOGV("A2dpAudioStreamOut destructor returning from close()");
}
ssize_t A2dpAudioInterface::A2dpAudioStreamOut::write(const void* buffer, size_t bytes)
{
- Mutex::Autolock lock(mLock);
-
- size_t remaining = bytes;
status_t status = -1;
+ {
+ Mutex::Autolock lock(mLock);
- if (!mBluetoothEnabled || mClosing || mSuspended) {
- LOGV("A2dpAudioStreamOut::write(), but bluetooth disabled \
- mBluetoothEnabled %d, mClosing %d, mSuspended %d",
- mBluetoothEnabled, mClosing, mSuspended);
- goto Error;
- }
+ size_t remaining = bytes;
- status = init();
- if (status < 0)
- goto Error;
-
- while (remaining > 0) {
- status = a2dp_write(mData, buffer, remaining);
- if (status <= 0) {
- LOGE("a2dp_write failed err: %d\n", status);
+ if (!mBluetoothEnabled || mClosing || mSuspended) {
+ LOGV("A2dpAudioStreamOut::write(), but bluetooth disabled \
+ mBluetoothEnabled %d, mClosing %d, mSuspended %d",
+ mBluetoothEnabled, mClosing, mSuspended);
goto Error;
}
- remaining -= status;
- buffer = ((char *)buffer) + status;
+
+ if (mStandby) {
+ acquire_wake_lock (PARTIAL_WAKE_LOCK, sA2dpWakeLock);
+ mStandby = false;
+ }
+
+ status = init();
+ if (status < 0)
+ goto Error;
+
+ int retries = MAX_WRITE_RETRIES;
+ while (remaining > 0 && retries) {
+ status = a2dp_write(mData, buffer, remaining);
+ if (status < 0) {
+ LOGE("a2dp_write failed err: %d\n", status);
+ goto Error;
+ }
+ if (status == 0) {
+ retries--;
+ }
+ remaining -= status;
+ buffer = (char *)buffer + status;
+ }
+
+ return bytes;
+
}
-
- mStandby = false;
-
- return bytes;
-
Error:
+
+ standby();
+
// Simulate audio output timing in case of error
usleep(((bytes * 1000 )/ frameSize() / sampleRate()) * 1000);
@@ -324,19 +338,22 @@
status_t A2dpAudioInterface::A2dpAudioStreamOut::standby()
{
- int result = 0;
-
- if (mClosing) {
- LOGV("Ignore standby, closing");
- return result;
- }
-
Mutex::Autolock lock(mLock);
+ return standby_l();
+}
+
+status_t A2dpAudioInterface::A2dpAudioStreamOut::standby_l()
+{
+ int result = NO_ERROR;
if (!mStandby) {
- result = a2dp_stop(mData);
- if (result == 0)
- mStandby = true;
+ LOGV_IF(mClosing || !mBluetoothEnabled, "Standby skip stop: closing %d enabled %d",
+ mClosing, mBluetoothEnabled);
+ if (!mClosing && mBluetoothEnabled) {
+ result = a2dp_stop(mData);
+ }
+ release_wake_lock(sA2dpWakeLock);
+ mStandby = true;
}
return result;
@@ -362,6 +379,9 @@
key = String8("closing");
if (param.get(key, value) == NO_ERROR) {
mClosing = (value == "true");
+ if (mClosing) {
+ standby();
+ }
param.remove(key);
}
key = AudioParameter::keyRouting;
@@ -444,6 +464,7 @@
status_t A2dpAudioInterface::A2dpAudioStreamOut::close_l()
{
+ standby_l();
if (mData) {
LOGV("A2dpAudioStreamOut::close_l() calling a2dp_cleanup(mData)");
a2dp_cleanup(mData);
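
The rewritten write() above takes the wake lock once when leaving standby, bounds the number of zero-byte a2dp_write() results with MAX_WRITE_RETRIES, and drops back to standby on error. Below is a standalone sketch of that shape; fakeWrite(), holdWakeLock() and releaseWakeLock() stand in for a2dp_write() and the hardware_legacy wake-lock calls and are not the real APIs.

// Standalone sketch of the bounded-retry write pattern used above.
#include <cstddef>
#include <cstdio>

static const int kMaxWriteRetries = 5;
static bool sWakeLockHeld = false;

static void holdWakeLock()    { sWakeLockHeld = true; }
static void releaseWakeLock() { sWakeLockHeld = false; }

// Pretend sink: accepts at most 4 bytes per call and sometimes accepts 0.
static int fakeWrite(const void* /*buf*/, size_t bytes) {
    static int call = 0;
    if (++call % 3 == 0) return 0;          // simulate a stalled write
    return bytes < 4 ? (int)bytes : 4;
}

static long writeAll(const void* buffer, size_t bytes, bool& standby) {
    size_t remaining = bytes;
    if (standby) {                          // leaving standby: take the wake lock once
        holdWakeLock();
        standby = false;
    }
    int retries = kMaxWriteRetries;
    while (remaining > 0 && retries) {
        int written = fakeWrite(buffer, remaining);
        if (written < 0) return -1;         // hard error: caller re-enters standby
        if (written == 0) retries--;        // bound the number of empty writes
        remaining -= written;
        buffer = (const char*)buffer + written;
    }
    return (long)(bytes - remaining);
}

int main() {
    bool standby = true;
    const char data[10] = "123456789";
    long n = writeAll(data, sizeof(data), standby);
    std::printf("wrote %ld bytes, wake lock held: %d\n", n, sWakeLockHeld);
    releaseWakeLock();                      // what standby() would do
    return 0;
}
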
diff --git a/services/audioflinger/A2dpAudioInterface.h b/services/audioflinger/A2dpAudioInterface.h
index 48154f9..cef1926 100644
--- a/services/audioflinger/A2dpAudioInterface.h
+++ b/services/audioflinger/A2dpAudioInterface.h
@@ -103,6 +103,7 @@
status_t setAddress(const char* address);
status_t setBluetoothEnabled(bool enabled);
status_t setSuspended(bool onOff);
+ status_t standby_l();
private:
int mFd;
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index cd9b07e..51b5947 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -343,7 +343,7 @@
lSessionId = *sessionId;
} else {
// if no audio session id is provided, create one here
- lSessionId = nextUniqueId();
+ lSessionId = nextUniqueId_l();
if (sessionId != NULL) {
*sessionId = lSessionId;
}
@@ -3699,7 +3699,7 @@
if (sessionId != NULL && *sessionId != AudioSystem::SESSION_OUTPUT_MIX) {
lSessionId = *sessionId;
} else {
- lSessionId = nextUniqueId();
+ lSessionId = nextUniqueId_l();
if (sessionId != NULL) {
*sessionId = lSessionId;
}
@@ -4300,7 +4300,7 @@
mHardwareStatus = AUDIO_HW_IDLE;
if (output != 0) {
- int id = nextUniqueId();
+ int id = nextUniqueId_l();
if ((flags & AudioSystem::OUTPUT_FLAG_DIRECT) ||
(format != AudioSystem::PCM_16_BIT) ||
(channels != AudioSystem::CHANNEL_OUT_STEREO)) {
@@ -4348,7 +4348,7 @@
return 0;
}
- int id = nextUniqueId();
+ int id = nextUniqueId_l();
DuplicatingThread *thread = new DuplicatingThread(this, thread1, id);
thread->addOutputTrack(thread2);
mPlaybackThreads.add(id, thread);
@@ -4473,7 +4473,7 @@
}
if (input != 0) {
- int id = nextUniqueId();
+ int id = nextUniqueId_l();
// Start record thread
thread = new RecordThread(this, input, reqSamplingRate, reqChannels, id);
mRecordThreads.add(id, thread);
@@ -4543,7 +4543,8 @@
int AudioFlinger::newAudioSessionId()
{
- return nextUniqueId();
+ AutoMutex _l(mLock);
+ return nextUniqueId_l();
}
// checkPlaybackThread_l() must be called with AudioFlinger::mLock held
@@ -4578,9 +4579,10 @@
return thread;
}
-int AudioFlinger::nextUniqueId()
+// nextUniqueId_l() must be called with AudioFlinger::mLock held
+int AudioFlinger::nextUniqueId_l()
{
- return android_atomic_inc(&mNextUniqueId);
+ return mNextUniqueId++;
}
// ----------------------------------------------------------------------------
@@ -4967,7 +4969,7 @@
LOGV("createEffect_l() got effect %p on chain %p", effect == 0 ? 0 : effect.get(), chain.get());
if (effect == 0) {
- int id = mAudioFlinger->nextUniqueId();
+ int id = mAudioFlinger->nextUniqueId_l();
// Check CPU and memory usage
lStatus = AudioSystem::registerEffect(desc, mId, chain->strategy(), sessionId, id);
if (lStatus != NO_ERROR) {
@@ -5808,7 +5810,8 @@
const uint32_t AudioFlinger::EffectModule::sModeConvTable[] = {
AUDIO_MODE_NORMAL, // AudioSystem::MODE_NORMAL
AUDIO_MODE_RINGTONE, // AudioSystem::MODE_RINGTONE
- AUDIO_MODE_IN_CALL // AudioSystem::MODE_IN_CALL
+ AUDIO_MODE_IN_CALL, // AudioSystem::MODE_IN_CALL
+ AUDIO_MODE_IN_CALL // AudioSystem::MODE_IN_COMMUNICATION, same conversion as for MODE_IN_CALL
};
int AudioFlinger::EffectModule::modeAudioSystemToEffectApi(uint32_t mode)
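
The change above drops android_atomic_inc() in favour of a plain increment and renames the method with the usual _l suffix, meaning "call with mLock held". Below is a small self-contained sketch of that convention; the class and method names are illustrative only.

// Standalone sketch of the "_l" locking convention adopted above.
#include <cstdio>
#include <mutex>

class IdAllocator {
public:
    // Public entry point: takes the lock, then uses the _l variant.
    int newId() {
        std::lock_guard<std::mutex> guard(mLock);
        return nextUniqueId_l();
    }
private:
    // nextUniqueId_l() must be called with mLock held, so no atomic is needed.
    int nextUniqueId_l() { return mNextUniqueId++; }

    std::mutex mLock;
    int mNextUniqueId = 1;
};

int main() {
    IdAllocator ids;
    int a = ids.newId();
    int b = ids.newId();
    std::printf("%d %d\n", a, b);
    return 0;
}
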
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 5917632..f0ef867 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -785,7 +785,7 @@
float streamVolumeInternal(int stream) const { return mStreamTypes[stream].volume; }
void audioConfigChanged_l(int event, int ioHandle, void *param2);
- int nextUniqueId();
+ int nextUniqueId_l();
status_t moveEffectChain_l(int session,
AudioFlinger::PlaybackThread *srcThread,
AudioFlinger::PlaybackThread *dstThread,
diff --git a/services/audioflinger/AudioPolicyManagerBase.cpp b/services/audioflinger/AudioPolicyManagerBase.cpp
index ee9297f..175f613 100644
--- a/services/audioflinger/AudioPolicyManagerBase.cpp
+++ b/services/audioflinger/AudioPolicyManagerBase.cpp
@@ -235,7 +235,7 @@
// if leaving call state, handle special case of active streams
// pertaining to sonification strategy see handleIncallSonification()
- if (mPhoneState == AudioSystem::MODE_IN_CALL) {
+ if (isInCall()) {
LOGV("setPhoneState() in call state management: new state is %d", state);
for (int stream = 0; stream < AudioSystem::NUM_STREAM_TYPES; stream++) {
handleIncallSonification(stream, false, true);
@@ -248,16 +248,21 @@
bool force = false;
// are we entering or starting a call
- if ((oldState != AudioSystem::MODE_IN_CALL) && (state == AudioSystem::MODE_IN_CALL)) {
+ if (!isStateInCall(oldState) && isStateInCall(state)) {
LOGV(" Entering call in setPhoneState()");
// force routing command to audio hardware when starting a call
// even if no device change is needed
force = true;
- } else if ((oldState == AudioSystem::MODE_IN_CALL) && (state != AudioSystem::MODE_IN_CALL)) {
+ } else if (isStateInCall(oldState) && !isStateInCall(state)) {
LOGV(" Exiting call in setPhoneState()");
// force routing command to audio hardware when exiting a call
// even if no device change is needed
force = true;
+ } else if (isStateInCall(state) && (state != oldState)) {
+ LOGV(" Switching between telephony and VoIP in setPhoneState()");
+ // force routing command to audio hardware when switching between telephony and VoIP
+ // even if no device change is needed
+ force = true;
}
// check for device and output changes triggered by new phone state
@@ -272,7 +277,7 @@
// force routing command to audio hardware when ending call
// even if no device change is needed
- if (oldState == AudioSystem::MODE_IN_CALL && newDevice == 0) {
+ if (isStateInCall(oldState) && newDevice == 0) {
newDevice = hwOutputDesc->device();
}
@@ -280,7 +285,7 @@
// immediately and delay the route change to avoid sending the ring tone
// tail into the earpiece or headset.
int delayMs = 0;
- if (state == AudioSystem::MODE_IN_CALL && oldState == AudioSystem::MODE_RINGTONE) {
+ if (isStateInCall(state) && oldState == AudioSystem::MODE_RINGTONE) {
// delay the device change command by twice the output latency to have some margin
// and be sure that audio buffers not yet affected by the mute are out when
// we actually apply the route change
@@ -293,7 +298,7 @@
// if entering in call state, handle special case of active streams
// pertaining to sonification strategy see handleIncallSonification()
- if (state == AudioSystem::MODE_IN_CALL) {
+ if (isStateInCall(state)) {
LOGV("setPhoneState() in call state management: new state is %d", state);
// unmute the ringing tone after a sufficient delay if it was muted before
// setting output device above
@@ -338,7 +343,9 @@
break;
case AudioSystem::FOR_MEDIA:
if (config != AudioSystem::FORCE_HEADPHONES && config != AudioSystem::FORCE_BT_A2DP &&
- config != AudioSystem::FORCE_WIRED_ACCESSORY && config != AudioSystem::FORCE_NONE) {
+ config != AudioSystem::FORCE_WIRED_ACCESSORY &&
+ config != AudioSystem::FORCE_ANALOG_DOCK &&
+ config != AudioSystem::FORCE_DIGITAL_DOCK && config != AudioSystem::FORCE_NONE) {
LOGW("setForceUse() invalid config %d for FOR_MEDIA", config);
return;
}
@@ -354,7 +361,10 @@
break;
case AudioSystem::FOR_DOCK:
if (config != AudioSystem::FORCE_NONE && config != AudioSystem::FORCE_BT_CAR_DOCK &&
- config != AudioSystem::FORCE_BT_DESK_DOCK && config != AudioSystem::FORCE_WIRED_ACCESSORY) {
+ config != AudioSystem::FORCE_BT_DESK_DOCK &&
+ config != AudioSystem::FORCE_WIRED_ACCESSORY &&
+ config != AudioSystem::FORCE_ANALOG_DOCK &&
+ config != AudioSystem::FORCE_DIGITAL_DOCK) {
LOGW("setForceUse() invalid config %d for FOR_DOCK", config);
}
forceVolumeReeval = true;
@@ -564,7 +574,7 @@
setOutputDevice(output, getNewDevice(output));
// handle special case for sonification while in call
- if (mPhoneState == AudioSystem::MODE_IN_CALL) {
+ if (isInCall()) {
handleIncallSonification(stream, true, false);
}
@@ -589,7 +599,7 @@
routing_strategy strategy = getStrategy((AudioSystem::stream_type)stream);
// handle special case for sonification while in call
- if (mPhoneState == AudioSystem::MODE_IN_CALL) {
+ if (isInCall()) {
handleIncallSonification(stream, false, false);
}
@@ -738,10 +748,8 @@
AudioParameter param = AudioParameter();
param.addInt(String8(AudioParameter::keyRouting), (int)inputDesc->mDevice);
- // use Voice Recognition mode or not for this input based on input source
- int vr_enabled = inputDesc->mInputSource == AUDIO_SOURCE_VOICE_RECOGNITION ? 1 : 0;
- param.addInt(String8("vr_mode"), vr_enabled);
- LOGV("AudioPolicyManager::startInput(%d), setting vr_mode to %d", inputDesc->mInputSource, vr_enabled);
+ param.addInt(String8(AudioParameter::keyInputSource), (int)inputDesc->mInputSource);
+ LOGV("AudioPolicyManager::startInput() input source = %d", inputDesc->mInputSource);
mpClientInterface->setParameters(input, param.toString());
@@ -1000,8 +1008,9 @@
#ifdef AUDIO_POLICY_TEST
Thread(false),
#endif //AUDIO_POLICY_TEST
- mPhoneState(AudioSystem::MODE_NORMAL), mRingerMode(0), mMusicStopTime(0),
- mLimitRingtoneVolume(false), mTotalEffectsCpuLoad(0), mTotalEffectsMemory(0),
+ mPhoneState(AudioSystem::MODE_NORMAL), mRingerMode(0),
+ mMusicStopTime(0), mLimitRingtoneVolume(false), mLastVoiceVolume(-1.0f),
+ mTotalEffectsCpuLoad(0), mTotalEffectsMemory(0),
mA2dpSuspended(false)
{
mpClientInterface = clientInterface;
@@ -1343,6 +1352,7 @@
void AudioPolicyManagerBase::closeA2dpOutputs()
{
+
LOGV("setDeviceConnectionState() closing A2DP and duplicated output!");
if (mDuplicatedOutput != 0) {
@@ -1493,7 +1503,7 @@
// use device for strategy media
// 4: the strategy DTMF is active on the hardware output:
// use device for strategy DTMF
- if (mPhoneState == AudioSystem::MODE_IN_CALL ||
+ if (isInCall() ||
outputDesc->isUsedByStrategy(STRATEGY_PHONE)) {
device = getDeviceForStrategy(STRATEGY_PHONE, fromCache);
} else if (outputDesc->isUsedByStrategy(STRATEGY_SONIFICATION)) {
@@ -1548,7 +1558,7 @@
switch (strategy) {
case STRATEGY_DTMF:
- if (mPhoneState != AudioSystem::MODE_IN_CALL) {
+ if (!isInCall()) {
// when off call, DTMF strategy follows the same rules as MEDIA strategy
device = getDeviceForStrategy(STRATEGY_MEDIA, false);
break;
@@ -1561,7 +1571,7 @@
// of priority
switch (mForceUse[AudioSystem::FOR_COMMUNICATION]) {
case AudioSystem::FORCE_BT_SCO:
- if (mPhoneState != AudioSystem::MODE_IN_CALL || strategy != STRATEGY_DTMF) {
+ if (!isInCall() || strategy != STRATEGY_DTMF) {
device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_BLUETOOTH_SCO_CARKIT;
if (device) break;
}
@@ -1577,9 +1587,11 @@
if (device) break;
device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_WIRED_HEADSET;
if (device) break;
+ device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_ANLG_DOCK_HEADSET;
+ if (device) break;
#ifdef WITH_A2DP
// when not in a phone call, phone strategy should route STREAM_VOICE_CALL to A2DP
- if (mPhoneState != AudioSystem::MODE_IN_CALL) {
+ if (!isInCall()) {
device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_BLUETOOTH_A2DP;
if (device) break;
device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES;
@@ -1593,14 +1605,14 @@
break;
case AudioSystem::FORCE_SPEAKER:
- if (mPhoneState != AudioSystem::MODE_IN_CALL || strategy != STRATEGY_DTMF) {
+ if (!isInCall() || strategy != STRATEGY_DTMF) {
device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_BLUETOOTH_SCO_CARKIT;
if (device) break;
}
#ifdef WITH_A2DP
// when not in a phone call, phone strategy should route STREAM_VOICE_CALL to
// A2DP speaker when forcing to speaker output
- if (mPhoneState != AudioSystem::MODE_IN_CALL) {
+ if (!isInCall()) {
device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER;
if (device) break;
}
@@ -1617,7 +1629,7 @@
// If incall, just select the STRATEGY_PHONE device: The rest of the behavior is handled by
// handleIncallSonification().
- if (mPhoneState == AudioSystem::MODE_IN_CALL) {
+ if (isInCall()) {
device = getDeviceForStrategy(STRATEGY_PHONE, false);
break;
}
@@ -1636,6 +1648,12 @@
if (device2 == 0) {
device2 = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_WIRED_HEADSET;
}
+ if (device2 == 0) {
+ device2 = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_ANLG_DOCK_HEADSET;
+ }
+ if (device2 == 0) {
+ device2 = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_DGTL_DOCK_HEADSET;
+ }
#ifdef WITH_A2DP
if (mA2dpOutput != 0) {
if (strategy == STRATEGY_SONIFICATION && !a2dpUsedForSonification()) {
@@ -1738,6 +1756,7 @@
case AUDIO_SOURCE_DEFAULT:
case AUDIO_SOURCE_MIC:
case AUDIO_SOURCE_VOICE_RECOGNITION:
+ case AUDIO_SOURCE_VOICE_COMMUNICATION:
if (mForceUse[AudioSystem::FOR_RECORD] == AudioSystem::FORCE_BT_SCO &&
mAvailableInputDevices & AudioSystem::DEVICE_IN_BLUETOOTH_SCO_HEADSET) {
device = AudioSystem::DEVICE_IN_BLUETOOTH_SCO_HEADSET;
@@ -1800,7 +1819,9 @@
(AudioSystem::DEVICE_OUT_BLUETOOTH_A2DP |
AudioSystem::DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES |
AudioSystem::DEVICE_OUT_WIRED_HEADSET |
- AudioSystem::DEVICE_OUT_WIRED_HEADPHONE)) &&
+ AudioSystem::DEVICE_OUT_WIRED_HEADPHONE |
+ AudioSystem::DEVICE_OUT_ANLG_DOCK_HEADSET |
+ AudioSystem::DEVICE_OUT_DGTL_DOCK_HEADSET)) &&
(getStrategy((AudioSystem::stream_type)stream) == STRATEGY_SONIFICATION) &&
streamDesc.mCanBeMuted) {
volume *= SONIFICATION_HEADSET_VOLUME_FACTOR;
@@ -1838,29 +1859,38 @@
}
float volume = computeVolume(stream, index, output, device);
- // do not set volume if the float value did not change
- if (volume != mOutputs.valueFor(output)->mCurVolume[stream] || force) {
+ // We actually change the volume if:
+ // - the float value returned by computeVolume() changed
+ // - the force flag is set
+ if (volume != mOutputs.valueFor(output)->mCurVolume[stream] ||
+ force) {
mOutputs.valueFor(output)->mCurVolume[stream] = volume;
LOGV("setStreamVolume() for output %d stream %d, volume %f, delay %d", output, stream, volume, delayMs);
if (stream == AudioSystem::VOICE_CALL ||
stream == AudioSystem::DTMF ||
stream == AudioSystem::BLUETOOTH_SCO) {
- float voiceVolume = -1.0;
// offset value to reflect actual hardware volume that never reaches 0
// 1% corresponds roughly to first step in VOICE_CALL stream volume setting (see AudioService.java)
volume = 0.01 + 0.99 * volume;
- if (stream == AudioSystem::VOICE_CALL) {
- voiceVolume = (float)index/(float)mStreams[stream].mIndexMax;
- } else if (stream == AudioSystem::BLUETOOTH_SCO) {
- voiceVolume = 1.0;
- }
- if (voiceVolume >= 0 && output == mHardwareOutput) {
- mpClientInterface->setVoiceVolume(voiceVolume, delayMs);
- }
}
mpClientInterface->setStreamVolume((AudioSystem::stream_type)stream, volume, output, delayMs);
}
+ if (stream == AudioSystem::VOICE_CALL ||
+ stream == AudioSystem::BLUETOOTH_SCO) {
+ float voiceVolume;
+ // Force voice volume to max for bluetooth SCO as volume is managed by the headset
+ if (stream == AudioSystem::VOICE_CALL) {
+ voiceVolume = (float)index/(float)mStreams[stream].mIndexMax;
+ } else {
+ voiceVolume = 1.0;
+ }
+ if (voiceVolume != mLastVoiceVolume && output == mHardwareOutput) {
+ mpClientInterface->setVoiceVolume(voiceVolume, delayMs);
+ mLastVoiceVolume = voiceVolume;
+ }
+ }
+
return NO_ERROR;
}
@@ -1950,6 +1980,16 @@
}
}
+bool AudioPolicyManagerBase::isInCall()
+{
+ return isStateInCall(mPhoneState);
+}
+
+bool AudioPolicyManagerBase::isStateInCall(int state) {
+ return ((state == AudioSystem::MODE_IN_CALL) ||
+ (state == AudioSystem::MODE_IN_COMMUNICATION));
+}
+
bool AudioPolicyManagerBase::needsDirectOuput(AudioSystem::stream_type stream,
uint32_t samplingRate,
uint32_t format,
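
The helpers added above replace every direct mPhoneState == MODE_IN_CALL comparison so that MODE_IN_COMMUNICATION (VoIP) is treated as a call in one place. Below is a standalone sketch of the same predicate pair; the enum values are illustrative, not the real AudioSystem constants.

// Standalone sketch of the isInCall()/isStateInCall() refactor above.
#include <cstdio>

enum PhoneState { MODE_NORMAL, MODE_RINGTONE, MODE_IN_CALL, MODE_IN_COMMUNICATION };

static bool isStateInCall(int state) {
    return state == MODE_IN_CALL || state == MODE_IN_COMMUNICATION;
}

struct PolicyManager {
    int mPhoneState = MODE_NORMAL;
    bool isInCall() const { return isStateInCall(mPhoneState); }
};

int main() {
    PolicyManager pm;
    pm.mPhoneState = MODE_IN_COMMUNICATION;
    std::printf("in call: %d\n", pm.isInCall());   // VoIP now counts as a call
    return 0;
}
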
diff --git a/services/camera/libcameraservice/CameraHardwareStub.cpp b/services/camera/libcameraservice/CameraHardwareStub.cpp
index b3e0ee6..07b5a37 100644
--- a/services/camera/libcameraservice/CameraHardwareStub.cpp
+++ b/services/camera/libcameraservice/CameraHardwareStub.cpp
@@ -101,9 +101,9 @@
mFakeCamera = 0; // paranoia
}
-sp<IMemoryHeap> CameraHardwareStub::getPreviewHeap() const
+status_t CameraHardwareStub::setPreviewWindow(const sp<ANativeWindow>& buf)
{
- return mPreviewHeap;
+ return NO_ERROR;
}
sp<IMemoryHeap> CameraHardwareStub::getRawHeap() const
diff --git a/services/camera/libcameraservice/CameraHardwareStub.h b/services/camera/libcameraservice/CameraHardwareStub.h
index d3427ba..9b66a76 100644
--- a/services/camera/libcameraservice/CameraHardwareStub.h
+++ b/services/camera/libcameraservice/CameraHardwareStub.h
@@ -29,7 +29,7 @@
class CameraHardwareStub : public CameraHardwareInterface {
public:
- virtual sp<IMemoryHeap> getPreviewHeap() const;
+ virtual status_t setPreviewWindow(const sp<ANativeWindow>& buf);
virtual sp<IMemoryHeap> getRawHeap() const;
virtual void setCallbacks(notify_callback notify_cb,
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index a64ddcf..535f07f 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -26,6 +26,7 @@
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <cutils/atomic.h>
+#include <cutils/properties.h>
#include <hardware/hardware.h>
#include <media/AudioSystem.h>
#include <media/mediaplayer.h>
@@ -307,7 +308,6 @@
mClientPid = clientPid;
mUseOverlay = mHardware->useOverlay();
mMsgEnabled = 0;
-
mHardware->setCallbacks(notifyCallback,
dataCallback,
dataCallbackTimestamp,
@@ -324,6 +324,7 @@
mPreviewCallbackFlag = FRAME_CALLBACK_FLAG_NOOP;
mOrientation = getOrientation(0, mCameraFacing == CAMERA_FACING_FRONT);
mOrientationChanged = false;
+ mPlayShutterSound = true;
cameraService->setCameraBusy(cameraId);
cameraService->loadSound();
LOG1("Client::Client X (pid %d)", callingPid);
@@ -341,18 +342,6 @@
int callingPid = getCallingPid();
LOG1("Client::~Client E (pid %d, this %p)", callingPid, this);
- if (mSurface != 0 && !mUseOverlay) {
- pthread_t thr;
- // We unregister the buffers in a different thread because binder does
- // not let us make synchronous transactions in a binder destructor (that
- // is, upon our reaching a refcount of zero.)
- pthread_create(&thr,
- NULL, // attr
- unregister_surface,
- mSurface.get());
- pthread_join(thr, NULL);
- }
-
// set mClientPid to let disconnect() tear down the hardware
mClientPid = callingPid;
disconnect();
@@ -470,6 +459,11 @@
if (mUseOverlay) {
mOverlayRef = 0;
}
+ // Release the held ANativeWindow resources.
+ if (mPreviewWindow != 0) {
+ mPreviewWindow = 0;
+ mHardware->setPreviewWindow(mPreviewWindow);
+ }
mHardware.clear();
mCameraService->removeClient(mCameraClient);
@@ -480,8 +474,8 @@
// ----------------------------------------------------------------------------
-// set the ISurface that the preview will use
-status_t CameraService::Client::setPreviewDisplay(const sp<ISurface>& surface) {
+// set the Surface that the preview will use
+status_t CameraService::Client::setPreviewDisplay(const sp<Surface>& surface) {
LOG1("setPreviewDisplay(%p) (pid %d)", surface.get(), getCallingPid());
Mutex::Autolock lock(mLock);
status_t result = checkPidAndHardware();
@@ -491,7 +485,7 @@
// return if no change in surface.
// asBinder() is safe on NULL (returns NULL)
- if (surface->asBinder() == mSurface->asBinder()) {
+ if (getISurface(surface)->asBinder() == mSurface->asBinder()) {
return result;
}
@@ -502,44 +496,30 @@
sp<Overlay> dummy;
mHardware->setOverlay(dummy);
mOverlayRef = 0;
- } else {
- mSurface->unregisterBuffers();
}
}
- mSurface = surface;
+ if (surface != 0) {
+ mSurface = getISurface(surface);
+ } else {
+ mSurface = 0;
+ }
+ mPreviewWindow = surface;
mOverlayRef = 0;
// If preview has been already started, set overlay or register preview
// buffers now.
if (mHardware->previewEnabled()) {
if (mUseOverlay) {
result = setOverlay();
- } else if (mSurface != 0) {
- result = registerPreviewBuffers();
+ } else if (mPreviewWindow != 0) {
+ native_window_set_buffers_transform(mPreviewWindow.get(),
+ mOrientation);
+ result = mHardware->setPreviewWindow(mPreviewWindow);
}
}
return result;
}
-status_t CameraService::Client::registerPreviewBuffers() {
- int w, h;
- CameraParameters params(mHardware->getParameters());
- params.getPreviewSize(&w, &h);
-
- // FIXME: don't use a hardcoded format here.
- ISurface::BufferHeap buffers(w, h, w, h,
- HAL_PIXEL_FORMAT_YCrCb_420_SP,
- mOrientation,
- 0,
- mHardware->getPreviewHeap());
-
- status_t result = mSurface->registerBuffers(buffers);
- if (result != NO_ERROR) {
- LOGE("registerBuffers failed with status %d", result);
- }
- return result;
-}
-
status_t CameraService::Client::setOverlay() {
int w, h;
CameraParameters params(mHardware->getParameters());
@@ -597,16 +577,10 @@
if (checkPidAndHardware() != NO_ERROR) return;
mPreviewCallbackFlag = callback_flag;
-
- // If we don't use overlay, we always need the preview frame for display.
- // If we do use overlay, we only need the preview frame if the user
- // wants the data.
- if (mUseOverlay) {
- if(mPreviewCallbackFlag & FRAME_CALLBACK_FLAG_ENABLE_MASK) {
- enableMsgType(CAMERA_MSG_PREVIEW_FRAME);
- } else {
- disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
- }
+ if (mPreviewCallbackFlag & FRAME_CALLBACK_FLAG_ENABLE_MASK) {
+ enableMsgType(CAMERA_MSG_PREVIEW_FRAME);
+ } else {
+ disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
}
}
@@ -631,14 +605,14 @@
switch(mode) {
case CAMERA_PREVIEW_MODE:
- if (mSurface == 0) {
+ if (mSurface == 0 && mPreviewWindow == 0) {
LOG1("mSurface is not set yet.");
// still able to start preview in this case.
}
return startPreviewMode();
case CAMERA_RECORDING_MODE:
- if (mSurface == 0) {
- LOGE("mSurface must be set before startRecordingMode.");
+ if (mSurface == 0 && mPreviewWindow == 0) {
+ LOGE("mSurface or mPreviewWindow must be set before startRecordingMode.");
return INVALID_OPERATION;
}
return startRecordingMode();
@@ -664,16 +638,12 @@
if (result != NO_ERROR) return result;
result = mHardware->startPreview();
} else {
- enableMsgType(CAMERA_MSG_PREVIEW_FRAME);
- result = mHardware->startPreview();
- if (result != NO_ERROR) return result;
- // If preview display has been set, register preview buffers now.
- if (mSurface != 0) {
- // Unregister here because the surface may be previously registered
- // with the raw (snapshot) heap.
- mSurface->unregisterBuffers();
- result = registerPreviewBuffers();
+ if (mPreviewWindow != 0) {
+ native_window_set_buffers_transform(mPreviewWindow.get(),
+ mOrientation);
}
+ mHardware->setPreviewWindow(mPreviewWindow);
+ result = mHardware->startPreview();
}
return result;
}
@@ -711,13 +681,10 @@
Mutex::Autolock lock(mLock);
if (checkPidAndHardware() != NO_ERROR) return;
+
disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
mHardware->stopPreview();
- if (mSurface != 0 && !mUseOverlay) {
- mSurface->unregisterBuffers();
- }
-
mPreviewBuffer.clear();
}
@@ -741,6 +708,30 @@
mHardware->releaseRecordingFrame(mem);
}
+int32_t CameraService::Client::getNumberOfVideoBuffers() const {
+ LOG1("getNumberOfVideoBuffers");
+ Mutex::Autolock lock(mLock);
+ if (checkPidAndHardware() != NO_ERROR) return 0;
+ return mHardware->getNumberOfVideoBuffers();
+}
+
+sp<IMemory> CameraService::Client::getVideoBuffer(int32_t index) const {
+ LOG1("getVideoBuffer: %d", index);
+ Mutex::Autolock lock(mLock);
+ if (checkPidAndHardware() != NO_ERROR) return 0;
+ return mHardware->getVideoBuffer(index);
+}
+
+status_t CameraService::Client::storeMetaDataInBuffers(bool enabled)
+{
+ LOG1("storeMetaDataInBuffers: %s", enabled? "true": "false");
+ Mutex::Autolock lock(mLock);
+ if (checkPidAndHardware() != NO_ERROR) {
+ return UNKNOWN_ERROR;
+ }
+ return mHardware->storeMetaDataInBuffers(enabled);
+}
+
bool CameraService::Client::previewEnabled() {
LOG1("previewEnabled (pid %d)", getCallingPid());
@@ -815,6 +806,35 @@
return params;
}
+// enable shutter sound
+status_t CameraService::Client::enableShutterSound(bool enable) {
+ LOG1("enableShutterSound (pid %d)", getCallingPid());
+
+ status_t result = checkPidAndHardware();
+ if (result != NO_ERROR) return result;
+
+ if (enable) {
+ mPlayShutterSound = true;
+ return OK;
+ }
+
+ // Disabling shutter sound may not be allowed. In that case only
+ // allow the mediaserver process to disable the sound.
+ char value[PROPERTY_VALUE_MAX];
+ property_get("ro.camera.sound.forced", value, "0");
+ if (strcmp(value, "0") != 0) {
+ // Disabling shutter sound is not allowed. Deny if the current
+ // process is not mediaserver.
+ if (getCallingPid() != getpid()) {
+ LOGE("Failed to disable shutter sound. Permission denied (pid %d)", getCallingPid());
+ return PERMISSION_DENIED;
+ }
+ }
+
+ mPlayShutterSound = false;
+ return OK;
+}
+
status_t CameraService::Client::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) {
LOG1("sendCommand (pid %d)", getCallingPid());
int orientation;
@@ -836,6 +856,20 @@
if (mOverlayRef != 0) mOrientationChanged = true;
}
return OK;
+ } else if (cmd == CAMERA_CMD_ENABLE_SHUTTER_SOUND) {
+ switch (arg1) {
+ case 0:
+ enableShutterSound(false);
+ break;
+ case 1:
+ enableShutterSound(true);
+ break;
+ default:
+ return BAD_VALUE;
+ }
+ return OK;
+ } else if (cmd == CAMERA_CMD_PLAY_RECORDING_SOUND) {
+ mCameraService->playSound(SOUND_RECORDING);
}
return mHardware->sendCommand(cmd, arg1, arg2);
@@ -996,11 +1030,8 @@
// "size" is the width and height of yuv picture for registerBuffer.
// If it is NULL, use the picture size from parameters.
void CameraService::Client::handleShutter(image_rect_type *size) {
- mCameraService->playSound(SOUND_SHUTTER);
-
- // Screen goes black after the buffer is unregistered.
- if (mSurface != 0 && !mUseOverlay) {
- mSurface->unregisterBuffers();
+ if (mPlayShutterSound) {
+ mCameraService->playSound(SOUND_SHUTTER);
}
sp<ICameraClient> c = mCameraClient;
@@ -1030,7 +1061,6 @@
HAL_PIXEL_FORMAT_YCrCb_420_SP, mOrientation, 0,
mHardware->getRawHeap());
- mSurface->registerBuffers(buffers);
IPCThreadState::self()->flushCommands();
}
@@ -1043,12 +1073,6 @@
size_t size;
sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
- if (!mUseOverlay) {
- if (mSurface != 0) {
- mSurface->postBuffer(offset);
- }
- }
-
// local copy of the callback flags
int flags = mPreviewCallbackFlag;
@@ -1069,9 +1093,7 @@
mPreviewCallbackFlag &= ~(FRAME_CALLBACK_FLAG_ONE_SHOT_MASK |
FRAME_CALLBACK_FLAG_COPY_OUT_MASK |
FRAME_CALLBACK_FLAG_ENABLE_MASK);
- if (mUseOverlay) {
- disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
- }
+ disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
}
if (c != 0) {
@@ -1108,11 +1130,6 @@
size_t size;
sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
- // Put the YUV version of the snapshot in the preview display.
- if (mSurface != 0 && !mUseOverlay) {
- mSurface->postBuffer(offset);
- }
-
sp<ICameraClient> c = mCameraClient;
mLock.unlock();
if (c != 0) {
@@ -1292,4 +1309,12 @@
return NO_ERROR;
}
+sp<ISurface> CameraService::getISurface(const sp<Surface>& surface) {
+ if (surface != 0) {
+ return surface->getISurface();
+ } else {
+ return sp<ISurface>(0);
+ }
+}
+
}; // namespace android
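
enableShutterSound() above only lets the mediaserver process silence the shutter when the ro.camera.sound.forced property is set. Below is a standalone sketch of that gate; std::getenv() and the callerIsService flag stand in for property_get() and the getCallingPid() == getpid() check, and are not the real APIs.

// Standalone sketch of the shutter-sound gating logic above.
#include <cstdio>
#include <cstdlib>
#include <cstring>

enum Status { OK = 0, PERMISSION_DENIED = -1 };

static Status enableShutterSound(bool enable, bool callerIsService, bool& playSound) {
    if (enable) {               // enabling the sound is always allowed
        playSound = true;
        return OK;
    }
    const char* forced = std::getenv("CAMERA_SOUND_FORCED");
    if (forced && std::strcmp(forced, "0") != 0) {
        // Sound is forced on: only the service itself may silence it.
        if (!callerIsService) return PERMISSION_DENIED;
    }
    playSound = false;
    return OK;
}

int main() {
    bool playSound = true;
    Status s = enableShutterSound(false, /*callerIsService=*/false, playSound);
    std::printf("status %d, playSound %d\n", s, playSound);
    return 0;
}
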
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index f09773d..60e0d04 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -79,6 +79,12 @@
sp<MediaPlayer> mSoundPlayer[NUM_SOUNDS];
int mSoundRef; // reference count (release all MediaPlayer when 0)
+ // Used by Client objects to extract the ISurface from a Surface object.
+ // This is used because making Client a friend class of Surface would
+ // require including this header in Surface.h since Client is a nested
+ // class.
+ static sp<ISurface> getISurface(const sp<Surface>& surface);
+
class Client : public BnCamera
{
public:
@@ -87,11 +93,14 @@
virtual status_t connect(const sp<ICameraClient>& client);
virtual status_t lock();
virtual status_t unlock();
- virtual status_t setPreviewDisplay(const sp<ISurface>& surface);
+ virtual status_t setPreviewDisplay(const sp<Surface>& surface);
virtual void setPreviewCallbackFlag(int flag);
virtual status_t startPreview();
virtual void stopPreview();
virtual bool previewEnabled();
+ virtual int32_t getNumberOfVideoBuffers() const;
+ virtual sp<IMemory> getVideoBuffer(int32_t index) const;
+ virtual status_t storeMetaDataInBuffers(bool enabled);
virtual status_t startRecording();
virtual void stopRecording();
virtual bool recordingEnabled();
@@ -133,6 +142,9 @@
status_t startPreviewMode();
status_t startRecordingMode();
+ // internal function used by sendCommand to enable/disable shutter sound.
+ status_t enableShutterSound(bool enable);
+
// these are static callback functions
static void notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2, void* user);
static void dataCallback(int32_t msgType, const sp<IMemory>& dataPtr, void* user);
@@ -171,10 +183,12 @@
int mOrientation; // Current display orientation
// True if display orientation has been changed. This is only used in overlay.
int mOrientationChanged;
+ bool mPlayShutterSound;
// Ensures atomicity among the public methods
mutable Mutex mLock;
sp<ISurface> mSurface;
+ sp<ANativeWindow> mPreviewWindow;
// If the user want us to return a copy of the preview frame (instead
// of the original one), we allocate mPreviewBuffer and reuse it if possible.