resolved conflicts for merge of 566be7c3 to master

Change-Id: I7b1cc71057b2bd4f771e7bcf508a8c3abd6017ce
diff --git a/camera/CameraMetadata.cpp b/camera/CameraMetadata.cpp
index 7765914..6b726e0 100644
--- a/camera/CameraMetadata.cpp
+++ b/camera/CameraMetadata.cpp
@@ -25,6 +25,9 @@
 
 namespace android {
 
+#define ALIGN_TO(val, alignment) \
+    (((uintptr_t)(val) + ((alignment) - 1)) & ~((alignment) - 1))
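+// Rounds val up to the next multiple of alignment; alignment must be a power of two.
+// For example, with an 8-byte alignment, ALIGN_TO(13, 8) == 16 and ALIGN_TO(16, 8) == 16.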
+
 typedef Parcel::WritableBlob WritableBlob;
 typedef Parcel::ReadableBlob ReadableBlob;
 
@@ -431,40 +434,70 @@
         *out = NULL;
     }
 
-    // arg0 = metadataSize (int32)
-    int32_t metadataSizeTmp = -1;
-    if ((err = data.readInt32(&metadataSizeTmp)) != OK) {
+    // See CameraMetadata::writeToParcel for parcel data layout diagram and explanation.
+    // arg0 = blobSize (int32)
+    int32_t blobSizeTmp = -1;
+    if ((err = data.readInt32(&blobSizeTmp)) != OK) {
         ALOGE("%s: Failed to read metadata size (error %d %s)",
               __FUNCTION__, err, strerror(-err));
         return err;
     }
-    const size_t metadataSize = static_cast<size_t>(metadataSizeTmp);
+    const size_t blobSize = static_cast<size_t>(blobSizeTmp);
+    const size_t alignment = get_camera_metadata_alignment();
 
-    if (metadataSize == 0) {
+    // Special case: zero blob size means zero-sized (NULL) metadata.
+    if (blobSize == 0) {
         ALOGV("%s: Read 0-sized metadata", __FUNCTION__);
         return OK;
     }
 
-    // NOTE: this doesn't make sense to me. shouldnt the blob
+    if (blobSize <= alignment) {
+        ALOGE("%s: metadata blob is malformed, blobSize(%zu) should be larger than alignment(%zu)",
+                __FUNCTION__, blobSize, alignment);
+        return BAD_VALUE;
+    }
+
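+    // writeToParcel always pads the blob with exactly 'alignment' extra bytes
+    // (front padding + back padding, see the blob layout diagram there), so the
+    // compact metadata itself occupies blobSize - alignment bytes.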
+    const size_t metadataSize = blobSize - alignment;
+
+    // NOTE: this doesn't make sense to me. shouldn't the blob
     // know how big it is? why do we have to specify the size
     // to Parcel::readBlob ?
-
     ReadableBlob blob;
     // arg1 = metadata (blob)
     do {
-        if ((err = data.readBlob(metadataSize, &blob)) != OK) {
-            ALOGE("%s: Failed to read metadata blob (sized %d). Possible "
+        if ((err = data.readBlob(blobSize, &blob)) != OK) {
+            ALOGE("%s: Failed to read metadata blob (sized %zu). Possible "
                   " serialization bug. Error %d %s",
-                  __FUNCTION__, metadataSize, err, strerror(-err));
+                  __FUNCTION__, blobSize, err, strerror(-err));
             break;
         }
-        const camera_metadata_t* tmp =
-                       reinterpret_cast<const camera_metadata_t*>(blob.data());
 
+        // arg2 = offset (int32)
+        // Must come after the blob since the offset isn't known until after writeBlob.
+        int32_t offsetTmp;
+        if ((err = data.readInt32(&offsetTmp)) != OK) {
+            ALOGE("%s: Failed to read metadata offsetTmp (error %d %s)",
+                  __FUNCTION__, err, strerror(-err));
+            break;
+        }
+        const size_t offset = static_cast<size_t>(offsetTmp);
+        if (offset >= alignment) {
+            ALOGE("%s: metadata offset(%zu) should be less than alignment(%zu)",
+                    __FUNCTION__, offset, alignment);
+            err = BAD_VALUE;
+            break;
+        }
+
+        const uintptr_t metadataStart = reinterpret_cast<uintptr_t>(blob.data()) + offset;
+        const camera_metadata_t* tmp =
+                       reinterpret_cast<const camera_metadata_t*>(metadataStart);
+        ALOGV("%s: alignment is: %zu, metadata start: %p, offset: %zu",
+                __FUNCTION__, alignment, tmp, offset);
         metadata = allocate_copy_camera_metadata_checked(tmp, metadataSize);
         if (metadata == NULL) {
             // We consider that allocation only fails if the validation
             // also failed, therefore the readFromParcel was a failure.
+            ALOGE("%s: metadata allocation and copy failed", __FUNCTION__);
             err = BAD_VALUE;
         }
     } while(0);
@@ -485,38 +518,79 @@
                                        const camera_metadata_t* metadata) {
     status_t res = OK;
 
-    // arg0 = metadataSize (int32)
+    /**
+     * Below is the camera metadata parcel layout:
+     *
+     * |--------------------------------------------|
+     * |             arg0: blobSize                 |
+     * |              (length = 4)                  |
+     * |--------------------------------------------|<--Skip the rest if blobSize == 0.
+     * |                                            |
+     * |                                            |
+     * |              arg1: blob                    |
+     * | (length = variable, see arg1 layout below) |
+     * |                                            |
+     * |                                            |
+     * |--------------------------------------------|
+     * |              arg2: offset                  |
+     * |              (length = 4)                  |
+     * |--------------------------------------------|
+     */
 
+    // arg0 = blobSize (int32)
     if (metadata == NULL) {
+        // Write zero blobSize for null metadata.
         return data.writeInt32(0);
     }
 
+    /**
+     * Always make the blob size sufficiently large, as we need to fit both
+     * the alignment padding and the metadata into the blob. We don't know the
+     * alignment offset until after writeBlob, so the metadata is copied to the
+     * aligned offset inside the blob afterwards.
+     */
     const size_t metadataSize = get_camera_metadata_compact_size(metadata);
-    res = data.writeInt32(static_cast<int32_t>(metadataSize));
+    const size_t alignment = get_camera_metadata_alignment();
+    const size_t blobSize = metadataSize + alignment;
+    res = data.writeInt32(static_cast<int32_t>(blobSize));
     if (res != OK) {
         return res;
     }
 
-    // arg1 = metadata (blob)
+    size_t offset = 0;
+    /**
+     * arg1 = metadata (blob).
+     *
+     * The blob size is the sum of front padding size, metadata size and back padding
+     * size, which is equal to metadataSize + alignment.
+     *
+     * The blob layout is:
+     * |------------------------------------|<----Start address of the blob (unaligned).
+     * |           front padding            |
+     * |          (size = offset)           |
+     * |------------------------------------|<----Aligned start address of metadata.
+     * |                                    |
+     * |                                    |
+     * |            metadata                |
+     * |       (size = metadataSize)        |
+     * |                                    |
+     * |                                    |
+     * |------------------------------------|
+     * |           back padding             |
+     * |     (size = alignment - offset)    |
+     * |------------------------------------|<----End address of blob.
+     *                                            (Blob start address + blob size).
+     */
     WritableBlob blob;
     do {
-        res = data.writeBlob(metadataSize, &blob);
+        res = data.writeBlob(blobSize, &blob);
         if (res != OK) {
             break;
         }
-        copy_camera_metadata(blob.data(), metadataSize, metadata);
-
-        IF_ALOGV() {
-            if (validate_camera_metadata_structure(
-                        (const camera_metadata_t*)blob.data(),
-                        &metadataSize) != OK) {
-                ALOGV("%s: Failed to validate metadata %p after writing blob",
-                       __FUNCTION__, blob.data());
-            } else {
-                ALOGV("%s: Metadata written to blob. Validation success",
-                        __FUNCTION__);
-            }
-        }
+        const uintptr_t metadataStart = ALIGN_TO(blob.data(), alignment);
+        offset = metadataStart - reinterpret_cast<uintptr_t>(blob.data());
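+        // offset is always in [0, alignment), which is the range readFromParcel accepts.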
+        ALOGV("%s: alignment is: %zu, metadata start: %p, offset: %zu",
+                __FUNCTION__, alignment, reinterpret_cast<void*>(metadataStart), offset);
+        copy_camera_metadata(reinterpret_cast<void*>(metadataStart), metadataSize, metadata);
 
         // Not too big of a problem since receiving side does hard validation
         // Don't check the size since the compact size could be larger
@@ -528,6 +602,9 @@
     } while(false);
     blob.release();
 
+    // arg2 = offset (int32)
+    res = data.writeInt32(static_cast<int32_t>(offset));
+
     return res;
 }
 
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index 61f83e3..b6f150c 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -45,6 +45,7 @@
 #include <signal.h>
 #include <getopt.h>
 #include <sys/wait.h>
+#include <termios.h>
 #include <assert.h>
 
 #include "screenrecord.h"
@@ -61,6 +62,7 @@
 // Command-line parameters.
 static bool gVerbose = false;           // chatty on stdout
 static bool gRotate = false;            // rotate 90 degrees
+static bool gRawOutput = false;         // generate raw H.264 byte stream output
 static bool gSizeSpecified = false;     // was size explicitly requested?
 static bool gWantInfoScreen = false;    // do we want initial info screen?
 static bool gWantFrameTime = false;     // do we want times on each frame?
@@ -298,10 +300,12 @@
  * input frames are coming from the virtual display as fast as SurfaceFlinger
  * wants to send them.
  *
+ * Exactly one of muxer or rawFp must be non-null.
+ *
  * The muxer must *not* have been started before calling.
  */
 static status_t runEncoder(const sp<MediaCodec>& encoder,
-        const sp<MediaMuxer>& muxer, const sp<IBinder>& mainDpy,
+        const sp<MediaMuxer>& muxer, FILE* rawFp, const sp<IBinder>& mainDpy,
         const sp<IBinder>& virtualDpy, uint8_t orientation) {
     static int kTimeout = 250000;   // be responsive on signal
     status_t err;
@@ -311,6 +315,8 @@
     int64_t endWhenNsec = startWhenNsec + seconds_to_nanoseconds(gTimeLimitSec);
     DisplayInfo mainDpyInfo;
 
+    assert((rawFp == NULL && muxer != NULL) || (rawFp != NULL && muxer == NULL));
+
     Vector<sp<ABuffer> > buffers;
     err = encoder->getOutputBuffers(&buffers);
     if (err != NO_ERROR) {
@@ -342,15 +348,16 @@
         case NO_ERROR:
             // got a buffer
             if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) != 0) {
-                // ignore this -- we passed the CSD into MediaMuxer when
-                // we got the format change notification
-                ALOGV("Got codec config buffer (%u bytes); ignoring", size);
-                size = 0;
+                ALOGV("Got codec config buffer (%u bytes)", size);
+                if (muxer != NULL) {
+                    // ignore this -- we passed the CSD into MediaMuxer when
+                    // we got the format change notification
+                    size = 0;
+                }
             }
             if (size != 0) {
                 ALOGV("Got data in buffer %d, size=%d, pts=%lld",
                         bufIndex, size, ptsUsec);
-                assert(trackIdx != -1);
 
                 { // scope
                     ATRACE_NAME("orientation");
@@ -379,14 +386,23 @@
                     ptsUsec = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;
                 }
 
-                // The MediaMuxer docs are unclear, but it appears that we
-                // need to pass either the full set of BufferInfo flags, or
-                // (flags & BUFFER_FLAG_SYNCFRAME).
-                //
-                // If this blocks for too long we could drop frames.  We may
-                // want to queue these up and do them on a different thread.
-                { // scope
+                if (muxer == NULL) {
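+                    // Raw mode: write the encoded buffer (including the codec
+                    // config data) straight to the output instead of handing
+                    // it to a muxer.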
+                    fwrite(buffers[bufIndex]->data(), 1, size, rawFp);
+                    // Flush the data immediately in case we're streaming.
+                    // We don't want to do this if all we've written is
+                    // the SPS/PPS data because mplayer gets confused.
+                    if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0) {
+                        fflush(rawFp);
+                    }
+                } else {
+                    // The MediaMuxer docs are unclear, but it appears that we
+                    // need to pass either the full set of BufferInfo flags, or
+                    // (flags & BUFFER_FLAG_SYNCFRAME).
+                    //
+                    // If this blocks for too long we could drop frames.  We may
+                    // want to queue these up and do them on a different thread.
                     ATRACE_NAME("write sample");
+                    assert(trackIdx != -1);
                     err = muxer->writeSampleData(buffers[bufIndex], trackIdx,
                             ptsUsec, flags);
                     if (err != NO_ERROR) {
@@ -418,12 +434,14 @@
                 ALOGV("Encoder format changed");
                 sp<AMessage> newFormat;
                 encoder->getOutputFormat(&newFormat);
-                trackIdx = muxer->addTrack(newFormat);
-                ALOGV("Starting muxer");
-                err = muxer->start();
-                if (err != NO_ERROR) {
-                    fprintf(stderr, "Unable to start muxer (err=%d)\n", err);
-                    return err;
+                if (muxer != NULL) {
+                    trackIdx = muxer->addTrack(newFormat);
+                    ALOGV("Starting muxer");
+                    err = muxer->start();
+                    if (err != NO_ERROR) {
+                        fprintf(stderr, "Unable to start muxer (err=%d)\n", err);
+                        return err;
+                    }
                 }
             }
             break;
@@ -457,6 +475,44 @@
 }
 
 /*
+ * Raw H.264 byte stream output requested.  Send the output to stdout
+ * if desired.  If the output is a tty, reconfigure it to avoid the
+ * CRLF line termination that we see with "adb shell" commands.
+ */
+static FILE* prepareRawOutput(const char* fileName) {
+    FILE* rawFp = NULL;
+
+    if (strcmp(fileName, "-") == 0) {
+        if (gVerbose) {
+            fprintf(stderr, "ERROR: verbose output and '-' not compatible\n");
+            return NULL;
+        }
+        rawFp = stdout;
+    } else {
+        rawFp = fopen(fileName, "w");
+        if (rawFp == NULL) {
+            fprintf(stderr, "fopen raw failed: %s\n", strerror(errno));
+            return NULL;
+        }
+    }
+
+    int fd = fileno(rawFp);
+    if (isatty(fd)) {
+        // best effort -- reconfigure tty for "raw"
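+        // Without this, output post-processing on the tty would translate LF
+        // to CRLF and corrupt the binary byte stream (see the CRLF note in the
+        // function comment above).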
+        ALOGD("raw video output to tty (fd=%d)", fd);
+        struct termios term;
+        if (tcgetattr(fd, &term) == 0) {
+            cfmakeraw(&term);
+            if (tcsetattr(fd, TCSANOW, &term) == 0) {
+                ALOGD("tty successfully configured for raw");
+            }
+        }
+    }
+
+    return rawFp;
+}
+
+/*
  * Main "do work" method.
  *
  * Configures codec, muxer, and virtual display, then starts moving bits
@@ -558,16 +614,26 @@
         return err;
     }
 
-    // Configure muxer.  We have to wait for the CSD blob from the encoder
-    // before we can start it.
-    sp<MediaMuxer> muxer = new MediaMuxer(fileName,
-            MediaMuxer::OUTPUT_FORMAT_MPEG_4);
-    if (gRotate) {
-        muxer->setOrientationHint(90);  // TODO: does this do anything?
+    sp<MediaMuxer> muxer = NULL;
+    FILE* rawFp = NULL;
+    if (gRawOutput) {
+        rawFp = prepareRawOutput(fileName);
+        if (rawFp == NULL) {
+            encoder->release();
+            return -1;
+        }
+    } else {
+        // Configure muxer.  We have to wait for the CSD blob from the encoder
+        // before we can start it.
+        muxer = new MediaMuxer(fileName, MediaMuxer::OUTPUT_FORMAT_MPEG_4);
+        if (gRotate) {
+            muxer->setOrientationHint(90);  // TODO: does this do anything?
+        }
     }
 
     // Main encoder loop.
-    err = runEncoder(encoder, muxer, mainDpy, dpy, mainDpyInfo.orientation);
+    err = runEncoder(encoder, muxer, rawFp, mainDpy, dpy,
+            mainDpyInfo.orientation);
     if (err != NO_ERROR) {
         fprintf(stderr, "Encoder failed (err=%d)\n", err);
         // fall through to cleanup
@@ -584,9 +650,13 @@
         overlay->stop();
     }
     encoder->stop();
-    // If we don't stop muxer explicitly, i.e. let the destructor run,
-    // it may hang (b/11050628).
-    muxer->stop();
+    if (muxer != NULL) {
+        // If we don't stop muxer explicitly, i.e. let the destructor run,
+        // it may hang (b/11050628).
+        muxer->stop();
+    } else if (rawFp != stdout) {
+        fclose(rawFp);
+    }
     encoder->release();
 
     return err;
@@ -753,6 +823,7 @@
         { "show-frame-time",    no_argument,        NULL, 'f' },
         { "bugreport",          no_argument,        NULL, 'u' },
         { "rotate",             no_argument,        NULL, 'r' },
+        { "raw",                no_argument,        NULL, 'w' },
         { NULL,                 0,                  NULL, 0 }
     };
 
@@ -818,6 +889,10 @@
             // experimental feature
             gRotate = true;
             break;
+        case 'w':
+            // experimental feature
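+            // e.g. "screenrecord --raw - > video.h264" writes the raw H.264
+            // byte stream to stdout (see prepareRawOutput()).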
+            gRawOutput = true;
+            break;
         default:
             if (ic != '?') {
                 fprintf(stderr, "getopt_long returned unexpected value 0x%x\n", ic);
@@ -831,17 +906,19 @@
         return 2;
     }
 
-    // MediaMuxer tries to create the file in the constructor, but we don't
-    // learn about the failure until muxer.start(), which returns a generic
-    // error code without logging anything.  We attempt to create the file
-    // now for better diagnostics.
     const char* fileName = argv[optind];
-    int fd = open(fileName, O_CREAT | O_RDWR, 0644);
-    if (fd < 0) {
-        fprintf(stderr, "Unable to open '%s': %s\n", fileName, strerror(errno));
-        return 1;
+    if (!gRawOutput) {
+        // MediaMuxer tries to create the file in the constructor, but we don't
+        // learn about the failure until muxer.start(), which returns a generic
+        // error code without logging anything.  We attempt to create the file
+        // now for better diagnostics.
+        int fd = open(fileName, O_CREAT | O_RDWR, 0644);
+        if (fd < 0) {
+            fprintf(stderr, "Unable to open '%s': %s\n", fileName, strerror(errno));
+            return 1;
+        }
+        close(fd);
     }
-    close(fd);
 
     status_t err = recordScreen(fileName);
     if (err == NO_ERROR) {
diff --git a/cmds/screenrecord/screenrecord.h b/cmds/screenrecord/screenrecord.h
index 95e8a68..9b058c2 100644
--- a/cmds/screenrecord/screenrecord.h
+++ b/cmds/screenrecord/screenrecord.h
@@ -18,6 +18,6 @@
 #define SCREENRECORD_SCREENRECORD_H
 
 #define kVersionMajor 1
-#define kVersionMinor 1
+#define kVersionMinor 2
 
 #endif /*SCREENRECORD_SCREENRECORD_H*/
diff --git a/cmds/stagefright/SimplePlayer.cpp b/cmds/stagefright/SimplePlayer.cpp
index 5d2d721..1b2f792 100644
--- a/cmds/stagefright/SimplePlayer.cpp
+++ b/cmds/stagefright/SimplePlayer.cpp
@@ -23,6 +23,7 @@
 #include <gui/Surface.h>
 #include <media/AudioTrack.h>
 #include <media/ICrypto.h>
+#include <media/IMediaHTTPService.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
@@ -275,7 +276,8 @@
 
     mExtractor = new NuMediaExtractor;
 
-    status_t err = mExtractor->setDataSource(mPath.c_str());
+    status_t err = mExtractor->setDataSource(
+            NULL /* httpService */, mPath.c_str());
 
     if (err != OK) {
         mExtractor.clear();
diff --git a/cmds/stagefright/codec.cpp b/cmds/stagefright/codec.cpp
index d125ad1..fd02bcc 100644
--- a/cmds/stagefright/codec.cpp
+++ b/cmds/stagefright/codec.cpp
@@ -24,6 +24,7 @@
 #include <binder/IServiceManager.h>
 #include <binder/ProcessState.h>
 #include <media/ICrypto.h>
+#include <media/IMediaHTTPService.h>
 #include <media/IMediaPlayerService.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
@@ -76,7 +77,7 @@
     static int64_t kTimeout = 500ll;
 
     sp<NuMediaExtractor> extractor = new NuMediaExtractor;
-    if (extractor->setDataSource(path) != OK) {
+    if (extractor->setDataSource(NULL /* httpService */, path) != OK) {
         fprintf(stderr, "unable to instantiate extractor.\n");
         return 1;
     }
diff --git a/cmds/stagefright/muxer.cpp b/cmds/stagefright/muxer.cpp
index 90daea2..f4a33e8 100644
--- a/cmds/stagefright/muxer.cpp
+++ b/cmds/stagefright/muxer.cpp
@@ -20,6 +20,7 @@
 #include <utils/Log.h>
 
 #include <binder/ProcessState.h>
+#include <media/IMediaHTTPService.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALooper.h>
@@ -59,7 +60,7 @@
         int trimEndTimeMs,
         int rotationDegrees) {
     sp<NuMediaExtractor> extractor = new NuMediaExtractor;
-    if (extractor->setDataSource(path) != OK) {
+    if (extractor->setDataSource(NULL /* httpService */, path) != OK) {
         fprintf(stderr, "unable to instantiate extractor. %s\n", path);
         return 1;
     }
diff --git a/cmds/stagefright/sf2.cpp b/cmds/stagefright/sf2.cpp
index b2b9ce5..3c0c7ec 100644
--- a/cmds/stagefright/sf2.cpp
+++ b/cmds/stagefright/sf2.cpp
@@ -19,8 +19,12 @@
 #include <inttypes.h>
 #include <utils/Log.h>
 
+#include <signal.h>
+
 #include <binder/ProcessState.h>
 
+#include <media/IMediaHTTPService.h>
+
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
@@ -43,6 +47,18 @@
 
 using namespace android;
 
+volatile static bool ctrlc = false;
+
+static sighandler_t oldhandler = NULL;
+
+static void mysighandler(int signum) {
+    if (signum == SIGINT) {
+        ctrlc = true;
+        return;
+    }
+    oldhandler(signum);
+}
+
 struct Controller : public AHandler {
     Controller(const char *uri, bool decodeAudio,
                const sp<Surface> &surface, bool renderToSurface)
@@ -63,7 +79,30 @@
     virtual ~Controller() {
     }
 
+    virtual void printStatistics() {
+        int64_t delayUs = ALooper::GetNowUs() - mStartTimeUs;
+
+        if (mDecodeAudio) {
+            printf("%" PRId64 " bytes received. %.2f KB/sec\n",
+                   mTotalBytesReceived,
+                   mTotalBytesReceived * 1E6 / 1024 / delayUs);
+        } else {
+            printf("%d frames decoded, %.2f fps. %" PRId64 " bytes "
+                   "received. %.2f KB/sec\n",
+                   mNumOutputBuffersReceived,
+                   mNumOutputBuffersReceived * 1E6 / delayUs,
+                   mTotalBytesReceived,
+                   mTotalBytesReceived * 1E6 / 1024 / delayUs);
+        }
+    }
+
     virtual void onMessageReceived(const sp<AMessage> &msg) {
+        if (ctrlc) {
+            printf("\n");
+            printStatistics();
+            (new AMessage(kWhatStop, id()))->post();
+            ctrlc = false;
+        }
         switch (msg->what()) {
             case kWhatStart:
             {
@@ -76,7 +115,8 @@
 #endif
 
                 sp<DataSource> dataSource =
-                    DataSource::CreateFromURI(mURI.c_str());
+                    DataSource::CreateFromURI(
+                            NULL /* httpService */, mURI.c_str());
 
                 sp<MediaExtractor> extractor =
                     MediaExtractor::Create(dataSource);
@@ -99,7 +139,10 @@
                         break;
                     }
                 }
-                CHECK(mSource != NULL);
+                if (mSource == NULL) {
+                    printf("no %s track found\n", mDecodeAudio ? "audio" : "video");
+                    exit (1);
+                }
 
                 CHECK_EQ(mSource->start(), (status_t)OK);
 
@@ -181,21 +224,7 @@
                         || what == ACodec::kWhatError) {
                     printf((what == ACodec::kWhatEOS) ? "$\n" : "E\n");
 
-                    int64_t delayUs = ALooper::GetNowUs() - mStartTimeUs;
-
-                    if (mDecodeAudio) {
-                        printf("%" PRId64 " bytes received. %.2f KB/sec\n",
-                               mTotalBytesReceived,
-                               mTotalBytesReceived * 1E6 / 1024 / delayUs);
-                    } else {
-                        printf("%d frames decoded, %.2f fps. %" PRId64 " bytes "
-                               "received. %.2f KB/sec\n",
-                               mNumOutputBuffersReceived,
-                               mNumOutputBuffersReceived * 1E6 / delayUs,
-                               mTotalBytesReceived,
-                               mTotalBytesReceived * 1E6 / 1024 / delayUs);
-                    }
-
+                    printStatistics();
                     (new AMessage(kWhatStop, id()))->post();
                 } else if (what == ACodec::kWhatFlushCompleted) {
                     mSeekState = SEEK_FLUSH_COMPLETED;
@@ -639,6 +668,8 @@
 
     looper->registerHandler(controller);
 
+    signal(SIGINT, mysighandler);
+
     controller->startAsync();
 
     CHECK_EQ(looper->start(true /* runOnCallingThread */), (status_t)OK);
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index 8efb39e..daaea27 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -29,6 +29,7 @@
 
 #include <binder/IServiceManager.h>
 #include <binder/ProcessState.h>
+#include <media/IMediaHTTPService.h>
 #include <media/IMediaPlayerService.h>
 #include <media/stagefright/foundation/ALooper.h>
 #include "include/NuCachedSource2.h"
@@ -960,7 +961,8 @@
 
         const char *filename = argv[k];
 
-        sp<DataSource> dataSource = DataSource::CreateFromURI(filename);
+        sp<DataSource> dataSource =
+            DataSource::CreateFromURI(NULL /* httpService */, filename);
 
         if (strncasecmp(filename, "sine:", 5) && dataSource == NULL) {
             fprintf(stderr, "Unable to create data source.\n");
diff --git a/cmds/stagefright/stream.cpp b/cmds/stagefright/stream.cpp
index dba67a9..b2abc0f 100644
--- a/cmds/stagefright/stream.cpp
+++ b/cmds/stagefright/stream.cpp
@@ -21,6 +21,7 @@
 #include <binder/ProcessState.h>
 #include <cutils/properties.h> // for property_get
 
+#include <media/IMediaHTTPService.h>
 #include <media/IStreamSource.h>
 #include <media/mediaplayer.h>
 #include <media/stagefright/foundation/ADebug.h>
@@ -159,7 +160,9 @@
 MyConvertingStreamSource::MyConvertingStreamSource(const char *filename)
     : mCurrentBufferIndex(-1),
       mCurrentBufferOffset(0) {
-    sp<DataSource> dataSource = DataSource::CreateFromURI(filename);
+    sp<DataSource> dataSource =
+        DataSource::CreateFromURI(NULL /* httpService */, filename);
+
     CHECK(dataSource != NULL);
 
     sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
diff --git a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/src/FwdLockEngine.cpp b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/src/FwdLockEngine.cpp
index 234aef2..f400732 100644
--- a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/src/FwdLockEngine.cpp
+++ b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/src/FwdLockEngine.cpp
@@ -316,6 +316,7 @@
 
     if (-1 < fileDesc) {
         if (FwdLockFile_attach(fileDesc) < 0) {
+            close(fileDesc);
             return mimeString;
         }
         const char* pMimeType = FwdLockFile_GetContentType(fileDesc);
diff --git a/include/media/AudioBufferProvider.h b/include/media/AudioBufferProvider.h
index ef392f0..7be449c 100644
--- a/include/media/AudioBufferProvider.h
+++ b/include/media/AudioBufferProvider.h
@@ -61,6 +61,17 @@
     //  buffer->frameCount  0
     virtual status_t getNextBuffer(Buffer* buffer, int64_t pts = kInvalidPTS) = 0;
 
+    // Release (a portion of) the buffer previously obtained by getNextBuffer().
+    // It is permissible to call releaseBuffer() multiple times per getNextBuffer().
+    // On entry:
+    //  buffer->frameCount  number of frames to release, must be <= number of frames
+    //                      obtained but not yet released
+    //  buffer->raw         unused
+    // On return:
+    //  buffer->frameCount  0; implementation MUST set to zero
+    //  buffer->raw         undefined; implementation is PERMITTED to set to any value,
+    //                      so if caller needs to continue using this buffer it must
+    //                      keep track of the pointer itself
     virtual void releaseBuffer(Buffer* buffer) = 0;
 };
 
diff --git a/include/media/AudioEffect.h b/include/media/AudioEffect.h
index 05d834d..f3024b7 100644
--- a/include/media/AudioEffect.h
+++ b/include/media/AudioEffect.h
@@ -36,7 +36,7 @@
 
 // ----------------------------------------------------------------------------
 
-class effect_param_cblk_t;
+struct effect_param_cblk_t;
 
 // ----------------------------------------------------------------------------
 
diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h
index 052064d..7054fd4 100644
--- a/include/media/AudioRecord.h
+++ b/include/media/AudioRecord.h
@@ -26,7 +26,7 @@
 
 // ----------------------------------------------------------------------------
 
-class audio_track_cblk_t;
+struct audio_track_cblk_t;
 class AudioRecordClientProxy;
 
 // ----------------------------------------------------------------------------
@@ -39,8 +39,12 @@
      * Keep in sync with frameworks/base/media/java/android/media/AudioRecord.java NATIVE_EVENT_*.
      */
     enum event_type {
-        EVENT_MORE_DATA = 0,        // Request to read more data from PCM buffer.
-        EVENT_OVERRUN = 1,          // PCM buffer overrun occurred.
+        EVENT_MORE_DATA = 0,        // Request to read available data from buffer.
+                                    // If this event is delivered but the callback handler
+                                    // does not want to read the available data, the handler
+                                    // must explicitly ignore the event by setting
+                                    // frameCount to zero.
+        EVENT_OVERRUN = 1,          // Buffer overrun occurred.
         EVENT_MARKER = 2,           // Record head is at the specified marker position
                                     // (See setMarkerPosition()).
         EVENT_NEW_POS = 3,          // Record head is at a new position
@@ -60,9 +64,10 @@
         size_t      frameCount;     // number of sample frames corresponding to size;
                                     // on input it is the number of frames available,
                                     // on output is the number of frames actually drained
-                                    // (currently ignored, but will make the primary field in future)
+                                    // (currently ignored but will make the primary field in future)
 
         size_t      size;           // input/output in bytes == frameCount * frameSize
+                                    // on output is the number of bytes actually drained
                                     // FIXME this is redundant with respect to frameCount,
                                     // and TRANSFER_OBTAIN mode is broken for 8-bit data
                                     // since we don't define the frame format
@@ -76,7 +81,7 @@
 
     /* As a convenience, if a callback is supplied, a handler thread
      * is automatically created with the appropriate priority. This thread
-     * invokes the callback when a new buffer becomes ready or various conditions occur.
+     * invokes the callback when a new buffer becomes available or various conditions occur.
      * Parameters:
      *
      * event:   type of event notified (see enum AudioRecord::event_type).
@@ -99,6 +104,8 @@
      *  - NO_ERROR: successful operation
      *  - NO_INIT: audio server or audio hardware not initialized
      *  - BAD_VALUE: unsupported configuration
+     * frameCount is guaranteed to be non-zero if status is NO_ERROR,
+     * and is undefined otherwise.
      */
 
      static status_t getMinFrameCount(size_t* frameCount,
@@ -109,7 +116,7 @@
     /* How data is transferred from AudioRecord
      */
     enum transfer_type {
-        TRANSFER_DEFAULT,   // not specified explicitly; determine from other parameters
+        TRANSFER_DEFAULT,   // not specified explicitly; determine from the other parameters
         TRANSFER_CALLBACK,  // callback EVENT_MORE_DATA
         TRANSFER_OBTAIN,    // FIXME deprecated: call obtainBuffer() and releaseBuffer()
         TRANSFER_SYNC,      // synchronous read()
@@ -137,7 +144,7 @@
      *                     be larger if the requested size is not compatible with current audio HAL
      *                     latency.  Zero means to use a default value.
      * cbf:                Callback function. If not null, this function is called periodically
-     *                     to consume new PCM data and inform of marker, position updates, etc.
+     *                     to consume new data and inform of marker, position updates, etc.
      * user:               Context for use by the callback receiver.
      * notificationFrames: The callback function is called each time notificationFrames PCM
      *                     frames are ready in record track output buffer.
@@ -155,7 +162,7 @@
                                     callback_t cbf = NULL,
                                     void* user = NULL,
                                     int notificationFrames = 0,
-                                    int sessionId = 0,
+                                    int sessionId = AUDIO_SESSION_ALLOCATE,
                                     transfer_type transferType = TRANSFER_DEFAULT,
                                     audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE);
 
@@ -171,9 +178,10 @@
      * Returned status (from utils/Errors.h) can be:
      *  - NO_ERROR: successful intialization
      *  - INVALID_OPERATION: AudioRecord is already initialized or record device is already in use
-     *  - BAD_VALUE: invalid parameter (channels, format, sampleRate...)
+     *  - BAD_VALUE: invalid parameter (channelMask, format, sampleRate...)
      *  - NO_INIT: audio server or audio hardware not initialized
      *  - PERMISSION_DENIED: recording is not allowed for the requesting process
+     * If status is not equal to NO_ERROR, don't call any other APIs on this AudioRecord.
      *
      * Parameters not listed in the AudioRecord constructors above:
      *
@@ -188,11 +196,11 @@
                             void* user = NULL,
                             int notificationFrames = 0,
                             bool threadCanCallJava = false,
-                            int sessionId = 0,
+                            int sessionId = AUDIO_SESSION_ALLOCATE,
                             transfer_type transferType = TRANSFER_DEFAULT,
                             audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE);
 
-    /* Result of constructing the AudioRecord. This must be checked
+    /* Result of constructing the AudioRecord. This must be checked for successful initialization
      * before using any AudioRecord API (except for set()), because using
      * an uninitialized AudioRecord produces undefined results.
      * See set() method above for possible return codes.
@@ -221,7 +229,7 @@
             status_t    start(AudioSystem::sync_event_t event = AudioSystem::SYNC_EVENT_NONE,
                               int triggerSession = 0);
 
-    /* Stop a track. If set, the callback will cease being called.  Note that obtainBuffer() still
+    /* Stop a track.  The callback will cease being called.  Note that obtainBuffer() still
      * works and will drain buffers until the pool is exhausted, and then will return WOULD_BLOCK.
      */
             void        stop();
@@ -236,7 +244,7 @@
      * a callback with event type EVENT_MARKER is called. Calling setMarkerPosition
      * with marker == 0 cancels marker notification callback.
      * To set a marker at a position which would compute as 0,
-     * a workaround is to the set the marker at a nearby position such as ~0 or 1.
+     * a workaround is to set the marker at a nearby position such as ~0 or 1.
      * If the AudioRecord has been opened with no callback function associated,
      * the operation will fail.
      *
@@ -378,8 +386,10 @@
      * returning the current value by this function call.  Such loss typically occurs when the
      * user space process is blocked longer than the capacity of audio driver buffers.
      * Units: the number of input audio frames.
+     * FIXME The side-effect of resetting the counter may be incompatible with multi-client.
+     * Consider making it more like AudioTrack::getUnderrunFrames which doesn't have side effects.
      */
-            unsigned int  getInputFramesLost() const;
+            uint32_t    getInputFramesLost() const;
 
 private:
     /* copying audio record objects is not allowed */
@@ -412,6 +422,7 @@
         bool                mPaused;    // whether thread is requested to pause at next loop entry
         bool                mPausedInt; // whether thread internally requests pause
         nsecs_t             mPausedNs;  // if mPausedInt then associated timeout, otherwise ignored
+        bool                mIgnoreNextPausedInt;   // whether to ignore next mPausedInt request
     };
 
             // body of AudioRecordThread::threadLoop()
@@ -422,9 +433,10 @@
             //      NS_INACTIVE inactive so don't run again until re-started
             //      NS_NEVER    never again
             static const nsecs_t NS_WHENEVER = -1, NS_INACTIVE = -2, NS_NEVER = -3;
-            nsecs_t processAudioBuffer(const sp<AudioRecordThread>& thread);
+            nsecs_t processAudioBuffer();
 
             // caller must hold lock on mLock for all _l methods
+
             status_t openRecord_l(size_t epoch);
 
             // FIXME enum is faster than strcmp() for parameter 'from'
@@ -446,12 +458,13 @@
                                                     // notification callback
     uint32_t                mNotificationFramesAct; // actual number of frames between each
                                                     // notification callback
-    bool                    mRefreshRemaining;  // processAudioBuffer() should refresh next 2
+    bool                    mRefreshRemaining;      // processAudioBuffer() should refresh
+                                                    // mRemainingFrames and mRetryOnPartialBuffer
 
     // These are private to processAudioBuffer(), and are not protected by a lock
     uint32_t                mRemainingFrames;       // number of frames to request in obtainBuffer()
     bool                    mRetryOnPartialBuffer;  // sleep and retry after partial obtainBuffer()
-    int                     mObservedSequence;      // last observed value of mSequence
+    uint32_t                mObservedSequence;      // last observed value of mSequence
 
     uint32_t                mMarkerPosition;    // in wrapping (overflow) frame units
     bool                    mMarkerReached;
@@ -460,9 +473,13 @@
 
     status_t                mStatus;
 
+    size_t                  mFrameCount;            // corresponds to current IAudioRecord, value is
+                                                    // reported back by AudioFlinger to the client
+    size_t                  mReqFrameCount;         // frame count to request the first or next time
+                                                    // a new IAudioRecord is needed, non-decreasing
+
     // constant after constructor or set()
     uint32_t                mSampleRate;
-    size_t                  mFrameCount;
     audio_format_t          mFormat;
     uint32_t                mChannelCount;
     size_t                  mFrameSize;         // app-level frame size == AudioFlinger frame size
@@ -473,12 +490,11 @@
     int                     mSessionId;
     transfer_type           mTransfer;
 
-    audio_io_handle_t       mInput;             // returned by AudioSystem::getInput()
-
-    // may be changed if IAudioRecord object is re-created
+    // Next 4 fields may be changed if IAudioRecord is re-created, but always != 0
     sp<IAudioRecord>        mAudioRecord;
     sp<IMemory>             mCblkMemory;
     audio_track_cblk_t*     mCblk;              // re-load after mLock.unlock()
+    audio_io_handle_t       mInput;             // returned by AudioSystem::getInput()
 
     int                     mPreviousPriority;  // before start()
     SchedPolicy             mPreviousSchedulingGroup;
diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h
index 4c22412..dfc2066 100644
--- a/include/media/AudioSystem.h
+++ b/include/media/AudioSystem.h
@@ -67,20 +67,24 @@
 
     // returns true in *state if tracks are active on the specified stream or have been active
     // in the past inPastMs milliseconds
-    static status_t isStreamActive(audio_stream_type_t stream, bool *state, uint32_t inPastMs = 0);
+    static status_t isStreamActive(audio_stream_type_t stream, bool *state, uint32_t inPastMs);
     // returns true in *state if tracks are active for what qualifies as remote playback
     // on the specified stream or have been active in the past inPastMs milliseconds. Remote
     // playback isn't mutually exclusive with local playback.
     static status_t isStreamActiveRemotely(audio_stream_type_t stream, bool *state,
-            uint32_t inPastMs = 0);
+            uint32_t inPastMs);
     // returns true in *state if a recorder is currently recording with the specified source
     static status_t isSourceActive(audio_source_t source, bool *state);
 
     // set/get audio hardware parameters. The function accepts a list of parameters
     // key value pairs in the form: key1=value1;key2=value2;...
     // Some keys are reserved for standard parameters (See AudioParameter class).
+    // The versions with audio_io_handle_t are intended for internal media framework use only.
     static status_t setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs);
     static String8  getParameters(audio_io_handle_t ioHandle, const String8& keys);
+    // The versions without audio_io_handle_t are intended for JNI.
+    static status_t setParameters(const String8& keyValuePairs);
+    static String8  getParameters(const String8& keys);
 
     static void setErrorCallback(audio_error_callback cb);
 
@@ -90,12 +94,14 @@
     static float linearToLog(int volume);
     static int logToLinear(float volume);
 
+    // Returned samplingRate and frameCount output values are guaranteed
+    // to be non-zero if status == NO_ERROR
     static status_t getOutputSamplingRate(uint32_t* samplingRate,
-            audio_stream_type_t stream = AUDIO_STREAM_DEFAULT);
+            audio_stream_type_t stream);
     static status_t getOutputFrameCount(size_t* frameCount,
-            audio_stream_type_t stream = AUDIO_STREAM_DEFAULT);
+            audio_stream_type_t stream);
     static status_t getOutputLatency(uint32_t* latency,
-            audio_stream_type_t stream = AUDIO_STREAM_DEFAULT);
+            audio_stream_type_t stream);
     static status_t getSamplingRate(audio_io_handle_t output,
                                           audio_stream_type_t streamType,
                                           uint32_t* samplingRate);
@@ -132,7 +138,7 @@
                                       audio_stream_type_t stream = AUDIO_STREAM_DEFAULT);
 
     // return the number of input frames lost by HAL implementation, or 0 if the handle is invalid
-    static size_t getInputFramesLost(audio_io_handle_t ioHandle);
+    static uint32_t getInputFramesLost(audio_io_handle_t ioHandle);
 
     static int newAudioSessionId();
     static void acquireAudioSessionId(int audioSession);
@@ -155,7 +161,8 @@
     class OutputDescriptor {
     public:
         OutputDescriptor()
-        : samplingRate(0), format(AUDIO_FORMAT_DEFAULT), channelMask(0), frameCount(0), latency(0)  {}
+        : samplingRate(0), format(AUDIO_FORMAT_DEFAULT), channelMask(0), frameCount(0), latency(0)
+            {}
 
         uint32_t samplingRate;
         audio_format_t format;
@@ -193,24 +200,32 @@
     static status_t setPhoneState(audio_mode_t state);
     static status_t setForceUse(audio_policy_force_use_t usage, audio_policy_forced_cfg_t config);
     static audio_policy_forced_cfg_t getForceUse(audio_policy_force_use_t usage);
+
+    // Client must successfully hand off the handle reference to AudioFlinger via createTrack(),
+    // or release it with releaseOutput().
     static audio_io_handle_t getOutput(audio_stream_type_t stream,
                                         uint32_t samplingRate = 0,
                                         audio_format_t format = AUDIO_FORMAT_DEFAULT,
                                         audio_channel_mask_t channelMask = AUDIO_CHANNEL_OUT_STEREO,
                                         audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
                                         const audio_offload_info_t *offloadInfo = NULL);
+
     static status_t startOutput(audio_io_handle_t output,
                                 audio_stream_type_t stream,
-                                int session = 0);
+                                int session);
     static status_t stopOutput(audio_io_handle_t output,
                                audio_stream_type_t stream,
-                               int session = 0);
+                               int session);
     static void releaseOutput(audio_io_handle_t output);
+
+    // Client must successfully hand off the handle reference to AudioFlinger via openRecord(),
+    // or release it with releaseInput().
     static audio_io_handle_t getInput(audio_source_t inputSource,
-                                    uint32_t samplingRate = 0,
-                                    audio_format_t format = AUDIO_FORMAT_DEFAULT,
-                                    audio_channel_mask_t channelMask = AUDIO_CHANNEL_IN_MONO,
-                                    int sessionId = 0);
+                                    uint32_t samplingRate,
+                                    audio_format_t format,
+                                    audio_channel_mask_t channelMask,
+                                    int sessionId);
+
     static status_t startInput(audio_io_handle_t input);
     static status_t stopInput(audio_io_handle_t input);
     static void releaseInput(audio_io_handle_t input);
diff --git a/include/media/AudioTimestamp.h b/include/media/AudioTimestamp.h
index c29c7e5..99e9c3e 100644
--- a/include/media/AudioTimestamp.h
+++ b/include/media/AudioTimestamp.h
@@ -19,6 +19,8 @@
 
 #include <time.h>
 
+namespace android {
+
 class AudioTimestamp {
 public:
     AudioTimestamp() : mPosition(0) {
@@ -30,4 +32,6 @@
     struct timespec mTime;     // corresponding CLOCK_MONOTONIC when frame is expected to present
 };
 
+}   // namespace
+
 #endif  // ANDROID_AUDIO_TIMESTAMP_H
diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h
index f6646ab..5a50280 100644
--- a/include/media/AudioTrack.h
+++ b/include/media/AudioTrack.h
@@ -27,7 +27,7 @@
 
 // ----------------------------------------------------------------------------
 
-class audio_track_cblk_t;
+struct audio_track_cblk_t;
 class AudioTrackClientProxy;
 class StaticAudioTrackClientProxy;
 
@@ -123,6 +123,8 @@
      *  - NO_ERROR: successful operation
      *  - NO_INIT: audio server or audio hardware not initialized
      *  - BAD_VALUE: unsupported configuration
+     * frameCount is guaranteed to be non-zero if status is NO_ERROR,
+     * and is undefined otherwise.
      */
 
     static status_t getMinFrameCount(size_t* frameCount,
@@ -158,7 +160,7 @@
      * sampleRate:         Data source sampling rate in Hz.
      * format:             Audio format (e.g AUDIO_FORMAT_PCM_16_BIT for signed
      *                     16 bits per sample).
-     * channelMask:        Channel mask.
+     * channelMask:        Channel mask, such that audio_is_output_channel(channelMask) is true.
      * frameCount:         Minimum size of track PCM buffer in frames. This defines the
      *                     application's contribution to the
      *                     latency of the track. The actual size selected by the AudioTrack could be
@@ -185,7 +187,7 @@
                                     callback_t cbf       = NULL,
                                     void* user           = NULL,
                                     int notificationFrames = 0,
-                                    int sessionId        = 0,
+                                    int sessionId        = AUDIO_SESSION_ALLOCATE,
                                     transfer_type transferType = TRANSFER_DEFAULT,
                                     const audio_offload_info_t *offloadInfo = NULL,
                                     int uid = -1);
@@ -210,7 +212,7 @@
                                     callback_t cbf      = NULL,
                                     void* user          = NULL,
                                     int notificationFrames = 0,
-                                    int sessionId       = 0,
+                                    int sessionId       = AUDIO_SESSION_ALLOCATE,
                                     transfer_type transferType = TRANSFER_DEFAULT,
                                     const audio_offload_info_t *offloadInfo = NULL,
                                     int uid = -1);
@@ -248,7 +250,7 @@
                             int notificationFrames = 0,
                             const sp<IMemory>& sharedBuffer = 0,
                             bool threadCanCallJava = false,
-                            int sessionId       = 0,
+                            int sessionId       = AUDIO_SESSION_ALLOCATE,
                             transfer_type transferType = TRANSFER_DEFAULT,
                             const audio_offload_info_t *offloadInfo = NULL,
                             int uid = -1);
@@ -336,7 +338,7 @@
      */
             status_t    setSampleRate(uint32_t sampleRate);
 
-    /* Return current source sample rate in Hz, or 0 if unknown */
+    /* Return current source sample rate in Hz */
             uint32_t    getSampleRate() const;
 
     /* Enables looping and sets the start and end points of looping.
@@ -361,7 +363,7 @@
     /* Sets marker position. When playback reaches the number of frames specified, a callback with
      * event type EVENT_MARKER is called. Calling setMarkerPosition with marker == 0 cancels marker
      * notification callback.  To set a marker at a position which would compute as 0,
-     * a workaround is to the set the marker at a nearby position such as ~0 or 1.
+     * a workaround is to set the marker at a nearby position such as ~0 or 1.
      * If the AudioTrack has been opened with no callback function associated, the operation will
      * fail.
      *
@@ -452,7 +454,7 @@
      * Returned value:
      *  handle on audio hardware output
      */
-            audio_io_handle_t    getOutput();
+            audio_io_handle_t    getOutput() const;
 
     /* Returns the unique session ID associated with this track.
      *
@@ -566,7 +568,7 @@
             uint32_t    getUnderrunFrames() const;
 
     /* Get the flags */
-            audio_output_flags_t getFlags() const { return mFlags; }
+            audio_output_flags_t getFlags() const { AutoMutex _l(mLock); return mFlags; }
 
     /* Set parameters - only possible when using direct output */
             status_t    setParameters(const String8& keyValuePairs);
@@ -626,53 +628,50 @@
             //      NS_INACTIVE inactive so don't run again until re-started
             //      NS_NEVER    never again
             static const nsecs_t NS_WHENEVER = -1, NS_INACTIVE = -2, NS_NEVER = -3;
-            nsecs_t processAudioBuffer(const sp<AudioTrackThread>& thread);
-            status_t processStreamEnd(int32_t waitCount);
+            nsecs_t processAudioBuffer();
 
+            bool     isOffloaded() const;
 
             // caller must hold lock on mLock for all _l methods
 
-            status_t createTrack_l(audio_stream_type_t streamType,
-                                 uint32_t sampleRate,
-                                 audio_format_t format,
-                                 size_t frameCount,
-                                 audio_output_flags_t flags,
-                                 const sp<IMemory>& sharedBuffer,
-                                 audio_io_handle_t output,
-                                 size_t epoch);
+            status_t createTrack_l(size_t epoch);
 
             // can only be called when mState != STATE_ACTIVE
             void flush_l();
 
             void setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCount);
-            audio_io_handle_t getOutput_l();
 
             // FIXME enum is faster than strcmp() for parameter 'from'
             status_t restoreTrack_l(const char *from);
 
-            bool     isOffloaded() const
+            bool     isOffloaded_l() const
                 { return (mFlags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0; }
 
-    // Next 3 fields may be changed if IAudioTrack is re-created, but always != 0
+    // Next 4 fields may be changed if IAudioTrack is re-created, but always != 0
     sp<IAudioTrack>         mAudioTrack;
     sp<IMemory>             mCblkMemory;
     audio_track_cblk_t*     mCblk;                  // re-load after mLock.unlock()
+    audio_io_handle_t       mOutput;                // returned by AudioSystem::getOutput()
 
     sp<AudioTrackThread>    mAudioTrackThread;
+
     float                   mVolume[2];
     float                   mSendLevel;
     mutable uint32_t        mSampleRate;            // mutable because getSampleRate() can update it.
-    size_t                  mFrameCount;            // corresponds to current IAudioTrack
-    size_t                  mReqFrameCount;         // frame count to request the next time a new
-                                                    // IAudioTrack is needed
-
+    size_t                  mFrameCount;            // corresponds to current IAudioTrack, value is
+                                                    // reported back by AudioFlinger to the client
+    size_t                  mReqFrameCount;         // frame count to request the first or next time
+                                                    // a new IAudioTrack is needed, non-decreasing
 
     // constant after constructor or set()
     audio_format_t          mFormat;                // as requested by client, not forced to 16-bit
     audio_stream_type_t     mStreamType;
     uint32_t                mChannelCount;
     audio_channel_mask_t    mChannelMask;
+    sp<IMemory>             mSharedBuffer;
     transfer_type           mTransfer;
+    audio_offload_info_t    mOffloadInfoCopy;
+    const audio_offload_info_t* mOffloadInfo;
 
     // mFrameSize is equal to mFrameSizeAF for non-PCM or 16-bit PCM data.  For 8-bit PCM data, it's
     // twice as large as mFrameSize because data is expanded to 16-bit before it's stored in buffer.
@@ -705,21 +704,25 @@
     uint32_t                mNotificationFramesAct; // actual number of frames between each
                                                     // notification callback,
                                                     // at initial source sample rate
-    bool                    mRefreshRemaining;      // processAudioBuffer() should refresh next 2
+    bool                    mRefreshRemaining;      // processAudioBuffer() should refresh
+                                                    // mRemainingFrames and mRetryOnPartialBuffer
 
     // These are private to processAudioBuffer(), and are not protected by a lock
     uint32_t                mRemainingFrames;       // number of frames to request in obtainBuffer()
     bool                    mRetryOnPartialBuffer;  // sleep and retry after partial obtainBuffer()
     uint32_t                mObservedSequence;      // last observed value of mSequence
 
-    sp<IMemory>             mSharedBuffer;
     uint32_t                mLoopPeriod;            // in frames, zero means looping is disabled
+
     uint32_t                mMarkerPosition;        // in wrapping (overflow) frame units
     bool                    mMarkerReached;
     uint32_t                mNewPosition;           // in frames
     uint32_t                mUpdatePeriod;          // in frames, zero means no EVENT_NEW_POS
 
     audio_output_flags_t    mFlags;
+        // const after set(), except for bits AUDIO_OUTPUT_FLAG_FAST and AUDIO_OUTPUT_FLAG_OFFLOAD.
+        // mLock must be held to read or write those bits reliably.
+
     int                     mSessionId;
     int                     mAuxEffectId;
 
@@ -753,7 +756,6 @@
 
     sp<DeathNotifier>       mDeathNotifier;
     uint32_t                mSequence;              // incremented for each new IAudioTrack attempt
-    audio_io_handle_t       mOutput;                // cached output io handle
     int                     mClientUid;
 };
 
diff --git a/include/media/IAudioFlinger.h b/include/media/IAudioFlinger.h
index 282f275..ea225ac 100644
--- a/include/media/IAudioFlinger.h
+++ b/include/media/IAudioFlinger.h
@@ -64,9 +64,12 @@
                                 uint32_t sampleRate,
                                 audio_format_t format,
                                 audio_channel_mask_t channelMask,
-                                size_t frameCount,
+                                size_t *pFrameCount,
                                 track_flags_t *flags,
                                 const sp<IMemory>& sharedBuffer,
+                                // On successful return, AudioFlinger takes over the handle
+                                // reference and will release it when the track is destroyed.
+                                // On failure, however, the client remains responsible for releasing it.
                                 audio_io_handle_t output,
                                 pid_t tid,  // -1 means unused, otherwise must be valid non-0
                                 int *sessionId,
@@ -78,11 +81,14 @@
                                 status_t *status) = 0;
 
     virtual sp<IAudioRecord> openRecord(
+                                // On successful return, AudioFlinger takes over the handle
+                                // reference and will release it when the track is destroyed.
+                                // On failure, however, the client remains responsible for releasing it.
                                 audio_io_handle_t input,
                                 uint32_t sampleRate,
                                 audio_format_t format,
                                 audio_channel_mask_t channelMask,
-                                size_t frameCount,
+                                size_t *pFrameCount,
                                 track_flags_t *flags,
                                 pid_t tid,  // -1 means unused, otherwise must be valid non-0
                                 int *sessionId,
@@ -188,6 +194,7 @@
                                     effect_descriptor_t *pDesc,
                                     const sp<IEffectClient>& client,
                                     int32_t priority,
+                                    // AudioFlinger does not take over the handle reference from the client
                                     audio_io_handle_t output,
                                     int sessionId,
                                     status_t *status,
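
The ownership comments added above change the calling convention in the same way for createTrack() and openRecord(): the I/O handle is consumed only on success, and frameCount becomes an in/out parameter. A minimal caller-side sketch of the rule, matching the AudioRecord::openRecord_l() changes later in this patch (input, trackFlags, tid and the other locals stand for values the caller already holds):

    size_t frameCount = reqFrameCount;      // in: requested; out: frame count actually granted
    sp<IAudioRecord> record = audioFlinger->openRecord(input,
            sampleRate, format, channelMask,
            &frameCount,                    // was a plain size_t before this change
            &trackFlags, tid, &sessionId, &status);
    if (record == 0 || status != NO_ERROR) {
        // Failure: AudioFlinger did not take over the handle, so the caller
        // must still release it.
        AudioSystem::releaseInput(input);
    } else {
        // Success: AudioFlinger owns the handle and releases it when the track
        // is destroyed; the caller keeps only a copy of the value.
    }
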
diff --git a/include/media/IMediaHTTPConnection.h b/include/media/IMediaHTTPConnection.h
new file mode 100644
index 0000000..e048b64
--- /dev/null
+++ b/include/media/IMediaHTTPConnection.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef I_MEDIA_HTTP_CONNECTION_H_
+
+#define I_MEDIA_HTTP_CONNECTION_H_
+
+#include <binder/IInterface.h>
+#include <media/stagefright/foundation/ABase.h>
+#include <utils/KeyedVector.h>
+
+namespace android {
+
+struct IMediaHTTPConnection;
+
+/** MUST stay in sync with IMediaHTTPConnection.aidl */
+
+struct IMediaHTTPConnection : public IInterface {
+    DECLARE_META_INTERFACE(MediaHTTPConnection);
+
+    virtual bool connect(
+            const char *uri, const KeyedVector<String8, String8> *headers) = 0;
+
+    virtual void disconnect() = 0;
+    virtual ssize_t readAt(off64_t offset, void *data, size_t size) = 0;
+    virtual off64_t getSize() = 0;
+    virtual status_t getMIMEType(String8 *mimeType) = 0;
+
+private:
+    DISALLOW_EVIL_CONSTRUCTORS(IMediaHTTPConnection);
+};
+
+}  // namespace android
+
+#endif  // I_MEDIA_HTTP_CONNECTION_H_
diff --git a/include/media/IMediaHTTPService.h b/include/media/IMediaHTTPService.h
new file mode 100644
index 0000000..f66d6c8
--- /dev/null
+++ b/include/media/IMediaHTTPService.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef I_MEDIA_HTTP_SERVICE_H_
+
+#define I_MEDIA_HTTP_SERVICE_H_
+
+#include <binder/IInterface.h>
+#include <media/stagefright/foundation/ABase.h>
+
+namespace android {
+
+struct IMediaHTTPConnection;
+
+/** MUST stay in sync with IMediaHTTPService.aidl */
+
+struct IMediaHTTPService : public IInterface {
+    DECLARE_META_INTERFACE(MediaHTTPService);
+
+    virtual sp<IMediaHTTPConnection> makeHTTPConnection() = 0;
+
+private:
+    DISALLOW_EVIL_CONSTRUCTORS(IMediaHTTPService);
+};
+
+}  // namespace android
+
+#endif  // I_MEDIA_HTTP_SERVICE_H_
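
Together with IMediaHTTPConnection above, this lets media framework code route HTTP I/O through a connection object supplied by the client process. A hedged sketch of how a component handed an IMediaHTTPService might use the two interfaces (readFirstBytes is a hypothetical helper, purely for illustration):

    // #include <media/IMediaHTTPService.h>
    // #include <media/IMediaHTTPConnection.h>
    static ssize_t readFirstBytes(const sp<IMediaHTTPService> &httpService,
                                  const char *uri, void *data, size_t size) {
        sp<IMediaHTTPConnection> conn = httpService->makeHTTPConnection();
        if (conn == NULL || !conn->connect(uri, NULL /* headers */)) {
            return -1;
        }
        ssize_t n = conn->readAt(0 /* offset */, data, size);
        conn->disconnect();
        return n;
    }
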
diff --git a/include/media/IMediaMetadataRetriever.h b/include/media/IMediaMetadataRetriever.h
index 6dbb2d7..2529800 100644
--- a/include/media/IMediaMetadataRetriever.h
+++ b/include/media/IMediaMetadataRetriever.h
@@ -26,6 +26,8 @@
 
 namespace android {
 
+struct IMediaHTTPService;
+
 class IMediaMetadataRetriever: public IInterface
 {
 public:
@@ -33,6 +35,7 @@
     virtual void            disconnect() = 0;
 
     virtual status_t        setDataSource(
+            const sp<IMediaHTTPService> &httpService,
             const char *srcUrl,
             const KeyedVector<String8, String8> *headers = NULL) = 0;
 
diff --git a/include/media/IMediaPlayer.h b/include/media/IMediaPlayer.h
index 0cbd269..db62cd5 100644
--- a/include/media/IMediaPlayer.h
+++ b/include/media/IMediaPlayer.h
@@ -33,6 +33,7 @@
 class Surface;
 class IStreamSource;
 class IGraphicBufferProducer;
+struct IMediaHTTPService;
 
 class IMediaPlayer: public IInterface
 {
@@ -41,8 +42,11 @@
 
     virtual void            disconnect() = 0;
 
-    virtual status_t        setDataSource(const char *url,
-                                    const KeyedVector<String8, String8>* headers) = 0;
+    virtual status_t        setDataSource(
+            const sp<IMediaHTTPService> &httpService,
+            const char *url,
+            const KeyedVector<String8, String8>* headers) = 0;
+
     virtual status_t        setDataSource(int fd, int64_t offset, int64_t length) = 0;
     virtual status_t        setDataSource(const sp<IStreamSource>& source) = 0;
     virtual status_t        setVideoSurfaceTexture(
diff --git a/include/media/IMediaPlayerService.h b/include/media/IMediaPlayerService.h
index 2998b37..5b45376 100644
--- a/include/media/IMediaPlayerService.h
+++ b/include/media/IMediaPlayerService.h
@@ -34,6 +34,7 @@
 struct ICrypto;
 struct IDrm;
 struct IHDCP;
+struct IMediaHTTPService;
 class IMediaRecorder;
 class IOMX;
 class IRemoteDisplay;
@@ -49,9 +50,14 @@
     virtual sp<IMediaMetadataRetriever> createMetadataRetriever() = 0;
     virtual sp<IMediaPlayer> create(const sp<IMediaPlayerClient>& client, int audioSessionId = 0) = 0;
 
-    virtual status_t         decode(const char* url, uint32_t *pSampleRate, int* pNumChannels,
-                                    audio_format_t* pFormat,
-                                    const sp<IMemoryHeap>& heap, size_t *pSize) = 0;
+    virtual status_t         decode(
+            const sp<IMediaHTTPService> &httpService,
+            const char* url,
+            uint32_t *pSampleRate,
+            int* pNumChannels,
+            audio_format_t* pFormat,
+            const sp<IMemoryHeap>& heap, size_t *pSize) = 0;
+
     virtual status_t         decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate,
                                     int* pNumChannels, audio_format_t* pFormat,
                                     const sp<IMemoryHeap>& heap, size_t *pSize) = 0;
@@ -93,9 +99,6 @@
 
     virtual void addBatteryData(uint32_t params) = 0;
     virtual status_t pullBatteryData(Parcel* reply) = 0;
-
-    virtual status_t updateProxyConfig(
-            const char *host, int32_t port, const char *exclusionList) = 0;
 };
 
 // ----------------------------------------------------------------------------
diff --git a/include/media/IOMX.h b/include/media/IOMX.h
index 9c8451c..3db2c38 100644
--- a/include/media/IOMX.h
+++ b/include/media/IOMX.h
@@ -142,6 +142,8 @@
     enum InternalOptionType {
         INTERNAL_OPTION_SUSPEND,  // data is a bool
         INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY,  // data is an int64_t
+        INTERNAL_OPTION_MAX_TIMESTAMP_GAP, // data is an int64_t
+        INTERNAL_OPTION_START_TIME, // data is an int64_t
     };
     virtual status_t setInternalOption(
             node_id node,
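
The two new options follow the pattern of INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY: the payload is an int64_t passed through setInternalOption(). A hedged sketch, assuming the remaining setInternalOption() parameters (port index, option type, data pointer, size) keep their existing order; omx, node and portIndex stand for the usual IOMX setup:

    int64_t maxGapUs = 100000ll;    // e.g. tolerate at most 100 ms between timestamps
    status_t err = omx->setInternalOption(
            node, portIndex, IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP,
            &maxGapUs, sizeof(maxGapUs));
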
diff --git a/include/media/MediaMetadataRetrieverInterface.h b/include/media/MediaMetadataRetrieverInterface.h
index ecc3b65..bb6b97b 100644
--- a/include/media/MediaMetadataRetrieverInterface.h
+++ b/include/media/MediaMetadataRetrieverInterface.h
@@ -24,6 +24,8 @@
 
 namespace android {
 
+struct IMediaHTTPService;
+
 // Abstract base class
 class MediaMetadataRetrieverBase : public RefBase
 {
@@ -32,6 +34,7 @@
     virtual             ~MediaMetadataRetrieverBase() {}
 
     virtual status_t    setDataSource(
+            const sp<IMediaHTTPService> &httpService,
             const char *url,
             const KeyedVector<String8, String8> *headers = NULL) = 0;
 
diff --git a/include/media/MediaPlayerInterface.h b/include/media/MediaPlayerInterface.h
index 26d8729..87717da 100644
--- a/include/media/MediaPlayerInterface.h
+++ b/include/media/MediaPlayerInterface.h
@@ -137,6 +137,7 @@
     }
 
     virtual status_t    setDataSource(
+            const sp<IMediaHTTPService> &httpService,
             const char *url,
             const KeyedVector<String8, String8> *headers = NULL) = 0;
 
@@ -213,11 +214,6 @@
         return INVALID_OPERATION;
     }
 
-    virtual status_t updateProxyConfig(
-            const char *host, int32_t port, const char *exclusionList) {
-        return INVALID_OPERATION;
-    }
-
 private:
     friend class MediaPlayerService;
 
diff --git a/include/media/mediametadataretriever.h b/include/media/mediametadataretriever.h
index 0df77c1..b35cf32 100644
--- a/include/media/mediametadataretriever.h
+++ b/include/media/mediametadataretriever.h
@@ -25,6 +25,7 @@
 
 namespace android {
 
+struct IMediaHTTPService;
 class IMediaPlayerService;
 class IMediaMetadataRetriever;
 
@@ -68,6 +69,7 @@
     void disconnect();
 
     status_t setDataSource(
+            const sp<IMediaHTTPService> &httpService,
             const char *dataSourceUrl,
             const KeyedVector<String8, String8> *headers = NULL);
 
diff --git a/include/media/mediaplayer.h b/include/media/mediaplayer.h
index 4c05fc3..f8e4e3b 100644
--- a/include/media/mediaplayer.h
+++ b/include/media/mediaplayer.h
@@ -189,6 +189,8 @@
     virtual void notify(int msg, int ext1, int ext2, const Parcel *obj) = 0;
 };
 
+struct IMediaHTTPService;
+
 class MediaPlayer : public BnMediaPlayerClient,
                     public virtual IMediaDeathNotifier
 {
@@ -199,6 +201,7 @@
             void            disconnect();
 
             status_t        setDataSource(
+                    const sp<IMediaHTTPService> &httpService,
                     const char *url,
                     const KeyedVector<String8, String8> *headers);
 
@@ -224,9 +227,14 @@
             bool            isLooping();
             status_t        setVolume(float leftVolume, float rightVolume);
             void            notify(int msg, int ext1, int ext2, const Parcel *obj = NULL);
-    static  status_t        decode(const char* url, uint32_t *pSampleRate, int* pNumChannels,
-                                   audio_format_t* pFormat,
-                                   const sp<IMemoryHeap>& heap, size_t *pSize);
+    static  status_t        decode(
+            const sp<IMediaHTTPService> &httpService,
+            const char* url,
+            uint32_t *pSampleRate,
+            int* pNumChannels,
+            audio_format_t* pFormat,
+            const sp<IMemoryHeap>& heap,
+            size_t *pSize);
     static  status_t        decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate,
                                    int* pNumChannels, audio_format_t* pFormat,
                                    const sp<IMemoryHeap>& heap, size_t *pSize);
@@ -242,9 +250,6 @@
             status_t        setRetransmitEndpoint(const char* addrString, uint16_t port);
             status_t        setNextMediaPlayer(const sp<MediaPlayer>& player);
 
-            status_t updateProxyConfig(
-                    const char *host, int32_t port, const char *exclusionList);
-
 private:
             void            clear_l();
             status_t        seekTo_l(int msec);
diff --git a/include/media/mediarecorder.h b/include/media/mediarecorder.h
index 88a42a0..142cb90 100644
--- a/include/media/mediarecorder.h
+++ b/include/media/mediarecorder.h
@@ -39,7 +39,7 @@
 enum video_source {
     VIDEO_SOURCE_DEFAULT = 0,
     VIDEO_SOURCE_CAMERA = 1,
-    VIDEO_SOURCE_GRALLOC_BUFFER = 2,
+    VIDEO_SOURCE_SURFACE = 2,
 
     VIDEO_SOURCE_LIST_END  // must be last - used to validate video source type
 };
diff --git a/include/media/mediascanner.h b/include/media/mediascanner.h
index a73403b..4537679 100644
--- a/include/media/mediascanner.h
+++ b/include/media/mediascanner.h
@@ -21,6 +21,7 @@
 #include <utils/threads.h>
 #include <utils/List.h>
 #include <utils/Errors.h>
+#include <utils/String8.h>
 #include <pthread.h>
 
 struct dirent;
@@ -29,6 +30,7 @@
 
 class MediaScannerClient;
 class StringArray;
+class CharacterEncodingDetector;
 
 enum MediaScanResult {
     // This file or directory was scanned successfully.
@@ -94,15 +96,9 @@
     virtual status_t setMimeType(const char* mimeType) = 0;
 
 protected:
-    void convertValues(uint32_t encoding);
-
-protected:
-    // cached name and value strings, for native encoding support.
-    StringArray*    mNames;
-    StringArray*    mValues;
-
-    // default encoding based on MediaScanner::mLocale string
-    uint32_t        mLocaleEncoding;
+    // default encoding from MediaScanner::mLocale
+    String8 mLocale;
+    CharacterEncodingDetector *mEncodingDetector;
 };
 
 }; // namespace android
diff --git a/include/media/nbaio/AudioBufferProviderSource.h b/include/media/nbaio/AudioBufferProviderSource.h
index 2c4aaff..b16e20a 100644
--- a/include/media/nbaio/AudioBufferProviderSource.h
+++ b/include/media/nbaio/AudioBufferProviderSource.h
@@ -27,7 +27,7 @@
 class AudioBufferProviderSource : public NBAIO_Source {
 
 public:
-    AudioBufferProviderSource(AudioBufferProvider *provider, NBAIO_Format format);
+    AudioBufferProviderSource(AudioBufferProvider *provider, const NBAIO_Format& format);
     virtual ~AudioBufferProviderSource();
 
     // NBAIO_Port interface
diff --git a/include/media/nbaio/MonoPipe.h b/include/media/nbaio/MonoPipe.h
index d3802fe..b09b35f 100644
--- a/include/media/nbaio/MonoPipe.h
+++ b/include/media/nbaio/MonoPipe.h
@@ -41,7 +41,7 @@
     // Note: whatever shares this object with another thread needs to do so in an SMP-safe way (like
     // creating the object before creating the other thread, or storing the object with a
     // release_store). Otherwise the other thread could see a partially-constructed object.
-    MonoPipe(size_t reqFrames, NBAIO_Format format, bool writeCanBlock = false);
+    MonoPipe(size_t reqFrames, const NBAIO_Format& format, bool writeCanBlock = false);
     virtual ~MonoPipe();
 
     // NBAIO_Port interface
diff --git a/include/media/nbaio/NBAIO.h b/include/media/nbaio/NBAIO.h
index 1da0c73..56896b9 100644
--- a/include/media/nbaio/NBAIO.h
+++ b/include/media/nbaio/NBAIO.h
@@ -52,31 +52,41 @@
 // the combinations that are actually needed within AudioFlinger.  If the list of combinations grows
 // too large, then this decision should be re-visited.
 // Sample rate and channel count are explicit, PCM interleaved 16-bit is assumed.
-typedef unsigned NBAIO_Format;
-enum {
-    Format_Invalid
+struct NBAIO_Format {
+//private:
+    unsigned    mPacked;
 };
 
+extern const NBAIO_Format Format_Invalid;
+
 // Return the frame size of an NBAIO_Format in bytes
-size_t Format_frameSize(NBAIO_Format format);
+size_t Format_frameSize(const NBAIO_Format& format);
 
 // Return the frame size of an NBAIO_Format as a bit shift
-size_t Format_frameBitShift(NBAIO_Format format);
+// or -1 if frame size is not a power of 2
+int Format_frameBitShift(const NBAIO_Format& format);
 
 // Convert a sample rate in Hz and channel count to an NBAIO_Format
+// FIXME The sample format is hard-coded to AUDIO_FORMAT_PCM_16_BIT
 NBAIO_Format Format_from_SR_C(unsigned sampleRate, unsigned channelCount);
 
 // Return the sample rate in Hz of an NBAIO_Format
-unsigned Format_sampleRate(NBAIO_Format format);
+unsigned Format_sampleRate(const NBAIO_Format& format);
 
 // Return the channel count of an NBAIO_Format
-unsigned Format_channelCount(NBAIO_Format format);
+unsigned Format_channelCount(const NBAIO_Format& format);
 
 // Callbacks used by NBAIO_Sink::writeVia() and NBAIO_Source::readVia() below.
 typedef ssize_t (*writeVia_t)(void *user, void *buffer, size_t count);
 typedef ssize_t (*readVia_t)(void *user, const void *buffer,
                              size_t count, int64_t readPTS);
 
+// Check whether an NBAIO_Format is valid
+bool Format_isValid(const NBAIO_Format& format);
+
+// Compare two NBAIO_Format values
+bool Format_isEqual(const NBAIO_Format& format1, const NBAIO_Format& format2);
+
 // Abstract class (interface) representing a data port.
 class NBAIO_Port : public RefBase {
 
@@ -115,8 +125,8 @@
     virtual NBAIO_Format format() const { return mNegotiated ? mFormat : Format_Invalid; }
 
 protected:
-    NBAIO_Port(NBAIO_Format format) : mNegotiated(false), mFormat(format),
-                                      mBitShift(Format_frameBitShift(format)) { }
+    NBAIO_Port(const NBAIO_Format& format) : mNegotiated(false), mFormat(format),
+                                             mBitShift(Format_frameBitShift(format)) { }
     virtual ~NBAIO_Port() { }
 
     // Implementations are free to ignore these if they don't need them
@@ -220,7 +230,7 @@
     virtual status_t getTimestamp(AudioTimestamp& timestamp) { return INVALID_OPERATION; }
 
 protected:
-    NBAIO_Sink(NBAIO_Format format = Format_Invalid) : NBAIO_Port(format), mFramesWritten(0) { }
+    NBAIO_Sink(const NBAIO_Format& format = Format_Invalid) : NBAIO_Port(format), mFramesWritten(0) { }
     virtual ~NBAIO_Sink() { }
 
     // Implementations are free to ignore these if they don't need them
@@ -311,7 +321,7 @@
     virtual void    onTimestamp(const AudioTimestamp& timestamp) { }
 
 protected:
-    NBAIO_Source(NBAIO_Format format = Format_Invalid) : NBAIO_Port(format), mFramesRead(0) { }
+    NBAIO_Source(const NBAIO_Format& format = Format_Invalid) : NBAIO_Port(format), mFramesRead(0) { }
     virtual ~NBAIO_Source() { }
 
     // Implementations are free to ignore these if they don't need them
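
Because NBAIO_Format is now a struct rather than a plain unsigned, call sites can no longer compare formats with == or test them against Format_Invalid directly; the new Format_isValid()/Format_isEqual() helpers (and pass-by-const-reference) take over. A hedged sketch of the adjusted pattern (sink stands for any NBAIO_Port-derived object):

    NBAIO_Format format = Format_from_SR_C(44100 /* Hz */, 2 /* channels */);
    if (!Format_isValid(format)) {                  // was: format == Format_Invalid
        return BAD_VALUE;
    }
    if (!Format_isEqual(format, sink->format())) {  // was: format != sink->format()
        // renegotiate or fail
    }
    size_t frameSize = Format_frameSize(format);    // still PCM 16-bit interleaved
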
diff --git a/include/media/nbaio/Pipe.h b/include/media/nbaio/Pipe.h
index 79a4eee..c784129 100644
--- a/include/media/nbaio/Pipe.h
+++ b/include/media/nbaio/Pipe.h
@@ -30,7 +30,7 @@
 
 public:
     // maxFrames will be rounded up to a power of 2, and all slots are available. Must be >= 2.
-    Pipe(size_t maxFrames, NBAIO_Format format);
+    Pipe(size_t maxFrames, const NBAIO_Format& format);
     virtual ~Pipe();
 
     // NBAIO_Port interface
diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h
index f1636e6..e284109 100644
--- a/include/media/stagefright/ACodec.h
+++ b/include/media/stagefright/ACodec.h
@@ -67,6 +67,8 @@
 
     void signalRequestIDRFrame();
 
+    bool isConfiguredForAdaptivePlayback() { return mIsConfiguredForAdaptivePlayback; }
+
     struct PortDescription : public RefBase {
         size_t countBuffers();
         IOMX::buffer_id bufferIDAt(size_t index) const;
@@ -187,6 +189,7 @@
     bool mIsEncoder;
     bool mUseMetadataOnEncoderOutput;
     bool mShutdownInProgress;
+    bool mIsConfiguredForAdaptivePlayback;
 
     // If "mKeepComponentAllocated" we only transition back to Loaded state
     // and do not release the component instance.
@@ -202,6 +205,9 @@
     int32_t mMetaDataBuffersToSubmit;
 
     int64_t mRepeatFrameDelayUs;
+    int64_t mMaxPtsGapUs;
+
+    bool mCreateInputBuffersSuspended;
 
     status_t setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode);
     status_t allocateBuffersOnPort(OMX_U32 portIndex);
diff --git a/include/media/stagefright/CameraSource.h b/include/media/stagefright/CameraSource.h
index a829916..69cfbd0 100644
--- a/include/media/stagefright/CameraSource.h
+++ b/include/media/stagefright/CameraSource.h
@@ -185,6 +185,8 @@
     virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
             const sp<IMemory> &data);
 
+    void releaseCamera();
+
 private:
     friend class CameraSourceListener;
 
@@ -233,7 +235,6 @@
                     int32_t frameRate);
 
     void stopCameraRecording();
-    void releaseCamera();
     status_t reset();
 
     CameraSource(const CameraSource &);
diff --git a/include/media/stagefright/DataSource.h b/include/media/stagefright/DataSource.h
index 157b1aa..f8787dd 100644
--- a/include/media/stagefright/DataSource.h
+++ b/include/media/stagefright/DataSource.h
@@ -31,6 +31,7 @@
 namespace android {
 
 struct AMessage;
+struct IMediaHTTPService;
 class String8;
 
 class DataSource : public RefBase {
@@ -43,6 +44,7 @@
     };
 
     static sp<DataSource> CreateFromURI(
+            const sp<IMediaHTTPService> &httpService,
             const char *uri,
             const KeyedVector<String8, String8> *headers = NULL);
 
diff --git a/include/media/stagefright/MediaCodecSource.h b/include/media/stagefright/MediaCodecSource.h
new file mode 100644
index 0000000..4b18a0b
--- /dev/null
+++ b/include/media/stagefright/MediaCodecSource.h
@@ -0,0 +1,134 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MediaCodecSource_H_
+#define MediaCodecSource_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/AHandlerReflector.h>
+#include <media/stagefright/MediaSource.h>
+
+namespace android {
+
+class ALooper;
+class AMessage;
+class IGraphicBufferProducer;
+class MediaCodec;
+class MetaData;
+
+struct MediaCodecSource : public MediaSource,
+                          public MediaBufferObserver {
+    enum FlagBits {
+        FLAG_USE_SURFACE_INPUT      = 1,
+        FLAG_USE_METADATA_INPUT     = 2,
+    };
+
+    static sp<MediaCodecSource> Create(
+            const sp<ALooper> &looper,
+            const sp<AMessage> &format,
+            const sp<MediaSource> &source,
+            uint32_t flags = 0);
+
+    bool isVideo() const { return mIsVideo; }
+    sp<IGraphicBufferProducer> getGraphicBufferProducer();
+
+    // MediaSource
+    virtual status_t start(MetaData *params = NULL);
+    virtual status_t stop();
+    virtual status_t pause();
+    virtual sp<MetaData> getFormat() { return mMeta; }
+    virtual status_t read(
+            MediaBuffer **buffer,
+            const ReadOptions *options = NULL);
+
+    // MediaBufferObserver
+    virtual void signalBufferReturned(MediaBuffer *buffer);
+
+    // for AHandlerReflector
+    void onMessageReceived(const sp<AMessage> &msg);
+
+protected:
+    virtual ~MediaCodecSource();
+
+private:
+    struct Puller;
+
+    enum {
+        kWhatPullerNotify,
+        kWhatEncoderActivity,
+        kWhatStart,
+        kWhatStop,
+        kWhatPause,
+    };
+
+    MediaCodecSource(
+            const sp<ALooper> &looper,
+            const sp<AMessage> &outputFormat,
+            const sp<MediaSource> &source,
+            uint32_t flags = 0);
+
+    status_t onStart(MetaData *params);
+    status_t init();
+    status_t initEncoder();
+    void releaseEncoder();
+    status_t feedEncoderInputBuffers();
+    void scheduleDoMoreWork();
+    status_t doMoreWork();
+    void suspend();
+    void resume(int64_t skipFramesBeforeUs = -1ll);
+    void signalEOS(status_t err = ERROR_END_OF_STREAM);
+    bool reachedEOS();
+    status_t postSynchronouslyAndReturnError(const sp<AMessage> &msg);
+
+    sp<ALooper> mLooper;
+    sp<ALooper> mCodecLooper;
+    sp<AHandlerReflector<MediaCodecSource> > mReflector;
+    sp<AMessage> mOutputFormat;
+    sp<MetaData> mMeta;
+    sp<Puller> mPuller;
+    sp<MediaCodec> mEncoder;
+    uint32_t mFlags;
+    List<uint32_t> mStopReplyIDQueue;
+    bool mIsVideo;
+    bool mStarted;
+    bool mStopping;
+    bool mDoMoreWorkPending;
+    bool mPullerReachedEOS;
+    sp<AMessage> mEncoderActivityNotify;
+    sp<IGraphicBufferProducer> mGraphicBufferProducer;
+    Vector<sp<ABuffer> > mEncoderInputBuffers;
+    Vector<sp<ABuffer> > mEncoderOutputBuffers;
+    List<MediaBuffer *> mInputBufferQueue;
+    List<size_t> mAvailEncoderInputIndices;
+    List<int64_t> mDecodingTimeQueue; // decoding time (us) for video
+
+    // audio drift time
+    int64_t mFirstSampleTimeUs;
+    List<int64_t> mDriftTimeQueue;
+
+    // following variables are protected by mOutputBufferLock
+    Mutex mOutputBufferLock;
+    Condition mOutputBufferCond;
+    List<MediaBuffer*> mOutputBufferQueue;
+    bool mEncodedReachedEOS;
+    status_t mErrorCode;
+
+    DISALLOW_EVIL_CONSTRUCTORS(MediaCodecSource);
+};
+
+} // namespace android
+
+#endif /* MediaCodecSource_H_ */
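
A hedged usage sketch for the new source: it wraps a MediaCodec encoder around an existing MediaSource and is then read like any other MediaSource. The format keys and the looper/rawSource variables below are illustrative assumptions, not mandated by this header:

    sp<ALooper> looper = new ALooper;
    looper->start();
    sp<AMessage> format = new AMessage;
    format->setString("mime", "audio/mp4a-latm");   // plus bitrate, sample rate, etc.
    sp<MediaCodecSource> encoder =
            MediaCodecSource::Create(looper, format, rawSource /* sp<MediaSource> */);
    if (encoder != NULL && encoder->start() == OK) {
        MediaBuffer *buffer;
        while (encoder->read(&buffer) == OK) {
            // consume the encoded buffer, then return it to the source
            buffer->release();
        }
        encoder->stop();
    }
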
diff --git a/include/media/stagefright/MediaHTTP.h b/include/media/stagefright/MediaHTTP.h
new file mode 100644
index 0000000..006d8d8
--- /dev/null
+++ b/include/media/stagefright/MediaHTTP.h
@@ -0,0 +1,77 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_HTTP_H_
+
+#define MEDIA_HTTP_H_
+
+#include <media/stagefright/foundation/AString.h>
+
+#include "include/HTTPBase.h"
+
+namespace android {
+
+struct IMediaHTTPConnection;
+
+struct MediaHTTP : public HTTPBase {
+    MediaHTTP(const sp<IMediaHTTPConnection> &conn);
+
+    virtual status_t connect(
+            const char *uri,
+            const KeyedVector<String8, String8> *headers,
+            off64_t offset);
+
+    virtual void disconnect();
+
+    virtual status_t initCheck() const;
+
+    virtual ssize_t readAt(off64_t offset, void *data, size_t size);
+
+    virtual status_t getSize(off64_t *size);
+
+    virtual uint32_t flags();
+
+    virtual status_t reconnectAtOffset(off64_t offset);
+
+protected:
+    virtual ~MediaHTTP();
+
+    virtual sp<DecryptHandle> DrmInitialization(const char* mime);
+    virtual void getDrmInfo(sp<DecryptHandle> &handle, DrmManagerClient **client);
+    virtual String8 getUri();
+    virtual String8 getMIMEType() const;
+
+private:
+    status_t mInitCheck;
+    sp<IMediaHTTPConnection> mHTTPConnection;
+
+    KeyedVector<String8, String8> mLastHeaders;
+    AString mLastURI;
+
+    bool mCachedSizeValid;
+    off64_t mCachedSize;
+
+    sp<DecryptHandle> mDecryptHandle;
+    DrmManagerClient *mDrmManagerClient;
+
+    void clearDRMState_l();
+
+    DISALLOW_EVIL_CONSTRUCTORS(MediaHTTP);
+};
+
+}  // namespace android
+
+#endif  // MEDIA_HTTP_H_
diff --git a/include/media/stagefright/NuMediaExtractor.h b/include/media/stagefright/NuMediaExtractor.h
index 5ae6f6b..402e7f8 100644
--- a/include/media/stagefright/NuMediaExtractor.h
+++ b/include/media/stagefright/NuMediaExtractor.h
@@ -31,6 +31,7 @@
 struct ABuffer;
 struct AMessage;
 struct DataSource;
+struct IMediaHTTPService;
 struct MediaBuffer;
 struct MediaExtractor;
 struct MediaSource;
@@ -45,6 +46,7 @@
     NuMediaExtractor();
 
     status_t setDataSource(
+            const sp<IMediaHTTPService> &httpService,
             const char *path,
             const KeyedVector<String8, String8> *headers = NULL);
 
diff --git a/include/media/stagefright/SkipCutBuffer.h b/include/media/stagefright/SkipCutBuffer.h
index 2653b53..098aa69 100644
--- a/include/media/stagefright/SkipCutBuffer.h
+++ b/include/media/stagefright/SkipCutBuffer.h
@@ -47,6 +47,7 @@
  private:
     void write(const char *src, size_t num);
     size_t read(char *dst, size_t num);
+    int32_t mSkip;
     int32_t mFrontPadding;
     int32_t mBackPadding;
     int32_t mWriteHead;
diff --git a/include/media/stagefright/timedtext/TimedTextDriver.h b/include/media/stagefright/timedtext/TimedTextDriver.h
index f23c337..37ef674 100644
--- a/include/media/stagefright/timedtext/TimedTextDriver.h
+++ b/include/media/stagefright/timedtext/TimedTextDriver.h
@@ -25,6 +25,7 @@
 namespace android {
 
 class ALooper;
+struct IMediaHTTPService;
 class MediaPlayerBase;
 class MediaSource;
 class Parcel;
@@ -34,7 +35,9 @@
 
 class TimedTextDriver {
 public:
-    TimedTextDriver(const wp<MediaPlayerBase> &listener);
+    TimedTextDriver(
+            const wp<MediaPlayerBase> &listener,
+            const sp<IMediaHTTPService> &httpService);
 
     ~TimedTextDriver();
 
@@ -77,6 +80,7 @@
     sp<ALooper> mLooper;
     sp<TimedTextPlayer> mPlayer;
     wp<MediaPlayerBase> mListener;
+    sp<IMediaHTTPService> mHTTPService;
 
     // Variables to be guarded by mLock.
     State mState;
diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h
index 7fd9379..3901e79 100644
--- a/include/private/media/AudioTrackShared.h
+++ b/include/private/media/AudioTrackShared.h
@@ -48,7 +48,7 @@
 #define CBLK_STREAM_END_DONE 0x400 // set by server on render completion, cleared by client
 
 //EL_FIXME 20 seconds may not be enough and must be reconciled with new obtainBuffer implementation
-#define MAX_RUN_OFFLOADED_TIMEOUT_MS 20000 //assuming upto a maximum of 20 seconds of offloaded
+#define MAX_RUN_OFFLOADED_TIMEOUT_MS 20000 // assuming up to a maximum of 20 seconds of offloaded
 
 struct AudioTrackSharedStreaming {
     // similar to NBAIO MonoPipe
@@ -65,7 +65,9 @@
 struct AudioTrackSharedStatic {
     StaticAudioTrackSingleStateQueue::Shared
                     mSingleStateQueue;
-    size_t          mBufferPosition;    // updated asynchronously by server,
+    // This field should be a size_t, but since it is located in shared memory we
+    // force it to 32-bit.  The client and server may have different typedefs for size_t.
+    uint32_t        mBufferPosition;    // updated asynchronously by server,
                                         // "for entertainment purposes only"
 };
 
@@ -96,11 +98,7 @@
                                         // The value should be used "for entertainment purposes only",
                                         // which means don't make important decisions based on it.
 
-                size_t      frameCount_;    // used during creation to pass actual track buffer size
-                                            // from AudioFlinger to client, and not referenced again
-                                            // FIXME remove here and replace by createTrack() in/out
-                                            // parameter
-                                            // renamed to "_" to detect incorrect use
+                uint32_t    mPad1;      // unused
 
     volatile    int32_t     mFutex;     // event flag: down (P) by client,
                                         // up (V) by server or binderDied() or interrupt()
@@ -108,7 +106,9 @@
 
 private:
 
-                size_t      mMinimum;       // server wakes up client if available >= mMinimum
+                // This field should be a size_t, but since it is located in shared memory we
+                // force it to 32-bit.  The client and server may have different typedefs for size_t.
+                uint32_t    mMinimum;       // server wakes up client if available >= mMinimum
 
                 // Channel volumes are fixed point U4.12, so 0x1000 means 1.0.
                 // Left channel is in [0:15], right channel is in [16:31].
@@ -245,7 +245,11 @@
     }
 
     void        setMinimum(size_t minimum) {
-        mCblk->mMinimum = minimum;
+        // This can only happen on a 64-bit client
+        if (minimum > UINT32_MAX) {
+            minimum = UINT32_MAX;
+        }
+        mCblk->mMinimum = (uint32_t) minimum;
     }
 
     // Return the number of frames that would need to be obtained and released
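
setMinimum() above illustrates the rule this change applies whenever a size_t crosses the shared-memory boundary: clamp to UINT32_MAX first (only reachable on a 64-bit client), then narrow, so the 32-bit field never receives a silently truncated value. The same idea as a standalone sketch (clampToUint32 is a hypothetical helper, shown only for illustration):

    #include <stddef.h>
    #include <stdint.h>

    static inline uint32_t clampToUint32(size_t value) {
        // A 32-bit client can never exceed UINT32_MAX, so this clamps only on 64-bit clients.
        return (value > UINT32_MAX) ? UINT32_MAX : (uint32_t) value;
    }
    // e.g.  mCblk->mMinimum = clampToUint32(minimum);
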
diff --git a/include/private/media/StaticAudioTrackState.h b/include/private/media/StaticAudioTrackState.h
index 46a5946..d483061 100644
--- a/include/private/media/StaticAudioTrackState.h
+++ b/include/private/media/StaticAudioTrackState.h
@@ -25,9 +25,13 @@
 // state is wrapped by a SingleStateQueue.
 struct StaticAudioTrackState {
     // do not define constructors, destructors, or virtual methods
-    size_t  mLoopStart;
-    size_t  mLoopEnd;
-    int     mLoopCount;
+
+    // These fields should both be size_t, but since they are located in shared memory we
+    // force them to 32-bit.  The client and server may have different typedefs for size_t.
+    uint32_t    mLoopStart;
+    uint32_t    mLoopEnd;
+
+    int         mLoopCount;
 };
 
 }   // namespace android
diff --git a/libvideoeditor/lvpp/PreviewPlayer.cpp b/libvideoeditor/lvpp/PreviewPlayer.cpp
index 2bd9f84..b36fe0a 100755
--- a/libvideoeditor/lvpp/PreviewPlayer.cpp
+++ b/libvideoeditor/lvpp/PreviewPlayer.cpp
@@ -21,6 +21,7 @@
 
 #include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
+#include <media/IMediaHTTPService.h>
 #include <media/IMediaPlayerService.h>
 #include <media/stagefright/DataSource.h>
 #include <media/stagefright/MediaBuffer.h>
@@ -1160,7 +1161,8 @@
     sp<DataSource> dataSource;
     sp<MediaExtractor> extractor;
 
-    dataSource = DataSource::CreateFromURI(mUri.string(), NULL);
+    dataSource = DataSource::CreateFromURI(
+            NULL /* httpService */, mUri.string(), NULL);
 
     if (dataSource == NULL) {
         return UNKNOWN_ERROR;
diff --git a/libvideoeditor/lvpp/VideoEditorPlayer.cpp b/libvideoeditor/lvpp/VideoEditorPlayer.cpp
index 8d656c4..f9c3879 100755
--- a/libvideoeditor/lvpp/VideoEditorPlayer.cpp
+++ b/libvideoeditor/lvpp/VideoEditorPlayer.cpp
@@ -57,6 +57,7 @@
 
 
 status_t VideoEditorPlayer::setDataSource(
+        const sp<IMediaHTTPService> &httpService,
         const char *url, const KeyedVector<String8, String8> *headers) {
     ALOGI("setDataSource('%s')", url);
     if (headers != NULL) {
diff --git a/libvideoeditor/lvpp/VideoEditorPlayer.h b/libvideoeditor/lvpp/VideoEditorPlayer.h
index b8c1254..781e4bc 100755
--- a/libvideoeditor/lvpp/VideoEditorPlayer.h
+++ b/libvideoeditor/lvpp/VideoEditorPlayer.h
@@ -98,6 +98,7 @@
     virtual status_t initCheck();
 
     virtual status_t setDataSource(
+            const sp<IMediaHTTPService> &httpService,
             const char *url, const KeyedVector<String8, String8> *headers);
 
     virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
diff --git a/libvideoeditor/lvpp/VideoEditorPreviewController.cpp b/libvideoeditor/lvpp/VideoEditorPreviewController.cpp
index c3cd3d0..953f35a 100755
--- a/libvideoeditor/lvpp/VideoEditorPreviewController.cpp
+++ b/libvideoeditor/lvpp/VideoEditorPreviewController.cpp
@@ -19,6 +19,7 @@
 #include <utils/Log.h>
 
 #include <gui/Surface.h>
+#include <media/IMediaHTTPService.h>
 
 #include "VideoEditorAudioPlayer.h"
 #include "PreviewRenderer.h"
@@ -967,7 +968,8 @@
     ALOGV("preparePlayer: instance %d file %d", playerInstance, index);
 
     const char* fileName = (const char*) pController->mClipList[index]->pFile;
-    pController->mVePlayer[playerInstance]->setDataSource(fileName, NULL);
+    pController->mVePlayer[playerInstance]->setDataSource(
+            NULL /* httpService */, fileName, NULL);
 
     ALOGV("preparePlayer: setDataSource instance %s",
      (const char *)pController->mClipList[index]->pFile);
diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk
index 56e7787..fc4b2a5 100644
--- a/media/libmedia/Android.mk
+++ b/media/libmedia/Android.mk
@@ -25,6 +25,8 @@
     AudioRecord.cpp \
     AudioSystem.cpp \
     mediaplayer.cpp \
+    IMediaHTTPConnection.cpp \
+    IMediaHTTPService.cpp \
     IMediaLogService.cpp \
     IMediaPlayerService.cpp \
     IMediaPlayerClient.cpp \
@@ -44,7 +46,7 @@
     IAudioPolicyService.cpp \
     MediaScanner.cpp \
     MediaScannerClient.cpp \
-    autodetect.cpp \
+    CharacterEncodingDetector.cpp \
     IMediaDeathNotifier.cpp \
     MediaProfiles.cpp \
     IEffect.cpp \
@@ -65,7 +67,7 @@
 # Consider a separate a library for SingleStateQueueInstantiations.
 
 LOCAL_SHARED_LIBRARIES := \
-	libui liblog libcutils libutils libbinder libsonivox libicuuc libexpat \
+	libui liblog libcutils libutils libbinder libsonivox libicuuc libicui18n libexpat \
         libcamera_client libstagefright_foundation \
         libgui libdl libaudioutils
 
@@ -77,6 +79,7 @@
     $(call include-path-for, graphics corecg) \
     $(TOP)/frameworks/native/include/media/openmax \
     external/icu4c/common \
+    external/icu4c/i18n \
     $(call include-path-for, audio-effects) \
     $(call include-path-for, audio-utils)
 
diff --git a/media/libmedia/AudioEffect.cpp b/media/libmedia/AudioEffect.cpp
index 8dfffb3..35f6557 100644
--- a/media/libmedia/AudioEffect.cpp
+++ b/media/libmedia/AudioEffect.cpp
@@ -380,9 +380,9 @@
 }
 
 void AudioEffect::commandExecuted(uint32_t cmdCode,
-                                  uint32_t cmdSize,
+                                  uint32_t cmdSize __unused,
                                   void *cmdData,
-                                  uint32_t replySize,
+                                  uint32_t replySize __unused,
                                   void *replyData)
 {
     if (cmdData == NULL || replyData == NULL) {
diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp
index 666fafa..541cb51 100644
--- a/media/libmedia/AudioRecord.cpp
+++ b/media/libmedia/AudioRecord.cpp
@@ -52,7 +52,7 @@
     }
 
     if (size == 0) {
-        ALOGE("Unsupported configuration: sampleRate %u, format %d, channelMask %#x",
+        ALOGE("Unsupported configuration: sampleRate %u, format %#x, channelMask %#x",
             sampleRate, format, channelMask);
         return BAD_VALUE;
     }
@@ -71,7 +71,7 @@
 // ---------------------------------------------------------------------------
 
 AudioRecord::AudioRecord()
-    : mStatus(NO_INIT), mSessionId(0),
+    : mStatus(NO_INIT), mSessionId(AUDIO_SESSION_ALLOCATE),
       mPreviousPriority(ANDROID_PRIORITY_NORMAL), mPreviousSchedulingGroup(SP_DEFAULT)
 {
 }
@@ -87,8 +87,8 @@
         int notificationFrames,
         int sessionId,
         transfer_type transferType,
-        audio_input_flags_t flags)
-    : mStatus(NO_INIT), mSessionId(0),
+        audio_input_flags_t flags __unused)
+    : mStatus(NO_INIT), mSessionId(AUDIO_SESSION_ALLOCATE),
       mPreviousPriority(ANDROID_PRIORITY_NORMAL),
       mPreviousSchedulingGroup(SP_DEFAULT),
       mProxy(NULL)
@@ -191,12 +191,12 @@
 
     // validate parameters
     if (!audio_is_valid_format(format)) {
-        ALOGE("Invalid format %d", format);
+        ALOGE("Invalid format %#x", format);
         return BAD_VALUE;
     }
     // Temporary restriction: AudioFlinger currently supports 16-bit PCM only
     if (format != AUDIO_FORMAT_PCM_16_BIT) {
-        ALOGE("Format %d is not supported", format);
+        ALOGE("Format %#x is not supported", format);
         return BAD_VALUE;
     }
     mFormat = format;
@@ -228,12 +228,13 @@
         ALOGE("frameCount %u < minFrameCount %u", frameCount, minFrameCount);
         return BAD_VALUE;
     }
-    mFrameCount = frameCount;
+    // mFrameCount is initialized in openRecord_l
+    mReqFrameCount = frameCount;
 
     mNotificationFramesReq = notificationFrames;
     mNotificationFramesAct = 0;
 
-    if (sessionId == 0 ) {
+    if (sessionId == AUDIO_SESSION_ALLOCATE) {
         mSessionId = AudioSystem::newAudioSessionId();
     } else {
         mSessionId = sessionId;
@@ -244,7 +245,7 @@
 
     // create the IAudioRecord
     status = openRecord_l(0 /*epoch*/);
-    if (status) {
+    if (status != NO_ERROR) {
         return status;
     }
 
@@ -255,9 +256,6 @@
 
     mStatus = NO_ERROR;
 
-    // Update buffer size in case it has been limited by AudioFlinger during track creation
-    mFrameCount = mCblk->frameCount_;
-
     mActive = false;
     mCbf = cbf;
     mRefreshRemaining = true;
@@ -289,6 +287,9 @@
 
     // reset current position as seen by client to 0
     mProxy->setEpoch(mProxy->getEpoch() - mProxy->getPosition());
+    // force refresh of remaining frames by processAudioBuffer() as last
+    // read before stop could be partial.
+    mRefreshRemaining = true;
 
     mNewPosition = mProxy->getPosition() + mUpdatePeriod;
     int32_t flags = android_atomic_acquire_load(&mCblk->mFlags);
@@ -352,6 +353,7 @@
 
 status_t AudioRecord::setMarkerPosition(uint32_t marker)
 {
+    // The only purpose of setting marker position is to get a callback
     if (mCbf == NULL) {
         return INVALID_OPERATION;
     }
@@ -377,6 +379,7 @@
 
 status_t AudioRecord::setPositionUpdatePeriod(uint32_t updatePeriod)
 {
+    // The only purpose of setting position update period is to get a callback
     if (mCbf == NULL) {
         return INVALID_OPERATION;
     }
@@ -412,7 +415,7 @@
     return NO_ERROR;
 }
 
-unsigned int AudioRecord::getInputFramesLost() const
+uint32_t AudioRecord::getInputFramesLost() const
 {
     // no need to check mActive, because if inactive this will return 0, which is what we want
     return AudioSystem::getInputFramesLost(getInput());
@@ -447,11 +450,12 @@
     }
 
     mNotificationFramesAct = mNotificationFramesReq;
+    size_t frameCount = mReqFrameCount;
 
     if (!(mFlags & AUDIO_INPUT_FLAG_FAST)) {
         // Make sure that application is notified with sufficient margin before overrun
-        if (mNotificationFramesAct == 0 || mNotificationFramesAct > mFrameCount/2) {
-            mNotificationFramesAct = mFrameCount/2;
+        if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/2) {
+            mNotificationFramesAct = frameCount/2;
         }
     }
 
@@ -461,24 +465,31 @@
         ALOGE("Could not get audio input for record source %d", mInputSource);
         return BAD_VALUE;
     }
+    {
+    // Now that we have a reference to an I/O handle and have not yet handed it off to AudioFlinger,
+    // we must release it ourselves if anything goes wrong.
 
+    size_t temp = frameCount;   // temp may be replaced by a revised value of frameCount,
+                                // but we will still need the original value also
     int originalSessionId = mSessionId;
     sp<IAudioRecord> record = audioFlinger->openRecord(input,
                                                        mSampleRate, mFormat,
                                                        mChannelMask,
-                                                       mFrameCount,
+                                                       &temp,
                                                        &trackFlags,
                                                        tid,
                                                        &mSessionId,
                                                        &status);
-    ALOGE_IF(originalSessionId != 0 && mSessionId != originalSessionId,
+    ALOGE_IF(originalSessionId != AUDIO_SESSION_ALLOCATE && mSessionId != originalSessionId,
             "session ID changed from %d to %d", originalSessionId, mSessionId);
 
     if (record == 0 || status != NO_ERROR) {
         ALOGE("AudioFlinger could not create record track, status: %d", status);
-        AudioSystem::releaseInput(input);
-        return status;
+        goto release;
     }
+    // AudioFlinger now owns the reference to the I/O handle,
+    // so we are no longer responsible for releasing it.
+
     sp<IMemory> iMem = record->getCblk();
     if (iMem == 0) {
         ALOGE("Could not get control block");
@@ -493,11 +504,24 @@
         mAudioRecord->asBinder()->unlinkToDeath(mDeathNotifier, this);
         mDeathNotifier.clear();
     }
+
+    // We retain a copy of the I/O handle, but don't own the reference
     mInput = input;
     mAudioRecord = record;
     mCblkMemory = iMem;
     audio_track_cblk_t* cblk = static_cast<audio_track_cblk_t*>(iMemPointer);
     mCblk = cblk;
+    // note that temp is the (possibly revised) value of mFrameCount
+    if (temp < frameCount || (frameCount == 0 && temp == 0)) {
+        ALOGW("Requested frameCount %u but received frameCount %u", frameCount, temp);
+    }
+    frameCount = temp;
+    // If IAudioRecord is re-created, don't let the requested frameCount
+    // decrease.  This can confuse clients that cache frameCount().
+    if (frameCount > mReqFrameCount) {
+        mReqFrameCount = frameCount;
+    }
+
     // FIXME missing fast track frameCount logic
     mAwaitBoost = false;
     if (mFlags & AUDIO_INPUT_FLAG_FAST) {
@@ -521,6 +545,8 @@
     // starting address of buffers in shared memory
     void *buffers = (char*)cblk + sizeof(audio_track_cblk_t);
 
+    mFrameCount = frameCount;
+
     // update proxy
     mProxy = new AudioRecordClientProxy(cblk, buffers, mFrameCount, mFrameSize);
     mProxy->setEpoch(epoch);
@@ -530,6 +556,14 @@
     mAudioRecord->asBinder()->linkToDeath(mDeathNotifier, this);
 
     return NO_ERROR;
+    }
+
+release:
+    AudioSystem::releaseInput(input);
+    if (status == NO_ERROR) {
+        status = NO_INIT;
+    }
+    return status;
 }
 
 status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, int32_t waitCount)
@@ -545,13 +579,13 @@
     }
 
     const struct timespec *requested;
+    struct timespec timeout;
     if (waitCount == -1) {
         requested = &ClientProxy::kForever;
     } else if (waitCount == 0) {
         requested = &ClientProxy::kNonBlocking;
     } else if (waitCount > 0) {
         long long ms = WAIT_PERIOD_MS * (long long) waitCount;
-        struct timespec timeout;
         timeout.tv_sec = ms / 1000;
         timeout.tv_nsec = (int) (ms % 1000) * 1000000;
         requested = &timeout;
@@ -591,6 +625,9 @@
                 if (newSequence == oldSequence) {
                     status = restoreRecord_l("obtainBuffer");
                     if (status != NO_ERROR) {
+                        buffer.mFrameCount = 0;
+                        buffer.mRaw = NULL;
+                        buffer.mNonContig = 0;
                         break;
                     }
                 }
@@ -692,7 +729,7 @@
 
 // -------------------------------------------------------------------------
 
-nsecs_t AudioRecord::processAudioBuffer(const sp<AudioRecordThread>& thread)
+nsecs_t AudioRecord::processAudioBuffer()
 {
     mLock.lock();
     if (mAwaitBoost) {
@@ -767,10 +804,10 @@
         mRetryOnPartialBuffer = false;
     }
     size_t misalignment = mProxy->getMisalignment();
-    int32_t sequence = mSequence;
+    uint32_t sequence = mSequence;
 
     // These fields don't need to be cached, because they are assigned only by set():
-    //      mTransfer, mCbf, mUserData, mSampleRate
+    //      mTransfer, mCbf, mUserData, mSampleRate, mFrameSize
 
     mLock.unlock();
 
@@ -844,8 +881,8 @@
                 "obtainBuffer() err=%d frameCount=%u", err, audioBuffer.frameCount);
         requested = &ClientProxy::kNonBlocking;
         size_t avail = audioBuffer.frameCount + nonContig;
-        ALOGV("obtainBuffer(%u) returned %u = %u + %u",
-                mRemainingFrames, avail, audioBuffer.frameCount, nonContig);
+        ALOGV("obtainBuffer(%u) returned %u = %u + %u err %d",
+                mRemainingFrames, avail, audioBuffer.frameCount, nonContig, err);
         if (err != NO_ERROR) {
             if (err == TIMED_OUT || err == WOULD_BLOCK || err == -EINTR) {
                 break;
@@ -954,7 +991,7 @@
 
 // =========================================================================
 
-void AudioRecord::DeathNotifier::binderDied(const wp<IBinder>& who)
+void AudioRecord::DeathNotifier::binderDied(const wp<IBinder>& who __unused)
 {
     sp<AudioRecord> audioRecord = mAudioRecord.promote();
     if (audioRecord != 0) {
@@ -966,7 +1003,8 @@
 // =========================================================================
 
 AudioRecord::AudioRecordThread::AudioRecordThread(AudioRecord& receiver, bool bCanCallJava)
-    : Thread(bCanCallJava), mReceiver(receiver), mPaused(true), mPausedInt(false), mPausedNs(0LL)
+    : Thread(bCanCallJava), mReceiver(receiver), mPaused(true), mPausedInt(false), mPausedNs(0LL),
+      mIgnoreNextPausedInt(false)
 {
 }
 
@@ -983,6 +1021,10 @@
             // caller will check for exitPending()
             return true;
         }
+        if (mIgnoreNextPausedInt) {
+            mIgnoreNextPausedInt = false;
+            mPausedInt = false;
+        }
         if (mPausedInt) {
             if (mPausedNs > 0) {
                 (void) mMyCond.waitRelative(mMyLock, mPausedNs);
@@ -993,7 +1035,7 @@
             return true;
         }
     }
-    nsecs_t ns =  mReceiver.processAudioBuffer(this);
+    nsecs_t ns =  mReceiver.processAudioBuffer();
     switch (ns) {
     case 0:
         return true;
@@ -1017,12 +1059,7 @@
 {
     // must be in this order to avoid a race condition
     Thread::requestExit();
-    AutoMutex _l(mMyLock);
-    if (mPaused || mPausedInt) {
-        mPaused = false;
-        mPausedInt = false;
-        mMyCond.signal();
-    }
+    resume();
 }
 
 void AudioRecord::AudioRecordThread::pause()
@@ -1034,6 +1071,7 @@
 void AudioRecord::AudioRecordThread::resume()
 {
     AutoMutex _l(mMyLock);
+    mIgnoreNextPausedInt = true;
     if (mPaused || mPausedInt) {
         mPaused = false;
         mPausedInt = false;
diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp
index cc5b810..87f4b4c 100644
--- a/media/libmedia/AudioSystem.cpp
+++ b/media/libmedia/AudioSystem.cpp
@@ -40,10 +40,10 @@
 DefaultKeyedVector<audio_io_handle_t, AudioSystem::OutputDescriptor *> AudioSystem::gOutputs(0);
 
 // Cached values for recording queries, all protected by gLock
-uint32_t AudioSystem::gPrevInSamplingRate = 16000;
-audio_format_t AudioSystem::gPrevInFormat = AUDIO_FORMAT_PCM_16_BIT;
-audio_channel_mask_t AudioSystem::gPrevInChannelMask = AUDIO_CHANNEL_IN_MONO;
-size_t AudioSystem::gInBuffSize = 0;
+uint32_t AudioSystem::gPrevInSamplingRate;
+audio_format_t AudioSystem::gPrevInFormat;
+audio_channel_mask_t AudioSystem::gPrevInChannelMask;
+size_t AudioSystem::gInBuffSize = 0;    // zero indicates cache is invalid
 
 
 // establish binder interface to AudioFlinger service
@@ -84,13 +84,15 @@
     return DEAD_OBJECT;
 }
 
-status_t AudioSystem::muteMicrophone(bool state) {
+status_t AudioSystem::muteMicrophone(bool state)
+{
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->setMicMute(state);
 }
 
-status_t AudioSystem::isMicrophoneMuted(bool* state) {
+status_t AudioSystem::isMicrophoneMuted(bool* state)
+{
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     *state = af->getMicMute();
@@ -175,13 +177,15 @@
     return af->setMode(mode);
 }
 
-status_t AudioSystem::setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs) {
+status_t AudioSystem::setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs)
+{
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->setParameters(ioHandle, keyValuePairs);
 }
 
-String8 AudioSystem::getParameters(audio_io_handle_t ioHandle, const String8& keys) {
+String8 AudioSystem::getParameters(audio_io_handle_t ioHandle, const String8& keys)
+{
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     String8 result = String8("");
     if (af == 0) return result;
@@ -190,6 +194,16 @@
     return result;
 }
 
+status_t AudioSystem::setParameters(const String8& keyValuePairs)
+{
+    return setParameters((audio_io_handle_t) 0, keyValuePairs);
+}
+
+String8 AudioSystem::getParameters(const String8& keys)
+{
+    return getParameters((audio_io_handle_t) 0, keys);
+}
+
 // convert volume steps to natural log scale
 
 // change this value to change volume scaling
@@ -249,6 +263,11 @@
         *samplingRate = outputDesc->samplingRate;
         gLock.unlock();
     }
+    if (*samplingRate == 0) {
+        ALOGE("AudioSystem::getSamplingRate failed for output %d stream type %d",
+                output, streamType);
+        return BAD_VALUE;
+    }
 
     ALOGV("getSamplingRate() streamType %d, output %d, sampling rate %u", streamType, output,
             *samplingRate);
@@ -289,6 +308,11 @@
         *frameCount = outputDesc->frameCount;
         gLock.unlock();
     }
+    if (*frameCount == 0) {
+        ALOGE("AudioSystem::getFrameCount failed for output %d stream type %d",
+                output, streamType);
+        return BAD_VALUE;
+    }
 
     ALOGV("getFrameCount() streamType %d, output %d, frameCount %d", streamType, output,
             *frameCount);
@@ -313,7 +337,7 @@
 }
 
 status_t AudioSystem::getLatency(audio_io_handle_t output,
-                                 audio_stream_type_t streamType,
+                                 audio_stream_type_t streamType __unused,
                                  uint32_t* latency)
 {
     OutputDescriptor *outputDesc;
@@ -349,6 +373,12 @@
             return PERMISSION_DENIED;
         }
         inBuffSize = af->getInputBufferSize(sampleRate, format, channelMask);
+        if (inBuffSize == 0) {
+            ALOGE("AudioSystem::getInputBufferSize failed sampleRate %d format %#x channelMask %x",
+                    sampleRate, format, channelMask);
+            return BAD_VALUE;
+        }
+        // A benign race is possible here: we could overwrite a fresher cache entry
         gLock.lock();
         // save the request params
         gPrevInSamplingRate = sampleRate;
@@ -387,9 +417,10 @@
     return af->getRenderPosition(halFrames, dspFrames, output);
 }
 
-size_t AudioSystem::getInputFramesLost(audio_io_handle_t ioHandle) {
+uint32_t AudioSystem::getInputFramesLost(audio_io_handle_t ioHandle)
+{
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
-    unsigned int result = 0;
+    uint32_t result = 0;
     if (af == 0) return result;
     if (ioHandle == 0) return result;
 
@@ -397,20 +428,23 @@
     return result;
 }
 
-int AudioSystem::newAudioSessionId() {
+int AudioSystem::newAudioSessionId()
+{
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return 0;
     return af->newAudioSessionId();
 }
 
-void AudioSystem::acquireAudioSessionId(int audioSession) {
+void AudioSystem::acquireAudioSessionId(int audioSession)
+{
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af != 0) {
         af->acquireAudioSessionId(audioSession);
     }
 }
 
-void AudioSystem::releaseAudioSessionId(int audioSession) {
+void AudioSystem::releaseAudioSessionId(int audioSession)
+{
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af != 0) {
         af->releaseAudioSessionId(audioSession);
@@ -419,7 +453,8 @@
 
 // ---------------------------------------------------------------------------
 
-void AudioSystem::AudioFlingerClient::binderDied(const wp<IBinder>& who) {
+void AudioSystem::AudioFlingerClient::binderDied(const wp<IBinder>& who __unused)
+{
     Mutex::Autolock _l(AudioSystem::gLock);
 
     AudioSystem::gAudioFlinger.clear();
@@ -455,7 +490,7 @@
 
         OutputDescriptor *outputDesc =  new OutputDescriptor(*desc);
         gOutputs.add(ioHandle, outputDesc);
-        ALOGV("ioConfigChanged() new output samplingRate %u, format %d channel mask %#x frameCount %u "
+        ALOGV("ioConfigChanged() new output samplingRate %u, format %#x channel mask %#x frameCount %u "
                 "latency %d",
                 outputDesc->samplingRate, outputDesc->format, outputDesc->channelMask,
                 outputDesc->frameCount, outputDesc->latency);
@@ -479,7 +514,7 @@
         if (param2 == NULL) break;
         desc = (const OutputDescriptor *)param2;
 
-        ALOGV("ioConfigChanged() new config for output %d samplingRate %u, format %d channel mask %#x "
+        ALOGV("ioConfigChanged() new config for output %d samplingRate %u, format %#x channel mask %#x "
                 "frameCount %d latency %d",
                 ioHandle, desc->samplingRate, desc->format,
                 desc->channelMask, desc->frameCount, desc->latency);
@@ -496,12 +531,14 @@
     }
 }
 
-void AudioSystem::setErrorCallback(audio_error_callback cb) {
+void AudioSystem::setErrorCallback(audio_error_callback cb)
+{
     Mutex::Autolock _l(gLock);
     gAudioErrorCallback = cb;
 }
 
-bool AudioSystem::routedToA2dpOutput(audio_stream_type_t streamType) {
+bool AudioSystem::routedToA2dpOutput(audio_stream_type_t streamType)
+{
     switch (streamType) {
     case AUDIO_STREAM_MUSIC:
     case AUDIO_STREAM_VOICE_CALL:
@@ -709,7 +746,8 @@
 audio_io_handle_t AudioSystem::getOutputForEffect(const effect_descriptor_t *desc)
 {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
-    if (aps == 0) return PERMISSION_DENIED;
+    // FIXME change return type to status_t, and return PERMISSION_DENIED here
+    if (aps == 0) return 0;
     return aps->getOutputForEffect(desc);
 }
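
The FIXME above points out that returning 0 (an invalid I/O handle) on failure loses the error code. A sketch of the signature change the comment suggests, not part of this patch, might look like:

    // Hypothetical future signature implied by the FIXME: report errors via status_t
    // and hand the I/O handle back through an out-parameter instead.
    status_t getOutputForEffect(const effect_descriptor_t *desc, audio_io_handle_t *output);
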
 
@@ -804,7 +842,8 @@
 
 // ---------------------------------------------------------------------------
 
-void AudioSystem::AudioPolicyServiceClient::binderDied(const wp<IBinder>& who) {
+void AudioSystem::AudioPolicyServiceClient::binderDied(const wp<IBinder>& who __unused)
+{
     Mutex::Autolock _l(AudioSystem::gLock);
     AudioSystem::gAudioPolicyService.clear();
 
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index a9d6993..63eaf1a 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -44,9 +44,6 @@
         return BAD_VALUE;
     }
 
-    // default to 0 in case of error
-    *frameCount = 0;
-
     // FIXME merge with similar code in createTrack_l(), except we're missing
     //       some information here that is available in createTrack_l():
     //          audio_io_handle_t output
@@ -54,16 +51,26 @@
     //          audio_channel_mask_t channelMask
     //          audio_output_flags_t flags
     uint32_t afSampleRate;
-    if (AudioSystem::getOutputSamplingRate(&afSampleRate, streamType) != NO_ERROR) {
-        return NO_INIT;
+    status_t status;
+    status = AudioSystem::getOutputSamplingRate(&afSampleRate, streamType);
+    if (status != NO_ERROR) {
+        ALOGE("Unable to query output sample rate for stream type %d; status %d",
+                streamType, status);
+        return status;
     }
     size_t afFrameCount;
-    if (AudioSystem::getOutputFrameCount(&afFrameCount, streamType) != NO_ERROR) {
-        return NO_INIT;
+    status = AudioSystem::getOutputFrameCount(&afFrameCount, streamType);
+    if (status != NO_ERROR) {
+        ALOGE("Unable to query output frame count for stream type %d; status %d",
+                streamType, status);
+        return status;
     }
     uint32_t afLatency;
-    if (AudioSystem::getOutputLatency(&afLatency, streamType) != NO_ERROR) {
-        return NO_INIT;
+    status = AudioSystem::getOutputLatency(&afLatency, streamType);
+    if (status != NO_ERROR) {
+        ALOGE("Unable to query output latency for stream type %d; status %d",
+                streamType, status);
+        return status;
     }
 
     // Ensure that buffer depth covers at least audio hardware latency
@@ -74,6 +81,13 @@
 
     *frameCount = (sampleRate == 0) ? afFrameCount * minBufCount :
             afFrameCount * minBufCount * sampleRate / afSampleRate;
+    // The formula above should always produce a non-zero value, but return an error
+    // in the unlikely event that it does not, as that's part of the API contract.
+    if (*frameCount == 0) {
+        ALOGE("AudioTrack::getMinFrameCount failed for streamType %d, sampleRate %d",
+                streamType, sampleRate);
+        return BAD_VALUE;
+    }
     ALOGV("getMinFrameCount=%d: afFrameCount=%d, minBufCount=%d, afSampleRate=%d, afLatency=%d",
             *frameCount, afFrameCount, minBufCount, afSampleRate, afLatency);
     return NO_ERROR;
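
To make the frame-count formula above concrete, here is a worked example with assumed hardware values (the numbers are illustrative; minBufCount is derived from afLatency in code elided from this hunk):

    // Assumed: afFrameCount = 960, afSampleRate = 48000, minBufCount = 2.
    // Client requests sampleRate = 44100:
    //   *frameCount = 960 * 2 * 44100 / 48000 = 1764 frames
    // Client passes sampleRate = 0 (use the output's native rate):
    //   *frameCount = 960 * 2 = 1920 frames
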
@@ -208,6 +222,7 @@
         ALOGE("Invalid transfer type %d", transferType);
         return BAD_VALUE;
     }
+    mSharedBuffer = sharedBuffer;
     mTransfer = transferType;
 
     // FIXME "int" here is legacy and will be replaced by size_t later
@@ -230,19 +245,24 @@
         return INVALID_OPERATION;
     }
 
-    mOutput = 0;
-
     // handle default values first.
     if (streamType == AUDIO_STREAM_DEFAULT) {
         streamType = AUDIO_STREAM_MUSIC;
     }
+    if (uint32_t(streamType) >= AUDIO_STREAM_CNT) {
+        ALOGE("Invalid stream type %d", streamType);
+        return BAD_VALUE;
+    }
+    mStreamType = streamType;
 
+    status_t status;
     if (sampleRate == 0) {
-        uint32_t afSampleRate;
-        if (AudioSystem::getOutputSamplingRate(&afSampleRate, streamType) != NO_ERROR) {
-            return NO_INIT;
+        status = AudioSystem::getOutputSamplingRate(&sampleRate, streamType);
+        if (status != NO_ERROR) {
+            ALOGE("Could not get output sample rate for stream type %d; status %d",
+                    streamType, status);
+            return status;
         }
-        sampleRate = afSampleRate;
     }
     mSampleRate = sampleRate;
 
@@ -250,13 +270,16 @@
     if (format == AUDIO_FORMAT_DEFAULT) {
         format = AUDIO_FORMAT_PCM_16_BIT;
     }
-    if (channelMask == 0) {
-        channelMask = AUDIO_CHANNEL_OUT_STEREO;
-    }
 
     // validate parameters
     if (!audio_is_valid_format(format)) {
-        ALOGE("Invalid format %d", format);
+        ALOGE("Invalid format %#x", format);
+        return BAD_VALUE;
+    }
+    mFormat = format;
+
+    if (!audio_is_output_channel(channelMask)) {
+        ALOGE("Invalid channel mask %#x", channelMask);
         return BAD_VALUE;
     }
 
@@ -282,10 +305,6 @@
         flags = (audio_output_flags_t)(flags &~AUDIO_OUTPUT_FLAG_DEEP_BUFFER);
     }
 
-    if (!audio_is_output_channel(channelMask)) {
-        ALOGE("Invalid channel mask %#x", channelMask);
-        return BAD_VALUE;
-    }
     mChannelMask = channelMask;
     uint32_t channelCount = popcount(channelMask);
     mChannelCount = channelCount;
@@ -298,21 +317,20 @@
         mFrameSizeAF = sizeof(uint8_t);
     }
 
-    audio_io_handle_t output = AudioSystem::getOutput(
-                                    streamType,
-                                    sampleRate, format, channelMask,
-                                    flags,
-                                    offloadInfo);
-
-    if (output == 0) {
-        ALOGE("Could not get audio output for stream type %d", streamType);
-        return BAD_VALUE;
+    // Make copy of input parameter offloadInfo so that in the future:
+    //  (a) createTrack_l doesn't need it as an input parameter
+    //  (b) we can support re-creation of offloaded tracks
+    if (offloadInfo != NULL) {
+        mOffloadInfoCopy = *offloadInfo;
+        mOffloadInfo = &mOffloadInfoCopy;
+    } else {
+        mOffloadInfo = NULL;
     }
 
     mVolume[LEFT] = 1.0f;
     mVolume[RIGHT] = 1.0f;
     mSendLevel = 0.0f;
-    mFrameCount = frameCount;
+    // mFrameCount is initialized in createTrack_l
     mReqFrameCount = frameCount;
     mNotificationFramesReq = notificationFrames;
     mNotificationFramesAct = 0;
@@ -332,14 +350,7 @@
     }
 
     // create the IAudioTrack
-    status_t status = createTrack_l(streamType,
-                                  sampleRate,
-                                  format,
-                                  frameCount,
-                                  flags,
-                                  sharedBuffer,
-                                  output,
-                                  0 /*epoch*/);
+    status = createTrack_l(0 /*epoch*/);
 
     if (status != NO_ERROR) {
         if (mAudioTrackThread != 0) {
@@ -347,17 +358,20 @@
             mAudioTrackThread->requestExitAndWait();
             mAudioTrackThread.clear();
         }
+        // Use of direct and offloaded output streams is ref counted by audio policy manager.
+#if 0   // FIXME This should no longer be needed
         //Use of direct and offloaded output streams is ref counted by audio policy manager.
         // As getOutput was called above and resulted in an output stream to be opened,
         // we need to release it.
-        AudioSystem::releaseOutput(output);
+        if (mOutput != 0) {
+            AudioSystem::releaseOutput(mOutput);
+            mOutput = 0;
+        }
+#endif
         return status;
     }
 
     mStatus = NO_ERROR;
-    mStreamType = streamType;
-    mFormat = format;
-    mSharedBuffer = sharedBuffer;
     mState = STATE_STOPPED;
     mUserData = user;
     mLoopPeriod = 0;
@@ -369,7 +383,6 @@
     mSequence = 1;
     mObservedSequence = mSequence;
     mInUnderrun = false;
-    mOutput = output;
 
     return NO_ERROR;
 }
@@ -445,12 +458,11 @@
 void AudioTrack::stop()
 {
     AutoMutex lock(mLock);
-    // FIXME pause then stop should not be a nop
-    if (mState != STATE_ACTIVE) {
+    if (mState != STATE_ACTIVE && mState != STATE_PAUSED) {
         return;
     }
 
-    if (isOffloaded()) {
+    if (isOffloaded_l()) {
         mState = STATE_STOPPING;
     } else {
         mState = STATE_STOPPED;
@@ -472,7 +484,7 @@
 
     sp<AudioTrackThread> t = mAudioTrackThread;
     if (t != 0) {
-        if (!isOffloaded()) {
+        if (!isOffloaded_l()) {
             t->pause();
         }
     } else {
@@ -510,7 +522,7 @@
     mRefreshRemaining = true;
 
     mState = STATE_FLUSHED;
-    if (isOffloaded()) {
+    if (isOffloaded_l()) {
         mProxy->interrupt();
     }
     mProxy->flush();
@@ -543,7 +555,7 @@
 
     mProxy->setVolumeLR((uint32_t(uint16_t(right * 0x1000)) << 16) | uint16_t(left * 0x1000));
 
-    if (isOffloaded()) {
+    if (isOffloaded_l()) {
         mAudioTrack->signal();
     }
     return NO_ERROR;
@@ -607,7 +619,7 @@
     // sample rate can be updated during playback by the offloaded decoder so we need to
     // query the HAL and update if needed.
 // FIXME use Proxy return channel to update the rate from server and avoid polling here
-    if (isOffloaded()) {
+    if (isOffloaded_l()) {
         if (mOutput != 0) {
             uint32_t sampleRate = 0;
             status_t status = AudioSystem::getSamplingRate(mOutput, mStreamType, &sampleRate);
@@ -691,6 +703,7 @@
     AutoMutex lock(mLock);
     mNewPosition = mProxy->getPosition() + updatePeriod;
     mUpdatePeriod = updatePeriod;
+
     return NO_ERROR;
 }
 
@@ -744,7 +757,7 @@
     }
 
     AutoMutex lock(mLock);
-    if (isOffloaded()) {
+    if (isOffloaded_l()) {
         uint32_t dspFrames = 0;
 
         if (mOutput != 0) {
@@ -793,23 +806,12 @@
     return NO_ERROR;
 }
 
-audio_io_handle_t AudioTrack::getOutput()
+audio_io_handle_t AudioTrack::getOutput() const
 {
     AutoMutex lock(mLock);
     return mOutput;
 }
 
-// must be called with mLock held
-audio_io_handle_t AudioTrack::getOutput_l()
-{
-    if (mOutput) {
-        return mOutput;
-    } else {
-        return AudioSystem::getOutput(mStreamType,
-                                      mSampleRate, mFormat, mChannelMask, mFlags);
-    }
-}
-
 status_t AudioTrack::attachAuxEffect(int effectId)
 {
     AutoMutex lock(mLock);
@@ -823,15 +825,7 @@
 // -------------------------------------------------------------------------
 
 // must be called with mLock held
-status_t AudioTrack::createTrack_l(
-        audio_stream_type_t streamType,
-        uint32_t sampleRate,
-        audio_format_t format,
-        size_t frameCount,
-        audio_output_flags_t flags,
-        const sp<IMemory>& sharedBuffer,
-        audio_io_handle_t output,
-        size_t epoch)
+status_t AudioTrack::createTrack_l(size_t epoch)
 {
     status_t status;
     const sp<IAudioFlinger>& audioFlinger = AudioSystem::get_audio_flinger();
@@ -840,50 +834,57 @@
         return NO_INIT;
     }
 
+    audio_io_handle_t output = AudioSystem::getOutput(mStreamType, mSampleRate, mFormat,
+            mChannelMask, mFlags, mOffloadInfo);
+    if (output == 0) {
+        ALOGE("Could not get audio output for stream type %d, sample rate %u, format %#x, "
+              "channel mask %#x, flags %#x",
+              mStreamType, mSampleRate, mFormat, mChannelMask, mFlags);
+        return BAD_VALUE;
+    }
+    {
+    // Now that we have a reference to an I/O handle and have not yet handed it off to AudioFlinger,
+    // we must release it ourselves if anything goes wrong.
+
     // Not all of these values are needed under all conditions, but it is easier to get them all
 
     uint32_t afLatency;
-    status = AudioSystem::getLatency(output, streamType, &afLatency);
+    status = AudioSystem::getLatency(output, mStreamType, &afLatency);
     if (status != NO_ERROR) {
         ALOGE("getLatency(%d) failed status %d", output, status);
-        return NO_INIT;
+        goto release;
     }
 
     size_t afFrameCount;
-    status = AudioSystem::getFrameCount(output, streamType, &afFrameCount);
+    status = AudioSystem::getFrameCount(output, mStreamType, &afFrameCount);
     if (status != NO_ERROR) {
-        ALOGE("getFrameCount(output=%d, streamType=%d) status %d", output, streamType, status);
-        return NO_INIT;
+        ALOGE("getFrameCount(output=%d, streamType=%d) status %d", output, mStreamType, status);
+        goto release;
     }
 
     uint32_t afSampleRate;
-    status = AudioSystem::getSamplingRate(output, streamType, &afSampleRate);
+    status = AudioSystem::getSamplingRate(output, mStreamType, &afSampleRate);
     if (status != NO_ERROR) {
-        ALOGE("getSamplingRate(output=%d, streamType=%d) status %d", output, streamType, status);
-        return NO_INIT;
+        ALOGE("getSamplingRate(output=%d, streamType=%d) status %d", output, mStreamType, status);
+        goto release;
     }
 
     // Client decides whether the track is TIMED (see below), but can only express a preference
     // for FAST.  Server will perform additional tests.
-    if ((flags & AUDIO_OUTPUT_FLAG_FAST) && !(
+    if ((mFlags & AUDIO_OUTPUT_FLAG_FAST) && !((
             // either of these use cases:
             // use case 1: shared buffer
-            (sharedBuffer != 0) ||
+            (mSharedBuffer != 0) ||
             // use case 2: callback handler
-            (mCbf != NULL))) {
+            (mCbf != NULL)) &&
+            // matching sample rate
+            (mSampleRate == afSampleRate))) {
         ALOGW("AUDIO_OUTPUT_FLAG_FAST denied by client");
         // once denied, do not request again if IAudioTrack is re-created
-        flags = (audio_output_flags_t) (flags & ~AUDIO_OUTPUT_FLAG_FAST);
-        mFlags = flags;
+        mFlags = (audio_output_flags_t) (mFlags & ~AUDIO_OUTPUT_FLAG_FAST);
     }
     ALOGV("createTrack_l() output %d afLatency %d", output, afLatency);
 
-    if ((flags & AUDIO_OUTPUT_FLAG_FAST) && sampleRate != afSampleRate) {
-        ALOGW("AUDIO_OUTPUT_FLAG_FAST denied by client due to mismatching sample rate (%d vs %d)",
-              sampleRate, afSampleRate);
-        flags = (audio_output_flags_t) (flags & ~AUDIO_OUTPUT_FLAG_FAST);
-    }
-
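
The reworked condition above folds the standalone sample-rate check (removed just above) into the eligibility test. Written out with the member names used in this function, the intent is roughly (a sketch, not the literal code):

    // Client-side FAST eligibility after this change:
    bool fastEligible = ((mSharedBuffer != 0) || (mCbf != NULL))   // static buffer or callback
                        && (mSampleRate == afSampleRate);          // no resampling allowed
    if ((mFlags & AUDIO_OUTPUT_FLAG_FAST) && !fastEligible) {
        // once denied, do not request again if IAudioTrack is re-created
        mFlags = (audio_output_flags_t) (mFlags & ~AUDIO_OUTPUT_FLAG_FAST);
    }
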
     // The client's AudioTrack buffer is divided into n parts for purpose of wakeup by server, where
     //  n = 1   fast track with single buffering; nBuffering is ignored
     //  n = 2   fast track with double buffering
@@ -891,43 +892,45 @@
     //  n = 3   normal track, with sample rate conversion
     //          (pessimistic; some non-1:1 conversion ratios don't actually need triple-buffering)
     //  n > 3   very high latency or very small notification interval; nBuffering is ignored
-    const uint32_t nBuffering = (sampleRate == afSampleRate) ? 2 : 3;
+    const uint32_t nBuffering = (mSampleRate == afSampleRate) ? 2 : 3;
 
     mNotificationFramesAct = mNotificationFramesReq;
 
-    if (!audio_is_linear_pcm(format)) {
+    size_t frameCount = mReqFrameCount;
+    if (!audio_is_linear_pcm(mFormat)) {
 
-        if (sharedBuffer != 0) {
+        if (mSharedBuffer != 0) {
             // Same comment as below about ignoring frameCount parameter for set()
-            frameCount = sharedBuffer->size();
+            frameCount = mSharedBuffer->size();
         } else if (frameCount == 0) {
             frameCount = afFrameCount;
         }
         if (mNotificationFramesAct != frameCount) {
             mNotificationFramesAct = frameCount;
         }
-    } else if (sharedBuffer != 0) {
+    } else if (mSharedBuffer != 0) {
 
         // Ensure that buffer alignment matches channel count
         // 8-bit data in shared memory is not currently supported by AudioFlinger
-        size_t alignment = /* format == AUDIO_FORMAT_PCM_8_BIT ? 1 : */ 2;
+        size_t alignment = /* mFormat == AUDIO_FORMAT_PCM_8_BIT ? 1 : */ 2;
         if (mChannelCount > 1) {
             // More than 2 channels does not require stronger alignment than stereo
             alignment <<= 1;
         }
-        if (((uintptr_t)sharedBuffer->pointer() & (alignment - 1)) != 0) {
+        if (((uintptr_t)mSharedBuffer->pointer() & (alignment - 1)) != 0) {
             ALOGE("Invalid buffer alignment: address %p, channel count %u",
-                    sharedBuffer->pointer(), mChannelCount);
-            return BAD_VALUE;
+                    mSharedBuffer->pointer(), mChannelCount);
+            status = BAD_VALUE;
+            goto release;
         }
 
         // When initializing a shared buffer AudioTrack via constructors,
         // there's no frameCount parameter.
         // But when initializing a shared buffer AudioTrack via set(),
         // there _is_ a frameCount parameter.  We silently ignore it.
-        frameCount = sharedBuffer->size()/mChannelCount/sizeof(int16_t);
+        frameCount = mSharedBuffer->size()/mChannelCount/sizeof(int16_t);
 
-    } else if (!(flags & AUDIO_OUTPUT_FLAG_FAST)) {
+    } else if (!(mFlags & AUDIO_OUTPUT_FLAG_FAST)) {
 
         // FIXME move these calculations and associated checks to server
 
@@ -939,10 +942,10 @@
             minBufCount = nBuffering;
         }
 
-        size_t minFrameCount = (afFrameCount*sampleRate*minBufCount)/afSampleRate;
+        size_t minFrameCount = (afFrameCount*mSampleRate*minBufCount)/afSampleRate;
         ALOGV("minFrameCount: %u, afFrameCount=%d, minBufCount=%d, sampleRate=%u, afSampleRate=%u"
                 ", afLatency=%d",
-                minFrameCount, afFrameCount, minBufCount, sampleRate, afSampleRate, afLatency);
+                minFrameCount, afFrameCount, minBufCount, mSampleRate, afSampleRate, afLatency);
 
         if (frameCount == 0) {
             frameCount = minFrameCount;
@@ -967,26 +970,28 @@
     }
 
     pid_t tid = -1;
-    if (flags & AUDIO_OUTPUT_FLAG_FAST) {
+    if (mFlags & AUDIO_OUTPUT_FLAG_FAST) {
         trackFlags |= IAudioFlinger::TRACK_FAST;
         if (mAudioTrackThread != 0) {
             tid = mAudioTrackThread->getTid();
         }
     }
 
-    if (flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) {
+    if (mFlags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) {
         trackFlags |= IAudioFlinger::TRACK_OFFLOAD;
     }
 
-    sp<IAudioTrack> track = audioFlinger->createTrack(streamType,
-                                                      sampleRate,
+    size_t temp = frameCount;   // temp may be replaced by a revised value of frameCount,
+                                // but we will still need the original value also
+    sp<IAudioTrack> track = audioFlinger->createTrack(mStreamType,
+                                                      mSampleRate,
                                                       // AudioFlinger only sees 16-bit PCM
-                                                      format == AUDIO_FORMAT_PCM_8_BIT ?
-                                                              AUDIO_FORMAT_PCM_16_BIT : format,
+                                                      mFormat == AUDIO_FORMAT_PCM_8_BIT ?
+                                                              AUDIO_FORMAT_PCM_16_BIT : mFormat,
                                                       mChannelMask,
-                                                      frameCount,
+                                                      &temp,
                                                       &trackFlags,
-                                                      sharedBuffer,
+                                                      mSharedBuffer,
                                                       output,
                                                       tid,
                                                       &mSessionId,
@@ -996,13 +1001,21 @@
 
     if (track == 0) {
         ALOGE("AudioFlinger could not create track, status: %d", status);
-        return status;
+        goto release;
     }
+    // AudioFlinger now owns the reference to the I/O handle,
+    // so we are no longer responsible for releasing it.
+
     sp<IMemory> iMem = track->getCblk();
     if (iMem == 0) {
         ALOGE("Could not get control block");
         return NO_INIT;
     }
+    void *iMemPointer = iMem->pointer();
+    if (iMemPointer == NULL) {
+        ALOGE("Could not get control block pointer");
+        return NO_INIT;
+    }
     // invariant that mAudioTrack != 0 is true only after set() returns successfully
     if (mAudioTrack != 0) {
         mAudioTrack->asBinder()->unlinkToDeath(mDeathNotifier, this);
@@ -1010,9 +1023,9 @@
     }
     mAudioTrack = track;
     mCblkMemory = iMem;
-    audio_track_cblk_t* cblk = static_cast<audio_track_cblk_t*>(iMem->pointer());
+    audio_track_cblk_t* cblk = static_cast<audio_track_cblk_t*>(iMemPointer);
     mCblk = cblk;
-    size_t temp = cblk->frameCount_;
+    // note that temp is the (possibly revised) value of frameCount
     if (temp < frameCount || (frameCount == 0 && temp == 0)) {
         // In current design, AudioTrack client checks and ensures frame count validity before
         // passing it to AudioFlinger so AudioFlinger should not return a different value except
@@ -1021,11 +1034,11 @@
     }
     frameCount = temp;
     mAwaitBoost = false;
-    if (flags & AUDIO_OUTPUT_FLAG_FAST) {
+    if (mFlags & AUDIO_OUTPUT_FLAG_FAST) {
         if (trackFlags & IAudioFlinger::TRACK_FAST) {
             ALOGV("AUDIO_OUTPUT_FLAG_FAST successful; frameCount %u", frameCount);
             mAwaitBoost = true;
-            if (sharedBuffer == 0) {
+            if (mSharedBuffer == 0) {
                 // Theoretically double-buffering is not required for fast tracks,
                 // due to tighter scheduling.  But in practice, to accommodate kernels with
                 // scheduling jitter, and apps with computation jitter, we use double-buffering.
@@ -1036,26 +1049,27 @@
         } else {
             ALOGV("AUDIO_OUTPUT_FLAG_FAST denied by server; frameCount %u", frameCount);
             // once denied, do not request again if IAudioTrack is re-created
-            flags = (audio_output_flags_t) (flags & ~AUDIO_OUTPUT_FLAG_FAST);
-            mFlags = flags;
-            if (sharedBuffer == 0) {
+            mFlags = (audio_output_flags_t) (mFlags & ~AUDIO_OUTPUT_FLAG_FAST);
+            if (mSharedBuffer == 0) {
                 if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/nBuffering) {
                     mNotificationFramesAct = frameCount/nBuffering;
                 }
             }
         }
     }
-    if (flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) {
+    if (mFlags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) {
         if (trackFlags & IAudioFlinger::TRACK_OFFLOAD) {
             ALOGV("AUDIO_OUTPUT_FLAG_OFFLOAD successful");
         } else {
             ALOGW("AUDIO_OUTPUT_FLAG_OFFLOAD denied by server");
-            flags = (audio_output_flags_t) (flags & ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD);
-            mFlags = flags;
-            return NO_INIT;
+            mFlags = (audio_output_flags_t) (mFlags & ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD);
+            // FIXME This is a warning, not an error, so don't return error status
+            //return NO_INIT;
         }
     }
 
+    // We retain a copy of the I/O handle, but don't own the reference
+    mOutput = output;
     mRefreshRemaining = true;
 
     // Starting address of buffers in shared memory.  If there is a shared buffer, buffers
@@ -1063,15 +1077,15 @@
     // immediately after the control block.  This address is for the mapping within client
     // address space.  AudioFlinger::TrackBase::mBuffer is for the server address space.
     void* buffers;
-    if (sharedBuffer == 0) {
+    if (mSharedBuffer == 0) {
         buffers = (char*)cblk + sizeof(audio_track_cblk_t);
     } else {
-        buffers = sharedBuffer->pointer();
+        buffers = mSharedBuffer->pointer();
     }
 
     mAudioTrack->attachAuxEffect(mAuxEffectId);
     // FIXME don't believe this lie
-    mLatency = afLatency + (1000*frameCount) / sampleRate;
+    mLatency = afLatency + (1000*frameCount) / mSampleRate;
     mFrameCount = frameCount;
     // If IAudioTrack is re-created, don't let the requested frameCount
     // decrease.  This can confuse clients that cache frameCount().
@@ -1080,7 +1094,7 @@
     }
 
     // update proxy
-    if (sharedBuffer == 0) {
+    if (mSharedBuffer == 0) {
         mStaticProxy.clear();
         mProxy = new AudioTrackClientProxy(cblk, buffers, frameCount, mFrameSizeAF);
     } else {
@@ -1098,6 +1112,14 @@
     mAudioTrack->asBinder()->linkToDeath(mDeathNotifier, this);
 
     return NO_ERROR;
+    }
+
+release:
+    AudioSystem::releaseOutput(output);
+    if (status == NO_ERROR) {
+        status = NO_INIT;
+    }
+    return status;
 }
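
createTrack_l() now acquires the I/O handle itself, so every early-exit path between getOutput() and the successful hand-off to AudioFlinger jumps to the release label above. Reduced to its essentials, the structure is (a sketch; doSomeSetup() is a hypothetical stand-in for the intermediate queries):

    status_t status;
    audio_io_handle_t output = AudioSystem::getOutput(/* stream, rate, format, ... */);
    if (output == 0) {
        return BAD_VALUE;                    // nothing acquired yet, nothing to release
    }
    status = doSomeSetup();                  // on failure here we still own the handle
    if (status != NO_ERROR) {
        goto release;
    }
    // ... hand the handle to AudioFlinger; from that point it owns the reference ...
    mOutput = output;                        // keep a copy of the handle, not the ownership
    return NO_ERROR;

    release:
    AudioSystem::releaseOutput(output);
    return status;
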
 
 status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount)
@@ -1113,13 +1135,13 @@
     }
 
     const struct timespec *requested;
+    struct timespec timeout;
     if (waitCount == -1) {
         requested = &ClientProxy::kForever;
     } else if (waitCount == 0) {
         requested = &ClientProxy::kNonBlocking;
     } else if (waitCount > 0) {
         long long ms = WAIT_PERIOD_MS * (long long) waitCount;
-        struct timespec timeout;
         timeout.tv_sec = ms / 1000;
         timeout.tv_nsec = (int) (ms % 1000) * 1000000;
         requested = &timeout;
@@ -1350,7 +1372,7 @@
 
 // -------------------------------------------------------------------------
 
-nsecs_t AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread)
+nsecs_t AudioTrack::processAudioBuffer()
 {
     // Currently the AudioTrack thread is not created if there are no callbacks.
     // Would it ever make sense to run the thread, even without callbacks?
@@ -1388,7 +1410,7 @@
         // for offloaded tracks restoreTrack_l() will just update the sequence and clear
         // AudioSystem cache. We should not exit here but after calling the callback so
         // that the upper layers can recreate the track
-        if (!isOffloaded() || (mSequence == mObservedSequence)) {
+        if (!isOffloaded_l() || (mSequence == mObservedSequence)) {
             status_t status = restoreTrack_l("processAudioBuffer");
             mLock.unlock();
             // Run again immediately, but with a new IAudioTrack
@@ -1609,7 +1631,6 @@
         size_t reqSize = audioBuffer.size;
         mCbf(EVENT_MORE_DATA, mUserData, &audioBuffer);
         size_t writtenSize = audioBuffer.size;
-        size_t writtenFrames = writtenSize / mFrameSize;
 
         // Sanity check on returned size
         if (ssize_t(writtenSize) < 0 || writtenSize > reqSize) {
@@ -1675,22 +1696,19 @@
 status_t AudioTrack::restoreTrack_l(const char *from)
 {
     ALOGW("dead IAudioTrack, %s, creating a new one from %s()",
-          isOffloaded() ? "Offloaded" : "PCM", from);
+          isOffloaded_l() ? "Offloaded" : "PCM", from);
     ++mSequence;
     status_t result;
 
     // refresh the audio configuration cache in this process to make sure we get new
-    // output parameters in getOutput_l() and createTrack_l()
+    // output parameters in createTrack_l()
     AudioSystem::clearAudioConfigCache();
 
-    if (isOffloaded()) {
+    if (isOffloaded_l()) {
+        // FIXME re-creation of offloaded tracks is not yet implemented
         return DEAD_OBJECT;
     }
 
-    // force new output query from audio policy manager;
-    mOutput = 0;
-    audio_io_handle_t output = getOutput_l();
-
     // if the new IAudioTrack is created, createTrack_l() will modify the
     // following member variables: mAudioTrack, mCblkMemory and mCblk.
     // It will also delete the strong references on previous IAudioTrack and IMemory
@@ -1698,14 +1716,7 @@
     // take the frames that will be lost by track recreation into account in saved position
     size_t position = mProxy->getPosition() + mProxy->getFramesFilled();
     size_t bufferPosition = mStaticProxy != NULL ? mStaticProxy->getBufferPosition() : 0;
-    result = createTrack_l(mStreamType,
-                           mSampleRate,
-                           mFormat,
-                           mReqFrameCount,  // so that frame count never goes down
-                           mFlags,
-                           mSharedBuffer,
-                           output,
-                           position /*epoch*/);
+    result = createTrack_l(position /*epoch*/);
 
     if (result == NO_ERROR) {
         // continue playback from last known position, but
@@ -1733,10 +1744,16 @@
         }
     }
     if (result != NO_ERROR) {
+        // Use of direct and offloaded output streams is ref counted by audio policy manager.
+#if 0   // FIXME This should no longer be needed
         //Use of direct and offloaded output streams is ref counted by audio policy manager.
         // As getOutput was called above and resulted in an output stream to be opened,
         // we need to release it.
-        AudioSystem::releaseOutput(output);
+        if (mOutput != 0) {
+            AudioSystem::releaseOutput(mOutput);
+            mOutput = 0;
+        }
+#endif
         ALOGW("restoreTrack_l() failed status %d", result);
         mState = STATE_STOPPED;
     }
@@ -1769,14 +1786,21 @@
 
 String8 AudioTrack::getParameters(const String8& keys)
 {
-    if (mOutput) {
-        return AudioSystem::getParameters(mOutput, keys);
+    audio_io_handle_t output = getOutput();
+    if (output != 0) {
+        return AudioSystem::getParameters(output, keys);
     } else {
         return String8::empty();
     }
 }
 
-status_t AudioTrack::dump(int fd, const Vector<String16>& args) const
+bool AudioTrack::isOffloaded() const
+{
+    AutoMutex lock(mLock);
+    return isOffloaded_l();
+}
+
+status_t AudioTrack::dump(int fd, const Vector<String16>& args __unused) const
 {
 
     const size_t SIZE = 256;
@@ -1806,7 +1830,7 @@
 
 // =========================================================================
 
-void AudioTrack::DeathNotifier::binderDied(const wp<IBinder>& who)
+void AudioTrack::DeathNotifier::binderDied(const wp<IBinder>& who __unused)
 {
     sp<AudioTrack> audioTrack = mAudioTrack.promote();
     if (audioTrack != 0) {
@@ -1850,7 +1874,7 @@
             return true;
         }
     }
-    nsecs_t ns = mReceiver.processAudioBuffer(this);
+    nsecs_t ns = mReceiver.processAudioBuffer();
     switch (ns) {
     case 0:
         return true;
diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp
index caa7900..fdd1a12 100644
--- a/media/libmedia/AudioTrackShared.cpp
+++ b/media/libmedia/AudioTrackShared.cpp
@@ -26,7 +26,7 @@
 namespace android {
 
 audio_track_cblk_t::audio_track_cblk_t()
-    : mServer(0), frameCount_(0), mFutex(0), mMinimum(0),
+    : mServer(0), mFutex(0), mMinimum(0),
     mVolumeLR(0x10001000), mSampleRate(0), mSendLevel(0), mFlags(0)
 {
     memset(&u, 0, sizeof(u));
@@ -475,9 +475,14 @@
 
 void StaticAudioTrackClientProxy::setLoop(size_t loopStart, size_t loopEnd, int loopCount)
 {
+    // This can only happen on a 64-bit client
+    if (loopStart > UINT32_MAX || loopEnd > UINT32_MAX) {
+        // FIXME Should return an error status
+        return;
+    }
     StaticAudioTrackState newState;
-    newState.mLoopStart = loopStart;
-    newState.mLoopEnd = loopEnd;
+    newState.mLoopStart = (uint32_t) loopStart;
+    newState.mLoopEnd = (uint32_t) loopEnd;
     newState.mLoopCount = loopCount;
     mBufferPosition = loopStart;
     (void) mMutator.push(newState);
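
The new guard matters because loopStart/loopEnd are size_t (64-bit on a 64-bit client) while the loop fields written into the shared state are 32-bit, as the casts above imply; an oversized value would otherwise wrap silently. A minimal illustration (assumed widths as in this diff):

    // On a 64-bit client, size_t can hold values that do not fit in uint32_t:
    size_t loopEnd = (size_t) UINT32_MAX + 1;   // 0x100000000
    uint32_t stored = (uint32_t) loopEnd;       // would wrap to 0 without the guard
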
@@ -487,7 +492,7 @@
 {
     size_t bufferPosition;
     if (mMutator.ack()) {
-        bufferPosition = mCblk->u.mStatic.mBufferPosition;
+        bufferPosition = (size_t) mCblk->u.mStatic.mBufferPosition;
         if (bufferPosition > mFrameCount) {
             bufferPosition = mFrameCount;
         }
@@ -622,7 +627,7 @@
     if (half == 0) {
         half = 1;
     }
-    size_t minimum = cblk->mMinimum;
+    size_t minimum = (size_t) cblk->mMinimum;
     if (minimum == 0) {
         minimum = mIsOut ? half : 1;
     } else if (minimum > half) {
@@ -760,12 +765,13 @@
             mIsShutdown = true;
             return (ssize_t) NO_INIT;
         }
-        mCblk->u.mStatic.mBufferPosition = position;
+        // This may overflow, but client is not supposed to rely on it
+        mCblk->u.mStatic.mBufferPosition = (uint32_t) position;
     }
     return (ssize_t) position;
 }
 
-status_t StaticAudioTrackServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush)
+status_t StaticAudioTrackServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush __unused)
 {
     if (mIsShutdown) {
         buffer->mFrameCount = 0;
@@ -836,7 +842,8 @@
     mPosition = newPosition;
 
     cblk->mServer += stepCount;
-    cblk->u.mStatic.mBufferPosition = newPosition;
+    // This may overflow, but client is not supposed to rely on it
+    cblk->u.mStatic.mBufferPosition = (uint32_t) newPosition;
     if (setFlags != 0) {
         (void) android_atomic_or(setFlags, &cblk->mFlags);
         // this would be a good place to wake a futex
@@ -847,7 +854,7 @@
     buffer->mNonContig = 0;
 }
 
-void StaticAudioTrackServerProxy::tallyUnderrunFrames(uint32_t frameCount)
+void StaticAudioTrackServerProxy::tallyUnderrunFrames(uint32_t frameCount __unused)
 {
     // Unlike AudioTrackServerProxy::tallyUnderrunFrames() used for streaming tracks,
     // we don't have a location to count underrun frames.  The underrun frame counter
diff --git a/media/libmedia/CharacterEncodingDetector.cpp b/media/libmedia/CharacterEncodingDetector.cpp
new file mode 100644
index 0000000..eb091ac
--- /dev/null
+++ b/media/libmedia/CharacterEncodingDetector.cpp
@@ -0,0 +1,364 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CharacterEncodingDetector"
+#include <utils/Log.h>
+
+#include "CharacterEncodingDetector.h"
+#include "CharacterEncodingDetectorTables.h"
+
+#include "utils/Vector.h"
+#include "StringArray.h"
+
+#include "unicode/ucnv.h"
+#include "unicode/ucsdet.h"
+#include "unicode/ustring.h"
+
+namespace android {
+
+CharacterEncodingDetector::CharacterEncodingDetector() {
+
+    UErrorCode status = U_ZERO_ERROR;
+    mUtf8Conv = ucnv_open("UTF-8", &status);
+    if (U_FAILURE(status)) {
+        ALOGE("could not create UConverter for UTF-8");
+        mUtf8Conv = NULL;
+    }
+}
+
+CharacterEncodingDetector::~CharacterEncodingDetector() {
+    ucnv_close(mUtf8Conv);
+}
+
+void CharacterEncodingDetector::addTag(const char *name, const char *value) {
+    mNames.push_back(name);
+    mValues.push_back(value);
+}
+
+size_t CharacterEncodingDetector::size() {
+    return mNames.size();
+}
+
+status_t CharacterEncodingDetector::getTag(int index, const char **name, const char**value) {
+    if (index >= mNames.size()) {
+        return BAD_VALUE;
+    }
+
+    *name = mNames.getEntry(index);
+    *value = mValues.getEntry(index);
+    return OK;
+}
+
+static bool isPrintableAscii(const char *value, size_t len) {
+    for (size_t i = 0; i < len; i++) {
+        if ((value[i] & 0x80) || value[i] < 0x20 || value[i] == 0x7f) {
+            return false;
+        }
+    }
+    return true;
+}
+
+void CharacterEncodingDetector::detectAndConvert() {
+
+    int size = mNames.size();
+    ALOGV("%d tags before conversion", size);
+    for (int i = 0; i < size; i++) {
+        ALOGV("%s: %s", mNames.getEntry(i), mValues.getEntry(i));
+    }
+
+    if (size && mUtf8Conv) {
+
+        UErrorCode status = U_ZERO_ERROR;
+        UCharsetDetector *csd = ucsdet_open(&status);
+        const UCharsetMatch *ucm;
+
+        // try combined detection of artist/album/title etc.
+        char buf[1024];
+        buf[0] = 0;
+        int idx;
+        for (int i = 0; i < size; i++) {
+            const char *name = mNames.getEntry(i);
+            const char *value = mValues.getEntry(i);
+            if (!isPrintableAscii(value, strlen(value)) && (
+                        !strcmp(name, "artist") ||
+                        !strcmp(name, "albumartist") ||
+                        !strcmp(name, "composer") ||
+                        !strcmp(name, "genre") ||
+                        !strcmp(name, "album") ||
+                        !strcmp(name, "title"))) {
+                strlcat(buf, value, sizeof(buf));
+                // separate tags by space so ICU's ngram detector can do its job
+                strlcat(buf, " ", sizeof(buf));
+            }
+        }
+        ucsdet_setText(csd, buf, strlen(buf), &status);
+
+        int32_t matches;
+        const UCharsetMatch** ucma = ucsdet_detectAll(csd, &matches, &status);
+        const char *combinedenc = "???";
+
+        const UCharsetMatch* bestCombinedMatch = getPreferred(buf, strlen(buf), ucma, matches);
+
+        if (bestCombinedMatch != NULL) {
+            combinedenc = ucsdet_getName(bestCombinedMatch, &status);
+        }
+
+        for (int i = 0; i < size; i++) {
+            const char *name = mNames.getEntry(i);
+            uint8_t* src = (uint8_t *)mValues.getEntry(i);
+            int len = strlen((char *)src);
+            uint8_t* dest = src;
+
+            ALOGV("@@@ checking %s", name);
+            const char *s = mValues.getEntry(i);
+            int32_t inputLength = strlen(s);
+            const char *enc;
+
+            if (!strcmp(name, "artist") ||
+                    !strcmp(name, "albumartist") ||
+                    !strcmp(name, "composer") ||
+                    !strcmp(name, "genre") ||
+                    !strcmp(name, "album") ||
+                    !strcmp(name, "title")) {
+                // use encoding determined from the combination of artist/album/title etc.
+                enc = combinedenc;
+            } else {
+                ucsdet_setText(csd, s, inputLength, &status);
+                ucm = ucsdet_detect(csd, &status);
+                if (!ucm) {
+                    mValues.setEntry(i, "???");
+                    continue;
+                }
+                enc = ucsdet_getName(ucm, &status);
+                ALOGV("@@@@ recognized charset: %s for %s confidence %d",
+                        enc, mNames.getEntry(i), ucsdet_getConfidence(ucm, &status));
+            }
+
+            if (strcmp(enc,"UTF-8") != 0) {
+                // only convert if the source encoding isn't already UTF-8
+                ALOGV("@@@ using converter %s for %s", enc, mNames.getEntry(i));
+                UConverter *conv = ucnv_open(enc, &status);
+                if (U_FAILURE(status)) {
+                    ALOGE("could not create UConverter for %s", enc);
+                    continue;
+                }
+
+                // convert from native encoding to UTF-8
+                const char* source = mValues.getEntry(i);
+                int targetLength = len * 3 + 1;
+                char* buffer = new char[targetLength];
+                // don't normally check for NULL, but in this case targetLength may be large
+                if (!buffer)
+                    break;
+                char* target = buffer;
+
+                ucnv_convertEx(mUtf8Conv, conv, &target, target + targetLength,
+                        &source, source + strlen(source),
+                        NULL, NULL, NULL, NULL, TRUE, TRUE, &status);
+
+                if (U_FAILURE(status)) {
+                    ALOGE("ucnv_convertEx failed: %d", status);
+                    mValues.setEntry(i, "???");
+                } else {
+                    // zero terminate
+                    *target = 0;
+                    mValues.setEntry(i, buffer);
+                }
+
+                delete[] buffer;
+
+                ucnv_close(conv);
+            }
+        }
+
+        for (int i = size - 1; i >= 0; --i) {
+            if (strlen(mValues.getEntry(i)) == 0) {
+                ALOGV("erasing %s because entry is empty", mNames.getEntry(i));
+                mNames.erase(i);
+                mValues.erase(i);
+            }
+        }
+
+        ucsdet_close(csd);
+    }
+}
+
+/*
+ * When ICU detects multiple encoding matches, apply additional heuristics to determine
+ * which one is the best match, since ICU can't always be trusted to make the right choice.
+ *
+ * What this method does is:
+ * - decode the input using each of the matches found
+ * - recalculate the starting confidence level for multibyte encodings using a different
+ *   algorithm and larger frequent character lists than ICU
+ * - devalue encoding where the conversion contains unlikely characters (symbols, reserved, etc)
+ * - pick the highest match
+ */
+const UCharsetMatch *CharacterEncodingDetector::getPreferred(
+        const char *input, size_t len, const UCharsetMatch** ucma, size_t nummatches) {
+
+    Vector<const UCharsetMatch*> matches;
+    UErrorCode status = U_ZERO_ERROR;
+
+    ALOGV("%d matches", nummatches);
+    for (size_t i = 0; i < nummatches; i++) {
+        const char *encname = ucsdet_getName(ucma[i], &status);
+        int confidence = ucsdet_getConfidence(ucma[i], &status);
+        ALOGV("%d: %s %d", i, encname, confidence);
+        matches.push_back(ucma[i]);
+    }
+
+    size_t num = matches.size();
+    if (num == 0) {
+        return NULL;
+    }
+    if (num == 1) {
+        return matches[0];
+    }
+
+    ALOGV("considering %d matches", num);
+
+    // keep track of how many "special" characters result when converting the input using each
+    // encoding
+    Vector<int> newconfidence;
+    for (size_t i = 0; i < num; i++) {
+        const uint16_t *freqdata = NULL;
+        float freqcoverage = 0;
+        status = U_ZERO_ERROR;
+        const char *encname = ucsdet_getName(matches[i], &status);
+        int confidence = ucsdet_getConfidence(matches[i], &status);
+        if (!strcmp("GB18030", encname)) {
+            freqdata = frequent_zhCN;
+            freqcoverage = frequent_zhCN_coverage;
+        } else if (!strcmp("Big5", encname)) {
+            freqdata = frequent_zhTW;
+            freqcoverage = frequent_zhTW_coverage;
+        } else if (!strcmp("EUC-KR", encname)) {
+            freqdata = frequent_ko;
+            freqcoverage = frequent_ko_coverage;
+        } else if (!strcmp("EUC-JP", encname)) {
+            freqdata = frequent_ja;
+            freqcoverage = frequent_ja_coverage;
+        } else if (!strcmp("Shift_JIS", encname)) {
+            freqdata = frequent_ja;
+            freqcoverage = frequent_ja_coverage;
+        }
+
+        ALOGV("%d: %s %d", i, encname, confidence);
+        UConverter *conv = ucnv_open(encname, &status);
+        const char *source = input;
+        const char *sourceLimit = input + len;
+        status = U_ZERO_ERROR;
+        int demerit = 0;
+        int frequentchars = 0;
+        int totalchars = 0;
+        while (true) {
+            // demerit the current encoding for each "special" character found after conversion.
+            // The amount of demerit is somewhat arbitrarily chosen.
+            int inchar;
+            if (source != sourceLimit) {
+                inchar = (source[0] << 8) + source[1];
+            }
+            UChar32 c = ucnv_getNextUChar(conv, &source, sourceLimit, &status);
+            if (!U_SUCCESS(status)) {
+                break;
+            }
+            if (c < 0x20 || (c >= 0x7f && c <= 0x009f)) {
+                ALOGV("control character %x", c);
+                demerit += 100;
+            } else if ((c >= 0xa0 && c <= 0xbe)         // symbols, superscripts
+                    || (c == 0xd7) || (c == 0xf7)       // multiplication and division signs
+                    || (c >= 0x2000 && c <= 0x209f)) {  // punctuation, superscripts
+                ALOGV("unlikely character %x", c);
+                demerit += 10;
+            } else if (c >= 0xe000 && c <= 0xf8ff) {
+                ALOGV("private use character %x", c);
+                demerit += 30;
+            } else if (c >= 0x2190 && c <= 0x2bff) {
+                // this range comprises various symbol ranges that are unlikely to appear in
+                // music file metadata.
+                ALOGV("symbol %x", c);
+                demerit += 10;
+            } else if (c == 0xfffd) {
+                ALOGV("replacement character");
+                demerit += 50;
+            } else if (c >= 0xfff0 && c <= 0xfffc) {
+                ALOGV("unicode special %x", c);
+                demerit += 50;
+            } else if (freqdata != NULL) {
+                totalchars++;
+                if (isFrequent(freqdata, c)) {
+                    frequentchars++;
+                }
+            }
+        }
+        if (freqdata != NULL && totalchars != 0) {
+            int myconfidence = 10 + float((100 * frequentchars) / totalchars) / freqcoverage;
+            ALOGV("ICU confidence: %d, my confidence: %d (%d %d)", confidence, myconfidence,
+                    totalchars, frequentchars);
+            if (myconfidence > 100) myconfidence = 100;
+            if (myconfidence < 0) myconfidence = 0;
+            confidence = myconfidence;
+        }
+        ALOGV("%d-%d=%d", confidence, demerit, confidence - demerit);
+        newconfidence.push_back(confidence - demerit);
+        ucnv_close(conv);
+        if (i == 0 && (confidence - demerit) == 100) {
+            // no need to check any further, we'll end up using this match anyway
+            break;
+        }
+    }
+
+    // find match with highest confidence after adjusting for unlikely characters
+    int highest = newconfidence[0];
+    size_t highestidx = 0;
+    num = newconfidence.size();
+    for (size_t i = 1; i < num; i++) {
+        if (newconfidence[i] > highest) {
+            highest = newconfidence[i];
+            highestidx = i;
+        }
+    }
+    status = U_ZERO_ERROR;
+    ALOGV("selecting '%s' w/ %d confidence", ucsdet_getName(matches[highestidx], &status), highest);
+    return matches[highestidx];
+}
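
As a concrete illustration of the rescoring in getPreferred() above (all numbers hypothetical):

    // totalchars = 50, frequentchars = 40, freqcoverage = 0.90:
    //   myconfidence = 10 + float((100 * 40) / 50) / 0.90 = 10 + 88.8... -> 98 (clamped to [0,100])
    // two "unlikely" characters add demerit = 2 * 10 = 20
    //   newconfidence entry = 98 - 20 = 78
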
+
+
+bool CharacterEncodingDetector::isFrequent(const uint16_t *values, uint32_t c) {
+
+    int start = 0;
+    int end = 511; // All the tables have 512 entries
+    int mid = (start+end)/2;
+
+    while(start <= end) {
+        if(c == values[mid]) {
+            return true;
+        } else if (c > values[mid]) {
+            start = mid + 1;
+        } else {
+            end = mid - 1;
+        }
+
+        mid = (start + end) / 2;
+    }
+
+    return false;
+}
+
+
+}  // namespace android
diff --git a/media/libmedia/CharacterEncodingDetector.h b/media/libmedia/CharacterEncodingDetector.h
new file mode 100644
index 0000000..3655a91
--- /dev/null
+++ b/media/libmedia/CharacterEncodingDetector.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _CHARACTER_ENCODING_DETECTOR_H
+#define _CHARACTER_ENCODING_DETECTOR_H
+
+#include <media/mediascanner.h>
+
+#include "StringArray.h"
+
+#include "unicode/ucnv.h"
+#include "unicode/ucsdet.h"
+#include "unicode/ustring.h"
+
+namespace android {
+
+class CharacterEncodingDetector {
+
+    public:
+        CharacterEncodingDetector();
+        ~CharacterEncodingDetector();
+
+        void addTag(const char *name, const char *value);
+        size_t size();
+
+        void detectAndConvert();
+        status_t getTag(int index, const char **name, const char**value);
+
+    private:
+        const UCharsetMatch *getPreferred(
+                const char *input, size_t len, const UCharsetMatch** ucma, size_t matches);
+
+        bool isFrequent(const uint16_t *values, uint32_t c);
+
+        // cached name and value strings, for native encoding support.
+        // TODO: replace these with byte blob arrays that don't require the data to be
+        // null-terminated
+        StringArray     mNames;
+        StringArray     mValues;
+
+        UConverter*     mUtf8Conv;
+};
+
+
+
+};  // namespace android
+
+#endif
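
A short caller-side sketch of the class declared above, using only its public methods (the tag values and the rawArtistBytes/rawTitleBytes pointers are hypothetical; assumes the usual LOG_TAG / utils/Log.h setup and the android namespace):

    CharacterEncodingDetector detector;
    detector.addTag("artist", rawArtistBytes);   // raw, possibly non-UTF-8 bytes from the parser
    detector.addTag("title",  rawTitleBytes);
    detector.detectAndConvert();                 // detect the source encoding and convert to UTF-8
    for (size_t i = 0; i < detector.size(); i++) {
        const char *name, *value;
        if (detector.getTag(i, &name, &value) == OK) {
            ALOGV("%s -> %s", name, value);      // value is now UTF-8
        }
    }
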
diff --git a/media/libmedia/CharacterEncodingDetectorTables.h b/media/libmedia/CharacterEncodingDetectorTables.h
new file mode 100644
index 0000000..1fe1137
--- /dev/null
+++ b/media/libmedia/CharacterEncodingDetectorTables.h
@@ -0,0 +1,2092 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// The 512 most frequently occurring characters for the zhCN language in a sample of the Internet.
+// Ordered by codepoint, comment shows character and ranking by frequency
+const uint16_t frequent_zhCN[] = {
+    0x4E00, // 一, #2
+    0x4E07, // 万, #306
+    0x4E09, // 三, #138
+    0x4E0A, // 上, #16
+    0x4E0B, // 下, #25
+    0x4E0D, // 不, #7
+    0x4E0E, // 与, #133
+    0x4E13, // 专, #151
+    0x4E16, // 世, #346
+    0x4E1A, // 业, #39
+    0x4E1C, // 东, #197
+    0x4E24, // 两, #376
+    0x4E2A, // 个, #23
+    0x4E2D, // 中, #4
+    0x4E3A, // 为, #31
+    0x4E3B, // 主, #95
+    0x4E3E, // 举, #418
+    0x4E48, // 么, #93
+    0x4E4B, // 之, #131
+    0x4E50, // 乐, #130
+    0x4E5F, // 也, #145
+    0x4E66, // 书, #283
+    0x4E70, // 买, #483
+    0x4E86, // 了, #13
+    0x4E8B, // 事, #168
+    0x4E8C, // 二, #218
+    0x4E8E, // 于, #64
+    0x4E94, // 五, #430
+    0x4E9A, // 亚, #468
+    0x4E9B, // 些, #366
+    0x4EA4, // 交, #243
+    0x4EA7, // 产, #86
+    0x4EAB, // 享, #345
+    0x4EAC, // 京, #206
+    0x4EBA, // 人, #3
+    0x4EC0, // 什, #287
+    0x4ECB, // 介, #478
+    0x4ECE, // 从, #381
+    0x4ED6, // 他, #129
+    0x4EE3, // 代, #241
+    0x4EE5, // 以, #51
+    0x4EEC, // 们, #83
+    0x4EF6, // 件, #141
+    0x4EF7, // 价, #140
+    0x4EFB, // 任, #383
+    0x4F01, // 企, #439
+    0x4F18, // 优, #374
+    0x4F1A, // 会, #29
+    0x4F20, // 传, #222
+    0x4F46, // 但, #451
+    0x4F4D, // 位, #208
+    0x4F53, // 体, #98
+    0x4F55, // 何, #339
+    0x4F5C, // 作, #44
+    0x4F60, // 你, #76
+    0x4F7F, // 使, #272
+    0x4F9B, // 供, #375
+    0x4FDD, // 保, #180
+    0x4FE1, // 信, #84
+    0x4FEE, // 修, #437
+    0x503C, // 值, #450
+    0x505A, // 做, #368
+    0x5065, // 健, #484
+    0x50CF, // 像, #487
+    0x513F, // 儿, #326
+    0x5143, // 元, #202
+    0x5148, // 先, #485
+    0x5149, // 光, #254
+    0x514B, // 克, #503
+    0x514D, // 免, #349
+    0x5165, // 入, #156
+    0x5168, // 全, #47
+    0x516C, // 公, #35
+    0x5171, // 共, #448
+    0x5173, // 关, #49
+    0x5176, // 其, #195
+    0x5177, // 具, #329
+    0x5185, // 内, #109
+    0x518C, // 册, #225
+    0x519B, // 军, #466
+    0x51FA, // 出, #53
+    0x51FB, // 击, #359
+    0x5206, // 分, #22
+    0x5217, // 列, #410
+    0x521B, // 创, #399
+    0x5229, // 利, #296
+    0x522B, // 别, #372
+    0x5230, // 到, #33
+    0x5236, // 制, #192
+    0x524D, // 前, #117
+    0x529B, // 力, #173
+    0x529E, // 办, #436
+    0x529F, // 功, #455
+    0x52A0, // 加, #97
+    0x52A1, // 务, #100
+    0x52A8, // 动, #46
+    0x52A9, // 助, #365
+    0x5305, // 包, #331
+    0x5316, // 化, #155
+    0x5317, // 北, #194
+    0x533A, // 区, #105
+    0x533B, // 医, #234
+    0x5341, // 十, #294
+    0x534E, // 华, #205
+    0x5355, // 单, #259
+    0x5357, // 南, #182
+    0x535A, // 博, #153
+    0x5361, // 卡, #332
+    0x539F, // 原, #271
+    0x53BB, // 去, #282
+    0x53C2, // 参, #500
+    0x53CA, // 及, #255
+    0x53CB, // 友, #186
+    0x53CD, // 反, #422
+    0x53D1, // 发, #15
+    0x53D7, // 受, #507
+    0x53D8, // 变, #395
+    0x53E3, // 口, #293
+    0x53EA, // 只, #340
+    0x53EF, // 可, #45
+    0x53F0, // 台, #267
+    0x53F7, // 号, #121
+    0x53F8, // 司, #150
+    0x5404, // 各, #491
+    0x5408, // 合, #115
+    0x540C, // 同, #189
+    0x540D, // 名, #127
+    0x540E, // 后, #75
+    0x5411, // 向, #459
+    0x5427, // 吧, #353
+    0x544A, // 告, #318
+    0x5458, // 员, #232
+    0x5468, // 周, #347
+    0x548C, // 和, #43
+    0x54C1, // 品, #36
+    0x5546, // 商, #148
+    0x5668, // 器, #228
+    0x56DB, // 四, #352
+    0x56DE, // 回, #38
+    0x56E0, // 因, #355
+    0x56E2, // 团, #412
+    0x56ED, // 园, #470
+    0x56FD, // 国, #12
+    0x56FE, // 图, #32
+    0x5728, // 在, #10
+    0x5730, // 地, #30
+    0x573A, // 场, #177
+    0x575B, // 坛, #364
+    0x578B, // 型, #274
+    0x57CE, // 城, #172
+    0x57FA, // 基, #315
+    0x58EB, // 士, #434
+    0x58F0, // 声, #397
+    0x5904, // 处, #416
+    0x5907, // 备, #270
+    0x590D, // 复, #122
+    0x5916, // 外, #190
+    0x591A, // 多, #40
+    0x5927, // 大, #8
+    0x5929, // 天, #52
+    0x592A, // 太, #456
+    0x5934, // 头, #258
+    0x5973, // 女, #65
+    0x597D, // 好, #62
+    0x5982, // 如, #135
+    0x5A31, // 娱, #452
+    0x5B50, // 子, #37
+    0x5B57, // 字, #285
+    0x5B66, // 学, #19
+    0x5B89, // 安, #144
+    0x5B8C, // 完, #469
+    0x5B9A, // 定, #179
+    0x5B9D, // 宝, #188
+    0x5B9E, // 实, #154
+    0x5BA2, // 客, #174
+    0x5BB6, // 家, #26
+    0x5BB9, // 容, #307
+    0x5BC6, // 密, #471
+    0x5BF9, // 对, #90
+    0x5BFC, // 导, #348
+    0x5C06, // 将, #265
+    0x5C0F, // 小, #28
+    0x5C11, // 少, #379
+    0x5C14, // 尔, #490
+    0x5C31, // 就, #101
+    0x5C55, // 展, #291
+    0x5C71, // 山, #239
+    0x5DDE, // 州, #227
+    0x5DE5, // 工, #73
+    0x5DF1, // 己, #480
+    0x5DF2, // 已, #310
+    0x5E02, // 市, #78
+    0x5E03, // 布, #350
+    0x5E08, // 师, #277
+    0x5E16, // 帖, #396
+    0x5E26, // 带, #449
+    0x5E2E, // 帮, #461
+    0x5E38, // 常, #319
+    0x5E73, // 平, #217
+    0x5E74, // 年, #20
+    0x5E76, // 并, #440
+    0x5E7F, // 广, #166
+    0x5E93, // 库, #446
+    0x5E94, // 应, #187
+    0x5E97, // 店, #320
+    0x5EA6, // 度, #114
+    0x5EB7, // 康, #499
+    0x5EFA, // 建, #211
+    0x5F00, // 开, #72
+    0x5F0F, // 式, #207
+    0x5F15, // 引, #495
+    0x5F20, // 张, #385
+    0x5F3A, // 强, #404
+    0x5F53, // 当, #233
+    0x5F55, // 录, #146
+    0x5F62, // 形, #494
+    0x5F69, // 彩, #356
+    0x5F71, // 影, #214
+    0x5F88, // 很, #300
+    0x5F97, // 得, #193
+    0x5FAE, // 微, #245
+    0x5FC3, // 心, #70
+    0x5FEB, // 快, #324
+    0x6001, // 态, #508
+    0x600E, // 怎, #370
+    0x6027, // 性, #99
+    0x603B, // 总, #398
+    0x606F, // 息, #176
+    0x60A8, // 您, #251
+    0x60C5, // 情, #87
+    0x60F3, // 想, #290
+    0x610F, // 意, #184
+    0x611F, // 感, #253
+    0x620F, // 戏, #237
+    0x6210, // 成, #71
+    0x6211, // 我, #11
+    0x6216, // 或, #321
+    0x6218, // 战, #369
+    0x6237, // 户, #215
+    0x623F, // 房, #236
+    0x6240, // 所, #147
+    0x624B, // 手, #55
+    0x624D, // 才, #407
+    0x6253, // 打, #281
+    0x6280, // 技, #203
+    0x6295, // 投, #408
+    0x62A4, // 护, #502
+    0x62A5, // 报, #113
+    0x62DB, // 招, #363
+    0x6301, // 持, #403
+    0x6307, // 指, #414
+    0x636E, // 据, #409
+    0x6392, // 排, #377
+    0x63A5, // 接, #266
+    0x63A8, // 推, #244
+    0x63D0, // 提, #181
+    0x641C, // 搜, #301
+    0x64AD, // 播, #401
+    0x652F, // 支, #400
+    0x6536, // 收, #158
+    0x653E, // 放, #317
+    0x653F, // 政, #380
+    0x6548, // 效, #496
+    0x6559, // 教, #170
+    0x6570, // 数, #136
+    0x6587, // 文, #21
+    0x6599, // 料, #295
+    0x65AF, // 斯, #473
+    0x65B0, // 新, #14
+    0x65B9, // 方, #68
+    0x65C5, // 旅, #457
+    0x65E0, // 无, #164
+    0x65E5, // 日, #50
+    0x65F6, // 时, #18
+    0x660E, // 明, #132
+    0x6613, // 易, #428
+    0x661F, // 星, #240
+    0x662F, // 是, #6
+    0x663E, // 显, #486
+    0x66F4, // 更, #103
+    0x6700, // 最, #61
+    0x6708, // 月, #80
+    0x6709, // 有, #5
+    0x670D, // 服, #94
+    0x671F, // 期, #139
+    0x672C, // 本, #56
+    0x672F, // 术, #216
+    0x673A, // 机, #27
+    0x6743, // 权, #250
+    0x6761, // 条, #309
+    0x6765, // 来, #42
+    0x677F, // 板, #505
+    0x6797, // 林, #475
+    0x679C, // 果, #212
+    0x67E5, // 查, #165
+    0x6807, // 标, #269
+    0x6821, // 校, #462
+    0x6837, // 样, #314
+    0x683C, // 格, #238
+    0x6848, // 案, #378
+    0x697C, // 楼, #342
+    0x6A21, // 模, #413
+    0x6B21, // 次, #263
+    0x6B22, // 欢, #443
+    0x6B3E, // 款, #358
+    0x6B63, // 正, #219
+    0x6B64, // 此, #362
+    0x6BD4, // 比, #298
+    0x6C11, // 民, #279
+    0x6C14, // 气, #303
+    0x6C34, // 水, #163
+    0x6C42, // 求, #373
+    0x6C5F, // 江, #336
+    0x6CA1, // 没, #229
+    0x6CBB, // 治, #425
+    0x6CD5, // 法, #85
+    0x6CE8, // 注, #119
+    0x6D3B, // 活, #231
+    0x6D41, // 流, #280
+    0x6D4B, // 测, #460
+    0x6D77, // 海, #124
+    0x6D88, // 消, #415
+    0x6DF1, // 深, #477
+    0x6E05, // 清, #311
+    0x6E38, // 游, #81
+    0x6E90, // 源, #325
+    0x706B, // 火, #498
+    0x70B9, // 点, #58
+    0x70ED, // 热, #183
+    0x7136, // 然, #308
+    0x7167, // 照, #431
+    0x7231, // 爱, #223
+    0x7247, // 片, #128
+    0x7248, // 版, #91
+    0x724C, // 牌, #429
+    0x7269, // 物, #169
+    0x7279, // 特, #224
+    0x738B, // 王, #351
+    0x73A9, // 玩, #476
+    0x73B0, // 现, #125
+    0x7403, // 球, #367
+    0x7406, // 理, #69
+    0x751F, // 生, #24
+    0x7528, // 用, #17
+    0x7531, // 由, #441
+    0x7535, // 电, #34
+    0x7537, // 男, #275
+    0x754C, // 界, #419
+    0x75C5, // 病, #371
+    0x767B, // 登, #204
+    0x767D, // 白, #338
+    0x767E, // 百, #157
+    0x7684, // 的, #1
+    0x76D8, // 盘, #493
+    0x76EE, // 目, #261
+    0x76F4, // 直, #391
+    0x76F8, // 相, #143
+    0x7701, // 省, #464
+    0x770B, // 看, #54
+    0x771F, // 真, #249
+    0x7740, // 着, #302
+    0x77E5, // 知, #142
+    0x7801, // 码, #257
+    0x7814, // 研, #387
+    0x793A, // 示, #334
+    0x793E, // 社, #343
+    0x795E, // 神, #330
+    0x798F, // 福, #509
+    0x79BB, // 离, #454
+    0x79CD, // 种, #278
+    0x79D1, // 科, #126
+    0x79EF, // 积, #390
+    0x7A0B, // 程, #209
+    0x7A76, // 究, #504
+    0x7A7A, // 空, #312
+    0x7ACB, // 立, #393
+    0x7AD9, // 站, #107
+    0x7AE0, // 章, #304
+    0x7B2C, // 第, #96
+    0x7B49, // 等, #210
+    0x7B54, // 答, #256
+    0x7B80, // 简, #474
+    0x7BA1, // 管, #221
+    0x7C7B, // 类, #246
+    0x7CBE, // 精, #226
+    0x7CFB, // 系, #89
+    0x7D22, // 索, #354
+    0x7EA2, // 红, #417
+    0x7EA7, // 级, #178
+    0x7EBF, // 线, #108
+    0x7EC4, // 组, #389
+    0x7EC6, // 细, #442
+    0x7ECF, // 经, #74
+    0x7ED3, // 结, #333
+    0x7ED9, // 给, #384
+    0x7EDC, // 络, #472
+    0x7EDF, // 统, #344
+    0x7F16, // 编, #424
+    0x7F51, // 网, #9
+    0x7F6E, // 置, #411
+    0x7F8E, // 美, #60
+    0x8001, // 老, #292
+    0x8003, // 考, #288
+    0x8005, // 者, #106
+    0x800C, // 而, #297
+    0x8054, // 联, #159
+    0x80B2, // 育, #327
+    0x80FD, // 能, #59
+    0x81EA, // 自, #77
+    0x8272, // 色, #198
+    0x8282, // 节, #361
+    0x82B1, // 花, #299
+    0x82F1, // 英, #316
+    0x8350, // 荐, #402
+    0x836F, // 药, #481
+    0x8425, // 营, #394
+    0x85CF, // 藏, #337
+    0x884C, // 行, #41
+    0x8868, // 表, #104
+    0x88AB, // 被, #289
+    0x88C5, // 装, #161
+    0x897F, // 西, #199
+    0x8981, // 要, #48
+    0x89C1, // 见, #360
+    0x89C2, // 观, #423
+    0x89C4, // 规, #453
+    0x89C6, // 视, #120
+    0x89E3, // 解, #264
+    0x8A00, // 言, #433
+    0x8BA1, // 计, #191
+    0x8BA4, // 认, #482
+    0x8BA9, // 让, #421
+    0x8BAE, // 议, #427
+    0x8BAF, // 讯, #388
+    0x8BB0, // 记, #273
+    0x8BBA, // 论, #66
+    0x8BBE, // 设, #162
+    0x8BC1, // 证, #201
+    0x8BC4, // 评, #111
+    0x8BC6, // 识, #463
+    0x8BD5, // 试, #323
+    0x8BDD, // 话, #247
+    0x8BE2, // 询, #432
+    0x8BE5, // 该, #447
+    0x8BE6, // 详, #497
+    0x8BED, // 语, #268
+    0x8BF4, // 说, #112
+    0x8BF7, // 请, #213
+    0x8BFB, // 读, #341
+    0x8C03, // 调, #438
+    0x8D22, // 财, #488
+    0x8D28, // 质, #386
+    0x8D2D, // 购, #260
+    0x8D34, // 贴, #510
+    0x8D39, // 费, #242
+    0x8D44, // 资, #116
+    0x8D77, // 起, #220
+    0x8D85, // 超, #406
+    0x8DEF, // 路, #235
+    0x8EAB, // 身, #262
+    0x8F66, // 车, #82
+    0x8F6C, // 转, #322
+    0x8F7D, // 载, #175
+    0x8FBE, // 达, #435
+    0x8FC7, // 过, #118
+    0x8FD0, // 运, #357
+    0x8FD1, // 近, #492
+    0x8FD8, // 还, #171
+    0x8FD9, // 这, #57
+    0x8FDB, // 进, #160
+    0x8FDE, // 连, #489
+    0x9009, // 选, #328
+    0x901A, // 通, #137
+    0x901F, // 速, #458
+    0x9020, // 造, #511
+    0x9053, // 道, #79
+    0x90A3, // 那, #305
+    0x90E8, // 部, #102
+    0x90FD, // 都, #167
+    0x914D, // 配, #479
+    0x9152, // 酒, #444
+    0x91CC, // 里, #196
+    0x91CD, // 重, #230
+    0x91CF, // 量, #248
+    0x91D1, // 金, #134
+    0x9500, // 销, #465
+    0x957F, // 长, #152
+    0x95E8, // 门, #185
+    0x95EE, // 问, #92
+    0x95F4, // 间, #88
+    0x95FB, // 闻, #313
+    0x9605, // 阅, #467
+    0x9633, // 阳, #420
+    0x9645, // 际, #501
+    0x9650, // 限, #286
+    0x9662, // 院, #276
+    0x96C6, // 集, #284
+    0x9700, // 需, #405
+    0x9762, // 面, #123
+    0x97F3, // 音, #335
+    0x9875, // 页, #63
+    0x9879, // 项, #506
+    0x9891, // 频, #200
+    0x9898, // 题, #110
+    0x98CE, // 风, #252
+    0x98DF, // 食, #445
+    0x9996, // 首, #149
+    0x9999, // 香, #512
+    0x9A6C, // 马, #392
+    0x9A8C, // 验, #382
+    0x9AD8, // 高, #67
+    0x9F99, // 龙, #426
+};
+// the percentage of the sample covered by the above characters
+static const float frequent_zhCN_coverage=0.718950369339973;
+
+// The 512 most frequently occurring characters for the zhTW language in a sample of the Internet.
+// Ordered by codepoint, comment shows character and ranking by frequency
+const uint16_t frequent_zhTW[] = {
+    0x4E00, // 一, #2
+    0x4E09, // 三, #131
+    0x4E0A, // 上, #12
+    0x4E0B, // 下, #37
+    0x4E0D, // 不, #6
+    0x4E16, // 世, #312
+    0x4E26, // 並, #434
+    0x4E2D, // 中, #9
+    0x4E3B, // 主, #97
+    0x4E4B, // 之, #55
+    0x4E5F, // 也, #95
+    0x4E86, // 了, #19
+    0x4E8B, // 事, #128
+    0x4E8C, // 二, #187
+    0x4E94, // 五, #339
+    0x4E9B, // 些, #435
+    0x4E9E, // 亞, #432
+    0x4EA4, // 交, #264
+    0x4EAB, // 享, #160
+    0x4EBA, // 人, #3
+    0x4EC0, // 什, #483
+    0x4ECA, // 今, #380
+    0x4ECB, // 介, #468
+    0x4ED6, // 他, #65
+    0x4EE3, // 代, #284
+    0x4EE5, // 以, #26
+    0x4EF6, // 件, #234
+    0x4EFB, // 任, #381
+    0x4EFD, // 份, #447
+    0x4F46, // 但, #281
+    0x4F4D, // 位, #202
+    0x4F4F, // 住, #471
+    0x4F55, // 何, #334
+    0x4F5C, // 作, #56
+    0x4F60, // 你, #64
+    0x4F7F, // 使, #236
+    0x4F86, // 來, #38
+    0x4F9B, // 供, #397
+    0x4FBF, // 便, #440
+    0x4FC2, // 係, #506
+    0x4FDD, // 保, #161
+    0x4FE1, // 信, #268
+    0x4FEE, // 修, #473
+    0x500B, // 個, #27
+    0x5011, // 們, #109
+    0x505A, // 做, #383
+    0x5065, // 健, #415
+    0x5099, // 備, #461
+    0x50B3, // 傳, #277
+    0x50CF, // 像, #403
+    0x50F9, // 價, #93
+    0x512A, // 優, #396
+    0x5143, // 元, #158
+    0x5148, // 先, #382
+    0x5149, // 光, #216
+    0x514D, // 免, #321
+    0x5152, // 兒, #374
+    0x5165, // 入, #58
+    0x5167, // 內, #106
+    0x5168, // 全, #67
+    0x5169, // 兩, #322
+    0x516C, // 公, #53
+    0x516D, // 六, #493
+    0x5171, // 共, #456
+    0x5176, // 其, #148
+    0x5177, // 具, #328
+    0x518A, // 冊, #360
+    0x518D, // 再, #311
+    0x51FA, // 出, #44
+    0x5206, // 分, #15
+    0x5217, // 列, #259
+    0x5225, // 別, #361
+    0x5229, // 利, #251
+    0x5230, // 到, #29
+    0x5247, // 則, #511
+    0x524D, // 前, #82
+    0x5275, // 創, #409
+    0x529B, // 力, #176
+    0x529F, // 功, #430
+    0x52A0, // 加, #87
+    0x52A9, // 助, #465
+    0x52D5, // 動, #48
+    0x52D9, // 務, #102
+    0x5305, // 包, #248
+    0x5316, // 化, #223
+    0x5317, // 北, #145
+    0x5340, // 區, #60
+    0x5341, // 十, #242
+    0x5357, // 南, #261
+    0x535A, // 博, #484
+    0x5361, // 卡, #327
+    0x5370, // 印, #498
+    0x5373, // 即, #351
+    0x539F, // 原, #237
+    0x53BB, // 去, #190
+    0x53C3, // 參, #444
+    0x53C8, // 又, #426
+    0x53CA, // 及, #136
+    0x53CB, // 友, #142
+    0x53D6, // 取, #422
+    0x53D7, // 受, #410
+    0x53E3, // 口, #357
+    0x53EA, // 只, #250
+    0x53EF, // 可, #35
+    0x53F0, // 台, #34
+    0x53F8, // 司, #226
+    0x5403, // 吃, #362
+    0x5404, // 各, #454
+    0x5408, // 合, #147
+    0x540C, // 同, #173
+    0x540D, // 名, #108
+    0x544A, // 告, #186
+    0x548C, // 和, #130
+    0x54C1, // 品, #23
+    0x54E1, // 員, #150
+    0x5546, // 商, #75
+    0x554F, // 問, #120
+    0x559C, // 喜, #502
+    0x55AE, // 單, #210
+    0x55CE, // 嗎, #443
+    0x5668, // 器, #305
+    0x56DB, // 四, #318
+    0x56DE, // 回, #59
+    0x56E0, // 因, #253
+    0x570B, // 國, #21
+    0x5712, // 園, #345
+    0x5716, // 圖, #73
+    0x5718, // 團, #338
+    0x5728, // 在, #11
+    0x5730, // 地, #50
+    0x578B, // 型, #270
+    0x57CE, // 城, #466
+    0x57FA, // 基, #349
+    0x5831, // 報, #127
+    0x5834, // 場, #165
+    0x58EB, // 士, #372
+    0x5916, // 外, #152
+    0x591A, // 多, #54
+    0x5927, // 大, #8
+    0x5929, // 天, #43
+    0x592A, // 太, #343
+    0x5947, // 奇, #325
+    0x5973, // 女, #85
+    0x5979, // 她, #420
+    0x597D, // 好, #22
+    0x5982, // 如, #144
+    0x5B50, // 子, #46
+    0x5B57, // 字, #275
+    0x5B78, // 學, #49
+    0x5B89, // 安, #239
+    0x5B8C, // 完, #320
+    0x5B9A, // 定, #159
+    0x5BA2, // 客, #188
+    0x5BB6, // 家, #31
+    0x5BB9, // 容, #244
+    0x5BE6, // 實, #198
+    0x5BF6, // 寶, #367
+    0x5C07, // 將, #232
+    0x5C08, // 專, #133
+    0x5C0B, // 尋, #352
+    0x5C0D, // 對, #126
+    0x5C0E, // 導, #418
+    0x5C0F, // 小, #20
+    0x5C11, // 少, #368
+    0x5C31, // 就, #63
+    0x5C55, // 展, #341
+    0x5C71, // 山, #273
+    0x5DE5, // 工, #121
+    0x5DF1, // 己, #402
+    0x5DF2, // 已, #299
+    0x5E02, // 市, #81
+    0x5E2B, // 師, #262
+    0x5E36, // 帶, #470
+    0x5E38, // 常, #303
+    0x5E73, // 平, #297
+    0x5E74, // 年, #30
+    0x5E97, // 店, #171
+    0x5EA6, // 度, #220
+    0x5EB7, // 康, #441
+    0x5EE3, // 廣, #279
+    0x5EFA, // 建, #254
+    0x5F0F, // 式, #155
+    0x5F15, // 引, #346
+    0x5F35, // 張, #366
+    0x5F37, // 強, #437
+    0x5F71, // 影, #94
+    0x5F88, // 很, #177
+    0x5F8C, // 後, #66
+    0x5F97, // 得, #113
+    0x5F9E, // 從, #436
+    0x5FC3, // 心, #57
+    0x5FEB, // 快, #292
+    0x6027, // 性, #175
+    0x606F, // 息, #378
+    0x60A8, // 您, #252
+    0x60C5, // 情, #123
+    0x60F3, // 想, #178
+    0x610F, // 意, #168
+    0x611B, // 愛, #125
+    0x611F, // 感, #211
+    0x61C9, // 應, #164
+    0x6210, // 成, #86
+    0x6211, // 我, #7
+    0x6216, // 或, #199
+    0x6230, // 戰, #438
+    0x6232, // 戲, #309
+    0x6236, // 戶, #497
+    0x623F, // 房, #274
+    0x6240, // 所, #79
+    0x624B, // 手, #68
+    0x624D, // 才, #400
+    0x6253, // 打, #278
+    0x627E, // 找, #449
+    0x6280, // 技, #332
+    0x6295, // 投, #425
+    0x62C9, // 拉, #500
+    0x62CD, // 拍, #398
+    0x6307, // 指, #407
+    0x6392, // 排, #458
+    0x63A5, // 接, #326
+    0x63A8, // 推, #153
+    0x63D0, // 提, #235
+    0x641C, // 搜, #314
+    0x6469, // 摩, #472
+    0x6536, // 收, #249
+    0x6539, // 改, #508
+    0x653E, // 放, #331
+    0x653F, // 政, #295
+    0x6559, // 教, #184
+    0x6574, // 整, #394
+    0x6578, // 數, #134
+    0x6587, // 文, #16
+    0x6599, // 料, #167
+    0x65AF, // 斯, #476
+    0x65B0, // 新, #10
+    0x65B9, // 方, #96
+    0x65BC, // 於, #70
+    0x65C5, // 旅, #289
+    0x65E5, // 日, #18
+    0x660E, // 明, #118
+    0x6613, // 易, #482
+    0x661F, // 星, #205
+    0x662F, // 是, #5
+    0x6642, // 時, #13
+    0x66F4, // 更, #149
+    0x66F8, // 書, #209
+    0x6700, // 最, #51
+    0x6703, // 會, #14
+    0x6708, // 月, #25
+    0x6709, // 有, #4
+    0x670D, // 服, #99
+    0x671F, // 期, #139
+    0x672A, // 未, #404
+    0x672C, // 本, #45
+    0x6771, // 東, #221
+    0x677F, // 板, #364
+    0x6797, // 林, #330
+    0x679C, // 果, #179
+    0x67E5, // 查, #283
+    0x683C, // 格, #157
+    0x6848, // 案, #392
+    0x689D, // 條, #406
+    0x696D, // 業, #103
+    0x6A02, // 樂, #116
+    0x6A13, // 樓, #411
+    0x6A19, // 標, #384
+    0x6A23, // 樣, #306
+    0x6A5F, // 機, #40
+    0x6AA2, // 檢, #359
+    0x6B0A, // 權, #228
+    0x6B21, // 次, #227
+    0x6B3E, // 款, #276
+    0x6B4C, // 歌, #496
+    0x6B61, // 歡, #427
+    0x6B63, // 正, #206
+    0x6B64, // 此, #247
+    0x6BCF, // 每, #391
+    0x6BD4, // 比, #257
+    0x6C11, // 民, #230
+    0x6C23, // 氣, #200
+    0x6C34, // 水, #140
+    0x6C42, // 求, #501
+    0x6C92, // 沒, #162
+    0x6CD5, // 法, #89
+    0x6D3B, // 活, #124
+    0x6D41, // 流, #315
+    0x6D77, // 海, #258
+    0x6D88, // 消, #342
+    0x6E05, // 清, #329
+    0x6E2F, // 港, #293
+    0x6F14, // 演, #491
+    0x7063, // 灣, #195
+    0x70BA, // 為, #39
+    0x7121, // 無, #107
+    0x7136, // 然, #215
+    0x7167, // 照, #376
+    0x71B1, // 熱, #245
+    0x7247, // 片, #90
+    0x7248, // 版, #112
+    0x724C, // 牌, #467
+    0x7269, // 物, #110
+    0x7279, // 特, #183
+    0x738B, // 王, #287
+    0x73A9, // 玩, #354
+    0x73FE, // 現, #143
+    0x7403, // 球, #350
+    0x7406, // 理, #105
+    0x751F, // 生, #24
+    0x7522, // 產, #201
+    0x7528, // 用, #17
+    0x7531, // 由, #288
+    0x7537, // 男, #298
+    0x754C, // 界, #399
+    0x7559, // 留, #218
+    0x756B, // 畫, #412
+    0x7576, // 當, #185
+    0x767B, // 登, #138
+    0x767C, // 發, #28
+    0x767D, // 白, #377
+    0x767E, // 百, #393
+    0x7684, // 的, #1
+    0x76EE, // 目, #271
+    0x76F4, // 直, #379
+    0x76F8, // 相, #98
+    0x770B, // 看, #52
+    0x771F, // 真, #180
+    0x773C, // 眼, #433
+    0x77E5, // 知, #170
+    0x78BC, // 碼, #481
+    0x793A, // 示, #353
+    0x793E, // 社, #333
+    0x795E, // 神, #304
+    0x7968, // 票, #477
+    0x798F, // 福, #494
+    0x79C1, // 私, #507
+    0x79D1, // 科, #280
+    0x7A0B, // 程, #272
+    0x7A2E, // 種, #337
+    0x7A4D, // 積, #385
+    0x7A7A, // 空, #324
+    0x7ACB, // 立, #286
+    0x7AD9, // 站, #117
+    0x7AE0, // 章, #141
+    0x7B2C, // 第, #135
+    0x7B49, // 等, #240
+    0x7BA1, // 管, #340
+    0x7BC0, // 節, #431
+    0x7BC7, // 篇, #479
+    0x7C21, // 簡, #499
+    0x7CBE, // 精, #213
+    0x7CFB, // 系, #212
+    0x7D04, // 約, #462
+    0x7D05, // 紅, #452
+    0x7D1A, // 級, #267
+    0x7D30, // 細, #486
+    0x7D44, // 組, #335
+    0x7D50, // 結, #243
+    0x7D66, // 給, #355
+    0x7D71, // 統, #375
+    0x7D93, // 經, #111
+    0x7DB2, // 網, #32
+    0x7DDA, // 線, #151
+    0x7E23, // 縣, #439
+    0x7E3D, // 總, #370
+    0x7F8E, // 美, #41
+    0x7FA9, // 義, #504
+    0x8001, // 老, #290
+    0x8003, // 考, #428
+    0x8005, // 者, #92
+    0x800C, // 而, #217
+    0x805E, // 聞, #181
+    0x806F, // 聯, #310
+    0x8072, // 聲, #413
+    0x80A1, // 股, #390
+    0x80B2, // 育, #453
+    0x80FD, // 能, #71
+    0x8166, // 腦, #408
+    0x81EA, // 自, #61
+    0x81F3, // 至, #344
+    0x8207, // 與, #84
+    0x8209, // 舉, #463
+    0x8272, // 色, #192
+    0x82B1, // 花, #255
+    0x82F1, // 英, #348
+    0x83EF, // 華, #196
+    0x842C, // 萬, #316
+    0x843D, // 落, #308
+    0x8457, // 著, #233
+    0x85A6, // 薦, #401
+    0x85CF, // 藏, #503
+    0x85DD, // 藝, #488
+    0x8655, // 處, #419
+    0x865F, // 號, #191
+    0x884C, // 行, #47
+    0x8853, // 術, #395
+    0x8868, // 表, #77
+    0x88AB, // 被, #291
+    0x88DD, // 裝, #256
+    0x88E1, // 裡, #369
+    0x88FD, // 製, #510
+    0x897F, // 西, #300
+    0x8981, // 要, #36
+    0x898B, // 見, #307
+    0x8996, // 視, #204
+    0x89BA, // 覺, #450
+    0x89BD, // 覽, #387
+    0x89C0, // 觀, #365
+    0x89E3, // 解, #323
+    0x8A00, // 言, #169
+    0x8A02, // 訂, #423
+    0x8A08, // 計, #225
+    0x8A0A, // 訊, #156
+    0x8A0E, // 討, #373
+    0x8A18, // 記, #222
+    0x8A2D, // 設, #174
+    0x8A3B, // 註, #356
+    0x8A55, // 評, #246
+    0x8A66, // 試, #448
+    0x8A71, // 話, #229
+    0x8A72, // 該, #446
+    0x8A8D, // 認, #464
+    0x8A9E, // 語, #371
+    0x8AAA, // 說, #91
+    0x8ABF, // 調, #509
+    0x8ACB, // 請, #119
+    0x8AD6, // 論, #114
+    0x8B1D, // 謝, #389
+    0x8B49, // 證, #429
+    0x8B58, // 識, #416
+    0x8B70, // 議, #485
+    0x8B77, // 護, #475
+    0x8B80, // 讀, #386
+    0x8B8A, // 變, #388
+    0x8B93, // 讓, #336
+    0x8CA8, // 貨, #313
+    0x8CB7, // 買, #260
+    0x8CBB, // 費, #203
+    0x8CC7, // 資, #62
+    0x8CE3, // 賣, #294
+    0x8CEA, // 質, #457
+    0x8CFC, // 購, #189
+    0x8D77, // 起, #214
+    0x8D85, // 超, #296
+    0x8DDF, // 跟, #489
+    0x8DEF, // 路, #137
+    0x8EAB, // 身, #197
+    0x8ECA, // 車, #76
+    0x8F09, // 載, #301
+    0x8F49, // 轉, #282
+    0x8FD1, // 近, #414
+    0x9001, // 送, #363
+    0x9019, // 這, #42
+    0x901A, // 通, #207
+    0x901F, // 速, #495
+    0x9020, // 造, #455
+    0x9023, // 連, #285
+    0x9032, // 進, #231
+    0x904A, // 遊, #132
+    0x904B, // 運, #219
+    0x904E, // 過, #101
+    0x9053, // 道, #146
+    0x9054, // 達, #417
+    0x9078, // 選, #182
+    0x9084, // 還, #154
+    0x908A, // 邊, #487
+    0x90A3, // 那, #269
+    0x90E8, // 部, #78
+    0x90FD, // 都, #104
+    0x914D, // 配, #421
+    0x9152, // 酒, #512
+    0x91AB, // 醫, #358
+    0x91CD, // 重, #224
+    0x91CF, // 量, #319
+    0x91D1, // 金, #115
+    0x9304, // 錄, #302
+    0x9577, // 長, #172
+    0x9580, // 門, #193
+    0x958B, // 開, #72
+    0x9593, // 間, #80
+    0x95B1, // 閱, #405
+    0x95DC, // 關, #74
+    0x963F, // 阿, #460
+    0x9650, // 限, #265
+    0x9662, // 院, #474
+    0x9664, // 除, #478
+    0x969B, // 際, #459
+    0x96C6, // 集, #347
+    0x96E2, // 離, #442
+    0x96FB, // 電, #33
+    0x9700, // 需, #445
+    0x975E, // 非, #451
+    0x9762, // 面, #129
+    0x97F3, // 音, #194
+    0x9801, // 頁, #83
+    0x982D, // 頭, #238
+    0x984C, // 題, #122
+    0x985E, // 類, #163
+    0x98A8, // 風, #266
+    0x98DF, // 食, #208
+    0x9910, // 餐, #469
+    0x9928, // 館, #424
+    0x9996, // 首, #166
+    0x9999, // 香, #263
+    0x99AC, // 馬, #317
+    0x9A57, // 驗, #492
+    0x9AD4, // 體, #100
+    0x9AD8, // 高, #88
+    0x9EBC, // 麼, #241
+    0x9EC3, // 黃, #480
+    0x9ED1, // 黑, #490
+    0x9EDE, // 點, #69
+    0x9F8D, // 龍, #505
+};
+// the percentage of the sample covered by the above characters
+static const float frequent_zhTW_coverage=0.704841200026877;
+
+// The 512 most frequently occurring characters for the ja language in a sample of the Internet.
+// Ordered by codepoint, comment shows character and ranking by frequency
+const uint16_t frequent_ja[] = {
+    0x3005, // 々, #352
+    0x3041, // ぁ, #486
+    0x3042, // あ, #50
+    0x3044, // い, #2
+    0x3046, // う, #33
+    0x3048, // え, #83
+    0x304A, // お, #37
+    0x304B, // か, #21
+    0x304C, // が, #17
+    0x304D, // き, #51
+    0x304E, // ぎ, #324
+    0x304F, // く, #38
+    0x3050, // ぐ, #334
+    0x3051, // け, #60
+    0x3052, // げ, #296
+    0x3053, // こ, #34
+    0x3054, // ご, #100
+    0x3055, // さ, #31
+    0x3056, // ざ, #378
+    0x3057, // し, #4
+    0x3058, // じ, #121
+    0x3059, // す, #12
+    0x305A, // ず, #215
+    0x305B, // せ, #86
+    0x305D, // そ, #68
+    0x305F, // た, #11
+    0x3060, // だ, #42
+    0x3061, // ち, #67
+    0x3063, // っ, #23
+    0x3064, // つ, #73
+    0x3066, // て, #7
+    0x3067, // で, #6
+    0x3068, // と, #14
+    0x3069, // ど, #75
+    0x306A, // な, #8
+    0x306B, // に, #5
+    0x306D, // ね, #123
+    0x306E, // の, #1
+    0x306F, // は, #16
+    0x3070, // ば, #150
+    0x3071, // ぱ, #259
+    0x3072, // ひ, #364
+    0x3073, // び, #266
+    0x3075, // ふ, #484
+    0x3076, // ぶ, #330
+    0x3078, // へ, #146
+    0x3079, // べ, #207
+    0x307B, // ほ, #254
+    0x307E, // ま, #18
+    0x307F, // み, #74
+    0x3080, // む, #285
+    0x3081, // め, #78
+    0x3082, // も, #32
+    0x3083, // ゃ, #111
+    0x3084, // や, #85
+    0x3086, // ゆ, #392
+    0x3087, // ょ, #224
+    0x3088, // よ, #63
+    0x3089, // ら, #29
+    0x308A, // り, #28
+    0x308B, // る, #9
+    0x308C, // れ, #35
+    0x308D, // ろ, #127
+    0x308F, // わ, #88
+    0x3092, // を, #19
+    0x3093, // ん, #22
+    0x30A1, // ァ, #193
+    0x30A2, // ア, #27
+    0x30A3, // ィ, #70
+    0x30A4, // イ, #15
+    0x30A6, // ウ, #89
+    0x30A7, // ェ, #134
+    0x30A8, // エ, #81
+    0x30A9, // ォ, #225
+    0x30AA, // オ, #76
+    0x30AB, // カ, #52
+    0x30AC, // ガ, #147
+    0x30AD, // キ, #66
+    0x30AE, // ギ, #246
+    0x30AF, // ク, #25
+    0x30B0, // グ, #39
+    0x30B1, // ケ, #137
+    0x30B2, // ゲ, #200
+    0x30B3, // コ, #46
+    0x30B4, // ゴ, #183
+    0x30B5, // サ, #64
+    0x30B6, // ザ, #221
+    0x30B7, // シ, #48
+    0x30B8, // ジ, #55
+    0x30B9, // ス, #13
+    0x30BA, // ズ, #103
+    0x30BB, // セ, #109
+    0x30BC, // ゼ, #499
+    0x30BD, // ソ, #175
+    0x30BF, // タ, #45
+    0x30C0, // ダ, #104
+    0x30C1, // チ, #71
+    0x30C3, // ッ, #20
+    0x30C4, // ツ, #119
+    0x30C6, // テ, #59
+    0x30C7, // デ, #82
+    0x30C8, // ト, #10
+    0x30C9, // ド, #44
+    0x30CA, // ナ, #102
+    0x30CB, // ニ, #72
+    0x30CD, // ネ, #117
+    0x30CE, // ノ, #192
+    0x30CF, // ハ, #164
+    0x30D0, // バ, #62
+    0x30D1, // パ, #90
+    0x30D2, // ヒ, #398
+    0x30D3, // ビ, #77
+    0x30D4, // ピ, #135
+    0x30D5, // フ, #47
+    0x30D6, // ブ, #56
+    0x30D7, // プ, #43
+    0x30D8, // ヘ, #268
+    0x30D9, // ベ, #157
+    0x30DA, // ペ, #125
+    0x30DB, // ホ, #155
+    0x30DC, // ボ, #168
+    0x30DD, // ポ, #114
+    0x30DE, // マ, #57
+    0x30DF, // ミ, #97
+    0x30E0, // ム, #69
+    0x30E1, // メ, #53
+    0x30E2, // モ, #142
+    0x30E3, // ャ, #93
+    0x30E4, // ヤ, #258
+    0x30E5, // ュ, #79
+    0x30E6, // ユ, #405
+    0x30E7, // ョ, #98
+    0x30E9, // ラ, #26
+    0x30EA, // リ, #30
+    0x30EB, // ル, #24
+    0x30EC, // レ, #41
+    0x30ED, // ロ, #40
+    0x30EF, // ワ, #144
+    0x30F3, // ン, #3
+    0x30F4, // ヴ, #483
+    0x30FD, // ヽ, #501
+    0x4E00, // 一, #84
+    0x4E07, // 万, #337
+    0x4E09, // 三, #323
+    0x4E0A, // 上, #133
+    0x4E0B, // 下, #180
+    0x4E0D, // 不, #277
+    0x4E16, // 世, #385
+    0x4E2D, // 中, #87
+    0x4E3B, // 主, #432
+    0x4E88, // 予, #326
+    0x4E8B, // 事, #95
+    0x4E8C, // 二, #394
+    0x4E95, // 井, #468
+    0x4EA4, // 交, #410
+    0x4EAC, // 京, #260
+    0x4EBA, // 人, #61
+    0x4ECA, // 今, #184
+    0x4ECB, // 介, #358
+    0x4ED5, // 仕, #391
+    0x4ED6, // 他, #256
+    0x4ED8, // 付, #243
+    0x4EE3, // 代, #280
+    0x4EE5, // 以, #216
+    0x4EF6, // 件, #190
+    0x4F1A, // 会, #105
+    0x4F4D, // 位, #177
+    0x4F4F, // 住, #376
+    0x4F53, // 体, #223
+    0x4F55, // 何, #294
+    0x4F5C, // 作, #154
+    0x4F7F, // 使, #233
+    0x4F9B, // 供, #503
+    0x4FA1, // 価, #217
+    0x4FBF, // 便, #511
+    0x4FDD, // 保, #279
+    0x4FE1, // 信, #271
+    0x500B, // 個, #415
+    0x50CF, // 像, #178
+    0x512A, // 優, #403
+    0x5143, // 元, #384
+    0x5148, // 先, #311
+    0x5149, // 光, #488
+    0x5165, // 入, #115
+    0x5168, // 全, #173
+    0x516C, // 公, #287
+    0x5177, // 具, #447
+    0x5185, // 内, #169
+    0x5186, // 円, #131
+    0x5199, // 写, #275
+    0x51FA, // 出, #110
+    0x5206, // 分, #130
+    0x5207, // 切, #401
+    0x521D, // 初, #319
+    0x5225, // 別, #290
+    0x5229, // 利, #226
+    0x5236, // 制, #507
+    0x524D, // 前, #124
+    0x529B, // 力, #272
+    0x52A0, // 加, #249
+    0x52D5, // 動, #120
+    0x52D9, // 務, #421
+    0x52DF, // 募, #476
+    0x5316, // 化, #308
+    0x5317, // 北, #341
+    0x533A, // 区, #348
+    0x539F, // 原, #321
+    0x53C2, // 参, #452
+    0x53CB, // 友, #451
+    0x53D6, // 取, #237
+    0x53D7, // 受, #354
+    0x53E3, // 口, #289
+    0x53E4, // 古, #339
+    0x53EF, // 可, #298
+    0x53F0, // 台, #439
+    0x53F7, // 号, #361
+    0x5408, // 合, #118
+    0x540C, // 同, #263
+    0x540D, // 名, #65
+    0x5411, // 向, #434
+    0x544A, // 告, #386
+    0x5468, // 周, #393
+    0x5473, // 味, #299
+    0x548C, // 和, #350
+    0x54C1, // 品, #96
+    0x54E1, // 員, #293
+    0x5546, // 商, #198
+    0x554F, // 問, #158
+    0x55B6, // 営, #438
+    0x5668, // 器, #366
+    0x56DE, // 回, #143
+    0x56F3, // 図, #444
+    0x56FD, // 国, #153
+    0x5712, // 園, #435
+    0x571F, // 土, #239
+    0x5728, // 在, #351
+    0x5730, // 地, #163
+    0x578B, // 型, #430
+    0x5831, // 報, #112
+    0x5834, // 場, #139
+    0x58F2, // 売, #232
+    0x5909, // 変, #306
+    0x5916, // 外, #222
+    0x591A, // 多, #336
+    0x5927, // 大, #80
+    0x5929, // 天, #278
+    0x5973, // 女, #161
+    0x597D, // 好, #349
+    0x5A5A, // 婚, #479
+    0x5B50, // 子, #113
+    0x5B57, // 字, #492
+    0x5B66, // 学, #132
+    0x5B89, // 安, #295
+    0x5B9A, // 定, #145
+    0x5B9F, // 実, #220
+    0x5BA4, // 室, #482
+    0x5BAE, // 宮, #487
+    0x5BB6, // 家, #211
+    0x5BB9, // 容, #333
+    0x5BFE, // 対, #252
+    0x5C02, // 専, #474
+    0x5C0F, // 小, #212
+    0x5C11, // 少, #377
+    0x5C4B, // 屋, #284
+    0x5C71, // 山, #206
+    0x5CA1, // 岡, #429
+    0x5CF6, // 島, #297
+    0x5DDD, // 川, #253
+    0x5DE5, // 工, #374
+    0x5E02, // 市, #159
+    0x5E2F, // 帯, #416
+    0x5E38, // 常, #437
+    0x5E73, // 平, #390
+    0x5E74, // 年, #54
+    0x5E83, // 広, #367
+    0x5E97, // 店, #149
+    0x5EA6, // 度, #269
+    0x5EAB, // 庫, #380
+    0x5F0F, // 式, #265
+    0x5F15, // 引, #345
+    0x5F37, // 強, #446
+    0x5F53, // 当, #240
+    0x5F62, // 形, #502
+    0x5F8C, // 後, #230
+    0x5F97, // 得, #490
+    0x5FC3, // 心, #307
+    0x5FC5, // 必, #422
+    0x5FDC, // 応, #356
+    0x601D, // 思, #189
+    0x6027, // 性, #201
+    0x6075, // 恵, #400
+    0x60C5, // 情, #140
+    0x60F3, // 想, #477
+    0x610F, // 意, #305
+    0x611B, // 愛, #273
+    0x611F, // 感, #257
+    0x6210, // 成, #262
+    0x6226, // 戦, #365
+    0x6240, // 所, #236
+    0x624B, // 手, #160
+    0x6295, // 投, #129
+    0x6301, // 持, #355
+    0x6307, // 指, #425
+    0x63A2, // 探, #369
+    0x63B2, // 掲, #399
+    0x643A, // 携, #459
+    0x652F, // 支, #512
+    0x653E, // 放, #469
+    0x6559, // 教, #270
+    0x6570, // 数, #181
+    0x6587, // 文, #202
+    0x6599, // 料, #106
+    0x65B0, // 新, #99
+    0x65B9, // 方, #126
+    0x65C5, // 旅, #445
+    0x65E5, // 日, #36
+    0x660E, // 明, #300
+    0x6620, // 映, #418
+    0x6642, // 時, #107
+    0x66F4, // 更, #359
+    0x66F8, // 書, #174
+    0x6700, // 最, #152
+    0x6708, // 月, #49
+    0x6709, // 有, #302
+    0x671F, // 期, #332
+    0x6728, // 木, #203
+    0x672C, // 本, #92
+    0x6750, // 材, #489
+    0x6751, // 村, #466
+    0x6765, // 来, #267
+    0x6771, // 東, #191
+    0x677F, // 板, #411
+    0x679C, // 果, #441
+    0x6821, // 校, #327
+    0x682A, // 株, #412
+    0x683C, // 格, #228
+    0x691C, // 検, #179
+    0x696D, // 業, #166
+    0x697D, // 楽, #172
+    0x69D8, // 様, #255
+    0x6A5F, // 機, #235
+    0x6B21, // 次, #318
+    0x6B62, // 止, #475
+    0x6B63, // 正, #312
+    0x6C17, // 気, #116
+    0x6C34, // 水, #165
+    0x6C42, // 求, #465
+    0x6C7A, // 決, #370
+    0x6CBB, // 治, #505
+    0x6CC1, // 況, #462
+    0x6CD5, // 法, #227
+    0x6CE8, // 注, #372
+    0x6D3B, // 活, #303
+    0x6D41, // 流, #480
+    0x6D77, // 海, #274
+    0x6E08, // 済, #417
+    0x6F14, // 演, #504
+    0x706B, // 火, #264
+    0x70B9, // 点, #331
+    0x7121, // 無, #58
+    0x7248, // 版, #409
+    0x7269, // 物, #170
+    0x7279, // 特, #242
+    0x72B6, // 状, #458
+    0x73FE, // 現, #322
+    0x7406, // 理, #162
+    0x751F, // 生, #122
+    0x7523, // 産, #320
+    0x7528, // 用, #94
+    0x7530, // 田, #195
+    0x7537, // 男, #373
+    0x753A, // 町, #314
+    0x753B, // 画, #91
+    0x754C, // 界, #436
+    0x756A, // 番, #261
+    0x75C5, // 病, #428
+    0x767A, // 発, #194
+    0x767B, // 登, #231
+    0x767D, // 白, #419
+    0x7684, // 的, #251
+    0x76EE, // 目, #197
+    0x76F4, // 直, #497
+    0x76F8, // 相, #286
+    0x770C, // 県, #199
+    0x771F, // 真, #219
+    0x7740, // 着, #283
+    0x77E5, // 知, #185
+    0x77F3, // 石, #500
+    0x78BA, // 確, #383
+    0x793A, // 示, #241
+    0x793E, // 社, #167
+    0x795E, // 神, #315
+    0x798F, // 福, #423
+    0x79C1, // 私, #347
+    0x79D1, // 科, #420
+    0x7A0E, // 税, #368
+    0x7A2E, // 種, #455
+    0x7A3F, // 稿, #148
+    0x7A7A, // 空, #427
+    0x7ACB, // 立, #309
+    0x7B11, // 笑, #454
+    0x7B2C, // 第, #317
+    0x7B49, // 等, #457
+    0x7B54, // 答, #426
+    0x7BA1, // 管, #481
+    0x7CFB, // 系, #408
+    0x7D04, // 約, #276
+    0x7D20, // 素, #407
+    0x7D22, // 索, #214
+    0x7D30, // 細, #381
+    0x7D39, // 紹, #471
+    0x7D42, // 終, #456
+    0x7D44, // 組, #424
+    0x7D4C, // 経, #360
+    0x7D50, // 結, #291
+    0x7D9A, // 続, #357
+    0x7DCF, // 総, #467
+    0x7DDA, // 線, #338
+    0x7DE8, // 編, #453
+    0x7F8E, // 美, #204
+    0x8003, // 考, #387
+    0x8005, // 者, #151
+    0x805E, // 聞, #463
+    0x8077, // 職, #363
+    0x80B2, // 育, #433
+    0x80FD, // 能, #250
+    0x8179, // 腹, #396
+    0x81EA, // 自, #156
+    0x826F, // 良, #329
+    0x8272, // 色, #402
+    0x82B1, // 花, #440
+    0x82B8, // 芸, #413
+    0x82F1, // 英, #485
+    0x8449, // 葉, #472
+    0x884C, // 行, #128
+    0x8853, // 術, #460
+    0x8868, // 表, #209
+    0x88FD, // 製, #431
+    0x897F, // 西, #406
+    0x8981, // 要, #313
+    0x898B, // 見, #101
+    0x898F, // 規, #375
+    0x89A7, // 覧, #171
+    0x89E3, // 解, #388
+    0x8A00, // 言, #210
+    0x8A08, // 計, #343
+    0x8A18, // 記, #136
+    0x8A2D, // 設, #292
+    0x8A71, // 話, #213
+    0x8A73, // 詳, #371
+    0x8A8D, // 認, #404
+    0x8A9E, // 語, #234
+    0x8AAC, // 説, #494
+    0x8AAD, // 読, #301
+    0x8ABF, // 調, #443
+    0x8AC7, // 談, #448
+    0x8B77, // 護, #509
+    0x8C37, // 谷, #506
+    0x8CA9, // 販, #362
+    0x8CB7, // 買, #346
+    0x8CC7, // 資, #473
+    0x8CEA, // 質, #281
+    0x8CFC, // 購, #495
+    0x8EAB, // 身, #470
+    0x8ECA, // 車, #205
+    0x8EE2, // 転, #335
+    0x8F09, // 載, #342
+    0x8FBC, // 込, #229
+    0x8FD1, // 近, #304
+    0x8FD4, // 返, #461
+    0x8FFD, // 追, #379
+    0x9001, // 送, #186
+    0x901A, // 通, #182
+    0x901F, // 速, #340
+    0x9023, // 連, #244
+    0x904B, // 運, #382
+    0x904E, // 過, #498
+    0x9053, // 道, #282
+    0x9054, // 達, #450
+    0x9055, // 違, #414
+    0x9078, // 選, #288
+    0x90E8, // 部, #208
+    0x90FD, // 都, #344
+    0x914D, // 配, #389
+    0x91CD, // 重, #478
+    0x91CE, // 野, #245
+    0x91D1, // 金, #138
+    0x9332, // 録, #238
+    0x9577, // 長, #247
+    0x9580, // 門, #508
+    0x958B, // 開, #248
+    0x9593, // 間, #141
+    0x95A2, // 関, #188
+    0x962A, // 阪, #496
+    0x9650, // 限, #395
+    0x9662, // 院, #449
+    0x9664, // 除, #510
+    0x969B, // 際, #493
+    0x96C6, // 集, #196
+    0x96D1, // 雑, #442
+    0x96FB, // 電, #187
+    0x9762, // 面, #328
+    0x97F3, // 音, #325
+    0x984C, // 題, #310
+    0x985E, // 類, #491
+    0x98A8, // 風, #353
+    0x98DF, // 食, #218
+    0x9928, // 館, #464
+    0x99C5, // 駅, #316
+    0x9A13, // 験, #397
+    0x9AD8, // 高, #176
+    0xFF57, // w, #108
+};
+// the percentage of the sample covered by the above characters
+static const float frequent_ja_coverage=0.880569589120162;
+
+// The 512 most frequently occurring characters for the ko language in a sample of the Internet.
+// Ordered by codepoint, comment shows character and ranking by frequency
+const uint16_t frequent_ko[] = {
+    0x314B, // ㅋ, #148
+    0x314E, // ㅎ, #390
+    0x3160, // ㅠ, #354
+    0x318D, // ㆍ, #439
+    0xAC00, // 가, #6
+    0xAC01, // 각, #231
+    0xAC04, // 간, #106
+    0xAC08, // 갈, #362
+    0xAC10, // 감, #122
+    0xAC11, // 갑, #493
+    0xAC15, // 강, #155
+    0xAC19, // 같, #264
+    0xAC1C, // 개, #87
+    0xAC1D, // 객, #198
+    0xAC24, // 갤, #457
+    0xAC70, // 거, #91
+    0xAC74, // 건, #161
+    0xAC78, // 걸, #338
+    0xAC80, // 검, #184
+    0xAC83, // 것, #116
+    0xAC8C, // 게, #36
+    0xACA0, // 겠, #233
+    0xACA8, // 겨, #341
+    0xACA9, // 격, #245
+    0xACAC, // 견, #413
+    0xACB0, // 결, #202
+    0xACBD, // 경, #62
+    0xACC4, // 계, #142
+    0xACE0, // 고, #12
+    0xACE1, // 곡, #444
+    0xACE8, // 골, #379
+    0xACF3, // 곳, #388
+    0xACF5, // 공, #59
+    0xACFC, // 과, #69
+    0xAD00, // 관, #95
+    0xAD11, // 광, #235
+    0xAD50, // 교, #128
+    0xAD6C, // 구, #52
+    0xAD6D, // 국, #85
+    0xAD70, // 군, #293
+    0xAD74, // 굴, #487
+    0xAD81, // 궁, #441
+    0xAD8C, // 권, #192
+    0xADC0, // 귀, #386
+    0xADDC, // 규, #367
+    0xADF8, // 그, #30
+    0xADF9, // 극, #424
+    0xADFC, // 근, #241
+    0xAE00, // 글, #61
+    0xAE08, // 금, #138
+    0xAE09, // 급, #269
+    0xAE30, // 기, #3
+    0xAE34, // 긴, #465
+    0xAE38, // 길, #297
+    0xAE40, // 김, #205
+    0xAE4C, // 까, #171
+    0xAED8, // 께, #273
+    0xAF43, // 꽃, #475
+    0xB05D, // 끝, #505
+    0xB07C, // 끼, #490
+    0xB098, // 나, #39
+    0xB09C, // 난, #274
+    0xB0A0, // 날, #292
+    0xB0A8, // 남, #139
+    0xB0B4, // 내, #56
+    0xB108, // 너, #272
+    0xB110, // 널, #476
+    0xB118, // 넘, #492
+    0xB124, // 네, #100
+    0xB137, // 넷, #329
+    0xB140, // 녀, #288
+    0xB144, // 년, #151
+    0xB178, // 노, #149
+    0xB17C, // 논, #491
+    0xB180, // 놀, #464
+    0xB18D, // 농, #442
+    0xB204, // 누, #319
+    0xB208, // 눈, #383
+    0xB274, // 뉴, #173
+    0xB290, // 느, #368
+    0xB294, // 는, #5
+    0xB298, // 늘, #322
+    0xB2A5, // 능, #190
+    0xB2C8, // 니, #16
+    0xB2D8, // 님, #153
+    0xB2E4, // 다, #2
+    0xB2E8, // 단, #134
+    0xB2EB, // 닫, #195
+    0xB2EC, // 달, #243
+    0xB2F4, // 담, #254
+    0xB2F5, // 답, #287
+    0xB2F9, // 당, #159
+    0xB300, // 대, #33
+    0xB313, // 댓, #303
+    0xB354, // 더, #140
+    0xB358, // 던, #252
+    0xB367, // 덧, #463
+    0xB370, // 데, #104
+    0xB378, // 델, #429
+    0xB3C4, // 도, #25
+    0xB3C5, // 독, #301
+    0xB3CC, // 돌, #309
+    0xB3D9, // 동, #58
+    0xB418, // 되, #82
+    0xB41C, // 된, #189
+    0xB420, // 될, #408
+    0xB429, // 됩, #332
+    0xB450, // 두, #199
+    0xB4A4, // 뒤, #496
+    0xB4DC, // 드, #40
+    0xB4E0, // 든, #283
+    0xB4E4, // 들, #54
+    0xB4EF, // 듯, #478
+    0xB4F1, // 등, #90
+    0xB514, // 디, #133
+    0xB529, // 딩, #462
+    0xB530, // 따, #333
+    0xB54C, // 때, #240
+    0xB610, // 또, #313
+    0xB77C, // 라, #42
+    0xB77D, // 락, #355
+    0xB780, // 란, #290
+    0xB78C, // 람, #246
+    0xB78D, // 랍, #420
+    0xB791, // 랑, #270
+    0xB798, // 래, #174
+    0xB799, // 랙, #381
+    0xB79C, // 랜, #357
+    0xB7A8, // 램, #359
+    0xB7A9, // 랩, #402
+    0xB7C9, // 량, #346
+    0xB7EC, // 러, #130
+    0xB7F0, // 런, #312
+    0xB7FC, // 럼, #327
+    0xB7FD, // 럽, #447
+    0xB807, // 렇, #412
+    0xB808, // 레, #114
+    0xB80C, // 렌, #395
+    0xB824, // 려, #158
+    0xB825, // 력, #194
+    0xB828, // 련, #326
+    0xB839, // 령, #389
+    0xB85C, // 로, #4
+    0xB85D, // 록, #84
+    0xB860, // 론, #366
+    0xB8CC, // 료, #154
+    0xB8E8, // 루, #236
+    0xB958, // 류, #265
+    0xB974, // 르, #212
+    0xB978, // 른, #250
+    0xB97C, // 를, #35
+    0xB984, // 름, #276
+    0xB9AC, // 리, #19
+    0xB9AD, // 릭, #394
+    0xB9B0, // 린, #259
+    0xB9B4, // 릴, #485
+    0xB9BC, // 림, #305
+    0xB9BD, // 립, #217
+    0xB9C1, // 링, #351
+    0xB9C8, // 마, #67
+    0xB9C9, // 막, #310
+    0xB9CC, // 만, #65
+    0xB9CE, // 많, #257
+    0xB9D0, // 말, #188
+    0xB9DB, // 맛, #397
+    0xB9DD, // 망, #370
+    0xB9DE, // 맞, #399
+    0xB9E4, // 매, #125
+    0xB9E8, // 맨, #422
+    0xBA38, // 머, #311
+    0xBA39, // 먹, #377
+    0xBA3C, // 먼, #469
+    0xBA54, // 메, #147
+    0xBA70, // 며, #191
+    0xBA74, // 면, #72
+    0xBA85, // 명, #131
+    0xBAA8, // 모, #73
+    0xBAA9, // 목, #157
+    0xBAB0, // 몰, #401
+    0xBAB8, // 몸, #437
+    0xBABB, // 못, #336
+    0xBB34, // 무, #80
+    0xBB38, // 문, #57
+    0xBB3C, // 물, #94
+    0xBBA4, // 뮤, #431
+    0xBBF8, // 미, #76
+    0xBBFC, // 민, #200
+    0xBC00, // 밀, #308
+    0xBC0F, // 및, #249
+    0xBC14, // 바, #89
+    0xBC15, // 박, #226
+    0xBC18, // 반, #175
+    0xBC1B, // 받, #248
+    0xBC1C, // 발, #164
+    0xBC29, // 방, #92
+    0xBC30, // 배, #162
+    0xBC31, // 백, #256
+    0xBC84, // 버, #111
+    0xBC88, // 번, #167
+    0xBC8C, // 벌, #423
+    0xBC94, // 범, #427
+    0xBC95, // 법, #207
+    0xBCA0, // 베, #281
+    0xBCA4, // 벤, #378
+    0xBCA8, // 벨, #387
+    0xBCC0, // 변, #253
+    0xBCC4, // 별, #262
+    0xBCD1, // 병, #340
+    0xBCF4, // 보, #20
+    0xBCF5, // 복, #204
+    0xBCF8, // 본, #182
+    0xBCFC, // 볼, #385
+    0xBD09, // 봉, #405
+    0xBD80, // 부, #46
+    0xBD81, // 북, #261
+    0xBD84, // 분, #105
+    0xBD88, // 불, #225
+    0xBDF0, // 뷰, #350
+    0xBE0C, // 브, #214
+    0xBE14, // 블, #99
+    0xBE44, // 비, #55
+    0xBE4C, // 빌, #510
+    0xBE60, // 빠, #398
+    0xC0AC, // 사, #14
+    0xC0AD, // 삭, #342
+    0xC0B0, // 산, #121
+    0xC0B4, // 살, #279
+    0xC0BC, // 삼, #348
+    0xC0C1, // 상, #41
+    0xC0C8, // 새, #282
+    0xC0C9, // 색, #181
+    0xC0DD, // 생, #109
+    0xC11C, // 서, #21
+    0xC11D, // 석, #234
+    0xC120, // 선, #107
+    0xC124, // 설, #170
+    0xC131, // 성, #50
+    0xC138, // 세, #60
+    0xC139, // 섹, #456
+    0xC13C, // 센, #267
+    0xC154, // 셔, #455
+    0xC158, // 션, #237
+    0xC15C, // 셜, #448
+    0xC168, // 셨, #421
+    0xC18C, // 소, #51
+    0xC18D, // 속, #219
+    0xC190, // 손, #323
+    0xC1A1, // 송, #203
+    0xC1C4, // 쇄, #501
+    0xC1FC, // 쇼, #364
+    0xC218, // 수, #27
+    0xC219, // 숙, #467
+    0xC21C, // 순, #258
+    0xC220, // 술, #302
+    0xC26C, // 쉬, #511
+    0xC288, // 슈, #384
+    0xC2A4, // 스, #11
+    0xC2AC, // 슬, #438
+    0xC2B4, // 슴, #504
+    0xC2B5, // 습, #77
+    0xC2B9, // 승, #299
+    0xC2DC, // 시, #13
+    0xC2DD, // 식, #137
+    0xC2E0, // 신, #47
+    0xC2E4, // 실, #132
+    0xC2EC, // 심, #196
+    0xC2ED, // 십, #482
+    0xC2F6, // 싶, #352
+    0xC2F8, // 싸, #419
+    0xC4F0, // 쓰, #278
+    0xC528, // 씨, #360
+    0xC544, // 아, #23
+    0xC545, // 악, #296
+    0xC548, // 안, #71
+    0xC54A, // 않, #209
+    0xC54C, // 알, #222
+    0xC554, // 암, #460
+    0xC558, // 았, #349
+    0xC559, // 앙, #473
+    0xC55E, // 앞, #434
+    0xC560, // 애, #271
+    0xC561, // 액, #415
+    0xC571, // 앱, #477
+    0xC57C, // 야, #124
+    0xC57D, // 약, #229
+    0xC591, // 양, #177
+    0xC5B4, // 어, #24
+    0xC5B5, // 억, #407
+    0xC5B8, // 언, #294
+    0xC5BC, // 얼, #356
+    0xC5C4, // 엄, #426
+    0xC5C5, // 업, #118
+    0xC5C6, // 없, #178
+    0xC5C8, // 었, #165
+    0xC5D0, // 에, #9
+    0xC5D4, // 엔, #375
+    0xC5D8, // 엘, #506
+    0xC5EC, // 여, #66
+    0xC5ED, // 역, #186
+    0xC5EE, // 엮, #488
+    0xC5F0, // 연, #96
+    0xC5F4, // 열, #266
+    0xC5FC, // 염, #449
+    0xC600, // 였, #374
+    0xC601, // 영, #83
+    0xC608, // 예, #168
+    0xC624, // 오, #75
+    0xC628, // 온, #300
+    0xC62C, // 올, #306
+    0xC640, // 와, #119
+    0xC644, // 완, #361
+    0xC654, // 왔, #489
+    0xC655, // 왕, #418
+    0xC678, // 외, #218
+    0xC694, // 요, #43
+    0xC695, // 욕, #479
+    0xC6A9, // 용, #48
+    0xC6B0, // 우, #64
+    0xC6B1, // 욱, #503
+    0xC6B4, // 운, #108
+    0xC6B8, // 울, #223
+    0xC6C0, // 움, #317
+    0xC6C3, // 웃, #404
+    0xC6CC, // 워, #280
+    0xC6D0, // 원, #45
+    0xC6D4, // 월, #150
+    0xC6E8, // 웨, #446
+    0xC6F9, // 웹, #500
+    0xC704, // 위, #78
+    0xC720, // 유, #81
+    0xC721, // 육, #321
+    0xC724, // 윤, #416
+    0xC73C, // 으, #49
+    0xC740, // 은, #31
+    0xC744, // 을, #17
+    0xC74C, // 음, #112
+    0xC751, // 응, #461
+    0xC758, // 의, #8
+    0xC774, // 이, #1
+    0xC775, // 익, #403
+    0xC778, // 인, #18
+    0xC77C, // 일, #28
+    0xC784, // 임, #160
+    0xC785, // 입, #93
+    0xC788, // 있, #44
+    0xC790, // 자, #22
+    0xC791, // 작, #88
+    0xC798, // 잘, #347
+    0xC7A1, // 잡, #372
+    0xC7A5, // 장, #53
+    0xC7AC, // 재, #120
+    0xC7C1, // 쟁, #483
+    0xC800, // 저, #98
+    0xC801, // 적, #97
+    0xC804, // 전, #34
+    0xC808, // 절, #320
+    0xC810, // 점, #201
+    0xC811, // 접, #331
+    0xC815, // 정, #26
+    0xC81C, // 제, #29
+    0xC838, // 져, #414
+    0xC870, // 조, #86
+    0xC871, // 족, #373
+    0xC874, // 존, #432
+    0xC880, // 좀, #470
+    0xC885, // 종, #208
+    0xC88B, // 좋, #239
+    0xC8E0, // 죠, #451
+    0xC8FC, // 주, #38
+    0xC8FD, // 죽, #471
+    0xC900, // 준, #286
+    0xC904, // 줄, #392
+    0xC911, // 중, #103
+    0xC988, // 즈, #255
+    0xC98C, // 즌, #507
+    0xC990, // 즐, #371
+    0xC99D, // 증, #260
+    0xC9C0, // 지, #10
+    0xC9C1, // 직, #216
+    0xC9C4, // 진, #79
+    0xC9C8, // 질, #238
+    0xC9D1, // 집, #206
+    0xC9DC, // 짜, #411
+    0xC9F8, // 째, #494
+    0xCABD, // 쪽, #435
+    0xCC28, // 차, #146
+    0xCC29, // 착, #443
+    0xCC2C, // 찬, #481
+    0xCC30, // 찰, #440
+    0xCC38, // 참, #343
+    0xCC3D, // 창, #304
+    0xCC3E, // 찾, #335
+    0xCC44, // 채, #284
+    0xCC45, // 책, #298
+    0xCC98, // 처, #242
+    0xCC9C, // 천, #143
+    0xCCA0, // 철, #380
+    0xCCA8, // 첨, #452
+    0xCCAB, // 첫, #484
+    0xCCAD, // 청, #197
+    0xCCB4, // 체, #126
+    0xCCD0, // 쳐, #472
+    0xCD08, // 초, #220
+    0xCD1D, // 총, #406
+    0xCD5C, // 최, #179
+    0xCD94, // 추, #136
+    0xCD95, // 축, #337
+    0xCD9C, // 출, #166
+    0xCDA9, // 충, #369
+    0xCDE8, // 취, #210
+    0xCE20, // 츠, #215
+    0xCE21, // 측, #468
+    0xCE35, // 층, #512
+    0xCE58, // 치, #102
+    0xCE5C, // 친, #325
+    0xCE68, // 침, #263
+    0xCE74, // 카, #115
+    0xCE7C, // 칼, #466
+    0xCE90, // 캐, #454
+    0xCEE4, // 커, #285
+    0xCEE8, // 컨, #328
+    0xCEF4, // 컴, #417
+    0xCF00, // 케, #339
+    0xCF13, // 켓, #509
+    0xCF1C, // 켜, #508
+    0xCF54, // 코, #193
+    0xCF58, // 콘, #391
+    0xCFE0, // 쿠, #393
+    0xD035, // 퀵, #453
+    0xD06C, // 크, #101
+    0xD070, // 큰, #495
+    0xD074, // 클, #289
+    0xD0A4, // 키, #230
+    0xD0C0, // 타, #127
+    0xD0C1, // 탁, #314
+    0xD0C4, // 탄, #450
+    0xD0C8, // 탈, #436
+    0xD0DC, // 태, #221
+    0xD0DD, // 택, #275
+    0xD130, // 터, #70
+    0xD14C, // 테, #213
+    0xD150, // 텐, #324
+    0xD154, // 텔, #430
+    0xD15C, // 템, #382
+    0xD1A0, // 토, #145
+    0xD1B5, // 통, #156
+    0xD22C, // 투, #227
+    0xD2B8, // 트, #37
+    0xD2B9, // 특, #247
+    0xD2F0, // 티, #187
+    0xD305, // 팅, #410
+    0xD30C, // 파, #141
+    0xD310, // 판, #163
+    0xD314, // 팔, #499
+    0xD328, // 패, #307
+    0xD32C, // 팬, #459
+    0xD338, // 팸, #433
+    0xD37C, // 퍼, #344
+    0xD398, // 페, #172
+    0xD3B8, // 편, #251
+    0xD3C9, // 평, #291
+    0xD3EC, // 포, #68
+    0xD3ED, // 폭, #445
+    0xD3F0, // 폰, #318
+    0xD45C, // 표, #232
+    0xD480, // 풀, #497
+    0xD488, // 품, #113
+    0xD48D, // 풍, #425
+    0xD504, // 프, #110
+    0xD508, // 픈, #498
+    0xD50C, // 플, #211
+    0xD53C, // 피, #169
+    0xD544, // 필, #295
+    0xD551, // 핑, #376
+    0xD558, // 하, #7
+    0xD559, // 학, #129
+    0xD55C, // 한, #15
+    0xD560, // 할, #144
+    0xD568, // 함, #152
+    0xD569, // 합, #123
+    0xD56D, // 항, #268
+    0xD574, // 해, #32
+    0xD588, // 했, #180
+    0xD589, // 행, #135
+    0xD5A5, // 향, #345
+    0xD5C8, // 허, #396
+    0xD5D8, // 험, #316
+    0xD5E4, // 헤, #474
+    0xD604, // 현, #185
+    0xD611, // 협, #315
+    0xD615, // 형, #244
+    0xD61C, // 혜, #428
+    0xD638, // 호, #117
+    0xD63C, // 혼, #358
+    0xD648, // 홈, #330
+    0xD64D, // 홍, #363
+    0xD654, // 화, #63
+    0xD655, // 확, #183
+    0xD658, // 환, #224
+    0xD65C, // 활, #277
+    0xD669, // 황, #353
+    0xD68C, // 회, #74
+    0xD68D, // 획, #458
+    0xD69F, // 횟, #409
+    0xD6A8, // 효, #400
+    0xD6C4, // 후, #176
+    0xD6C8, // 훈, #486
+    0xD734, // 휴, #365
+    0xD754, // 흔, #480
+    0xD76C, // 희, #334
+    0xD788, // 히, #228
+    0xD798, // 힘, #502
+};
+// the percentage of the sample covered by the above characters
+static const float frequent_ko_coverage=0.948157021464184;
+
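The frequency tables above are kept sorted by codepoint (as each table's comment notes), so a consumer can test membership with a plain binary search instead of a hash table. The sketch below is a hypothetical helper, not part of this patch; the helper name and the example call are assumptions, only the table layout comes from the arrays above.

    #include <algorithm>
    #include <stddef.h>
    #include <stdint.h>

    // Returns true if 'codepoint' is one of the pre-computed frequent characters.
    // 'table' must be sorted ascending by codepoint, as the tables in this file are.
    static bool isFrequentChar(const uint16_t* table, size_t count, uint16_t codepoint) {
        return std::binary_search(table, table + count, codepoint);
    }

    // Example use (names assumed):
    //   bool hot = isFrequentChar(frequent_ko,
    //                             sizeof(frequent_ko) / sizeof(frequent_ko[0]),
    //                             0xAC00 /* 가 */);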
diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp
index 86ff8bd..eef6a3d 100644
--- a/media/libmedia/IAudioFlinger.cpp
+++ b/media/libmedia/IAudioFlinger.cpp
@@ -89,7 +89,7 @@
                                 uint32_t sampleRate,
                                 audio_format_t format,
                                 audio_channel_mask_t channelMask,
-                                size_t frameCount,
+                                size_t *pFrameCount,
                                 track_flags_t *flags,
                                 const sp<IMemory>& sharedBuffer,
                                 audio_io_handle_t output,
@@ -106,6 +106,7 @@
         data.writeInt32(sampleRate);
         data.writeInt32(format);
         data.writeInt32(channelMask);
+        size_t frameCount = pFrameCount != NULL ? *pFrameCount : 0;
         data.writeInt32(frameCount);
         track_flags_t lFlags = flags != NULL ? *flags : (track_flags_t) TRACK_DEFAULT;
         data.writeInt32(lFlags);
@@ -117,7 +118,7 @@
         }
         data.writeInt32((int32_t) output);
         data.writeInt32((int32_t) tid);
-        int lSessionId = 0;
+        int lSessionId = AUDIO_SESSION_ALLOCATE;
         if (sessionId != NULL) {
             lSessionId = *sessionId;
         }
@@ -127,6 +128,10 @@
         if (lStatus != NO_ERROR) {
             ALOGE("createTrack error: %s", strerror(-lStatus));
         } else {
+            frameCount = reply.readInt32();
+            if (pFrameCount != NULL) {
+                *pFrameCount = frameCount;
+            }
             lFlags = reply.readInt32();
             if (flags != NULL) {
                 *flags = lFlags;
@@ -138,8 +143,19 @@
             name = reply.readString8();
             lStatus = reply.readInt32();
             track = interface_cast<IAudioTrack>(reply.readStrongBinder());
+            if (lStatus == NO_ERROR) {
+                if (track == 0) {
+                    ALOGE("createTrack should have returned an IAudioTrack");
+                    lStatus = UNKNOWN_ERROR;
+                }
+            } else {
+                if (track != 0) {
+                    ALOGE("createTrack returned an IAudioTrack but with status %d", lStatus);
+                    track.clear();
+                }
+            }
         }
-        if (status) {
+        if (status != NULL) {
             *status = lStatus;
         }
         return track;
@@ -150,7 +166,7 @@
                                 uint32_t sampleRate,
                                 audio_format_t format,
                                 audio_channel_mask_t channelMask,
-                                size_t frameCount,
+                                size_t *pFrameCount,
                                 track_flags_t *flags,
                                 pid_t tid,
                                 int *sessionId,
@@ -163,11 +179,12 @@
         data.writeInt32(sampleRate);
         data.writeInt32(format);
         data.writeInt32(channelMask);
+        size_t frameCount = pFrameCount != NULL ? *pFrameCount : 0;
         data.writeInt32(frameCount);
         track_flags_t lFlags = flags != NULL ? *flags : (track_flags_t) TRACK_DEFAULT;
         data.writeInt32(lFlags);
         data.writeInt32((int32_t) tid);
-        int lSessionId = 0;
+        int lSessionId = AUDIO_SESSION_ALLOCATE;
         if (sessionId != NULL) {
             lSessionId = *sessionId;
         }
@@ -176,6 +193,10 @@
         if (lStatus != NO_ERROR) {
             ALOGE("openRecord error: %s", strerror(-lStatus));
         } else {
+            frameCount = reply.readInt32();
+            if (pFrameCount != NULL) {
+                *pFrameCount = frameCount;
+            }
             lFlags = reply.readInt32();
             if (flags != NULL) {
                 *flags = lFlags;
@@ -198,7 +219,7 @@
                 }
             }
         }
-        if (status) {
+        if (status != NULL) {
             *status = lStatus;
         }
         return record;
@@ -415,15 +436,25 @@
         audio_io_handle_t output = (audio_io_handle_t) reply.readInt32();
         ALOGV("openOutput() returned output, %d", output);
         devices = (audio_devices_t)reply.readInt32();
-        if (pDevices != NULL) *pDevices = devices;
+        if (pDevices != NULL) {
+            *pDevices = devices;
+        }
         samplingRate = reply.readInt32();
-        if (pSamplingRate != NULL) *pSamplingRate = samplingRate;
+        if (pSamplingRate != NULL) {
+            *pSamplingRate = samplingRate;
+        }
         format = (audio_format_t) reply.readInt32();
-        if (pFormat != NULL) *pFormat = format;
+        if (pFormat != NULL) {
+            *pFormat = format;
+        }
         channelMask = (audio_channel_mask_t)reply.readInt32();
-        if (pChannelMask != NULL) *pChannelMask = channelMask;
+        if (pChannelMask != NULL) {
+            *pChannelMask = channelMask;
+        }
         latency = reply.readInt32();
-        if (pLatencyMs != NULL) *pLatencyMs = latency;
+        if (pLatencyMs != NULL) {
+            *pLatencyMs = latency;
+        }
         return output;
     }
 
@@ -487,13 +518,21 @@
         remote()->transact(OPEN_INPUT, data, &reply);
         audio_io_handle_t input = (audio_io_handle_t) reply.readInt32();
         devices = (audio_devices_t)reply.readInt32();
-        if (pDevices != NULL) *pDevices = devices;
+        if (pDevices != NULL) {
+            *pDevices = devices;
+        }
         samplingRate = reply.readInt32();
-        if (pSamplingRate != NULL) *pSamplingRate = samplingRate;
+        if (pSamplingRate != NULL) {
+            *pSamplingRate = samplingRate;
+        }
         format = (audio_format_t) reply.readInt32();
-        if (pFormat != NULL) *pFormat = format;
+        if (pFormat != NULL) {
+            *pFormat = format;
+        }
         channelMask = (audio_channel_mask_t)reply.readInt32();
-        if (pChannelMask != NULL) *pChannelMask = channelMask;
+        if (pChannelMask != NULL) {
+            *pChannelMask = channelMask;
+        }
         return input;
     }
 
@@ -535,11 +574,11 @@
         status_t status = reply.readInt32();
         if (status == NO_ERROR) {
             uint32_t tmp = reply.readInt32();
-            if (halFrames) {
+            if (halFrames != NULL) {
                 *halFrames = tmp;
             }
             tmp = reply.readInt32();
-            if (dspFrames) {
+            if (dspFrames != NULL) {
                 *dspFrames = tmp;
             }
         }
@@ -551,8 +590,11 @@
         Parcel data, reply;
         data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
         data.writeInt32((int32_t) ioHandle);
-        remote()->transact(GET_INPUT_FRAMES_LOST, data, &reply);
-        return reply.readInt32();
+        status_t status = remote()->transact(GET_INPUT_FRAMES_LOST, data, &reply);
+        if (status != NO_ERROR) {
+            return 0;
+        }
+        return (uint32_t) reply.readInt32();
     }
 
     virtual int newAudioSessionId()
@@ -657,7 +699,7 @@
 
         if (pDesc == NULL) {
             return effect;
-            if (status) {
+            if (status != NULL) {
                 *status = BAD_VALUE;
             }
         }
@@ -675,7 +717,7 @@
         } else {
             lStatus = reply.readInt32();
             int tmp = reply.readInt32();
-            if (id) {
+            if (id != NULL) {
                 *id = tmp;
             }
             tmp = reply.readInt32();
@@ -685,7 +727,7 @@
             effect = interface_cast<IEffect>(reply.readStrongBinder());
             reply.read(pDesc, sizeof(effect_descriptor_t));
         }
-        if (status) {
+        if (status != NULL) {
             *status = lStatus;
         }
 
@@ -775,9 +817,11 @@
             } else {
                 track = createTrack(
                         (audio_stream_type_t) streamType, sampleRate, format,
-                        channelMask, frameCount, &flags, buffer, output, tid,
+                        channelMask, &frameCount, &flags, buffer, output, tid,
                         &sessionId, name, clientUid, &status);
+                LOG_ALWAYS_FATAL_IF((track != 0) != (status == NO_ERROR));
             }
+            reply->writeInt32(frameCount);
             reply->writeInt32(flags);
             reply->writeInt32(sessionId);
             reply->writeString8(name);
@@ -797,8 +841,9 @@
             int sessionId = data.readInt32();
             status_t status;
             sp<IAudioRecord> record = openRecord(input,
-                    sampleRate, format, channelMask, frameCount, &flags, tid, &sessionId, &status);
+                    sampleRate, format, channelMask, &frameCount, &flags, tid, &sessionId, &status);
             LOG_ALWAYS_FATAL_IF((record != 0) != (status == NO_ERROR));
+            reply->writeInt32(frameCount);
             reply->writeInt32(flags);
             reply->writeInt32(sessionId);
             reply->writeInt32(status);
@@ -1026,7 +1071,7 @@
         case GET_INPUT_FRAMES_LOST: {
             CHECK_INTERFACE(IAudioFlinger, data, reply);
             audio_io_handle_t ioHandle = (audio_io_handle_t) data.readInt32();
-            reply->writeInt32(getInputFramesLost(ioHandle));
+            reply->writeInt32((int32_t) getInputFramesLost(ioHandle));
             return NO_ERROR;
         } break;
         case NEW_AUDIO_SESSION_ID: {
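The common thread in the IAudioFlinger changes above is that createTrack() and openRecord() now take the frame count by pointer: the client writes its requested size, the server replies with the count it actually allocated, and the proxy copies that value back out of the reply parcel. The fragment below is a hypothetical caller-side sketch, not part of this patch; the surrounding variable declarations and argument values are assumptions, only the new in/out contract comes from the diff.

    // Requested size goes in; the server-granted size comes back out through the same pointer.
    size_t frameCount = 1024;
    status_t status;
    sp<IAudioTrack> track = audioFlinger->createTrack(
            streamType, sampleRate, format, channelMask,
            &frameCount,          // previously passed by value
            &flags, sharedBuffer, output, tid, &sessionId, name, clientUid, &status);
    if (status == NO_ERROR) {
        // frameCount now holds the value AudioFlinger wrote into the reply.
    }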
diff --git a/media/libmedia/IAudioRecord.cpp b/media/libmedia/IAudioRecord.cpp
index 4a7de65..9866d70 100644
--- a/media/libmedia/IAudioRecord.cpp
+++ b/media/libmedia/IAudioRecord.cpp
@@ -50,6 +50,9 @@
         status_t status = remote()->transact(GET_CBLK, data, &reply);
         if (status == NO_ERROR) {
             cblk = interface_cast<IMemory>(reply.readStrongBinder());
+            if (cblk != 0 && cblk->pointer() == NULL) {
+                cblk.clear();
+            }
         }
         return cblk;
     }
diff --git a/media/libmedia/IAudioTrack.cpp b/media/libmedia/IAudioTrack.cpp
index 3cd9cfd..ffc21fc 100644
--- a/media/libmedia/IAudioTrack.cpp
+++ b/media/libmedia/IAudioTrack.cpp
@@ -60,6 +60,9 @@
         status_t status = remote()->transact(GET_CBLK, data, &reply);
         if (status == NO_ERROR) {
             cblk = interface_cast<IMemory>(reply.readStrongBinder());
+            if (cblk != 0 && cblk->pointer() == NULL) {
+                cblk.clear();
+            }
         }
         return cblk;
     }
@@ -122,6 +125,9 @@
             status = reply.readInt32();
             if (status == NO_ERROR) {
                 *buffer = interface_cast<IMemory>(reply.readStrongBinder());
+                if (*buffer != 0 && (*buffer)->pointer() == NULL) {
+                    (*buffer).clear();
+                }
             }
         }
         return status;
diff --git a/media/libmedia/IEffect.cpp b/media/libmedia/IEffect.cpp
index a303a8f..b94012a 100644
--- a/media/libmedia/IEffect.cpp
+++ b/media/libmedia/IEffect.cpp
@@ -117,6 +117,9 @@
         status_t status = remote()->transact(GET_CBLK, data, &reply);
         if (status == NO_ERROR) {
             cblk = interface_cast<IMemory>(reply.readStrongBinder());
+            if (cblk != 0 && cblk->pointer() == NULL) {
+                cblk.clear();
+            }
         }
         return cblk;
     }
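
The same defensive check is added to the GET_CBLK paths of IAudioRecord, IAudioTrack and IEffect above: if the control block arrives over binder but cannot be mapped in this process (pointer() returns NULL), the proxy drops it so callers see a NULL block instead of touching unmapped memory. A minimal sketch of that guard as a standalone helper (not part of the patch); the helper name is an assumption, and only the IMemory calls used in the hunks above are relied on.

    #include <binder/IMemory.h>
    #include <utils/StrongPointer.h>

    namespace android {

    // Drop a shared-memory block that was received over binder but could not be
    // mapped locally, so callers get a clean NULL instead of a dangling IMemory.
    static sp<IMemory> dropIfUnmapped(const sp<IMemory>& mem) {
        if (mem != 0 && mem->pointer() == NULL) {
            return NULL;
        }
        return mem;
    }

    }  // namespace android
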
diff --git a/media/libmedia/IMediaDeathNotifier.cpp b/media/libmedia/IMediaDeathNotifier.cpp
index 9db5b1b..10b4934 100644
--- a/media/libmedia/IMediaDeathNotifier.cpp
+++ b/media/libmedia/IMediaDeathNotifier.cpp
@@ -75,7 +75,7 @@
 }
 
 void
-IMediaDeathNotifier::DeathNotifier::binderDied(const wp<IBinder>& who) {
+IMediaDeathNotifier::DeathNotifier::binderDied(const wp<IBinder>& who __unused) {
     ALOGW("media server died");
 
     // Need to do this with the lock held
diff --git a/media/libmedia/IMediaHTTPConnection.cpp b/media/libmedia/IMediaHTTPConnection.cpp
new file mode 100644
index 0000000..622d9cf
--- /dev/null
+++ b/media/libmedia/IMediaHTTPConnection.cpp
@@ -0,0 +1,158 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "IMediaHTTPConnection"
+#include <utils/Log.h>
+
+#include <media/IMediaHTTPConnection.h>
+
+#include <binder/IMemory.h>
+#include <binder/Parcel.h>
+#include <utils/String8.h>
+#include <media/stagefright/foundation/ADebug.h>
+
+namespace android {
+
+enum {
+    CONNECT = IBinder::FIRST_CALL_TRANSACTION,
+    DISCONNECT,
+    READ_AT,
+    GET_SIZE,
+    GET_MIME_TYPE,
+};
+
+struct BpMediaHTTPConnection : public BpInterface<IMediaHTTPConnection> {
+    BpMediaHTTPConnection(const sp<IBinder> &impl)
+        : BpInterface<IMediaHTTPConnection>(impl) {
+    }
+
+    virtual bool connect(
+            const char *uri, const KeyedVector<String8, String8> *headers) {
+        Parcel data, reply;
+        data.writeInterfaceToken(
+                IMediaHTTPConnection::getInterfaceDescriptor());
+
+        String16 tmp(uri);
+        data.writeString16(tmp);
+
+        tmp = String16("");
+        if (headers != NULL) {
+            for (size_t i = 0; i < headers->size(); ++i) {
+                String16 key(headers->keyAt(i).string());
+                String16 val(headers->valueAt(i).string());
+
+                tmp.append(key);
+                tmp.append(String16(": "));
+                tmp.append(val);
+                tmp.append(String16("\r\n"));
+            }
+        }
+        data.writeString16(tmp);
+
+        remote()->transact(CONNECT, data, &reply);
+
+        int32_t exceptionCode = reply.readExceptionCode();
+
+        if (exceptionCode) {
+            return false;
+        }
+
+        sp<IBinder> binder = reply.readStrongBinder();
+        mMemory = interface_cast<IMemory>(binder);
+
+        return mMemory != NULL;
+    }
+
+    virtual void disconnect() {
+        Parcel data, reply;
+        data.writeInterfaceToken(
+                IMediaHTTPConnection::getInterfaceDescriptor());
+
+        remote()->transact(DISCONNECT, data, &reply);
+    }
+
+    virtual ssize_t readAt(off64_t offset, void *buffer, size_t size) {
+        Parcel data, reply;
+        data.writeInterfaceToken(
+                IMediaHTTPConnection::getInterfaceDescriptor());
+
+        data.writeInt64(offset);
+        data.writeInt32(size);
+
+        status_t err = remote()->transact(READ_AT, data, &reply);
+        CHECK_EQ(err, (status_t)OK);
+
+        int32_t exceptionCode = reply.readExceptionCode();
+
+        if (exceptionCode) {
+            return UNKNOWN_ERROR;
+        }
+
+        int32_t len = reply.readInt32();
+
+        if (len > 0) {
+            memcpy(buffer, mMemory->pointer(), len);
+        }
+
+        return len;
+    }
+
+    virtual off64_t getSize() {
+        Parcel data, reply;
+        data.writeInterfaceToken(
+                IMediaHTTPConnection::getInterfaceDescriptor());
+
+        remote()->transact(GET_SIZE, data, &reply);
+
+        int32_t exceptionCode = reply.readExceptionCode();
+
+        if (exceptionCode) {
+            return UNKNOWN_ERROR;
+        }
+
+        return reply.readInt64();
+    }
+
+    virtual status_t getMIMEType(String8 *mimeType) {
+        *mimeType = String8("");
+
+        Parcel data, reply;
+        data.writeInterfaceToken(
+                IMediaHTTPConnection::getInterfaceDescriptor());
+
+        remote()->transact(GET_MIME_TYPE, data, &reply);
+
+        int32_t exceptionCode = reply.readExceptionCode();
+
+        if (exceptionCode) {
+            return UNKNOWN_ERROR;
+        }
+
+        *mimeType = String8(reply.readString16());
+
+        return OK;
+    }
+
+private:
+    sp<IMemory> mMemory;
+};
+
+IMPLEMENT_META_INTERFACE(
+        MediaHTTPConnection, "android.media.IMediaHTTPConnection");
+
+}  // namespace android
+
diff --git a/media/libmedia/IMediaHTTPService.cpp b/media/libmedia/IMediaHTTPService.cpp
new file mode 100644
index 0000000..1260582
--- /dev/null
+++ b/media/libmedia/IMediaHTTPService.cpp
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "IMediaHTTPService"
+#include <utils/Log.h>
+
+#include <media/IMediaHTTPService.h>
+
+#include <binder/Parcel.h>
+#include <media/IMediaHTTPConnection.h>
+
+namespace android {
+
+enum {
+    MAKE_HTTP = IBinder::FIRST_CALL_TRANSACTION,
+};
+
+struct BpMediaHTTPService : public BpInterface<IMediaHTTPService> {
+    BpMediaHTTPService(const sp<IBinder> &impl)
+        : BpInterface<IMediaHTTPService>(impl) {
+    }
+
+    virtual sp<IMediaHTTPConnection> makeHTTPConnection() {
+        Parcel data, reply;
+        data.writeInterfaceToken(
+                IMediaHTTPService::getInterfaceDescriptor());
+
+        remote()->transact(MAKE_HTTP, data, &reply);
+
+        status_t err = reply.readInt32();
+
+        if (err != OK) {
+            return NULL;
+        }
+
+        return interface_cast<IMediaHTTPConnection>(reply.readStrongBinder());
+    }
+};
+
+IMPLEMENT_META_INTERFACE(
+        MediaHTTPService, "android.media.IMediaHTTPService");
+
+}  // namespace android
+
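
Taken together, the two new files define a small binder API: IMediaHTTPService hands out IMediaHTTPConnection objects, and each connection copies readAt() results out of the shared IMemory it receives at connect() time. A minimal usage sketch under that reading (not part of the patch); the function name and error handling are assumptions, and only the methods declared by the proxies above are used.

    #include <media/IMediaHTTPService.h>
    #include <media/IMediaHTTPConnection.h>
    #include <utils/String8.h>

    using namespace android;

    // Fetch the first `size` bytes of `uri` through a media HTTP service.
    static ssize_t fetchPrefix(const sp<IMediaHTTPService>& service,
                               const char* uri, void* buf, size_t size) {
        sp<IMediaHTTPConnection> conn = service->makeHTTPConnection();
        if (conn == NULL || !conn->connect(uri, NULL /* headers */)) {
            return -1;                        // no connection object or connect failed
        }
        String8 mime;
        conn->getMIMEType(&mime);             // e.g. "audio/mpeg"
        off64_t total = conn->getSize();      // negative if the size is unavailable
        (void)total;
        ssize_t n = conn->readAt(0 /* offset */, buf, size);
        conn->disconnect();
        return n;                             // bytes copied out of the shared IMemory
    }
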
diff --git a/media/libmedia/IMediaMetadataRetriever.cpp b/media/libmedia/IMediaMetadataRetriever.cpp
index bb066a0..c7d9d51 100644
--- a/media/libmedia/IMediaMetadataRetriever.cpp
+++ b/media/libmedia/IMediaMetadataRetriever.cpp
@@ -18,6 +18,7 @@
 #include <stdint.h>
 #include <sys/types.h>
 #include <binder/Parcel.h>
+#include <media/IMediaHTTPService.h>
 #include <media/IMediaMetadataRetriever.h>
 #include <utils/String8.h>
 #include <utils/KeyedVector.h>
@@ -84,10 +85,16 @@
     }
 
     status_t setDataSource(
-            const char *srcUrl, const KeyedVector<String8, String8> *headers)
+            const sp<IMediaHTTPService> &httpService,
+            const char *srcUrl,
+            const KeyedVector<String8, String8> *headers)
     {
         Parcel data, reply;
         data.writeInterfaceToken(IMediaMetadataRetriever::getInterfaceDescriptor());
+        data.writeInt32(httpService != NULL);
+        if (httpService != NULL) {
+            data.writeStrongBinder(httpService->asBinder());
+        }
         data.writeCString(srcUrl);
 
         if (headers == NULL) {
@@ -195,6 +202,13 @@
         } break;
         case SET_DATA_SOURCE_URL: {
             CHECK_INTERFACE(IMediaMetadataRetriever, data, reply);
+
+            sp<IMediaHTTPService> httpService;
+            if (data.readInt32()) {
+                httpService =
+                    interface_cast<IMediaHTTPService>(data.readStrongBinder());
+            }
+
             const char* srcUrl = data.readCString();
 
             KeyedVector<String8, String8> headers;
@@ -206,7 +220,8 @@
             }
 
             reply->writeInt32(
-                    setDataSource(srcUrl, numHeaders > 0 ? &headers : NULL));
+                    setDataSource(
+                        httpService, srcUrl, numHeaders > 0 ? &headers : NULL));
 
             return NO_ERROR;
         } break;
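
The httpService argument is marshalled as an optional strong binder: a presence flag written with writeInt32(), followed by the binder only when the flag is set, and the receiving side mirrors the same two steps before falling back to a NULL service. The IMediaPlayer and IMediaPlayerService hunks below reuse the identical convention. A small sketch of that convention factored into helpers (not part of the patch); the helper names are assumptions, the Parcel calls are exactly the ones used above.

    #include <binder/Parcel.h>
    #include <media/IMediaHTTPService.h>

    namespace android {

    // Writer side: presence flag first, strong binder only when present.
    static void writeOptionalHTTPService(
            Parcel* data, const sp<IMediaHTTPService>& httpService) {
        data->writeInt32(httpService != NULL);
        if (httpService != NULL) {
            data->writeStrongBinder(httpService->asBinder());
        }
    }

    // Reader side: only consume a binder when the flag says one was written.
    static sp<IMediaHTTPService> readOptionalHTTPService(const Parcel& data) {
        sp<IMediaHTTPService> httpService;
        if (data.readInt32()) {
            httpService =
                interface_cast<IMediaHTTPService>(data.readStrongBinder());
        }
        return httpService;
    }

    }  // namespace android
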
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index e79bcd2..d778d05 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -21,6 +21,7 @@
 
 #include <binder/Parcel.h>
 
+#include <media/IMediaHTTPService.h>
 #include <media/IMediaPlayer.h>
 #include <media/IStreamSource.h>
 
@@ -75,11 +76,17 @@
         remote()->transact(DISCONNECT, data, &reply);
     }
 
-    status_t setDataSource(const char* url,
+    status_t setDataSource(
+            const sp<IMediaHTTPService> &httpService,
+            const char* url,
             const KeyedVector<String8, String8>* headers)
     {
         Parcel data, reply;
         data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
+        data.writeInt32(httpService != NULL);
+        if (httpService != NULL) {
+            data.writeStrongBinder(httpService->asBinder());
+        }
         data.writeCString(url);
         if (headers == NULL) {
             data.writeInt32(0);
@@ -355,6 +362,13 @@
         } break;
         case SET_DATA_SOURCE_URL: {
             CHECK_INTERFACE(IMediaPlayer, data, reply);
+
+            sp<IMediaHTTPService> httpService;
+            if (data.readInt32()) {
+                httpService =
+                    interface_cast<IMediaHTTPService>(data.readStrongBinder());
+            }
+
             const char* url = data.readCString();
             KeyedVector<String8, String8> headers;
             int32_t numHeaders = data.readInt32();
@@ -363,7 +377,8 @@
                 String8 value = data.readString8();
                 headers.add(key, value);
             }
-            reply->writeInt32(setDataSource(url, numHeaders > 0 ? &headers : NULL));
+            reply->writeInt32(setDataSource(
+                        httpService, url, numHeaders > 0 ? &headers : NULL));
             return NO_ERROR;
         } break;
         case SET_DATA_SOURCE_FD: {
diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp
index 3c22b4c..d116b14 100644
--- a/media/libmedia/IMediaPlayerService.cpp
+++ b/media/libmedia/IMediaPlayerService.cpp
@@ -23,6 +23,7 @@
 #include <media/ICrypto.h>
 #include <media/IDrm.h>
 #include <media/IHDCP.h>
+#include <media/IMediaHTTPService.h>
 #include <media/IMediaPlayerService.h>
 #include <media/IMediaRecorder.h>
 #include <media/IOMX.h>
@@ -48,7 +49,6 @@
     ADD_BATTERY_DATA,
     PULL_BATTERY_DATA,
     LISTEN_FOR_REMOTE_DISPLAY,
-    UPDATE_PROXY_CONFIG,
 };
 
 class BpMediaPlayerService: public BpInterface<IMediaPlayerService>
@@ -86,12 +86,21 @@
         return interface_cast<IMediaRecorder>(reply.readStrongBinder());
     }
 
-    virtual status_t decode(const char* url, uint32_t *pSampleRate, int* pNumChannels,
-                               audio_format_t* pFormat,
-                               const sp<IMemoryHeap>& heap, size_t *pSize)
+    virtual status_t decode(
+            const sp<IMediaHTTPService> &httpService,
+            const char* url,
+            uint32_t *pSampleRate,
+            int* pNumChannels,
+            audio_format_t* pFormat,
+            const sp<IMemoryHeap>& heap,
+            size_t *pSize)
     {
         Parcel data, reply;
         data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
+        data.writeInt32(httpService != NULL);
+        if (httpService != NULL) {
+            data.writeStrongBinder(httpService->asBinder());
+        }
         data.writeCString(url);
         data.writeStrongBinder(heap->asBinder());
         status_t status = remote()->transact(DECODE_URL, data, &reply);
@@ -182,25 +191,6 @@
         remote()->transact(LISTEN_FOR_REMOTE_DISPLAY, data, &reply);
         return interface_cast<IRemoteDisplay>(reply.readStrongBinder());
     }
-
-    virtual status_t updateProxyConfig(
-            const char *host, int32_t port, const char *exclusionList) {
-        Parcel data, reply;
-
-        data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
-        if (host == NULL) {
-            data.writeInt32(0);
-        } else {
-            data.writeInt32(1);
-            data.writeCString(host);
-            data.writeInt32(port);
-            data.writeCString(exclusionList);
-        }
-
-        remote()->transact(UPDATE_PROXY_CONFIG, data, &reply);
-
-        return reply.readInt32();
-    }
 };
 
 IMPLEMENT_META_INTERFACE(MediaPlayerService, "android.media.IMediaPlayerService");
@@ -222,13 +212,25 @@
         } break;
         case DECODE_URL: {
             CHECK_INTERFACE(IMediaPlayerService, data, reply);
+            sp<IMediaHTTPService> httpService;
+            if (data.readInt32()) {
+                httpService =
+                    interface_cast<IMediaHTTPService>(data.readStrongBinder());
+            }
             const char* url = data.readCString();
             sp<IMemoryHeap> heap = interface_cast<IMemoryHeap>(data.readStrongBinder());
             uint32_t sampleRate;
             int numChannels;
             audio_format_t format;
             size_t size;
-            status_t status = decode(url, &sampleRate, &numChannels, &format, heap, &size);
+            status_t status =
+                decode(httpService,
+                       url,
+                       &sampleRate,
+                       &numChannels,
+                       &format,
+                       heap,
+                       &size);
             reply->writeInt32(status);
             if (status == NO_ERROR) {
                 reply->writeInt32(sampleRate);
@@ -316,24 +318,6 @@
             reply->writeStrongBinder(display->asBinder());
             return NO_ERROR;
         } break;
-        case UPDATE_PROXY_CONFIG:
-        {
-            CHECK_INTERFACE(IMediaPlayerService, data, reply);
-
-            const char *host = NULL;
-            int32_t port = 0;
-            const char *exclusionList = NULL;
-
-            if (data.readInt32()) {
-                host = data.readCString();
-                port = data.readInt32();
-                exclusionList = data.readCString();
-            }
-
-            reply->writeInt32(updateProxyConfig(host, port, exclusionList));
-
-            return OK;
-        }
         default:
             return BBinder::onTransact(code, data, reply, flags);
     }
diff --git a/media/libmedia/MediaProfiles.cpp b/media/libmedia/MediaProfiles.cpp
index 8319cd7..1074da9 100644
--- a/media/libmedia/MediaProfiles.cpp
+++ b/media/libmedia/MediaProfiles.cpp
@@ -81,8 +81,14 @@
     {"timelapseqvga", CAMCORDER_QUALITY_TIME_LAPSE_QVGA},
 };
 
+#if LOG_NDEBUG
+#define UNUSED __unused
+#else
+#define UNUSED
+#endif
+
 /*static*/ void
-MediaProfiles::logVideoCodec(const MediaProfiles::VideoCodec& codec)
+MediaProfiles::logVideoCodec(const MediaProfiles::VideoCodec& codec UNUSED)
 {
     ALOGV("video codec:");
     ALOGV("codec = %d", codec.mCodec);
@@ -93,7 +99,7 @@
 }
 
 /*static*/ void
-MediaProfiles::logAudioCodec(const MediaProfiles::AudioCodec& codec)
+MediaProfiles::logAudioCodec(const MediaProfiles::AudioCodec& codec UNUSED)
 {
     ALOGV("audio codec:");
     ALOGV("codec = %d", codec.mCodec);
@@ -103,7 +109,7 @@
 }
 
 /*static*/ void
-MediaProfiles::logVideoEncoderCap(const MediaProfiles::VideoEncoderCap& cap)
+MediaProfiles::logVideoEncoderCap(const MediaProfiles::VideoEncoderCap& cap UNUSED)
 {
     ALOGV("video encoder cap:");
     ALOGV("codec = %d", cap.mCodec);
@@ -114,7 +120,7 @@
 }
 
 /*static*/ void
-MediaProfiles::logAudioEncoderCap(const MediaProfiles::AudioEncoderCap& cap)
+MediaProfiles::logAudioEncoderCap(const MediaProfiles::AudioEncoderCap& cap UNUSED)
 {
     ALOGV("audio encoder cap:");
     ALOGV("codec = %d", cap.mCodec);
@@ -124,21 +130,21 @@
 }
 
 /*static*/ void
-MediaProfiles::logVideoDecoderCap(const MediaProfiles::VideoDecoderCap& cap)
+MediaProfiles::logVideoDecoderCap(const MediaProfiles::VideoDecoderCap& cap UNUSED)
 {
     ALOGV("video decoder cap:");
     ALOGV("codec = %d", cap.mCodec);
 }
 
 /*static*/ void
-MediaProfiles::logAudioDecoderCap(const MediaProfiles::AudioDecoderCap& cap)
+MediaProfiles::logAudioDecoderCap(const MediaProfiles::AudioDecoderCap& cap UNUSED)
 {
     ALOGV("audio codec cap:");
     ALOGV("codec = %d", cap.mCodec);
 }
 
 /*static*/ void
-MediaProfiles::logVideoEditorCap(const MediaProfiles::VideoEditorCap& cap)
+MediaProfiles::logVideoEditorCap(const MediaProfiles::VideoEditorCap& cap UNUSED)
 {
     ALOGV("videoeditor cap:");
     ALOGV("mMaxInputFrameWidth = %d", cap.mMaxInputFrameWidth);
diff --git a/media/libmedia/MediaScannerClient.cpp b/media/libmedia/MediaScannerClient.cpp
index 93a4a4c..1661f04 100644
--- a/media/libmedia/MediaScannerClient.cpp
+++ b/media/libmedia/MediaScannerClient.cpp
@@ -14,217 +14,57 @@
  * limitations under the License.
  */
 
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaScannerClient"
+#include <utils/Log.h>
+
 #include <media/mediascanner.h>
 
+#include "CharacterEncodingDetector.h"
 #include "StringArray.h"
 
-#include "autodetect.h"
-#include "unicode/ucnv.h"
-#include "unicode/ustring.h"
-
 namespace android {
 
 MediaScannerClient::MediaScannerClient()
-    :   mNames(NULL),
-        mValues(NULL),
-        mLocaleEncoding(kEncodingNone)
+    :   mEncodingDetector(NULL)
 {
 }
 
 MediaScannerClient::~MediaScannerClient()
 {
-    delete mNames;
-    delete mValues;
+    delete mEncodingDetector;
 }
 
 void MediaScannerClient::setLocale(const char* locale)
 {
-    if (!locale) return;
-
-    if (!strncmp(locale, "ja", 2))
-        mLocaleEncoding = kEncodingShiftJIS;
-    else if (!strncmp(locale, "ko", 2))
-        mLocaleEncoding = kEncodingEUCKR;
-    else if (!strncmp(locale, "zh", 2)) {
-        if (!strcmp(locale, "zh_CN")) {
-            // simplified chinese for mainland China
-            mLocaleEncoding = kEncodingGBK;
-        } else {
-            // assume traditional for non-mainland Chinese locales (Taiwan, Hong Kong, Singapore)
-            mLocaleEncoding = kEncodingBig5;
-        }
-    }
+    mLocale = locale; // not currently used
 }
 
 void MediaScannerClient::beginFile()
 {
-    mNames = new StringArray;
-    mValues = new StringArray;
+    delete mEncodingDetector;
+    mEncodingDetector = new CharacterEncodingDetector();
 }
 
 status_t MediaScannerClient::addStringTag(const char* name, const char* value)
 {
-    if (mLocaleEncoding != kEncodingNone) {
-        // don't bother caching strings that are all ASCII.
-        // call handleStringTag directly instead.
-        // check to see if value (which should be utf8) has any non-ASCII characters
-        bool nonAscii = false;
-        const char* chp = value;
-        char ch;
-        while ((ch = *chp++)) {
-            if (ch & 0x80) {
-                nonAscii = true;
-                break;
-            }
-        }
-
-        if (nonAscii) {
-            // save the strings for later so they can be used for native encoding detection
-            mNames->push_back(name);
-            mValues->push_back(value);
-            return OK;
-        }
-        // else fall through
-    }
-
-    // autodetection is not necessary, so no need to cache the values
-    // pass directly to the client instead
-    return handleStringTag(name, value);
-}
-
-static uint32_t possibleEncodings(const char* s)
-{
-    uint32_t result = kEncodingAll;
-    // if s contains a native encoding, then it was mistakenly encoded in utf8 as if it were latin-1
-    // so we need to reverse the latin-1 -> utf8 conversion to get the native chars back
-    uint8_t ch1, ch2;
-    uint8_t* chp = (uint8_t *)s;
-
-    while ((ch1 = *chp++)) {
-        if (ch1 & 0x80) {
-            ch2 = *chp++;
-            ch1 = ((ch1 << 6) & 0xC0) | (ch2 & 0x3F);
-            // ch1 is now the first byte of the potential native char
-
-            ch2 = *chp++;
-            if (ch2 & 0x80)
-                ch2 = ((ch2 << 6) & 0xC0) | (*chp++ & 0x3F);
-            // ch2 is now the second byte of the potential native char
-            int ch = (int)ch1 << 8 | (int)ch2;
-            result &= findPossibleEncodings(ch);
-        }
-        // else ASCII character, which could be anything
-    }
-
-    return result;
-}
-
-void MediaScannerClient::convertValues(uint32_t encoding)
-{
-    const char* enc = NULL;
-    switch (encoding) {
-        case kEncodingShiftJIS:
-            enc = "shift-jis";
-            break;
-        case kEncodingGBK:
-            enc = "gbk";
-            break;
-        case kEncodingBig5:
-            enc = "Big5";
-            break;
-        case kEncodingEUCKR:
-            enc = "EUC-KR";
-            break;
-    }
-
-    if (enc) {
-        UErrorCode status = U_ZERO_ERROR;
-
-        UConverter *conv = ucnv_open(enc, &status);
-        if (U_FAILURE(status)) {
-            ALOGE("could not create UConverter for %s", enc);
-            return;
-        }
-        UConverter *utf8Conv = ucnv_open("UTF-8", &status);
-        if (U_FAILURE(status)) {
-            ALOGE("could not create UConverter for UTF-8");
-            ucnv_close(conv);
-            return;
-        }
-
-        // for each value string, convert from native encoding to UTF-8
-        for (int i = 0; i < mNames->size(); i++) {
-            // first we need to untangle the utf8 and convert it back to the original bytes
-            // since we are reducing the length of the string, we can do this in place
-            uint8_t* src = (uint8_t *)mValues->getEntry(i);
-            int len = strlen((char *)src);
-            uint8_t* dest = src;
-
-            uint8_t uch;
-            while ((uch = *src++)) {
-                if (uch & 0x80)
-                    *dest++ = ((uch << 6) & 0xC0) | (*src++ & 0x3F);
-                else
-                    *dest++ = uch;
-            }
-            *dest = 0;
-
-            // now convert from native encoding to UTF-8
-            const char* source = mValues->getEntry(i);
-            int targetLength = len * 3 + 1;
-            char* buffer = new char[targetLength];
-            // don't normally check for NULL, but in this case targetLength may be large
-            if (!buffer)
-                break;
-            char* target = buffer;
-
-            ucnv_convertEx(utf8Conv, conv, &target, target + targetLength,
-                    &source, (const char *)dest, NULL, NULL, NULL, NULL, TRUE, TRUE, &status);
-            if (U_FAILURE(status)) {
-                ALOGE("ucnv_convertEx failed: %d", status);
-                mValues->setEntry(i, "???");
-            } else {
-                // zero terminate
-                *target = 0;
-                mValues->setEntry(i, buffer);
-            }
-
-            delete[] buffer;
-        }
-
-        ucnv_close(conv);
-        ucnv_close(utf8Conv);
-    }
+    mEncodingDetector->addTag(name, value);
+    return OK;
 }
 
 void MediaScannerClient::endFile()
 {
-    if (mLocaleEncoding != kEncodingNone) {
-        int size = mNames->size();
-        uint32_t encoding = kEncodingAll;
+    mEncodingDetector->detectAndConvert();
 
-        // compute a bit mask containing all possible encodings
-        for (int i = 0; i < mNames->size(); i++)
-            encoding &= possibleEncodings(mValues->getEntry(i));
-
-        // if the locale encoding matches, then assume we have a native encoding.
-        if (encoding & mLocaleEncoding)
-            convertValues(mLocaleEncoding);
-
-        // finally, push all name/value pairs to the client
-        for (int i = 0; i < mNames->size(); i++) {
-            status_t status = handleStringTag(mNames->getEntry(i), mValues->getEntry(i));
-            if (status) {
-                break;
-            }
+    int size = mEncodingDetector->size();
+    if (size) {
+        for (int i = 0; i < size; i++) {
+            const char *name;
+            const char *value;
+            mEncodingDetector->getTag(i, &name, &value);
+            handleStringTag(name, value);
         }
     }
-    // else addStringTag() has done all the work so we have nothing to do
-
-    delete mNames;
-    delete mValues;
-    mNames = NULL;
-    mValues = NULL;
 }
 
 }  // namespace android
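
The rewritten client no longer keeps per-locale tables or calls ICU directly: tags are buffered in a CharacterEncodingDetector while the file is being parsed, detectAndConvert() runs once in endFile(), and only then are the (possibly converted) name/value pairs forwarded through handleStringTag(). A minimal sketch of the call order a scanner is expected to follow (not part of the patch); the tag values are placeholders, and only the MediaScannerClient methods shown above are used.

    #include <media/mediascanner.h>

    using namespace android;

    // One file's worth of tag traffic through the new detector-based client.
    static void scanOneFile(MediaScannerClient& client) {
        client.beginFile();                    // creates a fresh detector
        client.addStringTag("title", "...");   // buffered, not converted yet
        client.addStringTag("artist", "...");  // buffered, not converted yet
        client.endFile();                      // detectAndConvert(), then each tag
                                               // is delivered via handleStringTag()
    }
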
diff --git a/media/libmedia/SoundPool.cpp b/media/libmedia/SoundPool.cpp
index 22e9fad..4885b4f 100644
--- a/media/libmedia/SoundPool.cpp
+++ b/media/libmedia/SoundPool.cpp
@@ -21,6 +21,7 @@
 #define USE_SHARED_MEM_BUFFER
 
 #include <media/AudioTrack.h>
+#include <media/IMediaHTTPService.h>
 #include <media/mediaplayer.h>
 #include <media/SoundPool.h>
 #include "SoundPoolThread.h"
@@ -199,7 +200,7 @@
     return NULL;
 }
 
-int SoundPool::load(const char* path, int priority)
+int SoundPool::load(const char* path, int priority __unused)
 {
     ALOGV("load: path=%s, priority=%d", path, priority);
     Mutex::Autolock lock(&mLock);
@@ -209,7 +210,7 @@
     return sample->sampleID();
 }
 
-int SoundPool::load(int fd, int64_t offset, int64_t length, int priority)
+int SoundPool::load(int fd, int64_t offset, int64_t length, int priority __unused)
 {
     ALOGV("load: fd=%d, offset=%lld, length=%lld, priority=%d",
             fd, offset, length, priority);
@@ -496,7 +497,14 @@
 
     ALOGV("Start decode");
     if (mUrl) {
-        status = MediaPlayer::decode(mUrl, &sampleRate, &numChannels, &format, mHeap, &mSize);
+        status = MediaPlayer::decode(
+                NULL /* httpService */,
+                mUrl,
+                &sampleRate,
+                &numChannels,
+                &format,
+                mHeap,
+                &mSize);
     } else {
         status = MediaPlayer::decode(mFd, mOffset, mLength, &sampleRate, &numChannels, &format,
                                      mHeap, &mSize);
@@ -600,16 +608,15 @@
         // wrong audio buffer size  (mAudioBufferSize)
         unsigned long toggle = mToggle ^ 1;
         void *userData = (void *)((unsigned long)this | toggle);
-        uint32_t channels = (numChannels == 2) ?
-                AUDIO_CHANNEL_OUT_STEREO : AUDIO_CHANNEL_OUT_MONO;
+        audio_channel_mask_t channelMask = audio_channel_out_mask_from_count(numChannels);
 
         // do not create a new audio track if current track is compatible with sample parameters
 #ifdef USE_SHARED_MEM_BUFFER
         newTrack = new AudioTrack(streamType, sampleRate, sample->format(),
-                channels, sample->getIMemory(), AUDIO_OUTPUT_FLAG_FAST, callback, userData);
+                channelMask, sample->getIMemory(), AUDIO_OUTPUT_FLAG_FAST, callback, userData);
 #else
         newTrack = new AudioTrack(streamType, sampleRate, sample->format(),
-                channels, frameCount, AUDIO_OUTPUT_FLAG_FAST, callback, userData,
+                channelMask, frameCount, AUDIO_OUTPUT_FLAG_FAST, callback, userData,
                 bufferFrames);
 #endif
         oldTrack = mAudioTrack;
@@ -730,7 +737,8 @@
                     count = b->size;
                 }
                 memcpy(q, p, count);
-//              ALOGV("fill: q=%p, p=%p, mPos=%u, b->size=%u, count=%d", q, p, mPos, b->size, count);
+//              ALOGV("fill: q=%p, p=%p, mPos=%u, b->size=%u, count=%d", q, p, mPos, b->size,
+//                      count);
             } else if (mPos < mAudioBufferSize) {
                 count = mAudioBufferSize - mPos;
                 if (count > b->size) {
diff --git a/media/libmedia/autodetect.cpp b/media/libmedia/autodetect.cpp
deleted file mode 100644
index be5c3b2..0000000
--- a/media/libmedia/autodetect.cpp
+++ /dev/null
@@ -1,885 +0,0 @@
-/*
- * Copyright (C) 2008 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "autodetect.h"
-
-struct CharRange {
-    uint16_t first;
-    uint16_t last;
-};
-
-#define ARRAY_SIZE(x)   (sizeof(x) / sizeof(*x))
-
-// generated from http://unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP932.TXT
-static const CharRange kShiftJISRanges[] = {
-    { 0x8140, 0x817E },
-    { 0x8180, 0x81AC },
-    { 0x81B8, 0x81BF },
-    { 0x81C8, 0x81CE },
-    { 0x81DA, 0x81E8 },
-    { 0x81F0, 0x81F7 },
-    { 0x81FC, 0x81FC },
-    { 0x824F, 0x8258 },
-    { 0x8260, 0x8279 },
-    { 0x8281, 0x829A },
-    { 0x829F, 0x82F1 },
-    { 0x8340, 0x837E },
-    { 0x8380, 0x8396 },
-    { 0x839F, 0x83B6 },
-    { 0x83BF, 0x83D6 },
-    { 0x8440, 0x8460 },
-    { 0x8470, 0x847E },
-    { 0x8480, 0x8491 },
-    { 0x849F, 0x84BE },
-    { 0x8740, 0x875D },
-    { 0x875F, 0x8775 },
-    { 0x877E, 0x877E },
-    { 0x8780, 0x879C },
-    { 0x889F, 0x88FC },
-    { 0x8940, 0x897E },
-    { 0x8980, 0x89FC },
-    { 0x8A40, 0x8A7E },
-    { 0x8A80, 0x8AFC },
-    { 0x8B40, 0x8B7E },
-    { 0x8B80, 0x8BFC },
-    { 0x8C40, 0x8C7E },
-    { 0x8C80, 0x8CFC },
-    { 0x8D40, 0x8D7E },
-    { 0x8D80, 0x8DFC },
-    { 0x8E40, 0x8E7E },
-    { 0x8E80, 0x8EFC },
-    { 0x8F40, 0x8F7E },
-    { 0x8F80, 0x8FFC },
-    { 0x9040, 0x907E },
-    { 0x9080, 0x90FC },
-    { 0x9140, 0x917E },
-    { 0x9180, 0x91FC },
-    { 0x9240, 0x927E },
-    { 0x9280, 0x92FC },
-    { 0x9340, 0x937E },
-    { 0x9380, 0x93FC },
-    { 0x9440, 0x947E },
-    { 0x9480, 0x94FC },
-    { 0x9540, 0x957E },
-    { 0x9580, 0x95FC },
-    { 0x9640, 0x967E },
-    { 0x9680, 0x96FC },
-    { 0x9740, 0x977E },
-    { 0x9780, 0x97FC },
-    { 0x9840, 0x9872 },
-    { 0x989F, 0x98FC },
-    { 0x9940, 0x997E },
-    { 0x9980, 0x99FC },
-    { 0x9A40, 0x9A7E },
-    { 0x9A80, 0x9AFC },
-    { 0x9B40, 0x9B7E },
-    { 0x9B80, 0x9BFC },
-    { 0x9C40, 0x9C7E },
-    { 0x9C80, 0x9CFC },
-    { 0x9D40, 0x9D7E },
-    { 0x9D80, 0x9DFC },
-    { 0x9E40, 0x9E7E },
-    { 0x9E80, 0x9EFC },
-    { 0x9F40, 0x9F7E },
-    { 0x9F80, 0x9FFC },
-    { 0xE040, 0xE07E },
-    { 0xE080, 0xE0FC },
-    { 0xE140, 0xE17E },
-    { 0xE180, 0xE1FC },
-    { 0xE240, 0xE27E },
-    { 0xE280, 0xE2FC },
-    { 0xE340, 0xE37E },
-    { 0xE380, 0xE3FC },
-    { 0xE440, 0xE47E },
-    { 0xE480, 0xE4FC },
-    { 0xE540, 0xE57E },
-    { 0xE580, 0xE5FC },
-    { 0xE640, 0xE67E },
-    { 0xE680, 0xE6FC },
-    { 0xE740, 0xE77E },
-    { 0xE780, 0xE7FC },
-    { 0xE840, 0xE87E },
-    { 0xE880, 0xE8FC },
-    { 0xE940, 0xE97E },
-    { 0xE980, 0xE9FC },
-    { 0xEA40, 0xEA7E },
-    { 0xEA80, 0xEAA4 },
-    { 0xED40, 0xED7E },
-    { 0xED80, 0xEDFC },
-    { 0xEE40, 0xEE7E },
-    { 0xEE80, 0xEEEC },
-    { 0xEEEF, 0xEEFC },
-    { 0xFA40, 0xFA7E },
-    { 0xFA80, 0xFAFC },
-    { 0xFB40, 0xFB7E },
-    { 0xFB80, 0xFBFC },
-    { 0xFC40, 0xFC4B },
-};
-
-// generated from http://unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP936.TXT
-static const CharRange kGBKRanges[] = {
-    { 0x8140, 0x817E },
-    { 0x8180, 0x81FE },
-    { 0x8240, 0x827E },
-    { 0x8280, 0x82FE },
-    { 0x8340, 0x837E },
-    { 0x8380, 0x83FE },
-    { 0x8440, 0x847E },
-    { 0x8480, 0x84FE },
-    { 0x8540, 0x857E },
-    { 0x8580, 0x85FE },
-    { 0x8640, 0x867E },
-    { 0x8680, 0x86FE },
-    { 0x8740, 0x877E },
-    { 0x8780, 0x87FE },
-    { 0x8840, 0x887E },
-    { 0x8880, 0x88FE },
-    { 0x8940, 0x897E },
-    { 0x8980, 0x89FE },
-    { 0x8A40, 0x8A7E },
-    { 0x8A80, 0x8AFE },
-    { 0x8B40, 0x8B7E },
-    { 0x8B80, 0x8BFE },
-    { 0x8C40, 0x8C7E },
-    { 0x8C80, 0x8CFE },
-    { 0x8D40, 0x8D7E },
-    { 0x8D80, 0x8DFE },
-    { 0x8E40, 0x8E7E },
-    { 0x8E80, 0x8EFE },
-    { 0x8F40, 0x8F7E },
-    { 0x8F80, 0x8FFE },
-    { 0x9040, 0x907E },
-    { 0x9080, 0x90FE },
-    { 0x9140, 0x917E },
-    { 0x9180, 0x91FE },
-    { 0x9240, 0x927E },
-    { 0x9280, 0x92FE },
-    { 0x9340, 0x937E },
-    { 0x9380, 0x93FE },
-    { 0x9440, 0x947E },
-    { 0x9480, 0x94FE },
-    { 0x9540, 0x957E },
-    { 0x9580, 0x95FE },
-    { 0x9640, 0x967E },
-    { 0x9680, 0x96FE },
-    { 0x9740, 0x977E },
-    { 0x9780, 0x97FE },
-    { 0x9840, 0x987E },
-    { 0x9880, 0x98FE },
-    { 0x9940, 0x997E },
-    { 0x9980, 0x99FE },
-    { 0x9A40, 0x9A7E },
-    { 0x9A80, 0x9AFE },
-    { 0x9B40, 0x9B7E },
-    { 0x9B80, 0x9BFE },
-    { 0x9C40, 0x9C7E },
-    { 0x9C80, 0x9CFE },
-    { 0x9D40, 0x9D7E },
-    { 0x9D80, 0x9DFE },
-    { 0x9E40, 0x9E7E },
-    { 0x9E80, 0x9EFE },
-    { 0x9F40, 0x9F7E },
-    { 0x9F80, 0x9FFE },
-    { 0xA040, 0xA07E },
-    { 0xA080, 0xA0FE },
-    { 0xA1A1, 0xA1FE },
-    { 0xA2A1, 0xA2AA },
-    { 0xA2B1, 0xA2E2 },
-    { 0xA2E5, 0xA2EE },
-    { 0xA2F1, 0xA2FC },
-    { 0xA3A1, 0xA3FE },
-    { 0xA4A1, 0xA4F3 },
-    { 0xA5A1, 0xA5F6 },
-    { 0xA6A1, 0xA6B8 },
-    { 0xA6C1, 0xA6D8 },
-    { 0xA6E0, 0xA6EB },
-    { 0xA6EE, 0xA6F2 },
-    { 0xA6F4, 0xA6F5 },
-    { 0xA7A1, 0xA7C1 },
-    { 0xA7D1, 0xA7F1 },
-    { 0xA840, 0xA87E },
-    { 0xA880, 0xA895 },
-    { 0xA8A1, 0xA8BB },
-    { 0xA8BD, 0xA8BE },
-    { 0xA8C0, 0xA8C0 },
-    { 0xA8C5, 0xA8E9 },
-    { 0xA940, 0xA957 },
-    { 0xA959, 0xA95A },
-    { 0xA95C, 0xA95C },
-    { 0xA960, 0xA97E },
-    { 0xA980, 0xA988 },
-    { 0xA996, 0xA996 },
-    { 0xA9A4, 0xA9EF },
-    { 0xAA40, 0xAA7E },
-    { 0xAA80, 0xAAA0 },
-    { 0xAB40, 0xAB7E },
-    { 0xAB80, 0xABA0 },
-    { 0xAC40, 0xAC7E },
-    { 0xAC80, 0xACA0 },
-    { 0xAD40, 0xAD7E },
-    { 0xAD80, 0xADA0 },
-    { 0xAE40, 0xAE7E },
-    { 0xAE80, 0xAEA0 },
-    { 0xAF40, 0xAF7E },
-    { 0xAF80, 0xAFA0 },
-    { 0xB040, 0xB07E },
-    { 0xB080, 0xB0FE },
-    { 0xB140, 0xB17E },
-    { 0xB180, 0xB1FE },
-    { 0xB240, 0xB27E },
-    { 0xB280, 0xB2FE },
-    { 0xB340, 0xB37E },
-    { 0xB380, 0xB3FE },
-    { 0xB440, 0xB47E },
-    { 0xB480, 0xB4FE },
-    { 0xB540, 0xB57E },
-    { 0xB580, 0xB5FE },
-    { 0xB640, 0xB67E },
-    { 0xB680, 0xB6FE },
-    { 0xB740, 0xB77E },
-    { 0xB780, 0xB7FE },
-    { 0xB840, 0xB87E },
-    { 0xB880, 0xB8FE },
-    { 0xB940, 0xB97E },
-    { 0xB980, 0xB9FE },
-    { 0xBA40, 0xBA7E },
-    { 0xBA80, 0xBAFE },
-    { 0xBB40, 0xBB7E },
-    { 0xBB80, 0xBBFE },
-    { 0xBC40, 0xBC7E },
-    { 0xBC80, 0xBCFE },
-    { 0xBD40, 0xBD7E },
-    { 0xBD80, 0xBDFE },
-    { 0xBE40, 0xBE7E },
-    { 0xBE80, 0xBEFE },
-    { 0xBF40, 0xBF7E },
-    { 0xBF80, 0xBFFE },
-    { 0xC040, 0xC07E },
-    { 0xC080, 0xC0FE },
-    { 0xC140, 0xC17E },
-    { 0xC180, 0xC1FE },
-    { 0xC240, 0xC27E },
-    { 0xC280, 0xC2FE },
-    { 0xC340, 0xC37E },
-    { 0xC380, 0xC3FE },
-    { 0xC440, 0xC47E },
-    { 0xC480, 0xC4FE },
-    { 0xC540, 0xC57E },
-    { 0xC580, 0xC5FE },
-    { 0xC640, 0xC67E },
-    { 0xC680, 0xC6FE },
-    { 0xC740, 0xC77E },
-    { 0xC780, 0xC7FE },
-    { 0xC840, 0xC87E },
-    { 0xC880, 0xC8FE },
-    { 0xC940, 0xC97E },
-    { 0xC980, 0xC9FE },
-    { 0xCA40, 0xCA7E },
-    { 0xCA80, 0xCAFE },
-    { 0xCB40, 0xCB7E },
-    { 0xCB80, 0xCBFE },
-    { 0xCC40, 0xCC7E },
-    { 0xCC80, 0xCCFE },
-    { 0xCD40, 0xCD7E },
-    { 0xCD80, 0xCDFE },
-    { 0xCE40, 0xCE7E },
-    { 0xCE80, 0xCEFE },
-    { 0xCF40, 0xCF7E },
-    { 0xCF80, 0xCFFE },
-    { 0xD040, 0xD07E },
-    { 0xD080, 0xD0FE },
-    { 0xD140, 0xD17E },
-    { 0xD180, 0xD1FE },
-    { 0xD240, 0xD27E },
-    { 0xD280, 0xD2FE },
-    { 0xD340, 0xD37E },
-    { 0xD380, 0xD3FE },
-    { 0xD440, 0xD47E },
-    { 0xD480, 0xD4FE },
-    { 0xD540, 0xD57E },
-    { 0xD580, 0xD5FE },
-    { 0xD640, 0xD67E },
-    { 0xD680, 0xD6FE },
-    { 0xD740, 0xD77E },
-    { 0xD780, 0xD7F9 },
-    { 0xD840, 0xD87E },
-    { 0xD880, 0xD8FE },
-    { 0xD940, 0xD97E },
-    { 0xD980, 0xD9FE },
-    { 0xDA40, 0xDA7E },
-    { 0xDA80, 0xDAFE },
-    { 0xDB40, 0xDB7E },
-    { 0xDB80, 0xDBFE },
-    { 0xDC40, 0xDC7E },
-    { 0xDC80, 0xDCFE },
-    { 0xDD40, 0xDD7E },
-    { 0xDD80, 0xDDFE },
-    { 0xDE40, 0xDE7E },
-    { 0xDE80, 0xDEFE },
-    { 0xDF40, 0xDF7E },
-    { 0xDF80, 0xDFFE },
-    { 0xE040, 0xE07E },
-    { 0xE080, 0xE0FE },
-    { 0xE140, 0xE17E },
-    { 0xE180, 0xE1FE },
-    { 0xE240, 0xE27E },
-    { 0xE280, 0xE2FE },
-    { 0xE340, 0xE37E },
-    { 0xE380, 0xE3FE },
-    { 0xE440, 0xE47E },
-    { 0xE480, 0xE4FE },
-    { 0xE540, 0xE57E },
-    { 0xE580, 0xE5FE },
-    { 0xE640, 0xE67E },
-    { 0xE680, 0xE6FE },
-    { 0xE740, 0xE77E },
-    { 0xE780, 0xE7FE },
-    { 0xE840, 0xE87E },
-    { 0xE880, 0xE8FE },
-    { 0xE940, 0xE97E },
-    { 0xE980, 0xE9FE },
-    { 0xEA40, 0xEA7E },
-    { 0xEA80, 0xEAFE },
-    { 0xEB40, 0xEB7E },
-    { 0xEB80, 0xEBFE },
-    { 0xEC40, 0xEC7E },
-    { 0xEC80, 0xECFE },
-    { 0xED40, 0xED7E },
-    { 0xED80, 0xEDFE },
-    { 0xEE40, 0xEE7E },
-    { 0xEE80, 0xEEFE },
-    { 0xEF40, 0xEF7E },
-    { 0xEF80, 0xEFFE },
-    { 0xF040, 0xF07E },
-    { 0xF080, 0xF0FE },
-    { 0xF140, 0xF17E },
-    { 0xF180, 0xF1FE },
-    { 0xF240, 0xF27E },
-    { 0xF280, 0xF2FE },
-    { 0xF340, 0xF37E },
-    { 0xF380, 0xF3FE },
-    { 0xF440, 0xF47E },
-    { 0xF480, 0xF4FE },
-    { 0xF540, 0xF57E },
-    { 0xF580, 0xF5FE },
-    { 0xF640, 0xF67E },
-    { 0xF680, 0xF6FE },
-    { 0xF740, 0xF77E },
-    { 0xF780, 0xF7FE },
-    { 0xF840, 0xF87E },
-    { 0xF880, 0xF8A0 },
-    { 0xF940, 0xF97E },
-    { 0xF980, 0xF9A0 },
-    { 0xFA40, 0xFA7E },
-    { 0xFA80, 0xFAA0 },
-    { 0xFB40, 0xFB7E },
-    { 0xFB80, 0xFBA0 },
-    { 0xFC40, 0xFC7E },
-    { 0xFC80, 0xFCA0 },
-    { 0xFD40, 0xFD7E },
-    { 0xFD80, 0xFDA0 },
-    { 0xFE40, 0xFE4F },
-};
-
-// generated from http://unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP949.TXT
-static const CharRange kEUCKRRanges[] = {
-    { 0x8141, 0x815A },
-    { 0x8161, 0x817A },
-    { 0x8181, 0x81FE },
-    { 0x8241, 0x825A },
-    { 0x8261, 0x827A },
-    { 0x8281, 0x82FE },
-    { 0x8341, 0x835A },
-    { 0x8361, 0x837A },
-    { 0x8381, 0x83FE },
-    { 0x8441, 0x845A },
-    { 0x8461, 0x847A },
-    { 0x8481, 0x84FE },
-    { 0x8541, 0x855A },
-    { 0x8561, 0x857A },
-    { 0x8581, 0x85FE },
-    { 0x8641, 0x865A },
-    { 0x8661, 0x867A },
-    { 0x8681, 0x86FE },
-    { 0x8741, 0x875A },
-    { 0x8761, 0x877A },
-    { 0x8781, 0x87FE },
-    { 0x8841, 0x885A },
-    { 0x8861, 0x887A },
-    { 0x8881, 0x88FE },
-    { 0x8941, 0x895A },
-    { 0x8961, 0x897A },
-    { 0x8981, 0x89FE },
-    { 0x8A41, 0x8A5A },
-    { 0x8A61, 0x8A7A },
-    { 0x8A81, 0x8AFE },
-    { 0x8B41, 0x8B5A },
-    { 0x8B61, 0x8B7A },
-    { 0x8B81, 0x8BFE },
-    { 0x8C41, 0x8C5A },
-    { 0x8C61, 0x8C7A },
-    { 0x8C81, 0x8CFE },
-    { 0x8D41, 0x8D5A },
-    { 0x8D61, 0x8D7A },
-    { 0x8D81, 0x8DFE },
-    { 0x8E41, 0x8E5A },
-    { 0x8E61, 0x8E7A },
-    { 0x8E81, 0x8EFE },
-    { 0x8F41, 0x8F5A },
-    { 0x8F61, 0x8F7A },
-    { 0x8F81, 0x8FFE },
-    { 0x9041, 0x905A },
-    { 0x9061, 0x907A },
-    { 0x9081, 0x90FE },
-    { 0x9141, 0x915A },
-    { 0x9161, 0x917A },
-    { 0x9181, 0x91FE },
-    { 0x9241, 0x925A },
-    { 0x9261, 0x927A },
-    { 0x9281, 0x92FE },
-    { 0x9341, 0x935A },
-    { 0x9361, 0x937A },
-    { 0x9381, 0x93FE },
-    { 0x9441, 0x945A },
-    { 0x9461, 0x947A },
-    { 0x9481, 0x94FE },
-    { 0x9541, 0x955A },
-    { 0x9561, 0x957A },
-    { 0x9581, 0x95FE },
-    { 0x9641, 0x965A },
-    { 0x9661, 0x967A },
-    { 0x9681, 0x96FE },
-    { 0x9741, 0x975A },
-    { 0x9761, 0x977A },
-    { 0x9781, 0x97FE },
-    { 0x9841, 0x985A },
-    { 0x9861, 0x987A },
-    { 0x9881, 0x98FE },
-    { 0x9941, 0x995A },
-    { 0x9961, 0x997A },
-    { 0x9981, 0x99FE },
-    { 0x9A41, 0x9A5A },
-    { 0x9A61, 0x9A7A },
-    { 0x9A81, 0x9AFE },
-    { 0x9B41, 0x9B5A },
-    { 0x9B61, 0x9B7A },
-    { 0x9B81, 0x9BFE },
-    { 0x9C41, 0x9C5A },
-    { 0x9C61, 0x9C7A },
-    { 0x9C81, 0x9CFE },
-    { 0x9D41, 0x9D5A },
-    { 0x9D61, 0x9D7A },
-    { 0x9D81, 0x9DFE },
-    { 0x9E41, 0x9E5A },
-    { 0x9E61, 0x9E7A },
-    { 0x9E81, 0x9EFE },
-    { 0x9F41, 0x9F5A },
-    { 0x9F61, 0x9F7A },
-    { 0x9F81, 0x9FFE },
-    { 0xA041, 0xA05A },
-    { 0xA061, 0xA07A },
-    { 0xA081, 0xA0FE },
-    { 0xA141, 0xA15A },
-    { 0xA161, 0xA17A },
-    { 0xA181, 0xA1FE },
-    { 0xA241, 0xA25A },
-    { 0xA261, 0xA27A },
-    { 0xA281, 0xA2E7 },
-    { 0xA341, 0xA35A },
-    { 0xA361, 0xA37A },
-    { 0xA381, 0xA3FE },
-    { 0xA441, 0xA45A },
-    { 0xA461, 0xA47A },
-    { 0xA481, 0xA4FE },
-    { 0xA541, 0xA55A },
-    { 0xA561, 0xA57A },
-    { 0xA581, 0xA5AA },
-    { 0xA5B0, 0xA5B9 },
-    { 0xA5C1, 0xA5D8 },
-    { 0xA5E1, 0xA5F8 },
-    { 0xA641, 0xA65A },
-    { 0xA661, 0xA67A },
-    { 0xA681, 0xA6E4 },
-    { 0xA741, 0xA75A },
-    { 0xA761, 0xA77A },
-    { 0xA781, 0xA7EF },
-    { 0xA841, 0xA85A },
-    { 0xA861, 0xA87A },
-    { 0xA881, 0xA8A4 },
-    { 0xA8A6, 0xA8A6 },
-    { 0xA8A8, 0xA8AF },
-    { 0xA8B1, 0xA8FE },
-    { 0xA941, 0xA95A },
-    { 0xA961, 0xA97A },
-    { 0xA981, 0xA9FE },
-    { 0xAA41, 0xAA5A },
-    { 0xAA61, 0xAA7A },
-    { 0xAA81, 0xAAF3 },
-    { 0xAB41, 0xAB5A },
-    { 0xAB61, 0xAB7A },
-    { 0xAB81, 0xABF6 },
-    { 0xAC41, 0xAC5A },
-    { 0xAC61, 0xAC7A },
-    { 0xAC81, 0xACC1 },
-    { 0xACD1, 0xACF1 },
-    { 0xAD41, 0xAD5A },
-    { 0xAD61, 0xAD7A },
-    { 0xAD81, 0xADA0 },
-    { 0xAE41, 0xAE5A },
-    { 0xAE61, 0xAE7A },
-    { 0xAE81, 0xAEA0 },
-    { 0xAF41, 0xAF5A },
-    { 0xAF61, 0xAF7A },
-    { 0xAF81, 0xAFA0 },
-    { 0xB041, 0xB05A },
-    { 0xB061, 0xB07A },
-    { 0xB081, 0xB0FE },
-    { 0xB141, 0xB15A },
-    { 0xB161, 0xB17A },
-    { 0xB181, 0xB1FE },
-    { 0xB241, 0xB25A },
-    { 0xB261, 0xB27A },
-    { 0xB281, 0xB2FE },
-    { 0xB341, 0xB35A },
-    { 0xB361, 0xB37A },
-    { 0xB381, 0xB3FE },
-    { 0xB441, 0xB45A },
-    { 0xB461, 0xB47A },
-    { 0xB481, 0xB4FE },
-    { 0xB541, 0xB55A },
-    { 0xB561, 0xB57A },
-    { 0xB581, 0xB5FE },
-    { 0xB641, 0xB65A },
-    { 0xB661, 0xB67A },
-    { 0xB681, 0xB6FE },
-    { 0xB741, 0xB75A },
-    { 0xB761, 0xB77A },
-    { 0xB781, 0xB7FE },
-    { 0xB841, 0xB85A },
-    { 0xB861, 0xB87A },
-    { 0xB881, 0xB8FE },
-    { 0xB941, 0xB95A },
-    { 0xB961, 0xB97A },
-    { 0xB981, 0xB9FE },
-    { 0xBA41, 0xBA5A },
-    { 0xBA61, 0xBA7A },
-    { 0xBA81, 0xBAFE },
-    { 0xBB41, 0xBB5A },
-    { 0xBB61, 0xBB7A },
-    { 0xBB81, 0xBBFE },
-    { 0xBC41, 0xBC5A },
-    { 0xBC61, 0xBC7A },
-    { 0xBC81, 0xBCFE },
-    { 0xBD41, 0xBD5A },
-    { 0xBD61, 0xBD7A },
-    { 0xBD81, 0xBDFE },
-    { 0xBE41, 0xBE5A },
-    { 0xBE61, 0xBE7A },
-    { 0xBE81, 0xBEFE },
-    { 0xBF41, 0xBF5A },
-    { 0xBF61, 0xBF7A },
-    { 0xBF81, 0xBFFE },
-    { 0xC041, 0xC05A },
-    { 0xC061, 0xC07A },
-    { 0xC081, 0xC0FE },
-    { 0xC141, 0xC15A },
-    { 0xC161, 0xC17A },
-    { 0xC181, 0xC1FE },
-    { 0xC241, 0xC25A },
-    { 0xC261, 0xC27A },
-    { 0xC281, 0xC2FE },
-    { 0xC341, 0xC35A },
-    { 0xC361, 0xC37A },
-    { 0xC381, 0xC3FE },
-    { 0xC441, 0xC45A },
-    { 0xC461, 0xC47A },
-    { 0xC481, 0xC4FE },
-    { 0xC541, 0xC55A },
-    { 0xC561, 0xC57A },
-    { 0xC581, 0xC5FE },
-    { 0xC641, 0xC652 },
-    { 0xC6A1, 0xC6FE },
-    { 0xC7A1, 0xC7FE },
-    { 0xC8A1, 0xC8FE },
-    { 0xCAA1, 0xCAFE },
-    { 0xCBA1, 0xCBFE },
-    { 0xCCA1, 0xCCFE },
-    { 0xCDA1, 0xCDFE },
-    { 0xCEA1, 0xCEFE },
-    { 0xCFA1, 0xCFFE },
-    { 0xD0A1, 0xD0FE },
-    { 0xD1A1, 0xD1FE },
-    { 0xD2A1, 0xD2FE },
-    { 0xD3A1, 0xD3FE },
-    { 0xD4A1, 0xD4FE },
-    { 0xD5A1, 0xD5FE },
-    { 0xD6A1, 0xD6FE },
-    { 0xD7A1, 0xD7FE },
-    { 0xD8A1, 0xD8FE },
-    { 0xD9A1, 0xD9FE },
-    { 0xDAA1, 0xDAFE },
-    { 0xDBA1, 0xDBFE },
-    { 0xDCA1, 0xDCFE },
-    { 0xDDA1, 0xDDFE },
-    { 0xDEA1, 0xDEFE },
-    { 0xDFA1, 0xDFFE },
-    { 0xE0A1, 0xE0FE },
-    { 0xE1A1, 0xE1FE },
-    { 0xE2A1, 0xE2FE },
-    { 0xE3A1, 0xE3FE },
-    { 0xE4A1, 0xE4FE },
-    { 0xE5A1, 0xE5FE },
-    { 0xE6A1, 0xE6FE },
-    { 0xE7A1, 0xE7FE },
-    { 0xE8A1, 0xE8FE },
-    { 0xE9A1, 0xE9FE },
-    { 0xEAA1, 0xEAFE },
-    { 0xEBA1, 0xEBFE },
-    { 0xECA1, 0xECFE },
-    { 0xEDA1, 0xEDFE },
-    { 0xEEA1, 0xEEFE },
-    { 0xEFA1, 0xEFFE },
-    { 0xF0A1, 0xF0FE },
-    { 0xF1A1, 0xF1FE },
-    { 0xF2A1, 0xF2FE },
-    { 0xF3A1, 0xF3FE },
-    { 0xF4A1, 0xF4FE },
-    { 0xF5A1, 0xF5FE },
-    { 0xF6A1, 0xF6FE },
-    { 0xF7A1, 0xF7FE },
-    { 0xF8A1, 0xF8FE },
-    { 0xF9A1, 0xF9FE },
-    { 0xFAA1, 0xFAFE },
-    { 0xFBA1, 0xFBFE },
-    { 0xFCA1, 0xFCFE },
-    { 0xFDA1, 0xFDFE },
-};
-
-// generated from http://unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP950.TXT
-static const CharRange kBig5Ranges[] = {
-    { 0xA140, 0xA17E },
-    { 0xA1A1, 0xA1FE },
-    { 0xA240, 0xA27E },
-    { 0xA2A1, 0xA2FE },
-    { 0xA340, 0xA37E },
-    { 0xA3A1, 0xA3BF },
-    { 0xA3E1, 0xA3E1 },
-    { 0xA440, 0xA47E },
-    { 0xA4A1, 0xA4FE },
-    { 0xA540, 0xA57E },
-    { 0xA5A1, 0xA5FE },
-    { 0xA640, 0xA67E },
-    { 0xA6A1, 0xA6FE },
-    { 0xA740, 0xA77E },
-    { 0xA7A1, 0xA7FE },
-    { 0xA840, 0xA87E },
-    { 0xA8A1, 0xA8FE },
-    { 0xA940, 0xA97E },
-    { 0xA9A1, 0xA9FE },
-    { 0xAA40, 0xAA7E },
-    { 0xAAA1, 0xAAFE },
-    { 0xAB40, 0xAB7E },
-    { 0xABA1, 0xABFE },
-    { 0xAC40, 0xAC7E },
-    { 0xACA1, 0xACFE },
-    { 0xAD40, 0xAD7E },
-    { 0xADA1, 0xADFE },
-    { 0xAE40, 0xAE7E },
-    { 0xAEA1, 0xAEFE },
-    { 0xAF40, 0xAF7E },
-    { 0xAFA1, 0xAFFE },
-    { 0xB040, 0xB07E },
-    { 0xB0A1, 0xB0FE },
-    { 0xB140, 0xB17E },
-    { 0xB1A1, 0xB1FE },
-    { 0xB240, 0xB27E },
-    { 0xB2A1, 0xB2FE },
-    { 0xB340, 0xB37E },
-    { 0xB3A1, 0xB3FE },
-    { 0xB440, 0xB47E },
-    { 0xB4A1, 0xB4FE },
-    { 0xB540, 0xB57E },
-    { 0xB5A1, 0xB5FE },
-    { 0xB640, 0xB67E },
-    { 0xB6A1, 0xB6FE },
-    { 0xB740, 0xB77E },
-    { 0xB7A1, 0xB7FE },
-    { 0xB840, 0xB87E },
-    { 0xB8A1, 0xB8FE },
-    { 0xB940, 0xB97E },
-    { 0xB9A1, 0xB9FE },
-    { 0xBA40, 0xBA7E },
-    { 0xBAA1, 0xBAFE },
-    { 0xBB40, 0xBB7E },
-    { 0xBBA1, 0xBBFE },
-    { 0xBC40, 0xBC7E },
-    { 0xBCA1, 0xBCFE },
-    { 0xBD40, 0xBD7E },
-    { 0xBDA1, 0xBDFE },
-    { 0xBE40, 0xBE7E },
-    { 0xBEA1, 0xBEFE },
-    { 0xBF40, 0xBF7E },
-    { 0xBFA1, 0xBFFE },
-    { 0xC040, 0xC07E },
-    { 0xC0A1, 0xC0FE },
-    { 0xC140, 0xC17E },
-    { 0xC1A1, 0xC1FE },
-    { 0xC240, 0xC27E },
-    { 0xC2A1, 0xC2FE },
-    { 0xC340, 0xC37E },
-    { 0xC3A1, 0xC3FE },
-    { 0xC440, 0xC47E },
-    { 0xC4A1, 0xC4FE },
-    { 0xC540, 0xC57E },
-    { 0xC5A1, 0xC5FE },
-    { 0xC640, 0xC67E },
-    { 0xC940, 0xC97E },
-    { 0xC9A1, 0xC9FE },
-    { 0xCA40, 0xCA7E },
-    { 0xCAA1, 0xCAFE },
-    { 0xCB40, 0xCB7E },
-    { 0xCBA1, 0xCBFE },
-    { 0xCC40, 0xCC7E },
-    { 0xCCA1, 0xCCFE },
-    { 0xCD40, 0xCD7E },
-    { 0xCDA1, 0xCDFE },
-    { 0xCE40, 0xCE7E },
-    { 0xCEA1, 0xCEFE },
-    { 0xCF40, 0xCF7E },
-    { 0xCFA1, 0xCFFE },
-    { 0xD040, 0xD07E },
-    { 0xD0A1, 0xD0FE },
-    { 0xD140, 0xD17E },
-    { 0xD1A1, 0xD1FE },
-    { 0xD240, 0xD27E },
-    { 0xD2A1, 0xD2FE },
-    { 0xD340, 0xD37E },
-    { 0xD3A1, 0xD3FE },
-    { 0xD440, 0xD47E },
-    { 0xD4A1, 0xD4FE },
-    { 0xD540, 0xD57E },
-    { 0xD5A1, 0xD5FE },
-    { 0xD640, 0xD67E },
-    { 0xD6A1, 0xD6FE },
-    { 0xD740, 0xD77E },
-    { 0xD7A1, 0xD7FE },
-    { 0xD840, 0xD87E },
-    { 0xD8A1, 0xD8FE },
-    { 0xD940, 0xD97E },
-    { 0xD9A1, 0xD9FE },
-    { 0xDA40, 0xDA7E },
-    { 0xDAA1, 0xDAFE },
-    { 0xDB40, 0xDB7E },
-    { 0xDBA1, 0xDBFE },
-    { 0xDC40, 0xDC7E },
-    { 0xDCA1, 0xDCFE },
-    { 0xDD40, 0xDD7E },
-    { 0xDDA1, 0xDDFE },
-    { 0xDE40, 0xDE7E },
-    { 0xDEA1, 0xDEFE },
-    { 0xDF40, 0xDF7E },
-    { 0xDFA1, 0xDFFE },
-    { 0xE040, 0xE07E },
-    { 0xE0A1, 0xE0FE },
-    { 0xE140, 0xE17E },
-    { 0xE1A1, 0xE1FE },
-    { 0xE240, 0xE27E },
-    { 0xE2A1, 0xE2FE },
-    { 0xE340, 0xE37E },
-    { 0xE3A1, 0xE3FE },
-    { 0xE440, 0xE47E },
-    { 0xE4A1, 0xE4FE },
-    { 0xE540, 0xE57E },
-    { 0xE5A1, 0xE5FE },
-    { 0xE640, 0xE67E },
-    { 0xE6A1, 0xE6FE },
-    { 0xE740, 0xE77E },
-    { 0xE7A1, 0xE7FE },
-    { 0xE840, 0xE87E },
-    { 0xE8A1, 0xE8FE },
-    { 0xE940, 0xE97E },
-    { 0xE9A1, 0xE9FE },
-    { 0xEA40, 0xEA7E },
-    { 0xEAA1, 0xEAFE },
-    { 0xEB40, 0xEB7E },
-    { 0xEBA1, 0xEBFE },
-    { 0xEC40, 0xEC7E },
-    { 0xECA1, 0xECFE },
-    { 0xED40, 0xED7E },
-    { 0xEDA1, 0xEDFE },
-    { 0xEE40, 0xEE7E },
-    { 0xEEA1, 0xEEFE },
-    { 0xEF40, 0xEF7E },
-    { 0xEFA1, 0xEFFE },
-    { 0xF040, 0xF07E },
-    { 0xF0A1, 0xF0FE },
-    { 0xF140, 0xF17E },
-    { 0xF1A1, 0xF1FE },
-    { 0xF240, 0xF27E },
-    { 0xF2A1, 0xF2FE },
-    { 0xF340, 0xF37E },
-    { 0xF3A1, 0xF3FE },
-    { 0xF440, 0xF47E },
-    { 0xF4A1, 0xF4FE },
-    { 0xF540, 0xF57E },
-    { 0xF5A1, 0xF5FE },
-    { 0xF640, 0xF67E },
-    { 0xF6A1, 0xF6FE },
-    { 0xF740, 0xF77E },
-    { 0xF7A1, 0xF7FE },
-    { 0xF840, 0xF87E },
-    { 0xF8A1, 0xF8FE },
-    { 0xF940, 0xF97E },
-    { 0xF9A1, 0xF9FE },
-};
-
-static bool charMatchesEncoding(int ch, const CharRange* encodingRanges, int rangeCount) {
-    // Use binary search to see if the character is contained in the encoding
-    int low = 0;
-    int high = rangeCount;
-
-    while (low < high) {
-        int i = (low + high) / 2;
-        const CharRange* range = &encodingRanges[i];
-        if (ch >= range->first && ch <= range->last)
-            return true;
-        if (ch > range->last)
-            low = i + 1;
-        else
-            high = i;
-    }
-
-    return false;
-}
-
-extern uint32_t findPossibleEncodings(int ch)
-{
-    // ASCII matches everything
-    if (ch < 256) return kEncodingAll;
-
-    int result = kEncodingNone;
-
-    if (charMatchesEncoding(ch, kShiftJISRanges, ARRAY_SIZE(kShiftJISRanges)))
-        result |= kEncodingShiftJIS;
-    if (charMatchesEncoding(ch, kGBKRanges, ARRAY_SIZE(kGBKRanges)))
-        result |= kEncodingGBK;
-    if (charMatchesEncoding(ch, kBig5Ranges, ARRAY_SIZE(kBig5Ranges)))
-        result |= kEncodingBig5;
-    if (charMatchesEncoding(ch, kEUCKRRanges, ARRAY_SIZE(kEUCKRRanges)))
-        result |= kEncodingEUCKR;
-
-    return result;
-}
diff --git a/media/libmedia/autodetect.h b/media/libmedia/autodetect.h
deleted file mode 100644
index 9675db3..0000000
--- a/media/libmedia/autodetect.h
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright (C) 2008 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef AUTODETECT_H
-#define AUTODETECT_H
-
-#include <inttypes.h>
-
-// flags used for native encoding detection
-enum {
-    kEncodingNone               = 0,
-    kEncodingShiftJIS           = (1 << 0),
-    kEncodingGBK                = (1 << 1),
-    kEncodingBig5               = (1 << 2),
-    kEncodingEUCKR              = (1 << 3),
-
-    kEncodingAll                = (kEncodingShiftJIS | kEncodingGBK | kEncodingBig5 | kEncodingEUCKR),
-};
-
-
-// returns a bitfield containing the possible native encodings for the given character
-extern uint32_t findPossibleEncodings(int ch);
-
-#endif // AUTODETECT_H
diff --git a/media/libmedia/mediametadataretriever.cpp b/media/libmedia/mediametadataretriever.cpp
index 110b94c..1d6bb6f 100644
--- a/media/libmedia/mediametadataretriever.cpp
+++ b/media/libmedia/mediametadataretriever.cpp
@@ -21,6 +21,7 @@
 #include <binder/IServiceManager.h>
 #include <binder/IPCThreadState.h>
 #include <media/mediametadataretriever.h>
+#include <media/IMediaHTTPService.h>
 #include <media/IMediaPlayerService.h>
 #include <utils/Log.h>
 #include <dlfcn.h>
@@ -93,7 +94,9 @@
 }
 
 status_t MediaMetadataRetriever::setDataSource(
-        const char *srcUrl, const KeyedVector<String8, String8> *headers)
+        const sp<IMediaHTTPService> &httpService,
+        const char *srcUrl,
+        const KeyedVector<String8, String8> *headers)
 {
     ALOGV("setDataSource");
     Mutex::Autolock _l(mLock);
@@ -106,7 +109,7 @@
         return UNKNOWN_ERROR;
     }
     ALOGV("data source (%s)", srcUrl);
-    return mRetriever->setDataSource(srcUrl, headers);
+    return mRetriever->setDataSource(httpService, srcUrl, headers);
 }
 
 status_t MediaMetadataRetriever::setDataSource(int fd, int64_t offset, int64_t length)
@@ -157,7 +160,7 @@
     return mRetriever->extractAlbumArt();
 }
 
-void MediaMetadataRetriever::DeathNotifier::binderDied(const wp<IBinder>& who) {
+void MediaMetadataRetriever::DeathNotifier::binderDied(const wp<IBinder>& who __unused) {
     Mutex::Autolock lock(MediaMetadataRetriever::sServiceLock);
     MediaMetadataRetriever::sService.clear();
     ALOGW("MediaMetadataRetriever server died!");
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 0f6d897..24663ad 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -136,6 +136,7 @@
 }
 
 status_t MediaPlayer::setDataSource(
+        const sp<IMediaHTTPService> &httpService,
         const char *url, const KeyedVector<String8, String8> *headers)
 {
     ALOGV("setDataSource(%s)", url);
@@ -145,7 +146,7 @@
         if (service != 0) {
             sp<IMediaPlayer> player(service->create(this, mAudioSessionId));
             if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
-                (NO_ERROR != player->setDataSource(url, headers))) {
+                (NO_ERROR != player->setDataSource(httpService, url, headers))) {
                 player.clear();
             }
             err = attachNewPlayer(player);
@@ -654,7 +655,7 @@
         return BAD_VALUE;
     }
 
-    memset(&mRetransmitEndpoint, 0, sizeof(&mRetransmitEndpoint));
+    memset(&mRetransmitEndpoint, 0, sizeof(mRetransmitEndpoint));
     mRetransmitEndpoint.sin_family = AF_INET;
     mRetransmitEndpoint.sin_addr   = saddr;
     mRetransmitEndpoint.sin_port   = htons(port);
@@ -776,15 +777,20 @@
     }
 }
 
-/*static*/ status_t MediaPlayer::decode(const char* url, uint32_t *pSampleRate,
-                                           int* pNumChannels, audio_format_t* pFormat,
-                                           const sp<IMemoryHeap>& heap, size_t *pSize)
+/*static*/ status_t MediaPlayer::decode(
+        const sp<IMediaHTTPService> &httpService,
+        const char* url,
+        uint32_t *pSampleRate,
+        int* pNumChannels,
+        audio_format_t* pFormat,
+        const sp<IMemoryHeap>& heap,
+        size_t *pSize)
 {
     ALOGV("decode(%s)", url);
     status_t status;
     const sp<IMediaPlayerService>& service = getMediaPlayerService();
     if (service != 0) {
-        status = service->decode(url, pSampleRate, pNumChannels, pFormat, heap, pSize);
+        status = service->decode(httpService, url, pSampleRate, pNumChannels, pFormat, heap, pSize);
     } else {
         ALOGE("Unable to locate media service");
         status = DEAD_OBJECT;
@@ -832,15 +838,4 @@
     return mPlayer->setNextPlayer(next == NULL ? NULL : next->mPlayer);
 }
 
-status_t MediaPlayer::updateProxyConfig(
-        const char *host, int32_t port, const char *exclusionList) {
-    const sp<IMediaPlayerService>& service = getMediaPlayerService();
-
-    if (service != NULL) {
-        return service->updateProxyConfig(host, port, exclusionList);
-    }
-
-    return INVALID_OPERATION;
-}
-
 }; // namespace android
diff --git a/media/libmediaplayerservice/HDCP.cpp b/media/libmediaplayerservice/HDCP.cpp
index c2ac1a3..afe3936 100644
--- a/media/libmediaplayerservice/HDCP.cpp
+++ b/media/libmediaplayerservice/HDCP.cpp
@@ -107,11 +107,7 @@
         return NO_INIT;
     }
 
-    // TO-DO:
-    // Only support HDCP_CAPS_ENCRYPT (byte-array to byte-array) for now.
-    // use mHDCPModule->getCaps() when the HDCP libraries get updated.
-    //return mHDCPModule->getCaps();
-    return HDCPModule::HDCP_CAPS_ENCRYPT;
+    return mHDCPModule->getCaps();
 }
 
 status_t HDCP::encrypt(
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index a392b76..1f70620 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -44,6 +44,7 @@
 #include <utils/SystemClock.h>
 #include <utils/Vector.h>
 
+#include <media/IMediaHTTPService.h>
 #include <media/IRemoteDisplay.h>
 #include <media/IRemoteDisplayClient.h>
 #include <media/MediaPlayerInterface.h>
@@ -306,11 +307,6 @@
     return new RemoteDisplay(client, iface.string());
 }
 
-status_t MediaPlayerService::updateProxyConfig(
-        const char *host, int32_t port, const char *exclusionList) {
-    return HTTPBase::UpdateProxyConfig(host, port, exclusionList);
-}
-
 status_t MediaPlayerService::AudioCache::dump(int fd, const Vector<String16>& args) const
 {
     const size_t SIZE = 256;
@@ -622,7 +618,9 @@
 }
 
 status_t MediaPlayerService::Client::setDataSource(
-        const char *url, const KeyedVector<String8, String8> *headers)
+        const sp<IMediaHTTPService> &httpService,
+        const char *url,
+        const KeyedVector<String8, String8> *headers)
 {
     ALOGV("setDataSource(%s)", url);
     if (url == NULL)
@@ -657,7 +655,7 @@
             return NO_INIT;
         }
 
-        setDataSource_post(p, p->setDataSource(url, headers));
+        setDataSource_post(p, p->setDataSource(httpService, url, headers));
         return mStatus;
     }
 }
@@ -1176,9 +1174,14 @@
 }
 #endif
 
-status_t MediaPlayerService::decode(const char* url, uint32_t *pSampleRate, int* pNumChannels,
-                                       audio_format_t* pFormat,
-                                       const sp<IMemoryHeap>& heap, size_t *pSize)
+status_t MediaPlayerService::decode(
+        const sp<IMediaHTTPService> &httpService,
+        const char* url,
+        uint32_t *pSampleRate,
+        int* pNumChannels,
+        audio_format_t* pFormat,
+        const sp<IMemoryHeap>& heap,
+        size_t *pSize)
 {
     ALOGV("decode(%s)", url);
     sp<MediaPlayerBase> player;
@@ -1206,7 +1209,7 @@
     static_cast<MediaPlayerInterface*>(player.get())->setAudioSink(cache);
 
     // set data source
-    if (player->setDataSource(url) != NO_ERROR) goto Exit;
+    if (player->setDataSource(httpService, url) != NO_ERROR) goto Exit;
 
     ALOGV("prepare");
     player->prepareAsync();
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 9c084e1..fc355b0 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -211,12 +211,12 @@
         virtual void            flush() {}
         virtual void            pause() {}
         virtual void            close() {}
-                void            setAudioStreamType(audio_stream_type_t streamType) {}
+                void            setAudioStreamType(audio_stream_type_t streamType __unused) {}
                 // stream type is not used for AudioCache
         virtual audio_stream_type_t getAudioStreamType() const { return AUDIO_STREAM_DEFAULT; }
 
-                void            setVolume(float left, float right) {}
-        virtual status_t        setPlaybackRatePermille(int32_t ratePermille) { return INVALID_OPERATION; }
+                void            setVolume(float left __unused, float right __unused) {}
+        virtual status_t        setPlaybackRatePermille(int32_t ratePermille __unused) { return INVALID_OPERATION; }
                 uint32_t        sampleRate() const { return mSampleRate; }
                 audio_format_t  format() const { return mFormat; }
                 size_t          size() const { return mSize; }
@@ -256,9 +256,15 @@
 
     virtual sp<IMediaPlayer>    create(const sp<IMediaPlayerClient>& client, int audioSessionId);
 
-    virtual status_t            decode(const char* url, uint32_t *pSampleRate, int* pNumChannels,
-                                       audio_format_t* pFormat,
-                                       const sp<IMemoryHeap>& heap, size_t *pSize);
+    virtual status_t            decode(
+            const sp<IMediaHTTPService> &httpService,
+            const char* url,
+            uint32_t *pSampleRate,
+            int* pNumChannels,
+            audio_format_t* pFormat,
+            const sp<IMemoryHeap>& heap,
+            size_t *pSize);
+
     virtual status_t            decode(int fd, int64_t offset, int64_t length,
                                        uint32_t *pSampleRate, int* pNumChannels,
                                        audio_format_t* pFormat,
@@ -272,9 +278,6 @@
             const String8& iface);
     virtual status_t            dump(int fd, const Vector<String16>& args);
 
-    virtual status_t        updateProxyConfig(
-            const char *host, int32_t port, const char *exclusionList);
-
             void                removeClient(wp<Client> client);
 
     // For battery usage tracking purpose
@@ -356,6 +359,7 @@
         sp<MediaPlayerBase>     createPlayer(player_type playerType);
 
         virtual status_t        setDataSource(
+                        const sp<IMediaHTTPService> &httpService,
                         const char *url,
                         const KeyedVector<String8, String8> *headers);
 
diff --git a/media/libmediaplayerservice/MetadataRetrieverClient.cpp b/media/libmediaplayerservice/MetadataRetrieverClient.cpp
index 348957f..c61cf89 100644
--- a/media/libmediaplayerservice/MetadataRetrieverClient.cpp
+++ b/media/libmediaplayerservice/MetadataRetrieverClient.cpp
@@ -31,6 +31,7 @@
 #include <binder/MemoryHeapBase.h>
 #include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
+#include <media/IMediaHTTPService.h>
 #include <media/MediaMetadataRetrieverInterface.h>
 #include <media/MediaPlayerInterface.h>
 #include <private/media/VideoFrame.h>
@@ -106,7 +107,9 @@
 }
 
 status_t MetadataRetrieverClient::setDataSource(
-        const char *url, const KeyedVector<String8, String8> *headers)
+        const sp<IMediaHTTPService> &httpService,
+        const char *url,
+        const KeyedVector<String8, String8> *headers)
 {
     ALOGV("setDataSource(%s)", url);
     Mutex::Autolock lock(mLock);
@@ -127,7 +130,7 @@
     ALOGV("player type = %d", playerType);
     sp<MediaMetadataRetrieverBase> p = createRetriever(playerType);
     if (p == NULL) return NO_INIT;
-    status_t ret = p->setDataSource(url, headers);
+    status_t ret = p->setDataSource(httpService, url, headers);
     if (ret == NO_ERROR) mRetriever = p;
     return ret;
 }
diff --git a/media/libmediaplayerservice/MetadataRetrieverClient.h b/media/libmediaplayerservice/MetadataRetrieverClient.h
index f08f933..9d3fbe9 100644
--- a/media/libmediaplayerservice/MetadataRetrieverClient.h
+++ b/media/libmediaplayerservice/MetadataRetrieverClient.h
@@ -30,6 +30,7 @@
 
 namespace android {
 
+struct IMediaHTTPService;
 class IMediaPlayerService;
 class MemoryDealer;
 
@@ -43,7 +44,9 @@
     virtual void                    disconnect();
 
     virtual status_t                setDataSource(
-            const char *url, const KeyedVector<String8, String8> *headers);
+            const sp<IMediaHTTPService> &httpService,
+            const char *url,
+            const KeyedVector<String8, String8> *headers);
 
     virtual status_t                setDataSource(int fd, int64_t offset, int64_t length);
     virtual sp<IMemory>             getFrameAtTime(int64_t timeUs, int option);
diff --git a/media/libmediaplayerservice/MidiFile.cpp b/media/libmediaplayerservice/MidiFile.cpp
index 0a6aa90..deeddd1 100644
--- a/media/libmediaplayerservice/MidiFile.cpp
+++ b/media/libmediaplayerservice/MidiFile.cpp
@@ -114,7 +114,9 @@
 }
 
 status_t MidiFile::setDataSource(
-        const char* path, const KeyedVector<String8, String8> *) {
+        const sp<IMediaHTTPService> &httpService,
+        const char* path,
+        const KeyedVector<String8, String8> *) {
     ALOGV("MidiFile::setDataSource url=%s", path);
     Mutex::Autolock lock(mMutex);
 
diff --git a/media/libmediaplayerservice/MidiFile.h b/media/libmediaplayerservice/MidiFile.h
index 24d59b4..12802ba 100644
--- a/media/libmediaplayerservice/MidiFile.h
+++ b/media/libmediaplayerservice/MidiFile.h
@@ -32,7 +32,9 @@
     virtual status_t    initCheck();
 
     virtual status_t    setDataSource(
-            const char* path, const KeyedVector<String8, String8> *headers);
+            const sp<IMediaHTTPService> &httpService,
+            const char* path,
+            const KeyedVector<String8, String8> *headers);
 
     virtual status_t    setDataSource(int fd, int64_t offset, int64_t length);
     virtual status_t    setVideoSurfaceTexture(
diff --git a/media/libmediaplayerservice/MidiMetadataRetriever.cpp b/media/libmediaplayerservice/MidiMetadataRetriever.cpp
index 465209f..f3cf6ef 100644
--- a/media/libmediaplayerservice/MidiMetadataRetriever.cpp
+++ b/media/libmediaplayerservice/MidiMetadataRetriever.cpp
@@ -22,6 +22,8 @@
 #include "MidiMetadataRetriever.h"
 #include <media/mediametadataretriever.h>
 
+#include <media/IMediaHTTPService.h>
+
 namespace android {
 
 static status_t ERROR_NOT_OPEN = -1;
@@ -36,7 +38,9 @@
 }
 
 status_t MidiMetadataRetriever::setDataSource(
-        const char *url, const KeyedVector<String8, String8> *headers)
+        const sp<IMediaHTTPService> &httpService,
+        const char *url,
+        const KeyedVector<String8, String8> *headers)
 {
     ALOGV("setDataSource: %s", url? url: "NULL pointer");
     Mutex::Autolock lock(mLock);
@@ -44,7 +48,7 @@
     if (mMidiPlayer == 0) {
         mMidiPlayer = new MidiFile();
     }
-    return mMidiPlayer->setDataSource(url, headers);
+    return mMidiPlayer->setDataSource(httpService, url, headers);
 }
 
 status_t MidiMetadataRetriever::setDataSource(int fd, int64_t offset, int64_t length)
diff --git a/media/libmediaplayerservice/MidiMetadataRetriever.h b/media/libmediaplayerservice/MidiMetadataRetriever.h
index 4cee42d..b8214ee 100644
--- a/media/libmediaplayerservice/MidiMetadataRetriever.h
+++ b/media/libmediaplayerservice/MidiMetadataRetriever.h
@@ -32,7 +32,9 @@
                                    ~MidiMetadataRetriever() {}
 
     virtual status_t                setDataSource(
-            const char *url, const KeyedVector<String8, String8> *headers);
+            const sp<IMediaHTTPService> &httpService,
+            const char *url,
+            const KeyedVector<String8, String8> *headers);
 
     virtual status_t                setDataSource(int fd, int64_t offset, int64_t length);
     virtual const char*             extractMetadata(int keyCode);
diff --git a/media/libmediaplayerservice/StagefrightPlayer.cpp b/media/libmediaplayerservice/StagefrightPlayer.cpp
index de61d9b..b19e8bf 100644
--- a/media/libmediaplayerservice/StagefrightPlayer.cpp
+++ b/media/libmediaplayerservice/StagefrightPlayer.cpp
@@ -54,8 +54,10 @@
 }
 
 status_t StagefrightPlayer::setDataSource(
-        const char *url, const KeyedVector<String8, String8> *headers) {
-    return mPlayer->setDataSource(url, headers);
+        const sp<IMediaHTTPService> &httpService,
+        const char *url,
+        const KeyedVector<String8, String8> *headers) {
+    return mPlayer->setDataSource(httpService, url, headers);
 }
 
 // Warning: The filedescriptor passed into this method will only be valid until
diff --git a/media/libmediaplayerservice/StagefrightPlayer.h b/media/libmediaplayerservice/StagefrightPlayer.h
index 600945e..e6c30ff 100644
--- a/media/libmediaplayerservice/StagefrightPlayer.h
+++ b/media/libmediaplayerservice/StagefrightPlayer.h
@@ -34,7 +34,9 @@
     virtual status_t setUID(uid_t uid);
 
     virtual status_t setDataSource(
-            const char *url, const KeyedVector<String8, String8> *headers);
+            const sp<IMediaHTTPService> &httpService,
+            const char *url,
+            const KeyedVector<String8, String8> *headers);
 
     virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
 
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 4da74e1..e0eae37 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -25,8 +25,10 @@
 #include <binder/IServiceManager.h>
 
 #include <media/IMediaPlayerService.h>
-#include <media/openmax/OMX_Audio.h>
+#include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/AudioSource.h>
 #include <media/stagefright/AMRWriter.h>
 #include <media/stagefright/AACWriter.h>
@@ -36,13 +38,12 @@
 #include <media/stagefright/MPEG4Writer.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaCodecSource.h>
 #include <media/stagefright/OMXClient.h>
 #include <media/stagefright/OMXCodec.h>
-#include <media/stagefright/SurfaceMediaSource.h>
 #include <media/MediaProfiles.h>
 #include <camera/ICamera.h>
 #include <camera/CameraParameters.h>
-#include <gui/Surface.h>
 
 #include <utils/Errors.h>
 #include <sys/types.h>
@@ -72,8 +73,7 @@
       mAudioSource(AUDIO_SOURCE_CNT),
       mVideoSource(VIDEO_SOURCE_LIST_END),
       mCaptureTimeLapse(false),
-      mStarted(false),
-      mSurfaceMediaSource(NULL) {
+      mStarted(false) {
 
     ALOGV("Constructor");
     reset();
@@ -82,10 +82,19 @@
 StagefrightRecorder::~StagefrightRecorder() {
     ALOGV("Destructor");
     stop();
+
+    if (mLooper != NULL) {
+        mLooper->stop();
+    }
 }
 
 status_t StagefrightRecorder::init() {
     ALOGV("init");
+
+    mLooper = new ALooper;
+    mLooper->setName("recorder_looper");
+    mLooper->start();
+
     return OK;
 }
 
@@ -94,7 +103,7 @@
 // while encoding GL Frames
 sp<IGraphicBufferProducer> StagefrightRecorder::querySurfaceMediaSource() const {
     ALOGV("Get SurfaceMediaSource");
-    return mSurfaceMediaSource->getBufferQueue();
+    return mGraphicBufferProducer;
 }
 
 status_t StagefrightRecorder::setAudioSource(audio_source_t as) {
@@ -740,15 +749,61 @@
 }
 
 status_t StagefrightRecorder::prepare() {
-    return OK;
-}
-
-status_t StagefrightRecorder::start() {
-    CHECK_GE(mOutputFd, 0);
+    ALOGV("prepare");
+    if (mOutputFd < 0) {
+        ALOGE("Output file descriptor is invalid");
+        return INVALID_OPERATION;
+    }
 
     // Get UID here for permission checking
     mClientUid = IPCThreadState::self()->getCallingUid();
-    if (mWriter != NULL) {
+
+    status_t status = OK;
+
+    switch (mOutputFormat) {
+        case OUTPUT_FORMAT_DEFAULT:
+        case OUTPUT_FORMAT_THREE_GPP:
+        case OUTPUT_FORMAT_MPEG_4:
+            status = setupMPEG4Recording();
+            break;
+
+        case OUTPUT_FORMAT_AMR_NB:
+        case OUTPUT_FORMAT_AMR_WB:
+            status = setupAMRRecording();
+            break;
+
+        case OUTPUT_FORMAT_AAC_ADIF:
+        case OUTPUT_FORMAT_AAC_ADTS:
+            status = setupAACRecording();
+            break;
+
+        case OUTPUT_FORMAT_RTP_AVP:
+            status = setupRTPRecording();
+            break;
+
+        case OUTPUT_FORMAT_MPEG2TS:
+            status = setupMPEG2TSRecording();
+            break;
+
+        default:
+            ALOGE("Unsupported output file format: %d", mOutputFormat);
+            status = UNKNOWN_ERROR;
+            break;
+    }
+
+    return status;
+}
+
+status_t StagefrightRecorder::start() {
+    ALOGV("start");
+    if (mOutputFd < 0) {
+        ALOGE("Output file descriptor is invalid");
+        return INVALID_OPERATION;
+    }
+
+    // Get UID here for permission checking
+    mClientUid = IPCThreadState::self()->getCallingUid();
+    if (mWriter == NULL) {
         ALOGE("File writer is not avaialble");
         return UNKNOWN_ERROR;
     }
@@ -759,31 +814,35 @@
         case OUTPUT_FORMAT_DEFAULT:
         case OUTPUT_FORMAT_THREE_GPP:
         case OUTPUT_FORMAT_MPEG_4:
-            status = startMPEG4Recording();
+        {
+            sp<MetaData> meta = new MetaData;
+            setupMPEG4MetaData(&meta);
+            status = mWriter->start(meta.get());
             break;
+        }
 
         case OUTPUT_FORMAT_AMR_NB:
         case OUTPUT_FORMAT_AMR_WB:
-            status = startAMRRecording();
-            break;
-
         case OUTPUT_FORMAT_AAC_ADIF:
         case OUTPUT_FORMAT_AAC_ADTS:
-            status = startAACRecording();
-            break;
-
         case OUTPUT_FORMAT_RTP_AVP:
-            status = startRTPRecording();
-            break;
-
         case OUTPUT_FORMAT_MPEG2TS:
-            status = startMPEG2TSRecording();
+        {
+            status = mWriter->start();
             break;
+        }
 
         default:
+        {
             ALOGE("Unsupported output file format: %d", mOutputFormat);
             status = UNKNOWN_ERROR;
             break;
+        }
+    }
+
+    if (status != OK) {
+        mWriter.clear();
+        mWriter = NULL;
     }
 
     if ((status == OK) && (!mStarted)) {
@@ -817,58 +876,54 @@
         return NULL;
     }
 
-    sp<MetaData> encMeta = new MetaData;
+    sp<AMessage> format = new AMessage;
     const char *mime;
     switch (mAudioEncoder) {
         case AUDIO_ENCODER_AMR_NB:
         case AUDIO_ENCODER_DEFAULT:
-            mime = MEDIA_MIMETYPE_AUDIO_AMR_NB;
+            format->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB);
             break;
         case AUDIO_ENCODER_AMR_WB:
-            mime = MEDIA_MIMETYPE_AUDIO_AMR_WB;
+            format->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB);
             break;
         case AUDIO_ENCODER_AAC:
-            mime = MEDIA_MIMETYPE_AUDIO_AAC;
-            encMeta->setInt32(kKeyAACProfile, OMX_AUDIO_AACObjectLC);
+            format->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
+            format->setInt32("aac-profile", OMX_AUDIO_AACObjectLC);
             break;
         case AUDIO_ENCODER_HE_AAC:
-            mime = MEDIA_MIMETYPE_AUDIO_AAC;
-            encMeta->setInt32(kKeyAACProfile, OMX_AUDIO_AACObjectHE);
+            format->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
+            format->setInt32("aac-profile", OMX_AUDIO_AACObjectHE);
             break;
         case AUDIO_ENCODER_AAC_ELD:
-            mime = MEDIA_MIMETYPE_AUDIO_AAC;
-            encMeta->setInt32(kKeyAACProfile, OMX_AUDIO_AACObjectELD);
+            format->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
+            format->setInt32("aac-profile", OMX_AUDIO_AACObjectELD);
             break;
 
         default:
             ALOGE("Unknown audio encoder: %d", mAudioEncoder);
             return NULL;
     }
-    encMeta->setCString(kKeyMIMEType, mime);
 
     int32_t maxInputSize;
     CHECK(audioSource->getFormat()->findInt32(
                 kKeyMaxInputSize, &maxInputSize));
 
-    encMeta->setInt32(kKeyMaxInputSize, maxInputSize);
-    encMeta->setInt32(kKeyChannelCount, mAudioChannels);
-    encMeta->setInt32(kKeySampleRate, mSampleRate);
-    encMeta->setInt32(kKeyBitRate, mAudioBitRate);
+    format->setInt32("max-input-size", maxInputSize);
+    format->setInt32("channel-count", mAudioChannels);
+    format->setInt32("sample-rate", mSampleRate);
+    format->setInt32("bitrate", mAudioBitRate);
     if (mAudioTimeScale > 0) {
-        encMeta->setInt32(kKeyTimeScale, mAudioTimeScale);
+        format->setInt32("time-scale", mAudioTimeScale);
     }
 
-    OMXClient client;
-    CHECK_EQ(client.connect(), (status_t)OK);
     sp<MediaSource> audioEncoder =
-        OMXCodec::Create(client.interface(), encMeta,
-                         true /* createEncoder */, audioSource);
+            MediaCodecSource::Create(mLooper, format, audioSource);
     mAudioSourceNode = audioSource;
 
     return audioEncoder;
 }
 
-status_t StagefrightRecorder::startAACRecording() {
+status_t StagefrightRecorder::setupAACRecording() {
     // FIXME:
     // Add support for OUTPUT_FORMAT_AAC_ADIF
     CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_AAC_ADTS);
@@ -879,16 +934,10 @@
     CHECK(mAudioSource != AUDIO_SOURCE_CNT);
 
     mWriter = new AACWriter(mOutputFd);
-    status_t status = startRawAudioRecording();
-    if (status != OK) {
-        mWriter.clear();
-        mWriter = NULL;
-    }
-
-    return status;
+    return setupRawAudioRecording();
 }
 
-status_t StagefrightRecorder::startAMRRecording() {
+status_t StagefrightRecorder::setupAMRRecording() {
     CHECK(mOutputFormat == OUTPUT_FORMAT_AMR_NB ||
           mOutputFormat == OUTPUT_FORMAT_AMR_WB);
 
@@ -908,15 +957,10 @@
     }
 
     mWriter = new AMRWriter(mOutputFd);
-    status_t status = startRawAudioRecording();
-    if (status != OK) {
-        mWriter.clear();
-        mWriter = NULL;
-    }
-    return status;
+    return setupRawAudioRecording();
 }
 
-status_t StagefrightRecorder::startRawAudioRecording() {
+status_t StagefrightRecorder::setupRawAudioRecording() {
     if (mAudioSource >= AUDIO_SOURCE_CNT) {
         ALOGE("Invalid audio source: %d", mAudioSource);
         return BAD_VALUE;
@@ -942,12 +986,11 @@
         mWriter->setMaxFileSize(mMaxFileSizeBytes);
     }
     mWriter->setListener(mListener);
-    mWriter->start();
 
     return OK;
 }
 
-status_t StagefrightRecorder::startRTPRecording() {
+status_t StagefrightRecorder::setupRTPRecording() {
     CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_RTP_AVP);
 
     if ((mAudioSource != AUDIO_SOURCE_CNT
@@ -974,7 +1017,7 @@
             return err;
         }
 
-        err = setupVideoEncoder(mediaSource, mVideoBitRate, &source);
+        err = setupVideoEncoder(mediaSource, &source);
         if (err != OK) {
             return err;
         }
@@ -984,10 +1027,10 @@
     mWriter->addSource(source);
     mWriter->setListener(mListener);
 
-    return mWriter->start();
+    return OK;
 }
 
-status_t StagefrightRecorder::startMPEG2TSRecording() {
+status_t StagefrightRecorder::setupMPEG2TSRecording() {
     CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_MPEG2TS);
 
     sp<MediaWriter> writer = new MPEG2TSWriter(mOutputFd);
@@ -1018,7 +1061,7 @@
         }
 
         sp<MediaSource> encoder;
-        err = setupVideoEncoder(mediaSource, mVideoBitRate, &encoder);
+        err = setupVideoEncoder(mediaSource, &encoder);
 
         if (err != OK) {
             return err;
@@ -1037,7 +1080,7 @@
 
     mWriter = writer;
 
-    return mWriter->start();
+    return OK;
 }
 
 void StagefrightRecorder::clipVideoFrameRate() {
@@ -1278,49 +1321,14 @@
             return err;
         }
         *mediaSource = cameraSource;
-    } else if (mVideoSource == VIDEO_SOURCE_GRALLOC_BUFFER) {
-        // If using GRAlloc buffers, setup surfacemediasource.
-        // Later a handle to that will be passed
-        // to the client side when queried
-        status_t err = setupSurfaceMediaSource();
-        if (err != OK) {
-            return err;
-        }
-        *mediaSource = mSurfaceMediaSource;
+    } else if (mVideoSource == VIDEO_SOURCE_SURFACE) {
+        *mediaSource = NULL;
     } else {
         return INVALID_OPERATION;
     }
     return OK;
 }
 
-// setupSurfaceMediaSource creates a source with the given
-// width and height and framerate.
-// TODO: This could go in a static function inside SurfaceMediaSource
-// similar to that in CameraSource
-status_t StagefrightRecorder::setupSurfaceMediaSource() {
-    status_t err = OK;
-    mSurfaceMediaSource = new SurfaceMediaSource(mVideoWidth, mVideoHeight);
-    if (mSurfaceMediaSource == NULL) {
-        return NO_INIT;
-    }
-
-    if (mFrameRate == -1) {
-        int32_t frameRate = 0;
-        CHECK (mSurfaceMediaSource->getFormat()->findInt32(
-                                        kKeyFrameRate, &frameRate));
-        ALOGI("Frame rate is not explicitly set. Use the current frame "
-             "rate (%d fps)", frameRate);
-        mFrameRate = frameRate;
-    } else {
-        err = mSurfaceMediaSource->setFrameRate(mFrameRate);
-    }
-    CHECK(mFrameRate != -1);
-
-    mIsMetaDataStoredInVideoBuffers =
-        mSurfaceMediaSource->isMetaDataStoredInVideoBuffers();
-    return err;
-}
-
 status_t StagefrightRecorder::setupCameraSource(
         sp<CameraSource> *cameraSource) {
     status_t err = OK;
@@ -1384,25 +1392,22 @@
 
 status_t StagefrightRecorder::setupVideoEncoder(
         sp<MediaSource> cameraSource,
-        int32_t videoBitRate,
         sp<MediaSource> *source) {
     source->clear();
 
-    sp<MetaData> enc_meta = new MetaData;
-    enc_meta->setInt32(kKeyBitRate, videoBitRate);
-    enc_meta->setInt32(kKeyFrameRate, mFrameRate);
+    sp<AMessage> format = new AMessage();
 
     switch (mVideoEncoder) {
         case VIDEO_ENCODER_H263:
-            enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
+            format->setString("mime", MEDIA_MIMETYPE_VIDEO_H263);
             break;
 
         case VIDEO_ENCODER_MPEG_4_SP:
-            enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
+            format->setString("mime", MEDIA_MIMETYPE_VIDEO_MPEG4);
             break;
 
         case VIDEO_ENCODER_H264:
-            enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
+            format->setString("mime", MEDIA_MIMETYPE_VIDEO_AVC);
             break;
 
         default:
@@ -1410,59 +1415,69 @@
             break;
     }
 
-    sp<MetaData> meta = cameraSource->getFormat();
+    if (cameraSource != NULL) {
+        sp<MetaData> meta = cameraSource->getFormat();
 
-    int32_t width, height, stride, sliceHeight, colorFormat;
-    CHECK(meta->findInt32(kKeyWidth, &width));
-    CHECK(meta->findInt32(kKeyHeight, &height));
-    CHECK(meta->findInt32(kKeyStride, &stride));
-    CHECK(meta->findInt32(kKeySliceHeight, &sliceHeight));
-    CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
+        int32_t width, height, stride, sliceHeight, colorFormat;
+        CHECK(meta->findInt32(kKeyWidth, &width));
+        CHECK(meta->findInt32(kKeyHeight, &height));
+        CHECK(meta->findInt32(kKeyStride, &stride));
+        CHECK(meta->findInt32(kKeySliceHeight, &sliceHeight));
+        CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
 
-    enc_meta->setInt32(kKeyWidth, width);
-    enc_meta->setInt32(kKeyHeight, height);
-    enc_meta->setInt32(kKeyIFramesInterval, mIFramesIntervalSec);
-    enc_meta->setInt32(kKeyStride, stride);
-    enc_meta->setInt32(kKeySliceHeight, sliceHeight);
-    enc_meta->setInt32(kKeyColorFormat, colorFormat);
+        format->setInt32("width", width);
+        format->setInt32("height", height);
+        format->setInt32("stride", stride);
+        format->setInt32("slice-height", sliceHeight);
+        format->setInt32("color-format", colorFormat);
+    } else {
+        format->setInt32("width", mVideoWidth);
+        format->setInt32("height", mVideoHeight);
+        format->setInt32("stride", mVideoWidth);
+        format->setInt32("slice-height", mVideoWidth);
+        format->setInt32("color-format", OMX_COLOR_FormatAndroidOpaque);
+    }
+
+    format->setInt32("bitrate", mVideoBitRate);
+    format->setInt32("frame-rate", mFrameRate);
+    format->setInt32("i-frame-interval", mIFramesIntervalSec);
+
     if (mVideoTimeScale > 0) {
-        enc_meta->setInt32(kKeyTimeScale, mVideoTimeScale);
+        format->setInt32("time-scale", mVideoTimeScale);
     }
     if (mVideoEncoderProfile != -1) {
-        enc_meta->setInt32(kKeyVideoProfile, mVideoEncoderProfile);
+        format->setInt32("profile", mVideoEncoderProfile);
     }
     if (mVideoEncoderLevel != -1) {
-        enc_meta->setInt32(kKeyVideoLevel, mVideoEncoderLevel);
+        format->setInt32("level", mVideoEncoderLevel);
     }
 
-    OMXClient client;
-    CHECK_EQ(client.connect(), (status_t)OK);
-
-    uint32_t encoder_flags = 0;
+    uint32_t flags = 0;
     if (mIsMetaDataStoredInVideoBuffers) {
-        encoder_flags |= OMXCodec::kStoreMetaDataInVideoBuffers;
+        flags |= MediaCodecSource::FLAG_USE_METADATA_INPUT;
     }
 
-    // Do not wait for all the input buffers to become available.
-    // This give timelapse video recording faster response in
-    // receiving output from video encoder component.
-    if (mCaptureTimeLapse) {
-        encoder_flags |= OMXCodec::kOnlySubmitOneInputBufferAtOneTime;
+    if (cameraSource == NULL) {
+        flags |= MediaCodecSource::FLAG_USE_SURFACE_INPUT;
     }
 
-    sp<MediaSource> encoder = OMXCodec::Create(
-            client.interface(), enc_meta,
-            true /* createEncoder */, cameraSource,
-            NULL, encoder_flags);
+    sp<MediaCodecSource> encoder =
+            MediaCodecSource::Create(mLooper, format, cameraSource, flags);
     if (encoder == NULL) {
         ALOGW("Failed to create the encoder");
         // When the encoder fails to be created, we need to
         // release the camera source due to the camera's lock
         // and unlock mechanism.
-        cameraSource->stop();
+        if (cameraSource != NULL) {
+            cameraSource->stop();
+        }
         return UNKNOWN_ERROR;
     }
 
+    if (cameraSource == NULL) {
+        mGraphicBufferProducer = encoder->getGraphicBufferProducer();
+    }
+
     *source = encoder;
 
     return OK;
@@ -1496,16 +1511,12 @@
     return OK;
 }
 
-status_t StagefrightRecorder::setupMPEG4Recording(
-        int outputFd,
-        int32_t videoWidth, int32_t videoHeight,
-        int32_t videoBitRate,
-        int32_t *totalBitRate,
-        sp<MediaWriter> *mediaWriter) {
-    mediaWriter->clear();
-    *totalBitRate = 0;
+status_t StagefrightRecorder::setupMPEG4Recording() {
+    mWriter.clear();
+    mTotalBitRate = 0;
+
     status_t err = OK;
-    sp<MediaWriter> writer = new MPEG4Writer(outputFd);
+    sp<MediaWriter> writer = new MPEG4Writer(mOutputFd);
 
     if (mVideoSource < VIDEO_SOURCE_LIST_END) {
 
@@ -1516,13 +1527,13 @@
         }
 
         sp<MediaSource> encoder;
-        err = setupVideoEncoder(mediaSource, videoBitRate, &encoder);
+        err = setupVideoEncoder(mediaSource, &encoder);
         if (err != OK) {
             return err;
         }
 
         writer->addSource(encoder);
-        *totalBitRate += videoBitRate;
+        mTotalBitRate += mVideoBitRate;
     }
 
     // Audio source is added at the end if it exists.
@@ -1531,7 +1542,7 @@
     if (!mCaptureTimeLapse && (mAudioSource != AUDIO_SOURCE_CNT)) {
         err = setupAudioEncoder(writer);
         if (err != OK) return err;
-        *totalBitRate += mAudioBitRate;
+        mTotalBitRate += mAudioBitRate;
     }
 
     if (mInterleaveDurationUs > 0) {
@@ -1549,22 +1560,28 @@
         writer->setMaxFileSize(mMaxFileSizeBytes);
     }
 
-    mStartTimeOffsetMs = mEncoderProfiles->getStartTimeOffsetMs(mCameraId);
+    if (mVideoSource == VIDEO_SOURCE_DEFAULT
+            || mVideoSource == VIDEO_SOURCE_CAMERA) {
+        mStartTimeOffsetMs = mEncoderProfiles->getStartTimeOffsetMs(mCameraId);
+    } else if (mVideoSource == VIDEO_SOURCE_SURFACE) {
+        // surface source doesn't need large initial delay
+        mStartTimeOffsetMs = 200;
+    }
     if (mStartTimeOffsetMs > 0) {
         reinterpret_cast<MPEG4Writer *>(writer.get())->
             setStartTimeOffsetMs(mStartTimeOffsetMs);
     }
 
     writer->setListener(mListener);
-    *mediaWriter = writer;
+    mWriter = writer;
     return OK;
 }
 
-void StagefrightRecorder::setupMPEG4MetaData(int64_t startTimeUs, int32_t totalBitRate,
-        sp<MetaData> *meta) {
+void StagefrightRecorder::setupMPEG4MetaData(sp<MetaData> *meta) {
+    int64_t startTimeUs = systemTime() / 1000;
     (*meta)->setInt64(kKeyTime, startTimeUs);
     (*meta)->setInt32(kKeyFileType, mOutputFormat);
-    (*meta)->setInt32(kKeyBitRate, totalBitRate);
+    (*meta)->setInt32(kKeyBitRate, mTotalBitRate);
     (*meta)->setInt32(kKey64BitFileOffset, mUse64BitFileOffset);
     if (mMovieTimeScale > 0) {
         (*meta)->setInt32(kKeyTimeScale, mMovieTimeScale);
@@ -1577,27 +1594,6 @@
     }
 }
 
-status_t StagefrightRecorder::startMPEG4Recording() {
-    int32_t totalBitRate;
-    status_t err = setupMPEG4Recording(
-            mOutputFd, mVideoWidth, mVideoHeight,
-            mVideoBitRate, &totalBitRate, &mWriter);
-    if (err != OK) {
-        return err;
-    }
-
-    int64_t startTimeUs = systemTime() / 1000;
-    sp<MetaData> meta = new MetaData;
-    setupMPEG4MetaData(startTimeUs, totalBitRate, &meta);
-
-    err = mWriter->start(meta.get());
-    if (err != OK) {
-        return err;
-    }
-
-    return OK;
-}
-
 status_t StagefrightRecorder::pause() {
     ALOGV("pause");
     if (mWriter == NULL) {
@@ -1637,6 +1633,8 @@
         mWriter.clear();
     }
 
+    mGraphicBufferProducer.clear();
+
     if (mOutputFd >= 0) {
         ::close(mOutputFd);
         mOutputFd = -1;
@@ -1656,7 +1654,6 @@
         addBatteryData(params);
     }
 
-
     return err;
 }
 
@@ -1708,6 +1705,7 @@
     mRotationDegrees = 0;
     mLatitudex10000 = -3600000;
     mLongitudex10000 = -3600000;
+    mTotalBitRate = 0;
 
     mOutputFd = -1;
 
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index 31f09e0..7d6abd3 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -37,6 +37,7 @@
 class MediaProfiles;
 class IGraphicBufferProducer;
 class SurfaceMediaSource;
+class ALooper;
 
 struct StagefrightRecorder : public MediaRecorderBase {
     StagefrightRecorder();
@@ -106,6 +107,7 @@
     int32_t mLatitudex10000;
     int32_t mLongitudex10000;
     int32_t mStartTimeOffsetMs;
+    int32_t mTotalBitRate;
 
     bool mCaptureTimeLapse;
     int64_t mTimeBetweenTimeLapseFrameCaptureUs;
@@ -122,22 +124,16 @@
     // An <IGraphicBufferProducer> pointer
     // will be sent to the client side using which the
     // frame buffers will be queued and dequeued
-    sp<SurfaceMediaSource> mSurfaceMediaSource;
+    sp<IGraphicBufferProducer> mGraphicBufferProducer;
+    sp<ALooper> mLooper;
 
-    status_t setupMPEG4Recording(
-        int outputFd,
-        int32_t videoWidth, int32_t videoHeight,
-        int32_t videoBitRate,
-        int32_t *totalBitRate,
-        sp<MediaWriter> *mediaWriter);
-    void setupMPEG4MetaData(int64_t startTimeUs, int32_t totalBitRate,
-        sp<MetaData> *meta);
-    status_t startMPEG4Recording();
-    status_t startAMRRecording();
-    status_t startAACRecording();
-    status_t startRawAudioRecording();
-    status_t startRTPRecording();
-    status_t startMPEG2TSRecording();
+    status_t setupMPEG4Recording();
+    void setupMPEG4MetaData(sp<MetaData> *meta);
+    status_t setupAMRRecording();
+    status_t setupAACRecording();
+    status_t setupRawAudioRecording();
+    status_t setupRTPRecording();
+    status_t setupMPEG2TSRecording();
     sp<MediaSource> createAudioSource();
     status_t checkVideoEncoderCapabilities(
             bool *supportsCameraSourceMetaDataMode);
@@ -147,14 +143,8 @@
     // depending on the videosource type
     status_t setupMediaSource(sp<MediaSource> *mediaSource);
     status_t setupCameraSource(sp<CameraSource> *cameraSource);
-    // setup the surfacemediasource for the encoder
-    status_t setupSurfaceMediaSource();
-
     status_t setupAudioEncoder(const sp<MediaWriter>& writer);
-    status_t setupVideoEncoder(
-            sp<MediaSource> cameraSource,
-            int32_t videoBitRate,
-            sp<MediaSource> *source);
+    status_t setupVideoEncoder(sp<MediaSource> cameraSource, sp<MediaSource> *source);
 
     // Encoding parameter handling utilities
     status_t setParameter(const String8 &key, const String8 &value);
diff --git a/media/libmediaplayerservice/TestPlayerStub.cpp b/media/libmediaplayerservice/TestPlayerStub.cpp
index 5d9728a..5795773 100644
--- a/media/libmediaplayerservice/TestPlayerStub.cpp
+++ b/media/libmediaplayerservice/TestPlayerStub.cpp
@@ -113,7 +113,9 @@
 // Create the test player.
 // Call setDataSource on the test player with the url in param.
 status_t TestPlayerStub::setDataSource(
-        const char *url, const KeyedVector<String8, String8> *headers) {
+        const sp<IMediaHTTPService> &httpService,
+        const char *url,
+        const KeyedVector<String8, String8> *headers) {
     if (!isTestUrl(url) || NULL != mHandle) {
         return INVALID_OPERATION;
     }
@@ -162,7 +164,7 @@
     }
 
     mPlayer = (*mNewPlayer)();
-    return mPlayer->setDataSource(mContentUrl, headers);
+    return mPlayer->setDataSource(httpService, mContentUrl, headers);
 }
 
 // Internal cleanup.
diff --git a/media/libmediaplayerservice/TestPlayerStub.h b/media/libmediaplayerservice/TestPlayerStub.h
index a3802eb..55bf2c8 100644
--- a/media/libmediaplayerservice/TestPlayerStub.h
+++ b/media/libmediaplayerservice/TestPlayerStub.h
@@ -66,7 +66,9 @@
 
     // @param url Should be a test url. See class comment.
     virtual status_t setDataSource(
-            const char* url, const KeyedVector<String8, String8> *headers);
+            const sp<IMediaHTTPService> &httpService,
+            const char* url,
+            const KeyedVector<String8, String8> *headers);
 
     // Test player for a file descriptor source is not supported.
     virtual status_t setDataSource(int, int64_t, int64_t)  {
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index b04e7a6..06aac33 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -33,17 +33,16 @@
 
 NuPlayer::GenericSource::GenericSource(
         const sp<AMessage> &notify,
+        const sp<IMediaHTTPService> &httpService,
         const char *url,
-        const KeyedVector<String8, String8> *headers,
-        bool uidValid,
-        uid_t uid)
+        const KeyedVector<String8, String8> *headers)
     : Source(notify),
       mDurationUs(0ll),
       mAudioIsVorbis(false) {
     DataSource::RegisterDefaultSniffers();
 
     sp<DataSource> dataSource =
-        DataSource::CreateFromURI(url, headers);
+        DataSource::CreateFromURI(httpService, url, headers);
     CHECK(dataSource != NULL);
 
     initFromDataSource(dataSource);
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.h b/media/libmediaplayerservice/nuplayer/GenericSource.h
index 2da680c..20d597e 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.h
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.h
@@ -33,10 +33,9 @@
 struct NuPlayer::GenericSource : public NuPlayer::Source {
     GenericSource(
             const sp<AMessage> &notify,
+            const sp<IMediaHTTPService> &httpService,
             const char *url,
-            const KeyedVector<String8, String8> *headers,
-            bool uidValid = false,
-            uid_t uid = 0);
+            const KeyedVector<String8, String8> *headers);
 
     GenericSource(
             const sp<AMessage> &notify,
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
index f1782cc..ac2aab8 100644
--- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
@@ -24,6 +24,7 @@
 #include "LiveDataSource.h"
 #include "LiveSession.h"
 
+#include <media/IMediaHTTPService.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
@@ -34,13 +35,12 @@
 
 NuPlayer::HTTPLiveSource::HTTPLiveSource(
         const sp<AMessage> &notify,
+        const sp<IMediaHTTPService> &httpService,
         const char *url,
-        const KeyedVector<String8, String8> *headers,
-        bool uidValid, uid_t uid)
+        const KeyedVector<String8, String8> *headers)
     : Source(notify),
+      mHTTPService(httpService),
       mURL(url),
-      mUIDValid(uidValid),
-      mUID(uid),
       mFlags(0),
       mFinalResult(OK),
       mOffset(0),
@@ -79,8 +79,7 @@
     mLiveSession = new LiveSession(
             notify,
             (mFlags & kFlagIncognito) ? LiveSession::kFlagIncognito : 0,
-            mUIDValid,
-            mUID);
+            mHTTPService);
 
     mLiveLooper->registerHandler(mLiveSession);
 
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
index bcc3f8b..4d7251f 100644
--- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
+++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
@@ -28,10 +28,9 @@
 struct NuPlayer::HTTPLiveSource : public NuPlayer::Source {
     HTTPLiveSource(
             const sp<AMessage> &notify,
+            const sp<IMediaHTTPService> &httpService,
             const char *url,
-            const KeyedVector<String8, String8> *headers,
-            bool uidValid = false,
-            uid_t uid = 0);
+            const KeyedVector<String8, String8> *headers);
 
     virtual void prepareAsync();
     virtual void start();
@@ -61,10 +60,9 @@
         kWhatFetchSubtitleData,
     };
 
+    sp<IMediaHTTPService> mHTTPService;
     AString mURL;
     KeyedVector<String8, String8> mExtraHeaders;
-    bool mUIDValid;
-    uid_t mUID;
     uint32_t mFlags;
     status_t mFinalResult;
     off64_t mOffset;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 3669a5b..817395a 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -212,7 +212,9 @@
 }
 
 void NuPlayer::setDataSourceAsync(
-        const char *url, const KeyedVector<String8, String8> *headers) {
+        const sp<IMediaHTTPService> &httpService,
+        const char *url,
+        const KeyedVector<String8, String8> *headers) {
     sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());
     size_t len = strlen(url);
 
@@ -220,16 +222,18 @@
 
     sp<Source> source;
     if (IsHTTPLiveURL(url)) {
-        source = new HTTPLiveSource(notify, url, headers, mUIDValid, mUID);
+        source = new HTTPLiveSource(notify, httpService, url, headers);
     } else if (!strncasecmp(url, "rtsp://", 7)) {
-        source = new RTSPSource(notify, url, headers, mUIDValid, mUID);
+        source = new RTSPSource(
+                notify, httpService, url, headers, mUIDValid, mUID);
     } else if ((!strncasecmp(url, "http://", 7)
                 || !strncasecmp(url, "https://", 8))
                     && ((len >= 4 && !strcasecmp(".sdp", &url[len - 4]))
                     || strstr(url, ".sdp?"))) {
-        source = new RTSPSource(notify, url, headers, mUIDValid, mUID, true);
+        source = new RTSPSource(
+                notify, httpService, url, headers, mUIDValid, mUID, true);
     } else {
-        source = new GenericSource(notify, url, headers, mUIDValid, mUID);
+        source = new GenericSource(notify, httpService, url, headers);
     }
 
     msg->setObject("source", source);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index 590e1f2..9dfe4a0 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -38,7 +38,9 @@
     void setDataSourceAsync(const sp<IStreamSource> &source);
 
     void setDataSourceAsync(
-            const char *url, const KeyedVector<String8, String8> *headers);
+            const sp<IMediaHTTPService> &httpService,
+            const char *url,
+            const KeyedVector<String8, String8> *headers);
 
     void setDataSourceAsync(int fd, int64_t offset, int64_t length);
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index 239296e..9ef8dbd 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -71,7 +71,9 @@
 }
 
 status_t NuPlayerDriver::setDataSource(
-        const char *url, const KeyedVector<String8, String8> *headers) {
+        const sp<IMediaHTTPService> &httpService,
+        const char *url,
+        const KeyedVector<String8, String8> *headers) {
     Mutex::Autolock autoLock(mLock);
 
     if (mState != STATE_IDLE) {
@@ -80,7 +82,7 @@
 
     mState = STATE_SET_DATASOURCE_PENDING;
 
-    mPlayer->setDataSourceAsync(url, headers);
+    mPlayer->setDataSourceAsync(httpService, url, headers);
 
     while (mState == STATE_SET_DATASOURCE_PENDING) {
         mCondition.wait(mLock);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
index 99f72a6..0148fb1 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
@@ -31,7 +31,9 @@
     virtual status_t setUID(uid_t uid);
 
     virtual status_t setDataSource(
-            const char *url, const KeyedVector<String8, String8> *headers);
+            const sp<IMediaHTTPService> &httpService,
+            const char *url,
+            const KeyedVector<String8, String8> *headers);
 
     virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
 
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
index 18cf6d1..94800ba 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
@@ -24,6 +24,7 @@
 #include "MyHandler.h"
 #include "SDPLoader.h"
 
+#include <media/IMediaHTTPService.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaData.h>
 
@@ -33,12 +34,14 @@
 
 NuPlayer::RTSPSource::RTSPSource(
         const sp<AMessage> &notify,
+        const sp<IMediaHTTPService> &httpService,
         const char *url,
         const KeyedVector<String8, String8> *headers,
         bool uidValid,
         uid_t uid,
         bool isSDP)
     : Source(notify),
+      mHTTPService(httpService),
       mURL(url),
       mUIDValid(uidValid),
       mUID(uid),
@@ -92,7 +95,7 @@
     if (mIsSDP) {
         mSDPLoader = new SDPLoader(notify,
                 (mFlags & kFlagIncognito) ? SDPLoader::kFlagIncognito : 0,
-                mUIDValid, mUID);
+                mHTTPService);
 
         mSDPLoader->load(
                 mURL.c_str(), mExtraHeaders.isEmpty() ? NULL : &mExtraHeaders);
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.h b/media/libmediaplayerservice/nuplayer/RTSPSource.h
index 8cf34a0..3718bf9 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.h
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.h
@@ -34,6 +34,7 @@
 struct NuPlayer::RTSPSource : public NuPlayer::Source {
     RTSPSource(
             const sp<AMessage> &notify,
+            const sp<IMediaHTTPService> &httpService,
             const char *url,
             const KeyedVector<String8, String8> *headers,
             bool uidValid = false,
@@ -88,6 +89,7 @@
         bool mNPTMappingValid;
     };
 
+    sp<IMediaHTTPService> mHTTPService;
     AString mURL;
     KeyedVector<String8, String8> mExtraHeaders;
     bool mUIDValid;
diff --git a/media/libnbaio/AudioBufferProviderSource.cpp b/media/libnbaio/AudioBufferProviderSource.cpp
index 74a6fdb..4a69104 100644
--- a/media/libnbaio/AudioBufferProviderSource.cpp
+++ b/media/libnbaio/AudioBufferProviderSource.cpp
@@ -24,11 +24,11 @@
 namespace android {
 
 AudioBufferProviderSource::AudioBufferProviderSource(AudioBufferProvider *provider,
-                                                     NBAIO_Format format) :
+                                                     const NBAIO_Format& format) :
     NBAIO_Source(format), mProvider(provider), mConsumed(0)
 {
     ALOG_ASSERT(provider != NULL);
-    ALOG_ASSERT(format != Format_Invalid);
+    ALOG_ASSERT(Format_isValid(format));
 }
 
 AudioBufferProviderSource::~AudioBufferProviderSource()
diff --git a/media/libnbaio/AudioStreamInSource.cpp b/media/libnbaio/AudioStreamInSource.cpp
index 05273f6..ae8fac8 100644
--- a/media/libnbaio/AudioStreamInSource.cpp
+++ b/media/libnbaio/AudioStreamInSource.cpp
@@ -40,7 +40,7 @@
 ssize_t AudioStreamInSource::negotiate(const NBAIO_Format offers[], size_t numOffers,
                                       NBAIO_Format counterOffers[], size_t& numCounterOffers)
 {
-    if (mFormat == Format_Invalid) {
+    if (!Format_isValid(mFormat)) {
         mStreamBufferSizeBytes = mStream->common.get_buffer_size(&mStream->common);
         audio_format_t streamFormat = mStream->common.get_format(&mStream->common);
         if (streamFormat == AUDIO_FORMAT_PCM_16_BIT) {
@@ -67,7 +67,7 @@
 
 ssize_t AudioStreamInSource::read(void *buffer, size_t count)
 {
-    if (CC_UNLIKELY(mFormat == Format_Invalid)) {
+    if (CC_UNLIKELY(!Format_isValid(mFormat))) {
         return NEGOTIATE;
     }
     ssize_t bytesRead = mStream->read(mStream, buffer, count << mBitShift);
diff --git a/media/libnbaio/AudioStreamOutSink.cpp b/media/libnbaio/AudioStreamOutSink.cpp
index e4341d7..aa9810e 100644
--- a/media/libnbaio/AudioStreamOutSink.cpp
+++ b/media/libnbaio/AudioStreamOutSink.cpp
@@ -37,7 +37,7 @@
 ssize_t AudioStreamOutSink::negotiate(const NBAIO_Format offers[], size_t numOffers,
                                       NBAIO_Format counterOffers[], size_t& numCounterOffers)
 {
-    if (mFormat == Format_Invalid) {
+    if (!Format_isValid(mFormat)) {
         mStreamBufferSizeBytes = mStream->common.get_buffer_size(&mStream->common);
         audio_format_t streamFormat = mStream->common.get_format(&mStream->common);
         if (streamFormat == AUDIO_FORMAT_PCM_16_BIT) {
@@ -56,7 +56,7 @@
     if (!mNegotiated) {
         return NEGOTIATE;
     }
-    ALOG_ASSERT(mFormat != Format_Invalid);
+    ALOG_ASSERT(Format_isValid(mFormat));
     ssize_t ret = mStream->write(mStream, buffer, count << mBitShift);
     if (ret > 0) {
         ret >>= mBitShift;
diff --git a/media/libnbaio/MonoPipe.cpp b/media/libnbaio/MonoPipe.cpp
index 3c61b60..b23967b 100644
--- a/media/libnbaio/MonoPipe.cpp
+++ b/media/libnbaio/MonoPipe.cpp
@@ -30,7 +30,7 @@
 
 namespace android {
 
-MonoPipe::MonoPipe(size_t reqFrames, NBAIO_Format format, bool writeCanBlock) :
+MonoPipe::MonoPipe(size_t reqFrames, const NBAIO_Format& format, bool writeCanBlock) :
         NBAIO_Sink(format),
         mUpdateSeq(0),
         mReqFrames(reqFrames),
diff --git a/media/libnbaio/NBAIO.cpp b/media/libnbaio/NBAIO.cpp
index e0d2c21..51514de 100644
--- a/media/libnbaio/NBAIO.cpp
+++ b/media/libnbaio/NBAIO.cpp
@@ -22,17 +22,22 @@
 
 namespace android {
 
-size_t Format_frameSize(NBAIO_Format format)
+size_t Format_frameSize(const NBAIO_Format& format)
 {
+    // FIXME The sample format is hard-coded to AUDIO_FORMAT_PCM_16_BIT
     return Format_channelCount(format) * sizeof(short);
 }
 
-size_t Format_frameBitShift(NBAIO_Format format)
+int Format_frameBitShift(const NBAIO_Format& format)
 {
+    // FIXME The sample format is hard-coded to AUDIO_FORMAT_PCM_16_BIT
     // sizeof(short) == 2, so frame size == 1 << channels
     return Format_channelCount(format);
+    // FIXME must return -1 for non-power of 2
 }
 
+const NBAIO_Format Format_Invalid = { 0 };
+
 enum {
     Format_SR_8000,
     Format_SR_11025,
@@ -51,12 +56,12 @@
     Format_C_Mask = 0x18
 };
 
-unsigned Format_sampleRate(NBAIO_Format format)
+unsigned Format_sampleRate(const NBAIO_Format& format)
 {
-    if (format == Format_Invalid) {
+    if (!Format_isValid(format)) {
         return 0;
     }
-    switch (format & Format_SR_Mask) {
+    switch (format.mPacked & Format_SR_Mask) {
     case Format_SR_8000:
         return 8000;
     case Format_SR_11025:
@@ -78,12 +83,12 @@
     }
 }
 
-unsigned Format_channelCount(NBAIO_Format format)
+unsigned Format_channelCount(const NBAIO_Format& format)
 {
-    if (format == Format_Invalid) {
+    if (!Format_isValid(format)) {
         return 0;
     }
-    switch (format & Format_C_Mask) {
+    switch (format.mPacked & Format_C_Mask) {
     case Format_C_1:
         return 1;
     case Format_C_2:
@@ -95,7 +100,7 @@
 
 NBAIO_Format Format_from_SR_C(unsigned sampleRate, unsigned channelCount)
 {
-    NBAIO_Format format;
+    unsigned format;
     switch (sampleRate) {
     case 8000:
         format = Format_SR_8000;
@@ -134,7 +139,9 @@
     default:
         return Format_Invalid;
     }
-    return format;
+    NBAIO_Format ret;
+    ret.mPacked = format;
+    return ret;
 }
 
 // This is a default implementation; it is expected that subclasses will optimize this.
@@ -216,9 +223,9 @@
 {
     ALOGV("negotiate offers=%p numOffers=%u countersOffers=%p numCounterOffers=%u",
             offers, numOffers, counterOffers, numCounterOffers);
-    if (mFormat != Format_Invalid) {
+    if (Format_isValid(mFormat)) {
         for (size_t i = 0; i < numOffers; ++i) {
-            if (offers[i] == mFormat) {
+            if (Format_isEqual(offers[i], mFormat)) {
                 mNegotiated = true;
                 return i;
             }
@@ -233,4 +240,14 @@
     return (ssize_t) NEGOTIATE;
 }
 
+bool Format_isValid(const NBAIO_Format& format)
+{
+    return format.mPacked != Format_Invalid.mPacked;
+}
+
+bool Format_isEqual(const NBAIO_Format& format1, const NBAIO_Format& format2)
+{
+    return format1.mPacked == format2.mPacked;
+}
+
 }   // namespace android
diff --git a/media/libnbaio/NBLog.cpp b/media/libnbaio/NBLog.cpp
index d74a7a6..190824d 100644
--- a/media/libnbaio/NBLog.cpp
+++ b/media/libnbaio/NBLog.cpp
@@ -441,7 +441,7 @@
 
 bool NBLog::Reader::isIMemory(const sp<IMemory>& iMemory) const
 {
-    return iMemory.get() == mIMemory.get();
+    return iMemory != 0 && mIMemory != 0 && iMemory->pointer() == mIMemory->pointer();
 }
 
 }   // namespace android
diff --git a/media/libnbaio/Pipe.cpp b/media/libnbaio/Pipe.cpp
index 1c21f9c..115f311 100644
--- a/media/libnbaio/Pipe.cpp
+++ b/media/libnbaio/Pipe.cpp
@@ -25,7 +25,7 @@
 
 namespace android {
 
-Pipe::Pipe(size_t maxFrames, NBAIO_Format format) :
+Pipe::Pipe(size_t maxFrames, const NBAIO_Format& format) :
         NBAIO_Sink(format),
         mMaxFrames(roundup(maxFrames)),
         mBuffer(malloc(mMaxFrames * Format_frameSize(format))),
diff --git a/media/libnbaio/PipeReader.cpp b/media/libnbaio/PipeReader.cpp
index d786b84..24da1bd 100644
--- a/media/libnbaio/PipeReader.cpp
+++ b/media/libnbaio/PipeReader.cpp
@@ -59,7 +59,7 @@
     return avail;
 }
 
-ssize_t PipeReader::read(void *buffer, size_t count, int64_t readPTS)
+ssize_t PipeReader::read(void *buffer, size_t count, int64_t readPTS __unused)
 {
     ssize_t avail = availableToRead();
     if (CC_UNLIKELY(avail <= 0)) {
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index c68dfc1..f2354b4 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -365,6 +365,7 @@
       mIsEncoder(false),
       mUseMetadataOnEncoderOutput(false),
       mShutdownInProgress(false),
+      mIsConfiguredForAdaptivePlayback(false),
       mEncoderDelay(0),
       mEncoderPadding(0),
       mChannelMaskPresent(false),
@@ -372,7 +373,9 @@
       mDequeueCounter(0),
       mStoreMetaDataInOutputBuffers(false),
       mMetaDataBuffersToSubmit(0),
-      mRepeatFrameDelayUs(-1ll) {
+      mRepeatFrameDelayUs(-1ll),
+      mMaxPtsGapUs(-1l),
+      mCreateInputBuffersSuspended(false) {
     mUninitializedState = new UninitializedState(this);
     mLoadedState = new LoadedState(this);
     mLoadedToIdleState = new LoadedToIdleState(this);
@@ -1114,6 +1117,16 @@
                     &mRepeatFrameDelayUs)) {
             mRepeatFrameDelayUs = -1ll;
         }
+
+        if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) {
+            mMaxPtsGapUs = -1l;
+        }
+
+        if (!msg->findInt32(
+                    "create-input-buffers-suspended",
+                    (int32_t*)&mCreateInputBuffersSuspended)) {
+            mCreateInputBuffersSuspended = false;
+        }
     }
 
     // Always try to enable dynamic output buffers on native surface
@@ -1121,6 +1134,7 @@
     int32_t haveNativeWindow = msg->findObject("native-window", &obj) &&
             obj != NULL;
     mStoreMetaDataInOutputBuffers = false;
+    mIsConfiguredForAdaptivePlayback = false;
     if (!encoder && video && haveNativeWindow) {
         err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, OMX_TRUE);
         if (err != OK) {
@@ -1165,12 +1179,14 @@
                 ALOGW_IF(err != OK,
                         "[%s] prepareForAdaptivePlayback failed w/ err %d",
                         mComponentName.c_str(), err);
+                mIsConfiguredForAdaptivePlayback = (err == OK);
             }
             // allow failure
             err = OK;
         } else {
             ALOGV("[%s] storeMetaDataInBuffers succeeded", mComponentName.c_str());
             mStoreMetaDataInOutputBuffers = true;
+            mIsConfiguredForAdaptivePlayback = true;
         }
 
         int32_t push;
@@ -3318,11 +3334,11 @@
                 mCodec->mInputEOSResult = err;
             }
             break;
-
-            default:
-                CHECK_EQ((int)mode, (int)FREE_BUFFERS);
-                break;
         }
+
+        default:
+            CHECK_EQ((int)mode, (int)FREE_BUFFERS);
+            break;
     }
 }
 
@@ -3413,7 +3429,7 @@
             sp<AMessage> reply =
                 new AMessage(kWhatOutputBufferDrained, mCodec->id());
 
-            if (!mCodec->mSentFormat) {
+            if (!mCodec->mSentFormat && rangeLength > 0) {
                 mCodec->sendFormatChange(reply);
             }
 
@@ -3773,6 +3789,7 @@
     mCodec->mDequeueCounter = 0;
     mCodec->mMetaDataBuffersToSubmit = 0;
     mCodec->mRepeatFrameDelayUs = -1ll;
+    mCodec->mIsConfiguredForAdaptivePlayback = false;
 
     if (mCodec->mShutdownInProgress) {
         bool keepComponentAllocated = mCodec->mKeepComponentAllocated;
@@ -3921,6 +3938,37 @@
         }
     }
 
+    if (err == OK && mCodec->mMaxPtsGapUs > 0l) {
+        err = mCodec->mOMX->setInternalOption(
+                mCodec->mNode,
+                kPortIndexInput,
+                IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP,
+                &mCodec->mMaxPtsGapUs,
+                sizeof(mCodec->mMaxPtsGapUs));
+
+        if (err != OK) {
+            ALOGE("[%s] Unable to configure max timestamp gap (err %d)",
+                    mCodec->mComponentName.c_str(),
+                    err);
+        }
+    }
+
+    if (err == OK && mCodec->mCreateInputBuffersSuspended) {
+        bool suspend = true;
+        err = mCodec->mOMX->setInternalOption(
+                mCodec->mNode,
+                kPortIndexInput,
+                IOMX::INTERNAL_OPTION_SUSPEND,
+                &suspend,
+                sizeof(suspend));
+
+        if (err != OK) {
+            ALOGE("[%s] Unable to configure option to suspend (err %d)",
+                  mCodec->mComponentName.c_str(),
+                  err);
+        }
+    }
+
     if (err == OK) {
         notify->setObject("input-surface",
                 new BufferProducerWrapper(bufferProducer));
@@ -3978,6 +4026,7 @@
 
 bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) {
     switch (msg->what()) {
+        case kWhatSetParameters:
         case kWhatShutdown:
         {
             mCodec->deferMessage(msg);
@@ -4044,6 +4093,7 @@
 
 bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) {
     switch (msg->what()) {
+        case kWhatSetParameters:
         case kWhatShutdown:
         {
             mCodec->deferMessage(msg);
@@ -4313,6 +4363,22 @@
         }
     }
 
+    int64_t skipFramesBeforeUs;
+    if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) {
+        status_t err =
+            mOMX->setInternalOption(
+                     mNode,
+                     kPortIndexInput,
+                     IOMX::INTERNAL_OPTION_START_TIME,
+                     &skipFramesBeforeUs,
+                     sizeof(skipFramesBeforeUs));
+
+        if (err != OK) {
+            ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err);
+            return err;
+        }
+    }
+
     int32_t dropInputFrames;
     if (params->findInt32("drop-input-frames", &dropInputFrames)) {
         bool suspend = dropInputFrames != 0;
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index 6a2a696..63f9399 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -30,8 +30,10 @@
         MediaBufferGroup.cpp              \
         MediaCodec.cpp                    \
         MediaCodecList.cpp                \
+        MediaCodecSource.cpp              \
         MediaDefs.cpp                     \
         MediaExtractor.cpp                \
+        http/MediaHTTP.cpp                \
         MediaMuxer.cpp                    \
         MediaSource.cpp                   \
         MetaData.cpp                      \
@@ -103,13 +105,6 @@
         libFLAC \
         libmedia_helper
 
-LOCAL_SRC_FILES += \
-        chromium_http_stub.cpp
-LOCAL_CPPFLAGS += -DCHROMIUM_AVAILABLE=1
-
-LOCAL_SHARED_LIBRARIES += libstlport
-include external/stlport/libstlport.mk
-
 LOCAL_SHARED_LIBRARIES += \
         libstagefright_enc_common \
         libstagefright_avc_common \
diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp
index f0d1a14..6fdee6b 100644
--- a/media/libstagefright/AudioSource.cpp
+++ b/media/libstagefright/AudioSource.cpp
@@ -278,7 +278,7 @@
 
     // Drop retrieved and previously lost audio data.
     if (mNumFramesReceived == 0 && timeUs < mStartTimeUs) {
-        mRecord->getInputFramesLost();
+        (void) mRecord->getInputFramesLost();
         ALOGV("Drop audio data at %lld/%lld us", timeUs, mStartTimeUs);
         return OK;
     }
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp
index 29c007a..f57ee25 100644
--- a/media/libstagefright/AwesomePlayer.cpp
+++ b/media/libstagefright/AwesomePlayer.cpp
@@ -35,6 +35,8 @@
 
 #include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
+#include <media/IMediaHTTPConnection.h>
+#include <media/IMediaHTTPService.h>
 #include <media/IMediaPlayerService.h>
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/foundation/ADebug.h>
@@ -45,6 +47,7 @@
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaExtractor.h>
+#include <media/stagefright/MediaHTTP.h>
 #include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/OMXCodec.h>
@@ -277,15 +280,20 @@
 }
 
 status_t AwesomePlayer::setDataSource(
-        const char *uri, const KeyedVector<String8, String8> *headers) {
+        const sp<IMediaHTTPService> &httpService,
+        const char *uri,
+        const KeyedVector<String8, String8> *headers) {
     Mutex::Autolock autoLock(mLock);
-    return setDataSource_l(uri, headers);
+    return setDataSource_l(httpService, uri, headers);
 }
 
 status_t AwesomePlayer::setDataSource_l(
-        const char *uri, const KeyedVector<String8, String8> *headers) {
+        const sp<IMediaHTTPService> &httpService,
+        const char *uri,
+        const KeyedVector<String8, String8> *headers) {
     reset_l();
 
+    mHTTPService = httpService;
     mUri = uri;
 
     if (headers) {
@@ -582,6 +590,7 @@
     mSeekNotificationSent = true;
     mSeekTimeUs = 0;
 
+    mHTTPService.clear();
     mUri.setTo("");
     mUriHeaders.clear();
 
@@ -1483,7 +1492,7 @@
     CHECK(source != NULL);
 
     if (mTextDriver == NULL) {
-        mTextDriver = new TimedTextDriver(mListener);
+        mTextDriver = new TimedTextDriver(mListener, mHTTPService);
     }
 
     mTextDriver->addInBandTextSource(trackIndex, source);
@@ -2193,15 +2202,14 @@
     if (!strncasecmp("http://", mUri.string(), 7)
             || !strncasecmp("https://", mUri.string(), 8)
             || isWidevineStreaming) {
-        mConnectingDataSource = HTTPBase::Create(
-                (mFlags & INCOGNITO)
-                    ? HTTPBase::kFlagIncognito
-                    : 0);
-
-        if (mUIDValid) {
-            mConnectingDataSource->setUID(mUID);
+        if (mHTTPService == NULL) {
+            ALOGE("Attempt to play media from http URI without HTTP service.");
+            return UNKNOWN_ERROR;
         }
 
+        sp<IMediaHTTPConnection> conn = mHTTPService->makeHTTPConnection();
+        mConnectingDataSource = new MediaHTTP(conn);
+
         String8 cacheConfig;
         bool disconnectAtHighwatermark;
         NuCachedSource2::RemoveCacheSpecificHeaders(
@@ -2317,7 +2325,8 @@
             }
         }
     } else {
-        dataSource = DataSource::CreateFromURI(mUri.string(), &mUriHeaders);
+        dataSource = DataSource::CreateFromURI(
+                mHTTPService, mUri.string(), &mUriHeaders);
     }
 
     if (dataSource == NULL) {
@@ -2759,7 +2768,7 @@
         {
             Mutex::Autolock autoLock(mLock);
             if (mTextDriver == NULL) {
-                mTextDriver = new TimedTextDriver(mListener);
+                mTextDriver = new TimedTextDriver(mListener, mHTTPService);
             }
             // String values written in Parcel are UTF-16 values.
             String8 uri(request.readString16());
@@ -2771,7 +2780,7 @@
         {
             Mutex::Autolock autoLock(mLock);
             if (mTextDriver == NULL) {
-                mTextDriver = new TimedTextDriver(mListener);
+                mTextDriver = new TimedTextDriver(mListener, mHTTPService);
             }
             int fd         = request.readFileDescriptor();
             off64_t offset = request.readInt64();
@@ -2900,6 +2909,8 @@
     // get current position so we can start recreated stream from here
     getPosition(&mAudioTearDownPosition);
 
+    sp<IMediaHTTPService> savedHTTPService = mHTTPService;
+
     // Reset and recreate
     reset_l();
 
@@ -2909,7 +2920,7 @@
         mFileSource = fileSource;
         err = setDataSource_l(fileSource);
     } else {
-        err = setDataSource_l(uri, &uriHeaders);
+        err = setDataSource_l(savedHTTPService, uri, &uriHeaders);
     }
 
     mFlags |= PREPARING;
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
index 5772316..86844b8 100644
--- a/media/libstagefright/CameraSourceTimeLapse.cpp
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -85,7 +85,8 @@
     mVideoWidth = videoSize.width;
     mVideoHeight = videoSize.height;
 
-    if (!trySettingVideoSize(videoSize.width, videoSize.height)) {
+    if (OK == mInitCheck && !trySettingVideoSize(videoSize.width, videoSize.height)) {
+        releaseCamera();
         mInitCheck = NO_INIT;
     }
 
diff --git a/media/libstagefright/DataSource.cpp b/media/libstagefright/DataSource.cpp
index 97987e2..2704b74 100644
--- a/media/libstagefright/DataSource.cpp
+++ b/media/libstagefright/DataSource.cpp
@@ -16,10 +16,6 @@
 
 #include "include/AMRExtractor.h"
 
-#if CHROMIUM_AVAILABLE
-#include "include/chromium_http_stub.h"
-#endif
-
 #include "include/AACExtractor.h"
 #include "include/DRMExtractor.h"
 #include "include/FLACExtractor.h"
@@ -35,10 +31,13 @@
 
 #include "matroska/MatroskaExtractor.h"
 
+#include <media/IMediaHTTPConnection.h>
+#include <media/IMediaHTTPService.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/DataSource.h>
 #include <media/stagefright/FileSource.h>
 #include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaHTTP.h>
 #include <utils/String8.h>
 
 #include <cutils/properties.h>
@@ -180,7 +179,9 @@
 
 // static
 sp<DataSource> DataSource::CreateFromURI(
-        const char *uri, const KeyedVector<String8, String8> *headers) {
+        const sp<IMediaHTTPService> &httpService,
+        const char *uri,
+        const KeyedVector<String8, String8> *headers) {
     bool isWidevine = !strncasecmp("widevine://", uri, 11);
 
     sp<DataSource> source;
@@ -189,7 +190,7 @@
     } else if (!strncasecmp("http://", uri, 7)
             || !strncasecmp("https://", uri, 8)
             || isWidevine) {
-        sp<HTTPBase> httpSource = HTTPBase::Create();
+        sp<HTTPBase> httpSource = new MediaHTTP(httpService->makeHTTPConnection());
 
         String8 tmp;
         if (isWidevine) {
@@ -220,11 +221,6 @@
             // in the widevine:// case.
             source = httpSource;
         }
-
-# if CHROMIUM_AVAILABLE
-    } else if (!strncasecmp("data:", uri, 5)) {
-        source = createDataUriSource(uri);
-#endif
     } else {
         // Assume it's a filename.
         source = new FileSource(uri);
diff --git a/media/libstagefright/HTTPBase.cpp b/media/libstagefright/HTTPBase.cpp
index 5fa4b6f..ca68c3d 100644
--- a/media/libstagefright/HTTPBase.cpp
+++ b/media/libstagefright/HTTPBase.cpp
@@ -20,10 +20,6 @@
 
 #include "include/HTTPBase.h"
 
-#if CHROMIUM_AVAILABLE
-#include "include/chromium_http_stub.h"
-#endif
-
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALooper.h>
 
@@ -40,34 +36,7 @@
       mTotalTransferBytes(0),
       mPrevBandwidthMeasureTimeUs(0),
       mPrevEstimatedBandWidthKbps(0),
-      mBandWidthCollectFreqMs(5000),
-      mUIDValid(false),
-      mUID(0) {
-}
-
-// static
-sp<HTTPBase> HTTPBase::Create(uint32_t flags) {
-#if CHROMIUM_AVAILABLE
-        HTTPBase *dataSource = createChromiumHTTPDataSource(flags);
-        if (dataSource) {
-           return dataSource;
-        }
-#endif
-    {
-        TRESPASS();
-
-        return NULL;
-    }
-}
-
-// static
-status_t HTTPBase::UpdateProxyConfig(
-        const char *host, int32_t port, const char *exclusionList) {
-#if CHROMIUM_AVAILABLE
-    return UpdateChromiumHTTPDataSourceProxyConfig(host, port, exclusionList);
-#else
-    return INVALID_OPERATION;
-#endif
+      mBandWidthCollectFreqMs(5000) {
 }
 
 void HTTPBase::addBandwidthMeasurement(
@@ -135,21 +104,6 @@
     return OK;
 }
 
-void HTTPBase::setUID(uid_t uid) {
-    mUIDValid = true;
-    mUID = uid;
-}
-
-bool HTTPBase::getUID(uid_t *uid) const {
-    if (!mUIDValid) {
-        return false;
-    }
-
-    *uid = mUID;
-
-    return true;
-}
-
 // static
 void HTTPBase::RegisterSocketUserTag(int sockfd, uid_t uid, uint32_t kTag) {
     int res = qtaguid_tagSocket(sockfd, kTag, uid);
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index 6a33ce6..e6312c1 100644
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -2441,6 +2441,58 @@
     return OK;
 }
 
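+// Audio Object Types (AOT) from the AudioSpecificConfig, per ISO/IEC 14496-3;
+// only the entries referenced below are left uncommented.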
+typedef enum {
+    //AOT_NONE             = -1,
+    //AOT_NULL_OBJECT      = 0,
+    //AOT_AAC_MAIN         = 1, /**< Main profile                              */
+    AOT_AAC_LC           = 2,   /**< Low Complexity object                     */
+    //AOT_AAC_SSR          = 3,
+    //AOT_AAC_LTP          = 4,
+    AOT_SBR              = 5,
+    //AOT_AAC_SCAL         = 6,
+    //AOT_TWIN_VQ          = 7,
+    //AOT_CELP             = 8,
+    //AOT_HVXC             = 9,
+    //AOT_RSVD_10          = 10, /**< (reserved)                                */
+    //AOT_RSVD_11          = 11, /**< (reserved)                                */
+    //AOT_TTSI             = 12, /**< TTSI Object                               */
+    //AOT_MAIN_SYNTH       = 13, /**< Main Synthetic object                     */
+    //AOT_WAV_TAB_SYNTH    = 14, /**< Wavetable Synthesis object                */
+    //AOT_GEN_MIDI         = 15, /**< General MIDI object                       */
+    //AOT_ALG_SYNTH_AUD_FX = 16, /**< Algorithmic Synthesis and Audio FX object */
+    AOT_ER_AAC_LC        = 17,   /**< Error Resilient(ER) AAC Low Complexity    */
+    //AOT_RSVD_18          = 18, /**< (reserved)                                */
+    //AOT_ER_AAC_LTP       = 19, /**< Error Resilient(ER) AAC LTP object        */
+    AOT_ER_AAC_SCAL      = 20,   /**< Error Resilient(ER) AAC Scalable object   */
+    //AOT_ER_TWIN_VQ       = 21, /**< Error Resilient(ER) TwinVQ object         */
+    AOT_ER_BSAC          = 22,   /**< Error Resilient(ER) BSAC object           */
+    AOT_ER_AAC_LD        = 23,   /**< Error Resilient(ER) AAC LowDelay object   */
+    //AOT_ER_CELP          = 24, /**< Error Resilient(ER) CELP object           */
+    //AOT_ER_HVXC          = 25, /**< Error Resilient(ER) HVXC object           */
+    //AOT_ER_HILN          = 26, /**< Error Resilient(ER) HILN object           */
+    //AOT_ER_PARA          = 27, /**< Error Resilient(ER) Parametric object     */
+    //AOT_RSVD_28          = 28, /**< might become SSC                          */
+    AOT_PS               = 29,   /**< PS, Parametric Stereo (includes SBR)      */
+    //AOT_MPEGS            = 30, /**< MPEG Surround                             */
+
+    AOT_ESCAPE           = 31,   /**< Signal AOT uses more than 5 bits          */
+
+    //AOT_MP3ONMP4_L1      = 32, /**< MPEG-Layer1 in mp4                        */
+    //AOT_MP3ONMP4_L2      = 33, /**< MPEG-Layer2 in mp4                        */
+    //AOT_MP3ONMP4_L3      = 34, /**< MPEG-Layer3 in mp4                        */
+    //AOT_RSVD_35          = 35, /**< might become DST                          */
+    //AOT_RSVD_36          = 36, /**< might become ALS                          */
+    //AOT_AAC_SLS          = 37, /**< AAC + SLS                                 */
+    //AOT_SLS              = 38, /**< SLS                                       */
+    //AOT_ER_AAC_ELD       = 39, /**< AAC Enhanced Low Delay                    */
+
+    //AOT_USAC             = 42, /**< USAC                                      */
+    //AOT_SAOC             = 43, /**< SAOC                                      */
+    //AOT_LD_MPEGS         = 44, /**< Low Delay MPEG Surround                   */
+
+    //AOT_RSVD50           = 50,  /**< Interim AOT for Rsvd50                   */
+} AUDIO_OBJECT_TYPE;
+
 status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio(
         const void *esds_data, size_t esds_size) {
     ESDS esds(esds_data, esds_size);
@@ -2523,7 +2575,7 @@
         sampleRate = kSamplingRate[freqIndex];
     }
 
-    if (objectType == 5 || objectType == 29) { // SBR specific config per 14496-3 table 1.13
+    if (objectType == AOT_SBR || objectType == AOT_PS) { // SBR specific config per 14496-3 table 1.13
         uint32_t extFreqIndex = br.getBits(4);
         int32_t extSampleRate;
         if (extFreqIndex == 15) {
@@ -2541,6 +2593,111 @@
         //      mLastTrack->meta->setInt32(kKeyExtSampleRate, extSampleRate);
     }
 
+    switch (numChannels) {
+        // values defined in 14496-3_2009 amendment-4 Table 1.19 - Channel Configuration
+        case 0:
+        case 1:// FC
+        case 2:// FL FR
+        case 3:// FC, FL FR
+        case 4:// FC, FL FR, RC
+        case 5:// FC, FL FR, SL SR
+        case 6:// FC, FL FR, SL SR, LFE
+            //numChannels already contains the right value
+            break;
+        case 11:// FC, FL FR, SL SR, RC, LFE
+            numChannels = 7;
+            break;
+        case 7: // FC, FCL FCR, FL FR, SL SR, LFE
+        case 12:// FC, FL  FR,  SL SR, RL RR, LFE
+        case 14:// FC, FL  FR,  SL SR, LFE, FHL FHR
+            numChannels = 8;
+            break;
+        default:
+            return ERROR_UNSUPPORTED;
+    }
+
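+    // Continue parsing the AudioSpecificConfig: if explicit SBR/PS signalling
+    // was present, re-read the underlying object type, then (for AAC-LC style
+    // objects) walk the GASpecificConfig and, when channelConfiguration was 0,
+    // derive the channel count from the embedded Program Config Element (PCE).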
+    {
+        if (objectType == AOT_SBR || objectType == AOT_PS) {
+            const int32_t extensionSamplingFrequency = br.getBits(4);
+            objectType = br.getBits(5);
+
+            if (objectType == AOT_ESCAPE) {
+                objectType = 32 + br.getBits(6);
+            }
+        }
+        if (objectType == AOT_AAC_LC || objectType == AOT_ER_AAC_LC ||
+                objectType == AOT_ER_AAC_LD || objectType == AOT_ER_AAC_SCAL ||
+                objectType == AOT_ER_BSAC) {
+            const int32_t frameLengthFlag = br.getBits(1);
+
+            const int32_t dependsOnCoreCoder = br.getBits(1);
+
+            if (dependsOnCoreCoder) {
+                const int32_t coreCoderDelay = br.getBits(14);
+            }
+
+            const int32_t extensionFlag = br.getBits(1);
+
+            if (numChannels == 0) {
+                int32_t channelsEffectiveNum = 0;
+                int32_t channelsNum = 0;
+                const int32_t ElementInstanceTag = br.getBits(4);
+                const int32_t Profile = br.getBits(2);
+                const int32_t SamplingFrequencyIndex = br.getBits(4);
+                const int32_t NumFrontChannelElements = br.getBits(4);
+                const int32_t NumSideChannelElements = br.getBits(4);
+                const int32_t NumBackChannelElements = br.getBits(4);
+                const int32_t NumLfeChannelElements = br.getBits(2);
+                const int32_t NumAssocDataElements = br.getBits(3);
+                const int32_t NumValidCcElements = br.getBits(4);
+
+                const int32_t MonoMixdownPresent = br.getBits(1);
+                if (MonoMixdownPresent != 0) {
+                    const int32_t MonoMixdownElementNumber = br.getBits(4);
+                }
+
+                const int32_t StereoMixdownPresent = br.getBits(1);
+                if (StereoMixdownPresent != 0) {
+                    const int32_t StereoMixdownElementNumber = br.getBits(4);
+                }
+
+                const int32_t MatrixMixdownIndexPresent = br.getBits(1);
+                if (MatrixMixdownIndexPresent != 0) {
+                    const int32_t MatrixMixdownIndex = br.getBits(2);
+                    const int32_t PseudoSurroundEnable = br.getBits(1);
+                }
+
+                int i;
+                for (i=0; i < NumFrontChannelElements; i++) {
+                    const int32_t FrontElementIsCpe = br.getBits(1);
+                    const int32_t FrontElementTagSelect = br.getBits(4);
+                    channelsNum += FrontElementIsCpe ? 2 : 1;
+                }
+
+                for (i=0; i < NumSideChannelElements; i++) {
+                    const int32_t SideElementIsCpe = br.getBits(1);
+                    const int32_t SideElementTagSelect = br.getBits(4);
+                    channelsNum += SideElementIsCpe ? 2 : 1;
+                }
+
+                for (i=0; i < NumBackChannelElements; i++) {
+                    const int32_t BackElementIsCpe = br.getBits(1);
+                    const int32_t BackElementTagSelect = br.getBits(4);
+                    channelsNum += BackElementIsCpe ? 2 : 1;
+                }
+                channelsEffectiveNum = channelsNum;
+
+                for (i=0; i < NumLfeChannelElements; i++) {
+                    const int32_t LfeElementTagSelect = br.getBits(4);
+                    channelsNum += 1;
+                }
+                ALOGV("mpeg4 audio channelsNum = %d", channelsNum);
+                ALOGV("mpeg4 audio channelsEffectiveNum = %d", channelsEffectiveNum);
+                numChannels = channelsNum;
+            }
+        }
+    }
+
     if (numChannels == 0) {
         return ERROR_UNSUPPORTED;
     }
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index e7d3cc2..ff6feb9 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -973,13 +973,16 @@
     if (param && param->findInt32(kKeyFileType, &fileType) &&
         fileType != OUTPUT_FORMAT_MPEG_4) {
         writeFourcc("3gp4");
-    } else {
+        writeInt32(0);
         writeFourcc("isom");
+        writeFourcc("3gp4");
+    } else {
+        writeFourcc("mp42");
+        writeInt32(0);
+        writeFourcc("isom");
+        writeFourcc("mp42");
     }
 
-    writeInt32(0);
-    writeFourcc("isom");
-    writeFourcc("3gp4");
     endBox();
 }
 
@@ -1761,7 +1764,7 @@
 }
 
 status_t MPEG4Writer::Track::stop() {
-    ALOGD("Stopping %s track", mIsAudio? "Audio": "Video");
+    ALOGD("%s track stopping", mIsAudio? "Audio": "Video");
     if (!mStarted) {
         ALOGE("Stop() called but track is not started");
         return ERROR_END_OF_STREAM;
@@ -1772,19 +1775,14 @@
     }
     mDone = true;
 
+    ALOGD("%s track source stopping", mIsAudio? "Audio": "Video");
+    mSource->stop();
+    ALOGD("%s track source stopped", mIsAudio? "Audio": "Video");
+
     void *dummy;
     pthread_join(mThread, &dummy);
-
     status_t err = static_cast<status_t>(reinterpret_cast<uintptr_t>(dummy));
 
-    ALOGD("Stopping %s track source", mIsAudio? "Audio": "Video");
-    {
-        status_t status = mSource->stop();
-        if (err == OK && status != OK && status != ERROR_END_OF_STREAM) {
-            err = status;
-        }
-    }
-
     ALOGD("%s track stopped", mIsAudio? "Audio": "Video");
     return err;
 }
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
new file mode 100644
index 0000000..2c20d62
--- /dev/null
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -0,0 +1,881 @@
+/*
+ * Copyright 2014, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaCodecSource"
+#define DEBUG_DRIFT_TIME 0
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/Surface.h>
+#include <media/ICrypto.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MediaCodecSource.h>
+#include <media/stagefright/Utils.h>
+
+namespace android {
+
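+// Release the MediaBuffer that an encoder input ABuffer may still reference
+// through its "mediaBuffer" meta entry (set when queueing video input buffers).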
+static void ReleaseMediaBufferReference(const sp<ABuffer> &accessUnit) {
+    void *mbuf;
+    if (accessUnit->meta()->findPointer("mediaBuffer", &mbuf)
+            && mbuf != NULL) {
+        ALOGV("releasing mbuf %p", mbuf);
+
+        accessUnit->meta()->setPointer("mediaBuffer", NULL);
+
+        static_cast<MediaBuffer *>(mbuf)->release();
+        mbuf = NULL;
+    }
+}
+
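+// Puller runs a dedicated looper that repeatedly reads buffers from the
+// MediaSource and forwards each buffer (or a NULL pointer on EOS/error) to
+// MediaCodecSource via the notify message supplied to start().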
+struct MediaCodecSource::Puller : public AHandler {
+    Puller(const sp<MediaSource> &source);
+
+    status_t start(const sp<MetaData> &meta, const sp<AMessage> &notify);
+    void stopAsync();
+
+    void pause();
+    void resume();
+
+protected:
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+    virtual ~Puller();
+
+private:
+    enum {
+        kWhatStart = 'msta',
+        kWhatStop,
+        kWhatPull,
+        kWhatPause,
+        kWhatResume,
+    };
+
+    sp<MediaSource> mSource;
+    sp<AMessage> mNotify;
+    sp<ALooper> mLooper;
+    int32_t mPullGeneration;
+    bool mIsAudio;
+    bool mPaused;
+    bool mReachedEOS;
+
+    status_t postSynchronouslyAndReturnError(const sp<AMessage> &msg);
+    void schedulePull();
+    void handleEOS();
+
+    DISALLOW_EVIL_CONSTRUCTORS(Puller);
+};
+
+MediaCodecSource::Puller::Puller(const sp<MediaSource> &source)
+    : mSource(source),
+      mLooper(new ALooper()),
+      mPullGeneration(0),
+      mIsAudio(false),
+      mPaused(false),
+      mReachedEOS(false) {
+    sp<MetaData> meta = source->getFormat();
+    const char *mime;
+    CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+    mIsAudio = !strncasecmp(mime, "audio/", 6);
+
+    mLooper->setName("pull_looper");
+}
+
+MediaCodecSource::Puller::~Puller() {
+    mLooper->unregisterHandler(id());
+    mLooper->stop();
+}
+
+status_t MediaCodecSource::Puller::postSynchronouslyAndReturnError(
+        const sp<AMessage> &msg) {
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+
+    if (err != OK) {
+        return err;
+    }
+
+    if (!response->findInt32("err", &err)) {
+        err = OK;
+    }
+
+    return err;
+}
+
+status_t MediaCodecSource::Puller::start(const sp<MetaData> &meta,
+        const sp<AMessage> &notify) {
+    ALOGV("puller (%s) start", mIsAudio ? "audio" : "video");
+    mLooper->start(
+            false /* runOnCallingThread */,
+            false /* canCallJava */,
+            PRIORITY_AUDIO);
+    mLooper->registerHandler(this);
+    mNotify = notify;
+
+    sp<AMessage> msg = new AMessage(kWhatStart, id());
+    msg->setObject("meta", meta);
+    return postSynchronouslyAndReturnError(msg);
+}
+
+void MediaCodecSource::Puller::stopAsync() {
+    ALOGV("puller (%s) stopAsync", mIsAudio ? "audio" : "video");
+    (new AMessage(kWhatStop, id()))->post();
+}
+
+void MediaCodecSource::Puller::pause() {
+    (new AMessage(kWhatPause, id()))->post();
+}
+
+void MediaCodecSource::Puller::resume() {
+    (new AMessage(kWhatResume, id()))->post();
+}
+
+void MediaCodecSource::Puller::schedulePull() {
+    sp<AMessage> msg = new AMessage(kWhatPull, id());
+    msg->setInt32("generation", mPullGeneration);
+    msg->post();
+}
+
+void MediaCodecSource::Puller::handleEOS() {
+    if (!mReachedEOS) {
+        ALOGV("puller (%s) posting EOS", mIsAudio ? "audio" : "video");
+        mReachedEOS = true;
+        sp<AMessage> notify = mNotify->dup();
+        notify->setPointer("accessUnit", NULL);
+        notify->post();
+    }
+}
+
+void MediaCodecSource::Puller::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatStart:
+        {
+            sp<RefBase> obj;
+            CHECK(msg->findObject("meta", &obj));
+
+            mReachedEOS = false;
+
+            status_t err = mSource->start(static_cast<MetaData *>(obj.get()));
+
+            if (err == OK) {
+                schedulePull();
+            }
+
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", err);
+
+            uint32_t replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatStop:
+        {
+            ALOGV("source (%s) stopping", mIsAudio ? "audio" : "video");
+            mSource->stop();
+            ALOGV("source (%s) stopped", mIsAudio ? "audio" : "video");
+            ++mPullGeneration;
+
+            handleEOS();
+            break;
+        }
+
+        case kWhatPull:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("generation", &generation));
+
+            if (generation != mPullGeneration) {
+                break;
+            }
+
+            MediaBuffer *mbuf;
+            status_t err = mSource->read(&mbuf);
+
+            if (mPaused) {
+                if (err == OK) {
+                    mbuf->release();
+                    mbuf = NULL;
+                }
+
+                msg->post();
+                break;
+            }
+
+            if (err != OK) {
+                if (err == ERROR_END_OF_STREAM) {
+                    ALOGV("stream ended, mbuf %p", mbuf);
+                } else {
+                    ALOGE("error %d reading stream.", err);
+                }
+                handleEOS();
+            } else {
+                sp<AMessage> notify = mNotify->dup();
+
+                notify->setPointer("accessUnit", mbuf);
+                notify->post();
+
+                msg->post();
+            }
+            break;
+        }
+
+        case kWhatPause:
+        {
+            mPaused = true;
+            break;
+        }
+
+        case kWhatResume:
+        {
+            mPaused = false;
+            break;
+        }
+
+        default:
+            TRESPASS();
+    }
+}
+
+// static
+sp<MediaCodecSource> MediaCodecSource::Create(
+        const sp<ALooper> &looper,
+        const sp<AMessage> &format,
+        const sp<MediaSource> &source,
+        uint32_t flags) {
+    sp<MediaCodecSource> mediaSource =
+            new MediaCodecSource(looper, format, source, flags);
+
+    if (mediaSource->init() == OK) {
+        return mediaSource;
+    }
+    return NULL;
+}
+
+status_t MediaCodecSource::start(MetaData* params) {
+    sp<AMessage> msg = new AMessage(kWhatStart, mReflector->id());
+    msg->setObject("meta", params);
+    return postSynchronouslyAndReturnError(msg);
+}
+
+status_t MediaCodecSource::stop() {
+    sp<AMessage> msg = new AMessage(kWhatStop, mReflector->id());
+    return postSynchronouslyAndReturnError(msg);
+}
+
+status_t MediaCodecSource::pause() {
+    (new AMessage(kWhatPause, mReflector->id()))->post();
+    return OK;
+}
+
+sp<IGraphicBufferProducer> MediaCodecSource::getGraphicBufferProducer() {
+    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
+    return mGraphicBufferProducer;
+}
+
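+// Blocks until an encoded buffer is available or EOS/error has been signalled.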
+status_t MediaCodecSource::read(MediaBuffer** buffer,
+        const ReadOptions* options) {
+    Mutex::Autolock autolock(mOutputBufferLock);
+
+    *buffer = NULL;
+    while (mOutputBufferQueue.size() == 0 && !mEncodedReachedEOS) {
+        mOutputBufferCond.wait(mOutputBufferLock);
+    }
+    if (!mEncodedReachedEOS) {
+        *buffer = *mOutputBufferQueue.begin();
+        mOutputBufferQueue.erase(mOutputBufferQueue.begin());
+        return OK;
+    }
+    return mErrorCode;
+}
+
+void MediaCodecSource::signalBufferReturned(MediaBuffer *buffer) {
+    buffer->setObserver(0);
+    buffer->release();
+}
+
+MediaCodecSource::MediaCodecSource(
+        const sp<ALooper> &looper,
+        const sp<AMessage> &outputFormat,
+        const sp<MediaSource> &source,
+        uint32_t flags)
+    : mLooper(looper),
+      mOutputFormat(outputFormat),
+      mMeta(new MetaData),
+      mFlags(flags),
+      mIsVideo(false),
+      mStarted(false),
+      mStopping(false),
+      mDoMoreWorkPending(false),
+      mPullerReachedEOS(false),
+      mFirstSampleTimeUs(-1ll),
+      mEncodedReachedEOS(false),
+      mErrorCode(OK) {
+    CHECK(mLooper != NULL);
+
+    AString mime;
+    CHECK(mOutputFormat->findString("mime", &mime));
+
+    if (!strncasecmp("video/", mime.c_str(), 6)) {
+        mIsVideo = true;
+    }
+
+    if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
+        mPuller = new Puller(source);
+    }
+}
+
+MediaCodecSource::~MediaCodecSource() {
+    releaseEncoder();
+
+    mCodecLooper->stop();
+    mLooper->unregisterHandler(mReflector->id());
+}
+
+status_t MediaCodecSource::init() {
+    status_t err = initEncoder();
+
+    if (err != OK) {
+        releaseEncoder();
+    }
+
+    return err;
+}
+
+status_t MediaCodecSource::initEncoder() {
+    mReflector = new AHandlerReflector<MediaCodecSource>(this);
+    mLooper->registerHandler(mReflector);
+
+    mCodecLooper = new ALooper;
+    mCodecLooper->setName("codec_looper");
+    mCodecLooper->start();
+
+    if (mFlags & FLAG_USE_METADATA_INPUT) {
+        mOutputFormat->setInt32("store-metadata-in-buffers", 1);
+    }
+
+    if (mFlags & FLAG_USE_SURFACE_INPUT) {
+        mOutputFormat->setInt32("create-input-buffers-suspended", 1);
+    }
+
+    AString outputMIME;
+    CHECK(mOutputFormat->findString("mime", &outputMIME));
+
+    mEncoder = MediaCodec::CreateByType(
+            mCodecLooper, outputMIME.c_str(), true /* encoder */);
+
+    if (mEncoder == NULL) {
+        return NO_INIT;
+    }
+
+    ALOGV("output format is '%s'", mOutputFormat->debugString(0).c_str());
+
+    status_t err = mEncoder->configure(
+                mOutputFormat,
+                NULL /* nativeWindow */,
+                NULL /* crypto */,
+                MediaCodec::CONFIGURE_FLAG_ENCODE);
+
+    if (err != OK) {
+        return err;
+    }
+
+    mEncoder->getOutputFormat(&mOutputFormat);
+    convertMessageToMetaData(mOutputFormat, mMeta);
+
+    if (mFlags & FLAG_USE_SURFACE_INPUT) {
+        CHECK(mIsVideo);
+
+        err = mEncoder->createInputSurface(&mGraphicBufferProducer);
+
+        if (err != OK) {
+            return err;
+        }
+    }
+
+    err = mEncoder->start();
+
+    if (err != OK) {
+        return err;
+    }
+
+    err = mEncoder->getInputBuffers(&mEncoderInputBuffers);
+
+    if (err != OK) {
+        return err;
+    }
+
+    err = mEncoder->getOutputBuffers(&mEncoderOutputBuffers);
+
+    if (err != OK) {
+        return err;
+    }
+
+    mEncodedReachedEOS = false;
+    mErrorCode = OK;
+
+    return OK;
+}
+
+void MediaCodecSource::releaseEncoder() {
+    if (mEncoder == NULL) {
+        return;
+    }
+
+    mEncoder->release();
+    mEncoder.clear();
+
+    while (!mInputBufferQueue.empty()) {
+        MediaBuffer *mbuf = *mInputBufferQueue.begin();
+        mInputBufferQueue.erase(mInputBufferQueue.begin());
+        if (mbuf != NULL) {
+            mbuf->release();
+        }
+    }
+
+    for (size_t i = 0; i < mEncoderInputBuffers.size(); ++i) {
+        sp<ABuffer> accessUnit = mEncoderInputBuffers.itemAt(i);
+        ReleaseMediaBufferReference(accessUnit);
+    }
+
+    mEncoderInputBuffers.clear();
+    mEncoderOutputBuffers.clear();
+}
+
+bool MediaCodecSource::reachedEOS() {
+    return mEncodedReachedEOS && ((mPuller == NULL) || mPullerReachedEOS);
+}
+
+status_t MediaCodecSource::postSynchronouslyAndReturnError(
+        const sp<AMessage> &msg) {
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+
+    if (err != OK) {
+        return err;
+    }
+
+    if (!response->findInt32("err", &err)) {
+        err = OK;
+    }
+
+    return err;
+}
+
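+// Marks encoder-side EOS: drops any unread output buffers, wakes readers,
+// releases the encoder, and replies to any pending stop() requests once both
+// the encoder and the puller have reached EOS.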
+void MediaCodecSource::signalEOS(status_t err) {
+    if (!mEncodedReachedEOS) {
+        ALOGI("encoder (%s) reached EOS", mIsVideo ? "video" : "audio");
+        {
+            Mutex::Autolock autoLock(mOutputBufferLock);
+            // release all unread media buffers
+            for (List<MediaBuffer*>::iterator it = mOutputBufferQueue.begin();
+                    it != mOutputBufferQueue.end(); it++) {
+                (*it)->release();
+            }
+            mOutputBufferQueue.clear();
+            mEncodedReachedEOS = true;
+            mErrorCode = err;
+            mOutputBufferCond.signal();
+        }
+
+        releaseEncoder();
+    }
+    if (mStopping && reachedEOS()) {
+        ALOGI("MediaCodecSource (%s) fully stopped",
+                mIsVideo ? "video" : "audio");
+        // posting reply to everyone that's waiting
+        List<uint32_t>::iterator it;
+        for (it = mStopReplyIDQueue.begin();
+                it != mStopReplyIDQueue.end(); it++) {
+            (new AMessage)->postReply(*it);
+        }
+        mStopReplyIDQueue.clear();
+        mStopping = false;
+    }
+}
+
+void MediaCodecSource::suspend() {
+    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
+    if (mEncoder != NULL) {
+        sp<AMessage> params = new AMessage;
+        params->setInt32("drop-input-frames", true);
+        mEncoder->setParameters(params);
+    }
+}
+
+void MediaCodecSource::resume(int64_t skipFramesBeforeUs) {
+    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
+    if (mEncoder != NULL) {
+        sp<AMessage> params = new AMessage;
+        params->setInt32("drop-input-frames", false);
+        if (skipFramesBeforeUs > 0) {
+            params->setInt64("skip-frames-before", skipFramesBeforeUs);
+        }
+        mEncoder->setParameters(params);
+    }
+}
+
+void MediaCodecSource::scheduleDoMoreWork() {
+    if (mDoMoreWorkPending) {
+        return;
+    }
+
+    mDoMoreWorkPending = true;
+
+    if (mEncoderActivityNotify == NULL) {
+        mEncoderActivityNotify = new AMessage(
+                kWhatEncoderActivity, mReflector->id());
+    }
+    mEncoder->requestActivityNotification(mEncoderActivityNotify);
+}
+
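+// Copy queued input MediaBuffers into free encoder input buffers and queue
+// them to the encoder; a NULL entry in the queue is turned into an EOS buffer.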
+status_t MediaCodecSource::feedEncoderInputBuffers() {
+    while (!mInputBufferQueue.empty()
+            && !mAvailEncoderInputIndices.empty()) {
+        MediaBuffer* mbuf = *mInputBufferQueue.begin();
+        mInputBufferQueue.erase(mInputBufferQueue.begin());
+
+        size_t bufferIndex = *mAvailEncoderInputIndices.begin();
+        mAvailEncoderInputIndices.erase(mAvailEncoderInputIndices.begin());
+
+        int64_t timeUs = 0ll;
+        uint32_t flags = 0;
+        size_t size = 0;
+
+        if (mbuf != NULL) {
+            CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs));
+
+            // push decoding time for video, or drift time for audio
+            if (mIsVideo) {
+                mDecodingTimeQueue.push_back(timeUs);
+            } else {
+#if DEBUG_DRIFT_TIME
+                if (mFirstSampleTimeUs < 0ll) {
+                    mFirstSampleTimeUs = timeUs;
+                }
+
+                int64_t driftTimeUs = 0;
+                if (mbuf->meta_data()->findInt64(kKeyDriftTime, &driftTimeUs)
+                        && driftTimeUs) {
+                    driftTimeUs = timeUs - mFirstSampleTimeUs - driftTimeUs;
+                }
+                mDriftTimeQueue.push_back(driftTimeUs);
+#endif // DEBUG_DRIFT_TIME
+            }
+
+            size = mbuf->size();
+
+            memcpy(mEncoderInputBuffers.itemAt(bufferIndex)->data(),
+                   mbuf->data(), size);
+
+            if (mIsVideo) {
+                // video encoder will release MediaBuffer when done
+                // with underlying data.
+                mEncoderInputBuffers.itemAt(bufferIndex)->meta()
+                        ->setPointer("mediaBuffer", mbuf);
+            } else {
+                mbuf->release();
+            }
+        } else {
+            flags = MediaCodec::BUFFER_FLAG_EOS;
+        }
+
+        status_t err = mEncoder->queueInputBuffer(
+                bufferIndex, 0, size, timeUs, flags);
+
+        if (err != OK) {
+            return err;
+        }
+    }
+
+    return OK;
+}
+
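+// Collect free encoder input slots (non-surface case), feed pending input,
+// then drain encoded output buffers into the queue served by read().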
+status_t MediaCodecSource::doMoreWork() {
+    status_t err;
+
+    if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
+        for (;;) {
+            size_t bufferIndex;
+            err = mEncoder->dequeueInputBuffer(&bufferIndex);
+
+            if (err != OK) {
+                break;
+            }
+
+            mAvailEncoderInputIndices.push_back(bufferIndex);
+        }
+
+        feedEncoderInputBuffers();
+    }
+
+    for (;;) {
+        size_t bufferIndex;
+        size_t offset;
+        size_t size;
+        int64_t timeUs;
+        uint32_t flags;
+        native_handle_t* handle = NULL;
+        err = mEncoder->dequeueOutputBuffer(
+                &bufferIndex, &offset, &size, &timeUs, &flags);
+
+        if (err != OK) {
+            if (err == INFO_FORMAT_CHANGED) {
+                continue;
+            } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
+                mEncoder->getOutputBuffers(&mEncoderOutputBuffers);
+                continue;
+            }
+
+            if (err == -EAGAIN) {
+                err = OK;
+            }
+            break;
+        }
+        if (!(flags & MediaCodec::BUFFER_FLAG_EOS)) {
+            sp<ABuffer> outbuf = mEncoderOutputBuffers.itemAt(bufferIndex);
+
+            MediaBuffer *mbuf = new MediaBuffer(outbuf->size());
+            memcpy(mbuf->data(), outbuf->data(), outbuf->size());
+
+            if (!(flags & MediaCodec::BUFFER_FLAG_CODECCONFIG)) {
+                if (mIsVideo) {
+                    int64_t decodingTimeUs;
+                    if (mFlags & FLAG_USE_SURFACE_INPUT) {
+                        // GraphicBufferSource is supposed to discard samples
+                        // queued before start, and offset timeUs by start time
+                        CHECK_GE(timeUs, 0ll);
+                        // TODO:
+                        // Decoding time for surface source is unavailable,
+                        // use presentation time for now. May need to move
+                        // this logic into MediaCodec.
+                        decodingTimeUs = timeUs;
+                    } else {
+                        CHECK(!mDecodingTimeQueue.empty());
+                        decodingTimeUs = *(mDecodingTimeQueue.begin());
+                        mDecodingTimeQueue.erase(mDecodingTimeQueue.begin());
+                    }
+                    mbuf->meta_data()->setInt64(kKeyDecodingTime, decodingTimeUs);
+
+                    ALOGV("[video] time %lld us (%.2f secs), dts/pts diff %lld",
+                            timeUs, timeUs / 1E6, decodingTimeUs - timeUs);
+                } else {
+                    int64_t driftTimeUs = 0;
+#if DEBUG_DRIFT_TIME
+                    CHECK(!mDriftTimeQueue.empty());
+                    driftTimeUs = *(mDriftTimeQueue.begin());
+                    mDriftTimeQueue.erase(mDriftTimeQueue.begin());
+                    mbuf->meta_data()->setInt64(kKeyDriftTime, driftTimeUs);
+#endif // DEBUG_DRIFT_TIME
+                    ALOGV("[audio] time %lld us (%.2f secs), drift %lld",
+                            timeUs, timeUs / 1E6, driftTimeUs);
+                }
+                mbuf->meta_data()->setInt64(kKeyTime, timeUs);
+            } else {
+                mbuf->meta_data()->setInt32(kKeyIsCodecConfig, true);
+            }
+            if (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME) {
+                mbuf->meta_data()->setInt32(kKeyIsSyncFrame, true);
+            }
+            mbuf->setObserver(this);
+            mbuf->add_ref();
+
+            {
+                Mutex::Autolock autoLock(mOutputBufferLock);
+                mOutputBufferQueue.push_back(mbuf);
+                mOutputBufferCond.signal();
+            }
+        }
+
+        mEncoder->releaseOutputBuffer(bufferIndex);
+
+        if (flags & MediaCodec::BUFFER_FLAG_EOS) {
+            err = ERROR_END_OF_STREAM;
+            break;
+        }
+    }
+
+    return err;
+}
+
+status_t MediaCodecSource::onStart(MetaData *params) {
+    if (mStopping) {
+        ALOGE("Failed to start while we're stopping");
+        return INVALID_OPERATION;
+    }
+
+    if (mStarted) {
+        ALOGI("MediaCodecSource (%s) resuming", mIsVideo ? "video" : "audio");
+        if (mFlags & FLAG_USE_SURFACE_INPUT) {
+            resume();
+        } else {
+            CHECK(mPuller != NULL);
+            mPuller->resume();
+        }
+        return OK;
+    }
+
+    ALOGI("MediaCodecSource (%s) starting", mIsVideo ? "video" : "audio");
+
+    status_t err = OK;
+
+    if (mFlags & FLAG_USE_SURFACE_INPUT) {
+        int64_t startTimeUs;
+        if (!params || !params->findInt64(kKeyTime, &startTimeUs)) {
+            startTimeUs = -1ll;
+        }
+        resume(startTimeUs);
+        scheduleDoMoreWork();
+    } else {
+        CHECK(mPuller != NULL);
+        sp<AMessage> notify = new AMessage(
+                kWhatPullerNotify, mReflector->id());
+        err = mPuller->start(params, notify);
+        if (err != OK) {
+            mPullerReachedEOS = true;
+            return err;
+        }
+    }
+
+    ALOGI("MediaCodecSource (%s) started", mIsVideo ? "video" : "audio");
+
+    mStarted = true;
+    return OK;
+}
+
+void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+    case kWhatPullerNotify:
+    {
+        MediaBuffer *mbuf;
+        CHECK(msg->findPointer("accessUnit", (void**)&mbuf));
+
+        if (mbuf == NULL) {
+            ALOGI("puller (%s) reached EOS",
+                    mIsVideo ? "video" : "audio");
+            mPullerReachedEOS = true;
+        }
+
+        if (mEncoder == NULL) {
+            ALOGV("got msg '%s' after encoder shutdown.",
+                  msg->debugString().c_str());
+
+            if (mbuf != NULL) {
+                mbuf->release();
+            } else {
+                signalEOS();
+            }
+            break;
+        }
+
+        mInputBufferQueue.push_back(mbuf);
+
+        feedEncoderInputBuffers();
+        scheduleDoMoreWork();
+
+        break;
+    }
+    case kWhatEncoderActivity:
+    {
+        mDoMoreWorkPending = false;
+
+        if (mEncoder == NULL) {
+            break;
+        }
+
+        status_t err = doMoreWork();
+
+        if (err == OK) {
+            scheduleDoMoreWork();
+        } else {
+            // reached EOS, or error
+            signalEOS(err);
+        }
+
+        break;
+    }
+    case kWhatStart:
+    {
+        uint32_t replyID;
+        CHECK(msg->senderAwaitsResponse(&replyID));
+
+        sp<RefBase> obj;
+        CHECK(msg->findObject("meta", &obj));
+        MetaData *params = static_cast<MetaData *>(obj.get());
+
+        sp<AMessage> response = new AMessage;
+        response->setInt32("err", onStart(params));
+        response->postReply(replyID);
+        break;
+    }
+    case kWhatStop:
+    {
+        ALOGI("MediaCodecSource (%s) stopping", mIsVideo ? "video" : "audio");
+
+        uint32_t replyID;
+        CHECK(msg->senderAwaitsResponse(&replyID));
+
+        if (reachedEOS()) {
+            // if we already reached EOS, reply and return now
+            ALOGI("MediaCodecSource (%s) already stopped",
+                    mIsVideo ? "video" : "audio");
+            (new AMessage)->postReply(replyID);
+            break;
+        }
+
+        mStopReplyIDQueue.push_back(replyID);
+        if (mStopping) {
+            // nothing to do if we're already stopping, reply will be posted
+            // to all when we're stopped.
+            break;
+        }
+
+        mStopping = true;
+
+        // If using surface input, signal EOS to the source and wait for the
+        // EOS to come back from the encoder. Otherwise, stop the puller, then
+        // release the encoder and post EOS if we haven't done so already.
+        if (mFlags & FLAG_USE_SURFACE_INPUT) {
+            mEncoder->signalEndOfInputStream();
+        } else {
+            CHECK(mPuller != NULL);
+            mPuller->stopAsync();
+            signalEOS();
+        }
+        break;
+    }
+    case kWhatPause:
+    {
+        if (mFlags & FLAG_USE_SURFACE_INPUT) {
+            suspend();
+        } else {
+            CHECK(mPuller != NULL);
+            mPuller->pause();
+        }
+        break;
+    }
+    default:
+        TRESPASS();
+    }
+}
+
+} // namespace android
diff --git a/media/libstagefright/NuCachedSource2.cpp b/media/libstagefright/NuCachedSource2.cpp
index 05e599b..1287fb1 100644
--- a/media/libstagefright/NuCachedSource2.cpp
+++ b/media/libstagefright/NuCachedSource2.cpp
@@ -213,7 +213,14 @@
 
     mLooper->setName("NuCachedSource2");
     mLooper->registerHandler(mReflector);
-    mLooper->start();
+
+    // It may not be obvious why our looper thread needs to be able to call
+    // into Java even though it doesn't appear to do so at all:
+    // IMediaHTTPConnection may be (and most likely is) implemented in Java,
+    // and a local Java IBinder will call directly into JNI methods.
+    // So whenever we call DataSource::readAt it may end up in a call to
+    // IMediaHTTPConnection::readAt and therefore call back into Java.
+    mLooper->start(false /* runOnCallingThread */, true /* canCallJava */);
 
     Mutex::Autolock autoLock(mLock);
     (new AMessage(kWhatFetchMore, mReflector->id()))->post();
diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp
index 7bc7da2..64f56e9 100644
--- a/media/libstagefright/NuMediaExtractor.cpp
+++ b/media/libstagefright/NuMediaExtractor.cpp
@@ -58,7 +58,9 @@
 }
 
 status_t NuMediaExtractor::setDataSource(
-        const char *path, const KeyedVector<String8, String8> *headers) {
+        const sp<IMediaHTTPService> &httpService,
+        const char *path,
+        const KeyedVector<String8, String8> *headers) {
     Mutex::Autolock autoLock(mLock);
 
     if (mImpl != NULL) {
@@ -66,7 +68,7 @@
     }
 
     sp<DataSource> dataSource =
-        DataSource::CreateFromURI(path, headers);
+        DataSource::CreateFromURI(httpService, path, headers);
 
     if (dataSource == NULL) {
         return -ENOENT;
diff --git a/media/libstagefright/SkipCutBuffer.cpp b/media/libstagefright/SkipCutBuffer.cpp
index 773854f..e2e6d79 100644
--- a/media/libstagefright/SkipCutBuffer.cpp
+++ b/media/libstagefright/SkipCutBuffer.cpp
@@ -25,7 +25,7 @@
 namespace android {
 
 SkipCutBuffer::SkipCutBuffer(int32_t skip, int32_t cut) {
-    mFrontPadding = skip;
+    mFrontPadding = mSkip = skip;
     mBackPadding = cut;
     mWriteHead = 0;
     mReadHead = 0;
@@ -94,6 +94,7 @@
 
 void SkipCutBuffer::clear() {
     mWriteHead = mReadHead = 0;
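+    // restore the front padding so the initial skip is applied again after a clear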
+    mFrontPadding = mSkip;
 }
 
 void SkipCutBuffer::write(const char *src, size_t num) {
diff --git a/media/libstagefright/StagefrightMediaScanner.cpp b/media/libstagefright/StagefrightMediaScanner.cpp
index af8186c..edd12fc 100644
--- a/media/libstagefright/StagefrightMediaScanner.cpp
+++ b/media/libstagefright/StagefrightMediaScanner.cpp
@@ -24,6 +24,7 @@
 
 #include <media/stagefright/StagefrightMediaScanner.h>
 
+#include <media/IMediaHTTPService.h>
 #include <media/mediametadataretriever.h>
 #include <private/media/VideoFrame.h>
 
@@ -147,7 +148,7 @@
     status_t status;
     if (fd < 0) {
         // couldn't open it locally, maybe the media server can?
-        status = mRetriever->setDataSource(path);
+        status = mRetriever->setDataSource(NULL /* httpService */, path);
     } else {
         status = mRetriever->setDataSource(fd, 0, 0x7ffffffffffffffL);
         close(fd);
diff --git a/media/libstagefright/StagefrightMetadataRetriever.cpp b/media/libstagefright/StagefrightMetadataRetriever.cpp
index fcd9a85..9475d05 100644
--- a/media/libstagefright/StagefrightMetadataRetriever.cpp
+++ b/media/libstagefright/StagefrightMetadataRetriever.cpp
@@ -21,6 +21,7 @@
 
 #include "include/StagefrightMetadataRetriever.h"
 
+#include <media/IMediaHTTPService.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/ColorConverter.h>
 #include <media/stagefright/DataSource.h>
@@ -51,7 +52,9 @@
 }
 
 status_t StagefrightMetadataRetriever::setDataSource(
-        const char *uri, const KeyedVector<String8, String8> *headers) {
+        const sp<IMediaHTTPService> &httpService,
+        const char *uri,
+        const KeyedVector<String8, String8> *headers) {
     ALOGV("setDataSource(%s)", uri);
 
     mParsedMetaData = false;
@@ -59,7 +62,7 @@
     delete mAlbumArt;
     mAlbumArt = NULL;
 
-    mSource = DataSource::CreateFromURI(uri, headers);
+    mSource = DataSource::CreateFromURI(httpService, uri, headers);
 
     if (mSource == NULL) {
         ALOGE("Unable to create data source for '%s'.", uri);
diff --git a/media/libstagefright/chromium_http/Android.mk b/media/libstagefright/chromium_http/Android.mk
deleted file mode 100644
index f26f386..0000000
--- a/media/libstagefright/chromium_http/Android.mk
+++ /dev/null
@@ -1,37 +0,0 @@
-LOCAL_PATH:= $(call my-dir)
-
-ifneq ($(TARGET_BUILD_PDK), true)
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:=       \
-        DataUriSource.cpp \
-        ChromiumHTTPDataSource.cpp \
-        support.cpp \
-        chromium_http_stub.cpp
-
-LOCAL_C_INCLUDES:= \
-        $(TOP)/frameworks/av/media/libstagefright \
-        $(TOP)/frameworks/native/include/media/openmax \
-        external/chromium \
-        external/chromium/android
-
-LOCAL_CFLAGS += -Wno-multichar
-
-LOCAL_SHARED_LIBRARIES += \
-        libstlport \
-        libchromium_net \
-        libutils \
-        libcutils \
-        liblog \
-        libstagefright_foundation \
-        libstagefright \
-        libdrmframework
-
-include external/stlport/libstlport.mk
-
-LOCAL_MODULE:= libstagefright_chromium_http
-
-LOCAL_MODULE_TAGS := optional
-
-include $(BUILD_SHARED_LIBRARY)
-endif
diff --git a/media/libstagefright/chromium_http/ChromiumHTTPDataSource.cpp b/media/libstagefright/chromium_http/ChromiumHTTPDataSource.cpp
deleted file mode 100644
index 7e5c280..0000000
--- a/media/libstagefright/chromium_http/ChromiumHTTPDataSource.cpp
+++ /dev/null
@@ -1,355 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "ChromiumHTTPDataSource"
-#include <media/stagefright/foundation/ADebug.h>
-
-#include "include/ChromiumHTTPDataSource.h"
-
-#include <media/stagefright/foundation/ALooper.h>
-#include <media/stagefright/MediaErrors.h>
-
-#include "support.h"
-
-#include <cutils/properties.h> // for property_get
-
-namespace android {
-
-ChromiumHTTPDataSource::ChromiumHTTPDataSource(uint32_t flags)
-    : mFlags(flags),
-      mState(DISCONNECTED),
-      mDelegate(new SfDelegate),
-      mCurrentOffset(0),
-      mIOResult(OK),
-      mContentSize(-1),
-      mDecryptHandle(NULL),
-      mDrmManagerClient(NULL) {
-    mDelegate->setOwner(this);
-}
-
-ChromiumHTTPDataSource::~ChromiumHTTPDataSource() {
-    disconnect();
-
-    delete mDelegate;
-    mDelegate = NULL;
-
-    clearDRMState_l();
-
-    if (mDrmManagerClient != NULL) {
-        delete mDrmManagerClient;
-        mDrmManagerClient = NULL;
-    }
-}
-
-status_t ChromiumHTTPDataSource::connect(
-        const char *uri,
-        const KeyedVector<String8, String8> *headers,
-        off64_t offset) {
-    Mutex::Autolock autoLock(mLock);
-
-    uid_t uid;
-    if (getUID(&uid)) {
-        mDelegate->setUID(uid);
-    }
-
-#if defined(LOG_NDEBUG) && !LOG_NDEBUG
-    LOG_PRI(ANDROID_LOG_VERBOSE, LOG_TAG, "connect on behalf of uid %d", uid);
-#endif
-
-    return connect_l(uri, headers, offset);
-}
-
-status_t ChromiumHTTPDataSource::connect_l(
-        const char *uri,
-        const KeyedVector<String8, String8> *headers,
-        off64_t offset) {
-    if (mState != DISCONNECTED) {
-        disconnect_l();
-    }
-
-#if defined(LOG_NDEBUG) && !LOG_NDEBUG
-    LOG_PRI(ANDROID_LOG_VERBOSE, LOG_TAG,
-                "connect to <URL suppressed> @%lld", offset);
-#endif
-
-    mURI = uri;
-    mContentType = String8("application/octet-stream");
-
-    if (headers != NULL) {
-        mHeaders = *headers;
-    } else {
-        mHeaders.clear();
-    }
-
-    mState = CONNECTING;
-    mContentSize = -1;
-    mCurrentOffset = offset;
-
-    mDelegate->initiateConnection(mURI.c_str(), &mHeaders, offset);
-
-    while (mState == CONNECTING || mState == DISCONNECTING) {
-        mCondition.wait(mLock);
-    }
-
-    return mState == CONNECTED ? OK : mIOResult;
-}
-
-void ChromiumHTTPDataSource::onRedirect(const char *url) {
-    Mutex::Autolock autoLock(mLock);
-    mURI = url;
-}
-
-void ChromiumHTTPDataSource::onConnectionEstablished(
-        int64_t contentSize, const char *contentType) {
-    Mutex::Autolock autoLock(mLock);
-
-    if (mState != CONNECTING) {
-        // We may have initiated disconnection.
-        CHECK_EQ(mState, DISCONNECTING);
-        return;
-    }
-
-    mState = CONNECTED;
-    mContentSize = (contentSize < 0) ? -1 : contentSize + mCurrentOffset;
-    mContentType = String8(contentType);
-    mCondition.broadcast();
-}
-
-void ChromiumHTTPDataSource::onConnectionFailed(status_t err) {
-    Mutex::Autolock autoLock(mLock);
-    mState = DISCONNECTED;
-    mCondition.broadcast();
-
-    // mURI.clear();
-
-    mIOResult = err;
-}
-
-void ChromiumHTTPDataSource::disconnect() {
-    Mutex::Autolock autoLock(mLock);
-    disconnect_l();
-}
-
-void ChromiumHTTPDataSource::disconnect_l() {
-    if (mState == DISCONNECTED) {
-        return;
-    }
-
-    mState = DISCONNECTING;
-    mIOResult = -EINTR;
-
-    mDelegate->initiateDisconnect();
-
-    while (mState == DISCONNECTING) {
-        mCondition.wait(mLock);
-    }
-
-    CHECK_EQ((int)mState, (int)DISCONNECTED);
-}
-
-status_t ChromiumHTTPDataSource::initCheck() const {
-    Mutex::Autolock autoLock(mLock);
-
-    return mState == CONNECTED ? OK : NO_INIT;
-}
-
-ssize_t ChromiumHTTPDataSource::readAt(off64_t offset, void *data, size_t size) {
-    Mutex::Autolock autoLock(mLock);
-
-    if (mState != CONNECTED) {
-        return INVALID_OPERATION;
-    }
-
-#if 0
-    char value[PROPERTY_VALUE_MAX];
-    if (property_get("media.stagefright.disable-net", value, 0)
-            && (!strcasecmp(value, "true") || !strcmp(value, "1"))) {
-        LOG_PRI(ANDROID_LOG_INFO, LOG_TAG, "Simulating that the network is down.");
-        disconnect_l();
-        return ERROR_IO;
-    }
-#endif
-
-    if (offset != mCurrentOffset) {
-        AString tmp = mURI;
-        KeyedVector<String8, String8> tmpHeaders = mHeaders;
-
-        disconnect_l();
-
-        status_t err = connect_l(tmp.c_str(), &tmpHeaders, offset);
-
-        if (err != OK) {
-            return err;
-        }
-    }
-
-    mState = READING;
-
-    int64_t startTimeUs = ALooper::GetNowUs();
-
-    mDelegate->initiateRead(data, size);
-
-    while (mState == READING) {
-        mCondition.wait(mLock);
-    }
-
-    if (mIOResult < OK) {
-        return mIOResult;
-    }
-
-    if (mState == CONNECTED) {
-        int64_t delayUs = ALooper::GetNowUs() - startTimeUs;
-
-        // The read operation was successful, mIOResult contains
-        // the number of bytes read.
-        addBandwidthMeasurement(mIOResult, delayUs);
-
-        mCurrentOffset += mIOResult;
-        return mIOResult;
-    }
-
-    return ERROR_IO;
-}
-
-void ChromiumHTTPDataSource::onReadCompleted(ssize_t size) {
-    Mutex::Autolock autoLock(mLock);
-
-    mIOResult = size;
-
-    if (mState == READING) {
-        mState = CONNECTED;
-        mCondition.broadcast();
-    }
-}
-
-status_t ChromiumHTTPDataSource::getSize(off64_t *size) {
-    Mutex::Autolock autoLock(mLock);
-
-    if (mContentSize < 0) {
-        return ERROR_UNSUPPORTED;
-    }
-
-    *size = mContentSize;
-
-    return OK;
-}
-
-uint32_t ChromiumHTTPDataSource::flags() {
-    return kWantsPrefetching | kIsHTTPBasedSource;
-}
-
-// static
-void ChromiumHTTPDataSource::InitiateRead(
-        ChromiumHTTPDataSource *me, void *data, size_t size) {
-    me->initiateRead(data, size);
-}
-
-void ChromiumHTTPDataSource::initiateRead(void *data, size_t size) {
-    mDelegate->initiateRead(data, size);
-}
-
-void ChromiumHTTPDataSource::onDisconnectComplete() {
-    Mutex::Autolock autoLock(mLock);
-    CHECK_EQ((int)mState, (int)DISCONNECTING);
-
-    mState = DISCONNECTED;
-    // mURI.clear();
-    mIOResult = -ENOTCONN;
-
-    mCondition.broadcast();
-}
-
-sp<DecryptHandle> ChromiumHTTPDataSource::DrmInitialization(const char* mime) {
-    Mutex::Autolock autoLock(mLock);
-
-    if (mDrmManagerClient == NULL) {
-        mDrmManagerClient = new DrmManagerClient();
-    }
-
-    if (mDrmManagerClient == NULL) {
-        return NULL;
-    }
-
-    if (mDecryptHandle == NULL) {
-        /* Note if redirect occurs, mUri is the redirect uri instead of the
-         * original one
-         */
-        mDecryptHandle = mDrmManagerClient->openDecryptSession(
-                String8(mURI.c_str()), mime);
-    }
-
-    if (mDecryptHandle == NULL) {
-        delete mDrmManagerClient;
-        mDrmManagerClient = NULL;
-    }
-
-    return mDecryptHandle;
-}
-
-void ChromiumHTTPDataSource::getDrmInfo(
-        sp<DecryptHandle> &handle, DrmManagerClient **client) {
-    Mutex::Autolock autoLock(mLock);
-
-    handle = mDecryptHandle;
-    *client = mDrmManagerClient;
-}
-
-String8 ChromiumHTTPDataSource::getUri() {
-    Mutex::Autolock autoLock(mLock);
-
-    return String8(mURI.c_str());
-}
-
-String8 ChromiumHTTPDataSource::getMIMEType() const {
-    Mutex::Autolock autoLock(mLock);
-
-    return mContentType;
-}
-
-void ChromiumHTTPDataSource::clearDRMState_l() {
-    if (mDecryptHandle != NULL) {
-        // To release mDecryptHandle
-        CHECK(mDrmManagerClient);
-        mDrmManagerClient->closeDecryptSession(mDecryptHandle);
-        mDecryptHandle = NULL;
-    }
-}
-
-status_t ChromiumHTTPDataSource::reconnectAtOffset(off64_t offset) {
-    Mutex::Autolock autoLock(mLock);
-
-    if (mURI.empty()) {
-        return INVALID_OPERATION;
-    }
-
-    LOG_PRI(ANDROID_LOG_INFO, LOG_TAG, "Reconnecting...");
-    status_t err = connect_l(mURI.c_str(), &mHeaders, offset);
-    if (err != OK) {
-        LOG_PRI(ANDROID_LOG_INFO, LOG_TAG, "Reconnect failed w/ err 0x%08x", err);
-    }
-
-    return err;
-}
-
-// static
-status_t ChromiumHTTPDataSource::UpdateProxyConfig(
-        const char *host, int32_t port, const char *exclusionList) {
-    return SfDelegate::UpdateProxyConfig(host, port, exclusionList);
-}
-
-}  // namespace android
-
diff --git a/media/libstagefright/chromium_http/DataUriSource.cpp b/media/libstagefright/chromium_http/DataUriSource.cpp
deleted file mode 100644
index ecf3fa1..0000000
--- a/media/libstagefright/chromium_http/DataUriSource.cpp
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <include/DataUriSource.h>
-
-#include <net/base/data_url.h>
-#include <googleurl/src/gurl.h>
-
-
-namespace android {
-
-DataUriSource::DataUriSource(const char *uri) :
-    mDataUri(uri),
-    mInited(NO_INIT) {
-
-    // Copy1: const char *uri -> String8 mDataUri.
-    std::string mimeTypeStr, unusedCharsetStr, dataStr;
-    // Copy2: String8 mDataUri -> std::string
-    const bool ret = net::DataURL::Parse(
-            GURL(std::string(mDataUri.string())),
-            &mimeTypeStr, &unusedCharsetStr, &dataStr);
-    // Copy3: std::string dataStr -> AString mData
-    mData.setTo(dataStr.data(), dataStr.length());
-    mInited = ret ? OK : UNKNOWN_ERROR;
-
-    // The chromium data url implementation defaults to using "text/plain"
-    // if no mime type is specified. We prefer to leave this unspecified
-    // instead, since the mime type is sniffed in most cases.
-    if (mimeTypeStr != "text/plain") {
-        mMimeType = mimeTypeStr.c_str();
-    }
-}
-
-ssize_t DataUriSource::readAt(off64_t offset, void *out, size_t size) {
-    if (mInited != OK) {
-        return mInited;
-    }
-
-    const off64_t length = mData.size();
-    if (offset >= length) {
-        return UNKNOWN_ERROR;
-    }
-
-    const char *dataBuf = mData.c_str();
-    const size_t bytesToCopy =
-            offset + size >= length ? (length - offset) : size;
-
-    if (bytesToCopy > 0) {
-        memcpy(out, dataBuf + offset, bytesToCopy);
-    }
-
-    return bytesToCopy;
-}
-
-}  // namespace android
diff --git a/media/libstagefright/chromium_http/chromium_http_stub.cpp b/media/libstagefright/chromium_http/chromium_http_stub.cpp
deleted file mode 100644
index 289f6de..0000000
--- a/media/libstagefright/chromium_http/chromium_http_stub.cpp
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <dlfcn.h>
-
-#include <include/chromium_http_stub.h>
-#include <include/ChromiumHTTPDataSource.h>
-#include <include/DataUriSource.h>
-
-namespace android {
-
-HTTPBase *createChromiumHTTPDataSource(uint32_t flags) {
-    return new ChromiumHTTPDataSource(flags);
-}
-
-status_t UpdateChromiumHTTPDataSourceProxyConfig(
-        const char *host, int32_t port, const char *exclusionList) {
-    return ChromiumHTTPDataSource::UpdateProxyConfig(host, port, exclusionList);
-}
-
-DataSource *createDataUriSource(const char *uri) {
-    return new DataUriSource(uri);
-}
-
-}
diff --git a/media/libstagefright/chromium_http/support.cpp b/media/libstagefright/chromium_http/support.cpp
deleted file mode 100644
index 3b33212..0000000
--- a/media/libstagefright/chromium_http/support.cpp
+++ /dev/null
@@ -1,559 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "ChromiumHTTPDataSourceSupport"
-#include <utils/Log.h>
-
-#include <media/stagefright/foundation/AString.h>
-
-#include "support.h"
-
-#include "android/net/android_network_library_impl.h"
-#include "base/logging.h"
-#include "base/threading/thread.h"
-#include "net/base/cert_verifier.h"
-#include "net/base/cookie_monster.h"
-#include "net/base/host_resolver.h"
-#include "net/base/ssl_config_service.h"
-#include "net/http/http_auth_handler_factory.h"
-#include "net/http/http_cache.h"
-#include "net/proxy/proxy_config_service_android.h"
-
-#include "include/ChromiumHTTPDataSource.h"
-
-#include <cutils/log.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/Utils.h>
-#include <string>
-
-namespace android {
-
-static Mutex gNetworkThreadLock;
-static base::Thread *gNetworkThread = NULL;
-static scoped_refptr<SfRequestContext> gReqContext;
-static scoped_ptr<net::NetworkChangeNotifier> gNetworkChangeNotifier;
-
-bool logMessageHandler(
-        int severity,
-        const char* file,
-        int line,
-        size_t message_start,
-        const std::string& str) {
-    int androidSeverity = ANDROID_LOG_VERBOSE;
-    switch(severity) {
-    case logging::LOG_FATAL:
-        androidSeverity = ANDROID_LOG_FATAL;
-        break;
-    case logging::LOG_ERROR_REPORT:
-    case logging::LOG_ERROR:
-        androidSeverity = ANDROID_LOG_ERROR;
-        break;
-    case logging::LOG_WARNING:
-        androidSeverity = ANDROID_LOG_WARN;
-        break;
-    default:
-        androidSeverity = ANDROID_LOG_VERBOSE;
-        break;
-    }
-    android_printLog(androidSeverity, "chromium-libstagefright",
-                    "%s:%d: %s", file, line, str.c_str());
-    return false;
-}
-
-struct AutoPrioritySaver {
-    AutoPrioritySaver()
-        : mTID(androidGetTid()),
-          mPrevPriority(androidGetThreadPriority(mTID)) {
-        androidSetThreadPriority(mTID, ANDROID_PRIORITY_NORMAL);
-    }
-
-    ~AutoPrioritySaver() {
-        androidSetThreadPriority(mTID, mPrevPriority);
-    }
-
-private:
-    pid_t mTID;
-    int mPrevPriority;
-
-    DISALLOW_EVIL_CONSTRUCTORS(AutoPrioritySaver);
-};
-
-static void InitializeNetworkThreadIfNecessary() {
-    Mutex::Autolock autoLock(gNetworkThreadLock);
-
-    if (gNetworkThread == NULL) {
-        // Make sure any threads spawned by the chromium framework are
-        // running at normal priority instead of inheriting this thread's.
-        AutoPrioritySaver saver;
-
-        gNetworkThread = new base::Thread("network");
-        base::Thread::Options options;
-        options.message_loop_type = MessageLoop::TYPE_IO;
-        CHECK(gNetworkThread->StartWithOptions(options));
-
-        gReqContext = new SfRequestContext;
-
-        gNetworkChangeNotifier.reset(net::NetworkChangeNotifier::Create());
-
-        net::AndroidNetworkLibrary::RegisterSharedInstance(
-                new SfNetworkLibrary);
-        logging::SetLogMessageHandler(logMessageHandler);
-    }
-}
-
-static void MY_LOGI(const char *s) {
-    LOG_PRI(ANDROID_LOG_INFO, LOG_TAG, "%s", s);
-}
-
-static void MY_LOGV(const char *s) {
-#if !defined(LOG_NDEBUG) || LOG_NDEBUG == 0
-    LOG_PRI(ANDROID_LOG_VERBOSE, LOG_TAG, "%s", s);
-#endif
-}
-
-SfNetLog::SfNetLog()
-    : mNextID(1) {
-}
-
-void SfNetLog::AddEntry(
-        EventType type,
-        const base::TimeTicks &time,
-        const Source &source,
-        EventPhase phase,
-        EventParameters *params) {
-#if 0
-    MY_LOGI(StringPrintf(
-                "AddEntry time=%s type=%s source=%s phase=%s\n",
-                TickCountToString(time).c_str(),
-                EventTypeToString(type),
-                SourceTypeToString(source.type),
-                EventPhaseToString(phase)).c_str());
-#endif
-}
-
-uint32 SfNetLog::NextID() {
-    return mNextID++;
-}
-
-net::NetLog::LogLevel SfNetLog::GetLogLevel() const {
-    return LOG_BASIC;
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-SfRequestContext::SfRequestContext() {
-    mUserAgent = MakeUserAgent().c_str();
-
-    set_net_log(new SfNetLog());
-
-    set_host_resolver(
-        net::CreateSystemHostResolver(
-                net::HostResolver::kDefaultParallelism,
-                NULL /* resolver_proc */,
-                net_log()));
-
-    set_ssl_config_service(
-        net::SSLConfigService::CreateSystemSSLConfigService());
-
-    mProxyConfigService = new net::ProxyConfigServiceAndroid;
-
-    set_proxy_service(net::ProxyService::CreateWithoutProxyResolver(
-        mProxyConfigService, net_log()));
-
-    set_http_transaction_factory(new net::HttpCache(
-            host_resolver(),
-            new net::CertVerifier(),
-            dnsrr_resolver(),
-            dns_cert_checker(),
-            proxy_service(),
-            ssl_config_service(),
-            net::HttpAuthHandlerFactory::CreateDefault(host_resolver()),
-            network_delegate(),
-            net_log(),
-            NULL));  // backend_factory
-
-    set_cookie_store(new net::CookieMonster(NULL, NULL));
-}
-
-const std::string &SfRequestContext::GetUserAgent(const GURL &url) const {
-    return mUserAgent;
-}
-
-status_t SfRequestContext::updateProxyConfig(
-        const char *host, int32_t port, const char *exclusionList) {
-    Mutex::Autolock autoLock(mProxyConfigLock);
-
-    if (host == NULL || *host == '\0') {
-        MY_LOGV("updateProxyConfig NULL");
-
-        std::string proxy;
-        std::string exList;
-        mProxyConfigService->UpdateProxySettings(proxy, exList);
-    } else {
-#if !defined(LOG_NDEBUG) || LOG_NDEBUG == 0
-        LOG_PRI(ANDROID_LOG_VERBOSE, LOG_TAG,
-                "updateProxyConfig %s:%d, exclude '%s'",
-                host, port, exclusionList);
-#endif
-
-        std::string proxy = StringPrintf("%s:%d", host, port).c_str();
-        std::string exList = exclusionList;
-        mProxyConfigService->UpdateProxySettings(proxy, exList);
-    }
-
-    return OK;
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-SfNetworkLibrary::SfNetworkLibrary() {}
-
-SfNetworkLibrary::VerifyResult SfNetworkLibrary::VerifyX509CertChain(
-        const std::vector<std::string>& cert_chain,
-        const std::string& hostname,
-        const std::string& auth_type) {
-    return VERIFY_OK;
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-SfDelegate::SfDelegate()
-    : mOwner(NULL),
-      mURLRequest(NULL),
-      mReadBuffer(new net::IOBufferWithSize(8192)),
-      mNumBytesRead(0),
-      mNumBytesTotal(0),
-      mDataDestination(NULL),
-      mAtEOS(false) {
-    InitializeNetworkThreadIfNecessary();
-}
-
-SfDelegate::~SfDelegate() {
-    CHECK(mURLRequest == NULL);
-}
-
-// static
-status_t SfDelegate::UpdateProxyConfig(
-        const char *host, int32_t port, const char *exclusionList) {
-    InitializeNetworkThreadIfNecessary();
-
-    return gReqContext->updateProxyConfig(host, port, exclusionList);
-}
-
-void SfDelegate::setOwner(ChromiumHTTPDataSource *owner) {
-    mOwner = owner;
-}
-
-void SfDelegate::setUID(uid_t uid) {
-    gReqContext->setUID(uid);
-}
-
-bool SfDelegate::getUID(uid_t *uid) const {
-    return gReqContext->getUID(uid);
-}
-
-void SfDelegate::OnReceivedRedirect(
-            net::URLRequest *request, const GURL &new_url, bool *defer_redirect) {
-    MY_LOGV("OnReceivedRedirect");
-    mOwner->onRedirect(new_url.spec().c_str());
-}
-
-void SfDelegate::OnAuthRequired(
-            net::URLRequest *request, net::AuthChallengeInfo *auth_info) {
-    MY_LOGV("OnAuthRequired");
-
-    inherited::OnAuthRequired(request, auth_info);
-}
-
-void SfDelegate::OnCertificateRequested(
-            net::URLRequest *request, net::SSLCertRequestInfo *cert_request_info) {
-    MY_LOGV("OnCertificateRequested");
-
-    inherited::OnCertificateRequested(request, cert_request_info);
-}
-
-void SfDelegate::OnSSLCertificateError(
-            net::URLRequest *request, int cert_error, net::X509Certificate *cert) {
-    fprintf(stderr, "OnSSLCertificateError cert_error=%d\n", cert_error);
-
-    inherited::OnSSLCertificateError(request, cert_error, cert);
-}
-
-void SfDelegate::OnGetCookies(net::URLRequest *request, bool blocked_by_policy) {
-    MY_LOGV("OnGetCookies");
-}
-
-void SfDelegate::OnSetCookie(
-        net::URLRequest *request,
-        const std::string &cookie_line,
-        const net::CookieOptions &options,
-        bool blocked_by_policy) {
-    MY_LOGV("OnSetCookie");
-}
-
-void SfDelegate::OnResponseStarted(net::URLRequest *request) {
-    if (request->status().status() != net::URLRequestStatus::SUCCESS) {
-        MY_LOGI(StringPrintf(
-                    "Request failed with status %d and os_error %d",
-                    request->status().status(),
-                    request->status().os_error()).c_str());
-
-        delete mURLRequest;
-        mURLRequest = NULL;
-
-        mOwner->onConnectionFailed(ERROR_IO);
-        return;
-    } else if (mRangeRequested && request->GetResponseCode() != 206) {
-        MY_LOGI(StringPrintf(
-                    "We requested a content range, but server didn't "
-                    "support that. (responded with %d)",
-                    request->GetResponseCode()).c_str());
-
-        delete mURLRequest;
-        mURLRequest = NULL;
-
-        mOwner->onConnectionFailed(-EPIPE);
-        return;
-    } else if ((request->GetResponseCode() / 100) != 2) {
-        MY_LOGI(StringPrintf(
-                    "Server responded with http status %d",
-                    request->GetResponseCode()).c_str());
-
-        delete mURLRequest;
-        mURLRequest = NULL;
-
-        mOwner->onConnectionFailed(ERROR_IO);
-        return;
-    }
-
-    MY_LOGV("OnResponseStarted");
-
-    std::string headers;
-    request->GetAllResponseHeaders(&headers);
-
-    MY_LOGV(StringPrintf("response headers: %s", headers.c_str()).c_str());
-
-    std::string contentType;
-    request->GetResponseHeaderByName("Content-Type", &contentType);
-
-    mOwner->onConnectionEstablished(
-            request->GetExpectedContentSize(), contentType.c_str());
-}
-
-void SfDelegate::OnReadCompleted(net::URLRequest *request, int bytes_read) {
-    if (bytes_read == -1) {
-        MY_LOGI(StringPrintf(
-                    "OnReadCompleted, read failed, status %d",
-                    request->status().status()).c_str());
-
-        mOwner->onReadCompleted(ERROR_IO);
-        return;
-    }
-
-    MY_LOGV(StringPrintf("OnReadCompleted, read %d bytes", bytes_read).c_str());
-
-    if (bytes_read < 0) {
-        MY_LOGI(StringPrintf(
-                    "Read failed w/ status %d\n",
-                    request->status().status()).c_str());
-
-        mOwner->onReadCompleted(ERROR_IO);
-        return;
-    } else if (bytes_read == 0) {
-        mAtEOS = true;
-        mOwner->onReadCompleted(mNumBytesRead);
-        return;
-    }
-
-    CHECK_GT(bytes_read, 0);
-    CHECK_LE(mNumBytesRead + bytes_read, mNumBytesTotal);
-
-    memcpy((uint8_t *)mDataDestination + mNumBytesRead,
-           mReadBuffer->data(),
-           bytes_read);
-
-    mNumBytesRead += bytes_read;
-
-    readMore(request);
-}
-
-void SfDelegate::readMore(net::URLRequest *request) {
-    while (mNumBytesRead < mNumBytesTotal) {
-        size_t copy = mNumBytesTotal - mNumBytesRead;
-        if (copy > mReadBuffer->size()) {
-            copy = mReadBuffer->size();
-        }
-
-        int n;
-        if (request->Read(mReadBuffer, copy, &n)) {
-            MY_LOGV(StringPrintf("Read %d bytes directly.", n).c_str());
-
-            CHECK_LE((size_t)n, copy);
-
-            memcpy((uint8_t *)mDataDestination + mNumBytesRead,
-                   mReadBuffer->data(),
-                   n);
-
-            mNumBytesRead += n;
-
-            if (n == 0) {
-                mAtEOS = true;
-                break;
-            }
-        } else {
-            MY_LOGV("readMore pending read");
-
-            if (request->status().status() != net::URLRequestStatus::IO_PENDING) {
-                MY_LOGI(StringPrintf(
-                            "Direct read failed w/ status %d\n",
-                            request->status().status()).c_str());
-
-                mOwner->onReadCompleted(ERROR_IO);
-                return;
-            }
-
-            return;
-        }
-    }
-
-    mOwner->onReadCompleted(mNumBytesRead);
-}
-
-void SfDelegate::initiateConnection(
-        const char *uri,
-        const KeyedVector<String8, String8> *headers,
-        off64_t offset) {
-    GURL url(uri);
-
-    MessageLoop *loop = gNetworkThread->message_loop();
-    loop->PostTask(
-            FROM_HERE,
-            NewRunnableFunction(
-                &SfDelegate::OnInitiateConnectionWrapper,
-                this,
-                url,
-                headers,
-                offset));
-
-}
-
-// static
-void SfDelegate::OnInitiateConnectionWrapper(
-        SfDelegate *me, GURL url,
-        const KeyedVector<String8, String8> *headers,
-        off64_t offset) {
-    me->onInitiateConnection(url, headers, offset);
-}
-
-void SfDelegate::onInitiateConnection(
-        const GURL &url,
-        const KeyedVector<String8, String8> *extra,
-        off64_t offset) {
-    CHECK(mURLRequest == NULL);
-
-    mURLRequest = new net::URLRequest(url, this);
-    mAtEOS = false;
-
-    mRangeRequested = false;
-
-    if (offset != 0 || extra != NULL) {
-        net::HttpRequestHeaders headers =
-            mURLRequest->extra_request_headers();
-
-        if (offset != 0) {
-            headers.AddHeaderFromString(
-                    StringPrintf("Range: bytes=%lld-", offset).c_str());
-
-            mRangeRequested = true;
-        }
-
-        if (extra != NULL) {
-            for (size_t i = 0; i < extra->size(); ++i) {
-                AString s;
-                s.append(extra->keyAt(i).string());
-                s.append(": ");
-                s.append(extra->valueAt(i).string());
-
-                headers.AddHeaderFromString(s.c_str());
-            }
-        }
-
-        mURLRequest->SetExtraRequestHeaders(headers);
-    }
-
-    mURLRequest->set_context(gReqContext);
-
-    mURLRequest->Start();
-}
-
-void SfDelegate::initiateDisconnect() {
-    MessageLoop *loop = gNetworkThread->message_loop();
-    loop->PostTask(
-            FROM_HERE,
-            NewRunnableFunction(
-                &SfDelegate::OnInitiateDisconnectWrapper, this));
-}
-
-// static
-void SfDelegate::OnInitiateDisconnectWrapper(SfDelegate *me) {
-    me->onInitiateDisconnect();
-}
-
-void SfDelegate::onInitiateDisconnect() {
-    if (mURLRequest == NULL) {
-        return;
-    }
-
-    mURLRequest->Cancel();
-
-    delete mURLRequest;
-    mURLRequest = NULL;
-
-    mOwner->onDisconnectComplete();
-}
-
-void SfDelegate::initiateRead(void *data, size_t size) {
-    MessageLoop *loop = gNetworkThread->message_loop();
-    loop->PostTask(
-            FROM_HERE,
-            NewRunnableFunction(
-                &SfDelegate::OnInitiateReadWrapper, this, data, size));
-}
-
-// static
-void SfDelegate::OnInitiateReadWrapper(
-        SfDelegate *me, void *data, size_t size) {
-    me->onInitiateRead(data, size);
-}
-
-void SfDelegate::onInitiateRead(void *data, size_t size) {
-    CHECK(mURLRequest != NULL);
-
-    mNumBytesRead = 0;
-    mNumBytesTotal = size;
-    mDataDestination = data;
-
-    if (mAtEOS) {
-        mOwner->onReadCompleted(0);
-        return;
-    }
-
-    readMore(mURLRequest);
-}
-
-}  // namespace android
-
diff --git a/media/libstagefright/chromium_http/support.h b/media/libstagefright/chromium_http/support.h
deleted file mode 100644
index 975a1d3..0000000
--- a/media/libstagefright/chromium_http/support.h
+++ /dev/null
@@ -1,178 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SUPPORT_H_
-
-#define SUPPORT_H_
-
-#include <assert.h>
-
-#include "net/base/net_log.h"
-#include "net/url_request/url_request.h"
-#include "net/url_request/url_request_context.h"
-#include "net/base/android_network_library.h"
-#include "net/base/io_buffer.h"
-
-#include <utils/KeyedVector.h>
-#include <utils/Mutex.h>
-#include <utils/String8.h>
-
-namespace net {
-    struct ProxyConfigServiceAndroid;
-};
-
-namespace android {
-
-struct SfNetLog : public net::NetLog {
-    SfNetLog();
-
-    virtual void AddEntry(
-            EventType type,
-            const base::TimeTicks &time,
-            const Source &source,
-            EventPhase phase,
-            EventParameters *params);
-
-    virtual uint32 NextID();
-    virtual LogLevel GetLogLevel() const;
-
-private:
-    uint32 mNextID;
-
-    DISALLOW_EVIL_CONSTRUCTORS(SfNetLog);
-};
-
-struct SfRequestContext : public net::URLRequestContext {
-    SfRequestContext();
-
-    virtual const std::string &GetUserAgent(const GURL &url) const;
-
-    status_t updateProxyConfig(
-            const char *host, int32_t port, const char *exclusionList);
-
-private:
-    Mutex mProxyConfigLock;
-
-    std::string mUserAgent;
-    net::ProxyConfigServiceAndroid *mProxyConfigService;
-
-    DISALLOW_EVIL_CONSTRUCTORS(SfRequestContext);
-};
-
-// This is required for https support, we don't really verify certificates,
-// we accept anything...
-struct SfNetworkLibrary : public net::AndroidNetworkLibrary {
-    SfNetworkLibrary();
-
-    virtual VerifyResult VerifyX509CertChain(
-            const std::vector<std::string>& cert_chain,
-            const std::string& hostname,
-            const std::string& auth_type);
-
-private:
-    DISALLOW_EVIL_CONSTRUCTORS(SfNetworkLibrary);
-};
-
-struct ChromiumHTTPDataSource;
-
-struct SfDelegate : public net::URLRequest::Delegate {
-    SfDelegate();
-    virtual ~SfDelegate();
-
-    void initiateConnection(
-            const char *uri,
-            const KeyedVector<String8, String8> *headers,
-            off64_t offset);
-
-    void initiateDisconnect();
-    void initiateRead(void *data, size_t size);
-
-    void setOwner(ChromiumHTTPDataSource *mOwner);
-
-    // Gets the UID of the calling process
-    bool getUID(uid_t *uid) const;
-
-    void setUID(uid_t uid);
-
-    virtual void OnReceivedRedirect(
-            net::URLRequest *request, const GURL &new_url, bool *defer_redirect);
-
-    virtual void OnAuthRequired(
-            net::URLRequest *request, net::AuthChallengeInfo *auth_info);
-
-    virtual void OnCertificateRequested(
-            net::URLRequest *request, net::SSLCertRequestInfo *cert_request_info);
-
-    virtual void OnSSLCertificateError(
-            net::URLRequest *request, int cert_error, net::X509Certificate *cert);
-
-    virtual void OnGetCookies(net::URLRequest *request, bool blocked_by_policy);
-
-    virtual void OnSetCookie(
-            net::URLRequest *request,
-            const std::string &cookie_line,
-            const net::CookieOptions &options,
-            bool blocked_by_policy);
-
-    virtual void OnResponseStarted(net::URLRequest *request);
-
-    virtual void OnReadCompleted(net::URLRequest *request, int bytes_read);
-
-    static status_t UpdateProxyConfig(
-            const char *host, int32_t port, const char *exclusionList);
-
-private:
-    typedef Delegate inherited;
-
-    ChromiumHTTPDataSource *mOwner;
-
-    net::URLRequest *mURLRequest;
-    scoped_refptr<net::IOBufferWithSize> mReadBuffer;
-
-    size_t mNumBytesRead;
-    size_t mNumBytesTotal;
-    void *mDataDestination;
-
-    bool mRangeRequested;
-    bool mAtEOS;
-
-    void readMore(net::URLRequest *request);
-
-    static void OnInitiateConnectionWrapper(
-            SfDelegate *me,
-            GURL url,
-            const KeyedVector<String8, String8> *headers,
-            off64_t offset);
-
-    static void OnInitiateDisconnectWrapper(SfDelegate *me);
-
-    static void OnInitiateReadWrapper(
-            SfDelegate *me, void *data, size_t size);
-
-    void onInitiateConnection(
-            const GURL &url,
-            const KeyedVector<String8, String8> *headers,
-            off64_t offset);
-
-    void onInitiateDisconnect();
-    void onInitiateRead(void *data, size_t size);
-
-    DISALLOW_EVIL_CONSTRUCTORS(SfDelegate);
-};
-
-}  // namespace android
-
-#endif  // SUPPORT_H_
diff --git a/media/libstagefright/chromium_http_stub.cpp b/media/libstagefright/chromium_http_stub.cpp
deleted file mode 100644
index ed8a878..0000000
--- a/media/libstagefright/chromium_http_stub.cpp
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <dlfcn.h>
-
-#include <media/stagefright/DataSource.h>
-
-#include "include/chromium_http_stub.h"
-#include "include/HTTPBase.h"
-
-namespace android {
-
-static bool gFirst = true;
-static void *gHandle;
-static Mutex gLibMutex;
-
-HTTPBase *(*gLib_createChromiumHTTPDataSource)(uint32_t flags);
-DataSource *(*gLib_createDataUriSource)(const char *uri);
-
-status_t (*gLib_UpdateChromiumHTTPDataSourceProxyConfig)(
-        const char *host, int32_t port, const char *exclusionList);
-
-static bool load_libstagefright_chromium_http() {
-    Mutex::Autolock autoLock(gLibMutex);
-    void *sym;
-
-    if (!gFirst) {
-        return (gHandle != NULL);
-    }
-
-    gFirst = false;
-
-    gHandle = dlopen("libstagefright_chromium_http.so", RTLD_NOW);
-    if (gHandle == NULL) {
-        return false;
-    }
-
-    sym = dlsym(gHandle, "createChromiumHTTPDataSource");
-    if (sym == NULL) {
-        gHandle = NULL;
-        return false;
-    }
-    gLib_createChromiumHTTPDataSource = (HTTPBase *(*)(uint32_t))sym;
-
-    sym = dlsym(gHandle, "createDataUriSource");
-    if (sym == NULL) {
-        gHandle = NULL;
-        return false;
-    }
-    gLib_createDataUriSource = (DataSource *(*)(const char *))sym;
-
-    sym = dlsym(gHandle, "UpdateChromiumHTTPDataSourceProxyConfig");
-    if (sym == NULL) {
-        gHandle = NULL;
-        return false;
-    }
-    gLib_UpdateChromiumHTTPDataSourceProxyConfig =
-        (status_t (*)(const char *, int32_t, const char *))sym;
-
-    return true;
-}
-
-HTTPBase *createChromiumHTTPDataSource(uint32_t flags) {
-    if (!load_libstagefright_chromium_http()) {
-        return NULL;
-    }
-
-    return gLib_createChromiumHTTPDataSource(flags);
-}
-
-status_t UpdateChromiumHTTPDataSourceProxyConfig(
-        const char *host, int32_t port, const char *exclusionList) {
-    if (!load_libstagefright_chromium_http()) {
-        return INVALID_OPERATION;
-    }
-
-    return gLib_UpdateChromiumHTTPDataSourceProxyConfig(
-            host, port, exclusionList);
-}
-
-DataSource *createDataUriSource(const char *uri) {
-    if (!load_libstagefright_chromium_http()) {
-        return NULL;
-    }
-
-    return gLib_createDataUriSource(uri);
-}
-
-}
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
index 1b20cbb..4ac8999 100644
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
+++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
@@ -30,7 +30,7 @@
 #define DRC_DEFAULT_MOBILE_REF_LEVEL 64  /* 64*-0.25dB = -16 dB below full scale for mobile conf */
 #define DRC_DEFAULT_MOBILE_DRC_CUT   127 /* maximum compression of dynamic range for mobile conf */
 #define DRC_DEFAULT_MOBILE_DRC_BOOST 127 /* maximum compression of dynamic range for mobile conf */
-#define MAX_CHANNEL_COUNT            6  /* maximum number of audio channels that can be decoded */
+#define MAX_CHANNEL_COUNT            8  /* maximum number of audio channels that can be decoded */
 // names of properties that can be used to override the default DRC settings
 #define PROP_DRC_OVERRIDE_REF_LEVEL  "aac_drc_reference_level"
 #define PROP_DRC_OVERRIDE_CUT        "aac_drc_cut"
@@ -58,6 +58,8 @@
       mIsADTS(false),
       mInputBufferCount(0),
       mSignalledError(false),
+      mSawInputEos(false),
+      mSignalledOutputEos(false),
       mAnchorTimeUs(0),
       mNumSamplesOutput(0),
       mOutputPortSettingsChange(NONE) {
@@ -294,8 +296,11 @@
         if (!(property_get("media.aac_51_output_enabled", value, NULL) &&
                 (!strcmp(value, "1") || !strcasecmp(value, "true")))) {
             ALOGI("Downmixing multichannel AAC to stereo");
-            aacDecoder_SetParam(mAACDecoder, AAC_PCM_OUTPUT_CHANNELS, 2);
+            aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, 2);
             mStreamInfo->numChannels = 2;
+            // By default, the decoder creates a 5.1 channel downmix signal
+            // for seven and eight channel input streams. To enable 6.1 and 7.1 channel output
+            // use aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, -1)
         }
     }
 }
@@ -350,115 +355,83 @@
         return;
     }
 
-    while (!inQueue.empty() && !outQueue.empty()) {
-        BufferInfo *inInfo = *inQueue.begin();
-        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+    while ((!inQueue.empty() || (mSawInputEos && !mSignalledOutputEos)) && !outQueue.empty()) {
+        BufferInfo *inInfo = NULL;
+        OMX_BUFFERHEADERTYPE *inHeader = NULL;
+        if (!inQueue.empty()) {
+            inInfo = *inQueue.begin();
+            inHeader = inInfo->mHeader;
+        }
 
         BufferInfo *outInfo = *outQueue.begin();
         OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
+        outHeader->nFlags = 0;
 
-        if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
-            inQueue.erase(inQueue.begin());
-            inInfo->mOwnedByUs = false;
-            notifyEmptyBufferDone(inHeader);
+        if (inHeader) {
+            if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+                mSawInputEos = true;
+            }
 
-            if (mDecoderHasData) {
-                // flush out the decoder's delayed data by calling DecodeFrame
-                // one more time, with the AACDEC_FLUSH flag set
-                INT_PCM *outBuffer =
-                        reinterpret_cast<INT_PCM *>(
-                                outHeader->pBuffer + outHeader->nOffset);
+            if (inHeader->nOffset == 0 && inHeader->nFilledLen) {
+                mAnchorTimeUs = inHeader->nTimeStamp;
+                mNumSamplesOutput = 0;
+            }
 
-                AAC_DECODER_ERROR decoderErr =
-                    aacDecoder_DecodeFrame(mAACDecoder,
-                                           outBuffer,
-                                           outHeader->nAllocLen,
-                                           AACDEC_FLUSH);
-                mDecoderHasData = false;
+            if (mIsADTS && inHeader->nFilledLen) {
+                size_t adtsHeaderSize = 0;
+                // skip 30 bits, aac_frame_length follows.
+                // ssssssss ssssiiip ppffffPc ccohCCll llllllll lll?????
 
-                if (decoderErr != AAC_DEC_OK) {
+                const uint8_t *adtsHeader = inHeader->pBuffer + inHeader->nOffset;
+
+                bool signalError = false;
+                if (inHeader->nFilledLen < 7) {
+                    ALOGE("Audio data too short to contain even the ADTS header. "
+                          "Got %ld bytes.", inHeader->nFilledLen);
+                    hexdump(adtsHeader, inHeader->nFilledLen);
+                    signalError = true;
+                } else {
+                    bool protectionAbsent = (adtsHeader[1] & 1);
+
+                    unsigned aac_frame_length =
+                        ((adtsHeader[3] & 3) << 11)
+                        | (adtsHeader[4] << 3)
+                        | (adtsHeader[5] >> 5);
+
+                    if (inHeader->nFilledLen < aac_frame_length) {
+                        ALOGE("Not enough audio data for the complete frame. "
+                              "Got %ld bytes, frame size according to the ADTS "
+                              "header is %u bytes.",
+                              inHeader->nFilledLen, aac_frame_length);
+                        hexdump(adtsHeader, inHeader->nFilledLen);
+                        signalError = true;
+                    } else {
+                        adtsHeaderSize = (protectionAbsent ? 7 : 9);
+
+                        inBuffer[0] = (UCHAR *)adtsHeader + adtsHeaderSize;
+                        inBufferLength[0] = aac_frame_length - adtsHeaderSize;
+
+                        inHeader->nOffset += adtsHeaderSize;
+                        inHeader->nFilledLen -= adtsHeaderSize;
+                    }
+                }
+
+                if (signalError) {
                     mSignalledError = true;
 
-                    notify(OMX_EventError, OMX_ErrorUndefined, decoderErr,
+                    notify(OMX_EventError,
+                           OMX_ErrorStreamCorrupt,
+                           ERROR_MALFORMED,
                            NULL);
 
                     return;
                 }
-
-                outHeader->nFilledLen =
-                        mStreamInfo->frameSize
-                            * sizeof(int16_t)
-                            * mStreamInfo->numChannels;
             } else {
-                // we never submitted any data to the decoder, so there's nothing to flush out
-                outHeader->nFilledLen = 0;
-            }
-
-            outHeader->nFlags = OMX_BUFFERFLAG_EOS;
-
-            outQueue.erase(outQueue.begin());
-            outInfo->mOwnedByUs = false;
-            notifyFillBufferDone(outHeader);
-            return;
-        }
-
-        if (inHeader->nOffset == 0) {
-            mAnchorTimeUs = inHeader->nTimeStamp;
-            mNumSamplesOutput = 0;
-        }
-
-        size_t adtsHeaderSize = 0;
-        if (mIsADTS) {
-            // skip 30 bits, aac_frame_length follows.
-            // ssssssss ssssiiip ppffffPc ccohCCll llllllll lll?????
-
-            const uint8_t *adtsHeader = inHeader->pBuffer + inHeader->nOffset;
-
-            bool signalError = false;
-            if (inHeader->nFilledLen < 7) {
-                ALOGE("Audio data too short to contain even the ADTS header. "
-                      "Got %ld bytes.", inHeader->nFilledLen);
-                hexdump(adtsHeader, inHeader->nFilledLen);
-                signalError = true;
-            } else {
-                bool protectionAbsent = (adtsHeader[1] & 1);
-
-                unsigned aac_frame_length =
-                    ((adtsHeader[3] & 3) << 11)
-                    | (adtsHeader[4] << 3)
-                    | (adtsHeader[5] >> 5);
-
-                if (inHeader->nFilledLen < aac_frame_length) {
-                    ALOGE("Not enough audio data for the complete frame. "
-                          "Got %ld bytes, frame size according to the ADTS "
-                          "header is %u bytes.",
-                          inHeader->nFilledLen, aac_frame_length);
-                    hexdump(adtsHeader, inHeader->nFilledLen);
-                    signalError = true;
-                } else {
-                    adtsHeaderSize = (protectionAbsent ? 7 : 9);
-
-                    inBuffer[0] = (UCHAR *)adtsHeader + adtsHeaderSize;
-                    inBufferLength[0] = aac_frame_length - adtsHeaderSize;
-
-                    inHeader->nOffset += adtsHeaderSize;
-                    inHeader->nFilledLen -= adtsHeaderSize;
-                }
-            }
-
-            if (signalError) {
-                mSignalledError = true;
-
-                notify(OMX_EventError,
-                       OMX_ErrorStreamCorrupt,
-                       ERROR_MALFORMED,
-                       NULL);
-
-                return;
+                inBuffer[0] = inHeader->pBuffer + inHeader->nOffset;
+                inBufferLength[0] = inHeader->nFilledLen;
             }
         } else {
-            inBuffer[0] = inHeader->pBuffer + inHeader->nOffset;
-            inBufferLength[0] = inHeader->nFilledLen;
+            inBufferLength[0] = 0;
         }
 
         // Fill and decode
@@ -471,50 +444,66 @@
         int prevNumChannels = mStreamInfo->numChannels;
 
         AAC_DECODER_ERROR decoderErr = AAC_DEC_NOT_ENOUGH_BITS;
-        while (bytesValid[0] > 0 && decoderErr == AAC_DEC_NOT_ENOUGH_BITS) {
+        while ((bytesValid[0] > 0 || mSawInputEos) && decoderErr == AAC_DEC_NOT_ENOUGH_BITS) {
+            mDecoderHasData |= (bytesValid[0] > 0);
             aacDecoder_Fill(mAACDecoder,
                             inBuffer,
                             inBufferLength,
                             bytesValid);
-            mDecoderHasData = true;
 
             decoderErr = aacDecoder_DecodeFrame(mAACDecoder,
                                                 outBuffer,
                                                 outHeader->nAllocLen,
                                                 0 /* flags */);
-
             if (decoderErr == AAC_DEC_NOT_ENOUGH_BITS) {
-                ALOGW("Not enough bits, bytesValid %d", bytesValid[0]);
+                if (mSawInputEos && bytesValid[0] <= 0) {
+                    if (mDecoderHasData) {
+                        // flush out the decoder's delayed data by calling DecodeFrame
+                        // one more time, with the AACDEC_FLUSH flag set
+                        decoderErr = aacDecoder_DecodeFrame(mAACDecoder,
+                                                            outBuffer,
+                                                            outHeader->nAllocLen,
+                                                            AACDEC_FLUSH);
+                        mDecoderHasData = false;
+                    }
+                    outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+                    mSignalledOutputEos = true;
+                    break;
+                } else {
+                    ALOGW("Not enough bits, bytesValid %d", bytesValid[0]);
+                }
             }
         }
 
         size_t numOutBytes =
             mStreamInfo->frameSize * sizeof(int16_t) * mStreamInfo->numChannels;
 
-        if (decoderErr == AAC_DEC_OK) {
-            UINT inBufferUsedLength = inBufferLength[0] - bytesValid[0];
-            inHeader->nFilledLen -= inBufferUsedLength;
-            inHeader->nOffset += inBufferUsedLength;
-        } else {
-            ALOGW("AAC decoder returned error %d, substituting silence",
-                  decoderErr);
+        if (inHeader) {
+            if (decoderErr == AAC_DEC_OK) {
+                UINT inBufferUsedLength = inBufferLength[0] - bytesValid[0];
+                inHeader->nFilledLen -= inBufferUsedLength;
+                inHeader->nOffset += inBufferUsedLength;
+            } else {
+                ALOGW("AAC decoder returned error %d, substituting silence",
+                      decoderErr);
 
-            memset(outHeader->pBuffer + outHeader->nOffset, 0, numOutBytes);
+                memset(outHeader->pBuffer + outHeader->nOffset, 0, numOutBytes);
 
-            // Discard input buffer.
-            inHeader->nFilledLen = 0;
+                // Discard input buffer.
+                inHeader->nFilledLen = 0;
 
-            aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1);
+                aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1);
 
-            // fall through
-        }
+                // fall through
+            }
 
-        if (inHeader->nFilledLen == 0) {
-            inInfo->mOwnedByUs = false;
-            inQueue.erase(inQueue.begin());
-            inInfo = NULL;
-            notifyEmptyBufferDone(inHeader);
-            inHeader = NULL;
+            if (inHeader->nFilledLen == 0) {
+                inInfo->mOwnedByUs = false;
+                inQueue.erase(inQueue.begin());
+                inInfo = NULL;
+                notifyEmptyBufferDone(inHeader);
+                inHeader = NULL;
+            }
         }
 
         /*
@@ -555,7 +544,6 @@
             // we've previously decoded valid data, in the latter case
             // (decode failed) we'll output a silent frame.
             outHeader->nFilledLen = numOutBytes;
-            outHeader->nFlags = 0;
 
             outHeader->nTimeStamp =
                 mAnchorTimeUs
@@ -582,6 +570,12 @@
         // depend on fragments from the last one decoded.
         // drain all existing data
         drainDecoder();
+        // force decoder loop to drop the first decoded buffer by resetting these state variables,
+        // but only if initialization has already happened.
+        if (mInputBufferCount != 0) {
+            mInputBufferCount = 1;
+            mStreamInfo->sampleRate = 0;
+        }
     }
 }
 
@@ -606,6 +600,8 @@
     mStreamInfo->sampleRate = 0;
 
     mSignalledError = false;
+    mSawInputEos = false;
+    mSignalledOutputEos = false;
     mOutputPortSettingsChange = NONE;
 }
 
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.h b/media/libstagefright/codecs/aacdec/SoftAAC2.h
index 2d960ab..a7ea1e2 100644
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.h
+++ b/media/libstagefright/codecs/aacdec/SoftAAC2.h
@@ -55,6 +55,8 @@
     bool mDecoderHasData;
     size_t mInputBufferCount;
     bool mSignalledError;
+    bool mSawInputEos;
+    bool mSignalledOutputEos;
     int64_t mAnchorTimeUs;
     int64_t mNumSamplesOutput;
 
diff --git a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp
index 7c382fb..61df65e 100644
--- a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp
+++ b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp
@@ -49,6 +49,8 @@
       mNumChannels(2),
       mSamplingRate(44100),
       mSignalledError(false),
+      mSawInputEos(false),
+      mSignalledOutputEos(false),
       mOutputPortSettingsChange(NONE) {
     initPorts();
     initDecoder();
@@ -194,48 +196,36 @@
     List<BufferInfo *> &inQueue = getPortQueue(0);
     List<BufferInfo *> &outQueue = getPortQueue(1);
 
-    while (!inQueue.empty() && !outQueue.empty()) {
-        BufferInfo *inInfo = *inQueue.begin();
-        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+    while ((!inQueue.empty() || (mSawInputEos && !mSignalledOutputEos)) && !outQueue.empty()) {
+        BufferInfo *inInfo = NULL;
+        OMX_BUFFERHEADERTYPE *inHeader = NULL;
+        if (!inQueue.empty()) {
+            inInfo = *inQueue.begin();
+            inHeader = inInfo->mHeader;
+        }
 
         BufferInfo *outInfo = *outQueue.begin();
         OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
+        outHeader->nFlags = 0;
 
-        if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
-            inQueue.erase(inQueue.begin());
-            inInfo->mOwnedByUs = false;
-            notifyEmptyBufferDone(inHeader);
-
-            if (!mIsFirst) {
-                // pad the end of the stream with 529 samples, since that many samples
-                // were trimmed off the beginning when decoding started
-                outHeader->nFilledLen =
-                    kPVMP3DecoderDelay * mNumChannels * sizeof(int16_t);
-
-                memset(outHeader->pBuffer, 0, outHeader->nFilledLen);
-            } else {
-                // Since we never discarded frames from the start, we won't have
-                // to add any padding at the end either.
-                outHeader->nFilledLen = 0;
+        if (inHeader) {
+            if (inHeader->nOffset == 0 && inHeader->nFilledLen) {
+                mAnchorTimeUs = inHeader->nTimeStamp;
+                mNumFramesOutput = 0;
             }
 
-            outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+            if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+                mSawInputEos = true;
+            }
 
-            outQueue.erase(outQueue.begin());
-            outInfo->mOwnedByUs = false;
-            notifyFillBufferDone(outHeader);
-            return;
+            mConfig->pInputBuffer =
+                inHeader->pBuffer + inHeader->nOffset;
+
+            mConfig->inputBufferCurrentLength = inHeader->nFilledLen;
+        } else {
+            mConfig->pInputBuffer = NULL;
+            mConfig->inputBufferCurrentLength = 0;
         }
-
-        if (inHeader->nOffset == 0) {
-            mAnchorTimeUs = inHeader->nTimeStamp;
-            mNumFramesOutput = 0;
-        }
-
-        mConfig->pInputBuffer =
-            inHeader->pBuffer + inHeader->nOffset;
-
-        mConfig->inputBufferCurrentLength = inHeader->nFilledLen;
         mConfig->inputBufferMaxLength = 0;
         mConfig->inputBufferUsedLength = 0;
 
@@ -262,13 +252,28 @@
                 mConfig->outputFrameSize = kOutputBufferSize / sizeof(int16_t);
             }
 
-            // This is recoverable, just ignore the current frame and
-            // play silence instead.
-            memset(outHeader->pBuffer,
-                   0,
-                   mConfig->outputFrameSize * sizeof(int16_t));
+            if (decoderErr == NO_ENOUGH_MAIN_DATA_ERROR && mSawInputEos) {
+                if (!mIsFirst) {
+                    // pad the end of the stream with 529 samples, since that many samples
+                    // were trimmed off the beginning when decoding started
+                    outHeader->nOffset = 0;
+                    outHeader->nFilledLen = kPVMP3DecoderDelay * mNumChannels * sizeof(int16_t);
 
-            mConfig->inputBufferUsedLength = inHeader->nFilledLen;
+                    memset(outHeader->pBuffer, 0, outHeader->nFilledLen);
+                }
+                outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+                mSignalledOutputEos = true;
+            } else {
+                // This is recoverable, just ignore the current frame and
+                // play silence instead.
+                memset(outHeader->pBuffer,
+                       0,
+                       mConfig->outputFrameSize * sizeof(int16_t));
+
+                if (inHeader) {
+                    mConfig->inputBufferUsedLength = inHeader->nFilledLen;
+                }
+            }
         } else if (mConfig->samplingRate != mSamplingRate
                 || mConfig->num_channels != mNumChannels) {
             mSamplingRate = mConfig->samplingRate;
@@ -289,7 +294,7 @@
 
             outHeader->nFilledLen =
                 mConfig->outputFrameSize * sizeof(int16_t) - outHeader->nOffset;
-        } else {
+        } else if (!mSignalledOutputEos) {
             outHeader->nOffset = 0;
             outHeader->nFilledLen = mConfig->outputFrameSize * sizeof(int16_t);
         }
@@ -298,23 +303,24 @@
             mAnchorTimeUs
                 + (mNumFramesOutput * 1000000ll) / mConfig->samplingRate;
 
-        outHeader->nFlags = 0;
+        if (inHeader) {
+            CHECK_GE(inHeader->nFilledLen, mConfig->inputBufferUsedLength);
 
-        CHECK_GE(inHeader->nFilledLen, mConfig->inputBufferUsedLength);
+            inHeader->nOffset += mConfig->inputBufferUsedLength;
+            inHeader->nFilledLen -= mConfig->inputBufferUsedLength;
 
-        inHeader->nOffset += mConfig->inputBufferUsedLength;
-        inHeader->nFilledLen -= mConfig->inputBufferUsedLength;
+
+            if (inHeader->nFilledLen == 0) {
+                inInfo->mOwnedByUs = false;
+                inQueue.erase(inQueue.begin());
+                inInfo = NULL;
+                notifyEmptyBufferDone(inHeader);
+                inHeader = NULL;
+            }
+        }
 
         mNumFramesOutput += mConfig->outputFrameSize / mNumChannels;
 
-        if (inHeader->nFilledLen == 0) {
-            inInfo->mOwnedByUs = false;
-            inQueue.erase(inQueue.begin());
-            inInfo = NULL;
-            notifyEmptyBufferDone(inHeader);
-            inHeader = NULL;
-        }
-
         outInfo->mOwnedByUs = false;
         outQueue.erase(outQueue.begin());
         outInfo = NULL;
@@ -329,6 +335,9 @@
         // depend on fragments from the last one decoded.
         pvmp3_InitDecoder(mConfig, mDecoderBuf);
         mIsFirst = true;
+        mSignalledError = false;
+        mSawInputEos = false;
+        mSignalledOutputEos = false;
     }
 }
 
@@ -362,6 +371,8 @@
     pvmp3_InitDecoder(mConfig, mDecoderBuf);
     mIsFirst = true;
     mSignalledError = false;
+    mSawInputEos = false;
+    mSignalledOutputEos = false;
     mOutputPortSettingsChange = NONE;
 }
 
diff --git a/media/libstagefright/codecs/mp3dec/SoftMP3.h b/media/libstagefright/codecs/mp3dec/SoftMP3.h
index 4af91ea..f9e7b53 100644
--- a/media/libstagefright/codecs/mp3dec/SoftMP3.h
+++ b/media/libstagefright/codecs/mp3dec/SoftMP3.h
@@ -61,6 +61,8 @@
 
     bool mIsFirst;
     bool mSignalledError;
+    bool mSawInputEos;
+    bool mSignalledOutputEos;
 
     enum {
         NONE,
diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
index 51bb958..515e4d3 100644
--- a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
+++ b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
@@ -54,6 +54,8 @@
       mAnchorTimeUs(0),
       mNumFramesOutput(0),
       mNumFramesLeftOnPage(-1),
+      mSawInputEos(false),
+      mSignalledOutputEos(false),
       mOutputPortSettingsChange(NONE) {
     initPorts();
     CHECK_EQ(initDecoder(), (status_t)OK);
@@ -290,48 +292,47 @@
         return;
     }
 
-    while (!inQueue.empty() && !outQueue.empty()) {
-        BufferInfo *inInfo = *inQueue.begin();
-        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+    while ((!inQueue.empty() || (mSawInputEos && !mSignalledOutputEos)) && !outQueue.empty()) {
+        BufferInfo *inInfo = NULL;
+        OMX_BUFFERHEADERTYPE *inHeader = NULL;
+        if (!inQueue.empty()) {
+            inInfo = *inQueue.begin();
+            inHeader = inInfo->mHeader;
+        }
 
         BufferInfo *outInfo = *outQueue.begin();
         OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
 
-        if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
-            inQueue.erase(inQueue.begin());
-            inInfo->mOwnedByUs = false;
-            notifyEmptyBufferDone(inHeader);
+        int32_t numPageSamples = 0;
 
-            outHeader->nFilledLen = 0;
-            outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+        if (inHeader) {
+            if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+                mSawInputEos = true;
+            }
 
-            outQueue.erase(outQueue.begin());
-            outInfo->mOwnedByUs = false;
-            notifyFillBufferDone(outHeader);
-            return;
+            if (inHeader->nFilledLen || !mSawInputEos) {
+                CHECK_GE(inHeader->nFilledLen, sizeof(numPageSamples));
+                memcpy(&numPageSamples,
+                       inHeader->pBuffer
+                        + inHeader->nOffset + inHeader->nFilledLen - 4,
+                       sizeof(numPageSamples));
+
+                if (inHeader->nOffset == 0) {
+                    mAnchorTimeUs = inHeader->nTimeStamp;
+                    mNumFramesOutput = 0;
+                }
+
+                inHeader->nFilledLen -= sizeof(numPageSamples);
+            }
         }
 
-        int32_t numPageSamples;
-        CHECK_GE(inHeader->nFilledLen, sizeof(numPageSamples));
-        memcpy(&numPageSamples,
-               inHeader->pBuffer
-                + inHeader->nOffset + inHeader->nFilledLen - 4,
-               sizeof(numPageSamples));
-
         if (numPageSamples >= 0) {
             mNumFramesLeftOnPage = numPageSamples;
         }
 
-        if (inHeader->nOffset == 0) {
-            mAnchorTimeUs = inHeader->nTimeStamp;
-            mNumFramesOutput = 0;
-        }
-
-        inHeader->nFilledLen -= sizeof(numPageSamples);;
-
         ogg_buffer buf;
-        buf.data = inHeader->pBuffer + inHeader->nOffset;
-        buf.size = inHeader->nFilledLen;
+        buf.data = inHeader ? inHeader->pBuffer + inHeader->nOffset : NULL;
+        buf.size = inHeader ? inHeader->nFilledLen : 0;
         buf.refcount = 1;
         buf.ptr.owner = NULL;
 
@@ -351,6 +352,7 @@
 
         int numFrames = 0;
 
+        outHeader->nFlags = 0;
         int err = vorbis_dsp_synthesis(mState, &pack, 1);
         if (err != 0) {
             ALOGW("vorbis_dsp_synthesis returned %d", err);
@@ -370,13 +372,16 @@
                 ALOGV("discarding %d frames at end of page",
                      numFrames - mNumFramesLeftOnPage);
                 numFrames = mNumFramesLeftOnPage;
+                if (mSawInputEos) {
+                    outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+                    mSignalledOutputEos = true;
+                }
             }
             mNumFramesLeftOnPage -= numFrames;
         }
 
         outHeader->nFilledLen = numFrames * sizeof(int16_t) * mVi->channels;
         outHeader->nOffset = 0;
-        outHeader->nFlags = 0;
 
         outHeader->nTimeStamp =
             mAnchorTimeUs
@@ -384,11 +389,13 @@
 
         mNumFramesOutput += numFrames;
 
-        inInfo->mOwnedByUs = false;
-        inQueue.erase(inQueue.begin());
-        inInfo = NULL;
-        notifyEmptyBufferDone(inHeader);
-        inHeader = NULL;
+        if (inHeader) {
+            inInfo->mOwnedByUs = false;
+            inQueue.erase(inQueue.begin());
+            inInfo = NULL;
+            notifyEmptyBufferDone(inHeader);
+            inHeader = NULL;
+        }
 
         outInfo->mOwnedByUs = false;
         outQueue.erase(outQueue.begin());
@@ -425,6 +432,8 @@
         mVi = NULL;
     }
 
+    mSawInputEos = false;
+    mSignalledOutputEos = false;
     mOutputPortSettingsChange = NONE;
 }
 
diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h
index cb628a0..1d00816 100644
--- a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h
+++ b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h
@@ -59,6 +59,8 @@
     int64_t mAnchorTimeUs;
     int64_t mNumFramesOutput;
     int32_t mNumFramesLeftOnPage;
+    bool mSawInputEos;
+    bool mSignalledOutputEos;
 
     enum {
         NONE,
diff --git a/media/libstagefright/http/Android.mk b/media/libstagefright/http/Android.mk
new file mode 100644
index 0000000..a6b481f
--- /dev/null
+++ b/media/libstagefright/http/Android.mk
@@ -0,0 +1,26 @@
+LOCAL_PATH:= $(call my-dir)
+
+ifneq ($(TARGET_BUILD_PDK), true)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:=       \
+        HTTPHelper.cpp          \
+
+LOCAL_C_INCLUDES:= \
+	$(TOP)/frameworks/av/media/libstagefright \
+	$(TOP)/frameworks/native/include/media/openmax \
+	$(TOP)/frameworks/base/core/jni \
+
+LOCAL_SHARED_LIBRARIES := \
+	libstagefright liblog libutils libbinder libstagefright_foundation \
+        libandroid_runtime \
+        libmedia
+
+LOCAL_MODULE:= libstagefright_http_support
+
+LOCAL_CFLAGS += -Wno-multichar
+
+include $(BUILD_SHARED_LIBRARY)
+
+endif
diff --git a/media/libstagefright/http/HTTPHelper.cpp b/media/libstagefright/http/HTTPHelper.cpp
new file mode 100644
index 0000000..77845e2
--- /dev/null
+++ b/media/libstagefright/http/HTTPHelper.cpp
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "HTTPHelper"
+#include <utils/Log.h>
+
+#include "HTTPHelper.h"
+
+#include "android_runtime/AndroidRuntime.h"
+#include "android_util_Binder.h"
+#include <media/IMediaHTTPService.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <nativehelper/ScopedLocalRef.h>
+#include "jni.h"
+
+namespace android {
+
+sp<IMediaHTTPService> CreateHTTPServiceInCurrentJavaContext() {
+    if (AndroidRuntime::getJavaVM() == NULL) {
+        ALOGE("CreateHTTPServiceInCurrentJavaContext called outside "
+              "JAVA environment.");
+        return NULL;
+    }
+
+    JNIEnv *env = AndroidRuntime::getJNIEnv();
+
+    ScopedLocalRef<jclass> clazz(
+            env, env->FindClass("android/media/MediaHTTPService"));
+    CHECK(clazz.get() != NULL);
+
+    jmethodID constructID = env->GetMethodID(clazz.get(), "<init>", "()V");
+    CHECK(constructID != NULL);
+
+    ScopedLocalRef<jobject> httpServiceObj(
+            env, env->NewObject(clazz.get(), constructID));
+
+    sp<IMediaHTTPService> httpService;
+    if (httpServiceObj.get() != NULL) {
+        jmethodID asBinderID =
+            env->GetMethodID(clazz.get(), "asBinder", "()Landroid/os/IBinder;");
+        CHECK(asBinderID != NULL);
+
+        ScopedLocalRef<jobject> httpServiceBinderObj(
+                env, env->CallObjectMethod(httpServiceObj.get(), asBinderID));
+        CHECK(httpServiceBinderObj.get() != NULL);
+
+        sp<IBinder> binder =
+            ibinderForJavaObject(env, httpServiceBinderObj.get());
+
+        httpService = interface_cast<IMediaHTTPService>(binder);
+    }
+
+    return httpService;
+}
+
+}  // namespace android
diff --git a/media/libstagefright/http/HTTPHelper.h b/media/libstagefright/http/HTTPHelper.h
new file mode 100644
index 0000000..8aef115
--- /dev/null
+++ b/media/libstagefright/http/HTTPHelper.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef HTTP_HELPER_H_
+
+#define HTTP_HELPER_H_
+
+#include <utils/RefBase.h>
+
+namespace android {
+
+struct IMediaHTTPService;
+
+sp<IMediaHTTPService> CreateHTTPServiceInCurrentJavaContext();
+
+}  // namespace android
+
+#endif  // HTTP_HELPER_H_
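
A hypothetical call site for the new helper (not part of this change; pickHTTPService and supplied are illustrative names): prefer a client-supplied IMediaHTTPService and fall back to the current Java context when running inside a Java process.

#include <media/IMediaHTTPService.h>
#include "HTTPHelper.h"

namespace android {

// Sketch only: returns NULL if neither a supplied service nor a Java VM exists.
static sp<IMediaHTTPService> pickHTTPService(const sp<IMediaHTTPService> &supplied) {
    if (supplied != NULL) {
        return supplied;
    }
    return CreateHTTPServiceInCurrentJavaContext();
}

}  // namespace android
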
diff --git a/media/libstagefright/http/MediaHTTP.cpp b/media/libstagefright/http/MediaHTTP.cpp
new file mode 100644
index 0000000..157d967
--- /dev/null
+++ b/media/libstagefright/http/MediaHTTP.cpp
@@ -0,0 +1,201 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaHTTP"
+#include <utils/Log.h>
+
+#include <media/stagefright/MediaHTTP.h>
+
+#include <binder/IServiceManager.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/Utils.h>
+
+#include <media/IMediaHTTPConnection.h>
+
+namespace android {
+
+MediaHTTP::MediaHTTP(const sp<IMediaHTTPConnection> &conn)
+    : mInitCheck(NO_INIT),
+      mHTTPConnection(conn),
+      mCachedSizeValid(false),
+      mCachedSize(0ll),
+      mDrmManagerClient(NULL) {
+    mInitCheck = OK;
+}
+
+MediaHTTP::~MediaHTTP() {
+    clearDRMState_l();
+}
+
+status_t MediaHTTP::connect(
+        const char *uri,
+        const KeyedVector<String8, String8> *headers,
+        off64_t /* offset */) {
+    if (mInitCheck != OK) {
+        return mInitCheck;
+    }
+
+    KeyedVector<String8, String8> extHeaders;
+    if (headers != NULL) {
+        extHeaders = *headers;
+    }
+    extHeaders.add(String8("User-Agent"), String8(MakeUserAgent().c_str()));
+
+    bool success = mHTTPConnection->connect(uri, &extHeaders);
+
+    mLastHeaders = extHeaders;
+    mLastURI = uri;
+
+    mCachedSizeValid = false;
+
+    return success ? OK : UNKNOWN_ERROR;
+}
+
+void MediaHTTP::disconnect() {
+    if (mInitCheck != OK) {
+        return;
+    }
+
+    mHTTPConnection->disconnect();
+}
+
+status_t MediaHTTP::initCheck() const {
+    return mInitCheck;
+}
+
+ssize_t MediaHTTP::readAt(off64_t offset, void *data, size_t size) {
+    if (mInitCheck != OK) {
+        return mInitCheck;
+    }
+
+    int64_t startTimeUs = ALooper::GetNowUs();
+
+    size_t numBytesRead = 0;
+    while (numBytesRead < size) {
+        size_t copy = size - numBytesRead;
+
+        if (copy > 64 * 1024) {
+            // limit the buffer sizes transferred across binder boundaries
+            // to avoid spurious transaction failures.
+            copy = 64 * 1024;
+        }
+
+        ssize_t n = mHTTPConnection->readAt(
+                offset + numBytesRead, (uint8_t *)data + numBytesRead, copy);
+
+        if (n < 0) {
+            return n;
+        } else if (n == 0) {
+            break;
+        }
+
+        numBytesRead += n;
+    }
+
+    int64_t delayUs = ALooper::GetNowUs() - startTimeUs;
+
+    addBandwidthMeasurement(numBytesRead, delayUs);
+
+    return numBytesRead;
+}
+
+status_t MediaHTTP::getSize(off64_t *size) {
+    if (mInitCheck != OK) {
+        return mInitCheck;
+    }
+
+    // Caching the returned size so that it stays valid even after a
+    // disconnect. NuCachedSource2 relies on this.
+
+    if (!mCachedSizeValid) {
+        mCachedSize = mHTTPConnection->getSize();
+        mCachedSizeValid = true;
+    }
+
+    *size = mCachedSize;
+
+    return *size < 0 ? *size : OK;
+}
+
+uint32_t MediaHTTP::flags() {
+    return kWantsPrefetching | kIsHTTPBasedSource;
+}
+
+status_t MediaHTTP::reconnectAtOffset(off64_t offset) {
+    return connect(mLastURI.c_str(), &mLastHeaders, offset);
+}
+
+// DRM...
+
+sp<DecryptHandle> MediaHTTP::DrmInitialization(const char* mime) {
+    if (mDrmManagerClient == NULL) {
+        mDrmManagerClient = new DrmManagerClient();
+    }
+
+    if (mDrmManagerClient == NULL) {
+        return NULL;
+    }
+
+    if (mDecryptHandle == NULL) {
+        mDecryptHandle = mDrmManagerClient->openDecryptSession(
+                String8(mLastURI.c_str()), mime);
+    }
+
+    if (mDecryptHandle == NULL) {
+        delete mDrmManagerClient;
+        mDrmManagerClient = NULL;
+    }
+
+    return mDecryptHandle;
+}
+
+void MediaHTTP::getDrmInfo(
+        sp<DecryptHandle> &handle, DrmManagerClient **client) {
+    handle = mDecryptHandle;
+    *client = mDrmManagerClient;
+}
+
+String8 MediaHTTP::getUri() {
+    return String8(mLastURI.c_str());
+}
+
+String8 MediaHTTP::getMIMEType() const {
+    if (mInitCheck != OK) {
+        return String8("application/octet-stream");
+    }
+
+    String8 mimeType;
+    status_t err = mHTTPConnection->getMIMEType(&mimeType);
+
+    if (err != OK) {
+        return String8("application/octet-stream");
+    }
+
+    return mimeType;
+}
+
+void MediaHTTP::clearDRMState_l() {
+    if (mDecryptHandle != NULL) {
+        // To release mDecryptHandle
+        CHECK(mDrmManagerClient);
+        mDrmManagerClient->closeDecryptSession(mDecryptHandle);
+        mDecryptHandle = NULL;
+    }
+}
+
+}  // namespace android
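
The readAt() loop above caps each binder transfer at 64 KiB; a small sketch of the resulting call pattern (the 200 KiB request size is purely illustrative):

// Illustrative only: a 200 KiB readAt() becomes 4 IMediaHTTPConnection::readAt()
// round trips of 64 + 64 + 64 + 8 KiB, with an early exit on EOF (n == 0).
#include <cstddef>

static size_t numBinderRoundTrips(size_t size) {
    const size_t kChunk = 64 * 1024;
    return (size + kChunk - 1) / kChunk;  // 200 KiB -> 4
}
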
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
index 233db44..149a817 100644
--- a/media/libstagefright/httplive/LiveSession.cpp
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -27,6 +27,8 @@
 #include "mpeg2ts/AnotherPacketSource.h"
 
 #include <cutils/properties.h>
+#include <media/IMediaHTTPConnection.h>
+#include <media/IMediaHTTPService.h>
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
@@ -34,6 +36,7 @@
 #include <media/stagefright/DataSource.h>
 #include <media/stagefright/FileSource.h>
 #include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaHTTP.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/Utils.h>
 
@@ -44,17 +47,13 @@
 namespace android {
 
 LiveSession::LiveSession(
-        const sp<AMessage> &notify, uint32_t flags, bool uidValid, uid_t uid)
+        const sp<AMessage> &notify, uint32_t flags,
+        const sp<IMediaHTTPService> &httpService)
     : mNotify(notify),
       mFlags(flags),
-      mUIDValid(uidValid),
-      mUID(uid),
+      mHTTPService(httpService),
       mInPreparationPhase(true),
-      mHTTPDataSource(
-              HTTPBase::Create(
-                  (mFlags & kFlagIncognito)
-                    ? HTTPBase::kFlagIncognito
-                    : 0)),
+      mHTTPDataSource(new MediaHTTP(mHTTPService->makeHTTPConnection())),
       mPrevBandwidthIndex(-1),
       mStreamMask(0),
       mCheckBandwidthGeneration(0),
@@ -62,10 +61,6 @@
       mRealTimeBaseUs(0ll),
       mReconfigurationInProgress(false),
       mDisconnectReplyID(0) {
-    if (mUIDValid) {
-        mHTTPDataSource->setUID(mUID);
-    }
-
     mPacketSources.add(
             STREAMTYPE_AUDIO, new AnotherPacketSource(NULL /* meta */));
 
diff --git a/media/libstagefright/httplive/LiveSession.h b/media/libstagefright/httplive/LiveSession.h
index 99b480a8..ee10e70 100644
--- a/media/libstagefright/httplive/LiveSession.h
+++ b/media/libstagefright/httplive/LiveSession.h
@@ -28,6 +28,7 @@
 struct AnotherPacketSource;
 struct DataSource;
 struct HTTPBase;
+struct IMediaHTTPService;
 struct LiveDataSource;
 struct M3UParser;
 struct PlaylistFetcher;
@@ -40,7 +41,8 @@
     };
     LiveSession(
             const sp<AMessage> &notify,
-            uint32_t flags = 0, bool uidValid = false, uid_t uid = 0);
+            uint32_t flags,
+            const sp<IMediaHTTPService> &httpService);
 
     enum StreamType {
         STREAMTYPE_AUDIO        = 1,
@@ -107,8 +109,7 @@
 
     sp<AMessage> mNotify;
     uint32_t mFlags;
-    bool mUIDValid;
-    uid_t mUID;
+    sp<IMediaHTTPService> mHTTPService;
 
     bool mInPreparationPhase;
 
diff --git a/media/libstagefright/httplive/PlaylistFetcher.cpp b/media/libstagefright/httplive/PlaylistFetcher.cpp
index 973b779..f095987 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.cpp
+++ b/media/libstagefright/httplive/PlaylistFetcher.cpp
@@ -47,6 +47,7 @@
 
 // static
 const int64_t PlaylistFetcher::kMinBufferedDurationUs = 10000000ll;
+const int64_t PlaylistFetcher::kMaxMonitorDelayUs = 3000000ll;
 
 PlaylistFetcher::PlaylistFetcher(
         const sp<AMessage> &notify,
@@ -61,6 +62,7 @@
       mSeqNumber(-1),
       mNumRetries(0),
       mStartup(true),
+      mPrepared(false),
       mNextPTSTimeUs(-1ll),
       mMonitorQueueGeneration(0),
       mRefreshState(INITIAL_MINIMUM_RELOAD_DELAY),
@@ -103,10 +105,16 @@
     return segmentStartUs;
 }
 
-bool PlaylistFetcher::timeToRefreshPlaylist(int64_t nowUs) const {
-    if (mPlaylist == NULL) {
+int64_t PlaylistFetcher::delayUsToRefreshPlaylist() const {
+    int64_t nowUs = ALooper::GetNowUs();
+
+    if (mPlaylist == NULL || mLastPlaylistFetchTimeUs < 0ll) {
         CHECK_EQ((int)mRefreshState, (int)INITIAL_MINIMUM_RELOAD_DELAY);
-        return true;
+        return 0ll;
+    }
+
+    if (mPlaylist->isComplete()) {
+        return (~0llu >> 1);
     }
 
     int32_t targetDurationSecs;
@@ -157,7 +165,8 @@
             break;
     }
 
-    return mLastPlaylistFetchTimeUs + minPlaylistAgeUs <= nowUs;
+    int64_t delayUs = mLastPlaylistFetchTimeUs + minPlaylistAgeUs - nowUs;
+    return delayUs > 0ll ? delayUs : 0ll;
 }
 
 status_t PlaylistFetcher::decryptBuffer(
@@ -274,7 +283,15 @@
     return OK;
 }
 
-void PlaylistFetcher::postMonitorQueue(int64_t delayUs) {
+void PlaylistFetcher::postMonitorQueue(int64_t delayUs, int64_t minDelayUs) {
+    int64_t maxDelayUs = delayUsToRefreshPlaylist();
+    if (maxDelayUs < minDelayUs) {
+        maxDelayUs = minDelayUs;
+    }
+    if (delayUs > maxDelayUs) {
+        ALOGV("Need to refresh playlist in %lld", maxDelayUs);
+        delayUs = maxDelayUs;
+    }
     sp<AMessage> msg = new AMessage(kWhatMonitorQueue, id());
     msg->setInt32("generation", mMonitorQueueGeneration);
     msg->post(delayUs);
@@ -415,6 +432,7 @@
     if (mStartTimeUs >= 0ll) {
         mSeqNumber = -1;
         mStartup = true;
+        mPrepared = false;
     }
 
     postMonitorQueue();
@@ -456,40 +474,62 @@
 
 void PlaylistFetcher::onMonitorQueue() {
     bool downloadMore = false;
+    refreshPlaylist();
 
-    status_t finalResult;
+    int32_t targetDurationSecs;
+    int64_t targetDurationUs = kMinBufferedDurationUs;
+    if (mPlaylist != NULL) {
+        CHECK(mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs));
+        targetDurationUs = targetDurationSecs * 1000000ll;
+    }
+
+    // buffer at least 3 times the target duration, or up to 10 seconds
+    int64_t durationToBufferUs = targetDurationUs * 3;
+    if (durationToBufferUs > kMinBufferedDurationUs)  {
+        durationToBufferUs = kMinBufferedDurationUs;
+    }
+
+    int64_t bufferedDurationUs = 0ll;
+    status_t finalResult = NOT_ENOUGH_DATA;
     if (mStreamTypeMask == LiveSession::STREAMTYPE_SUBTITLES) {
         sp<AnotherPacketSource> packetSource =
             mPacketSources.valueFor(LiveSession::STREAMTYPE_SUBTITLES);
 
-        int64_t bufferedDurationUs =
+        bufferedDurationUs =
                 packetSource->getBufferedDurationUs(&finalResult);
-
-        downloadMore = (bufferedDurationUs < kMinBufferedDurationUs);
         finalResult = OK;
     } else {
         bool first = true;
-        int64_t minBufferedDurationUs = 0ll;
 
         for (size_t i = 0; i < mPacketSources.size(); ++i) {
             if ((mStreamTypeMask & mPacketSources.keyAt(i)) == 0) {
                 continue;
             }
 
-            int64_t bufferedDurationUs =
+            int64_t bufferedStreamDurationUs =
                 mPacketSources.valueAt(i)->getBufferedDurationUs(&finalResult);
-
-            if (first || bufferedDurationUs < minBufferedDurationUs) {
-                minBufferedDurationUs = bufferedDurationUs;
+            if (first || bufferedStreamDurationUs < bufferedDurationUs) {
+                bufferedDurationUs = bufferedStreamDurationUs;
                 first = false;
             }
         }
+    }
+    downloadMore = (bufferedDurationUs < durationToBufferUs);
 
-        downloadMore =
-            !first && (minBufferedDurationUs < kMinBufferedDurationUs);
+    // signal start if buffered up at least the target size
+    if (!mPrepared && bufferedDurationUs > targetDurationUs && downloadMore) {
+        mPrepared = true;
+
+        ALOGV("prepared, buffered=%lld > %lld",
+                bufferedDurationUs, targetDurationUs);
+        sp<AMessage> msg = mNotify->dup();
+        msg->setInt32("what", kWhatTemporarilyDoneFetching);
+        msg->post();
     }
 
     if (finalResult == OK && downloadMore) {
+        ALOGV("monitoring, buffered=%lld < %lld",
+                bufferedDurationUs, durationToBufferUs);
         onDownloadNext();
     } else {
         // Nothing to do yet, try again in a second.
@@ -498,15 +538,17 @@
         msg->setInt32("what", kWhatTemporarilyDoneFetching);
         msg->post();
 
-        postMonitorQueue(1000000ll);
+        int64_t delayUs = mPrepared ? kMaxMonitorDelayUs : targetDurationUs / 2;
+        ALOGV("pausing for %lld, buffered=%lld > %lld",
+                delayUs, bufferedDurationUs, durationToBufferUs);
+        // :TRICKY: need to enforce minimum delay because the delay to
+        // refresh the playlist will become 0
+        postMonitorQueue(delayUs, mPrepared ? targetDurationUs * 2 : 0);
     }
 }
 
-void PlaylistFetcher::onDownloadNext() {
-    int64_t nowUs = ALooper::GetNowUs();
-
-    if (mLastPlaylistFetchTimeUs < 0ll
-            || (!mPlaylist->isComplete() && timeToRefreshPlaylist(nowUs))) {
+status_t PlaylistFetcher::refreshPlaylist() {
+    if (delayUsToRefreshPlaylist() <= 0) {
         bool unchanged;
         sp<M3UParser> playlist = mSession->fetchPlaylist(
                 mURI.c_str(), mPlaylistHash, &unchanged);
@@ -522,7 +564,7 @@
             } else {
                 ALOGE("failed to load playlist at url '%s'", mURI.c_str());
                 notifyError(ERROR_IO);
-                return;
+                return ERROR_IO;
             }
         } else {
             mRefreshState = INITIAL_MINIMUM_RELOAD_DELAY;
@@ -535,6 +577,13 @@
 
         mLastPlaylistFetchTimeUs = ALooper::GetNowUs();
     }
+    return OK;
+}
+
+void PlaylistFetcher::onDownloadNext() {
+    if (refreshPlaylist() != OK) {
+        return;
+    }
 
     int32_t firstSeqNumberInPlaylist;
     if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32(
@@ -553,12 +602,18 @@
 
         if (mPlaylist->isComplete() || mPlaylist->isEvent()) {
             mSeqNumber = getSeqNumberForTime(mStartTimeUs);
+            ALOGV("Initial sequence number for time %lld is %ld from (%ld .. %ld)",
+                    mStartTimeUs, mSeqNumber, firstSeqNumberInPlaylist,
+                    lastSeqNumberInPlaylist);
         } else {
             // If this is a live session, start 3 segments from the end.
             mSeqNumber = lastSeqNumberInPlaylist - 3;
             if (mSeqNumber < firstSeqNumberInPlaylist) {
                 mSeqNumber = firstSeqNumberInPlaylist;
             }
+            ALOGV("Initial sequence number for live event %ld from (%ld .. %ld)",
+                    mSeqNumber, firstSeqNumberInPlaylist,
+                    lastSeqNumberInPlaylist);
         }
 
         mStartTimeUs = -1ll;
@@ -570,16 +625,34 @@
             ++mNumRetries;
 
             if (mSeqNumber > lastSeqNumberInPlaylist) {
-                mLastPlaylistFetchTimeUs = -1;
-                postMonitorQueue(3000000ll);
+                // refresh after a decreasing fraction (1/2, 1/3, ...) of the
+                // playlist's total duration or 3 seconds, whichever is less
+                int32_t targetDurationSecs;
+                CHECK(mPlaylist->meta()->findInt32(
+                        "target-duration", &targetDurationSecs));
+                int64_t delayUs = mPlaylist->size() * targetDurationSecs *
+                        1000000ll / (1 + mNumRetries);
+                if (delayUs > kMaxMonitorDelayUs) {
+                    delayUs = kMaxMonitorDelayUs;
+                }
+                ALOGV("sequence number high: %ld from (%ld .. %ld), monitor in %lld (retry=%d)",
+                        mSeqNumber, firstSeqNumberInPlaylist,
+                        lastSeqNumberInPlaylist, delayUs, mNumRetries);
+                postMonitorQueue(delayUs);
                 return;
             }
 
             // we've missed the boat, let's start from the lowest sequence
             // number available and signal a discontinuity.
 
-            ALOGI("We've missed the boat, restarting playback.");
-            mSeqNumber = lastSeqNumberInPlaylist;
+            ALOGI("We've missed the boat, restarting playback."
+                  "  mStartup=%d, was looking for %d in %d-%d",
+                    mStartup, mSeqNumber, firstSeqNumberInPlaylist,
+                    lastSeqNumberInPlaylist);
+            mSeqNumber = lastSeqNumberInPlaylist - 3;
+            if (mSeqNumber < firstSeqNumberInPlaylist) {
+                mSeqNumber = firstSeqNumberInPlaylist;
+            }
             explicitDiscontinuity = true;
 
             // fall through
@@ -788,12 +861,13 @@
                     && source->dequeueAccessUnit(&accessUnit) == OK) {
                 // Note that we do NOT dequeue any discontinuities.
 
+                // for simplicity, store a reference to the format in each unit
+                sp<MetaData> format = source->getFormat();
+                if (format != NULL) {
+                    accessUnit->meta()->setObject("format", format);
+                }
                 packetSource->queueAccessUnit(accessUnit);
             }
-
-            if (packetSource->getFormat() == NULL) {
-                packetSource->setFormat(source->getFormat());
-            }
         }
 
         return OK;
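
Worked numbers for the monitor/refresh logic above, assuming a 6-second target-duration playlist (the value is illustrative): durationToBufferUs = min(3 × 6 s, 10 s) = 10 s; once prepared, the queue is re-polled after at most kMaxMonitorDelayUs = 3 s (otherwise after targetDuration / 2); and when the wanted sequence number is past the end of the playlist, the retry delay is playlistSize × targetDuration / (1 + retries), capped at 3 s. A stand-alone sketch of the two clamps:

// Illustrative only: the clamps used by onMonitorQueue() / onDownloadNext() above.
#include <cstddef>
#include <cstdint>

static int64_t durationToBufferUs(int64_t targetDurationUs) {
    const int64_t kMinBufferedDurationUs = 10000000ll;        // 10 s
    int64_t d = targetDurationUs * 3;
    return d > kMinBufferedDurationUs ? kMinBufferedDurationUs : d;
}

static int64_t seqNumberRetryDelayUs(size_t playlistSize, int32_t targetDurationSecs,
                                     int32_t numRetries) {
    const int64_t kMaxMonitorDelayUs = 3000000ll;              // 3 s
    int64_t d = playlistSize * targetDurationSecs * 1000000ll / (1 + numRetries);
    return d > kMaxMonitorDelayUs ? kMaxMonitorDelayUs : d;
}
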
diff --git a/media/libstagefright/httplive/PlaylistFetcher.h b/media/libstagefright/httplive/PlaylistFetcher.h
index 1648e02..78dea20 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.h
+++ b/media/libstagefright/httplive/PlaylistFetcher.h
@@ -79,6 +79,7 @@
     };
 
     static const int64_t kMinBufferedDurationUs;
+    static const int64_t kMaxMonitorDelayUs;
 
     sp<AMessage> mNotify;
     sp<LiveSession> mSession;
@@ -97,6 +98,7 @@
     int32_t mSeqNumber;
     int32_t mNumRetries;
     bool mStartup;
+    bool mPrepared;
     int64_t mNextPTSTimeUs;
 
     int32_t mMonitorQueueGeneration;
@@ -120,10 +122,11 @@
     status_t decryptBuffer(
             size_t playlistIndex, const sp<ABuffer> &buffer);
 
-    void postMonitorQueue(int64_t delayUs = 0);
+    void postMonitorQueue(int64_t delayUs = 0, int64_t minDelayUs = 0);
     void cancelMonitorQueue();
 
-    bool timeToRefreshPlaylist(int64_t nowUs) const;
+    int64_t delayUsToRefreshPlaylist() const;
+    status_t refreshPlaylist();
 
     // Returns the media time in us of the segment specified by seqNumber.
     // This is computed by summing the durations of all segments before it.
diff --git a/media/libstagefright/id3/ID3.cpp b/media/libstagefright/id3/ID3.cpp
index 1ec4a40..f0f203c 100644
--- a/media/libstagefright/id3/ID3.cpp
+++ b/media/libstagefright/id3/ID3.cpp
@@ -468,49 +468,6 @@
     }
 }
 
-static void convertISO8859ToString8(
-        const uint8_t *data, size_t size,
-        String8 *s) {
-    size_t utf8len = 0;
-    for (size_t i = 0; i < size; ++i) {
-        if (data[i] == '\0') {
-            size = i;
-            break;
-        } else if (data[i] < 0x80) {
-            ++utf8len;
-        } else {
-            utf8len += 2;
-        }
-    }
-
-    if (utf8len == size) {
-        // Only ASCII characters present.
-
-        s->setTo((const char *)data, size);
-        return;
-    }
-
-    char *tmp = new char[utf8len];
-    char *ptr = tmp;
-    for (size_t i = 0; i < size; ++i) {
-        if (data[i] == '\0') {
-            break;
-        } else if (data[i] < 0x80) {
-            *ptr++ = data[i];
-        } else if (data[i] < 0xc0) {
-            *ptr++ = 0xc2;
-            *ptr++ = data[i];
-        } else {
-            *ptr++ = 0xc3;
-            *ptr++ = data[i] - 64;
-        }
-    }
-
-    s->setTo(tmp, utf8len);
-
-    delete[] tmp;
-    tmp = NULL;
-}
 
 // the 2nd argument is used to get the data following the \0 in a comment field
 void ID3::Iterator::getString(String8 *id, String8 *comment) const {
@@ -543,7 +500,9 @@
             return;
         }
 
-        convertISO8859ToString8(frameData, mFrameSize, id);
+        // this is supposed to be ISO-8859-1, but pass it up as-is to the caller, who will figure
+        // out the real encoding
+        id->setTo((const char*)frameData, mFrameSize);
         return;
     }
 
@@ -561,13 +520,13 @@
     }
 
     if (encoding == 0x00) {
-        // ISO 8859-1
-        convertISO8859ToString8(frameData + 1, n, id);
+        // supposedly ISO 8859-1
+        id->setTo((const char*)frameData + 1, n);
     } else if (encoding == 0x03) {
-        // UTF-8
+        // supposedly UTF-8
         id->setTo((const char *)(frameData + 1), n);
     } else if (encoding == 0x02) {
-        // UTF-16 BE, no byte order mark.
+        // supposedly UTF-16 BE, no byte order mark.
         // API wants number of characters, not number of bytes...
         int len = n / 2;
         const char16_t *framedata = (const char16_t *) (frameData + 1);
@@ -583,7 +542,7 @@
         if (framedatacopy != NULL) {
             delete[] framedatacopy;
         }
-    } else {
+    } else if (encoding == 0x01) {
         // UCS-2
         // API wants number of characters, not number of bytes...
         int len = n / 2;
@@ -602,7 +561,27 @@
             framedata++;
             len--;
         }
-        id->setTo(framedata, len);
+
+        // check if the resulting data consists entirely of 8-bit values
+        bool eightBit = true;
+        for (int i = 0; i < len; i++) {
+            if (framedata[i] > 0xff) {
+                eightBit = false;
+                break;
+            }
+        }
+        if (eightBit) {
+            // collapse to 8 bit, then let the media scanner client figure out the real encoding
+            char *frame8 = new char[len];
+            for (int i = 0; i < len; i++) {
+                frame8[i] = framedata[i];
+            }
+            id->setTo(frame8, len);
+            delete [] frame8;
+        } else {
+            id->setTo(framedata, len);
+        }
+
         if (framedatacopy != NULL) {
             delete[] framedatacopy;
         }
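
A hedged illustration of the UCS-2 handling above: when every decoded code unit fits in 8 bits, the string is narrowed back to raw bytes so the media scanner client can guess the real encoding instead of this layer committing to one. The helper below is an illustrative stand-alone version, not the code in the change.

#include <cstddef>
#include <string>

// Sketch only: returns false (keep as UCS-2) if any code unit needs 16 bits.
static bool collapseTo8Bit(const char16_t *data, size_t len, std::string *out) {
    for (size_t i = 0; i < len; ++i) {
        if (data[i] > 0xff) {
            return false;
        }
    }
    out->assign(len, '\0');
    for (size_t i = 0; i < len; ++i) {
        (*out)[i] = static_cast<char>(data[i]);
    }
    return true;
}
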
diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h
index 271df8e..a81bbba 100644
--- a/media/libstagefright/include/AwesomePlayer.h
+++ b/media/libstagefright/include/AwesomePlayer.h
@@ -63,6 +63,7 @@
     void setUID(uid_t uid);
 
     status_t setDataSource(
+            const sp<IMediaHTTPService> &httpService,
             const char *uri,
             const KeyedVector<String8, String8> *headers = NULL);
 
@@ -159,6 +160,7 @@
     SystemTimeSource mSystemTimeSource;
     TimeSource *mTimeSource;
 
+    sp<IMediaHTTPService> mHTTPService;
     String8 mUri;
     KeyedVector<String8, String8> mUriHeaders;
 
@@ -247,6 +249,7 @@
     sp<MediaExtractor> mExtractor;
 
     status_t setDataSource_l(
+            const sp<IMediaHTTPService> &httpService,
             const char *uri,
             const KeyedVector<String8, String8> *headers = NULL);
 
diff --git a/media/libstagefright/include/ChromiumHTTPDataSource.h b/media/libstagefright/include/ChromiumHTTPDataSource.h
deleted file mode 100644
index da188dd..0000000
--- a/media/libstagefright/include/ChromiumHTTPDataSource.h
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CHROME_HTTP_DATA_SOURCE_H_
-
-#define CHROME_HTTP_DATA_SOURCE_H_
-
-#include <media/stagefright/foundation/AString.h>
-#include <utils/threads.h>
-
-#include "HTTPBase.h"
-
-namespace android {
-
-struct SfDelegate;
-
-struct ChromiumHTTPDataSource : public HTTPBase {
-    ChromiumHTTPDataSource(uint32_t flags = 0);
-
-    virtual status_t connect(
-            const char *uri,
-            const KeyedVector<String8, String8> *headers = NULL,
-            off64_t offset = 0);
-
-    virtual void disconnect();
-
-    virtual status_t initCheck() const;
-
-    virtual ssize_t readAt(off64_t offset, void *data, size_t size);
-    virtual status_t getSize(off64_t *size);
-    virtual uint32_t flags();
-
-    virtual sp<DecryptHandle> DrmInitialization(const char *mime);
-
-    virtual void getDrmInfo(sp<DecryptHandle> &handle, DrmManagerClient **client);
-
-    virtual String8 getUri();
-
-    virtual String8 getMIMEType() const;
-
-    virtual status_t reconnectAtOffset(off64_t offset);
-
-    static status_t UpdateProxyConfig(
-            const char *host, int32_t port, const char *exclusionList);
-
-protected:
-    virtual ~ChromiumHTTPDataSource();
-
-private:
-    friend struct SfDelegate;
-
-    enum State {
-        DISCONNECTED,
-        CONNECTING,
-        CONNECTED,
-        READING,
-        DISCONNECTING
-    };
-
-    const uint32_t mFlags;
-
-    mutable Mutex mLock;
-    Condition mCondition;
-
-    State mState;
-
-    SfDelegate *mDelegate;
-
-    AString mURI;
-    KeyedVector<String8, String8> mHeaders;
-
-    off64_t mCurrentOffset;
-
-    // Any connection error or the result of a read operation
-    // (for the lattter this is the number of bytes read, if successful).
-    ssize_t mIOResult;
-
-    int64_t mContentSize;
-
-    String8 mContentType;
-
-    sp<DecryptHandle> mDecryptHandle;
-    DrmManagerClient *mDrmManagerClient;
-
-    void disconnect_l();
-
-    status_t connect_l(
-            const char *uri,
-            const KeyedVector<String8, String8> *headers,
-            off64_t offset);
-
-    static void InitiateRead(
-            ChromiumHTTPDataSource *me, void *data, size_t size);
-
-    void initiateRead(void *data, size_t size);
-
-    void onConnectionEstablished(
-            int64_t contentSize, const char *contentType);
-
-    void onConnectionFailed(status_t err);
-    void onReadCompleted(ssize_t size);
-    void onDisconnectComplete();
-    void onRedirect(const char *url);
-
-    void clearDRMState_l();
-
-    DISALLOW_EVIL_CONSTRUCTORS(ChromiumHTTPDataSource);
-};
-
-}  // namespace android
-
-#endif  // CHROME_HTTP_DATA_SOURCE_H_
diff --git a/media/libstagefright/include/HTTPBase.h b/media/libstagefright/include/HTTPBase.h
index d4b7f9f..1c3cd5e 100644
--- a/media/libstagefright/include/HTTPBase.h
+++ b/media/libstagefright/include/HTTPBase.h
@@ -48,14 +48,6 @@
 
     virtual status_t setBandwidthStatCollectFreq(int32_t freqMs);
 
-    static status_t UpdateProxyConfig(
-            const char *host, int32_t port, const char *exclusionList);
-
-    void setUID(uid_t uid);
-    bool getUID(uid_t *uid) const;
-
-    static sp<HTTPBase> Create(uint32_t flags = 0);
-
     static void RegisterSocketUserTag(int sockfd, uid_t uid, uint32_t kTag);
     static void UnRegisterSocketUserTag(int sockfd);
 
@@ -87,9 +79,6 @@
     int32_t mPrevEstimatedBandWidthKbps;
     int32_t mBandWidthCollectFreqMs;
 
-    bool mUIDValid;
-    uid_t mUID;
-
     DISALLOW_EVIL_CONSTRUCTORS(HTTPBase);
 };
 
diff --git a/media/libstagefright/include/SDPLoader.h b/media/libstagefright/include/SDPLoader.h
index ca59dc0..2c4f543 100644
--- a/media/libstagefright/include/SDPLoader.h
+++ b/media/libstagefright/include/SDPLoader.h
@@ -25,6 +25,7 @@
 namespace android {
 
 struct HTTPBase;
+struct IMediaHTTPService;
 
 struct SDPLoader : public AHandler {
     enum Flags {
@@ -34,7 +35,10 @@
     enum {
         kWhatSDPLoaded = 'sdpl'
     };
-    SDPLoader(const sp<AMessage> &notify, uint32_t flags = 0, bool uidValid = false, uid_t uid = 0);
+    SDPLoader(
+            const sp<AMessage> &notify,
+            uint32_t flags,
+            const sp<IMediaHTTPService> &httpService);
 
     void load(const char* url, const KeyedVector<String8, String8> *headers);
 
@@ -55,8 +59,6 @@
     sp<AMessage> mNotify;
     const char* mUrl;
     uint32_t mFlags;
-    bool mUIDValid;
-    uid_t mUID;
     sp<ALooper> mNetLooper;
     bool mCancelled;
 
diff --git a/media/libstagefright/include/StagefrightMetadataRetriever.h b/media/libstagefright/include/StagefrightMetadataRetriever.h
index b02ed0e..6632c27 100644
--- a/media/libstagefright/include/StagefrightMetadataRetriever.h
+++ b/media/libstagefright/include/StagefrightMetadataRetriever.h
@@ -33,6 +33,7 @@
     virtual ~StagefrightMetadataRetriever();
 
     virtual status_t setDataSource(
+            const sp<IMediaHTTPService> &httpService,
             const char *url,
             const KeyedVector<String8, String8> *headers);
 
diff --git a/media/libstagefright/include/chromium_http_stub.h b/media/libstagefright/include/chromium_http_stub.h
deleted file mode 100644
index e0651a4..0000000
--- a/media/libstagefright/include/chromium_http_stub.h
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CHROMIUM_HTTP_STUB_H_
-#define CHROMIUM_HTTP_STUB_H_
-
-#include <include/HTTPBase.h>
-#include <media/stagefright/DataSource.h>
-
-namespace android {
-extern "C" {
-HTTPBase *createChromiumHTTPDataSource(uint32_t flags);
-
-status_t UpdateChromiumHTTPDataSourceProxyConfig(
-        const char *host, int32_t port, const char *exclusionList);
-
-DataSource *createDataUriSource(const char *uri);
-}
-}
-
-#endif
diff --git a/media/libstagefright/matroska/MatroskaExtractor.cpp b/media/libstagefright/matroska/MatroskaExtractor.cpp
index d260d0f..dcb1cda 100644
--- a/media/libstagefright/matroska/MatroskaExtractor.cpp
+++ b/media/libstagefright/matroska/MatroskaExtractor.cpp
@@ -716,41 +716,61 @@
     return mIsLiveStreaming;
 }
 
+static int bytesForSize(size_t size) {
+    // use at most 28 bits (4 times 7)
+    CHECK(size <= 0xfffffff);
+
+    if (size > 0x1fffff) {
+        return 4;
+    } else if (size > 0x3fff) {
+        return 3;
+    } else if (size > 0x7f) {
+        return 2;
+    }
+    return 1;
+}
+
+static void storeSize(uint8_t *data, size_t &idx, size_t size) {
+    int numBytes = bytesForSize(size);
+    idx += numBytes;
+
+    data += idx;
+    size_t next = 0;
+    while (numBytes--) {
+        *--data = (size & 0x7f) | next;
+        size >>= 7;
+        next = 0x80;
+    }
+}
+
 static void addESDSFromCodecPrivate(
         const sp<MetaData> &meta,
         bool isAudio, const void *priv, size_t privSize) {
-    static const uint8_t kStaticESDS[] = {
-        0x03, 22,
-        0x00, 0x00,     // ES_ID
-        0x00,           // streamDependenceFlag, URL_Flag, OCRstreamFlag
 
-        0x04, 17,
-        0x40,           // ObjectTypeIndication
-        0x00, 0x00, 0x00, 0x00,
-        0x00, 0x00, 0x00, 0x00,
-        0x00, 0x00, 0x00, 0x00,
-
-        0x05,
-        // CodecSpecificInfo (with size prefix) follows
-    };
-
-    // Make sure all sizes can be coded in a single byte.
-    CHECK(privSize + 22 - 2 < 128);
-    size_t esdsSize = sizeof(kStaticESDS) + privSize + 1;
+    int privSizeBytesRequired = bytesForSize(privSize);
+    int esdsSize2 = 14 + privSizeBytesRequired + privSize;
+    int esdsSize2BytesRequired = bytesForSize(esdsSize2);
+    int esdsSize1 = 4 + esdsSize2BytesRequired + esdsSize2;
+    int esdsSize1BytesRequired = bytesForSize(esdsSize1);
+    size_t esdsSize = 1 + esdsSize1BytesRequired + esdsSize1;
     uint8_t *esds = new uint8_t[esdsSize];
-    memcpy(esds, kStaticESDS, sizeof(kStaticESDS));
-    uint8_t *ptr = esds + sizeof(kStaticESDS);
-    *ptr++ = privSize;
-    memcpy(ptr, priv, privSize);
 
-    // Increment by codecPrivateSize less 2 bytes that are accounted for
-    // already in lengths of 22/17
-    esds[1] += privSize - 2;
-    esds[6] += privSize - 2;
-
-    // Set ObjectTypeIndication.
-    esds[7] = isAudio ? 0x40   // Audio ISO/IEC 14496-3
-                      : 0x20;  // Visual ISO/IEC 14496-2
+    size_t idx = 0;
+    esds[idx++] = 0x03;
+    storeSize(esds, idx, esdsSize1);
+    esds[idx++] = 0x00; // ES_ID
+    esds[idx++] = 0x00; // ES_ID
+    esds[idx++] = 0x00; // streamDependenceFlag, URL_Flag, OCRstreamFlag
+    esds[idx++] = 0x04;
+    storeSize(esds, idx, esdsSize2);
+    esds[idx++] = isAudio ? 0x40   // Audio ISO/IEC 14496-3
+                          : 0x20;  // Visual ISO/IEC 14496-2
+    for (int i = 0; i < 12; i++) {
+        esds[idx++] = 0x00;
+    }
+    esds[idx++] = 0x05;
+    storeSize(esds, idx, privSize);
+    memcpy(esds + idx, priv, privSize);
 
     meta->setData(kKeyESDS, 0, esds, esdsSize);
 
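
A worked example of the expandable-size encoding introduced by bytesForSize()/storeSize() above (the sample value 300 is arbitrary): each byte carries 7 bits, most-significant group first, and every byte except the last has its high bit set.

// Illustrative only: encode 300 the way storeSize() above does.
//   300 = 0b0000010'0101100  ->  bytes 0x82 (0x02 | 0x80 continuation), 0x2C
#include <cstddef>
#include <cstdint>
#include <cstdio>

int main() {
    size_t size = 300;
    uint8_t out[4];
    int numBytes = (size > 0x1fffff) ? 4 : (size > 0x3fff) ? 3 : (size > 0x7f) ? 2 : 1;
    for (int i = numBytes - 1; i >= 0; --i) {
        out[i] = (size & 0x7f) | (i == numBytes - 1 ? 0x00 : 0x80);
        size >>= 7;
    }
    for (int i = 0; i < numBytes; ++i) {
        printf("0x%02X ", out[i]);  // prints: 0x82 0x2C
    }
    printf("\n");
    return 0;
}
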
diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
index 3153c8b..52fb2a5 100644
--- a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
+++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
@@ -70,7 +70,27 @@
 }
 
 sp<MetaData> AnotherPacketSource::getFormat() {
-    return mFormat;
+    Mutex::Autolock autoLock(mLock);
+    if (mFormat != NULL) {
+        return mFormat;
+    }
+
+    List<sp<ABuffer> >::iterator it = mBuffers.begin();
+    while (it != mBuffers.end()) {
+        sp<ABuffer> buffer = *it;
+        int32_t discontinuity;
+        if (buffer->meta()->findInt32("discontinuity", &discontinuity)) {
+            break;
+        }
+
+        sp<RefBase> object;
+        if (buffer->meta()->findObject("format", &object)) {
+            return static_cast<MetaData*>(object.get());
+        }
+
+        ++it;
+    }
+    return NULL;
 }
 
 status_t AnotherPacketSource::dequeueAccessUnit(sp<ABuffer> *buffer) {
@@ -94,6 +114,11 @@
             return INFO_DISCONTINUITY;
         }
 
+        sp<RefBase> object;
+        if ((*buffer)->meta()->findObject("format", &object)) {
+            mFormat = static_cast<MetaData*>(object.get());
+        }
+
         return OK;
     }
 
@@ -120,17 +145,22 @@
             }
 
             return INFO_DISCONTINUITY;
-        } else {
-            int64_t timeUs;
-            CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
-
-            MediaBuffer *mediaBuffer = new MediaBuffer(buffer);
-
-            mediaBuffer->meta_data()->setInt64(kKeyTime, timeUs);
-
-            *out = mediaBuffer;
-            return OK;
         }
+
+        sp<RefBase> object;
+        if (buffer->meta()->findObject("format", &object)) {
+            mFormat = static_cast<MetaData*>(object.get());
+        }
+
+        int64_t timeUs;
+        CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+
+        MediaBuffer *mediaBuffer = new MediaBuffer(buffer);
+
+        mediaBuffer->meta_data()->setInt64(kKeyTime, timeUs);
+
+        *out = mediaBuffer;
+        return OK;
     }
 
     return mEOSResult;
diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp
index b8970ad..1e6de55 100644
--- a/media/libstagefright/omx/GraphicBufferSource.cpp
+++ b/media/libstagefright/omx/GraphicBufferSource.cpp
@@ -42,7 +42,12 @@
     mEndOfStream(false),
     mEndOfStreamSent(false),
     mRepeatAfterUs(-1ll),
+    mMaxTimestampGapUs(-1ll),
+    mPrevOriginalTimeUs(-1ll),
+    mPrevModifiedTimeUs(-1ll),
+    mSkipFramesBeforeNs(-1ll),
     mRepeatLastFrameGeneration(0),
+    mRepeatLastFrameTimestamp(-1ll),
     mLatestSubmittedBufferId(-1),
     mLatestSubmittedBufferFrameNum(0),
     mLatestSubmittedBufferUseCount(0),
@@ -299,6 +304,32 @@
     return;
 }
 
+void GraphicBufferSource::codecBufferFilled(OMX_BUFFERHEADERTYPE* header) {
+    Mutex::Autolock autoLock(mMutex);
+
+    if (mMaxTimestampGapUs > 0ll
+            && !(header->nFlags & OMX_BUFFERFLAG_CODECCONFIG)) {
+        ssize_t index = mOriginalTimeUs.indexOfKey(header->nTimeStamp);
+        if (index >= 0) {
+            ALOGV("OUT timestamp: %lld -> %lld",
+                    header->nTimeStamp, mOriginalTimeUs[index]);
+            header->nTimeStamp = mOriginalTimeUs[index];
+            mOriginalTimeUs.removeItemsAt(index);
+        } else {
+            // giving up the effort as encoder doesn't appear to preserve pts
+            ALOGW("giving up limiting timestamp gap (pts = %lld)",
+                    header->nTimeStamp);
+            mMaxTimestampGapUs = -1ll;
+        }
+        if (mOriginalTimeUs.size() > BufferQueue::NUM_BUFFER_SLOTS) {
+            // something terribly wrong must have happened, giving up...
+            ALOGE("mOriginalTimeUs has too many entries (%d)",
+                    mOriginalTimeUs.size());
+            mMaxTimestampGapUs = -1ll;
+        }
+    }
+}
+
 void GraphicBufferSource::suspend(bool suspend) {
     Mutex::Autolock autoLock(mMutex);
 
@@ -384,7 +415,18 @@
         mBufferSlot[item.mBuf] = item.mGraphicBuffer;
     }
 
-    err = submitBuffer_l(item, cbi);
+    err = UNKNOWN_ERROR;
+
+    // only submit sample if start time is unspecified, or sample
+    // is queued after the specified start time
+    if (mSkipFramesBeforeNs < 0ll || item.mTimestamp >= mSkipFramesBeforeNs) {
+        // if start time is set, offset time stamp by start time
+        if (mSkipFramesBeforeNs > 0) {
+            item.mTimestamp -= mSkipFramesBeforeNs;
+        }
+        err = submitBuffer_l(item, cbi);
+    }
+
     if (err != OK) {
         ALOGV("submitBuffer_l failed, releasing bq buf %d", item.mBuf);
         mBufferQueue->releaseBuffer(item.mBuf, item.mFrameNumber,
@@ -431,6 +473,7 @@
     BufferQueue::BufferItem item;
     item.mBuf = mLatestSubmittedBufferId;
     item.mFrameNumber = mLatestSubmittedBufferFrameNum;
+    item.mTimestamp = mRepeatLastFrameTimestamp;
 
     status_t err = submitBuffer_l(item, cbi);
 
@@ -440,6 +483,20 @@
 
     ++mLatestSubmittedBufferUseCount;
 
+    /* repeat last frame up to kRepeatLastFrameCount times.
+     * in case of static scene, a single repeat might not get rid of encoder
+     * ghosting completely, refresh a couple more times to get better quality
+     */
+    if (--mRepeatLastFrameCount > 0) {
+        mRepeatLastFrameTimestamp = item.mTimestamp + mRepeatAfterUs * 1000;
+
+        if (mReflector != NULL) {
+            sp<AMessage> msg = new AMessage(kWhatRepeatLastFrame, mReflector->id());
+            msg->setInt32("generation", ++mRepeatLastFrameGeneration);
+            msg->post(mRepeatAfterUs);
+        }
+    }
+
     return true;
 }
 
@@ -460,8 +517,11 @@
 
     mLatestSubmittedBufferId = item.mBuf;
     mLatestSubmittedBufferFrameNum = item.mFrameNumber;
+    mRepeatLastFrameTimestamp = item.mTimestamp + mRepeatAfterUs * 1000;
+
     mLatestSubmittedBufferUseCount = 1;
     mRepeatBufferDeferred = false;
+    mRepeatLastFrameCount = kRepeatLastFrameCount;
 
     if (mReflector != NULL) {
         sp<AMessage> msg = new AMessage(kWhatRepeatLastFrame, mReflector->id());
@@ -497,6 +557,39 @@
     return OK;
 }
 
+int64_t GraphicBufferSource::getTimestamp(const BufferQueue::BufferItem &item) {
+    int64_t timeUs = item.mTimestamp / 1000;
+
+    if (mMaxTimestampGapUs > 0ll) {
+        /* Cap timestamp gap between adjacent frames to specified max
+         *
+         * In the scenario of cast mirroring, encoding could be suspended for
+         * prolonged periods. Limiting the pts gap to workaround the problem
+         * where encoder's rate control logic produces huge frames after a
+         * long period of suspension.
+         */
+
+        int64_t originalTimeUs = timeUs;
+        if (mPrevOriginalTimeUs >= 0ll) {
+            if (originalTimeUs < mPrevOriginalTimeUs) {
+                // Drop the frame if it's going backward in time. Bad timestamp
+                // could disrupt encoder's rate control completely.
+                ALOGV("Dropping frame that's going backward in time");
+                return -1;
+            }
+            int64_t timestampGapUs = originalTimeUs - mPrevOriginalTimeUs;
+            timeUs = (timestampGapUs < mMaxTimestampGapUs ?
+                    timestampGapUs : mMaxTimestampGapUs) + mPrevModifiedTimeUs;
+        }
+        mPrevOriginalTimeUs = originalTimeUs;
+        mPrevModifiedTimeUs = timeUs;
+        mOriginalTimeUs.add(timeUs, originalTimeUs);
+        ALOGV("IN  timestamp: %lld -> %lld", originalTimeUs, timeUs);
+    }
+
+    return timeUs;
+}
+
 status_t GraphicBufferSource::submitBuffer_l(
         const BufferQueue::BufferItem &item, int cbi) {
     ALOGV("submitBuffer_l cbi=%d", cbi);
@@ -513,9 +606,15 @@
     memcpy(data, &type, 4);
     memcpy(data + 4, &handle, sizeof(buffer_handle_t));
 
+    int64_t timeUs = getTimestamp(item);
+    if (timeUs < 0ll) {
+        ALOGE("Dropping frame with bad timestamp");
+        return UNKNOWN_ERROR;
+    }
+
     status_t err = mNodeInstance->emptyDirectBuffer(header, 0,
             4 + sizeof(buffer_handle_t), OMX_BUFFERFLAG_ENDOFFRAME,
-            item.mTimestamp / 1000);
+            timeUs);
     if (err != OK) {
         ALOGW("WARNING: emptyDirectBuffer failed: 0x%x", err);
         codecBuffer.mGraphicBuffer = NULL;
@@ -609,6 +708,12 @@
         BufferQueue::BufferItem item;
         status_t err = mBufferQueue->acquireBuffer(&item, 0);
         if (err == OK) {
+            // If this is the first time we're seeing this buffer, add it to our
+            // slot table.
+            if (item.mGraphicBuffer != NULL) {
+                ALOGV("fillCodecBuffer_l: setting mBufferSlot %d", item.mBuf);
+                mBufferSlot[item.mBuf] = item.mGraphicBuffer;
+            }
             mBufferQueue->releaseBuffer(item.mBuf, item.mFrameNumber,
                     EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence);
         }
@@ -658,6 +763,25 @@
     return OK;
 }
 
+status_t GraphicBufferSource::setMaxTimestampGapUs(int64_t maxGapUs) {
+    Mutex::Autolock autoLock(mMutex);
+
+    if (mExecuting || maxGapUs <= 0ll) {
+        return INVALID_OPERATION;
+    }
+
+    mMaxTimestampGapUs = maxGapUs;
+
+    return OK;
+}
+
+void GraphicBufferSource::setSkipFramesBeforeUs(int64_t skipFramesBeforeUs) {
+    Mutex::Autolock autoLock(mMutex);
+
+    mSkipFramesBeforeNs =
+            (skipFramesBeforeUs > 0) ? (skipFramesBeforeUs * 1000) : -1ll;
+}
+
 void GraphicBufferSource::onMessageReceived(const sp<AMessage> &msg) {
     switch (msg->what()) {
         case kWhatRepeatLastFrame:
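
Illustrative arithmetic for the timestamp-gap cap above, assuming mMaxTimestampGapUs = 100 ms and a stream suspended for roughly 5 s (all values made up): original pts of 0, 40 ms and 5.04 s reach the encoder as 0, 40 ms and 140 ms, and codecBufferFilled() later maps 140 ms back to 5.04 s through mOriginalTimeUs.

#include <algorithm>
#include <cstdint>

// Hypothetical stand-alone version of the capping rule in getTimestamp() above.
static int64_t capTimestampUs(int64_t origUs, int64_t prevOrigUs,
                              int64_t prevModUs, int64_t maxGapUs) {
    // e.g. capTimestampUs(5040000, 40000, 40000, 100000) == 140000
    int64_t gapUs = origUs - prevOrigUs;
    return std::min(gapUs, maxGapUs) + prevModUs;
}
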
diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/GraphicBufferSource.h
index 9e5eee6..153f2a0 100644
--- a/media/libstagefright/omx/GraphicBufferSource.h
+++ b/media/libstagefright/omx/GraphicBufferSource.h
@@ -87,6 +87,10 @@
     // fill it with a new frame of data; otherwise, just mark it as available.
     void codecBufferEmptied(OMX_BUFFERHEADERTYPE* header);
 
+    // Called when omx_message::FILL_BUFFER_DONE is received. (Currently the
+    // buffer source will fix timestamp in the header if needed.)
+    void codecBufferFilled(OMX_BUFFERHEADERTYPE* header);
+
     // This is called after the last input frame has been submitted.  We
     // need to submit an empty buffer with the EOS flag set.  If we don't
     // have a codec buffer ready, we just set the mEndOfStream flag.
@@ -105,6 +109,19 @@
     // state and once this behaviour is specified it cannot be reset.
     status_t setRepeatPreviousFrameDelayUs(int64_t repeatAfterUs);
 
+    // When set, the timestamp fed to the encoder will be modified such that
+    // the gap between two adjacent frames is capped at maxGapUs. Timestamp
+    // will be restored to the original when the encoded frame is returned to
+    // the client.
+    // This is to solve a problem in certain real-time streaming case, where
+    // encoder's rate control logic produces huge frames after a long period
+    // of suspension on input.
+    status_t setMaxTimestampGapUs(int64_t maxGapUs);
+
+    // Sets the start time (in us, system time); samples before this time are
+    // dropped and not submitted to the encoder.
+    void setSkipFramesBeforeUs(int64_t startTimeUs);
+
 protected:
     // BufferQueue::ConsumerListener interface, called when a new frame of
     // data is available.  If we're executing and a codec buffer is
@@ -165,6 +182,7 @@
 
     void setLatestSubmittedBuffer_l(const BufferQueue::BufferItem &item);
     bool repeatLatestSubmittedBuffer_l();
+    int64_t getTimestamp(const BufferQueue::BufferItem &item);
 
     // Lock, covers all member variables.
     mutable Mutex mMutex;
@@ -206,13 +224,23 @@
     enum {
         kWhatRepeatLastFrame,
     };
+    enum {
+        kRepeatLastFrameCount = 10,
+    };
 
-    int64_t mRepeatAfterUs;
+    KeyedVector<int64_t, int64_t> mOriginalTimeUs;
+    int64_t mMaxTimestampGapUs;
+    int64_t mPrevOriginalTimeUs;
+    int64_t mPrevModifiedTimeUs;
+    int64_t mSkipFramesBeforeNs;
 
     sp<ALooper> mLooper;
     sp<AHandlerReflector<GraphicBufferSource> > mReflector;
 
+    int64_t mRepeatAfterUs;
     int32_t mRepeatLastFrameGeneration;
+    int64_t mRepeatLastFrameTimestamp;
+    int32_t mRepeatLastFrameCount;
 
     int mLatestSubmittedBufferId;
     uint64_t mLatestSubmittedBufferFrameNum;
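
The timestamp members added above (mMaxTimestampGapUs, mOriginalTimeUs, mPrevOriginalTimeUs, mPrevModifiedTimeUs) cap the gap between consecutive input timestamps and remember the original value so codecBufferFilled() can restore it when the encoded frame comes back. A minimal sketch of that idea, with hypothetical names and a simplified clamping rule rather than the exact GraphicBufferSource logic:

    #include <utils/KeyedVector.h>   // Android utils container, as used by the class above

    struct TimestampGapCapper {
        int64_t mMaxGapUs = -1;          // <= 0 means "no capping"
        int64_t mPrevOriginalUs = -1;
        int64_t mPrevAdjustedUs = -1;
        android::KeyedVector<int64_t, int64_t> mOriginalByAdjusted;

        // Input path: clamp the gap and remember the adjusted -> original mapping.
        int64_t onInput(int64_t origUs) {
            int64_t adjustedUs = origUs;
            if (mMaxGapUs > 0 && mPrevOriginalUs >= 0) {
                int64_t gapUs = origUs - mPrevOriginalUs;
                if (gapUs > mMaxGapUs) {
                    gapUs = mMaxGapUs;    // huge gaps after suspension get capped
                }
                adjustedUs = mPrevAdjustedUs + gapUs;
                mOriginalByAdjusted.add(adjustedUs, origUs);
            }
            mPrevOriginalUs = origUs;
            mPrevAdjustedUs = adjustedUs;
            return adjustedUs;
        }

        // Output path: restore the original timestamp if one was recorded.
        void onOutput(int64_t *timeUs) {
            ssize_t index = mOriginalByAdjusted.indexOfKey(*timeUs);
            if (index >= 0) {
                *timeUs = mOriginalByAdjusted.valueAt(index);
                mOriginalByAdjusted.removeItemsAt(index);
            }
        }
    };
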
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index 5f104fc..dba522f 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -849,6 +849,8 @@
     switch (type) {
         case IOMX::INTERNAL_OPTION_SUSPEND:
         case IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY:
+        case IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP:
+        case IOMX::INTERNAL_OPTION_START_TIME:
         {
             const sp<GraphicBufferSource> &bufferSource =
                 getGraphicBufferSource();
@@ -864,7 +866,8 @@
 
                 bool suspend = *(bool *)data;
                 bufferSource->suspend(suspend);
-            } else {
+            } else if (type ==
+                    IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY){
                 if (size != sizeof(int64_t)) {
                     return INVALID_OPERATION;
                 }
@@ -872,6 +875,23 @@
                 int64_t delayUs = *(int64_t *)data;
 
                 return bufferSource->setRepeatPreviousFrameDelayUs(delayUs);
+            } else if (type ==
+                    IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP){
+                if (size != sizeof(int64_t)) {
+                    return INVALID_OPERATION;
+                }
+
+                int64_t maxGapUs = *(int64_t *)data;
+
+                return bufferSource->setMaxTimestampGapUs(maxGapUs);
+            } else { // IOMX::INTERNAL_OPTION_START_TIME
+                if (size != sizeof(int64_t)) {
+                    return INVALID_OPERATION;
+                }
+
+                int64_t skipFramesBeforeUs = *(int64_t *)data;
+
+                bufferSource->setSkipFramesBeforeUs(skipFramesBeforeUs);
             }
 
             return OK;
@@ -883,6 +903,8 @@
 }
 
 void OMXNodeInstance::onMessage(const omx_message &msg) {
+    const sp<GraphicBufferSource>& bufferSource(getGraphicBufferSource());
+
     if (msg.type == omx_message::FILL_BUFFER_DONE) {
         OMX_BUFFERHEADERTYPE *buffer =
             static_cast<OMX_BUFFERHEADERTYPE *>(
@@ -892,10 +914,18 @@
             static_cast<BufferMeta *>(buffer->pAppPrivate);
 
         buffer_meta->CopyFromOMX(buffer);
-    } else if (msg.type == omx_message::EMPTY_BUFFER_DONE) {
-        const sp<GraphicBufferSource>& bufferSource(getGraphicBufferSource());
 
         if (bufferSource != NULL) {
+            // fix up the buffer info (especially timestamp) if needed
+            bufferSource->codecBufferFilled(buffer);
+
+            omx_message newMsg = msg;
+            newMsg.u.extended_buffer_data.timestamp = buffer->nTimeStamp;
+            mObserver->onMessage(newMsg);
+            return;
+        }
+    } else if (msg.type == omx_message::EMPTY_BUFFER_DONE) {
+        if (bufferSource != NULL) {
             // This is one of the buffers used exclusively by
             // GraphicBufferSource.
             // Don't dispatch a message back to ACodec, since it doesn't
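
Each of the int64 internal options handled above follows the same shape: check the payload size, read the value, forward it to the buffer source. A generic sketch of that pattern (the helper name and its callers are illustrative, not part of OMXNodeInstance):

    #include <cstddef>
    #include <cstdint>
    #include <cstring>
    #include <utils/Errors.h>   // android::status_t, INVALID_OPERATION

    // Validate the payload size before interpreting it as an int64 option value.
    template <typename Apply>
    android::status_t setInt64Option(const void *data, size_t size, Apply apply) {
        if (size != sizeof(int64_t)) {
            return android::INVALID_OPERATION;   // reject mismatched payloads
        }
        int64_t value;
        memcpy(&value, data, sizeof(value));     // avoid unaligned reads on the raw pointer
        return apply(value);
    }

A call site would then look roughly like: return setInt64Option(data, size, [&](int64_t v) { return bufferSource->setMaxTimestampGapUs(v); });
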
diff --git a/media/libstagefright/omx/tests/OMXHarness.cpp b/media/libstagefright/omx/tests/OMXHarness.cpp
index 44e4f9d..03725df 100644
--- a/media/libstagefright/omx/tests/OMXHarness.cpp
+++ b/media/libstagefright/omx/tests/OMXHarness.cpp
@@ -26,6 +26,7 @@
 #include <binder/ProcessState.h>
 #include <binder/IServiceManager.h>
 #include <binder/MemoryDealer.h>
+#include <media/IMediaHTTPService.h>
 #include <media/IMediaPlayerService.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALooper.h>
@@ -242,7 +243,8 @@
 };
 
 static sp<MediaExtractor> CreateExtractorFromURI(const char *uri) {
-    sp<DataSource> source = DataSource::CreateFromURI(uri);
+    sp<DataSource> source =
+        DataSource::CreateFromURI(NULL /* httpService */, uri);
 
     if (source == NULL) {
         return NULL;
diff --git a/media/libstagefright/rtsp/APacketSource.cpp b/media/libstagefright/rtsp/APacketSource.cpp
index 462c384..09f52bc 100644
--- a/media/libstagefright/rtsp/APacketSource.cpp
+++ b/media/libstagefright/rtsp/APacketSource.cpp
@@ -23,7 +23,7 @@
 #include "ARawAudioAssembler.h"
 #include "ASessionDescription.h"
 
-#include "avc_utils.h"
+#include "include/avc_utils.h"
 
 #include <ctype.h>
 
diff --git a/media/libstagefright/rtsp/ARTSPConnection.cpp b/media/libstagefright/rtsp/ARTSPConnection.cpp
index efde7a9..4054da6 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTSPConnection.cpp
@@ -33,7 +33,7 @@
 #include <openssl/md5.h>
 #include <sys/socket.h>
 
-#include "HTTPBase.h"
+#include "include/HTTPBase.h"
 
 namespace android {
 
diff --git a/media/libstagefright/rtsp/Android.mk b/media/libstagefright/rtsp/Android.mk
index e77c69c..02e44f4 100644
--- a/media/libstagefright/rtsp/Android.mk
+++ b/media/libstagefright/rtsp/Android.mk
@@ -20,7 +20,7 @@
         SDPLoader.cpp               \
 
 LOCAL_C_INCLUDES:= \
-	$(TOP)/frameworks/av/media/libstagefright/include \
+	$(TOP)/frameworks/av/media/libstagefright \
 	$(TOP)/frameworks/native/include/media/openmax \
 	$(TOP)/external/openssl/include
 
diff --git a/media/libstagefright/rtsp/SDPLoader.cpp b/media/libstagefright/rtsp/SDPLoader.cpp
index ed3fa7e..ce1e89d 100644
--- a/media/libstagefright/rtsp/SDPLoader.cpp
+++ b/media/libstagefright/rtsp/SDPLoader.cpp
@@ -18,11 +18,13 @@
 #define LOG_TAG "SDPLoader"
 #include <utils/Log.h>
 
-#include "SDPLoader.h"
+#include "include/SDPLoader.h"
 
 #include "ASessionDescription.h"
-#include "HTTPBase.h"
 
+#include <media/IMediaHTTPConnection.h>
+#include <media/IMediaHTTPService.h>
+#include <media/stagefright/MediaHTTP.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 
@@ -30,22 +32,15 @@
 
 namespace android {
 
-SDPLoader::SDPLoader(const sp<AMessage> &notify, uint32_t flags, bool uidValid, uid_t uid)
+SDPLoader::SDPLoader(
+        const sp<AMessage> &notify,
+        uint32_t flags,
+        const sp<IMediaHTTPService> &httpService)
     : mNotify(notify),
       mFlags(flags),
-      mUIDValid(uidValid),
-      mUID(uid),
       mNetLooper(new ALooper),
       mCancelled(false),
-      mHTTPDataSource(
-              HTTPBase::Create(
-                  (mFlags & kFlagIncognito)
-                    ? HTTPBase::kFlagIncognito
-                    : 0)) {
-    if (mUIDValid) {
-        mHTTPDataSource->setUID(mUID);
-    }
-
+      mHTTPDataSource(new MediaHTTP(httpService->makeHTTPConnection())) {
     mNetLooper->setName("sdp net");
     mNetLooper->start(false /* runOnCallingThread */,
                       false /* canCallJava */,
@@ -130,7 +125,7 @@
         ssize_t readSize = mHTTPDataSource->readAt(0, buffer->data(), sdpSize);
 
         if (readSize < 0) {
-            ALOGE("Failed to read SDP, error code = %ld", readSize);
+            ALOGE("Failed to read SDP, error code = %d", readSize);
             err = UNKNOWN_ERROR;
         } else {
             desc = new ASessionDescription;
diff --git a/media/libstagefright/tests/SurfaceMediaSource_test.cpp b/media/libstagefright/tests/SurfaceMediaSource_test.cpp
index 49ffcd6..ddedad7 100644
--- a/media/libstagefright/tests/SurfaceMediaSource_test.cpp
+++ b/media/libstagefright/tests/SurfaceMediaSource_test.cpp
@@ -746,9 +746,8 @@
     CHECK(fd >= 0);
 
     sp<MediaRecorder> mr = SurfaceMediaSourceGLTest::setUpMediaRecorder(fd,
-            VIDEO_SOURCE_GRALLOC_BUFFER,
-            OUTPUT_FORMAT_MPEG_4, VIDEO_ENCODER_H264, mYuvTexWidth,
-            mYuvTexHeight, 30);
+            VIDEO_SOURCE_SURFACE, OUTPUT_FORMAT_MPEG_4, VIDEO_ENCODER_H264,
+            mYuvTexWidth, mYuvTexHeight, 30);
     // get the reference to the surfacemediasource living in
     // mediaserver that is created by stagefrightrecorder
     sp<IGraphicBufferProducer> iST = mr->querySurfaceMediaSourceFromMediaServer();
@@ -880,7 +879,7 @@
     }
     CHECK(fd >= 0);
 
-    sp<MediaRecorder> mr = setUpMediaRecorder(fd, VIDEO_SOURCE_GRALLOC_BUFFER,
+    sp<MediaRecorder> mr = setUpMediaRecorder(fd, VIDEO_SOURCE_SURFACE,
             OUTPUT_FORMAT_MPEG_4, VIDEO_ENCODER_H264, mYuvTexWidth, mYuvTexHeight, 30);
 
     // get the reference to the surfacemediasource living in
@@ -923,7 +922,7 @@
     }
     CHECK(fd >= 0);
 
-    sp<MediaRecorder> mr = setUpMediaRecorder(fd, VIDEO_SOURCE_GRALLOC_BUFFER,
+    sp<MediaRecorder> mr = setUpMediaRecorder(fd, VIDEO_SOURCE_SURFACE,
             OUTPUT_FORMAT_MPEG_4, VIDEO_ENCODER_H264, mYuvTexWidth, mYuvTexHeight, 30);
 
     // get the reference to the surfacemediasource living in
diff --git a/media/libstagefright/timedtext/TimedTextDriver.cpp b/media/libstagefright/timedtext/TimedTextDriver.cpp
index 12fd7f4..05d6d02 100644
--- a/media/libstagefright/timedtext/TimedTextDriver.cpp
+++ b/media/libstagefright/timedtext/TimedTextDriver.cpp
@@ -20,6 +20,7 @@
 
 #include <binder/IPCThreadState.h>
 
+#include <media/IMediaHTTPService.h>
 #include <media/mediaplayer.h>
 #include <media/MediaPlayerInterface.h>
 #include <media/stagefright/DataSource.h>
@@ -40,7 +41,8 @@
 namespace android {
 
 TimedTextDriver::TimedTextDriver(
-        const wp<MediaPlayerBase> &listener)
+        const wp<MediaPlayerBase> &listener,
+        const sp<IMediaHTTPService> &httpService)
     : mLooper(new ALooper),
       mListener(listener),
       mState(UNINITIALIZED),
@@ -207,7 +209,7 @@
     }
 
     sp<DataSource> dataSource =
-            DataSource::CreateFromURI(uri);
+            DataSource::CreateFromURI(mHTTPService, uri);
     return createOutOfBandTextSource(trackIndex, mimeType, dataSource);
 }
 
diff --git a/media/libstagefright/timedtext/test/Android.mk b/media/libstagefright/timedtext/test/Android.mk
index a5e7ba2..9a9fde2 100644
--- a/media/libstagefright/timedtext/test/Android.mk
+++ b/media/libstagefright/timedtext/test/Android.mk
@@ -2,7 +2,6 @@
 
 # ================================================================
 # Unit tests for libstagefright_timedtext
-# See also /development/testrunner/test_defs.xml
 # ================================================================
 
 # ================================================================
@@ -18,10 +17,13 @@
 
 LOCAL_C_INCLUDES := \
     $(TOP)/external/expat/lib \
-    $(TOP)/frameworks/base/media/libstagefright/timedtext
+    $(TOP)/frameworks/av/media/libstagefright/timedtext
 
 LOCAL_SHARED_LIBRARIES := \
+    libbinder \
     libexpat \
-    libstagefright
+    libstagefright \
+    libstagefright_foundation \
+    libutils
 
 include $(BUILD_NATIVE_TEST)
diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp
index 286ea13..1a5acba 100644
--- a/media/libstagefright/wifi-display/source/PlaybackSession.cpp
+++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp
@@ -29,6 +29,7 @@
 #include <binder/IServiceManager.h>
 #include <cutils/properties.h>
 #include <media/IHDCP.h>
+#include <media/IMediaHTTPService.h>
 #include <media/stagefright/foundation/ABitReader.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
@@ -749,7 +750,8 @@
 
     mExtractor = new NuMediaExtractor;
 
-    status_t err = mExtractor->setDataSource(mMediaPath.c_str());
+    status_t err = mExtractor->setDataSource(
+            NULL /* httpService */, mMediaPath.c_str());
 
     if (err != OK) {
         return err;
diff --git a/media/libstagefright/wifi-display/source/TSPacketizer.cpp b/media/libstagefright/wifi-display/source/TSPacketizer.cpp
index edcc087..50d317a 100644
--- a/media/libstagefright/wifi-display/source/TSPacketizer.cpp
+++ b/media/libstagefright/wifi-display/source/TSPacketizer.cpp
@@ -216,7 +216,7 @@
     uint8_t *ptr = dup->data();
 
     *ptr++ = 0xff;
-    *ptr++ = 0xf1;  // b11110001, ID=0, layer=0, protection_absent=1
+    *ptr++ = 0xf9;  // b11111001, ID=1(MPEG-2), layer=0, protection_absent=1
 
     *ptr++ =
         profile << 6
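
The byte changed above is the second byte of the ADTS header; it packs the low nibble of the 0xFFF syncword, the ID bit (MPEG-4 vs. MPEG-2), the two layer bits, and protection_absent. A small sketch of how that byte is composed (the helper name is illustrative):

    #include <cstdint>

    // Byte 1 of an ADTS header: syncword low nibble, ID, layer (always 00),
    // protection_absent.
    uint8_t adtsByte1(bool mpeg2Id, bool protectionAbsent) {
        uint8_t b = 0xF0;                       // low nibble of the 0xFFF syncword
        b |= (mpeg2Id ? 1 : 0) << 3;            // ID: 0 = MPEG-4, 1 = MPEG-2
        b |= protectionAbsent ? 1 : 0;          // set when no CRC follows the header
        return b;
    }
    // adtsByte1(true,  true) == 0xF9, the new value above
    // adtsByte1(false, true) == 0xF1, the previous value
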
diff --git a/media/mediaserver/Android.mk b/media/mediaserver/Android.mk
index 1ac647a..f848054 100644
--- a/media/mediaserver/Android.mk
+++ b/media/mediaserver/Android.mk
@@ -15,6 +15,7 @@
 
 LOCAL_SHARED_LIBRARIES := \
 	libaudioflinger \
+	libcamera_metadata\
 	libcameraservice \
 	libmedialogservice \
 	libcutils \
diff --git a/media/mediaserver/main_mediaserver.cpp b/media/mediaserver/main_mediaserver.cpp
index d5207d5..a347951 100644
--- a/media/mediaserver/main_mediaserver.cpp
+++ b/media/mediaserver/main_mediaserver.cpp
@@ -37,7 +37,7 @@
 
 using namespace android;
 
-int main(int argc, char** argv)
+int main(int argc __unused, char** argv)
 {
     signal(SIGPIPE, SIG_IGN);
     char value[PROPERTY_VALUE_MAX];
diff --git a/services/audioflinger/Android.mk b/services/audioflinger/Android.mk
index 54377f1..4524d3c 100644
--- a/services/audioflinger/Android.mk
+++ b/services/audioflinger/Android.mk
@@ -23,7 +23,8 @@
     AudioPolicyService.cpp      \
     ServiceUtilities.cpp        \
     AudioResamplerCubic.cpp.arm \
-    AudioResamplerSinc.cpp.arm
+    AudioResamplerSinc.cpp.arm  \
+    AudioResamplerDyn.cpp.arm
 
 LOCAL_SRC_FILES += StateQueue.cpp
 
@@ -74,12 +75,20 @@
 include $(CLEAR_VARS)
 
 LOCAL_SRC_FILES:=               \
-	test-resample.cpp 			\
+    test-resample.cpp           \
     AudioResampler.cpp.arm      \
-	AudioResamplerCubic.cpp.arm \
-    AudioResamplerSinc.cpp.arm
+    AudioResamplerCubic.cpp.arm \
+    AudioResamplerSinc.cpp.arm  \
+    AudioResamplerDyn.cpp.arm
+
+LOCAL_C_INCLUDES := \
+    $(call include-path-for, audio-utils)
+
+LOCAL_STATIC_LIBRARIES := \
+    libsndfile
 
 LOCAL_SHARED_LIBRARIES := \
+    libaudioutils \
     libdl \
     libcutils \
     libutils \
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index e9c38e3..21bd2f4 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -104,6 +104,27 @@
 
 // ----------------------------------------------------------------------------
 
+const char *formatToString(audio_format_t format) {
+    switch(format) {
+    case AUDIO_FORMAT_PCM_SUB_8_BIT: return "pcm8";
+    case AUDIO_FORMAT_PCM_SUB_16_BIT: return "pcm16";
+    case AUDIO_FORMAT_PCM_SUB_32_BIT: return "pcm32";
+    case AUDIO_FORMAT_PCM_SUB_8_24_BIT: return "pcm8.24";
+    case AUDIO_FORMAT_PCM_SUB_24_BIT_PACKED: return "pcm24";
+    case AUDIO_FORMAT_PCM_SUB_FLOAT: return "pcmfloat";
+    case AUDIO_FORMAT_MP3: return "mp3";
+    case AUDIO_FORMAT_AMR_NB: return "amr-nb";
+    case AUDIO_FORMAT_AMR_WB: return "amr-wb";
+    case AUDIO_FORMAT_AAC: return "aac";
+    case AUDIO_FORMAT_HE_AAC_V1: return "he-aac-v1";
+    case AUDIO_FORMAT_HE_AAC_V2: return "he-aac-v2";
+    case AUDIO_FORMAT_VORBIS: return "vorbis";
+    default:
+        break;
+    }
+    return "unknown";
+}
+
 static int load_audio_interface(const char *if_name, audio_hw_device_t **dev)
 {
     const hw_module_t *mod;
@@ -162,12 +183,15 @@
         (void) property_get("af.tee", value, "0");
         teeEnabled = atoi(value);
     }
-    if (teeEnabled & 1)
+    if (teeEnabled & 1) {
         mTeeSinkInputEnabled = true;
-    if (teeEnabled & 2)
+    }
+    if (teeEnabled & 2) {
         mTeeSinkOutputEnabled = true;
-    if (teeEnabled & 4)
+    }
+    if (teeEnabled & 4) {
         mTeeSinkTrackEnabled = true;
+    }
 #endif
 }
 
@@ -210,6 +234,18 @@
         audio_hw_device_close(mAudioHwDevs.valueAt(i)->hwDevice());
         delete mAudioHwDevs.valueAt(i);
     }
+
+    // Tell media.log service about any old writers that still need to be unregistered
+    sp<IBinder> binder = defaultServiceManager()->getService(String16("media.log"));
+    if (binder != 0) {
+        sp<IMediaLogService> mediaLogService(interface_cast<IMediaLogService>(binder));
+        for (size_t count = mUnregisteredWriters.size(); count > 0; count--) {
+            sp<IMemory> iMemory(mUnregisteredWriters.top()->getIMemory());
+            mUnregisteredWriters.pop();
+            mediaLogService->unregisterWriter(iMemory);
+        }
+    }
+
 }
 
 static const char * const audio_interfaces[] = {
@@ -249,7 +285,7 @@
     return NULL;
 }
 
-void AudioFlinger::dumpClients(int fd, const Vector<String16>& args)
+void AudioFlinger::dumpClients(int fd, const Vector<String16>& args __unused)
 {
     const size_t SIZE = 256;
     char buffer[SIZE];
@@ -271,17 +307,17 @@
     }
 
     result.append("Global session refs:\n");
-    result.append(" session pid count\n");
+    result.append("  session   pid count\n");
     for (size_t i = 0; i < mAudioSessionRefs.size(); i++) {
         AudioSessionRef *r = mAudioSessionRefs[i];
-        snprintf(buffer, SIZE, " %7d %3d %3d\n", r->mSessionid, r->mPid, r->mCnt);
+        snprintf(buffer, SIZE, "  %7d %5d %5d\n", r->mSessionid, r->mPid, r->mCnt);
         result.append(buffer);
     }
     write(fd, result.string(), result.size());
 }
 
 
-void AudioFlinger::dumpInternals(int fd, const Vector<String16>& args)
+void AudioFlinger::dumpInternals(int fd, const Vector<String16>& args __unused)
 {
     const size_t SIZE = 256;
     char buffer[SIZE];
@@ -296,7 +332,7 @@
     write(fd, result.string(), result.size());
 }
 
-void AudioFlinger::dumpPermissionDenial(int fd, const Vector<String16>& args)
+void AudioFlinger::dumpPermissionDenial(int fd, const Vector<String16>& args __unused)
 {
     const size_t SIZE = 256;
     char buffer[SIZE];
@@ -403,16 +439,44 @@
 
 sp<NBLog::Writer> AudioFlinger::newWriter_l(size_t size, const char *name)
 {
+    // If there is no memory allocated for logs, return a dummy writer that does nothing
     if (mLogMemoryDealer == 0) {
         return new NBLog::Writer();
     }
-    sp<IMemory> shared = mLogMemoryDealer->allocate(NBLog::Timeline::sharedSize(size));
-    sp<NBLog::Writer> writer = new NBLog::Writer(size, shared);
     sp<IBinder> binder = defaultServiceManager()->getService(String16("media.log"));
-    if (binder != 0) {
-        interface_cast<IMediaLogService>(binder)->registerWriter(shared, size, name);
+    // Similarly if we can't contact the media.log service, also return a dummy writer
+    if (binder == 0) {
+        return new NBLog::Writer();
     }
-    return writer;
+    sp<IMediaLogService> mediaLogService(interface_cast<IMediaLogService>(binder));
+    sp<IMemory> shared = mLogMemoryDealer->allocate(NBLog::Timeline::sharedSize(size));
+    // If allocation fails, consult the vector of previously unregistered writers
+    // and garbage-collect one or more of them until an allocation succeeds
+    if (shared == 0) {
+        Mutex::Autolock _l(mUnregisteredWritersLock);
+        for (size_t count = mUnregisteredWriters.size(); count > 0; count--) {
+            {
+                // Pick the oldest stale writer to garbage-collect
+                sp<IMemory> iMemory(mUnregisteredWriters[0]->getIMemory());
+                mUnregisteredWriters.removeAt(0);
+                mediaLogService->unregisterWriter(iMemory);
+                // Now the media.log remote reference to IMemory is gone.  When our last local
+                // reference to IMemory also drops to zero at the end of this block,
+                // the IMemory destructor will deallocate the region from mLogMemoryDealer.
+            }
+            // Re-attempt the allocation
+            shared = mLogMemoryDealer->allocate(NBLog::Timeline::sharedSize(size));
+            if (shared != 0) {
+                goto success;
+            }
+        }
+        // Even after garbage-collecting all old writers, there is still not enough memory,
+        // so return a dummy writer
+        return new NBLog::Writer();
+    }
+success:
+    mediaLogService->registerWriter(shared, size, name);
+    return new NBLog::Writer(size, shared);
 }
 
 void AudioFlinger::unregisterWriter(const sp<NBLog::Writer>& writer)
@@ -424,13 +488,10 @@
     if (iMemory == 0) {
         return;
     }
-    sp<IBinder> binder = defaultServiceManager()->getService(String16("media.log"));
-    if (binder != 0) {
-        interface_cast<IMediaLogService>(binder)->unregisterWriter(iMemory);
-        // Now the media.log remote reference to IMemory is gone.
-        // When our last local reference to IMemory also drops to zero,
-        // the IMemory destructor will deallocate the region from mMemoryDealer.
-    }
+    // Rather than removing the writer immediately, append it to a queue of old writers to
+    // be garbage-collected later.  This allows us to continue to view old logs for a while.
+    Mutex::Autolock _l(mUnregisteredWritersLock);
+    mUnregisteredWriters.push(writer);
 }
 
 // IAudioFlinger interface
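
The two hunks above implement lazy unregistration: unregisterWriter() only queues the writer, and newWriter_l() garbage-collects queued writers when a fresh allocation fails. A stripped-down sketch of that allocate-with-GC loop, using generic placeholder types rather than the media.log / MemoryDealer API:

    #include <cstddef>
    #include <deque>
    #include <functional>

    using Region = void *;   // stand-in for an IMemory-backed log region

    // Allocate a region, garbage-collecting lazily-unregistered ones on failure.
    Region allocateWithGc(std::deque<Region> &staleRegions,
                          const std::function<Region(size_t)> &allocate,
                          const std::function<void(Region)> &unregister,
                          size_t size) {
        Region r = allocate(size);
        while (r == nullptr && !staleRegions.empty()) {
            // Release the remote reference to the oldest stale region, then
            // retry the allocation now that its memory can be reclaimed.
            unregister(staleRegions.front());
            staleRegions.pop_front();
            r = allocate(size);
        }
        return r;   // may still be null; the caller then falls back to a dummy writer
    }
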
@@ -441,7 +502,7 @@
         uint32_t sampleRate,
         audio_format_t format,
         audio_channel_mask_t channelMask,
-        size_t frameCount,
+        size_t *frameCount,
         IAudioFlinger::track_flags_t *flags,
         const sp<IMemory>& sharedBuffer,
         audio_io_handle_t output,
@@ -468,7 +529,13 @@
     // client is responsible for conversion of 8-bit PCM to 16-bit PCM,
     // and we don't yet support 8.24 or 32-bit PCM
     if (audio_is_linear_pcm(format) && format != AUDIO_FORMAT_PCM_16_BIT) {
-        ALOGE("createTrack() invalid format %d", format);
+        ALOGE("createTrack() invalid format %#x", format);
+        lStatus = BAD_VALUE;
+        goto Exit;
+    }
+
+    if (sharedBuffer != 0 && sharedBuffer->pointer() == NULL) {
+        ALOGE("createTrack() sharedBuffer is non-0 but has NULL pointer()");
         lStatus = BAD_VALUE;
         goto Exit;
     }
@@ -488,7 +555,7 @@
         client = registerPid_l(pid);
 
         ALOGV("createTrack() sessionId: %d", (sessionId == NULL) ? -2 : *sessionId);
-        if (sessionId != NULL && *sessionId != AUDIO_SESSION_OUTPUT_MIX) {
+        if (sessionId != NULL && *sessionId != AUDIO_SESSION_ALLOCATE) {
             // check if an effect chain with the same session ID is present on another
             // output thread and move it here.
             for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
@@ -513,10 +580,13 @@
 
         track = thread->createTrack_l(client, streamType, sampleRate, format,
                 channelMask, frameCount, sharedBuffer, lSessionId, flags, tid, clientUid, &lStatus);
+        LOG_ALWAYS_FATAL_IF((lStatus == NO_ERROR) && (track == 0));
+        // we don't abort yet if lStatus != NO_ERROR; there is still work to be done regardless
 
         // move effect chain to this output thread if an effect on same session was waiting
         // for a track to be created
         if (lStatus == NO_ERROR && effectThread != NULL) {
+            // no risk of deadlock because AudioFlinger::mLock is held
             Mutex::Autolock _dl(thread->mLock);
             Mutex::Autolock _sl(effectThread->mLock);
             moveEffectChain_l(lSessionId, effectThread, thread, true);
@@ -536,7 +606,9 @@
                 }
             }
         }
+
     }
+
     if (lStatus == NO_ERROR) {
         // s for server's pid, n for normal mixer name, f for fast index
         name = String8::format("s:%d;n:%d;f:%d", getpid_cached, track->name() - AudioMixer::TRACK0,
@@ -550,9 +622,7 @@
     }
 
 Exit:
-    if (status != NULL) {
-        *status = lStatus;
-    }
+    *status = lStatus;
     return trackHandle;
 }
 
@@ -1210,7 +1280,7 @@
 {
 }
 
-void AudioFlinger::NotificationClient::binderDied(const wp<IBinder>& who)
+void AudioFlinger::NotificationClient::binderDied(const wp<IBinder>& who __unused)
 {
     sp<NotificationClient> keep(this);
     mAudioFlinger->removeNotificationClient(mPid);
@@ -1228,7 +1298,7 @@
         uint32_t sampleRate,
         audio_format_t format,
         audio_channel_mask_t channelMask,
-        size_t frameCount,
+        size_t *frameCount,
         IAudioFlinger::track_flags_t *flags,
         pid_t tid,
         int *sessionId,
@@ -1250,7 +1320,7 @@
     }
 
     if (format != AUDIO_FORMAT_PCM_16_BIT) {
-        ALOGE("openRecord() invalid format %d", format);
+        ALOGE("openRecord() invalid format %#x", format);
         lStatus = BAD_VALUE;
         goto Exit;
     }
@@ -1276,7 +1346,7 @@
         client = registerPid_l(pid);
 
         // If no audio session id is provided, create one here
-        if (sessionId != NULL && *sessionId != AUDIO_SESSION_OUTPUT_MIX) {
+        if (sessionId != NULL && *sessionId != AUDIO_SESSION_ALLOCATE) {
             lSessionId = *sessionId;
         } else {
             lSessionId = nextUniqueId();
@@ -1291,8 +1361,9 @@
                                                   frameCount, lSessionId,
                                                   IPCThreadState::self()->getCallingUid(),
                                                   flags, tid, &lStatus);
-        LOG_ALWAYS_FATAL_IF((recordTrack != 0) != (lStatus == NO_ERROR));
+        LOG_ALWAYS_FATAL_IF((lStatus == NO_ERROR) && (recordTrack == 0));
     }
+
     if (lStatus != NO_ERROR) {
         // remove local strong reference to Client before deleting the RecordTrack so that the
         // Client destructor is called by the TrackBase destructor with mLock held
@@ -1301,14 +1372,11 @@
         goto Exit;
     }
 
-    // return to handle to client
+    // return handle to client
     recordHandle = new RecordHandle(recordTrack);
-    lStatus = NO_ERROR;
 
 Exit:
-    if (status) {
-        *status = lStatus;
-    }
+    *status = lStatus;
     return recordHandle;
 }
 
@@ -1449,18 +1517,15 @@
                                            audio_output_flags_t flags,
                                            const audio_offload_info_t *offloadInfo)
 {
-    PlaybackThread *thread = NULL;
     struct audio_config config;
+    memset(&config, 0, sizeof(config));
     config.sample_rate = (pSamplingRate != NULL) ? *pSamplingRate : 0;
     config.channel_mask = (pChannelMask != NULL) ? *pChannelMask : 0;
     config.format = (pFormat != NULL) ? *pFormat : AUDIO_FORMAT_DEFAULT;
-    if (offloadInfo) {
+    if (offloadInfo != NULL) {
         config.offload_info = *offloadInfo;
     }
 
-    audio_stream_out_t *outStream = NULL;
-    AudioHwDevice *outHwDev;
-
     ALOGV("openOutput(), module %d Device %x, SamplingRate %d, Format %#08x, Channels %x, flags %x",
               module,
               (pDevices != NULL) ? *pDevices : 0,
@@ -1469,7 +1534,7 @@
               config.channel_mask,
               flags);
     ALOGV("openOutput(), offloadInfo %p version 0x%04x",
-          offloadInfo, offloadInfo == NULL ? -1 : offloadInfo->version );
+          offloadInfo, offloadInfo == NULL ? -1 : offloadInfo->version);
 
     if (pDevices == NULL || *pDevices == 0) {
         return 0;
@@ -1477,15 +1542,17 @@
 
     Mutex::Autolock _l(mLock);
 
-    outHwDev = findSuitableHwDev_l(module, *pDevices);
-    if (outHwDev == NULL)
+    AudioHwDevice *outHwDev = findSuitableHwDev_l(module, *pDevices);
+    if (outHwDev == NULL) {
         return 0;
+    }
 
     audio_hw_device_t *hwDevHal = outHwDev->hwDevice();
     audio_io_handle_t id = nextUniqueId();
 
     mHardwareStatus = AUDIO_HW_OUTPUT_OPEN;
 
+    audio_stream_out_t *outStream = NULL;
     status_t status = hwDevHal->open_output_stream(hwDevHal,
                                           id,
                                           *pDevices,
@@ -1505,6 +1572,7 @@
     if (status == NO_ERROR && outStream != NULL) {
         AudioStreamOut *output = new AudioStreamOut(outHwDev, outStream, flags);
 
+        PlaybackThread *thread;
         if (flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) {
             thread = new OffloadThread(this, output, id, *pDevices);
             ALOGV("openOutput() created offload output: ID %d thread %p", id, thread);
@@ -1672,18 +1740,15 @@
                                           audio_format_t *pFormat,
                                           audio_channel_mask_t *pChannelMask)
 {
-    status_t status;
-    RecordThread *thread = NULL;
     struct audio_config config;
+    memset(&config, 0, sizeof(config));
     config.sample_rate = (pSamplingRate != NULL) ? *pSamplingRate : 0;
     config.channel_mask = (pChannelMask != NULL) ? *pChannelMask : 0;
     config.format = (pFormat != NULL) ? *pFormat : AUDIO_FORMAT_DEFAULT;
 
     uint32_t reqSamplingRate = config.sample_rate;
     audio_format_t reqFormat = config.format;
-    audio_channel_mask_t reqChannels = config.channel_mask;
-    audio_stream_in_t *inStream = NULL;
-    AudioHwDevice *inHwDev;
+    audio_channel_mask_t reqChannelMask = config.channel_mask;
 
     if (pDevices == NULL || *pDevices == 0) {
         return 0;
@@ -1691,16 +1756,18 @@
 
     Mutex::Autolock _l(mLock);
 
-    inHwDev = findSuitableHwDev_l(module, *pDevices);
-    if (inHwDev == NULL)
+    AudioHwDevice *inHwDev = findSuitableHwDev_l(module, *pDevices);
+    if (inHwDev == NULL) {
         return 0;
+    }
 
     audio_hw_device_t *inHwHal = inHwDev->hwDevice();
     audio_io_handle_t id = nextUniqueId();
 
-    status = inHwHal->open_input_stream(inHwHal, id, *pDevices, &config,
+    audio_stream_in_t *inStream = NULL;
+    status_t status = inHwHal->open_input_stream(inHwHal, id, *pDevices, &config,
                                         &inStream);
-    ALOGV("openInput() openInputStream returned input %p, SamplingRate %d, Format %d, Channels %x, "
+    ALOGV("openInput() openInputStream returned input %p, SamplingRate %d, Format %#x, Channels %x, "
             "status %d",
             inStream,
             config.sample_rate,
@@ -1714,10 +1781,12 @@
     if (status == BAD_VALUE &&
         reqFormat == config.format && config.format == AUDIO_FORMAT_PCM_16_BIT &&
         (config.sample_rate <= 2 * reqSamplingRate) &&
-        (popcount(config.channel_mask) <= FCC_2) && (popcount(reqChannels) <= FCC_2)) {
+        (popcount(config.channel_mask) <= FCC_2) && (popcount(reqChannelMask) <= FCC_2)) {
+        // FIXME describe the change proposed by HAL (save old values so we can log them here)
         ALOGV("openInput() reopening with proposed sampling rate and channel mask");
         inStream = NULL;
         status = inHwHal->open_input_stream(inHwHal, id, *pDevices, &config, &inStream);
+        // FIXME log this new status; HAL should not propose any further changes
     }
 
     if (status == NO_ERROR && inStream != NULL) {
@@ -1735,7 +1804,7 @@
                                         popcount(inStream->common.get_channels(&inStream->common)));
         if (!mTeeSinkInputEnabled) {
             kind = TEE_SINK_NO;
-        } else if (format == Format_Invalid) {
+        } else if (!Format_isValid(format)) {
             kind = TEE_SINK_NO;
         } else if (mRecordTeeSink == 0) {
             kind = TEE_SINK_NEW;
@@ -1776,10 +1845,10 @@
         // Start record thread
         // RecordThread requires both input and output device indication to forward to audio
         // pre processing modules
-        thread = new RecordThread(this,
+        RecordThread *thread = new RecordThread(this,
                                   input,
                                   reqSamplingRate,
-                                  reqChannels,
+                                  reqChannelMask,
                                   id,
                                   primaryOutputDevice_l(),
                                   *pDevices
@@ -1796,7 +1865,7 @@
             *pFormat = config.format;
         }
         if (pChannelMask != NULL) {
-            *pChannelMask = reqChannels;
+            *pChannelMask = reqChannelMask;
         }
 
         // notify client processes of the new input creation
@@ -1954,7 +2023,7 @@
             }
         }
         if (!found) {
-            Mutex::Autolock _l (t->mLock);
+            Mutex::Autolock _l(t->mLock);
             // remove all effects from the chain
             while (ec->mEffects.size()) {
                 sp<EffectModule> effect = ec->mEffects[0];
@@ -2249,9 +2318,7 @@
     }
 
 Exit:
-    if (status != NULL) {
-        *status = lStatus;
-    }
+    *status = lStatus;
     return handle;
 }
 
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 7320144..459d2ec 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -60,8 +60,8 @@
 
 namespace android {
 
-class audio_track_cblk_t;
-class effect_param_cblk_t;
+struct audio_track_cblk_t;
+struct effect_param_cblk_t;
 class AudioMixer;
 class AudioBuffer;
 class AudioResampler;
@@ -102,7 +102,7 @@
                                 uint32_t sampleRate,
                                 audio_format_t format,
                                 audio_channel_mask_t channelMask,
-                                size_t frameCount,
+                                size_t *pFrameCount,
                                 IAudioFlinger::track_flags_t *flags,
                                 const sp<IMemory>& sharedBuffer,
                                 audio_io_handle_t output,
@@ -110,18 +110,18 @@
                                 int *sessionId,
                                 String8& name,
                                 int clientUid,
-                                status_t *status);
+                                status_t *status /*non-NULL*/);
 
     virtual sp<IAudioRecord> openRecord(
                                 audio_io_handle_t input,
                                 uint32_t sampleRate,
                                 audio_format_t format,
                                 audio_channel_mask_t channelMask,
-                                size_t frameCount,
+                                size_t *pFrameCount,
                                 IAudioFlinger::track_flags_t *flags,
                                 pid_t tid,
                                 int *sessionId,
-                                status_t *status);
+                                status_t *status /*non-NULL*/);
 
     virtual     uint32_t    sampleRate(audio_io_handle_t output) const;
     virtual     int         channelCount(audio_io_handle_t output) const;
@@ -210,7 +210,7 @@
                         int32_t priority,
                         audio_io_handle_t io,
                         int sessionId,
-                        status_t *status,
+                        status_t *status /*non-NULL*/,
                         int *id,
                         int *enabled);
 
@@ -235,8 +235,12 @@
     sp<NBLog::Writer>   newWriter_l(size_t size, const char *name);
     void                unregisterWriter(const sp<NBLog::Writer>& writer);
 private:
-    static const size_t kLogMemorySize = 10 * 1024;
+    static const size_t kLogMemorySize = 40 * 1024;
     sp<MemoryDealer>    mLogMemoryDealer;   // == 0 when NBLog is disabled
+    // When a log writer is unregistered, it is done lazily so that media.log can continue to see it
+    // for as long as possible.  The memory is only freed when it is needed for another log writer.
+    Vector< sp<NBLog::Writer> > mUnregisteredWriters;
+    Mutex               mUnregisteredWritersLock;
 public:
 
     class SyncEvent;
@@ -499,7 +503,7 @@
     private:
         const char * const mModuleName;
         audio_hw_device_t * const mHwDevice;
-        Flags mFlags;
+        const Flags mFlags;
     };
 
     // AudioStreamOut and AudioStreamIn are immutable, so their fields are const.
@@ -509,7 +513,7 @@
     struct AudioStreamOut {
         AudioHwDevice* const audioHwDev;
         audio_stream_out_t* const stream;
-        audio_output_flags_t flags;
+        const audio_output_flags_t flags;
 
         audio_hw_device_t* hwDev() const { return audioHwDev->hwDevice(); }
 
@@ -651,6 +655,8 @@
 
 #undef INCLUDING_FROM_AUDIOFLINGER_H
 
+const char *formatToString(audio_format_t format);
+
 // ----------------------------------------------------------------------------
 
 }; // namespace android
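
The createTrack()/openRecord() signatures above change frameCount from a value to a pointer, turning it into an in/out parameter: the caller passes its request and reads back the count the server actually chose. A toy illustration of that contract (the minimum below is made up):

    #include <cstddef>

    // In/out frame count: callers pass a request, the server writes back the final value.
    void negotiateFrameCount(size_t *frameCount /* non-NULL */) {
        const size_t kMinFrames = 256;   // hypothetical server-side minimum
        if (*frameCount < kMinFrames) {
            *frameCount = kMinFrames;
        }
    }
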
diff --git a/services/audioflinger/AudioMixer.cpp b/services/audioflinger/AudioMixer.cpp
index f92421e..67d83b1 100644
--- a/services/audioflinger/AudioMixer.cpp
+++ b/services/audioflinger/AudioMixer.cpp
@@ -58,7 +58,7 @@
 status_t AudioMixer::DownmixerBufferProvider::getNextBuffer(AudioBufferProvider::Buffer *pBuffer,
         int64_t pts) {
     //ALOGV("DownmixerBufferProvider::getNextBuffer()");
-    if (this->mTrackBufferProvider != NULL) {
+    if (mTrackBufferProvider != NULL) {
         status_t res = mTrackBufferProvider->getNextBuffer(pBuffer, pts);
         if (res == OK) {
             mDownmixConfig.inputCfg.buffer.frameCount = pBuffer->frameCount;
@@ -81,7 +81,7 @@
 
 void AudioMixer::DownmixerBufferProvider::releaseBuffer(AudioBufferProvider::Buffer *pBuffer) {
     //ALOGV("DownmixerBufferProvider::releaseBuffer()");
-    if (this->mTrackBufferProvider != NULL) {
+    if (mTrackBufferProvider != NULL) {
         mTrackBufferProvider->releaseBuffer(pBuffer);
     } else {
         ALOGE("DownmixerBufferProvider::releaseBuffer() error: NULL track buffer provider");
@@ -90,9 +90,9 @@
 
 
 // ----------------------------------------------------------------------------
-bool AudioMixer::isMultichannelCapable = false;
+bool AudioMixer::sIsMultichannelCapable = false;
 
-effect_descriptor_t AudioMixer::dwnmFxDesc;
+effect_descriptor_t AudioMixer::sDwnmFxDesc;
 
 // Ensure mConfiguredNames bitmask is initialized properly on all architectures.
 // The value of 1 << x is undefined in C when x >= 32.
@@ -113,8 +113,6 @@
     // AudioMixer is not yet capable of multi-channel output beyond stereo
     ALOG_ASSERT(2 == MAX_NUM_CHANNELS, "bad MAX_NUM_CHANNELS %d", MAX_NUM_CHANNELS);
 
-    LocalClock lc;
-
     pthread_once(&sOnceControl, &sInitRoutine);
 
     mState.enabledTracks= 0;
@@ -136,27 +134,6 @@
         t++;
     }
 
-    // find multichannel downmix effect if we have to play multichannel content
-    uint32_t numEffects = 0;
-    int ret = EffectQueryNumberEffects(&numEffects);
-    if (ret != 0) {
-        ALOGE("AudioMixer() error %d querying number of effects", ret);
-        return;
-    }
-    ALOGV("EffectQueryNumberEffects() numEffects=%d", numEffects);
-
-    for (uint32_t i = 0 ; i < numEffects ; i++) {
-        if (EffectQueryEffect(i, &dwnmFxDesc) == 0) {
-            ALOGV("effect %d is called %s", i, dwnmFxDesc.name);
-            if (memcmp(&dwnmFxDesc.type, EFFECT_UIID_DOWNMIX, sizeof(effect_uuid_t)) == 0) {
-                ALOGI("found effect \"%s\" from %s",
-                        dwnmFxDesc.name, dwnmFxDesc.implementor);
-                isMultichannelCapable = true;
-                break;
-            }
-        }
-    }
-    ALOGE_IF(!isMultichannelCapable, "unable to find downmix effect");
 }
 
 AudioMixer::~AudioMixer()
@@ -229,7 +206,7 @@
 
 void AudioMixer::invalidateState(uint32_t mask)
 {
-    if (mask) {
+    if (mask != 0) {
         mState.needsChanged |= mask;
         mState.hook = process__validate;
     }
@@ -252,7 +229,7 @@
     return status;
 }
 
-void AudioMixer::unprepareTrackForDownmix(track_t* pTrack, int trackName) {
+void AudioMixer::unprepareTrackForDownmix(track_t* pTrack, int trackName __unused) {
     ALOGV("AudioMixer::unprepareTrackForDownmix(%d)", trackName);
 
     if (pTrack->downmixerBufferProvider != NULL) {
@@ -276,13 +253,13 @@
     DownmixerBufferProvider* pDbp = new DownmixerBufferProvider();
     int32_t status;
 
-    if (!isMultichannelCapable) {
+    if (!sIsMultichannelCapable) {
         ALOGE("prepareTrackForDownmix(%d) fails: mixer doesn't support multichannel content",
                 trackName);
         goto noDownmixForActiveTrack;
     }
 
-    if (EffectCreate(&dwnmFxDesc.uuid,
+    if (EffectCreate(&sDwnmFxDesc.uuid,
             pTrack->sessionId /*sessionId*/, -2 /*ioId not relevant here, using random value*/,
             &pDbp->mDownmixHandle/*pHandle*/) != 0) {
         ALOGE("prepareTrackForDownmix(%d) fails: error creating downmixer effect", trackName);
@@ -560,14 +537,14 @@
                 // Should have a way to distinguish tracks with static ratios vs. dynamic ratios.
                 if (!((value == 44100 && devSampleRate == 48000) ||
                       (value == 48000 && devSampleRate == 44100))) {
-                    quality = AudioResampler::LOW_QUALITY;
+                    quality = AudioResampler::DYN_LOW_QUALITY;
                 } else {
                     quality = AudioResampler::DEFAULT_QUALITY;
                 }
                 resampler = AudioResampler::create(
                         format,
                         // the resampler sees the number of channels after the downmixer, if any
-                        downmixerBufferProvider != NULL ? MAX_NUM_CHANNELS : channelCount,
+                        (int) (downmixerBufferProvider != NULL ? MAX_NUM_CHANNELS : channelCount),
                         devSampleRate, quality);
                 resampler->setLocalTimeFreq(sLocalTimeFreq);
             }
@@ -668,27 +645,29 @@
         countActiveTracks++;
         track_t& t = state->tracks[i];
         uint32_t n = 0;
+        // FIXME can overflow (mask is only 3 bits)
         n |= NEEDS_CHANNEL_1 + t.channelCount - 1;
-        n |= NEEDS_FORMAT_16;
-        n |= t.doesResample() ? NEEDS_RESAMPLE_ENABLED : NEEDS_RESAMPLE_DISABLED;
+        if (t.doesResample()) {
+            n |= NEEDS_RESAMPLE;
+        }
         if (t.auxLevel != 0 && t.auxBuffer != NULL) {
-            n |= NEEDS_AUX_ENABLED;
+            n |= NEEDS_AUX;
         }
 
         if (t.volumeInc[0]|t.volumeInc[1]) {
             volumeRamp = true;
         } else if (!t.doesResample() && t.volumeRL == 0) {
-            n |= NEEDS_MUTE_ENABLED;
+            n |= NEEDS_MUTE;
         }
         t.needs = n;
 
-        if ((n & NEEDS_MUTE__MASK) == NEEDS_MUTE_ENABLED) {
+        if (n & NEEDS_MUTE) {
             t.hook = track__nop;
         } else {
-            if ((n & NEEDS_AUX__MASK) == NEEDS_AUX_ENABLED) {
+            if (n & NEEDS_AUX) {
                 all16BitsStereoNoResample = false;
             }
-            if ((n & NEEDS_RESAMPLE__MASK) == NEEDS_RESAMPLE_ENABLED) {
+            if (n & NEEDS_RESAMPLE) {
                 all16BitsStereoNoResample = false;
                 resampling = true;
                 t.hook = track__genericResample;
@@ -710,7 +689,7 @@
 
     // select the processing hooks
     state->hook = process__nop;
-    if (countActiveTracks) {
+    if (countActiveTracks > 0) {
         if (resampling) {
             if (!state->outputTemp) {
                 state->outputTemp = new int32_t[MAX_NUM_CHANNELS * state->frameCount];
@@ -746,16 +725,15 @@
 
     // Now that the volume ramp has been done, set optimal state and
     // track hooks for subsequent mixer process
-    if (countActiveTracks) {
+    if (countActiveTracks > 0) {
         bool allMuted = true;
         uint32_t en = state->enabledTracks;
         while (en) {
             const int i = 31 - __builtin_clz(en);
             en &= ~(1<<i);
             track_t& t = state->tracks[i];
-            if (!t.doesResample() && t.volumeRL == 0)
-            {
-                t.needs |= NEEDS_MUTE_ENABLED;
+            if (!t.doesResample() && t.volumeRL == 0) {
+                t.needs |= NEEDS_MUTE;
                 t.hook = track__nop;
             } else {
                 allMuted = false;
@@ -806,8 +784,8 @@
     }
 }
 
-void AudioMixer::track__nop(track_t* t, int32_t* out, size_t outFrameCount, int32_t* temp,
-        int32_t* aux)
+void AudioMixer::track__nop(track_t* t __unused, int32_t* out __unused,
+        size_t outFrameCount __unused, int32_t* temp __unused, int32_t* aux __unused)
 {
 }
 
@@ -883,8 +861,8 @@
     }
 }
 
-void AudioMixer::track__16BitsStereo(track_t* t, int32_t* out, size_t frameCount, int32_t* temp,
-        int32_t* aux)
+void AudioMixer::track__16BitsStereo(track_t* t, int32_t* out, size_t frameCount,
+        int32_t* temp __unused, int32_t* aux)
 {
     const int16_t *in = static_cast<const int16_t *>(t->in);
 
@@ -974,8 +952,8 @@
     t->in = in;
 }
 
-void AudioMixer::track__16BitsMono(track_t* t, int32_t* out, size_t frameCount, int32_t* temp,
-        int32_t* aux)
+void AudioMixer::track__16BitsMono(track_t* t, int32_t* out, size_t frameCount,
+        int32_t* temp __unused, int32_t* aux)
 {
     const int16_t *in = static_cast<int16_t const *>(t->in);
 
@@ -1154,7 +1132,7 @@
                 track_t& t = state->tracks[i];
                 size_t outFrames = BLOCKSIZE;
                 int32_t *aux = NULL;
-                if (CC_UNLIKELY((t.needs & NEEDS_AUX__MASK) == NEEDS_AUX_ENABLED)) {
+                if (CC_UNLIKELY(t.needs & NEEDS_AUX)) {
                     aux = t.auxBuffer + numFrames;
                 }
                 while (outFrames) {
@@ -1166,7 +1144,7 @@
                         break;
                     }
                     size_t inFrames = (t.frameCount > outFrames)?outFrames:t.frameCount;
-                    if (inFrames) {
+                    if (inFrames > 0) {
                         t.hook(&t, outTemp + (BLOCKSIZE-outFrames)*MAX_NUM_CHANNELS, inFrames,
                                 state->resampleTemp, aux);
                         t.frameCount -= inFrames;
@@ -1242,14 +1220,14 @@
             e1 &= ~(1<<i);
             track_t& t = state->tracks[i];
             int32_t *aux = NULL;
-            if (CC_UNLIKELY((t.needs & NEEDS_AUX__MASK) == NEEDS_AUX_ENABLED)) {
+            if (CC_UNLIKELY(t.needs & NEEDS_AUX)) {
                 aux = t.auxBuffer;
             }
 
             // this is a little goofy, on the resampling case we don't
             // acquire/release the buffers because it's done by
             // the resampler.
-            if ((t.needs & NEEDS_RESAMPLE__MASK) == NEEDS_RESAMPLE_ENABLED) {
+            if (t.needs & NEEDS_RESAMPLE) {
                 t.resampler->setPTS(pts);
                 t.hook(&t, outTemp, numFrames, state->resampleTemp, aux);
             } else {
@@ -1449,8 +1427,9 @@
 int64_t AudioMixer::calculateOutputPTS(const track_t& t, int64_t basePTS,
                                        int outputFrameIndex)
 {
-    if (AudioBufferProvider::kInvalidPTS == basePTS)
+    if (AudioBufferProvider::kInvalidPTS == basePTS) {
         return AudioBufferProvider::kInvalidPTS;
+    }
 
     return basePTS + ((outputFrameIndex * sLocalTimeFreq) / t.sampleRate);
 }
@@ -1462,6 +1441,28 @@
 {
     LocalClock lc;
     sLocalTimeFreq = lc.getLocalFreq();
+
+    // find multichannel downmix effect if we have to play multichannel content
+    uint32_t numEffects = 0;
+    int ret = EffectQueryNumberEffects(&numEffects);
+    if (ret != 0) {
+        ALOGE("AudioMixer() error %d querying number of effects", ret);
+        return;
+    }
+    ALOGV("EffectQueryNumberEffects() numEffects=%d", numEffects);
+
+    for (uint32_t i = 0 ; i < numEffects ; i++) {
+        if (EffectQueryEffect(i, &sDwnmFxDesc) == 0) {
+            ALOGV("effect %d is called %s", i, sDwnmFxDesc.name);
+            if (memcmp(&sDwnmFxDesc.type, EFFECT_UIID_DOWNMIX, sizeof(effect_uuid_t)) == 0) {
+                ALOGI("found effect \"%s\" from %s",
+                        sDwnmFxDesc.name, sDwnmFxDesc.implementor);
+                sIsMultichannelCapable = true;
+                break;
+            }
+        }
+    }
+    ALOGW_IF(!sIsMultichannelCapable, "unable to find downmix effect");
 }
 
 // ----------------------------------------------------------------------------
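
The hunk above moves the downmix-effect discovery out of the AudioMixer constructor into sInitRoutine(), which is guarded by pthread_once so the effect query runs once per process no matter how many mixers are created. A minimal sketch of that pattern (the probe itself is a placeholder):

    #include <pthread.h>

    static pthread_once_t sOnceControl = PTHREAD_ONCE_INIT;
    static bool sDownmixCapable = false;   // stand-in for sIsMultichannelCapable

    static void initRoutine() {
        // Runs exactly once process-wide, even with concurrent first callers.
        sDownmixCapable = true;            // placeholder for the real effect query
    }

    struct Mixer {
        Mixer() {
            pthread_once(&sOnceControl, &initRoutine);   // cheap after the first call
        }
    };
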
diff --git a/services/audioflinger/AudioMixer.h b/services/audioflinger/AudioMixer.h
index 43aeb86..d286986 100644
--- a/services/audioflinger/AudioMixer.h
+++ b/services/audioflinger/AudioMixer.h
@@ -120,27 +120,19 @@
 private:
 
     enum {
+        // FIXME this representation permits up to 8 channels
         NEEDS_CHANNEL_COUNT__MASK   = 0x00000007,
-        NEEDS_FORMAT__MASK          = 0x000000F0,
-        NEEDS_MUTE__MASK            = 0x00000100,
-        NEEDS_RESAMPLE__MASK        = 0x00001000,
-        NEEDS_AUX__MASK             = 0x00010000,
     };
 
     enum {
-        NEEDS_CHANNEL_1             = 0x00000000,
-        NEEDS_CHANNEL_2             = 0x00000001,
+        NEEDS_CHANNEL_1             = 0x00000000,   // mono
+        NEEDS_CHANNEL_2             = 0x00000001,   // stereo
 
-        NEEDS_FORMAT_16             = 0x00000010,
+        // sample format is not explicitly specified, and is assumed to be AUDIO_FORMAT_PCM_16_BIT
 
-        NEEDS_MUTE_DISABLED         = 0x00000000,
-        NEEDS_MUTE_ENABLED          = 0x00000100,
-
-        NEEDS_RESAMPLE_DISABLED     = 0x00000000,
-        NEEDS_RESAMPLE_ENABLED      = 0x00001000,
-
-        NEEDS_AUX_DISABLED     = 0x00000000,
-        NEEDS_AUX_ENABLED      = 0x00010000,
+        NEEDS_MUTE                  = 0x00000100,
+        NEEDS_RESAMPLE              = 0x00001000,
+        NEEDS_AUX                   = 0x00010000,
     };
 
     struct state_t;
@@ -224,7 +216,7 @@
         NBLog::Writer*  mLog;
         int32_t         reserved[1];
         // FIXME allocate dynamically to save some memory when maxNumTracks < MAX_NUM_TRACKS
-        track_t         tracks[MAX_NUM_TRACKS]; __attribute__((aligned(32)));
+        track_t         tracks[MAX_NUM_TRACKS] __attribute__((aligned(32)));
     };
 
     // AudioBufferProvider that wraps a track AudioBufferProvider by a call to a downmix effect
@@ -256,9 +248,9 @@
     state_t         mState __attribute__((aligned(32)));
 
     // effect descriptor for the downmixer used by the mixer
-    static effect_descriptor_t dwnmFxDesc;
+    static effect_descriptor_t sDwnmFxDesc;
     // indicates whether a downmix effect has been found and is usable by this mixer
-    static bool                isMultichannelCapable;
+    static bool                sIsMultichannelCapable;
 
     // Call after changing either the enabled status of a track, or parameters of an enabled track.
     // OK to call more often than that, but unnecessary.
diff --git a/services/audioflinger/AudioPolicyService.cpp b/services/audioflinger/AudioPolicyService.cpp
index 646a317..9980344 100644
--- a/services/audioflinger/AudioPolicyService.cpp
+++ b/services/audioflinger/AudioPolicyService.cpp
@@ -60,7 +60,7 @@
 // ----------------------------------------------------------------------------
 
 AudioPolicyService::AudioPolicyService()
-    : BnAudioPolicyService() , mpAudioPolicyDev(NULL) , mpAudioPolicy(NULL)
+    : BnAudioPolicyService(), mpAudioPolicyDev(NULL), mpAudioPolicy(NULL)
 {
     char value[PROPERTY_VALUE_MAX];
     const struct hw_module_t *module;
@@ -77,24 +77,28 @@
     mOutputCommandThread = new AudioCommandThread(String8("ApmOutput"), this);
     /* instantiate the audio policy manager */
     rc = hw_get_module(AUDIO_POLICY_HARDWARE_MODULE_ID, &module);
-    if (rc)
+    if (rc) {
         return;
+    }
 
     rc = audio_policy_dev_open(module, &mpAudioPolicyDev);
     ALOGE_IF(rc, "couldn't open audio policy device (%s)", strerror(-rc));
-    if (rc)
+    if (rc) {
         return;
+    }
 
     rc = mpAudioPolicyDev->create_audio_policy(mpAudioPolicyDev, &aps_ops, this,
                                                &mpAudioPolicy);
     ALOGE_IF(rc, "couldn't create audio policy (%s)", strerror(-rc));
-    if (rc)
+    if (rc) {
         return;
+    }
 
     rc = mpAudioPolicy->init_check(mpAudioPolicy);
     ALOGE_IF(rc, "couldn't init_check the audio policy (%s)", strerror(-rc));
-    if (rc)
+    if (rc) {
         return;
+    }
 
     ALOGI("Loaded audio policy from %s (%s)", module->name, module->id);
 
@@ -126,10 +130,12 @@
     }
     mInputs.clear();
 
-    if (mpAudioPolicy != NULL && mpAudioPolicyDev != NULL)
+    if (mpAudioPolicy != NULL && mpAudioPolicyDev != NULL) {
         mpAudioPolicyDev->destroy_audio_policy(mpAudioPolicyDev, mpAudioPolicy);
-    if (mpAudioPolicyDev != NULL)
+    }
+    if (mpAudioPolicyDev != NULL) {
         audio_policy_dev_close(mpAudioPolicyDev);
+    }
 }
 
 status_t AudioPolicyService::setDeviceConnectionState(audio_devices_t device,
@@ -469,8 +475,9 @@
 
 audio_io_handle_t AudioPolicyService::getOutputForEffect(const effect_descriptor_t *desc)
 {
+    // FIXME change return type to status_t, and return NO_INIT here
     if (mpAudioPolicy == NULL) {
-        return NO_INIT;
+        return 0;
     }
     Mutex::Autolock _l(mLock);
     return mpAudioPolicy->get_output_for_effect(mpAudioPolicy, desc);
@@ -606,7 +613,7 @@
     return NO_ERROR;
 }
 
-status_t AudioPolicyService::dump(int fd, const Vector<String16>& args)
+status_t AudioPolicyService::dump(int fd, const Vector<String16>& args __unused)
 {
     if (!dumpAllowed()) {
         dumpPermissionDenial(fd);
@@ -1114,11 +1121,13 @@
 int AudioPolicyService::startTone(audio_policy_tone_t tone,
                                   audio_stream_type_t stream)
 {
-    if (tone != AUDIO_POLICY_TONE_IN_CALL_NOTIFICATION)
+    if (tone != AUDIO_POLICY_TONE_IN_CALL_NOTIFICATION) {
         ALOGE("startTone: illegal tone requested (%d)", tone);
-    if (stream != AUDIO_STREAM_VOICE_CALL)
+    }
+    if (stream != AUDIO_STREAM_VOICE_CALL) {
         ALOGE("startTone: illegal stream (%d) requested for tone %d", stream,
             tone);
+    }
     mTonePlaybackThread->startToneCommand(ToneGenerator::TONE_SUP_CALL_WAITING,
                                           AUDIO_STREAM_VOICE_CALL);
     return 0;
@@ -1452,7 +1461,7 @@
 extern "C" {
 
 
-static audio_module_handle_t aps_load_hw_module(void *service,
+static audio_module_handle_t aps_load_hw_module(void *service __unused,
                                              const char *name)
 {
     sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
@@ -1465,7 +1474,7 @@
 }
 
 // deprecated: replaced by aps_open_output_on_module()
-static audio_io_handle_t aps_open_output(void *service,
+static audio_io_handle_t aps_open_output(void *service __unused,
                                          audio_devices_t *pDevices,
                                          uint32_t *pSamplingRate,
                                          audio_format_t *pFormat,
@@ -1483,7 +1492,7 @@
                           pLatencyMs, flags);
 }
 
-static audio_io_handle_t aps_open_output_on_module(void *service,
+static audio_io_handle_t aps_open_output_on_module(void *service __unused,
                                                    audio_module_handle_t module,
                                                    audio_devices_t *pDevices,
                                                    uint32_t *pSamplingRate,
@@ -1502,7 +1511,7 @@
                           pLatencyMs, flags, offloadInfo);
 }
 
-static audio_io_handle_t aps_open_dup_output(void *service,
+static audio_io_handle_t aps_open_dup_output(void *service __unused,
                                                  audio_io_handle_t output1,
                                                  audio_io_handle_t output2)
 {
@@ -1514,16 +1523,17 @@
     return af->openDuplicateOutput(output1, output2);
 }
 
-static int aps_close_output(void *service, audio_io_handle_t output)
+static int aps_close_output(void *service __unused, audio_io_handle_t output)
 {
     sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
-    if (af == 0)
+    if (af == 0) {
         return PERMISSION_DENIED;
+    }
 
     return af->closeOutput(output);
 }
 
-static int aps_suspend_output(void *service, audio_io_handle_t output)
+static int aps_suspend_output(void *service __unused, audio_io_handle_t output)
 {
     sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
     if (af == 0) {
@@ -1534,7 +1544,7 @@
     return af->suspendOutput(output);
 }
 
-static int aps_restore_output(void *service, audio_io_handle_t output)
+static int aps_restore_output(void *service __unused, audio_io_handle_t output)
 {
     sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
     if (af == 0) {
@@ -1546,12 +1556,12 @@
 }
 
 // deprecated: replaced by aps_open_input_on_module(), and acoustics parameter is ignored
-static audio_io_handle_t aps_open_input(void *service,
+static audio_io_handle_t aps_open_input(void *service __unused,
                                         audio_devices_t *pDevices,
                                         uint32_t *pSamplingRate,
                                         audio_format_t *pFormat,
                                         audio_channel_mask_t *pChannelMask,
-                                        audio_in_acoustics_t acoustics)
+                                        audio_in_acoustics_t acoustics __unused)
 {
     sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
     if (af == 0) {
@@ -1562,7 +1572,7 @@
     return af->openInput((audio_module_handle_t)0, pDevices, pSamplingRate, pFormat, pChannelMask);
 }
 
-static audio_io_handle_t aps_open_input_on_module(void *service,
+static audio_io_handle_t aps_open_input_on_module(void *service __unused,
                                                   audio_module_handle_t module,
                                                   audio_devices_t *pDevices,
                                                   uint32_t *pSamplingRate,
@@ -1578,37 +1588,40 @@
     return af->openInput(module, pDevices, pSamplingRate, pFormat, pChannelMask);
 }
 
-static int aps_close_input(void *service, audio_io_handle_t input)
+static int aps_close_input(void *service __unused, audio_io_handle_t input)
 {
     sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
-    if (af == 0)
+    if (af == 0) {
         return PERMISSION_DENIED;
+    }
 
     return af->closeInput(input);
 }
 
-static int aps_set_stream_output(void *service, audio_stream_type_t stream,
+static int aps_set_stream_output(void *service __unused, audio_stream_type_t stream,
                                      audio_io_handle_t output)
 {
     sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
-    if (af == 0)
+    if (af == 0) {
         return PERMISSION_DENIED;
+    }
 
     return af->setStreamOutput(stream, output);
 }
 
-static int aps_move_effects(void *service, int session,
+static int aps_move_effects(void *service __unused, int session,
                                 audio_io_handle_t src_output,
                                 audio_io_handle_t dst_output)
 {
     sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
-    if (af == 0)
+    if (af == 0) {
         return PERMISSION_DENIED;
+    }
 
     return af->moveEffects(session, src_output, dst_output);
 }
 
-static char * aps_get_parameters(void *service, audio_io_handle_t io_handle,
+static char * aps_get_parameters(void *service __unused, audio_io_handle_t io_handle,
                                      const char *keys)
 {
     String8 result = AudioSystem::getParameters(io_handle, String8(keys));
@@ -1659,24 +1672,24 @@
 
 namespace {
     struct audio_policy_service_ops aps_ops = {
-        open_output           : aps_open_output,
-        open_duplicate_output : aps_open_dup_output,
-        close_output          : aps_close_output,
-        suspend_output        : aps_suspend_output,
-        restore_output        : aps_restore_output,
-        open_input            : aps_open_input,
-        close_input           : aps_close_input,
-        set_stream_volume     : aps_set_stream_volume,
-        set_stream_output     : aps_set_stream_output,
-        set_parameters        : aps_set_parameters,
-        get_parameters        : aps_get_parameters,
-        start_tone            : aps_start_tone,
-        stop_tone             : aps_stop_tone,
-        set_voice_volume      : aps_set_voice_volume,
-        move_effects          : aps_move_effects,
-        load_hw_module        : aps_load_hw_module,
-        open_output_on_module : aps_open_output_on_module,
-        open_input_on_module  : aps_open_input_on_module,
+        .open_output           = aps_open_output,
+        .open_duplicate_output = aps_open_dup_output,
+        .close_output          = aps_close_output,
+        .suspend_output        = aps_suspend_output,
+        .restore_output        = aps_restore_output,
+        .open_input            = aps_open_input,
+        .close_input           = aps_close_input,
+        .set_stream_volume     = aps_set_stream_volume,
+        .set_stream_output     = aps_set_stream_output,
+        .set_parameters        = aps_set_parameters,
+        .get_parameters        = aps_get_parameters,
+        .start_tone            = aps_start_tone,
+        .stop_tone             = aps_stop_tone,
+        .set_voice_volume      = aps_set_voice_volume,
+        .move_effects          = aps_move_effects,
+        .load_hw_module        = aps_load_hw_module,
+        .open_output_on_module = aps_open_output_on_module,
+        .open_input_on_module  = aps_open_input_on_module,
     };
 }; // namespace <unnamed>
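Editor's aside (not part of the patch): the hunk above replaces the GCC-only `label : value` struct initializer extension with standard designated initializers. A minimal sketch of the two styles, using an illustrative `ops` struct and stand-in functions rather than the real audio_policy_service_ops table; the `.field =` form is standard in C99 and C++20 and accepted as an extension by clang:

// Editor's sketch only; names are illustrative, not from the patch.
struct ops {
    int (*open_output)(int);
    int (*close_output)(int);
};

static int my_open(int x)  { return x; }
static int my_close(int x) { return -x; }

// GCC extension removed by the patch:
//   static ops table = { open_output : my_open, close_output : my_close };
// Designated initializers, as used after the patch:
static ops table = {
    .open_output  = my_open,
    .close_output = my_close,
};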
 
diff --git a/services/audioflinger/AudioResampler.cpp b/services/audioflinger/AudioResampler.cpp
index e5cceb1..b206116 100644
--- a/services/audioflinger/AudioResampler.cpp
+++ b/services/audioflinger/AudioResampler.cpp
@@ -25,6 +25,7 @@
 #include "AudioResampler.h"
 #include "AudioResamplerSinc.h"
 #include "AudioResamplerCubic.h"
+#include "AudioResamplerDyn.h"
 
 #ifdef __arm__
 #include <machine/cpu-features.h>
@@ -77,6 +78,9 @@
     int mX0R;
 };
 
+/*static*/
+const double AudioResampler::kPhaseMultiplier = 1L << AudioResampler::kNumPhaseBits;
+
 bool AudioResampler::qualityIsSupported(src_quality quality)
 {
     switch (quality) {
@@ -85,6 +89,9 @@
     case MED_QUALITY:
     case HIGH_QUALITY:
     case VERY_HIGH_QUALITY:
+    case DYN_LOW_QUALITY:
+    case DYN_MED_QUALITY:
+    case DYN_HIGH_QUALITY:
         return true;
     default:
         return false;
@@ -105,7 +112,7 @@
         if (*endptr == '\0') {
             defaultQuality = (src_quality) l;
             ALOGD("forcing AudioResampler quality to %d", defaultQuality);
-            if (defaultQuality < DEFAULT_QUALITY || defaultQuality > VERY_HIGH_QUALITY) {
+            if (defaultQuality < DEFAULT_QUALITY || defaultQuality > DYN_HIGH_QUALITY) {
                 defaultQuality = DEFAULT_QUALITY;
             }
         }
@@ -125,6 +132,12 @@
         return 20;
     case VERY_HIGH_QUALITY:
         return 34;
+    case DYN_LOW_QUALITY:
+        return 4;
+    case DYN_MED_QUALITY:
+        return 6;
+    case DYN_HIGH_QUALITY:
+        return 12;
     }
 }
 
@@ -148,6 +161,16 @@
         atFinalQuality = true;
     }
 
+    /* If the caller requests DEFAULT_QUALITY and the af.resampler.quality property
+     * has not been set, the target resampler quality is set to DYN_MED_QUALITY,
+     * and is allowed to "throttle" down to DYN_LOW_QUALITY if necessary, due to
+     * the estimated CPU load of having too many active resamplers
+     * (see the code below this if).
+     */
+    if (quality == DEFAULT_QUALITY) {
+        quality = DYN_MED_QUALITY;
+    }
+
     // naive implementation of CPU load throttling doesn't account for whether resampler is active
     pthread_mutex_lock(&mutex);
     for (;;) {
@@ -162,7 +185,6 @@
         // not enough CPU available for proposed quality level, so try next lowest level
         switch (quality) {
         default:
-        case DEFAULT_QUALITY:
         case LOW_QUALITY:
             atFinalQuality = true;
             break;
@@ -175,6 +197,15 @@
         case VERY_HIGH_QUALITY:
             quality = HIGH_QUALITY;
             break;
+        case DYN_LOW_QUALITY:
+            atFinalQuality = true;
+            break;
+        case DYN_MED_QUALITY:
+            quality = DYN_LOW_QUALITY;
+            break;
+        case DYN_HIGH_QUALITY:
+            quality = DYN_MED_QUALITY;
+            break;
         }
     }
     pthread_mutex_unlock(&mutex);
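Editor's sketch (not patch content) of the quality downgrade ladder the hunks above introduce: DEFAULT_QUALITY is promoted to DYN_MED_QUALITY and may throttle down one DYN step at a time, with DYN_LOW_QUALITY as the floor. The real code additionally gates each step on an estimated CPU load; the names and loop here are stand-ins.

#include <cstdio>

enum src_quality { DYN_LOW_QUALITY = 5, DYN_MED_QUALITY = 6, DYN_HIGH_QUALITY = 7 };

static src_quality throttleDown(src_quality q) {
    switch (q) {
    case DYN_HIGH_QUALITY: return DYN_MED_QUALITY;
    case DYN_MED_QUALITY:  return DYN_LOW_QUALITY;
    default:               return q;   // DYN_LOW_QUALITY is final
    }
}

int main() {
    src_quality q = DYN_HIGH_QUALITY;
    for (;;) {
        printf("quality %d\n", q);     // prints 7, 6, 5
        src_quality next = throttleDown(q);
        if (next == q) break;
        q = next;
    }
    return 0;
}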
@@ -183,7 +214,6 @@
 
     switch (quality) {
     default:
-    case DEFAULT_QUALITY:
     case LOW_QUALITY:
         ALOGV("Create linear Resampler");
         resampler = new AudioResamplerOrder1(bitDepth, inChannelCount, sampleRate);
@@ -200,6 +230,12 @@
         ALOGV("Create VERY_HIGH_QUALITY sinc Resampler = %d", quality);
         resampler = new AudioResamplerSinc(bitDepth, inChannelCount, sampleRate, quality);
         break;
+    case DYN_LOW_QUALITY:
+    case DYN_MED_QUALITY:
+    case DYN_HIGH_QUALITY:
+        ALOGV("Create dynamic Resampler = %d", quality);
+        resampler = new AudioResamplerDyn(bitDepth, inChannelCount, sampleRate, quality);
+        break;
     }
 
     // initialize resampler
@@ -339,8 +375,9 @@
             out[outputIndex++] += vl * Interp(mX0L, in[0], phaseFraction);
             out[outputIndex++] += vr * Interp(mX0R, in[1], phaseFraction);
             Advance(&inputIndex, &phaseFraction, phaseIncrement);
-            if (outputIndex == outputSampleCount)
+            if (outputIndex == outputSampleCount) {
                 break;
+            }
         }
 
         // process input samples
@@ -434,8 +471,9 @@
             out[outputIndex++] += vl * sample;
             out[outputIndex++] += vr * sample;
             Advance(&inputIndex, &phaseFraction, phaseIncrement);
-            if (outputIndex == outputSampleCount)
+            if (outputIndex == outputSampleCount) {
                 break;
+            }
         }
 
         // process input samples
@@ -514,6 +552,16 @@
             size_t &outputIndex, int32_t* out, size_t &inputIndex, int32_t vl, int32_t vr,
             uint32_t &phaseFraction, uint32_t phaseIncrement)
 {
+    (void)maxOutPt; // remove unused parameter warnings
+    (void)maxInIdx;
+    (void)outputIndex;
+    (void)out;
+    (void)inputIndex;
+    (void)vl;
+    (void)vr;
+    (void)phaseFraction;
+    (void)phaseIncrement;
+    (void)in;
 #define MO_PARAM5   "36"        // offset of parameter 5 (outputIndex)
 
     asm(
@@ -625,6 +673,16 @@
             size_t &outputIndex, int32_t* out, size_t &inputIndex, int32_t vl, int32_t vr,
             uint32_t &phaseFraction, uint32_t phaseIncrement)
 {
+    (void)maxOutPt; // remove unused parameter warnings
+    (void)maxInIdx;
+    (void)outputIndex;
+    (void)out;
+    (void)inputIndex;
+    (void)vl;
+    (void)vr;
+    (void)phaseFraction;
+    (void)phaseIncrement;
+    (void)in;
 #define ST_PARAM5    "40"     // offset of parameter 5 (outputIndex)
     asm(
         "stmfd  sp!, {r4, r5, r6, r7, r8, r9, r10, r11, r12, lr}\n"
diff --git a/services/audioflinger/AudioResampler.h b/services/audioflinger/AudioResampler.h
index 33e64ce..dc33f29 100644
--- a/services/audioflinger/AudioResampler.h
+++ b/services/audioflinger/AudioResampler.h
@@ -41,6 +41,9 @@
         MED_QUALITY=2,
         HIGH_QUALITY=3,
         VERY_HIGH_QUALITY=4,
+        DYN_LOW_QUALITY=5,
+        DYN_MED_QUALITY=6,
+        DYN_HIGH_QUALITY=7,
     };
 
     static AudioResampler* create(int bitDepth, int inChannelCount,
@@ -81,7 +84,7 @@
     static const uint32_t kPhaseMask = (1LU<<kNumPhaseBits)-1;
 
     // multiplier to calculate fixed point phase increment
-    static const double kPhaseMultiplier = 1L << kNumPhaseBits;
+    static const double kPhaseMultiplier;
 
     AudioResampler(int bitDepth, int inChannelCount, int32_t sampleRate, src_quality quality);
 
diff --git a/services/audioflinger/AudioResamplerCubic.cpp b/services/audioflinger/AudioResamplerCubic.cpp
index 18e59e9..1f9714b 100644
--- a/services/audioflinger/AudioResamplerCubic.cpp
+++ b/services/audioflinger/AudioResamplerCubic.cpp
@@ -66,8 +66,9 @@
     if (mBuffer.frameCount == 0) {
         mBuffer.frameCount = inFrameCount;
         provider->getNextBuffer(&mBuffer, mPTS);
-        if (mBuffer.raw == NULL)
+        if (mBuffer.raw == NULL) {
             return;
+        }
         // ALOGW("New buffer: offset=%p, frames=%dn", mBuffer.raw, mBuffer.frameCount);
     }
     int16_t *in = mBuffer.i16;
@@ -97,8 +98,9 @@
                 mBuffer.frameCount = inFrameCount;
                 provider->getNextBuffer(&mBuffer,
                                         calculateOutputPTS(outputIndex / 2));
-                if (mBuffer.raw == NULL)
+                if (mBuffer.raw == NULL) {
                     goto save_state;  // ugly, but efficient
+                }
                 in = mBuffer.i16;
                 // ALOGW("New buffer: offset=%p, frames=%d", mBuffer.raw, mBuffer.frameCount);
             }
@@ -132,8 +134,9 @@
     if (mBuffer.frameCount == 0) {
         mBuffer.frameCount = inFrameCount;
         provider->getNextBuffer(&mBuffer, mPTS);
-        if (mBuffer.raw == NULL)
+        if (mBuffer.raw == NULL) {
             return;
+        }
         // ALOGW("New buffer: offset=%p, frames=%d", mBuffer.raw, mBuffer.frameCount);
     }
     int16_t *in = mBuffer.i16;
@@ -163,8 +166,9 @@
                 mBuffer.frameCount = inFrameCount;
                 provider->getNextBuffer(&mBuffer,
                                         calculateOutputPTS(outputIndex / 2));
-                if (mBuffer.raw == NULL)
+                if (mBuffer.raw == NULL) {
                     goto save_state;  // ugly, but efficient
+                }
                 // ALOGW("New buffer: offset=%p, frames=%dn", mBuffer.raw, mBuffer.frameCount);
                 in = mBuffer.i16;
             }
diff --git a/services/audioflinger/AudioResamplerDyn.cpp b/services/audioflinger/AudioResamplerDyn.cpp
new file mode 100644
index 0000000..cd67df5
--- /dev/null
+++ b/services/audioflinger/AudioResamplerDyn.cpp
@@ -0,0 +1,547 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AudioResamplerDyn"
+//#define LOG_NDEBUG 0
+
+#include <malloc.h>
+#include <string.h>
+#include <stdlib.h>
+#include <dlfcn.h>
+#include <math.h>
+
+#include <cutils/compiler.h>
+#include <cutils/properties.h>
+#include <utils/Log.h>
+
+#include "AudioResamplerFirOps.h" // USE_NEON and USE_INLINE_ASSEMBLY defined here
+#include "AudioResamplerFirProcess.h"
+#include "AudioResamplerFirProcessNeon.h"
+#include "AudioResamplerFirGen.h" // requires math.h
+#include "AudioResamplerDyn.h"
+
+//#define DEBUG_RESAMPLER
+
+namespace android {
+
+// generate a unique resample type compile-time constant (constexpr)
+#define RESAMPLETYPE(CHANNELS, LOCKED, STRIDE, COEFTYPE) \
+    ((((CHANNELS)-1)&1) | !!(LOCKED)<<1 | (COEFTYPE)<<2 \
+    | ((STRIDE)==8 ? 1 : (STRIDE)==16 ? 2 : 0)<<3)
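A quick standalone check (editor's addition, not part of the patch) of the bit layout RESAMPLETYPE produces: bit 0 encodes channels-1, bit 1 the locked flag, bit 2 the coefficient type, and bits 3-4 a stride code (1 for stride 8, 2 for stride 16).

#include <cstdio>

#define RESAMPLETYPE(CHANNELS, LOCKED, STRIDE, COEFTYPE) \
    ((((CHANNELS)-1)&1) | !!(LOCKED)<<1 | (COEFTYPE)<<2 \
    | ((STRIDE)==8 ? 1 : (STRIDE)==16 ? 2 : 0)<<3)

int main() {
    printf("%d\n", RESAMPLETYPE(1, false, 16, 0)); // 16: mono, interpolated, stride 16, S16
    printf("%d\n", RESAMPLETYPE(2, true, 16, 1));  // 23: stereo, locked, stride 16, S32
    return 0;
}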
+
+/*
+ * InBuffer is a type agnostic input buffer.
+ *
+ * Layout of the state buffer for halfNumCoefs=8.
+ *
+ * [rrrrrrppppppppnnnnnnnnrrrrrrrrrrrrrrrrrrr.... rrrrrrr]
+ *  S            I                                R
+ *
+ * S = mState
+ * I = mImpulse
+ * R = mRingFull
+ * p = past samples, convoluted with the (p)ositive side of sinc()
+ * n = future samples, convoluted with the (n)egative side of sinc()
+ * r = extra space for implementing the ring buffer
+ */
+
+template<typename TI>
+AudioResamplerDyn::InBuffer<TI>::InBuffer()
+    : mState(NULL), mImpulse(NULL), mRingFull(NULL), mStateSize(0) {
+}
+
+template<typename TI>
+AudioResamplerDyn::InBuffer<TI>::~InBuffer() {
+    init();
+}
+
+template<typename TI>
+void AudioResamplerDyn::InBuffer<TI>::init() {
+    free(mState);
+    mState = NULL;
+    mImpulse = NULL;
+    mRingFull = NULL;
+    mStateSize = 0;
+}
+
+// resizes the state buffer to accommodate the appropriate filter length
+template<typename TI>
+void AudioResamplerDyn::InBuffer<TI>::resize(int CHANNELS, int halfNumCoefs) {
+    // calculate desired state size
+    int stateSize = halfNumCoefs * CHANNELS * 2
+            * kStateSizeMultipleOfFilterLength;
+
+    // check if buffer needs resizing
+    if (mState
+            && stateSize == mStateSize
+            && mRingFull-mState == mStateSize-halfNumCoefs*CHANNELS) {
+        return;
+    }
+
+    // create new buffer
+    TI* state = (int16_t*)memalign(32, stateSize*sizeof(*state));
+    memset(state, 0, stateSize*sizeof(*state));
+
+    // attempt to preserve state
+    if (mState) {
+        TI* srcLo = mImpulse - halfNumCoefs*CHANNELS;
+        TI* srcHi = mImpulse + halfNumCoefs*CHANNELS;
+        TI* dst = state;
+
+        if (srcLo < mState) {
+            dst += mState-srcLo;
+            srcLo = mState;
+        }
+        if (srcHi > mState + mStateSize) {
+            srcHi = mState + mStateSize;
+        }
+        memcpy(dst, srcLo, (srcHi - srcLo) * sizeof(*srcLo));
+        free(mState);
+    }
+
+    // set class member vars
+    mState = state;
+    mStateSize = stateSize;
+    mImpulse = mState + halfNumCoefs*CHANNELS; // actually one sample greater than needed
+    mRingFull = mState + mStateSize - halfNumCoefs*CHANNELS;
+}
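Editor's sketch of the pointer arithmetic resize() performs, for halfNumCoefs = 8 and 2 channels with the multiple of 4 used by kStateSizeMultipleOfFilterLength above; it only reproduces the sizes, not the real InBuffer.

#include <cstdio>

int main() {
    const int halfNumCoefs = 8, channels = 2, multiple = 4;
    const int stateSize      = halfNumCoefs * channels * 2 * multiple;  // 128 samples
    const int impulseOffset  = halfNumCoefs * channels;                 // 16: start of usable window
    const int ringFullOffset = stateSize - halfNumCoefs * channels;     // 112: wrap point
    printf("stateSize=%d impulse=%d ringFull=%d\n", stateSize, impulseOffset, ringFullOffset);
    return 0;
}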
+
+// copy the input data into the head (impulse+halfNumCoefs) of the buffer.
+template<typename TI>
+template<int CHANNELS>
+void AudioResamplerDyn::InBuffer<TI>::readAgain(TI*& impulse, const int halfNumCoefs,
+        const TI* const in, const size_t inputIndex) {
+    int16_t* head = impulse + halfNumCoefs*CHANNELS;
+    for (size_t i=0 ; i<CHANNELS ; i++) {
+        head[i] = in[inputIndex*CHANNELS + i];
+    }
+}
+
+// advance the impulse pointer, and load new data into the head (impulse+halfNumCoefs)
+template<typename TI>
+template<int CHANNELS>
+void AudioResamplerDyn::InBuffer<TI>::readAdvance(TI*& impulse, const int halfNumCoefs,
+        const TI* const in, const size_t inputIndex) {
+    impulse += CHANNELS;
+
+    if (CC_UNLIKELY(impulse >= mRingFull)) {
+        const size_t shiftDown = mRingFull - mState - halfNumCoefs*CHANNELS;
+        memcpy(mState, mState+shiftDown, halfNumCoefs*CHANNELS*2*sizeof(TI));
+        impulse -= shiftDown;
+    }
+    readAgain<CHANNELS>(impulse, halfNumCoefs, in, inputIndex);
+}
+
+void AudioResamplerDyn::Constants::set(
+        int L, int halfNumCoefs, int inSampleRate, int outSampleRate)
+{
+    int bits = 0;
+    int lscale = inSampleRate/outSampleRate < 2 ? L - 1 :
+            static_cast<int>(static_cast<uint64_t>(L)*inSampleRate/outSampleRate);
+    for (int i=lscale; i; ++bits, i>>=1)
+        ;
+    mL = L;
+    mShift = kNumPhaseBits - bits;
+    mHalfNumCoefs = halfNumCoefs;
+}
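Editor's sketch reproducing the shift computation above for a 44.1 kHz to 48 kHz filter with L = 128, assuming kNumPhaseBits is 30 as declared in AudioResampler.h (an assumption; check the header).

#include <cstdio>
#include <cstdint>

int main() {
    const int kNumPhaseBits = 30;             // assumed value from AudioResampler.h
    const int L = 128;                        // number of polyphases
    const int inSampleRate = 44100, outSampleRate = 48000;

    int lscale = inSampleRate / outSampleRate < 2 ? L - 1 :
            static_cast<int>(static_cast<uint64_t>(L) * inSampleRate / outSampleRate);
    int bits = 0;
    for (int i = lscale; i; ++bits, i >>= 1)
        ;
    printf("lscale=%d bits=%d shift=%d\n", lscale, bits, kNumPhaseBits - bits);
    // prints lscale=127 bits=7 shift=23
    return 0;
}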
+
+AudioResamplerDyn::AudioResamplerDyn(int bitDepth,
+        int inChannelCount, int32_t sampleRate, src_quality quality)
+    : AudioResampler(bitDepth, inChannelCount, sampleRate, quality),
+    mResampleType(0), mFilterSampleRate(0), mFilterQuality(DEFAULT_QUALITY),
+    mCoefBuffer(NULL)
+{
+    mVolumeSimd[0] = mVolumeSimd[1] = 0;
+    mConstants.set(128, 8, mSampleRate, mSampleRate); // TODO: set better
+}
+
+AudioResamplerDyn::~AudioResamplerDyn() {
+    free(mCoefBuffer);
+}
+
+void AudioResamplerDyn::init() {
+    mFilterSampleRate = 0; // always trigger new filter generation
+    mInBuffer.init();
+}
+
+void AudioResamplerDyn::setVolume(int16_t left, int16_t right) {
+    AudioResampler::setVolume(left, right);
+    mVolumeSimd[0] = static_cast<int32_t>(left)<<16;
+    mVolumeSimd[1] = static_cast<int32_t>(right)<<16;
+}
+
+template <typename T> T max(T a, T b) {return a > b ? a : b;}
+
+template <typename T> T absdiff(T a, T b) {return a > b ? a - b : b - a;}
+
+template<typename T>
+void AudioResamplerDyn::createKaiserFir(Constants &c, double stopBandAtten,
+        int inSampleRate, int outSampleRate, double tbwCheat) {
+    T* buf = reinterpret_cast<T*>(memalign(32, (c.mL+1)*c.mHalfNumCoefs*sizeof(T)));
+    static const double atten = 0.9998;   // to avoid ripple overflow
+    double fcr;
+    double tbw = firKaiserTbw(c.mHalfNumCoefs, stopBandAtten);
+
+    if (inSampleRate < outSampleRate) { // upsample
+        fcr = max(0.5*tbwCheat - tbw/2, tbw/2);
+    } else { // downsample
+        fcr = max(0.5*tbwCheat*outSampleRate/inSampleRate - tbw/2, tbw/2);
+    }
+    // create and set filter
+    firKaiserGen(buf, c.mL, c.mHalfNumCoefs, stopBandAtten, fcr, atten);
+    c.setBuf(buf);
+    if (mCoefBuffer) {
+        free(mCoefBuffer);
+    }
+    mCoefBuffer = buf;
+#ifdef DEBUG_RESAMPLER
+    // print basic filter stats
+    printf("L:%d  hnc:%d  stopBandAtten:%lf  fcr:%lf  atten:%lf  tbw:%lf\n",
+            c.mL, c.mHalfNumCoefs, stopBandAtten, fcr, atten, tbw);
+    // test the filter and report results
+    double fp = (fcr - tbw/2)/c.mL;
+    double fs = (fcr + tbw/2)/c.mL;
+    double passMin, passMax, passRipple;
+    double stopMax, stopRipple;
+    testFir(buf, c.mL, c.mHalfNumCoefs, fp, fs, /*passSteps*/ 1000, /*stopSteps*/ 100000,
+            passMin, passMax, passRipple, stopMax, stopRipple);
+    printf("passband(%lf, %lf): %.8lf %.8lf %.8lf\n", 0., fp, passMin, passMax, passRipple);
+    printf("stopband(%lf, %lf): %.8lf %.3lf\n", fs, 0.5, stopMax, stopRipple);
+#endif
+}
+
+// recursive gcd. Using objdump, it appears the tail recursion is converted to a while loop.
+static int gcd(int n, int m) {
+    if (m == 0) {
+        return n;
+    }
+    return gcd(m, n % m);
+}
+
+static bool isClose(int32_t newSampleRate, int32_t prevSampleRate,
+        int32_t filterSampleRate, int32_t outSampleRate) {
+
+    // different upsampling ratios do not need a filter change.
+    if (filterSampleRate != 0
+            && filterSampleRate < outSampleRate
+            && newSampleRate < outSampleRate)
+        return true;
+
+    // check design criteria again if downsampling is detected.
+    int pdiff = absdiff(newSampleRate, prevSampleRate);
+    int adiff = absdiff(newSampleRate, filterSampleRate);
+
+    // allow up to 6% relative change increments.
+    // allow up to 12% absolute change increments (from filter design)
+    return pdiff < prevSampleRate>>4 && adiff < filterSampleRate>>3;
+}
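Editor's sketch of the thresholds above: the >>4 check allows roughly a 6% step relative to the previous rate and the >>3 check roughly 12% relative to the rate the filter was designed for, so a small change such as 44.1 kHz to 46 kHz keeps the existing filter. Values here are illustrative, not from the patch.

#include <cstdio>
#include <cstdlib>

int main() {
    const int prevSampleRate = 44100, filterSampleRate = 44100, newSampleRate = 46000;
    int pdiff = abs(newSampleRate - prevSampleRate);    // 1900
    int adiff = abs(newSampleRate - filterSampleRate);  // 1900
    bool close = pdiff < (prevSampleRate >> 4)           // 1900 < 2756
              && adiff < (filterSampleRate >> 3);        // 1900 < 5512
    printf("close=%d\n", close);                         // prints close=1: keep old filter
    return 0;
}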
+
+void AudioResamplerDyn::setSampleRate(int32_t inSampleRate) {
+    if (mInSampleRate == inSampleRate) {
+        return;
+    }
+    int32_t oldSampleRate = mInSampleRate;
+    int32_t oldHalfNumCoefs = mConstants.mHalfNumCoefs;
+    uint32_t oldPhaseWrapLimit = mConstants.mL << mConstants.mShift;
+    bool useS32 = false;
+
+    mInSampleRate = inSampleRate;
+
+    // TODO: Add precalculated Equiripple filters
+
+    if (mFilterQuality != getQuality() ||
+            !isClose(inSampleRate, oldSampleRate, mFilterSampleRate, mSampleRate)) {
+        mFilterSampleRate = inSampleRate;
+        mFilterQuality = getQuality();
+
+        // Begin Kaiser Filter computation
+        //
+        // The quantization floor for S16 is about 96db - 10*log_10(#length) + 3dB.
+        // Keep the stop band attenuation no greater than 84-85dB for 32 length S16 filters
+        //
+        // For s32 we keep the stop band attenuation at the same as 16b resolution, about
+        // 96-98dB
+        //
+
+        double stopBandAtten;
+        double tbwCheat = 1.; // how much we "cheat" into aliasing
+        int halfLength;
+        if (mFilterQuality == DYN_HIGH_QUALITY) {
+            // 32b coefficients, 64 length
+            useS32 = true;
+            stopBandAtten = 98.;
+            halfLength = 32;
+        } else if (mFilterQuality == DYN_LOW_QUALITY) {
+            // 16b coefficients, 16-32 length
+            useS32 = false;
+            stopBandAtten = 80.;
+            if (mSampleRate >= inSampleRate * 2) {
+                halfLength = 16;
+            } else {
+                halfLength = 8;
+            }
+            if (mSampleRate >= inSampleRate) {
+                tbwCheat = 1.05;
+            } else {
+                tbwCheat = 1.03;
+            }
+        } else { // DYN_MED_QUALITY
+            // 16b coefficients, 32-64 length
+            // note: > 64 length filters with 16b coefs can have quantization noise problems
+            useS32 = false;
+            stopBandAtten = 84.;
+            if (mSampleRate >= inSampleRate * 4) {
+                halfLength = 32;
+            } else if (mSampleRate >= inSampleRate * 2) {
+                halfLength = 24;
+            } else {
+                halfLength = 16;
+            }
+            if (mSampleRate >= inSampleRate) {
+                tbwCheat = 1.03;
+            } else {
+                tbwCheat = 1.01;
+            }
+        }
+
+        // determine the number of polyphases in the filterbank.
+        // for 16b, it is desirable to have 2^(16/2) = 256 phases.
+        // https://ccrma.stanford.edu/~jos/resample/Relation_Interpolation_Error_Quantization.html
+        //
+        // We are a bit more lax on this.
+
+        int phases = mSampleRate / gcd(mSampleRate, inSampleRate);
+
+        // TODO: Once dynamic sample rate change is an option, the code below
+        // should be modified to execute only when dynamic sample rate change is enabled.
+        //
+        // as noted above, fewer than 63 phases is too few for accurate linear interpolation.
+        // we increase the phases to compensate, but more phases means more memory per
+        // filter and more time to compute the filter.
+        //
+        // if we knew that the filter will never be used for dynamic sample rate changes,
+        // we could skip this part for fixed sample rate resamplers.
+        //
+        while (phases<63) {
+            phases *= 2; // this code only needed to support dynamic rate changes
+        }
+
+        if (phases>=256) {  // too many phases, always interpolate
+            phases = 127;
+        }
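Editor's sketch of the phase-count selection above (gcd, doubling below 63 phases, capping at 256); not production code, just the arithmetic worked for two common rates.

#include <cstdio>

static int gcd(int n, int m) { return m == 0 ? n : gcd(m, n % m); }

static int pickPhases(int inRate, int outRate) {
    int phases = outRate / gcd(outRate, inRate);
    while (phases < 63) phases *= 2;   // only needed to support dynamic rate changes
    if (phases >= 256) phases = 127;   // too many phases, always interpolate
    return phases;
}

int main() {
    printf("%d\n", pickPhases(44100, 48000)); // 160
    printf("%d\n", pickPhases(22050, 48000)); // 127 (320 phases, capped)
    return 0;
}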
+
+        // create the filter
+        mConstants.set(phases, halfLength, inSampleRate, mSampleRate);
+        if (useS32) {
+            createKaiserFir<int32_t>(mConstants, stopBandAtten,
+                    inSampleRate, mSampleRate, tbwCheat);
+        } else {
+            createKaiserFir<int16_t>(mConstants, stopBandAtten,
+                    inSampleRate, mSampleRate, tbwCheat);
+        }
+    } // End Kaiser filter
+
+    // update phase and state based on the new filter.
+    const Constants& c(mConstants);
+    mInBuffer.resize(mChannelCount, c.mHalfNumCoefs);
+    const uint32_t phaseWrapLimit = c.mL << c.mShift;
+    // try to preserve as much of the phase fraction as possible for on-the-fly changes
+    mPhaseFraction = static_cast<unsigned long long>(mPhaseFraction)
+            * phaseWrapLimit / oldPhaseWrapLimit;
+    mPhaseFraction %= phaseWrapLimit; // should not do anything, but just in case.
+    mPhaseIncrement = static_cast<uint32_t>(static_cast<double>(phaseWrapLimit)
+            * inSampleRate / mSampleRate);
+
+    // determine which resampler to use
+    // check if locked phase (works only if mPhaseIncrement has no "fractional phase bits")
+    int locked = (mPhaseIncrement << (sizeof(mPhaseIncrement)*8 - c.mShift)) == 0;
+    int stride = (c.mHalfNumCoefs&7)==0 ? 16 : (c.mHalfNumCoefs&3)==0 ? 8 : 2;
+    if (locked) {
+        mPhaseFraction = mPhaseFraction >> c.mShift << c.mShift; // remove fractional phase
+    }
+
+    mResampleType = RESAMPLETYPE(mChannelCount, locked, stride, !!useS32);
+#ifdef DEBUG_RESAMPLER
+    printf("channels:%d  %s  stride:%d  %s  coef:%d  shift:%d\n",
+            mChannelCount, locked ? "locked" : "interpolated",
+            stride, useS32 ? "S32" : "S16", 2*c.mHalfNumCoefs, c.mShift);
+#endif
+}
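Editor's sketch of the phase bookkeeping at the end of setSampleRate() above, assuming L = 128 and mShift = 23 (the values worked out in the earlier sketch). The 44.1 kHz to 48 kHz ratio does not land on a whole polyphase step, so this configuration runs interpolated rather than locked.

#include <cstdio>
#include <cstdint>

int main() {
    const int L = 128, shift = 23;                                     // assumed from earlier sketch
    const uint32_t phaseWrapLimit = static_cast<uint32_t>(L) << shift; // 2^30

    const int inRate = 44100, outRate = 48000;
    const uint32_t phaseIncrement = static_cast<uint32_t>(
            static_cast<double>(phaseWrapLimit) * inRate / outRate);

    // "locked" only when the increment is an exact multiple of 1<<shift,
    // i.e. a whole number of polyphase steps with no fractional phase bits.
    const bool locked = (phaseIncrement << (sizeof(phaseIncrement) * 8 - shift)) == 0;
    printf("wrap=%u inc=%u locked=%d\n", phaseWrapLimit, phaseIncrement, locked);
    return 0;
}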
+
+void AudioResamplerDyn::resample(int32_t* out, size_t outFrameCount,
+            AudioBufferProvider* provider)
+{
+    // TODO:
+    // 24 cases - this perhaps can be reduced later, as testing might take too long
+    switch (mResampleType) {
+
+    // stride 16 (falls back to stride 2 for machines that do not support NEON)
+    case RESAMPLETYPE(1, true, 16, 0):
+        return resample<1, true, 16>(out, outFrameCount, mConstants.mFirCoefsS16, provider);
+    case RESAMPLETYPE(2, true, 16, 0):
+        return resample<2, true, 16>(out, outFrameCount, mConstants.mFirCoefsS16, provider);
+    case RESAMPLETYPE(1, false, 16, 0):
+        return resample<1, false, 16>(out, outFrameCount, mConstants.mFirCoefsS16, provider);
+    case RESAMPLETYPE(2, false, 16, 0):
+        return resample<2, false, 16>(out, outFrameCount, mConstants.mFirCoefsS16, provider);
+    case RESAMPLETYPE(1, true, 16, 1):
+        return resample<1, true, 16>(out, outFrameCount, mConstants.mFirCoefsS32, provider);
+    case RESAMPLETYPE(2, true, 16, 1):
+        return resample<2, true, 16>(out, outFrameCount, mConstants.mFirCoefsS32, provider);
+    case RESAMPLETYPE(1, false, 16, 1):
+        return resample<1, false, 16>(out, outFrameCount, mConstants.mFirCoefsS32, provider);
+    case RESAMPLETYPE(2, false, 16, 1):
+        return resample<2, false, 16>(out, outFrameCount, mConstants.mFirCoefsS32, provider);
+#if 0
+    // TODO: Remove these?
+    // stride 8
+    case RESAMPLETYPE(1, true, 8, 0):
+        return resample<1, true, 8>(out, outFrameCount, mConstants.mFirCoefsS16, provider);
+    case RESAMPLETYPE(2, true, 8, 0):
+        return resample<2, true, 8>(out, outFrameCount, mConstants.mFirCoefsS16, provider);
+    case RESAMPLETYPE(1, false, 8, 0):
+        return resample<1, false, 8>(out, outFrameCount, mConstants.mFirCoefsS16, provider);
+    case RESAMPLETYPE(2, false, 8, 0):
+        return resample<2, false, 8>(out, outFrameCount, mConstants.mFirCoefsS16, provider);
+    case RESAMPLETYPE(1, true, 8, 1):
+        return resample<1, true, 8>(out, outFrameCount, mConstants.mFirCoefsS32, provider);
+    case RESAMPLETYPE(2, true, 8, 1):
+        return resample<2, true, 8>(out, outFrameCount, mConstants.mFirCoefsS32, provider);
+    case RESAMPLETYPE(1, false, 8, 1):
+        return resample<1, false, 8>(out, outFrameCount, mConstants.mFirCoefsS32, provider);
+    case RESAMPLETYPE(2, false, 8, 1):
+        return resample<2, false, 8>(out, outFrameCount, mConstants.mFirCoefsS32, provider);
+    // stride 2 (can handle any filter length)
+    case RESAMPLETYPE(1, true, 2, 0):
+        return resample<1, true, 2>(out, outFrameCount, mConstants.mFirCoefsS16, provider);
+    case RESAMPLETYPE(2, true, 2, 0):
+        return resample<2, true, 2>(out, outFrameCount, mConstants.mFirCoefsS16, provider);
+    case RESAMPLETYPE(1, false, 2, 0):
+        return resample<1, false, 2>(out, outFrameCount, mConstants.mFirCoefsS16, provider);
+    case RESAMPLETYPE(2, false, 2, 0):
+        return resample<2, false, 2>(out, outFrameCount, mConstants.mFirCoefsS16, provider);
+    case RESAMPLETYPE(1, true, 2, 1):
+        return resample<1, true, 2>(out, outFrameCount, mConstants.mFirCoefsS32, provider);
+    case RESAMPLETYPE(2, true, 2, 1):
+        return resample<2, true, 2>(out, outFrameCount, mConstants.mFirCoefsS32, provider);
+    case RESAMPLETYPE(1, false, 2, 1):
+        return resample<1, false, 2>(out, outFrameCount, mConstants.mFirCoefsS32, provider);
+    case RESAMPLETYPE(2, false, 2, 1):
+        return resample<2, false, 2>(out, outFrameCount, mConstants.mFirCoefsS32, provider);
+#endif
+    default:
+        ; // error
+    }
+}
+
+template<int CHANNELS, bool LOCKED, int STRIDE, typename TC>
+void AudioResamplerDyn::resample(int32_t* out, size_t outFrameCount,
+        const TC* const coefs,  AudioBufferProvider* provider)
+{
+    const Constants& c(mConstants);
+    int16_t* impulse = mInBuffer.getImpulse();
+    size_t inputIndex = mInputIndex;
+    uint32_t phaseFraction = mPhaseFraction;
+    const uint32_t phaseIncrement = mPhaseIncrement;
+    size_t outputIndex = 0;
+    size_t outputSampleCount = outFrameCount * 2;   // stereo output
+    size_t inFrameCount = (outFrameCount*mInSampleRate)/mSampleRate;
+    const uint32_t phaseWrapLimit = c.mL << c.mShift;
+
+    // NOTE: be very careful when modifying the code here. register
+    // pressure is very high and a small change might cause the compiler
+    // to generate far less efficient code.
+    // Always sanity check the result with objdump or test-resample.
+
+    // the following logic is a bit convoluted to keep the main processing loop
+    // as tight as possible with register allocation.
+    while (outputIndex < outputSampleCount) {
+        // buffer is empty, fetch a new one
+        while (mBuffer.frameCount == 0) {
+            mBuffer.frameCount = inFrameCount;
+            provider->getNextBuffer(&mBuffer,
+                    calculateOutputPTS(outputIndex / 2));
+            if (mBuffer.raw == NULL) {
+                goto resample_exit;
+            }
+            if (phaseFraction >= phaseWrapLimit) { // read in data
+                mInBuffer.readAdvance<CHANNELS>(
+                        impulse, c.mHalfNumCoefs, mBuffer.i16, inputIndex);
+                phaseFraction -= phaseWrapLimit;
+                while (phaseFraction >= phaseWrapLimit) {
+                    inputIndex++;
+                    if (inputIndex >= mBuffer.frameCount) {
+                        inputIndex -= mBuffer.frameCount;
+                        provider->releaseBuffer(&mBuffer);
+                        break;
+                    }
+                    mInBuffer.readAdvance<CHANNELS>(
+                            impulse, c.mHalfNumCoefs, mBuffer.i16, inputIndex);
+                    phaseFraction -= phaseWrapLimit;
+                }
+            }
+        }
+        const int16_t* const in = mBuffer.i16;
+        const size_t frameCount = mBuffer.frameCount;
+        const int coefShift = c.mShift;
+        const int halfNumCoefs = c.mHalfNumCoefs;
+        const int32_t* const volumeSimd = mVolumeSimd;
+
+        // reread the last input in.
+        mInBuffer.readAgain<CHANNELS>(impulse, halfNumCoefs, in, inputIndex);
+
+        // main processing loop
+        while (CC_LIKELY(outputIndex < outputSampleCount)) {
+            // caution: fir() is inlined and may be large.
+            // output will be loaded with the appropriate values
+            //
+            // from the input samples in impulse[-halfNumCoefs+1]... impulse[halfNumCoefs]
+            // from the polyphase filter of (phaseFraction / phaseWrapLimit) in coefs.
+            //
+            fir<CHANNELS, LOCKED, STRIDE>(
+                    &out[outputIndex],
+                    phaseFraction, phaseWrapLimit,
+                    coefShift, halfNumCoefs, coefs,
+                    impulse, volumeSimd);
+            outputIndex += 2;
+
+            phaseFraction += phaseIncrement;
+            while (phaseFraction >= phaseWrapLimit) {
+                inputIndex++;
+                if (inputIndex >= frameCount) {
+                    goto done;  // need a new buffer
+                }
+                mInBuffer.readAdvance<CHANNELS>(impulse, halfNumCoefs, in, inputIndex);
+                phaseFraction -= phaseWrapLimit;
+            }
+        }
+done:
+        // often arrives here when input buffer runs out
+        if (inputIndex >= frameCount) {
+            inputIndex -= frameCount;
+            provider->releaseBuffer(&mBuffer);
+            // mBuffer.frameCount MUST be zero here.
+        }
+    }
+
+resample_exit:
+    mInBuffer.setImpulse(impulse);
+    mInputIndex = inputIndex;
+    mPhaseFraction = phaseFraction;
+}
+
+// ----------------------------------------------------------------------------
+}; // namespace android
diff --git a/services/audioflinger/AudioResamplerDyn.h b/services/audioflinger/AudioResamplerDyn.h
new file mode 100644
index 0000000..df1fdbe
--- /dev/null
+++ b/services/audioflinger/AudioResamplerDyn.h
@@ -0,0 +1,124 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_RESAMPLER_DYN_H
+#define ANDROID_AUDIO_RESAMPLER_DYN_H
+
+#include <stdint.h>
+#include <sys/types.h>
+#include <cutils/log.h>
+
+#include "AudioResampler.h"
+
+namespace android {
+
+class AudioResamplerDyn: public AudioResampler {
+public:
+    AudioResamplerDyn(int bitDepth, int inChannelCount, int32_t sampleRate,
+            src_quality quality);
+
+    virtual ~AudioResamplerDyn();
+
+    virtual void init();
+
+    virtual void setSampleRate(int32_t inSampleRate);
+
+    virtual void setVolume(int16_t left, int16_t right);
+
+    virtual void resample(int32_t* out, size_t outFrameCount,
+            AudioBufferProvider* provider);
+
+private:
+
+    class Constants { // stores the filter constants.
+    public:
+        Constants() :
+            mL(0), mShift(0), mHalfNumCoefs(0), mFirCoefsS16(NULL)
+        {}
+        void set(int L, int halfNumCoefs,
+                int inSampleRate, int outSampleRate);
+        inline void setBuf(int16_t* buf) {
+            mFirCoefsS16 = buf;
+        }
+        inline void setBuf(int32_t* buf) {
+            mFirCoefsS32 = buf;
+        }
+
+        int mL;       // interpolation phases in the filter.
+        int mShift;   // right shift to get polyphase index
+        unsigned int mHalfNumCoefs; // filter half #coefs
+        union {       // polyphase filter bank
+            const int16_t* mFirCoefsS16;
+            const int32_t* mFirCoefsS32;
+        };
+    };
+
+    // Input buffer management for a given input type TI (currently int16_t).
+    // It is agnostic of the actual type and can also work with int32_t and float.
+    template<typename TI>
+    class InBuffer {
+    public:
+        InBuffer();
+        ~InBuffer();
+        void init();
+        void resize(int CHANNELS, int halfNumCoefs);
+
+        // used for direct management of the mImpulse pointer
+        inline TI* getImpulse() {
+            return mImpulse;
+        }
+        inline void setImpulse(TI *impulse) {
+            mImpulse = impulse;
+        }
+        template<int CHANNELS>
+        inline void readAgain(TI*& impulse, const int halfNumCoefs,
+                const TI* const in, const size_t inputIndex);
+        template<int CHANNELS>
+        inline void readAdvance(TI*& impulse, const int halfNumCoefs,
+                const TI* const in, const size_t inputIndex);
+
+    private:
+        // tuning parameter guidelines: 2 <= multiple <= 8
+        static const int kStateSizeMultipleOfFilterLength = 4;
+
+        TI* mState;    // base pointer for the input buffer storage
+        TI* mImpulse;  // current location of the impulse response (centered)
+        TI* mRingFull; // mState <= mImpulse < mRingFull
+        // in general, mRingFull = mState + mStateSize - halfNumCoefs*CHANNELS.
+        size_t mStateSize; // in units of TI.
+    };
+
+    template<int CHANNELS, bool LOCKED, int STRIDE, typename TC>
+    void resample(int32_t* out, size_t outFrameCount,
+            const TC* const coefs, AudioBufferProvider* provider);
+
+    template<typename T>
+    void createKaiserFir(Constants &c, double stopBandAtten,
+            int inSampleRate, int outSampleRate, double tbwCheat);
+
+    InBuffer<int16_t> mInBuffer;
+    Constants mConstants;  // current set of coefficient parameters
+    int32_t __attribute__ ((aligned (8))) mVolumeSimd[2];
+    int32_t mResampleType; // contains the resample type.
+    int32_t mFilterSampleRate; // designed filter sample rate.
+    src_quality mFilterQuality; // designed filter quality.
+    void* mCoefBuffer; // if a filter is created, this is not null
+};
+
+// ----------------------------------------------------------------------------
+}; // namespace android
+
+#endif /*ANDROID_AUDIO_RESAMPLER_DYN_H*/
diff --git a/services/audioflinger/AudioResamplerFirGen.h b/services/audioflinger/AudioResamplerFirGen.h
new file mode 100644
index 0000000..fac3001
--- /dev/null
+++ b/services/audioflinger/AudioResamplerFirGen.h
@@ -0,0 +1,684 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_RESAMPLER_FIR_GEN_H
+#define ANDROID_AUDIO_RESAMPLER_FIR_GEN_H
+
+namespace android {
+
+/*
+ * generates a sine wave at equal steps.
+ *
+ * As most of our functions use sine or cosine at equal steps,
+ * it is very efficient to compute them that way (single multiply and subtract),
+ * rather than invoking the math library sin() or cos() each time.
+ *
+ * SineGen uses Goertzel's Algorithm (as a generator not a filter)
+ * to calculate sine(wstart + n * wstep) or cosine(wstart + n * wstep)
+ * by stepping through 0, 1, ... n.
+ *
+ * e^i(wstart+wstep) = 2cos(wstep) * e^i(wstart) - e^i(wstart-wstep)
+ *
+ * or looking at just the imaginary sine term, as the cosine follows identically:
+ *
+ * sin(wstart+wstep) = 2cos(wstep) * sin(wstart) - sin(wstart-wstep)
+ *
+ * Goertzel's algorithm is more efficient than the angle addition formula,
+ * e^i(wstart+wstep) = e^i(wstart) * e^i(wstep), which takes up to
+ * 4 multiplies and 2 adds (or 3* and 3+) and requires both sine and
+ * cosine generation due to the complex * complex multiply (full rotation).
+ *
+ * See: http://en.wikipedia.org/wiki/Goertzel_algorithm
+ *
+ */
+
+class SineGen {
+public:
+    SineGen(double wstart, double wstep, bool cosine = false) {
+        if (cosine) {
+            mCurrent = cos(wstart);
+            mPrevious = cos(wstart - wstep);
+        } else {
+            mCurrent = sin(wstart);
+            mPrevious = sin(wstart - wstep);
+        }
+        mTwoCos = 2.*cos(wstep);
+    }
+    SineGen(double expNow, double expPrev, double twoCosStep) {
+        mCurrent = expNow;
+        mPrevious = expPrev;
+        mTwoCos = twoCosStep;
+    }
+    inline double value() const {
+        return mCurrent;
+    }
+    inline void advance() {
+        double tmp = mCurrent;
+        mCurrent = mCurrent*mTwoCos - mPrevious;
+        mPrevious = tmp;
+    }
+    inline double valueAdvance() {
+        double tmp = mCurrent;
+        mCurrent = mCurrent*mTwoCos - mPrevious;
+        mPrevious = tmp;
+        return tmp;
+    }
+
+private:
+    double mCurrent; // current value of sine/cosine
+    double mPrevious; // previous value of sine/cosine
+    double mTwoCos; // stepping factor
+};
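Editor's usage sketch of the SineGen recursion above, checking the Goertzel-style update against libm for a few steps; it inlines the recursion rather than using the class, and the step values are arbitrary.

#include <cstdio>
#include <cmath>

int main() {
    const double wstart = 0.1, wstep = 0.05;
    double current  = sin(wstart);
    double previous = sin(wstart - wstep);
    const double twoCos = 2. * cos(wstep);

    for (int n = 0; n < 5; ++n) {
        printf("n=%d goertzel=%.12f libm=%.12f\n", n, current, sin(wstart + n * wstep));
        double tmp = current;
        current = current * twoCos - previous;  // sin(w+step) = 2cos(step)sin(w) - sin(w-step)
        previous = tmp;
    }
    return 0;
}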
+
+/*
+ * generates a series of sine generators, phase offset by fixed steps.
+ *
+ * This is used to generate polyphase sine generators, one per polyphase
+ * in the filter code below.
+ *
+ * The SineGen returned by value() starts at innerStart = outerStart + n*outerStep;
+ * increments by innerStep.
+ *
+ */
+
+class SineGenGen {
+public:
+    SineGenGen(double outerStart, double outerStep, double innerStep, bool cosine = false)
+            : mSineInnerCur(outerStart, outerStep, cosine),
+              mSineInnerPrev(outerStart-innerStep, outerStep, cosine)
+    {
+        mTwoCos = 2.*cos(innerStep);
+    }
+    inline SineGen value() {
+        return SineGen(mSineInnerCur.value(), mSineInnerPrev.value(), mTwoCos);
+    }
+    inline void advance() {
+        mSineInnerCur.advance();
+        mSineInnerPrev.advance();
+    }
+    inline SineGen valueAdvance() {
+        return SineGen(mSineInnerCur.valueAdvance(), mSineInnerPrev.valueAdvance(), mTwoCos);
+    }
+
+private:
+    SineGen mSineInnerCur; // generate the inner sine values (stepped by outerStep).
+    SineGen mSineInnerPrev; // generate the inner sine previous values
+                            // (behind by innerStep, stepped by outerStep).
+    double mTwoCos; // the inner stepping factor for the returned SineGen.
+};
+
+static inline double sqr(double x) {
+    return x * x;
+}
+
+/*
+ * rounds a double to the nearest integer for FIR coefficients.
+ *
+ * One variant uses noise shaping, which must keep error history
+ * to work (the err parameter, initialized to 0).
+ * The other variant is a non-noise shaped version for
+ * S32 coefficients (noise shaping doesn't gain much).
+ *
+ * Caution: No bounds saturation is applied, but it isn't needed in this case.
+ *
+ * @param x is the value to round.
+ *
+ * @param maxval is the maximum integer scale factor expressed as an int64 (for headroom).
+ * Typically this may be the maximum positive integer+1 (using the fact that double precision
+ * FIR coefficients generated here are never that close to 1.0 to pose an overflow condition).
+ *
+ * @param err is the previous error (actual - rounded) for the previous rounding op.
+ * For 16b coefficients this can improve stopband dB performance by up to 2dB.
+ *
+ * Many variants exist for the noise shaping: http://en.wikipedia.org/wiki/Noise_shaping
+ *
+ */
+
+static inline int64_t toint(double x, int64_t maxval, double& err) {
+    double val = x * maxval;
+    double ival = floor(val + 0.5 + err*0.2);
+    err = val - ival;
+    return static_cast<int64_t>(ival);
+}
+
+static inline int64_t toint(double x, int64_t maxval) {
+    return static_cast<int64_t>(floor(x * maxval + 0.5));
+}
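Editor's sketch of the noise-shaped rounding in toint() above, quantizing a few made-up coefficient values to a signed 16-bit scale; the 0.2 error feedback factor is the one used above.

#include <cstdio>
#include <cmath>
#include <cstdint>

int main() {
    const double coefs[] = { 0.123456, -0.234567, 0.345678, -0.456789 };  // illustrative values
    const int64_t maxval = 1 << 15;  // headroom scale for int16_t coefficients
    double err = 0.;                 // rounding error carried to the next coefficient
    for (double x : coefs) {
        double val  = x * maxval;
        double ival = floor(val + 0.5 + err * 0.2);
        err = val - ival;            // (actual - rounded), fed forward
        printf("%f -> %lld (err %f)\n", x, (long long)ival, err);
    }
    return 0;
}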
+
+/*
+ * Modified Bessel function of the first kind
+ * http://en.wikipedia.org/wiki/Bessel_function
+ *
+ * The formulas are taken from Abramowitz and Stegun,
+ * _Handbook of Mathematical Functions_ (links below):
+ *
+ * http://people.math.sfu.ca/~cbm/aands/page_375.htm
+ * http://people.math.sfu.ca/~cbm/aands/page_378.htm
+ *
+ * http://dlmf.nist.gov/10.25
+ * http://dlmf.nist.gov/10.40
+ *
+ * Note we assume x is nonnegative (the function is symmetric,
+ * pass in the absolute value as needed).
+ *
+ * Constants are compile time derived with templates I0Term<> and
+ * I0ATerm<> to the precision of the compiler.  The series can be expanded
+ * to any precision needed, but currently set around 24b precision.
+ *
+ * We use a bit of template math here, constexpr would probably be
+ * more appropriate for a C++11 compiler.
+ *
+ * For the intermediate range 3.75 < x < 15, we use minimax polynomial fit.
+ *
+ */
+
+template <int N>
+struct I0Term {
+    static const double value = I0Term<N-1>::value / (4. * N * N);
+};
+
+template <>
+struct I0Term<0> {
+    static const double value = 1.;
+};
+
+template <int N>
+struct I0ATerm {
+    static const double value = I0ATerm<N-1>::value * (2.*N-1.) * (2.*N-1.) / (8. * N);
+};
+
+template <>
+struct I0ATerm<0> { // 1/sqrt(2*PI);
+    static const double value = 0.398942280401432677939946059934381868475858631164934657665925;
+};
+
+#if USE_HORNERS_METHOD
+/* Polynomial evaluation of A + Bx + Cx^2 + Dx^3 + ...
+ * using Horner's Method: http://en.wikipedia.org/wiki/Horner's_method
+ *
+ * This has fewer multiplications than Estrin's method below, but has back to back
+ * floating point dependencies.
+ *
+ * On ARM this appears to work slower, so USE_HORNERS_METHOD is not default enabled.
+ */
+
+inline double Poly2(double A, double B, double x) {
+    return A + x * B;
+}
+
+inline double Poly4(double A, double B, double C, double D, double x) {
+    return A + x * (B + x * (C + x * (D)));
+}
+
+inline double Poly7(double A, double B, double C, double D, double E, double F, double G,
+        double x) {
+    return A + x * (B + x * (C + x * (D + x * (E + x * (F + x * (G))))));
+}
+
+inline double Poly9(double A, double B, double C, double D, double E, double F, double G,
+        double H, double I, double x) {
+    return A + x * (B + x * (C + x * (D + x * (E + x * (F + x * (G + x * (H + x * (I))))))));
+}
+
+#else
+/* Polynomial evaluation of A + Bx + Cx^2 + Dx^3 + ...
+ * using Estrin's Method: http://en.wikipedia.org/wiki/Estrin's_scheme
+ *
+ * This is typically faster, perhaps gaining about 5-10% overall on ARM processors
+ * over Horner's method above.
+ */
+
+inline double Poly2(double A, double B, double x) {
+    return A + B * x;
+}
+
+inline double Poly3(double A, double B, double C, double x, double x2) {
+    return Poly2(A, B, x) + C * x2;
+}
+
+inline double Poly3(double A, double B, double C, double x) {
+    return Poly2(A, B, x) + C * x * x;
+}
+
+inline double Poly4(double A, double B, double C, double D, double x, double x2) {
+    return Poly2(A, B, x) + Poly2(C, D, x) * x2; // same as poly2(poly2, poly2, x2);
+}
+
+inline double Poly4(double A, double B, double C, double D, double x) {
+    return Poly4(A, B, C, D, x, x * x);
+}
+
+inline double Poly7(double A, double B, double C, double D, double E, double F, double G,
+        double x) {
+    double x2 = x * x;
+    return Poly4(A, B, C, D, x, x2) + Poly3(E, F, G, x, x2) * (x2 * x2);
+}
+
+inline double Poly8(double A, double B, double C, double D, double E, double F, double G,
+        double H, double x, double x2, double x4) {
+    return Poly4(A, B, C, D, x, x2) + Poly4(E, F, G, H, x, x2) * x4;
+}
+
+inline double Poly9(double A, double B, double C, double D, double E, double F, double G,
+        double H, double I, double x) {
+    double x2 = x * x;
+#if 1
+    // It does not seem faster to explicitly decompose Poly8 into Poly4, but
+    // could depend on compiler floating point scheduling.
+    double x4 = x2 * x2;
+    return Poly8(A, B, C, D, E, F, G, H, x, x2, x4) + I * (x4 * x4);
+#else
+    double val = Poly4(A, B, C, D, x, x2);
+    double x4 = x2 * x2;
+    return val + Poly4(E, F, G, H, x, x2) * x4 + I * (x4 * x4);
+#endif
+}
+#endif
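Editor's sketch (not patch content) showing that the Horner and Estrin orderings evaluate the same cubic A + Bx + Cx^2 + Dx^3; Estrin's form splits the work into two independent halves that the CPU can schedule in parallel.

#include <cstdio>

int main() {
    const double A = 1., B = -2., C = 3., D = -4., x = 0.5;
    double horner = A + x * (B + x * (C + x * D));        // chained, dependent multiplies
    double estrin = (A + B * x) + (C + D * x) * (x * x);  // two independent halves combined
    printf("horner=%f estrin=%f\n", horner, estrin);       // both print 0.250000
    return 0;
}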
+
+static inline double I0(double x) {
+    if (x < 3.75) {
+        x *= x;
+        return Poly7(I0Term<0>::value, I0Term<1>::value,
+                I0Term<2>::value, I0Term<3>::value,
+                I0Term<4>::value, I0Term<5>::value,
+                I0Term<6>::value, x); // e < 1.6e-7
+    }
+    if (1) {
+        /*
+         * Series expansion coefs are easy to calculate, but are expanded around 0,
+         * so error is unequal over the interval 0 < x < 3.75, the error being
+         * significantly better near 0.
+         *
+         * A better solution is to use precise minimax polynomial fits.
+         *
+         * We use a slightly more complicated solution for 3.75 < x < 15, based on
+         * the tables in Blair and Edwards, "Stable Rational Minimax Approximations
+         * to the Modified Bessel Functions I0(x) and I1(x)", Chalk Hill Nuclear Laboratory,
+         * AECL-4928.
+         *
+         * http://www.iaea.org/inis/collection/NCLCollectionStore/_Public/06/178/6178667.pdf
+         *
+         * See Table 11 for 0 < x < 15; e < 10^(-7.13).
+         *
+         * Note: Beta cannot exceed 15 (hence Stopband cannot exceed 144dB = 24b).
+         *
+         * This speeds up overall computation by about 40% over using the else clause below,
+         * which requires sqrt and exp.
+         *
+         */
+
+        x *= x;
+        double num = Poly9(-0.13544938430e9, -0.33153754512e8,
+                -0.19406631946e7, -0.48058318783e5,
+                -0.63269783360e3, -0.49520779070e1,
+                -0.24970910370e-1, -0.74741159550e-4,
+                -0.18257612460e-6, x);
+        double y = x - 225.; // reflection around 15 (squared)
+        double den = Poly4(-0.34598737196e8, 0.23852643181e6,
+                -0.70699387620e3, 0.10000000000e1, y);
+        return num / den;
+
+#if IO_EXTENDED_BETA
+        /* Table 42 for x > 15; e < 10^(-8.11).
+         * This is used for Beta>15, but is disabled here as
+         * we never use Beta that high.
+         *
+         * NOTE: This should be enabled only for x > 15.
+         */
+
+        double y = 1./x;
+        double z = y - (1./15);
+        double num = Poly2(0.415079861746e1, -0.5149092496e1, z);
+        double den = Poly3(0.103150763823e2, -0.14181687413e2,
+                0.1000000000e1, z);
+        return exp(x) * sqrt(y) * num / den;
+#endif
+    } else {
+        /*
+         * NOT USED, but reference for large Beta.
+         *
+         * Abramowitz and Stegun asymptotic formula.
+         * works for x > 3.75.
+         */
+        double y = 1./x;
+        return exp(x) * sqrt(y) *
+                // note: reciprocal squareroot may be easier!
+                // http://en.wikipedia.org/wiki/Fast_inverse_square_root
+                Poly9(I0ATerm<0>::value, I0ATerm<1>::value,
+                        I0ATerm<2>::value, I0ATerm<3>::value,
+                        I0ATerm<4>::value, I0ATerm<5>::value,
+                        I0ATerm<6>::value, I0ATerm<7>::value,
+                        I0ATerm<8>::value, y); // (... e) < 1.9e-7
+    }
+}
+
+/*
+ * calculates the transition bandwidth for a Kaiser filter
+ *
+ * Formula 3.2.8, Vaidyanathan, _Multirate Systems and Filter Banks_, p. 48
+ * Formula 7.76, Oppenheim and Schafer, _Discrete-time Signal Processing, 3e_, p. 542
+ *
+ * @param halfNumCoef is half the number of coefficients per filter phase.
+ *
+ * @param stopBandAtten is the stop band attenuation desired.
+ *
+ * @return the transition bandwidth in normalized frequency (0 <= f <= 0.5)
+ */
+static inline double firKaiserTbw(int halfNumCoef, double stopBandAtten) {
+    return (stopBandAtten - 7.95)/((2.*14.36)*halfNumCoef);
+}
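A quick numeric check (editor's addition) of the transition-bandwidth formula above, for two designs of the kind AudioResamplerDyn requests: roughly 84 dB stopband with 16 half-coefficients (DYN_MED-style) and 98 dB with 32 (DYN_HIGH-style).

#include <cstdio>

static double firKaiserTbw(int halfNumCoef, double stopBandAtten) {
    return (stopBandAtten - 7.95) / ((2. * 14.36) * halfNumCoef);
}

int main() {
    printf("tbw = %f\n", firKaiserTbw(16, 84.));  // ~0.1655 (normalized frequency)
    printf("tbw = %f\n", firKaiserTbw(32, 98.));  // ~0.0980
    return 0;
}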
+
+/*
+ * calculates the fir transfer response of the overall polyphase filter at w.
+ *
+ * Calculates the DTFT transfer coefficient H(w) for 0 <= w <= PI, utilizing the
+ * fact that h[n] is symmetric (cosines only, no complex arithmetic).
+ *
+ * We use Goertzel's algorithm to accelerate the computation to essentially
+ * a single multiply and 2 adds per filter coefficient h[].
+ *
+ * Be careful to consider that h[n] is the overall polyphase filter,
+ * with L phases, so rescaling H(w)/L is probably what you expect for "unity gain",
+ * as you only use one of the polyphases at a time.
+ */
+template <typename T>
+static inline double firTransfer(const T* coef, int L, int halfNumCoef, double w) {
+    double accum = static_cast<double>(coef[0])*0.5;  // "center coefficient" from first bank
+    coef += halfNumCoef;    // skip first filterbank (picked up by the last filterbank).
+#if SLOW_FIRTRANSFER
+    /* Original code for reference.  This is equivalent to the code below, but slower. */
+    for (int i=1 ; i<=L ; ++i) {
+        for (int j=0, ix=i ; j<halfNumCoef ; ++j, ix+=L) {
+            accum += cos(ix*w)*static_cast<double>(*coef++);
+        }
+    }
+#else
+    /*
+     * Our overall filter is stored striped by polyphases, not a contiguous h[n].
+     * We could fetch coefficients in a non-contiguous fashion
+     * but that will not scale to vector processing.
+     *
+     * We apply Goertzel's algorithm directly to each polyphase filter bank instead of
+     * using cosine generation/multiplication, thereby saving one multiply per inner loop.
+     *
+     * See: http://en.wikipedia.org/wiki/Goertzel_algorithm
+     * Also: Oppenheim and Schafer, _Discrete Time Signal Processing, 3e_, p. 720.
+     *
+     * We use the basic recursion to incorporate the cosine steps into real sequence x[n]:
+     * s[n] = x[n] + (2cosw)*s[n-1] + s[n-2]
+     *
+     * y[n] = s[n] - e^(iw)s[n-1]
+     *      = sum_{k=-\infty}^{n} x[k]e^(-iw(n-k))
+     *      = e^(-iwn) sum_{k=0}^{n} x[k]e^(iwk)
+     *
+     * The summation contains the frequency steps we want multiplied by the source
+     * (similar to a DTFT).
+     *
+     * Using symmetry, and just the real part (be careful, this must happen
+     * after any internal complex multiplications), the polyphase filterbank
+     * transfer function is:
+     *
+     * Hpp[n, w, w_0] = sum_{k=0}^{n} x[k] * cos(wk + w_0)
+     *                = Re{ e^(iwn + iw_0) y[n]}
+     *                = cos(wn+w_0) * s[n] - cos(w(n+1)+w_0) * s[n-1]
+     *
+     * using the fact that s[n] of real x[n] is real.
+     *
+     */
+    double dcos = 2. * cos(L*w);
+    int start = ((halfNumCoef)*L + 1);
+    SineGen cc((start - L) * w, w, true); // cosine
+    SineGen cp(start * w, w, true); // cosine
+    for (int i=1 ; i<=L ; ++i) {
+        double sc = 0;
+        double sp = 0;
+        for (int j=0 ; j<halfNumCoef ; ++j) {
+            double tmp = sc;
+            sc  = static_cast<double>(*coef++) + dcos*sc - sp;
+            sp = tmp;
+        }
+        // If we are awfully clever, we can apply Goertzel's algorithm
+        // again on the sc and sp sequences returned here.
+        accum += cc.valueAdvance() * sc - cp.valueAdvance() * sp;
+    }
+#endif
+    return accum*2.;
+}
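+
+/*
+ * Minimal reference sketch (name and usage are illustrative only; nothing in the
+ * resampler calls this) of the plain Goertzel recursion described above, applied
+ * to a contiguous real sequence x[0..n-1] to evaluate sum_k x[k]*cos(w*k).
+ * firTransfer() applies the same recursion independently to each polyphase bank,
+ * with the cosine phase offsets supplied by the SineGen objects.
+ */
+static inline double goertzelCosineSum(const double* x, int n, double w) {
+    double sc = 0.;                 // s[k]
+    double sp = 0.;                 // s[k-1]
+    const double dcos = 2. * cos(w);
+    for (int k = 0; k < n; ++k) {
+        double tmp = sc;
+        sc = x[k] + dcos*sc - sp;   // s[k] = x[k] + 2cos(w)s[k-1] - s[k-2]
+        sp = tmp;
+    }
+    // real-part extraction with w_0 = 0 (see the Hpp derivation above)
+    return cos(w*(n-1))*sc - cos(w*n)*sp;
+}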
+
+/*
+ * evaluates the minimum and maximum |H(f)| bound in a band region.
+ *
+ * This is usually done with equally spaced increments in the target band in question.
+ * The passband is often very small and is sampled accordingly; the stopband is often
+ * much larger.
+ *
+ * We use the fact that the overall polyphase filter has an additional bank at the end
+ * for interpolation; hence it is overspecified for the H(f) computation.  Thus the
+ * first polyphase is never actually checked, excepting its first term.
+ *
+ * In this code we use the firTransfer() evaluator above, which uses Goertzel's
+ * algorithm to calculate the transfer function at each point.
+ *
+ * TODO: An alternative with equal spacing is the FFT/DFT.  An alternative with unequal
+ * spacing is a chirp transform.
+ *
+ * @param coef is the designed polyphase filter banks
+ *
+ * @param L is the number of phases (for interpolation)
+ *
+ * @param halfNumCoef should be half the number of coefficients for a single
+ * polyphase.
+ *
+ * @param fstart is the normalized frequency start.
+ *
+ * @param fend is the normalized frequency end.
+ *
+ * @param steps is the number of steps to take (sampling) between frequency start and end
+ *
+ * @param firMin returns the minimum transfer |H(f)| found
+ *
+ * @param firMax returns the maximum transfer |H(f)| found
+ *
+ * 0 <= f <= 0.5.
+ * This is used to test passband and stopband performance.
+ */
+template <typename T>
+static void testFir(const T* coef, int L, int halfNumCoef,
+        double fstart, double fend, int steps, double &firMin, double &firMax) {
+    double wstart = fstart*(2.*M_PI);
+    double wend = fend*(2.*M_PI);
+    double wstep = (wend - wstart)/steps;
+    double fmax, fmin;
+    double trf = firTransfer(coef, L, halfNumCoef, wstart);
+    if (trf<0) {
+        trf = -trf;
+    }
+    fmin = fmax = trf;
+    wstart += wstep;
+    for (int i=1; i<steps; ++i) {
+        trf = firTransfer(coef, L, halfNumCoef, wstart);
+        if (trf<0) {
+            trf = -trf;
+        }
+        if (trf>fmax) {
+            fmax = trf;
+        }
+        else if (trf<fmin) {
+            fmin = trf;
+        }
+        wstart += wstep;
+    }
+    // renormalize - this is only needed for integer filter types
+    double norm = 1./((1ULL<<(sizeof(T)*8-1))*L);
+
+    firMin = fmin * norm;
+    firMax = fmax * norm;
+}
+
+/*
+ * evaluates the |H(f)| lowpass band characteristics.
+ *
+ * This function tests the lowpass characteristics for the overall polyphase filter,
+ * and is used to verify the design.  For this case, fp should be set to the
+ * passband normalized frequency from 0 to 0.5 for the overall filter (thus it
+ * is the designed polyphase bank value / L).  Likewise for fs.
+ *
+ * @param coef is the designed polyphase filter banks
+ *
+ * @param L is the number of phases (for interpolation)
+ *
+ * @param halfNumCoef should be half the number of coefficients for a single
+ * polyphase.
+ *
+ * @param fp is the passband normalized frequency, 0 < fp < fs < 0.5.
+ *
+ * @param fs is the stopband normalized frequency, 0 < fp < fs < 0.5.
+ *
+ * @param passSteps is the number of passband sampling steps.
+ *
+ * @param stopSteps is the number of stopband sampling steps.
+ *
+ * @param passMin is the minimum value in the passband
+ *
+ * @param passMax is the maximum value in the passband (useful for scaling).  This should
+ * be less than 1., to avoid sine wave test overflow.
+ *
+ * @param passRipple is the passband ripple in dB.  Typically this should be less than 0.1 for
+ * an audio filter.  Generally speaker/headphone device characteristics will dominate
+ * the passband term.
+ *
+ * @param stopMax is the maximum value in the stopband.
+ *
+ * @param stopRipple is the stopband ripple, also known as stopband attenuation.
+ * Typically this should be greater than ~80dB for low quality, and greater than
+ * ~100dB for full 16b quality, otherwise aliasing may become noticeable.
+ *
+ */
+template <typename T>
+static void testFir(const T* coef, int L, int halfNumCoef,
+        double fp, double fs, int passSteps, int stopSteps,
+        double &passMin, double &passMax, double &passRipple,
+        double &stopMax, double &stopRipple) {
+    double fmin, fmax;
+    testFir(coef, L, halfNumCoef, 0., fp, passSteps, fmin, fmax);
+    double d1 = (fmax - fmin)/2.;
+    passMin = fmin;
+    passMax = fmax;
+    passRipple = -20.*log10(1. - d1); // passband ripple
+    testFir(coef, L, halfNumCoef, fs, 0.5, stopSteps, fmin, fmax);
+    // fmin is really not important for the stopband.
+    stopMax = fmax;
+    stopRipple = -20.*log10(fmax); // stopband ripple/attenuation
+}
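+
+/*
+ * Illustrative use of the band test above (hypothetical parameters; the actual
+ * callers and thresholds live in the resampler setup code):
+ *
+ *   double passMin, passMax, passRipple, stopMax, stopRipple;
+ *   testFir(coefs, L, halfNumCoef,
+ *           fp, fs,          // overall-filter normalized frequencies (design values / L)
+ *           200, 200,        // passband / stopband sampling steps
+ *           passMin, passMax, passRipple, stopMax, stopRipple);
+ *   // expect passRipple to be small (well under 1 dB) and stopRipple to be
+ *   // close to the designed stopband attenuation.
+ */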
+
+/*
+ * Calculates the overall polyphase filter based on a windowed sinc function.
+ *
+ * The windowed sinc is an odd length symmetric filter of exactly L*halfNumCoef*2+1
+ * taps for the entire kernel.  This is then decomposed into L+1 polyphase filterbanks.
+ * The last filterbank is used for interpolation purposes (and is mostly composed
+ * of the first bank shifted by one sample), and is unnecessary if one does
+ * not do interpolation.
+ *
+ * We use the last filterbank for some transfer function calculation purposes,
+ * so it needs to be generated anyway.
+ *
+ * @param coef is the caller allocated space for coefficients.  This should be
+ * exactly (L+1)*halfNumCoef in size.
+ *
+ * @param L is the number of phases (for interpolation)
+ *
+ * @param halfNumCoef should be half the number of coefficients for a single
+ * polyphase.
+ *
+ * @param stopBandAtten is the desired stopband attenuation; it should be > 50dB.
+ *
+ * @param fcr is cutoff frequency/sampling rate (<0.5).  At this point the energy
+ * should be 6dB down (fcr is where the amplitude drops by half).  Use
+ * firKaiserTbw() to calculate the transition bandwidth.  fcr is the midpoint
+ * between the stop band and the pass band, (fstop+fpass)/2.
+ *
+ * @param atten is the attenuation (generally slightly less than 1).
+ */
+
+template <typename T>
+static inline void firKaiserGen(T* coef, int L, int halfNumCoef,
+        double stopBandAtten, double fcr, double atten) {
+    //
+    // Formula 3.2.5, 3.2.7, Vaidyanathan, _Multirate Systems and Filter Banks_, p. 48
+    // Formula 7.75, Oppenheim and Schafer, _Discrete-time Signal Processing, 3e_, p. 542
+    //
+    // See also: http://melodi.ee.washington.edu/courses/ee518/notes/lec17.pdf
+    //
+    // Kaiser window and beta parameter
+    //
+    //         | 0.1102*(A - 8.7)                         A > 50
+    //  beta = | 0.5842*(A - 21)^0.4 + 0.07886*(A - 21)   21 <= A <= 50
+    //         | 0.                                       A < 21
+    //
+    // where A is the desired stop-band attenuation in dBFS
+    //
+    //    30 dB    2.210
+    //    40 dB    3.384
+    //    50 dB    4.538
+    //    60 dB    5.658
+    //    70 dB    6.764
+    //    80 dB    7.865
+    //    90 dB    8.960
+    //   100 dB   10.056
+
+    const int N = L * halfNumCoef; // non-negative half
+    const double beta = 0.1102 * (stopBandAtten - 8.7); // >= 50dB always
+    const double xstep = (2. * M_PI) * fcr / L;
+    const double xfrac = 1. / N;
+    const double yscale = atten * L / (I0(beta) * M_PI);
+
+    // We use sine generators, which compute sines on regular step intervals.
+    // This speeds up the overall computation by about 40% over computing the sine directly.
+
+    SineGenGen sgg(0., xstep, L*xstep); // generates sine generators (one per polyphase)
+
+    for (int i=0 ; i<=L ; ++i) { // generate an extra set of coefs for interpolation
+
+        // computation for a single polyphase of the overall filter.
+        SineGen sg = sgg.valueAdvance(); // current sine generator for "j" inner loop.
+        double err = 0; // for noise shaping on int16_t coefficients (over each polyphase)
+
+        for (int j=0, ix=i ; j<halfNumCoef ; ++j, ix+=L) {
+            double y;
+            if (CC_LIKELY(ix)) {
+                double x = static_cast<double>(ix);
+
+                // sine generator: sg.valueAdvance() returns sin(ix*xstep);
+                y = I0(beta * sqrt(1.0 - sqr(x * xfrac))) * yscale * sg.valueAdvance() / x;
+            } else {
+                y = 2. * atten * fcr; // center of filter, sinc(0) = 1.
+                sg.advance();
+            }
+
+            // (caution!) float version does not need rounding
+            if (is_same<T, int16_t>::value) { // int16_t needs noise shaping
+                *coef++ = static_cast<T>(toint(y, 1ULL<<(sizeof(T)*8-1), err));
+            } else {
+                *coef++ = static_cast<T>(toint(y, 1ULL<<(sizeof(T)*8-1)));
+            }
+        }
+    }
+}
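+
+/*
+ * Illustrative design sketch (hypothetical choices throughout) tying the helpers
+ * above together: pick a stopband attenuation, estimate the transition bandwidth,
+ * place the cutoff midway between the passband and stopband edges, and generate
+ * the (L+1)*halfNumCoef polyphase coefficients.  Assumes halfNumCoef is large
+ * enough that the transition bandwidth is well under 0.5.
+ */
+template <typename T>
+static inline void firKaiserDesignExample(T* coefs, int L, int halfNumCoef) {
+    const double stopBandAtten = 90.;   // dB; must be > 50 for the beta formula above
+    const double tbw = firKaiserTbw(halfNumCoef, stopBandAtten);
+    const double fstop = 0.5;           // stopband edge placed at Nyquist (illustrative)
+    const double fpass = fstop - tbw;   // passband edge one transition band below
+    const double fcr = (fpass + fstop) / 2.;    // cutoff at the midpoint, per the docs above
+    const double atten = 0.99;          // slight attenuation for passband headroom
+    firKaiserGen(coefs, L, halfNumCoef, stopBandAtten, fcr, atten);
+}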
+
+}; // namespace android
+
+#endif /*ANDROID_AUDIO_RESAMPLER_FIR_GEN_H*/
diff --git a/services/audioflinger/AudioResamplerFirOps.h b/services/audioflinger/AudioResamplerFirOps.h
new file mode 100644
index 0000000..bf2163f
--- /dev/null
+++ b/services/audioflinger/AudioResamplerFirOps.h
@@ -0,0 +1,163 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_RESAMPLER_FIR_OPS_H
+#define ANDROID_AUDIO_RESAMPLER_FIR_OPS_H
+
+namespace android {
+
+#if defined(__arm__) && !defined(__thumb__)
+#define USE_INLINE_ASSEMBLY (true)
+#else
+#define USE_INLINE_ASSEMBLY (false)
+#endif
+
+#if USE_INLINE_ASSEMBLY && defined(__ARM_NEON__)
+#define USE_NEON (true)
+#include <arm_neon.h>
+#else
+#define USE_NEON (false)
+#endif
+
+template<typename T, typename U>
+struct is_same
+{
+    static const bool value = false;
+};
+
+template<typename T>
+struct is_same<T, T>  // partial specialization
+{
+    static const bool value = true;
+};
+
+static inline
+int32_t mulRL(int left, int32_t in, uint32_t vRL)
+{
+#if USE_INLINE_ASSEMBLY
+    int32_t out;
+    if (left) {
+        asm( "smultb %[out], %[in], %[vRL] \n"
+             : [out]"=r"(out)
+             : [in]"%r"(in), [vRL]"r"(vRL)
+             : );
+    } else {
+        asm( "smultt %[out], %[in], %[vRL] \n"
+             : [out]"=r"(out)
+             : [in]"%r"(in), [vRL]"r"(vRL)
+             : );
+    }
+    return out;
+#else
+    int16_t v = left ? static_cast<int16_t>(vRL) : static_cast<int16_t>(vRL>>16);
+    return static_cast<int32_t>((static_cast<int64_t>(in) * v) >> 16);
+#endif
+}
+
+static inline
+int32_t mulAdd(int16_t in, int16_t v, int32_t a)
+{
+#if USE_INLINE_ASSEMBLY
+    int32_t out;
+    asm( "smlabb %[out], %[v], %[in], %[a] \n"
+         : [out]"=r"(out)
+         : [in]"%r"(in), [v]"r"(v), [a]"r"(a)
+         : );
+    return out;
+#else
+    return a + v * in;
+#endif
+}
+
+static inline
+int32_t mulAdd(int16_t in, int32_t v, int32_t a)
+{
+#if USE_INLINE_ASSEMBLY
+    int32_t out;
+    asm( "smlawb %[out], %[v], %[in], %[a] \n"
+         : [out]"=r"(out)
+         : [in]"%r"(in), [v]"r"(v), [a]"r"(a)
+         : );
+    return out;
+#else
+    return a + static_cast<int32_t>((static_cast<int64_t>(v) * in) >> 16);
+#endif
+}
+
+static inline
+int32_t mulAdd(int32_t in, int32_t v, int32_t a)
+{
+#if USE_INLINE_ASSEMBLY
+    int32_t out;
+    asm( "smmla %[out], %[v], %[in], %[a] \n"
+         : [out]"=r"(out)
+         : [in]"%r"(in), [v]"r"(v), [a]"r"(a)
+         : );
+    return out;
+#else
+    return a + static_cast<int32_t>((static_cast<int64_t>(v) * in) >> 32);
+#endif
+}
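+
+/*
+ * Informal fixed-point reading of the three mulAdd() variants above (the formats
+ * are implied by the shifts, not enforced anywhere): the (int16_t, int16_t) form
+ * is a plain integer multiply-accumulate; the (int16_t, int32_t) form keeps the
+ * top 32 bits of the 48-bit product (smlawb), i.e. an implicit >>16; and the
+ * (int32_t, int32_t) form keeps the top 32 bits of the 64-bit product (smmla),
+ * i.e. an implicit >>32, roughly a Q31 x Q31 -> Q30 multiply.
+ */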
+
+static inline
+int32_t mulAddRL(int left, uint32_t inRL, int16_t v, int32_t a)
+{
+#if USE_INLINE_ASSEMBLY
+    int32_t out;
+    if (left) {
+        asm( "smlabb %[out], %[v], %[inRL], %[a] \n"
+             : [out]"=r"(out)
+             : [inRL]"%r"(inRL), [v]"r"(v), [a]"r"(a)
+             : );
+    } else {
+        asm( "smlabt %[out], %[v], %[inRL], %[a] \n"
+             : [out]"=r"(out)
+             : [inRL]"%r"(inRL), [v]"r"(v), [a]"r"(a)
+             : );
+    }
+    return out;
+#else
+    int16_t s = left ? static_cast<int16_t>(inRL) : static_cast<int16_t>(inRL>>16);
+    return a + v * s;
+#endif
+}
+
+static inline
+int32_t mulAddRL(int left, uint32_t inRL, int32_t v, int32_t a)
+{
+#if USE_INLINE_ASSEMBLY
+    int32_t out;
+    if (left) {
+        asm( "smlawb %[out], %[v], %[inRL], %[a] \n"
+             : [out]"=r"(out)
+             : [inRL]"%r"(inRL), [v]"r"(v), [a]"r"(a)
+             : );
+    } else {
+        asm( "smlawt %[out], %[v], %[inRL], %[a] \n"
+             : [out]"=r"(out)
+             : [inRL]"%r"(inRL), [v]"r"(v), [a]"r"(a)
+             : );
+    }
+    return out;
+#else
+    int16_t s = left ? static_cast<int16_t>(inRL) : static_cast<int16_t>(inRL>>16);
+    return a + static_cast<int32_t>((static_cast<int64_t>(v) * s) >> 16);
+#endif
+}
+
+}; // namespace android
+
+#endif /*ANDROID_AUDIO_RESAMPLER_FIR_OPS_H*/
diff --git a/services/audioflinger/AudioResamplerFirProcess.h b/services/audioflinger/AudioResamplerFirProcess.h
new file mode 100644
index 0000000..38e387c
--- /dev/null
+++ b/services/audioflinger/AudioResamplerFirProcess.h
@@ -0,0 +1,256 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_RESAMPLER_FIR_PROCESS_H
+#define ANDROID_AUDIO_RESAMPLER_FIR_PROCESS_H
+
+namespace android {
+
+// depends on AudioResamplerFirOps.h
+
+template<int CHANNELS, typename TC>
+static inline
+void mac(
+        int32_t& l, int32_t& r,
+        const TC coef,
+        const int16_t* samples)
+{
+    if (CHANNELS == 2) {
+        uint32_t rl = *reinterpret_cast<const uint32_t*>(samples);
+        l = mulAddRL(1, rl, coef, l);
+        r = mulAddRL(0, rl, coef, r);
+    } else {
+        r = l = mulAdd(samples[0], coef, l);
+    }
+}
+
+template<int CHANNELS, typename TC>
+static inline
+void interpolate(
+        int32_t& l, int32_t& r,
+        const TC coef_0, const TC coef_1,
+        const int16_t lerp, const int16_t* samples)
+{
+    TC sinc;
+
+    if (is_same<TC, int16_t>::value) {
+        sinc = (lerp * ((coef_1-coef_0)<<1)>>16) + coef_0;
+    } else {
+        sinc = mulAdd(lerp, (coef_1-coef_0)<<1, coef_0);
+    }
+    if (CHANNELS == 2) {
+        uint32_t rl = *reinterpret_cast<const uint32_t*>(samples);
+        l = mulAddRL(1, rl, sinc, l);
+        r = mulAddRL(0, rl, sinc, r);
+    } else {
+        r = l = mulAdd(samples[0], sinc, l);
+    }
+}
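+
+/*
+ * Informally, the expressions above compute sinc = coef_0 + lerp*(coef_1 - coef_0)
+ * with lerp treated as an unsigned Q15 fraction: the <<1 on the coefficient
+ * difference compensates for the >>16 of the 16-bit multiply (or of mulAdd()),
+ * so the result lands back in the original coefficient format.
+ */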
+
+/*
+ * Calculates a single output sample (two stereo frames).
+ *
+ * This function computes both the positive half FIR dot product and
+ * the negative half FIR dot product, accumulates, and then applies the volume.
+ *
+ * This is a locked phase filter (it does not compute the interpolation).
+ *
+ * Use fir() to compute the proper coefficient pointers for a polyphase
+ * filter bank.
+ */
+
+template <int CHANNELS, int STRIDE, typename TC>
+static inline
+void ProcessL(int32_t* const out,
+        int count,
+        const TC* coefsP,
+        const TC* coefsN,
+        const int16_t* sP,
+        const int16_t* sN,
+        const int32_t* const volumeLR)
+{
+    int32_t l = 0;
+    int32_t r = 0;
+    do {
+        mac<CHANNELS>(l, r, *coefsP++, sP);
+        sP -= CHANNELS;
+        mac<CHANNELS>(l, r, *coefsN++, sN);
+        sN += CHANNELS;
+    } while (--count > 0);
+    out[0] += 2 * mulRL(0, l, volumeLR[0]); // Note: only use top 16b
+    out[1] += 2 * mulRL(0, r, volumeLR[1]); // Note: only use top 16b
+}
+
+/*
+ * Calculates a single output sample (two stereo frames) interpolating phase.
+ *
+ * This function computes both the positive half FIR dot product and
+ * the negative half FIR dot product, accumulates, and then applies the volume.
+ *
+ * This is an interpolated phase filter.
+ *
+ * Use fir() to compute the proper coefficient pointers for a polyphase
+ * filter bank.
+ */
+
+template <int CHANNELS, int STRIDE, typename TC>
+static inline
+void Process(int32_t* const out,
+        int count,
+        const TC* coefsP,
+        const TC* coefsN,
+        const TC* coefsP1,
+        const TC* coefsN1,
+        const int16_t* sP,
+        const int16_t* sN,
+        uint32_t lerpP,
+        const int32_t* const volumeLR)
+{
+    (void) coefsP1; // suppress unused parameter warning
+    (void) coefsN1;
+    if (sizeof(*coefsP)==4) {
+        lerpP >>= 16;   // ensure lerpP is 16b
+    }
+    int32_t l = 0;
+    int32_t r = 0;
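+    // coefsP[count] / coefsN[count] below index the same tap in the adjacent
+    // polyphase bank (the banks are stored contiguously and count == halfNumCoefs),
+    // which is what the interpolation lerps against.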
+    for (int i = 0; i < count; ++i) {
+        interpolate<CHANNELS>(l, r, coefsP[0], coefsP[count], lerpP, sP);
+        coefsP++;
+        sP -= CHANNELS;
+        interpolate<CHANNELS>(l, r, coefsN[count], coefsN[0], lerpP, sN);
+        coefsN++;
+        sN += CHANNELS;
+    }
+    out[0] += 2 * mulRL(0, l, volumeLR[0]); // Note: only use top 16b
+    out[1] += 2 * mulRL(0, r, volumeLR[1]); // Note: only use top 16b
+}
+
+/*
+ * Calculates a single output sample (two stereo frames) from input sample pointer.
+ *
+ * This sets up the params for the accelerated Process() and ProcessL()
+ * functions to do the appropriate dot products.
+ *
+ * @param out should point to the output buffer with at least enough space for 2 output frames.
+ *
+ * @param phase is the fractional distance between input samples for interpolation:
+ * phase >= 0  && phase < phaseWrapLimit.  It can be thought of as a rational fraction
+ * of phase/phaseWrapLimit.
+ *
+ * @param phaseWrapLimit is #polyphases<<coefShift, where #polyphases is the number of polyphases
+ * in the polyphase filter. Likewise, #polyphases can be obtained as (phaseWrapLimit>>coefShift).
+ *
+ * @param coefShift gives the bit alignment of the polyphase index in the phase parameter.
+ *
+ * @param halfNumCoefs is half the number of coefficients per polyphase filter. Since the
+ * overall filterbank is odd-length symmetric, only halfNumCoefs need be stored.
+ *
+ * @param coefs is the polyphase filter bank, starting from polyphase index 0, and ranging to
+ * and including the #polyphases.  Each polyphase of the filter has half-length halfNumCoefs
+ * (due to symmetry).  The total size of the filter bank in coefficients is
+ * (#polyphases+1)*halfNumCoefs.
+ *
+ * The filter bank coefs should be aligned to a minimum of 16 bytes (preferably to cache line).
+ *
+ * The coefs should be attenuated (to compensate for passband ripple)
+ * if storing back into the native format.
+ *
+ * @param samples are unaligned input samples.  The position is in the "middle" of the
+ * sample array with respect to the FIR filter:
+ * the negative half of the filter is dot product from samples+1 to samples+halfNumCoefs;
+ * the positive half of the filter is dot product from samples to samples-halfNumCoefs+1.
+ *
+ * @param volumeLR is a pointer to an array of two 32 bit volume values, one per stereo channel,
+ * expressed as an S32 integer.  A negative value inverts the channel 180 degrees.
+ * The pointer volumeLR should be aligned to a minimum of 8 bytes.
+ * A typical value for volume is 0x1000 to align to a unity gain output of 20.12.
+ *
+ * In between calls to filterCoefficient, the phase is incremented by phaseIncrement, where
+ * phaseIncrement is calculated as inputSampling * phaseWrapLimit / outputSampling.
+ *
+ * The filter polyphase index is given by indexP = phase >> coefShift. Due to
+ * odd length symmetric filter, the polyphase index of the negative half depends on
+ * whether interpolation is used.
+ *
+ * The fractional siting between the polyphase indices is given by the bits below coefShift:
+ *
+ * lerpP = phase << (32 - coefShift) >> 1;  // for 32 bit unsigned phase multiply
+ * lerpP = phase << (32 - coefShift) >> 17; // for 16 bit unsigned phase multiply
+ *
+ * For integer types, this is expressed as:
+ *
+ * lerpP = phase << (sizeof(phase)*8 - coefShift)
+ *              >> ((sizeof(phase)-sizeof(*coefs))*8 + 1);
+ *
+ */
+
+template<int CHANNELS, bool LOCKED, int STRIDE, typename TC>
+static inline
+void fir(int32_t* const out,
+        const uint32_t phase, const uint32_t phaseWrapLimit,
+        const int coefShift, const int halfNumCoefs, const TC* const coefs,
+        const int16_t* const samples, const int32_t* const volumeLR)
+{
+    // NOTE: be very careful when modifying the code here. register
+    // pressure is very high and a small change might cause the compiler
+    // to generate far less efficient code.
+    // Always sanity check the result with objdump or test-resample.
+
+    if (LOCKED) {
+        // locked polyphase (no interpolation)
+        // Compute the polyphase filter index on the positive and negative side.
+        uint32_t indexP = phase >> coefShift;
+        uint32_t indexN = (phaseWrapLimit - phase) >> coefShift;
+        const TC* coefsP = coefs + indexP*halfNumCoefs;
+        const TC* coefsN = coefs + indexN*halfNumCoefs;
+        const int16_t* sP = samples;
+        const int16_t* sN = samples + CHANNELS;
+
+        // dot product filter.
+        ProcessL<CHANNELS, STRIDE>(out,
+                halfNumCoefs, coefsP, coefsN, sP, sN, volumeLR);
+    } else {
+        // interpolated polyphase
+        // Compute the polyphase filter index on the positive and negative side.
+        uint32_t indexP = phase >> coefShift;
+        uint32_t indexN = (phaseWrapLimit - phase - 1) >> coefShift; // one's complement.
+        const TC* coefsP = coefs + indexP*halfNumCoefs;
+        const TC* coefsN = coefs + indexN*halfNumCoefs;
+        const TC* coefsP1 = coefsP + halfNumCoefs;
+        const TC* coefsN1 = coefsN + halfNumCoefs;
+        const int16_t* sP = samples;
+        const int16_t* sN = samples + CHANNELS;
+
+        // Interpolation fraction lerpP derived by shifting all the way up and down
+        // to clear the appropriate bits and align to the appropriate level
+        // for the integer multiply.  The constants should resolve in compile time.
+        //
+        // The interpolated filter coefficient is derived as follows for the pos/neg half:
+        //
+        // interpolated[P] = index[P]*lerpP + index[P+1]*(1-lerpP)
+        // interpolated[N] = index[N+1]*lerpP + index[N]*(1-lerpP)
+        uint32_t lerpP = phase << (sizeof(phase)*8 - coefShift)
+                >> ((sizeof(phase)-sizeof(*coefs))*8 + 1);
+
+        // on-the-fly interpolated dot product filter
+        Process<CHANNELS, STRIDE>(out,
+                halfNumCoefs, coefsP, coefsN, coefsP1, coefsN1, sP, sN, lerpP, volumeLR);
+    }
+}
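+
+/*
+ * Illustrative (hypothetical) driver loop for fir() above, showing only the phase
+ * accumulation and wrap described in the comment block; the real caller lives in
+ * the dynamic resampler and additionally handles buffer refills, volume ramps and
+ * output formats.
+ */
+template<int CHANNELS, bool LOCKED, int STRIDE, typename TC>
+static inline void firResampleExample(int32_t* out, size_t outFrames,
+        const int16_t* samples, uint32_t phaseIncrement, uint32_t phaseWrapLimit,
+        int coefShift, int halfNumCoefs, const TC* coefs, const int32_t* volumeLR)
+{
+    uint32_t phase = 0;
+    // samples is assumed to already point at the filter "middle" (see above).
+    for (size_t i = 0; i < outFrames; ++i) {
+        fir<CHANNELS, LOCKED, STRIDE>(out, phase, phaseWrapLimit,
+                coefShift, halfNumCoefs, coefs, samples, volumeLR);
+        out += 2;                    // two 32-bit accumulators per output frame
+        phase += phaseIncrement;     // inputSampling * phaseWrapLimit / outputSampling
+        while (phase >= phaseWrapLimit) {
+            phase -= phaseWrapLimit; // wrap the phase,
+            samples += CHANNELS;     // and advance one input frame per wrap
+        }
+    }
+}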
+
+}; // namespace android
+
+#endif /*ANDROID_AUDIO_RESAMPLER_FIR_PROCESS_H*/
diff --git a/services/audioflinger/AudioResamplerFirProcessNeon.h b/services/audioflinger/AudioResamplerFirProcessNeon.h
new file mode 100644
index 0000000..f311cef
--- /dev/null
+++ b/services/audioflinger/AudioResamplerFirProcessNeon.h
@@ -0,0 +1,1149 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_RESAMPLER_FIR_PROCESS_NEON_H
+#define ANDROID_AUDIO_RESAMPLER_FIR_PROCESS_NEON_H
+
+namespace android {
+
+// depends on AudioResamplerFirOps.h, AudioResamplerFirProcess.h
+
+#if USE_NEON
+//
+// NEON specializations are enabled for Process() and ProcessL()
+//
+// TODO: Stride 16 and Stride 8 can be combined with one pass stride 8 (if necessary)
+// and looping stride 16 (or vice versa). This has some polyphase coef data alignment
+// issues with S16 coefs. Consider this later.
+
+// Macros to save a mono/stereo accumulator sample in q0 (and q4) as stereo out.
+#define ASSEMBLY_ACCUMULATE_MONO \
+        "vld1.s32       {d2}, [%[vLR]:64]        \n"/* (1) load volumes */\
+        "vld1.s32       {d3}, %[out]             \n"/* (2) unaligned load the output */\
+        "vpadd.s32      d0, d0, d1               \n"/* (1) add all 4 partial sums */\
+        "vpadd.s32      d0, d0, d0               \n"/* (1+4d) and replicate L/R */\
+        "vqrdmulh.s32   d0, d0, d2               \n"/* (2+3d) apply volume */\
+        "vqadd.s32      d3, d3, d0               \n"/* (1+4d) accumulate result (saturating) */\
+        "vst1.s32       {d3}, %[out]             \n"/* (2+2d) store result */
+
+#define ASSEMBLY_ACCUMULATE_STEREO \
+        "vld1.s32       {d2}, [%[vLR]:64]        \n"/* (1) load volumes*/\
+        "vld1.s32       {d3}, %[out]             \n"/* (2) unaligned load the output*/\
+        "vpadd.s32      d0, d0, d1               \n"/* (1) add all 4 partial sums from q0*/\
+        "vpadd.s32      d8, d8, d9               \n"/* (1) add all 4 partial sums from q4*/\
+        "vpadd.s32      d0, d0, d8               \n"/* (1+4d) combine into L/R*/\
+        "vqrdmulh.s32   d0, d0, d2               \n"/* (2+3d) apply volume*/\
+        "vqadd.s32      d3, d3, d0               \n"/* (1+4d) accumulate result (saturating)*/\
+        "vst1.s32       {d3}, %[out]             \n"/* (2+2d)store result*/
+
+template <>
+inline void ProcessL<1, 16>(int32_t* const out,
+        int count,
+        const int16_t* coefsP,
+        const int16_t* coefsN,
+        const int16_t* sP,
+        const int16_t* sN,
+        const int32_t* const volumeLR)
+{
+    const int CHANNELS = 1; // template specialization does not preserve params
+    const int STRIDE = 16;
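+    // The vector path loads STRIDE/2 frames per iteration forwards from sP and then
+    // reverses them (vrev), so rewind sP here to cover the same frames the scalar
+    // ProcessL() would read walking backwards from samples.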
+    sP -= CHANNELS*((STRIDE>>1)-1);
+    asm (
+        "veor           q0, q0, q0               \n"// (0 - combines+) accumulator = 0
+
+        "1:                                      \n"
+
+        "vld1.16        {q2}, [%[sP]]            \n"// (2+0d) load 8 16-bits mono samples
+        "vld1.16        {q3}, [%[sN]]!           \n"// (2) load 8 16-bits mono samples
+        "vld1.16        {q8}, [%[coefsP0]:128]!  \n"// (1) load 8 16-bits coefs
+        "vld1.16        {q10}, [%[coefsN0]:128]! \n"// (1) load 8 16-bits coefs
+
+        "vrev64.16      q2, q2                   \n"// (1) reverse s3, s2, s1, s0, s7, s6, s5, s4
+
+        // reordering the vmlal to do d6, d7 before d4, d5 is slower(?)
+        "vmlal.s16      q0, d4, d17              \n"// (1+0d) multiply (reversed)samples by coef
+        "vmlal.s16      q0, d5, d16              \n"// (1) multiply (reversed)samples by coef
+        "vmlal.s16      q0, d6, d20              \n"// (1) multiply neg samples
+        "vmlal.s16      q0, d7, d21              \n"// (1) multiply neg samples
+
+        // moving these ARM instructions before neon above seems to be slower
+        "subs           %[count], %[count], #8   \n"// (1) update loop counter
+        "sub            %[sP], %[sP], #16        \n"// (0) move pointer to next set of samples
+
+        // sP used after branch (warning)
+        "bne            1b                       \n"// loop
+
+         ASSEMBLY_ACCUMULATE_MONO
+
+        : [out]     "=Uv" (out[0]),
+          [count]   "+r" (count),
+          [coefsP0] "+r" (coefsP),
+          [coefsN0] "+r" (coefsN),
+          [sP]      "+r" (sP),
+          [sN]      "+r" (sN)
+        : [vLR]     "r" (volumeLR)
+        : "cc", "memory",
+          "q0", "q1", "q2", "q3",
+          "q8", "q10"
+    );
+}
+
+template <>
+inline void ProcessL<2, 16>(int32_t* const out,
+        int count,
+        const int16_t* coefsP,
+        const int16_t* coefsN,
+        const int16_t* sP,
+        const int16_t* sN,
+        const int32_t* const volumeLR)
+{
+    const int CHANNELS = 2; // template specialization does not preserve params
+    const int STRIDE = 16;
+    sP -= CHANNELS*((STRIDE>>1)-1);
+    asm (
+        "veor           q0, q0, q0               \n"// (1) acc_L = 0
+        "veor           q4, q4, q4               \n"// (0 combines+) acc_R = 0
+
+        "1:                                      \n"
+
+        "vld2.16        {q2, q3}, [%[sP]]        \n"// (3+0d) load 8 16-bits stereo samples
+        "vld2.16        {q5, q6}, [%[sN]]!       \n"// (3) load 8 16-bits stereo samples
+        "vld1.16        {q8}, [%[coefsP0]:128]!  \n"// (1) load 8 16-bits coefs
+        "vld1.16        {q10}, [%[coefsN0]:128]! \n"// (1) load 8 16-bits coefs
+
+        "vrev64.16      q2, q2                   \n"// (1) reverse 8 frames of the left positive
+        "vrev64.16      q3, q3                   \n"// (0 combines+) reverse right positive
+
+        "vmlal.s16      q0, d4, d17              \n"// (1) multiply (reversed) samples left
+        "vmlal.s16      q0, d5, d16              \n"// (1) multiply (reversed) samples left
+        "vmlal.s16      q4, d6, d17              \n"// (1) multiply (reversed) samples right
+        "vmlal.s16      q4, d7, d16              \n"// (1) multiply (reversed) samples right
+        "vmlal.s16      q0, d10, d20             \n"// (1) multiply samples left
+        "vmlal.s16      q0, d11, d21             \n"// (1) multiply samples left
+        "vmlal.s16      q4, d12, d20             \n"// (1) multiply samples right
+        "vmlal.s16      q4, d13, d21             \n"// (1) multiply samples right
+
+        // moving these ARM before neon seems to be slower
+        "subs           %[count], %[count], #8   \n"// (1) update loop counter
+        "sub            %[sP], %[sP], #32        \n"// (0) move pointer to next set of samples
+
+        // sP used after branch (warning)
+        "bne            1b                       \n"// loop
+
+        ASSEMBLY_ACCUMULATE_STEREO
+
+        : [out] "=Uv" (out[0]),
+          [count] "+r" (count),
+          [coefsP0] "+r" (coefsP),
+          [coefsN0] "+r" (coefsN),
+          [sP] "+r" (sP),
+          [sN] "+r" (sN)
+        : [vLR] "r" (volumeLR)
+        : "cc", "memory",
+          "q0", "q1", "q2", "q3",
+          "q4", "q5", "q6",
+          "q8", "q10"
+     );
+}
+
+template <>
+inline void Process<1, 16>(int32_t* const out,
+        int count,
+        const int16_t* coefsP,
+        const int16_t* coefsN,
+        const int16_t* coefsP1,
+        const int16_t* coefsN1,
+        const int16_t* sP,
+        const int16_t* sN,
+        uint32_t lerpP,
+        const int32_t* const volumeLR)
+{
+    const int CHANNELS = 1; // template specialization does not preserve params
+    const int STRIDE = 16;
+    sP -= CHANNELS*((STRIDE>>1)-1);
+    asm (
+        "vmov.32        d2[0], %[lerpP]          \n"// load the positive phase S32 Q15
+        "veor           q0, q0, q0               \n"// (0 - combines+) accumulator = 0
+
+        "1:                                      \n"
+
+        "vld1.16        {q2}, [%[sP]]            \n"// (2+0d) load 8 16-bits mono samples
+        "vld1.16        {q3}, [%[sN]]!           \n"// (2) load 8 16-bits mono samples
+        "vld1.16        {q8}, [%[coefsP0]:128]!  \n"// (1) load 8 16-bits coefs
+        "vld1.16        {q9}, [%[coefsP1]:128]!  \n"// (1) load 8 16-bits coefs for interpolation
+        "vld1.16        {q10}, [%[coefsN1]:128]! \n"// (1) load 8 16-bits coefs
+        "vld1.16        {q11}, [%[coefsN0]:128]! \n"// (1) load 8 16-bits coefs for interpolation
+
+        "vsub.s16       q9, q9, q8               \n"// (1) interpolate (step1) 1st set of coefs
+        "vsub.s16       q11, q11, q10            \n"// (1) interpolate (step1) 2nd set of coets
+
+        "vqrdmulh.s16   q9, q9, d2[0]            \n"// (2) interpolate (step2) 1st set of coefs
+        "vqrdmulh.s16   q11, q11, d2[0]          \n"// (2) interpolate (step2) 2nd set of coefs
+
+        "vrev64.16      q2, q2                   \n"// (1) reverse s3, s2, s1, s0, s7, s6, s5, s4
+
+        "vadd.s16       q8, q8, q9               \n"// (1+2d) interpolate (step3) 1st set
+        "vadd.s16       q10, q10, q11            \n"// (1+1d) interpolate (step3) 2nd set
+
+        // reordering the vmlal to do d6, d7 before d4, d5 is slower(?)
+        "vmlal.s16      q0, d4, d17              \n"// (1+0d) multiply reversed samples by coef
+        "vmlal.s16      q0, d5, d16              \n"// (1) multiply reversed samples by coef
+        "vmlal.s16      q0, d6, d20              \n"// (1) multiply neg samples
+        "vmlal.s16      q0, d7, d21              \n"// (1) multiply neg samples
+
+        // moving these ARM instructions before neon above seems to be slower
+        "subs           %[count], %[count], #8   \n"// (1) update loop counter
+        "sub            %[sP], %[sP], #16        \n"// (0) move pointer to next set of samples
+
+        // sP used after branch (warning)
+        "bne            1b                       \n"// loop
+
+        ASSEMBLY_ACCUMULATE_MONO
+
+        : [out]     "=Uv" (out[0]),
+          [count]   "+r" (count),
+          [coefsP0] "+r" (coefsP),
+          [coefsN0] "+r" (coefsN),
+          [coefsP1] "+r" (coefsP1),
+          [coefsN1] "+r" (coefsN1),
+          [sP]      "+r" (sP),
+          [sN]      "+r" (sN)
+        : [lerpP]   "r" (lerpP),
+          [vLR]     "r" (volumeLR)
+        : "cc", "memory",
+          "q0", "q1", "q2", "q3",
+          "q8", "q9", "q10", "q11"
+    );
+}
+
+template <>
+inline void Process<2, 16>(int32_t* const out,
+        int count,
+        const int16_t* coefsP,
+        const int16_t* coefsN,
+        const int16_t* coefsP1,
+        const int16_t* coefsN1,
+        const int16_t* sP,
+        const int16_t* sN,
+        uint32_t lerpP,
+        const int32_t* const volumeLR)
+{
+    const int CHANNELS = 2; // template specialization does not preserve params
+    const int STRIDE = 16;
+    sP -= CHANNELS*((STRIDE>>1)-1);
+    asm (
+        "vmov.32        d2[0], %[lerpP]          \n"// load the positive phase
+        "veor           q0, q0, q0               \n"// (1) acc_L = 0
+        "veor           q4, q4, q4               \n"// (0 combines+) acc_R = 0
+
+        "1:                                      \n"
+
+        "vld2.16        {q2, q3}, [%[sP]]        \n"// (3+0d) load 8 16-bits stereo samples
+        "vld2.16        {q5, q6}, [%[sN]]!       \n"// (3) load 8 16-bits stereo samples
+        "vld1.16        {q8}, [%[coefsP0]:128]!  \n"// (1) load 8 16-bits coefs
+        "vld1.16        {q9}, [%[coefsP1]:128]!  \n"// (1) load 8 16-bits coefs for interpolation
+        "vld1.16        {q10}, [%[coefsN1]:128]! \n"// (1) load 8 16-bits coefs
+        "vld1.16        {q11}, [%[coefsN0]:128]! \n"// (1) load 8 16-bits coefs for interpolation
+
+        "vsub.s16       q9, q9, q8               \n"// (1) interpolate (step1) 1st set of coefs
+        "vsub.s16       q11, q11, q10            \n"// (1) interpolate (step1) 2nd set of coets
+
+        "vqrdmulh.s16   q9, q9, d2[0]            \n"// (2) interpolate (step2) 1st set of coefs
+        "vqrdmulh.s16   q11, q11, d2[0]          \n"// (2) interpolate (step2) 2nd set of coefs
+
+        "vrev64.16      q2, q2                   \n"// (1) reverse 8 frames of the left positive
+        "vrev64.16      q3, q3                   \n"// (1) reverse 8 frames of the right positive
+
+        "vadd.s16       q8, q8, q9               \n"// (1+1d) interpolate (step3) 1st set
+        "vadd.s16       q10, q10, q11            \n"// (1+1d) interpolate (step3) 2nd set
+
+        "vmlal.s16      q0, d4, d17              \n"// (1) multiply reversed samples left
+        "vmlal.s16      q0, d5, d16              \n"// (1) multiply reversed samples left
+        "vmlal.s16      q4, d6, d17              \n"// (1) multiply reversed samples right
+        "vmlal.s16      q4, d7, d16              \n"// (1) multiply reversed samples right
+        "vmlal.s16      q0, d10, d20             \n"// (1) multiply samples left
+        "vmlal.s16      q0, d11, d21             \n"// (1) multiply samples left
+        "vmlal.s16      q4, d12, d20             \n"// (1) multiply samples right
+        "vmlal.s16      q4, d13, d21             \n"// (1) multiply samples right
+
+        // moving these ARM before neon seems to be slower
+        "subs           %[count], %[count], #8   \n"// (1) update loop counter
+        "sub            %[sP], %[sP], #32        \n"// (0) move pointer to next set of samples
+
+        // sP used after branch (warning)
+        "bne            1b                       \n"// loop
+
+        ASSEMBLY_ACCUMULATE_STEREO
+
+        : [out] "=Uv" (out[0]),
+          [count] "+r" (count),
+          [coefsP0] "+r" (coefsP),
+          [coefsN0] "+r" (coefsN),
+          [coefsP1] "+r" (coefsP1),
+          [coefsN1] "+r" (coefsN1),
+          [sP] "+r" (sP),
+          [sN] "+r" (sN)
+        : [lerpP]   "r" (lerpP),
+          [vLR] "r" (volumeLR)
+        : "cc", "memory",
+          "q0", "q1", "q2", "q3",
+          "q4", "q5", "q6",
+          "q8", "q9", "q10", "q11"
+    );
+}
+
+template <>
+inline void ProcessL<1, 16>(int32_t* const out,
+        int count,
+        const int32_t* coefsP,
+        const int32_t* coefsN,
+        const int16_t* sP,
+        const int16_t* sN,
+        const int32_t* const volumeLR)
+{
+    const int CHANNELS = 1; // template specialization does not preserve params
+    const int STRIDE = 16;
+    sP -= CHANNELS*((STRIDE>>1)-1);
+    asm (
+        "veor           q0, q0, q0                    \n"// result, initialize to 0
+
+        "1:                                           \n"
+
+        "vld1.16        {q2}, [%[sP]]                 \n"// load 8 16-bits mono samples
+        "vld1.16        {q3}, [%[sN]]!                \n"// load 8 16-bits mono samples
+        "vld1.32        {q8, q9}, [%[coefsP0]:128]!   \n"// load 8 32-bits coefs
+        "vld1.32        {q10, q11}, [%[coefsN0]:128]! \n"// load 8 32-bits coefs
+
+        "vrev64.16      q2, q2                        \n"// reverse 8 frames of the positive side
+
+        "vshll.s16      q12, d4, #15                  \n"// extend samples to 31 bits
+        "vshll.s16      q13, d5, #15                  \n"// extend samples to 31 bits
+
+        "vshll.s16      q14, d6, #15                  \n"// extend samples to 31 bits
+        "vshll.s16      q15, d7, #15                  \n"// extend samples to 31 bits
+
+        "vqrdmulh.s32   q12, q12, q9                  \n"// multiply samples by interpolated coef
+        "vqrdmulh.s32   q13, q13, q8                  \n"// multiply samples by interpolated coef
+        "vqrdmulh.s32   q14, q14, q10                 \n"// multiply samples by interpolated coef
+        "vqrdmulh.s32   q15, q15, q11                 \n"// multiply samples by interpolated coef
+
+        "vadd.s32       q0, q0, q12                   \n"// accumulate result
+        "vadd.s32       q13, q13, q14                 \n"// accumulate result
+        "vadd.s32       q0, q0, q15                   \n"// accumulate result
+        "vadd.s32       q0, q0, q13                   \n"// accumulate result
+
+        "sub            %[sP], %[sP], #16             \n"// move pointer to next set of samples
+        "subs           %[count], %[count], #8        \n"// update loop counter
+
+        "bne            1b                            \n"// loop
+
+        ASSEMBLY_ACCUMULATE_MONO
+
+        : [out]     "=Uv" (out[0]),
+          [count]   "+r" (count),
+          [coefsP0] "+r" (coefsP),
+          [coefsN0] "+r" (coefsN),
+          [sP]      "+r" (sP),
+          [sN]      "+r" (sN)
+        : [vLR]     "r" (volumeLR)
+        : "cc", "memory",
+          "q0", "q1", "q2", "q3",
+          "q8", "q9", "q10", "q11",
+          "q12", "q13", "q14", "q15"
+    );
+}
+
+template <>
+inline void ProcessL<2, 16>(int32_t* const out,
+        int count,
+        const int32_t* coefsP,
+        const int32_t* coefsN,
+        const int16_t* sP,
+        const int16_t* sN,
+        const int32_t* const volumeLR)
+{
+    const int CHANNELS = 2; // template specialization does not preserve params
+    const int STRIDE = 16;
+    sP -= CHANNELS*((STRIDE>>1)-1);
+    asm (
+        "veor           q0, q0, q0                    \n"// result, initialize to 0
+        "veor           q4, q4, q4                    \n"// result, initialize to 0
+
+        "1:                                           \n"
+
+        "vld2.16        {q2, q3}, [%[sP]]             \n"// load 4 16-bits stereo samples
+        "vld2.16        {q5, q6}, [%[sN]]!            \n"// load 4 16-bits stereo samples
+        "vld1.32        {q8, q9}, [%[coefsP0]:128]!   \n"// load 4 32-bits coefs
+        "vld1.32        {q10, q11}, [%[coefsN0]:128]! \n"// load 4 32-bits coefs
+
+        "vrev64.16      q2, q2                        \n"// reverse 8 frames of the positive side
+        "vrev64.16      q3, q3                        \n"// reverse 8 frames of the positive side
+
+        "vshll.s16      q12,  d4, #15                 \n"// extend samples to 31 bits
+        "vshll.s16      q13,  d5, #15                 \n"// extend samples to 31 bits
+
+        "vshll.s16      q14,  d10, #15                \n"// extend samples to 31 bits
+        "vshll.s16      q15,  d11, #15                \n"// extend samples to 31 bits
+
+        "vqrdmulh.s32   q12, q12, q9                  \n"// multiply samples by interpolated coef
+        "vqrdmulh.s32   q13, q13, q8                  \n"// multiply samples by interpolated coef
+        "vqrdmulh.s32   q14, q14, q10                 \n"// multiply samples by interpolated coef
+        "vqrdmulh.s32   q15, q15, q11                 \n"// multiply samples by interpolated coef
+
+        "vadd.s32       q0, q0, q12                   \n"// accumulate result
+        "vadd.s32       q13, q13, q14                 \n"// accumulate result
+        "vadd.s32       q0, q0, q15                   \n"// (+1) accumulate result
+        "vadd.s32       q0, q0, q13                   \n"// (+1) accumulate result
+
+        "vshll.s16      q12,  d6, #15                 \n"// extend samples to 31 bits
+        "vshll.s16      q13,  d7, #15                 \n"// extend samples to 31 bits
+
+        "vshll.s16      q14,  d12, #15                \n"// extend samples to 31 bits
+        "vshll.s16      q15,  d13, #15                \n"// extend samples to 31 bits
+
+        "vqrdmulh.s32   q12, q12, q9                  \n"// multiply samples by interpolated coef
+        "vqrdmulh.s32   q13, q13, q8                  \n"// multiply samples by interpolated coef
+        "vqrdmulh.s32   q14, q14, q10                 \n"// multiply samples by interpolated coef
+        "vqrdmulh.s32   q15, q15, q11                 \n"// multiply samples by interpolated coef
+
+        "vadd.s32       q4, q4, q12                   \n"// accumulate result
+        "vadd.s32       q13, q13, q14                 \n"// accumulate result
+        "vadd.s32       q4, q4, q15                   \n"// (+1) accumulate result
+        "vadd.s32       q4, q4, q13                   \n"// (+1) accumulate result
+
+        "subs           %[count], %[count], #8        \n"// update loop counter
+        "sub            %[sP], %[sP], #32             \n"// move pointer to next set of samples
+
+        "bne            1b                            \n"// loop
+
+        ASSEMBLY_ACCUMULATE_STEREO
+
+        : [out]     "=Uv" (out[0]),
+          [count]   "+r" (count),
+          [coefsP0] "+r" (coefsP),
+          [coefsN0] "+r" (coefsN),
+          [sP]      "+r" (sP),
+          [sN]      "+r" (sN)
+        : [vLR]     "r" (volumeLR)
+        : "cc", "memory",
+          "q0", "q1", "q2", "q3",
+          "q4", "q5", "q6",
+          "q8", "q9", "q10", "q11",
+          "q12", "q13", "q14", "q15"
+    );
+}
+
+template <>
+inline void Process<1, 16>(int32_t* const out,
+        int count,
+        const int32_t* coefsP,
+        const int32_t* coefsN,
+        const int32_t* coefsP1,
+        const int32_t* coefsN1,
+        const int16_t* sP,
+        const int16_t* sN,
+        uint32_t lerpP,
+        const int32_t* const volumeLR)
+{
+    const int CHANNELS = 1; // template specialization does not preserve params
+    const int STRIDE = 16;
+    sP -= CHANNELS*((STRIDE>>1)-1);
+    asm (
+        "vmov.32        d2[0], %[lerpP]               \n"// load the positive phase
+        "veor           q0, q0, q0                    \n"// result, initialize to 0
+
+        "1:                                           \n"
+
+        "vld1.16        {q2}, [%[sP]]                 \n"// load 8 16-bits mono samples
+        "vld1.16        {q3}, [%[sN]]!                \n"// load 8 16-bits mono samples
+        "vld1.32        {q8, q9}, [%[coefsP0]:128]!   \n"// load 8 32-bits coefs
+        "vld1.32        {q12, q13}, [%[coefsP1]:128]! \n"// load 8 32-bits coefs
+        "vld1.32        {q10, q11}, [%[coefsN1]:128]! \n"// load 8 32-bits coefs
+        "vld1.32        {q14, q15}, [%[coefsN0]:128]! \n"// load 8 32-bits coefs
+
+        "vsub.s32       q12, q12, q8                  \n"// interpolate (step1)
+        "vsub.s32       q13, q13, q9                  \n"// interpolate (step1)
+        "vsub.s32       q14, q14, q10                 \n"// interpolate (step1)
+        "vsub.s32       q15, q15, q11                 \n"// interpolate (step1)
+
+        "vqrdmulh.s32   q12, q12, d2[0]               \n"// interpolate (step2)
+        "vqrdmulh.s32   q13, q13, d2[0]               \n"// interpolate (step2)
+        "vqrdmulh.s32   q14, q14, d2[0]               \n"// interpolate (step2)
+        "vqrdmulh.s32   q15, q15, d2[0]               \n"// interpolate (step2)
+
+        "vadd.s32       q8, q8, q12                   \n"// interpolate (step3)
+        "vadd.s32       q9, q9, q13                   \n"// interpolate (step3)
+        "vadd.s32       q10, q10, q14                 \n"// interpolate (step3)
+        "vadd.s32       q11, q11, q15                 \n"// interpolate (step3)
+
+        "vrev64.16      q2, q2                        \n"// reverse 8 frames of the positive side
+
+        "vshll.s16      q12,  d4, #15                 \n"// extend samples to 31 bits
+        "vshll.s16      q13,  d5, #15                 \n"// extend samples to 31 bits
+
+        "vshll.s16      q14,  d6, #15                 \n"// extend samples to 31 bits
+        "vshll.s16      q15,  d7, #15                 \n"// extend samples to 31 bits
+
+        "vqrdmulh.s32   q12, q12, q9                  \n"// multiply samples by interpolated coef
+        "vqrdmulh.s32   q13, q13, q8                  \n"// multiply samples by interpolated coef
+        "vqrdmulh.s32   q14, q14, q10                 \n"// multiply samples by interpolated coef
+        "vqrdmulh.s32   q15, q15, q11                 \n"// multiply samples by interpolated coef
+
+        "vadd.s32       q0, q0, q12                   \n"// accumulate result
+        "vadd.s32       q13, q13, q14                 \n"// accumulate result
+        "vadd.s32       q0, q0, q15                   \n"// accumulate result
+        "vadd.s32       q0, q0, q13                   \n"// accumulate result
+
+        "sub            %[sP], %[sP], #16             \n"// move pointer to next set of samples
+        "subs           %[count], %[count], #8        \n"// update loop counter
+
+        "bne            1b                            \n"// loop
+
+        ASSEMBLY_ACCUMULATE_MONO
+
+        : [out]     "=Uv" (out[0]),
+          [count]   "+r" (count),
+          [coefsP0] "+r" (coefsP),
+          [coefsN0] "+r" (coefsN),
+          [coefsP1] "+r" (coefsP1),
+          [coefsN1] "+r" (coefsN1),
+          [sP]      "+r" (sP),
+          [sN]      "+r" (sN)
+        : [lerpP]   "r" (lerpP),
+          [vLR]     "r" (volumeLR)
+        : "cc", "memory",
+          "q0", "q1", "q2", "q3",
+          "q8", "q9", "q10", "q11",
+          "q12", "q13", "q14", "q15"
+    );
+}
+
+template <>
+inline void Process<2, 16>(int32_t* const out,
+        int count,
+        const int32_t* coefsP,
+        const int32_t* coefsN,
+        const int32_t* coefsP1,
+        const int32_t* coefsN1,
+        const int16_t* sP,
+        const int16_t* sN,
+        uint32_t lerpP,
+        const int32_t* const volumeLR)
+{
+    const int CHANNELS = 2; // template specialization does not preserve params
+    const int STRIDE = 16;
+    sP -= CHANNELS*((STRIDE>>1)-1);
+    asm (
+        "vmov.32        d2[0], %[lerpP]               \n"// load the positive phase
+        "veor           q0, q0, q0                    \n"// result, initialize to 0
+        "veor           q4, q4, q4                    \n"// result, initialize to 0
+
+        "1:                                           \n"
+
+        "vld2.16        {q2, q3}, [%[sP]]             \n"// load 4 16-bits stereo samples
+        "vld2.16        {q5, q6}, [%[sN]]!            \n"// load 4 16-bits stereo samples
+        "vld1.32        {q8, q9}, [%[coefsP0]:128]!   \n"// load 8 32-bits coefs
+        "vld1.32        {q12, q13}, [%[coefsP1]:128]! \n"// load 8 32-bits coefs
+        "vld1.32        {q10, q11}, [%[coefsN1]:128]! \n"// load 8 32-bits coefs
+        "vld1.32        {q14, q15}, [%[coefsN0]:128]! \n"// load 8 32-bits coefs
+
+        "vsub.s32       q12, q12, q8                  \n"// interpolate (step1)
+        "vsub.s32       q13, q13, q9                  \n"// interpolate (step1)
+        "vsub.s32       q14, q14, q10                 \n"// interpolate (step1)
+        "vsub.s32       q15, q15, q11                 \n"// interpolate (step1)
+
+        "vqrdmulh.s32   q12, q12, d2[0]               \n"// interpolate (step2)
+        "vqrdmulh.s32   q13, q13, d2[0]               \n"// interpolate (step2)
+        "vqrdmulh.s32   q14, q14, d2[0]               \n"// interpolate (step2)
+        "vqrdmulh.s32   q15, q15, d2[0]               \n"// interpolate (step2)
+
+        "vadd.s32       q8, q8, q12                   \n"// interpolate (step3)
+        "vadd.s32       q9, q9, q13                   \n"// interpolate (step3)
+        "vadd.s32       q10, q10, q14                 \n"// interpolate (step3)
+        "vadd.s32       q11, q11, q15                 \n"// interpolate (step3)
+
+        "vrev64.16      q2, q2                        \n"// reverse 8 frames of the positive side
+        "vrev64.16      q3, q3                        \n"// reverse 8 frames of the positive side
+
+        "vshll.s16      q12,  d4, #15                 \n"// extend samples to 31 bits
+        "vshll.s16      q13,  d5, #15                 \n"// extend samples to 31 bits
+
+        "vshll.s16      q14,  d10, #15                \n"// extend samples to 31 bits
+        "vshll.s16      q15,  d11, #15                \n"// extend samples to 31 bits
+
+        "vqrdmulh.s32   q12, q12, q9                  \n"// multiply samples by interpolated coef
+        "vqrdmulh.s32   q13, q13, q8                  \n"// multiply samples by interpolated coef
+        "vqrdmulh.s32   q14, q14, q10                 \n"// multiply samples by interpolated coef
+        "vqrdmulh.s32   q15, q15, q11                 \n"// multiply samples by interpolated coef
+
+        "vadd.s32       q0, q0, q12                   \n"// accumulate result
+        "vadd.s32       q13, q13, q14                 \n"// accumulate result
+        "vadd.s32       q0, q0, q15                   \n"// (+1) accumulate result
+        "vadd.s32       q0, q0, q13                   \n"// (+1) accumulate result
+
+        "vshll.s16      q12,  d6, #15                 \n"// extend samples to 31 bits
+        "vshll.s16      q13,  d7, #15                 \n"// extend samples to 31 bits
+
+        "vshll.s16      q14,  d12, #15                \n"// extend samples to 31 bits
+        "vshll.s16      q15,  d13, #15                \n"// extend samples to 31 bits
+
+        "vqrdmulh.s32   q12, q12, q9                  \n"// multiply samples by interpolated coef
+        "vqrdmulh.s32   q13, q13, q8                  \n"// multiply samples by interpolated coef
+        "vqrdmulh.s32   q14, q14, q10                 \n"// multiply samples by interpolated coef
+        "vqrdmulh.s32   q15, q15, q11                 \n"// multiply samples by interpolated coef
+
+        "vadd.s32       q4, q4, q12                   \n"// accumulate result
+        "vadd.s32       q13, q13, q14                 \n"// accumulate result
+        "vadd.s32       q4, q4, q15                   \n"// (+1) accumulate result
+        "vadd.s32       q4, q4, q13                   \n"// (+1) accumulate result
+
+        "subs           %[count], %[count], #8        \n"// update loop counter
+        "sub            %[sP], %[sP], #32             \n"// move pointer to next set of samples
+
+        "bne            1b                            \n"// loop
+
+        ASSEMBLY_ACCUMULATE_STEREO
+
+        : [out]     "=Uv" (out[0]),
+          [count]   "+r" (count),
+          [coefsP0] "+r" (coefsP),
+          [coefsN0] "+r" (coefsN),
+          [coefsP1] "+r" (coefsP1),
+          [coefsN1] "+r" (coefsN1),
+          [sP]      "+r" (sP),
+          [sN]      "+r" (sN)
+        : [lerpP]   "r" (lerpP),
+          [vLR]     "r" (volumeLR)
+        : "cc", "memory",
+          "q0", "q1", "q2", "q3",
+          "q4", "q5", "q6",
+          "q8", "q9", "q10", "q11",
+          "q12", "q13", "q14", "q15"
+    );
+}
+
+template <>
+inline void ProcessL<1, 8>(int32_t* const out,
+        int count,
+        const int16_t* coefsP,
+        const int16_t* coefsN,
+        const int16_t* sP,
+        const int16_t* sN,
+        const int32_t* const volumeLR)
+{
+    const int CHANNELS = 1; // template specialization does not preserve params
+    const int STRIDE = 8;
+    sP -= CHANNELS*((STRIDE>>1)-1);
+    asm (
+        "veor           q0, q0, q0               \n"// (0 - combines+) accumulator = 0
+
+        "1:                                      \n"
+
+        "vld1.16        {d4}, [%[sP]]            \n"// (2+0d) load 4 16-bits mono samples
+        "vld1.16        {d6}, [%[sN]]!           \n"// (2) load 4 16-bits mono samples
+        "vld1.16        {d16}, [%[coefsP0]:64]!  \n"// (1) load 4 16-bits coefs
+        "vld1.16        {d20}, [%[coefsN0]:64]!  \n"// (1) load 4 16-bits coefs
+
+        "vrev64.16      d4, d4                   \n"// (1) reversed s3, s2, s1, s0, s7, s6, s5, s4
+
+        // reordering the vmlal to do d6, d7 before d4, d5 is slower(?)
+        "vmlal.s16      q0, d4, d16              \n"// (1) multiply (reversed)samples by coef
+        "vmlal.s16      q0, d6, d20              \n"// (1) multiply neg samples
+
+        // moving these ARM instructions before neon above seems to be slower
+        "subs           %[count], %[count], #4   \n"// (1) update loop counter
+        "sub            %[sP], %[sP], #8         \n"// (0) move pointer to next set of samples
+
+        // sP used after branch (warning)
+        "bne            1b                       \n"// loop
+
+        ASSEMBLY_ACCUMULATE_MONO
+
+        : [out]     "=Uv" (out[0]),
+          [count]   "+r" (count),
+          [coefsP0] "+r" (coefsP),
+          [coefsN0] "+r" (coefsN),
+          [sP]      "+r" (sP),
+          [sN]      "+r" (sN)
+        : [vLR]     "r" (volumeLR)
+        : "cc", "memory",
+          "q0", "q1", "q2", "q3",
+          "q8", "q10"
+    );
+}
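The loop above is a plain FIR accumulation over the positive (reversed) and negative halves of the filter. A minimal scalar sketch of what the ProcessL<1, 8> body computes before ASSEMBLY_ACCUMULATE_MONO applies volumeLR and accumulates into out[] (the helper name is hypothetical and the sP pre-adjustment is ignored):

    #include <stdint.h>

    // Illustrative scalar equivalent of the NEON inner loop above (not part of the patch).
    static inline int32_t firAccumulateMonoRef(int count,
            const int16_t* coefsP, const int16_t* coefsN,
            const int16_t* sP,   // highest positive-side sample, read backwards
            const int16_t* sN)   // first negative-side sample, read forwards
    {
        int32_t acc = 0;
        for (int i = 0; i < count; ++i) {
            acc += int32_t(sP[-i]) * coefsP[i];  // vmlal.s16 q0, d4 (reversed), d16
            acc += int32_t(sN[i])  * coefsN[i];  // vmlal.s16 q0, d6, d20
        }
        return acc;
    }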
+
+template <>
+inline void ProcessL<2, 8>(int32_t* const out,
+        int count,
+        const int16_t* coefsP,
+        const int16_t* coefsN,
+        const int16_t* sP,
+        const int16_t* sN,
+        const int32_t* const volumeLR)
+{
+    const int CHANNELS = 2; // template specialization does not preserve params
+    const int STRIDE = 8;
+    sP -= CHANNELS*((STRIDE>>1)-1);
+    asm (
+        "veor           q0, q0, q0               \n"// (1) acc_L = 0
+        "veor           q4, q4, q4               \n"// (0 combines+) acc_R = 0
+
+        "1:                                      \n"
+
+        "vld2.16        {d4, d5}, [%[sP]]        \n"// (2+0d) load 8 16-bits stereo samples
+        "vld2.16        {d6, d7}, [%[sN]]!       \n"// (2) load 8 16-bits stereo samples
+        "vld1.16        {d16}, [%[coefsP0]:64]!  \n"// (1) load 8 16-bits coefs
+        "vld1.16        {d20}, [%[coefsN0]:64]!  \n"// (1) load 8 16-bits coefs
+
+        "vrev64.16      q2, q2                   \n"// (1) reverse 8 frames of the left positive
+
+        "vmlal.s16      q0, d4, d16              \n"// (1) multiply (reversed) samples left
+        "vmlal.s16      q4, d5, d16              \n"// (1) multiply (reversed) samples right
+        "vmlal.s16      q0, d6, d20              \n"// (1) multiply samples left
+        "vmlal.s16      q4, d7, d20              \n"// (1) multiply samples right
+
+        // moving these ARM before neon seems to be slower
+        "subs           %[count], %[count], #4   \n"// (1) update loop counter
+        "sub            %[sP], %[sP], #16        \n"// (0) move pointer to next set of samples
+
+        // sP used after branch (warning)
+        "bne            1b                       \n"// loop
+
+        ASSEMBLY_ACCUMULATE_STEREO
+
+        : [out] "=Uv" (out[0]),
+          [count] "+r" (count),
+          [coefsP0] "+r" (coefsP),
+          [coefsN0] "+r" (coefsN),
+          [sP] "+r" (sP),
+          [sN] "+r" (sN)
+        : [vLR] "r" (volumeLR)
+        : "cc", "memory",
+          "q0", "q1", "q2", "q3",
+          "q4", "q5", "q6",
+          "q8", "q10"
+     );
+}
+
+template <>
+inline void Process<1, 8>(int32_t* const out,
+        int count,
+        const int16_t* coefsP,
+        const int16_t* coefsN,
+        const int16_t* coefsP1,
+        const int16_t* coefsN1,
+        const int16_t* sP,
+        const int16_t* sN,
+        uint32_t lerpP,
+        const int32_t* const volumeLR)
+{
+    const int CHANNELS = 1; // template specialization does not preserve params
+    const int STRIDE = 8;
+    sP -= CHANNELS*((STRIDE>>1)-1);
+    asm (
+        "vmov.32        d2[0], %[lerpP]          \n"// load the positive phase S32 Q15
+        "veor           q0, q0, q0               \n"// (0 - combines+) accumulator = 0
+
+        "1:                                      \n"
+
+        "vld1.16        {d4}, [%[sP]]            \n"// (2+0d) load 4 16-bits mono samples
+        "vld1.16        {d6}, [%[sN]]!           \n"// (2) load 4 16-bits mono samples
+        "vld1.16        {d16}, [%[coefsP0]:64]!  \n"// (1) load 4 16-bits coefs
+        "vld1.16        {d17}, [%[coefsP1]:64]!  \n"// (1) load 4 16-bits coefs for interpolation
+        "vld1.16        {d20}, [%[coefsN1]:64]!  \n"// (1) load 4 16-bits coefs
+        "vld1.16        {d21}, [%[coefsN0]:64]!  \n"// (1) load 4 16-bits coefs for interpolation
+
+        "vsub.s16       d17, d17, d16            \n"// (1) interpolate (step1) 1st set of coefs
+        "vsub.s16       d21, d21, d20            \n"// (1) interpolate (step1) 2nd set of coets
+
+        "vqrdmulh.s16   d17, d17, d2[0]          \n"// (2) interpolate (step2) 1st set of coefs
+        "vqrdmulh.s16   d21, d21, d2[0]          \n"// (2) interpolate (step2) 2nd set of coefs
+
+        "vrev64.16      d4, d4                   \n"// (1) reverse s3, s2, s1, s0, s7, s6, s5, s4
+
+        "vadd.s16       d16, d16, d17            \n"// (1+2d) interpolate (step3) 1st set
+        "vadd.s16       d20, d20, d21            \n"// (1+1d) interpolate (step3) 2nd set
+
+        // reordering the vmlal to do d6, d7 before d4, d5 is slower(?)
+        "vmlal.s16      q0, d4, d16              \n"// (1+0d) multiply (reversed)by coef
+        "vmlal.s16      q0, d6, d20              \n"// (1) multiply neg samples
+
+        // moving these ARM instructions before neon above seems to be slower
+        "subs           %[count], %[count], #4   \n"// (1) update loop counter
+        "sub            %[sP], %[sP], #8        \n"// move pointer to next set of samples
+
+        // sP used after branch (warning)
+        "bne            1b                       \n"// loop
+
+        ASSEMBLY_ACCUMULATE_MONO
+
+        : [out]     "=Uv" (out[0]),
+          [count]   "+r" (count),
+          [coefsP0] "+r" (coefsP),
+          [coefsN0] "+r" (coefsN),
+          [coefsP1] "+r" (coefsP1),
+          [coefsN1] "+r" (coefsN1),
+          [sP]      "+r" (sP),
+          [sN]      "+r" (sN)
+        : [lerpP]   "r" (lerpP),
+          [vLR]     "r" (volumeLR)
+        : "cc", "memory",
+          "q0", "q1", "q2", "q3",
+          "q8", "q9", "q10", "q11"
+    );
+}
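The vsub/vqrdmulh/vadd triplet above linearly interpolates between the two adjacent coefficient sets using the Q15 phase fraction loaded into d2[0]. A minimal scalar sketch of one tap (illustration only; the helper name is hypothetical, and saturation and the 16-bit wrap of the difference are ignored):

    #include <stdint.h>

    static inline int16_t interpolateCoefRef(int16_t c0, int16_t c1, int16_t lerpQ15)
    {
        int32_t diff = c1 - c0;                                          // vsub.s16
        // vqrdmulh.s16 computes the rounding doubling high half: (2*a*b + 2^15) >> 16
        int32_t frac = int32_t((2 * int64_t(diff) * lerpQ15 + (1 << 15)) >> 16);
        return int16_t(c0 + frac);                                       // vadd.s16
    }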
+
+template <>
+inline void Process<2, 8>(int32_t* const out,
+        int count,
+        const int16_t* coefsP,
+        const int16_t* coefsN,
+        const int16_t* coefsP1,
+        const int16_t* coefsN1,
+        const int16_t* sP,
+        const int16_t* sN,
+        uint32_t lerpP,
+        const int32_t* const volumeLR)
+{
+    const int CHANNELS = 2; // template specialization does not preserve params
+    const int STRIDE = 8;
+    sP -= CHANNELS*((STRIDE>>1)-1);
+    asm (
+        "vmov.32        d2[0], %[lerpP]          \n"// load the positive phase
+        "veor           q0, q0, q0               \n"// (1) acc_L = 0
+        "veor           q4, q4, q4               \n"// (0 combines+) acc_R = 0
+
+        "1:                                      \n"
+
+        "vld2.16        {d4, d5}, [%[sP]]        \n"// (3+0d) load 8 16-bits stereo samples
+        "vld2.16        {d6, d7}, [%[sN]]!       \n"// (3) load 8 16-bits stereo samples
+        "vld1.16        {d16}, [%[coefsP0]:64]!  \n"// (1) load 8 16-bits coefs
+        "vld1.16        {d17}, [%[coefsP1]:64]!  \n"// (1) load 8 16-bits coefs for interpolation
+        "vld1.16        {d20}, [%[coefsN1]:64]!  \n"// (1) load 8 16-bits coefs
+        "vld1.16        {d21}, [%[coefsN0]:64]!  \n"// (1) load 8 16-bits coefs for interpolation
+
+        "vsub.s16       d17, d17, d16            \n"// (1) interpolate (step1) 1st set of coefs
+        "vsub.s16       d21, d21, d20            \n"// (1) interpolate (step1) 2nd set of coets
+
+        "vqrdmulh.s16   d17, d17, d2[0]          \n"// (2) interpolate (step2) 1st set of coefs
+        "vqrdmulh.s16   d21, d21, d2[0]          \n"// (2) interpolate (step2) 2nd set of coefs
+
+        "vrev64.16      q2, q2                   \n"// (1) reverse 8 frames of the left positive
+
+        "vadd.s16       d16, d16, d17            \n"// (1+1d) interpolate (step3) 1st set
+        "vadd.s16       d20, d20, d21            \n"// (1+1d) interpolate (step3) 2nd set
+
+        "vmlal.s16      q0, d4, d16              \n"// (1) multiply (reversed) samples left
+        "vmlal.s16      q4, d5, d16              \n"// (1) multiply (reversed) samples right
+        "vmlal.s16      q0, d6, d20              \n"// (1) multiply samples left
+        "vmlal.s16      q4, d7, d20              \n"// (1) multiply samples right
+
+        // moving these ARM before neon seems to be slower
+        "subs           %[count], %[count], #4   \n"// (1) update loop counter
+        "sub            %[sP], %[sP], #16        \n"// move pointer to next set of samples
+
+        // sP used after branch (warning)
+        "bne            1b                       \n"// loop
+
+        ASSEMBLY_ACCUMULATE_STEREO
+
+        : [out] "=Uv" (out[0]),
+          [count] "+r" (count),
+          [coefsP0] "+r" (coefsP),
+          [coefsN0] "+r" (coefsN),
+          [coefsP1] "+r" (coefsP1),
+          [coefsN1] "+r" (coefsN1),
+          [sP] "+r" (sP),
+          [sN] "+r" (sN)
+        : [lerpP]   "r" (lerpP),
+          [vLR] "r" (volumeLR)
+        : "cc", "memory",
+          "q0", "q1", "q2", "q3",
+          "q4", "q5", "q6",
+          "q8", "q9", "q10", "q11"
+    );
+}
+
+template <>
+inline void ProcessL<1, 8>(int32_t* const out,
+        int count,
+        const int32_t* coefsP,
+        const int32_t* coefsN,
+        const int16_t* sP,
+        const int16_t* sN,
+        const int32_t* const volumeLR)
+{
+    const int CHANNELS = 1; // template specialization does not preserve params
+    const int STRIDE = 8;
+    sP -= CHANNELS*((STRIDE>>1)-1);
+    asm (
+        "veor           q0, q0, q0               \n"// result, initialize to 0
+
+        "1:                                      \n"
+
+        "vld1.16        {d4}, [%[sP]]            \n"// load 4 16-bits mono samples
+        "vld1.16        {d6}, [%[sN]]!           \n"// load 4 16-bits mono samples
+        "vld1.32        {q8}, [%[coefsP0]:128]!  \n"// load 4 32-bits coefs
+        "vld1.32        {q10}, [%[coefsN0]:128]! \n"// load 4 32-bits coefs
+
+        "vrev64.16      d4, d4                   \n"// reverse 2 frames of the positive side
+
+        "vshll.s16      q12, d4, #15             \n"// (stall) extend samples to 31 bits
+        "vshll.s16      q14, d6, #15             \n"// extend samples to 31 bits
+
+        "vqrdmulh.s32   q12, q12, q8             \n"// multiply samples by interpolated coef
+        "vqrdmulh.s32   q14, q14, q10            \n"// multiply samples by interpolated coef
+
+        "vadd.s32       q0, q0, q12              \n"// accumulate result
+        "vadd.s32       q0, q0, q14              \n"// (stall) accumulate result
+
+        "subs           %[count], %[count], #4   \n"// update loop counter
+        "sub            %[sP], %[sP], #8         \n"// move pointer to next set of samples
+
+        "bne            1b                       \n"// loop
+
+        ASSEMBLY_ACCUMULATE_MONO
+
+        : [out] "=Uv" (out[0]),
+          [count] "+r" (count),
+          [coefsP0] "+r" (coefsP),
+          [coefsN0] "+r" (coefsN),
+          [sP] "+r" (sP),
+          [sN] "+r" (sN)
+        : [vLR] "r" (volumeLR)
+        : "cc", "memory",
+          "q0", "q1", "q2", "q3",
+          "q8", "q9", "q10", "q11",
+          "q12", "q14"
+    );
+}
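In the 32-bit-coefficient specializations (the ProcessL/Process variants taking int32_t coefs), each 16-bit sample is first widened with vshll.s16 #15 and then combined with the Q31 coefficient via vqrdmulh.s32 (rounding doubling high multiply). A minimal scalar sketch of one tap (illustration only; helper name hypothetical, saturation ignored):

    #include <stdint.h>

    static inline int32_t mulTapQ31Ref(int16_t sample, int32_t coefQ31)
    {
        int64_t wide = int64_t(sample) << 15;                            // vshll.s16 #15
        // vqrdmulh.s32 computes (2*a*b + 2^31) >> 32
        return int32_t((2 * wide * coefQ31 + (int64_t(1) << 31)) >> 32);
    }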
+
+template <>
+inline void ProcessL<2, 8>(int32_t* const out,
+        int count,
+        const int32_t* coefsP,
+        const int32_t* coefsN,
+        const int16_t* sP,
+        const int16_t* sN,
+        const int32_t* const volumeLR)
+{
+    const int CHANNELS = 2; // template specialization does not preserve params
+    const int STRIDE = 8;
+    sP -= CHANNELS*((STRIDE>>1)-1);
+    asm (
+        "veor           q0, q0, q0               \n"// result, initialize to 0
+        "veor           q4, q4, q4               \n"// result, initialize to 0
+
+        "1:                                      \n"
+
+        "vld2.16        {d4, d5}, [%[sP]]        \n"// load 4 16-bits stereo samples
+        "vld2.16        {d6, d7}, [%[sN]]!       \n"// load 4 16-bits stereo samples
+        "vld1.32        {q8}, [%[coefsP0]:128]!  \n"// load 4 32-bits coefs
+        "vld1.32        {q10}, [%[coefsN0]:128]! \n"// load 4 32-bits coefs
+
+        "vrev64.16      q2, q2                   \n"// reverse 2 frames of the positive side
+
+        "vshll.s16      q12, d4, #15             \n"// extend samples to 31 bits
+        "vshll.s16      q13, d5, #15             \n"// extend samples to 31 bits
+
+        "vshll.s16      q14, d6, #15             \n"// extend samples to 31 bits
+        "vshll.s16      q15, d7, #15             \n"// extend samples to 31 bits
+
+        "vqrdmulh.s32   q12, q12, q8             \n"// multiply samples by coef
+        "vqrdmulh.s32   q13, q13, q8             \n"// multiply samples by coef
+        "vqrdmulh.s32   q14, q14, q10            \n"// multiply samples by coef
+        "vqrdmulh.s32   q15, q15, q10            \n"// multiply samples by coef
+
+        "vadd.s32       q0, q0, q12              \n"// accumulate result
+        "vadd.s32       q4, q4, q13              \n"// accumulate result
+        "vadd.s32       q0, q0, q14              \n"// accumulate result
+        "vadd.s32       q4, q4, q15              \n"// accumulate result
+
+        "subs           %[count], %[count], #4   \n"// update loop counter
+        "sub            %[sP], %[sP], #16        \n"// move pointer to next set of samples
+
+        "bne            1b                       \n"// loop
+
+        ASSEMBLY_ACCUMULATE_STEREO
+
+        : [out]     "=Uv" (out[0]),
+          [count]   "+r" (count),
+          [coefsP0] "+r" (coefsP),
+          [coefsN0] "+r" (coefsN),
+          [sP]      "+r" (sP),
+          [sN]      "+r" (sN)
+        : [vLR]     "r" (volumeLR)
+        : "cc", "memory",
+          "q0", "q1", "q2", "q3", "q4",
+          "q8", "q9", "q10", "q11",
+          "q12", "q13", "q14", "q15"
+    );
+}
+
+template <>
+inline void Process<1, 8>(int32_t* const out,
+        int count,
+        const int32_t* coefsP,
+        const int32_t* coefsN,
+        const int32_t* coefsP1,
+        const int32_t* coefsN1,
+        const int16_t* sP,
+        const int16_t* sN,
+        uint32_t lerpP,
+        const int32_t* const volumeLR)
+{
+    const int CHANNELS = 1; // template specialization does not preserve params
+    const int STRIDE = 8;
+    sP -= CHANNELS*((STRIDE>>1)-1);
+    asm (
+        "vmov.32        d2[0], %[lerpP]          \n"// load the positive phase
+        "veor           q0, q0, q0               \n"// result, initialize to 0
+
+        "1:                                      \n"
+
+        "vld1.16        {d4}, [%[sP]]            \n"// load 4 16-bits mono samples
+        "vld1.16        {d6}, [%[sN]]!           \n"// load 4 16-bits mono samples
+        "vld1.32        {q8}, [%[coefsP0]:128]!  \n"// load 4 32-bits coefs
+        "vld1.32        {q9}, [%[coefsP1]:128]!  \n"// load 4 32-bits coefs for interpolation
+        "vld1.32        {q10}, [%[coefsN1]:128]! \n"// load 4 32-bits coefs
+        "vld1.32        {q11}, [%[coefsN0]:128]! \n"// load 4 32-bits coefs for interpolation
+
+        "vrev64.16      d4, d4                   \n"// reverse 2 frames of the positive side
+
+        "vsub.s32       q9, q9, q8               \n"// interpolate (step1) 1st set of coefs
+        "vsub.s32       q11, q11, q10            \n"// interpolate (step1) 2nd set of coets
+        "vshll.s16      q12, d4, #15             \n"// extend samples to 31 bits
+
+        "vqrdmulh.s32   q9, q9, d2[0]            \n"// interpolate (step2) 1st set of coefs
+        "vqrdmulh.s32   q11, q11, d2[0]          \n"// interpolate (step2) 2nd set of coefs
+        "vshll.s16      q14, d6, #15             \n"// extend samples to 31 bits
+
+        "vadd.s32       q8, q8, q9               \n"// interpolate (step3) 1st set
+        "vadd.s32       q10, q10, q11            \n"// interpolate (step4) 2nd set
+
+        "vqrdmulh.s32   q12, q12, q8             \n"// multiply samples by interpolated coef
+        "vqrdmulh.s32   q14, q14, q10            \n"// multiply samples by interpolated coef
+
+        "vadd.s32       q0, q0, q12              \n"// accumulate result
+        "vadd.s32       q0, q0, q14              \n"// accumulate result
+
+        "subs           %[count], %[count], #4   \n"// update loop counter
+        "sub            %[sP], %[sP], #8         \n"// move pointer to next set of samples
+
+        "bne            1b                       \n"// loop
+
+        ASSEMBLY_ACCUMULATE_MONO
+
+        : [out]     "=Uv" (out[0]),
+          [count]   "+r" (count),
+          [coefsP0] "+r" (coefsP),
+          [coefsP1] "+r" (coefsP1),
+          [coefsN0] "+r" (coefsN),
+          [coefsN1] "+r" (coefsN1),
+          [sP]      "+r" (sP),
+          [sN]      "+r" (sN)
+        : [lerpP]   "r" (lerpP),
+          [vLR]     "r" (volumeLR)
+        : "cc", "memory",
+          "q0", "q1", "q2", "q3",
+          "q8", "q9", "q10", "q11",
+          "q12", "q14"
+    );
+}
+
+template <>
+inline
+void Process<2, 8>(int32_t* const out,
+        int count,
+        const int32_t* coefsP,
+        const int32_t* coefsN,
+        const int32_t* coefsP1,
+        const int32_t* coefsN1,
+        const int16_t* sP,
+        const int16_t* sN,
+        uint32_t lerpP,
+        const int32_t* const volumeLR)
+{
+    const int CHANNELS = 2; // template specialization does not preserve params
+    const int STRIDE = 8;
+    sP -= CHANNELS*((STRIDE>>1)-1);
+    asm (
+        "vmov.32        d2[0], %[lerpP]          \n"// load the positive phase
+        "veor           q0, q0, q0               \n"// result, initialize to 0
+        "veor           q4, q4, q4               \n"// result, initialize to 0
+
+        "1:                                      \n"
+        "vld2.16        {d4, d5}, [%[sP]]        \n"// load 4 16-bits stereo samples
+        "vld2.16        {d6, d7}, [%[sN]]!       \n"// load 4 16-bits stereo samples
+        "vld1.32        {q8}, [%[coefsP0]:128]!  \n"// load 4 32-bits coefs
+        "vld1.32        {q9}, [%[coefsP1]:128]!  \n"// load 4 32-bits coefs for interpolation
+        "vld1.32        {q10}, [%[coefsN1]:128]! \n"// load 4 32-bits coefs
+        "vld1.32        {q11}, [%[coefsN0]:128]! \n"// load 4 32-bits coefs for interpolation
+
+        "vrev64.16      q2, q2                   \n"// (reversed) 2 frames of the positive side
+
+        "vsub.s32       q9, q9, q8               \n"// interpolate (step1) 1st set of coefs
+        "vsub.s32       q11, q11, q10            \n"// interpolate (step1) 2nd set of coets
+        "vshll.s16      q12, d4, #15             \n"// extend samples to 31 bits
+        "vshll.s16      q13, d5, #15             \n"// extend samples to 31 bits
+
+        "vqrdmulh.s32   q9, q9, d2[0]            \n"// interpolate (step2) 1st set of coefs
+        "vqrdmulh.s32   q11, q11, d2[1]          \n"// interpolate (step3) 2nd set of coefs
+        "vshll.s16      q14, d6, #15             \n"// extend samples to 31 bits
+        "vshll.s16      q15, d7, #15             \n"// extend samples to 31 bits
+
+        "vadd.s32       q8, q8, q9               \n"// interpolate (step3) 1st set
+        "vadd.s32       q10, q10, q11            \n"// interpolate (step4) 2nd set
+
+        "vqrdmulh.s32   q12, q12, q8             \n"// multiply samples by interpolated coef
+        "vqrdmulh.s32   q13, q13, q8             \n"// multiply samples by interpolated coef
+        "vqrdmulh.s32   q14, q14, q10            \n"// multiply samples by interpolated coef
+        "vqrdmulh.s32   q15, q15, q10            \n"// multiply samples by interpolated coef
+
+        "vadd.s32       q0, q0, q12              \n"// accumulate result
+        "vadd.s32       q4, q4, q13              \n"// accumulate result
+        "vadd.s32       q0, q0, q14              \n"// accumulate result
+        "vadd.s32       q4, q4, q15              \n"// accumulate result
+
+        "subs           %[count], %[count], #4   \n"// update loop counter
+        "sub            %[sP], %[sP], #16        \n"// move pointer to next set of samples
+
+        "bne            1b                       \n"// loop
+
+        ASSEMBLY_ACCUMULATE_STEREO
+
+        : [out]     "=Uv" (out[0]),
+          [count]   "+r" (count),
+          [coefsP0] "+r" (coefsP),
+          [coefsP1] "+r" (coefsP1),
+          [coefsN0] "+r" (coefsN),
+          [coefsN1] "+r" (coefsN1),
+          [sP]      "+r" (sP),
+          [sN]      "+r" (sN)
+        : [lerpP]   "r" (lerpP),
+          [vLR]     "r" (volumeLR)
+        : "cc", "memory",
+          "q0", "q1", "q2", "q3", "q4",
+          "q8", "q9", "q10", "q11",
+          "q12", "q13", "q14", "q15"
+    );
+}
+
+#endif //USE_NEON
+
+}; // namespace android
+
+#endif /*ANDROID_AUDIO_RESAMPLER_FIR_PROCESS_NEON_H*/
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 010e233..29b56db 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -116,8 +116,9 @@
             continue;
         }
         // first non destroyed handle is considered in control
-        if (controlHandle == NULL)
+        if (controlHandle == NULL) {
             controlHandle = h;
+        }
         if (h->priority() <= priority) {
             break;
         }
@@ -804,7 +805,112 @@
     return mOffloaded;
 }
 
-void AudioFlinger::EffectModule::dump(int fd, const Vector<String16>& args)
+String8 effectFlagsToString(uint32_t flags) {
+    String8 s;
+
+    s.append("conn. mode: ");
+    switch (flags & EFFECT_FLAG_TYPE_MASK) {
+    case EFFECT_FLAG_TYPE_INSERT: s.append("insert"); break;
+    case EFFECT_FLAG_TYPE_AUXILIARY: s.append("auxiliary"); break;
+    case EFFECT_FLAG_TYPE_REPLACE: s.append("replace"); break;
+    case EFFECT_FLAG_TYPE_PRE_PROC: s.append("preproc"); break;
+    case EFFECT_FLAG_TYPE_POST_PROC: s.append("postproc"); break;
+    default: s.append("unknown/reserved"); break;
+    }
+    s.append(", ");
+
+    s.append("insert pref: ");
+    switch (flags & EFFECT_FLAG_INSERT_MASK) {
+    case EFFECT_FLAG_INSERT_ANY: s.append("any"); break;
+    case EFFECT_FLAG_INSERT_FIRST: s.append("first"); break;
+    case EFFECT_FLAG_INSERT_LAST: s.append("last"); break;
+    case EFFECT_FLAG_INSERT_EXCLUSIVE: s.append("exclusive"); break;
+    default: s.append("unknown/reserved"); break;
+    }
+    s.append(", ");
+
+    s.append("volume mgmt: ");
+    switch (flags & EFFECT_FLAG_VOLUME_MASK) {
+    case EFFECT_FLAG_VOLUME_NONE: s.append("none"); break;
+    case EFFECT_FLAG_VOLUME_CTRL: s.append("implements control"); break;
+    case EFFECT_FLAG_VOLUME_IND: s.append("requires indication"); break;
+    default: s.append("unknown/reserved"); break;
+    }
+    s.append(", ");
+
+    uint32_t devind = flags & EFFECT_FLAG_DEVICE_MASK;
+    if (devind) {
+        s.append("device indication: ");
+        switch (devind) {
+        case EFFECT_FLAG_DEVICE_IND: s.append("requires updates"); break;
+        default: s.append("unknown/reserved"); break;
+        }
+        s.append(", ");
+    }
+
+    s.append("input mode: ");
+    switch (flags & EFFECT_FLAG_INPUT_MASK) {
+    case EFFECT_FLAG_INPUT_DIRECT: s.append("direct"); break;
+    case EFFECT_FLAG_INPUT_PROVIDER: s.append("provider"); break;
+    case EFFECT_FLAG_INPUT_BOTH: s.append("direct+provider"); break;
+    default: s.append("not set"); break;
+    }
+    s.append(", ");
+
+    s.append("output mode: ");
+    switch (flags & EFFECT_FLAG_OUTPUT_MASK) {
+    case EFFECT_FLAG_OUTPUT_DIRECT: s.append("direct"); break;
+    case EFFECT_FLAG_OUTPUT_PROVIDER: s.append("provider"); break;
+    case EFFECT_FLAG_OUTPUT_BOTH: s.append("direct+provider"); break;
+    default: s.append("not set"); break;
+    }
+    s.append(", ");
+
+    uint32_t accel = flags & EFFECT_FLAG_HW_ACC_MASK;
+    if (accel) {
+        s.append("hardware acceleration: ");
+        switch (accel) {
+        case EFFECT_FLAG_HW_ACC_SIMPLE: s.append("non-tunneled"); break;
+        case EFFECT_FLAG_HW_ACC_TUNNEL: s.append("tunneled"); break;
+        default: s.append("unknown/reserved"); break;
+        }
+        s.append(", ");
+    }
+
+    uint32_t modeind = flags & EFFECT_FLAG_AUDIO_MODE_MASK;
+    if (modeind) {
+        s.append("mode indication: ");
+        switch (modeind) {
+        case EFFECT_FLAG_AUDIO_MODE_IND: s.append("required"); break;
+        default: s.append("unknown/reserved"); break;
+        }
+        s.append(", ");
+    }
+
+    uint32_t srcind = flags & EFFECT_FLAG_AUDIO_SOURCE_MASK;
+    if (srcind) {
+        s.append("source indication: ");
+        switch (srcind) {
+        case EFFECT_FLAG_AUDIO_SOURCE_IND: s.append("required"); break;
+        default: s.append("unknown/reserved"); break;
+        }
+        s.append(", ");
+    }
+
+    if (flags & EFFECT_FLAG_OFFLOAD_MASK) {
+        s.append("offloadable, ");
+    }
+
+    int len = s.length();
+    if (len > 2) {
+        (void) s.lockBuffer(len);
+        s.unlockBuffer(len - 2);    // drop the trailing ", "
+    }
+    return s;
+}
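As a hypothetical example of the annotation this adds to the effect dump (the exact text depends on the descriptor), a descriptor flagged EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_LAST | EFFECT_FLAG_VOLUME_CTRL would be rendered roughly as:

    conn. mode: insert, insert pref: last, volume mgmt: implements control, input mode: not set, output mode: not set

with the trailing ", " removed by the lockBuffer()/unlockBuffer() trim above.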
+
+
+void AudioFlinger::EffectModule::dump(int fd, const Vector<String16>& args __unused)
 {
     const size_t SIZE = 256;
     char buffer[SIZE];
@@ -838,9 +944,10 @@
                     mDescriptor.type.node[2],
                 mDescriptor.type.node[3],mDescriptor.type.node[4],mDescriptor.type.node[5]);
     result.append(buffer);
-    snprintf(buffer, SIZE, "\t\t- apiVersion: %08X\n\t\t- flags: %08X\n",
+    snprintf(buffer, SIZE, "\t\t- apiVersion: %08X\n\t\t- flags: %08X (%s)\n",
             mDescriptor.apiVersion,
-            mDescriptor.flags);
+            mDescriptor.flags,
+            effectFlagsToString(mDescriptor.flags).string());
     result.append(buffer);
     snprintf(buffer, SIZE, "\t\t- name: %s\n",
             mDescriptor.name);
@@ -851,37 +958,37 @@
 
     result.append("\t\t- Input configuration:\n");
     result.append("\t\t\tFrames  Smp rate Channels Format Buffer\n");
-    snprintf(buffer, SIZE, "\t\t\t%05zu   %05d    %08x %6d %p\n",
+    snprintf(buffer, SIZE, "\t\t\t%05zu   %05d    %08x %6d (%s) %p\n",
             mConfig.inputCfg.buffer.frameCount,
             mConfig.inputCfg.samplingRate,
             mConfig.inputCfg.channels,
             mConfig.inputCfg.format,
+            formatToString((audio_format_t)mConfig.inputCfg.format),
             mConfig.inputCfg.buffer.raw);
     result.append(buffer);
 
     result.append("\t\t- Output configuration:\n");
     result.append("\t\t\tBuffer     Frames  Smp rate Channels Format\n");
-    snprintf(buffer, SIZE, "\t\t\t%p %05zu   %05d    %08x %d\n",
+    snprintf(buffer, SIZE, "\t\t\t%p %05zu   %05d    %08x %d (%s)\n",
             mConfig.outputCfg.buffer.raw,
             mConfig.outputCfg.buffer.frameCount,
             mConfig.outputCfg.samplingRate,
             mConfig.outputCfg.channels,
-            mConfig.outputCfg.format);
+            mConfig.outputCfg.format,
+            formatToString((audio_format_t)mConfig.outputCfg.format));
     result.append(buffer);
 
     snprintf(buffer, SIZE, "\t\t%zu Clients:\n", mHandles.size());
     result.append(buffer);
-    result.append("\t\t\tPid   Priority Ctrl Locked client server\n");
+    result.append("\t\t\t  Pid Priority Ctrl Locked client server\n");
     for (size_t i = 0; i < mHandles.size(); ++i) {
         EffectHandle *handle = mHandles[i];
         if (handle != NULL && !handle->destroyed_l()) {
-            handle->dump(buffer, SIZE);
+            handle->dumpToBuffer(buffer, SIZE);
             result.append(buffer);
         }
     }
 
-    result.append("\n");
-
     write(fd, result.string(), result.length());
 
     if (locked) {
@@ -911,18 +1018,15 @@
     }
     int bufOffset = ((sizeof(effect_param_cblk_t) - 1) / sizeof(int) + 1) * sizeof(int);
     mCblkMemory = client->heap()->allocate(EFFECT_PARAM_BUFFER_SIZE + bufOffset);
-    if (mCblkMemory != 0) {
-        mCblk = static_cast<effect_param_cblk_t *>(mCblkMemory->pointer());
-
-        if (mCblk != NULL) {
-            new(mCblk) effect_param_cblk_t();
-            mBuffer = (uint8_t *)mCblk + bufOffset;
-        }
-    } else {
+    if (mCblkMemory == 0 ||
+            (mCblk = static_cast<effect_param_cblk_t *>(mCblkMemory->pointer())) == NULL) {
         ALOGE("not enough memory for Effect size=%u", EFFECT_PARAM_BUFFER_SIZE +
                 sizeof(effect_param_cblk_t));
+        mCblkMemory.clear();
         return;
     }
+    new(mCblk) effect_param_cblk_t();
+    mBuffer = (uint8_t *)mCblk + bufOffset;
 }
 
 AudioFlinger::EffectHandle::~EffectHandle()
@@ -939,6 +1043,11 @@
     disconnect(false);
 }
 
+status_t AudioFlinger::EffectHandle::initCheck()
+{
+    return mClient == 0 || mCblkMemory != 0 ? OK : NO_MEMORY;
+}
+
 status_t AudioFlinger::EffectHandle::enable()
 {
     ALOGV("enable %p", this);
@@ -1179,15 +1288,15 @@
 }
 
 
-void AudioFlinger::EffectHandle::dump(char* buffer, size_t size)
+void AudioFlinger::EffectHandle::dumpToBuffer(char* buffer, size_t size)
 {
     bool locked = mCblk != NULL && AudioFlinger::dumpTryLock(mCblk->lock);
 
-    snprintf(buffer, size, "\t\t\t%05d %05d    %01u    %01u      %05u  %05u\n",
+    snprintf(buffer, size, "\t\t\t%5d    %5d  %3s    %3s  %5u  %5u\n",
             (mClient == 0) ? getpid_cached : mClient->pid(),
             mPriority,
-            mHasControl,
-            !locked,
+            mHasControl ? "yes" : "no",
+            locked ? "yes" : "no",
             mCblk ? mCblk->clientIndex : 0,
             mCblk ? mCblk->serverIndex : 0
             );
@@ -1568,33 +1677,35 @@
     char buffer[SIZE];
     String8 result;
 
-    snprintf(buffer, SIZE, "Effects for session %d:\n", mSessionId);
+    size_t numEffects = mEffects.size();
+    snprintf(buffer, SIZE, "    %d effects for session %d\n", numEffects, mSessionId);
     result.append(buffer);
 
-    bool locked = AudioFlinger::dumpTryLock(mLock);
-    // failed to lock - AudioFlinger is probably deadlocked
-    if (!locked) {
-        result.append("\tCould not lock mutex:\n");
-    }
-
-    result.append("\tNum fx In buffer   Out buffer   Active tracks:\n");
-    snprintf(buffer, SIZE, "\t%02zu     %p  %p   %d\n",
-            mEffects.size(),
-            mInBuffer,
-            mOutBuffer,
-            mActiveTrackCnt);
-    result.append(buffer);
-    write(fd, result.string(), result.size());
-
-    for (size_t i = 0; i < mEffects.size(); ++i) {
-        sp<EffectModule> effect = mEffects[i];
-        if (effect != 0) {
-            effect->dump(fd, args);
+    if (numEffects) {
+        bool locked = AudioFlinger::dumpTryLock(mLock);
+        // failed to lock - AudioFlinger is probably deadlocked
+        if (!locked) {
+            result.append("\tCould not lock mutex:\n");
         }
-    }
 
-    if (locked) {
-        mLock.unlock();
+        result.append("\tIn buffer   Out buffer   Active tracks:\n");
+        snprintf(buffer, SIZE, "\t%p  %p   %d\n",
+                mInBuffer,
+                mOutBuffer,
+                mActiveTrackCnt);
+        result.append(buffer);
+        write(fd, result.string(), result.size());
+
+        for (size_t i = 0; i < numEffects; ++i) {
+            sp<EffectModule> effect = mEffects[i];
+            if (effect != 0) {
+                effect->dump(fd, args);
+            }
+        }
+
+        if (locked) {
+            mLock.unlock();
+        }
     }
 }
 
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index b717857..ccc4825 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -169,6 +169,7 @@
             const sp<IEffectClient>& effectClient,
             int32_t priority);
     virtual ~EffectHandle();
+    virtual status_t initCheck();
 
     // IEffect
     virtual status_t enable();
@@ -208,7 +209,7 @@
     // destroyed_l() must be called with the associated EffectModule mLock held
     bool destroyed_l() const { return mDestroyed; }
 
-    void dump(char* buffer, size_t size);
+    void dumpToBuffer(char* buffer, size_t size);
 
 protected:
     friend class AudioFlinger;          // for mEffect, mHasControl, mEnabled
diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp
index 85d637e..90122e0 100644
--- a/services/audioflinger/FastMixer.cpp
+++ b/services/audioflinger/FastMixer.cpp
@@ -238,7 +238,7 @@
                 }
             }
 
-            if ((format != previousFormat) || (frameCount != previous->mFrameCount)) {
+            if ((!Format_isEqual(format, previousFormat)) || (frameCount != previous->mFrameCount)) {
                 // FIXME to avoid priority inversion, don't delete here
                 delete mixer;
                 mixer = NULL;
@@ -440,8 +440,9 @@
             }
 
             int64_t pts;
-            if (outputSink == NULL || (OK != outputSink->getNextWriteTimestamp(&pts)))
+            if (outputSink == NULL || (OK != outputSink->getNextWriteTimestamp(&pts))) {
                 pts = AudioBufferProvider::kInvalidPTS;
+            }
 
             // process() is CPU-bound
             mixer->process(pts);
@@ -695,7 +696,7 @@
 void FastMixerDumpState::dump(int fd) const
 {
     if (mCommand == FastMixerState::INITIAL) {
-        fdprintf(fd, "FastMixer not initialized\n");
+        fdprintf(fd, "  FastMixer not initialized\n");
         return;
     }
 #define COMMAND_MAX 32
@@ -729,10 +730,10 @@
     double measuredWarmupMs = (mMeasuredWarmupTs.tv_sec * 1000.0) +
             (mMeasuredWarmupTs.tv_nsec / 1000000.0);
     double mixPeriodSec = (double) mFrameCount / (double) mSampleRate;
-    fdprintf(fd, "FastMixer command=%s writeSequence=%u framesWritten=%u\n"
-                 "          numTracks=%u writeErrors=%u underruns=%u overruns=%u\n"
-                 "          sampleRate=%u frameCount=%zu measuredWarmup=%.3g ms, warmupCycles=%u\n"
-                 "          mixPeriod=%.2f ms\n",
+    fdprintf(fd, "  FastMixer command=%s writeSequence=%u framesWritten=%u\n"
+                 "            numTracks=%u writeErrors=%u underruns=%u overruns=%u\n"
+                 "            sampleRate=%u frameCount=%zu measuredWarmup=%.3g ms, warmupCycles=%u\n"
+                 "            mixPeriod=%.2f ms\n",
                  string, mWriteSequence, mFramesWritten,
                  mNumTracks, mWriteErrors, mUnderruns, mOverruns,
                  mSampleRate, mFrameCount, measuredWarmupMs, mWarmupCycles,
@@ -783,14 +784,20 @@
         previousCpukHz = sampleCpukHz;
 #endif
     }
-    fdprintf(fd, "Simple moving statistics over last %.1f seconds:\n", wall.n() * mixPeriodSec);
-    fdprintf(fd, "  wall clock time in ms per mix cycle:\n"
-                 "    mean=%.2f min=%.2f max=%.2f stddev=%.2f\n",
-                 wall.mean()*1e-6, wall.minimum()*1e-6, wall.maximum()*1e-6, wall.stddev()*1e-6);
-    fdprintf(fd, "  raw CPU load in us per mix cycle:\n"
-                 "    mean=%.0f min=%.0f max=%.0f stddev=%.0f\n",
-                 loadNs.mean()*1e-3, loadNs.minimum()*1e-3, loadNs.maximum()*1e-3,
-                 loadNs.stddev()*1e-3);
+    if (n) {
+        fdprintf(fd, "  Simple moving statistics over last %.1f seconds:\n",
+                     wall.n() * mixPeriodSec);
+        fdprintf(fd, "    wall clock time in ms per mix cycle:\n"
+                     "      mean=%.2f min=%.2f max=%.2f stddev=%.2f\n",
+                     wall.mean()*1e-6, wall.minimum()*1e-6, wall.maximum()*1e-6,
+                     wall.stddev()*1e-6);
+        fdprintf(fd, "    raw CPU load in us per mix cycle:\n"
+                     "      mean=%.0f min=%.0f max=%.0f stddev=%.0f\n",
+                     loadNs.mean()*1e-3, loadNs.minimum()*1e-3, loadNs.maximum()*1e-3,
+                     loadNs.stddev()*1e-3);
+    } else {
+        fdprintf(fd, "  No FastMixer statistics available currently\n");
+    }
 #ifdef CPU_FREQUENCY_STATISTICS
     fdprintf(fd, "  CPU clock frequency in MHz:\n"
                  "    mean=%.0f min=%.0f max=%.0f stddev=%.0f\n",
@@ -808,9 +815,9 @@
             left.sample(tail[i]);
             right.sample(tail[n - (i + 1)]);
         }
-        fdprintf(fd, "Distribution of mix cycle times in ms for the tails (> ~3 stddev outliers):\n"
-                     "  left tail: mean=%.2f min=%.2f max=%.2f stddev=%.2f\n"
-                     "  right tail: mean=%.2f min=%.2f max=%.2f stddev=%.2f\n",
+        fdprintf(fd, "  Distribution of mix cycle times in ms for the tails (> ~3 stddev outliers):\n"
+                     "    left tail: mean=%.2f min=%.2f max=%.2f stddev=%.2f\n"
+                     "    right tail: mean=%.2f min=%.2f max=%.2f stddev=%.2f\n",
                      left.mean()*1e-6, left.minimum()*1e-6, left.maximum()*1e-6, left.stddev()*1e-6,
                      right.mean()*1e-6, right.minimum()*1e-6, right.maximum()*1e-6,
                      right.stddev()*1e-6);
@@ -823,9 +830,9 @@
     // Instead we always display all tracks, with an indication
     // of whether we think the track is active.
     uint32_t trackMask = mTrackMask;
-    fdprintf(fd, "Fast tracks: kMaxFastTracks=%u activeMask=%#x\n",
+    fdprintf(fd, "  Fast tracks: kMaxFastTracks=%u activeMask=%#x\n",
             FastMixerState::kMaxFastTracks, trackMask);
-    fdprintf(fd, "Index Active Full Partial Empty  Recent Ready\n");
+    fdprintf(fd, "  Index Active Full Partial Empty  Recent Ready\n");
     for (uint32_t i = 0; i < FastMixerState::kMaxFastTracks; ++i, trackMask >>= 1) {
         bool isActive = trackMask & 1;
         const FastTrackDump *ftDump = &mTracks[i];
@@ -845,7 +852,7 @@
             mostRecent = "?";
             break;
         }
-        fdprintf(fd, "%5u %6s %4u %7u %5u %7s %5zu\n", i, isActive ? "yes" : "no",
+        fdprintf(fd, "  %5u %6s %4u %7u %5u %7s %5zu\n", i, isActive ? "yes" : "no",
                 (underruns.mBitFields.mFull) & UNDERRUN_MASK,
                 (underruns.mBitFields.mPartial) & UNDERRUN_MASK,
                 (underruns.mBitFields.mEmpty) & UNDERRUN_MASK,
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 43b77f3..b5e763d 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -34,9 +34,10 @@
                                 int uid,
                                 IAudioFlinger::track_flags_t flags);
     virtual             ~Track();
+    virtual status_t    initCheck() const;
 
     static  void        appendDumpHeader(String8& result);
-            void        dump(char* buffer, size_t size);
+            void        dump(char* buffer, size_t size, bool active);
     virtual status_t    start(AudioSystem::sync_event_t event =
                                     AudioSystem::SYNC_EVENT_NONE,
                              int triggerSession = 0);
@@ -93,6 +94,8 @@
     bool isReady() const;
     void setPaused() { mState = PAUSED; }
     void reset();
+    bool isFlushPending() const { return mFlushHwPending; }
+    void flushAck();
 
     bool isOutputTrack() const {
         return (mStreamType == AUDIO_STREAM_CNT);
@@ -154,6 +157,7 @@
     bool                mIsInvalid; // non-resettable latch, set by invalidate()
     AudioTrackServerProxy*  mAudioTrackServerProxy;
     bool                mResumeToStopping; // track was paused in stopping state.
+    bool                mFlushHwPending; // track requests for thread flush
 };  // end of Track
 
 class TimedTrack : public Track {
diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h
index 57de568..fc3171f 100644
--- a/services/audioflinger/RecordTracks.h
+++ b/services/audioflinger/RecordTracks.h
@@ -45,7 +45,7 @@
                                                 return tmp; }
 
     static  void        appendDumpHeader(String8& result);
-            void        dump(char* buffer, size_t size);
+            void        dump(char* buffer, size_t size, bool active);
 
 private:
     friend class AudioFlinger;  // for mState
@@ -59,5 +59,4 @@
     // releaseBuffer() not overridden
 
     bool                mOverflow;  // overflow on most recent attempt to fill client buffer
-    AudioRecordServerProxy* mAudioRecordServerProxy;
 };
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 498ddb6..b064e89 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -185,7 +185,11 @@
 {
 }
 
-void CpuStats::sample(const String8 &title) {
+void CpuStats::sample(const String8 &title
+#ifndef DEBUG_CPU_USAGE
+                __unused
+#endif
+        ) {
 #ifdef DEBUG_CPU_USAGE
     // get current thread's delta CPU time in wall clock ns
     double wcNs;
@@ -269,8 +273,8 @@
     :   Thread(false /*canCallJava*/),
         mType(type),
         mAudioFlinger(audioFlinger),
-        // mSampleRate, mFrameCount, mChannelMask, mChannelCount, mFrameSize, and mFormat are
-        // set by PlaybackThread::readOutputParameters() or RecordThread::readInputParameters()
+        // mSampleRate, mFrameCount, mChannelMask, mChannelCount, mFrameSize, mFormat, mBufferSize
+        // are set by PlaybackThread::readOutputParameters() or RecordThread::readInputParameters()
         mParamStatus(NO_ERROR),
         //FIXME: mStandby should be true here. Is this some kind of hack?
         mStandby(false), mOutDevice(outDevice), mInDevice(inDevice),
@@ -297,6 +301,17 @@
     }
 }
 
+status_t AudioFlinger::ThreadBase::readyToRun()
+{
+    status_t status = initCheck();
+    if (status == NO_ERROR) {
+        ALOGI("AudioFlinger's thread %p ready to run", this);
+    } else {
+        ALOGE("No working audio driver found.");
+    }
+    return status;
+}
+
 void AudioFlinger::ThreadBase::exit()
 {
     ALOGV("ThreadBase::exit");
@@ -369,7 +384,13 @@
 
 void AudioFlinger::ThreadBase::processConfigEvents()
 {
-    mLock.lock();
+    Mutex::Autolock _l(mLock);
+    processConfigEvents_l();
+}
+
+// post condition: mConfigEvents.isEmpty()
+void AudioFlinger::ThreadBase::processConfigEvents_l()
+{
     while (!mConfigEvents.isEmpty()) {
         ALOGV("processConfigEvents() remaining events %d", mConfigEvents.size());
         ConfigEvent *event = mConfigEvents[0];
@@ -377,35 +398,81 @@
         // release mLock before locking AudioFlinger mLock: lock order is always
         // AudioFlinger then ThreadBase to avoid cross deadlock
         mLock.unlock();
-        switch(event->type()) {
-            case CFG_EVENT_PRIO: {
-                PrioConfigEvent *prioEvent = static_cast<PrioConfigEvent *>(event);
-                // FIXME Need to understand why this has be done asynchronously
-                int err = requestPriority(prioEvent->pid(), prioEvent->tid(), prioEvent->prio(),
-                        true /*asynchronous*/);
-                if (err != 0) {
-                    ALOGW("Policy SCHED_FIFO priority %d is unavailable for pid %d tid %d; "
-                          "error %d",
-                          prioEvent->prio(), prioEvent->pid(), prioEvent->tid(), err);
-                }
-            } break;
-            case CFG_EVENT_IO: {
-                IoConfigEvent *ioEvent = static_cast<IoConfigEvent *>(event);
-                mAudioFlinger->mLock.lock();
+        switch (event->type()) {
+        case CFG_EVENT_PRIO: {
+            PrioConfigEvent *prioEvent = static_cast<PrioConfigEvent *>(event);
+            // FIXME Need to understand why this has be done asynchronously
+            int err = requestPriority(prioEvent->pid(), prioEvent->tid(), prioEvent->prio(),
+                    true /*asynchronous*/);
+            if (err != 0) {
+                ALOGW("Policy SCHED_FIFO priority %d is unavailable for pid %d tid %d; error %d",
+                      prioEvent->prio(), prioEvent->pid(), prioEvent->tid(), err);
+            }
+        } break;
+        case CFG_EVENT_IO: {
+            IoConfigEvent *ioEvent = static_cast<IoConfigEvent *>(event);
+            {
+                Mutex::Autolock _l(mAudioFlinger->mLock);
                 audioConfigChanged_l(ioEvent->event(), ioEvent->param());
-                mAudioFlinger->mLock.unlock();
-            } break;
-            default:
-                ALOGE("processConfigEvents() unknown event type %d", event->type());
-                break;
+            }
+        } break;
+        default:
+            ALOGE("processConfigEvents() unknown event type %d", event->type());
+            break;
         }
         delete event;
         mLock.lock();
     }
-    mLock.unlock();
 }
 
-void AudioFlinger::ThreadBase::dumpBase(int fd, const Vector<String16>& args)
+String8 channelMaskToString(audio_channel_mask_t mask, bool output) {
+    String8 s;
+    if (output) {
+        if (mask & AUDIO_CHANNEL_OUT_FRONT_LEFT) s.append("front-left, ");
+        if (mask & AUDIO_CHANNEL_OUT_FRONT_RIGHT) s.append("front-right, ");
+        if (mask & AUDIO_CHANNEL_OUT_FRONT_CENTER) s.append("front-center, ");
+        if (mask & AUDIO_CHANNEL_OUT_LOW_FREQUENCY) s.append("low freq, ");
+        if (mask & AUDIO_CHANNEL_OUT_BACK_LEFT) s.append("back-left, ");
+        if (mask & AUDIO_CHANNEL_OUT_BACK_RIGHT) s.append("back-right, ");
+        if (mask & AUDIO_CHANNEL_OUT_FRONT_LEFT_OF_CENTER) s.append("front-left-of-center, ");
+        if (mask & AUDIO_CHANNEL_OUT_FRONT_RIGHT_OF_CENTER) s.append("front-right-of-center, ");
+        if (mask & AUDIO_CHANNEL_OUT_BACK_CENTER) s.append("back-center, ");
+        if (mask & AUDIO_CHANNEL_OUT_SIDE_LEFT) s.append("side-left, ");
+        if (mask & AUDIO_CHANNEL_OUT_SIDE_RIGHT) s.append("side-right, ");
+        if (mask & AUDIO_CHANNEL_OUT_TOP_CENTER) s.append("top-center, ");
+        if (mask & AUDIO_CHANNEL_OUT_TOP_FRONT_LEFT) s.append("top-front-left, ");
+        if (mask & AUDIO_CHANNEL_OUT_TOP_FRONT_CENTER) s.append("top-front-center, ");
+        if (mask & AUDIO_CHANNEL_OUT_TOP_FRONT_RIGHT) s.append("top-front-right, ");
+        if (mask & AUDIO_CHANNEL_OUT_TOP_BACK_LEFT) s.append("top-back-left, ");
+        if (mask & AUDIO_CHANNEL_OUT_TOP_BACK_CENTER) s.append("top-back-center, ");
+        if (mask & AUDIO_CHANNEL_OUT_TOP_BACK_RIGHT) s.append("top-back-right, ");
+        if (mask & ~AUDIO_CHANNEL_OUT_ALL) s.append("unknown, ");
+    } else {
+        if (mask & AUDIO_CHANNEL_IN_LEFT) s.append("left, ");
+        if (mask & AUDIO_CHANNEL_IN_RIGHT) s.append("right, ");
+        if (mask & AUDIO_CHANNEL_IN_FRONT) s.append("front, ");
+        if (mask & AUDIO_CHANNEL_IN_BACK) s.append("back, ");
+        if (mask & AUDIO_CHANNEL_IN_LEFT_PROCESSED) s.append("left-processed, ");
+        if (mask & AUDIO_CHANNEL_IN_RIGHT_PROCESSED) s.append("right-processed, ");
+        if (mask & AUDIO_CHANNEL_IN_FRONT_PROCESSED) s.append("front-processed, ");
+        if (mask & AUDIO_CHANNEL_IN_BACK_PROCESSED) s.append("back-processed, ");
+        if (mask & AUDIO_CHANNEL_IN_PRESSURE) s.append("pressure, ");
+        if (mask & AUDIO_CHANNEL_IN_X_AXIS) s.append("X, ");
+        if (mask & AUDIO_CHANNEL_IN_Y_AXIS) s.append("Y, ");
+        if (mask & AUDIO_CHANNEL_IN_Z_AXIS) s.append("Z, ");
+        if (mask & AUDIO_CHANNEL_IN_VOICE_UPLINK) s.append("voice-uplink, ");
+        if (mask & AUDIO_CHANNEL_IN_VOICE_DNLINK) s.append("voice-dnlink, ");
+        if (mask & ~AUDIO_CHANNEL_IN_ALL) s.append("unknown, ");
+    }
+    int len = s.length();
+    if (len > 2) {
+        (void) s.lockBuffer(len);
+        s.unlockBuffer(len - 2);    // drop the trailing ", "
+    }
+    return s;
+}
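Similarly, channelMaskToString() expands a raw mask into named speaker positions in the thread dump; for example (hypothetical), a stereo output mask would appear as:

    Channel Mask: 0x00000003 (front-left, front-right)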
+
+void AudioFlinger::ThreadBase::dumpBase(int fd, const Vector<String16>& args __unused)
 {
     const size_t SIZE = 256;
     char buffer[SIZE];
@@ -413,47 +480,43 @@
 
     bool locked = AudioFlinger::dumpTryLock(mLock);
     if (!locked) {
-        snprintf(buffer, SIZE, "thread %p maybe dead locked\n", this);
-        write(fd, buffer, strlen(buffer));
+        fdprintf(fd, "thread %p maybe dead locked\n", this);
     }
 
-    snprintf(buffer, SIZE, "io handle: %d\n", mId);
-    result.append(buffer);
-    snprintf(buffer, SIZE, "TID: %d\n", getTid());
-    result.append(buffer);
-    snprintf(buffer, SIZE, "standby: %d\n", mStandby);
-    result.append(buffer);
-    snprintf(buffer, SIZE, "Sample rate: %u\n", mSampleRate);
-    result.append(buffer);
-    snprintf(buffer, SIZE, "HAL frame count: %zu\n", mFrameCount);
-    result.append(buffer);
-    snprintf(buffer, SIZE, "Channel Count: %u\n", mChannelCount);
-    result.append(buffer);
-    snprintf(buffer, SIZE, "Channel Mask: 0x%08x\n", mChannelMask);
-    result.append(buffer);
-    snprintf(buffer, SIZE, "Format: %d\n", mFormat);
-    result.append(buffer);
-    snprintf(buffer, SIZE, "Frame size: %zu\n", mFrameSize);
-    result.append(buffer);
-
-    snprintf(buffer, SIZE, "\nPending setParameters commands: \n");
-    result.append(buffer);
-    result.append(" Index Command");
-    for (size_t i = 0; i < mNewParameters.size(); ++i) {
-        snprintf(buffer, SIZE, "\n %02zu    ", i);
-        result.append(buffer);
-        result.append(mNewParameters[i]);
+    fdprintf(fd, "  I/O handle: %d\n", mId);
+    fdprintf(fd, "  TID: %d\n", getTid());
+    fdprintf(fd, "  Standby: %s\n", mStandby ? "yes" : "no");
+    fdprintf(fd, "  Sample rate: %u\n", mSampleRate);
+    fdprintf(fd, "  HAL frame count: %zu\n", mFrameCount);
+    fdprintf(fd, "  HAL buffer size: %u bytes\n", mBufferSize);
+    fdprintf(fd, "  Channel Count: %u\n", mChannelCount);
+    fdprintf(fd, "  Channel Mask: 0x%08x (%s)\n", mChannelMask,
+            channelMaskToString(mChannelMask, mType != RECORD).string());
+    fdprintf(fd, "  Format: 0x%x (%s)\n", mFormat, formatToString(mFormat));
+    fdprintf(fd, "  Frame size: %zu\n", mFrameSize);
+    fdprintf(fd, "  Pending setParameters commands:");
+    size_t numParams = mNewParameters.size();
+    if (numParams) {
+        fdprintf(fd, "\n   Index Command");
+        for (size_t i = 0; i < numParams; ++i) {
+            fdprintf(fd, "\n   %02zu    ", i);
+            fdprintf(fd, "%s", mNewParameters[i].string());
+        }
+        fdprintf(fd, "\n");
+    } else {
+        fdprintf(fd, " none\n");
     }
-
-    snprintf(buffer, SIZE, "\n\nPending config events: \n");
-    result.append(buffer);
-    for (size_t i = 0; i < mConfigEvents.size(); i++) {
-        mConfigEvents[i]->dump(buffer, SIZE);
-        result.append(buffer);
+    fdprintf(fd, "  Pending config events:");
+    size_t numConfig = mConfigEvents.size();
+    if (numConfig) {
+        for (size_t i = 0; i < numConfig; i++) {
+            mConfigEvents[i]->dump(buffer, SIZE);
+            fdprintf(fd, "\n    %s", buffer);
+        }
+        fdprintf(fd, "\n");
+    } else {
+        fdprintf(fd, " none\n");
     }
-    result.append("\n");
-
-    write(fd, result.string(), result.size());
 
     if (locked) {
         mLock.unlock();
@@ -466,10 +529,11 @@
     char buffer[SIZE];
     String8 result;
 
-    snprintf(buffer, SIZE, "\n- %zu Effect Chains:\n", mEffectChains.size());
+    size_t numEffectChains = mEffectChains.size();
+    snprintf(buffer, SIZE, "  %zu Effect Chains\n", numEffectChains);
     write(fd, buffer, strlen(buffer));
 
-    for (size_t i = 0; i < mEffectChains.size(); ++i) {
+    for (size_t i = 0; i < numEffectChains; ++i) {
         sp<EffectChain> chain = mEffectChains[i];
         if (chain != 0) {
             chain->dump(fd, args);
@@ -586,7 +650,7 @@
     mPowerManager.clear();
 }
 
-void AudioFlinger::ThreadBase::PMDeathRecipient::binderDied(const wp<IBinder>& who)
+void AudioFlinger::ThreadBase::PMDeathRecipient::binderDied(const wp<IBinder>& who __unused)
 {
     sp<ThreadBase> thread = mThread.promote();
     if (thread != 0) {
@@ -739,8 +803,7 @@
         int sessionId,
         effect_descriptor_t *desc,
         int *enabled,
-        status_t *status
-        )
+        status_t *status)
 {
     sp<EffectModule> effect;
     sp<EffectHandle> handle;
@@ -829,7 +892,10 @@
         }
         // create effect handle and connect it to effect module
         handle = new EffectHandle(effect, client, effectClient, priority);
-        lStatus = effect->addHandle(handle.get());
+        lStatus = handle->initCheck();
+        if (lStatus == OK) {
+            lStatus = effect->addHandle(handle.get());
+        }
         if (enabled != NULL) {
             *enabled = (int)effect->isEnabled();
         }
@@ -850,9 +916,7 @@
         handle.clear();
     }
 
-    if (status != NULL) {
-        *status = lStatus;
-    }
+    *status = lStatus;
     return handle;
 }
 
@@ -1002,7 +1066,7 @@
                                              type_t type)
     :   ThreadBase(audioFlinger, id, device, AUDIO_DEVICE_NONE, type),
         mNormalFrameCount(0), mMixBuffer(NULL),
-        mAllocMixBuffer(NULL), mSuspended(0), mBytesWritten(0),
+        mSuspended(0), mBytesWritten(0),
         mActiveTracksGeneration(0),
         // mStreamTypes[] initialized in constructor body
         mOutput(output),
@@ -1060,7 +1124,7 @@
 AudioFlinger::PlaybackThread::~PlaybackThread()
 {
     mAudioFlinger->unregisterWriter(mNBLogWriter);
-    delete [] mAllocMixBuffer;
+    delete[] mMixBuffer;
 }
 
 void AudioFlinger::PlaybackThread::dump(int fd, const Vector<String16>& args)
@@ -1070,13 +1134,13 @@
     dumpEffectChains(fd, args);
 }
 
-void AudioFlinger::PlaybackThread::dumpTracks(int fd, const Vector<String16>& args)
+void AudioFlinger::PlaybackThread::dumpTracks(int fd, const Vector<String16>& args __unused)
 {
     const size_t SIZE = 256;
     char buffer[SIZE];
     String8 result;
 
-    result.appendFormat("Output thread %p stream volumes in dB:\n    ", this);
+    result.appendFormat("  Stream volumes in dB: ");
     for (int i = 0; i < AUDIO_STREAM_CNT; ++i) {
         const stream_type_t *st = &mStreamTypes[i];
         if (i > 0) {
@@ -1091,75 +1155,67 @@
     write(fd, result.string(), result.length());
     result.clear();
 
-    snprintf(buffer, SIZE, "Output thread %p tracks\n", this);
-    result.append(buffer);
-    Track::appendDumpHeader(result);
-    for (size_t i = 0; i < mTracks.size(); ++i) {
-        sp<Track> track = mTracks[i];
-        if (track != 0) {
-            track->dump(buffer, SIZE);
-            result.append(buffer);
-        }
-    }
-
-    snprintf(buffer, SIZE, "Output thread %p active tracks\n", this);
-    result.append(buffer);
-    Track::appendDumpHeader(result);
-    for (size_t i = 0; i < mActiveTracks.size(); ++i) {
-        sp<Track> track = mActiveTracks[i].promote();
-        if (track != 0) {
-            track->dump(buffer, SIZE);
-            result.append(buffer);
-        }
-    }
-    write(fd, result.string(), result.size());
-
     // These values are "raw"; they will wrap around.  See prepareTracks_l() for a better way.
     FastTrackUnderruns underruns = getFastTrackUnderruns(0);
-    fdprintf(fd, "Normal mixer raw underrun counters: partial=%u empty=%u\n",
+    fdprintf(fd, "  Normal mixer raw underrun counters: partial=%u empty=%u\n",
             underruns.mBitFields.mPartial, underruns.mBitFields.mEmpty);
+
+    size_t numtracks = mTracks.size();
+    size_t numactive = mActiveTracks.size();
+    fdprintf(fd, "  %d Tracks", numtracks);
+    size_t numactiveseen = 0;
+    if (numtracks) {
+        fdprintf(fd, " of which %d are active\n", numactive);
+        Track::appendDumpHeader(result);
+        for (size_t i = 0; i < numtracks; ++i) {
+            sp<Track> track = mTracks[i];
+            if (track != 0) {
+                bool active = mActiveTracks.indexOf(track) >= 0;
+                if (active) {
+                    numactiveseen++;
+                }
+                track->dump(buffer, SIZE, active);
+                result.append(buffer);
+            }
+        }
+    } else {
+        result.append("\n");
+    }
+    if (numactiveseen != numactive) {
+        // some tracks in the active list were not in the tracks list
+        snprintf(buffer, SIZE, "  The following tracks are in the active list but"
+                " not in the track list\n");
+        result.append(buffer);
+        Track::appendDumpHeader(result);
+        for (size_t i = 0; i < numactive; ++i) {
+            sp<Track> track = mActiveTracks[i].promote();
+            if (track != 0 && mTracks.indexOf(track) < 0) {
+                track->dump(buffer, SIZE, true);
+                result.append(buffer);
+            }
+        }
+    }
+
+    write(fd, result.string(), result.size());
+
 }
 
 void AudioFlinger::PlaybackThread::dumpInternals(int fd, const Vector<String16>& args)
 {
-    const size_t SIZE = 256;
-    char buffer[SIZE];
-    String8 result;
-
-    snprintf(buffer, SIZE, "\nOutput thread %p internals\n", this);
-    result.append(buffer);
-    snprintf(buffer, SIZE, "Normal frame count: %zu\n", mNormalFrameCount);
-    result.append(buffer);
-    snprintf(buffer, SIZE, "last write occurred (msecs): %llu\n",
-            ns2ms(systemTime() - mLastWriteTime));
-    result.append(buffer);
-    snprintf(buffer, SIZE, "total writes: %d\n", mNumWrites);
-    result.append(buffer);
-    snprintf(buffer, SIZE, "delayed writes: %d\n", mNumDelayedWrites);
-    result.append(buffer);
-    snprintf(buffer, SIZE, "blocked in write: %d\n", mInWrite);
-    result.append(buffer);
-    snprintf(buffer, SIZE, "suspend count: %d\n", mSuspended);
-    result.append(buffer);
-    snprintf(buffer, SIZE, "mix buffer : %p\n", mMixBuffer);
-    result.append(buffer);
-    write(fd, result.string(), result.size());
-    fdprintf(fd, "Fast track availMask=%#x\n", mFastTrackAvailMask);
+    fdprintf(fd, "\nOutput thread %p:\n", this);
+    fdprintf(fd, "  Normal frame count: %zu\n", mNormalFrameCount);
+    fdprintf(fd, "  Last write occurred (msecs): %llu\n", ns2ms(systemTime() - mLastWriteTime));
+    fdprintf(fd, "  Total writes: %d\n", mNumWrites);
+    fdprintf(fd, "  Delayed writes: %d\n", mNumDelayedWrites);
+    fdprintf(fd, "  Blocked in write: %s\n", mInWrite ? "yes" : "no");
+    fdprintf(fd, "  Suspend count: %d\n", mSuspended);
+    fdprintf(fd, "  Mix buffer : %p\n", mMixBuffer);
+    fdprintf(fd, "  Fast track availMask=%#x\n", mFastTrackAvailMask);
 
     dumpBase(fd, args);
 }
 
 // Thread virtuals
-status_t AudioFlinger::PlaybackThread::readyToRun()
-{
-    status_t status = initCheck();
-    if (status == NO_ERROR) {
-        ALOGI("AudioFlinger's thread %p ready to run", this);
-    } else {
-        ALOGE("No working audio driver found.");
-    }
-    return status;
-}
 
 void AudioFlinger::PlaybackThread::onFirstRef()
 {
@@ -1182,7 +1238,7 @@
         uint32_t sampleRate,
         audio_format_t format,
         audio_channel_mask_t channelMask,
-        size_t frameCount,
+        size_t *pFrameCount,
         const sp<IMemory>& sharedBuffer,
         int sessionId,
         IAudioFlinger::track_flags_t *flags,
@@ -1190,6 +1246,7 @@
         int uid,
         status_t *status)
 {
+    size_t frameCount = *pFrameCount;
     sp<Track> track;
     status_t lStatus;
 
@@ -1256,12 +1313,13 @@
         }
       }
     }
+    *pFrameCount = frameCount;
 
     if (mType == DIRECT) {
         if ((format & AUDIO_FORMAT_MAIN_MASK) == AUDIO_FORMAT_PCM) {
             if (sampleRate != mSampleRate || format != mFormat || channelMask != mChannelMask) {
-                ALOGE("createTrack_l() Bad parameter: sampleRate %u format %d, channelMask 0x%08x "
-                        "for output %p with format %d",
+                ALOGE("createTrack_l() Bad parameter: sampleRate %u format %#x, channelMask 0x%08x "
+                        "for output %p with format %#x",
                         sampleRate, format, channelMask, mOutput, mFormat);
                 lStatus = BAD_VALUE;
                 goto Exit;
@@ -1269,16 +1327,16 @@
         }
     } else if (mType == OFFLOAD) {
         if (sampleRate != mSampleRate || format != mFormat || channelMask != mChannelMask) {
-            ALOGE("createTrack_l() Bad parameter: sampleRate %d format %d, channelMask 0x%08x \""
-                    "for output %p with format %d",
+            ALOGE("createTrack_l() Bad parameter: sampleRate %d format %#x, channelMask 0x%08x \""
+                    "for output %p with format %#x",
                     sampleRate, format, channelMask, mOutput, mFormat);
             lStatus = BAD_VALUE;
             goto Exit;
         }
     } else {
         if ((format & AUDIO_FORMAT_MAIN_MASK) != AUDIO_FORMAT_PCM) {
-                ALOGE("createTrack_l() Bad parameter: format %d \""
-                        "for output %p with format %d",
+                ALOGE("createTrack_l() Bad parameter: format %#x \""
+                        "for output %p with format %#x",
                         format, mOutput, mFormat);
                 lStatus = BAD_VALUE;
                 goto Exit;
@@ -1324,8 +1382,13 @@
             track = TimedTrack::create(this, client, streamType, sampleRate, format,
                     channelMask, frameCount, sharedBuffer, sessionId, uid);
         }
-        if (track == 0 || track->getCblk() == NULL || track->name() < 0) {
-            lStatus = NO_MEMORY;
+
+        // new Track always returns non-NULL,
+        // but TimedTrack::create() is a factory that could fail by returning NULL
+        lStatus = track != 0 ? track->initCheck() : (status_t) NO_MEMORY;
+        if (lStatus != NO_ERROR) {
+            ALOGE("createTrack_l() initCheck failed %d; no control block?", lStatus);
+            // track must be cleared by the caller, as the caller holds the AF lock
             goto Exit;
         }
 
@@ -1350,9 +1413,7 @@
     lStatus = NO_ERROR;
 
 Exit:
-    if (status) {
-        *status = lStatus;
-    }
+    *status = lStatus;
     return track;
 }
 
@@ -1471,9 +1532,7 @@
         status = NO_ERROR;
     }
 
-    ALOGV("signal playback thread");
-    broadcast_l();
-
+    onAddNewTrack_l();
     return status;
 }
 
@@ -1599,7 +1658,7 @@
 
 // static
 int AudioFlinger::PlaybackThread::asyncCallback(stream_callback_event_t event,
-                                                void *param,
+                                                void *param __unused,
                                                 void *cookie)
 {
     AudioFlinger::PlaybackThread *me = (AudioFlinger::PlaybackThread *)cookie;
@@ -1633,14 +1692,15 @@
     mChannelCount = popcount(mChannelMask);
     mFormat = mOutput->stream->common.get_format(&mOutput->stream->common);
     if (!audio_is_valid_format(mFormat)) {
-        LOG_FATAL("HAL format %d not valid for output", mFormat);
+        LOG_FATAL("HAL format %#x not valid for output", mFormat);
     }
     if ((mType == MIXER || mType == DUPLICATING) && mFormat != AUDIO_FORMAT_PCM_16_BIT) {
-        LOG_FATAL("HAL format %d not supported for mixed output; must be AUDIO_FORMAT_PCM_16_BIT",
+        LOG_FATAL("HAL format %#x not supported for mixed output; must be AUDIO_FORMAT_PCM_16_BIT",
                 mFormat);
     }
     mFrameSize = audio_stream_frame_size(&mOutput->stream->common);
-    mFrameCount = mOutput->stream->common.get_buffer_size(&mOutput->stream->common) / mFrameSize;
+    mBufferSize = mOutput->stream->common.get_buffer_size(&mOutput->stream->common);
+    mFrameCount = mBufferSize / mFrameSize;
     if (mFrameCount & 15) {
         ALOGW("HAL output buffer size is %u frames but AudioMixer requires multiples of 16 frames",
                 mFrameCount);
@@ -1697,11 +1757,11 @@
     ALOGI("HAL output buffer size %u frames, normal mix buffer size %u frames", mFrameCount,
             mNormalFrameCount);
 
-    delete[] mAllocMixBuffer;
-    size_t align = (mFrameSize < sizeof(int16_t)) ? sizeof(int16_t) : mFrameSize;
-    mAllocMixBuffer = new int8_t[mNormalFrameCount * mFrameSize + align - 1];
-    mMixBuffer = (int16_t *) ((((size_t)mAllocMixBuffer + align - 1) / align) * align);
-    memset(mMixBuffer, 0, mNormalFrameCount * mFrameSize);
+    delete[] mMixBuffer;
+    size_t normalBufferSize = mNormalFrameCount * mFrameSize;
+    // For historical reasons mMixBuffer is int16_t[], but mFrameSize can be odd (such as 1)
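+    // (normalBufferSize + 1) >> 1 rounds the byte count up to a whole number of int16_t elements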
+    mMixBuffer = new int16_t[(normalBufferSize + 1) >> 1];
+    memset(mMixBuffer, 0, normalBufferSize);
 
     // force reconfiguration of effect chains and engines to take new buffer size and audio
     // parameters into account
@@ -1839,7 +1899,7 @@
         const Vector< sp<Track> >& tracksToRemove)
 {
     size_t count = tracksToRemove.size();
-    if (count) {
+    if (count > 0) {
         for (size_t i = 0 ; i < count ; i++) {
             const sp<Track>& track = tracksToRemove.itemAt(i);
             if (!track->isOutputTrack()) {
@@ -1915,7 +1975,7 @@
     // otherwise use the HAL / AudioStreamOut directly
     } else {
         // Direct output and offload threads
-        size_t offset = (mCurrentWriteLength - mBytesRemaining) / sizeof(int16_t);
+        size_t offset = (mCurrentWriteLength - mBytesRemaining);
         if (mUseAsyncWrite) {
             ALOGW_IF(mWriteAckSequence & 1, "threadLoop_write(): out of sequence write request");
             mWriteAckSequence += 2;
@@ -1926,7 +1986,7 @@
         // FIXME We should have an implementation of timestamps for direct output threads.
         // They are used e.g for multichannel PCM playback over HDMI.
         bytesWritten = mOutput->stream->write(mOutput->stream,
-                                                   mMixBuffer + offset, mBytesRemaining);
+                                                   (char *)mMixBuffer + offset, mBytesRemaining);
         if (mUseAsyncWrite &&
                 ((bytesWritten < 0) || (bytesWritten == (ssize_t)mBytesRemaining))) {
             // do not wait for async callback in case of error of full write
@@ -2346,20 +2406,20 @@
                         (mMixerStatus == MIXER_DRAIN_ALL)) {
                     threadLoop_drain();
                 }
-if (mType == MIXER) {
-                // write blocked detection
-                nsecs_t now = systemTime();
-                nsecs_t delta = now - mLastWriteTime;
-                if (!mStandby && delta > maxPeriod) {
-                    mNumDelayedWrites++;
-                    if ((now - lastWarning) > kWarningThrottleNs) {
-                        ATRACE_NAME("underrun");
-                        ALOGW("write blocked for %llu msecs, %d delayed writes, thread %p",
-                                ns2ms(delta), mNumDelayedWrites, this);
-                        lastWarning = now;
+                if (mType == MIXER) {
+                    // write blocked detection
+                    nsecs_t now = systemTime();
+                    nsecs_t delta = now - mLastWriteTime;
+                    if (!mStandby && delta > maxPeriod) {
+                        mNumDelayedWrites++;
+                        if ((now - lastWarning) > kWarningThrottleNs) {
+                            ATRACE_NAME("underrun");
+                            ALOGW("write blocked for %llu msecs, %d delayed writes, thread %p",
+                                    ns2ms(delta), mNumDelayedWrites, this);
+                            lastWarning = now;
+                        }
                     }
                 }
-}
 
             } else {
                 usleep(sleepTime);
@@ -2407,7 +2467,7 @@
 void AudioFlinger::PlaybackThread::removeTracks_l(const Vector< sp<Track> >& tracksToRemove)
 {
     size_t count = tracksToRemove.size();
-    if (count) {
+    if (count > 0) {
         for (size_t i=0 ; i<count ; i++) {
             const sp<Track>& track = tracksToRemove.itemAt(i);
             mActiveTracks.remove(track);
@@ -2711,12 +2771,6 @@
     PlaybackThread::threadLoop_standby();
 }
 
-// Empty implementation for standard mixer
-// Overridden for offloaded playback
-void AudioFlinger::PlaybackThread::flushOutput_l()
-{
-}
-
 bool AudioFlinger::PlaybackThread::waitingAsyncCallback_l()
 {
     return false;
@@ -2748,6 +2802,12 @@
     }
 }
 
+void AudioFlinger::PlaybackThread::onAddNewTrack_l()
+{
+    ALOGV("signal playback thread");
+    broadcast_l();
+}
+
 void AudioFlinger::MixerThread::threadLoop_mix()
 {
     // obtain the presentation timestamp of the next output buffer
@@ -2800,7 +2860,7 @@
             sleepTime = idleSleepTime;
         }
     } else if (mBytesWritten != 0 || (mMixerStatus == MIXER_TRACKS_ENABLED)) {
-        memset (mMixBuffer, 0, mixBufferSize);
+        memset(mMixBuffer, 0, mixBufferSize);
         sleepTime = 0;
         ALOGV_IF(mBytesWritten == 0 && (mMixerStatus == MIXER_TRACKS_ENABLED),
                 "anticipated start");
@@ -3025,12 +3085,14 @@
             // +1 for rounding and +1 for additional sample needed for interpolation
             desiredFrames = (mNormalFrameCount * sr) / mSampleRate + 1 + 1;
             // add frames already consumed but not yet released by the resampler
-            // because cblk->framesReady() will include these frames
+            // because mAudioTrackServerProxy->framesReady() will include these frames
             desiredFrames += mAudioMixer->getUnreleasedFrames(track->name());
+#if 0
             // the minimum track buffer size is normally twice the number of frames necessary
             // to fill one buffer and the resampler should not leave more than one buffer worth
             // of unreleased frames after each pass, but just in case...
             ALOG_ASSERT(desiredFrames <= cblk->frameCount_);
+#endif
         }
         uint32_t minFrames = 1;
         if ((track->sharedBuffer() == 0) && !track->isStopped() && !track->isPausing() &&
@@ -3356,6 +3418,7 @@
             if ((audio_format_t) value != AUDIO_FORMAT_PCM_16_BIT) {
                 status = BAD_VALUE;
             } else {
+                // no need to save value, since it's constant
                 reconfig = true;
             }
         }
@@ -3363,6 +3426,7 @@
             if ((audio_channel_mask_t) value != AUDIO_CHANNEL_OUT_STEREO) {
                 status = BAD_VALUE;
             } else {
+                // no need to save value, since it's constant
                 reconfig = true;
             }
         }
@@ -3466,9 +3530,7 @@
 
     PlaybackThread::dumpInternals(fd, args);
 
-    snprintf(buffer, SIZE, "AudioMixer tracks: %08x\n", mAudioMixer->trackNames());
-    result.append(buffer);
-    write(fd, result.string(), result.size());
+    fdprintf(fd, "  AudioMixer tracks: 0x%08x\n", mAudioMixer->trackNames());
 
     // Make a non-atomic copy of fast mixer dump state so it won't change underneath us
     const FastMixerDumpState copy(mFastMixerDumpState);
@@ -3722,14 +3784,14 @@
 }
 
 // getTrackName_l() must be called with ThreadBase::mLock held
-int AudioFlinger::DirectOutputThread::getTrackName_l(audio_channel_mask_t channelMask,
-        int sessionId)
+int AudioFlinger::DirectOutputThread::getTrackName_l(audio_channel_mask_t channelMask __unused,
+        int sessionId __unused)
 {
     return 0;
 }
 
 // deleteTrackName_l() must be called with ThreadBase::mLock held
-void AudioFlinger::DirectOutputThread::deleteTrackName_l(int name)
+void AudioFlinger::DirectOutputThread::deleteTrackName_l(int name __unused)
 {
 }
 
@@ -3982,6 +4044,17 @@
         sp<Track> l = mLatestActiveTrack.promote();
         bool last = l.get() == track;
 
+        if (track->isInvalid()) {
+            ALOGW("An invalidated track shouldn't be in active list");
+            tracksToRemove->add(track);
+            continue;
+        }
+
+        if (track->mState == TrackBase::IDLE) {
+            ALOGW("An idle track shouldn't be in active list");
+            continue;
+        }
+
         if (track->isPausing()) {
             track->setPaused();
             if (last) {
@@ -4000,6 +4073,11 @@
                 mBytesRemaining = 0;    // stop writing
             }
             tracksToRemove->add(track);
+        } else if (track->isFlushPending()) {
+            track->flushAck();
+            if (last) {
+                mFlushPending = true;
+            }
         } else if (track->framesReady() && track->isReady() &&
                 !track->isPaused() && !track->isTerminated() && !track->isStopping_2()) {
             ALOGVV("OffloadThread: track %d s=%08x [OK]", track->name(), cblk->mServer);
@@ -4049,7 +4127,6 @@
                         // seek when resuming.
                         if (previousTrack->sessionId() != track->sessionId()) {
                             previousTrack->invalidate();
-                            mFlushPending = true;
                         }
                     }
                 }
@@ -4125,9 +4202,6 @@
     // if resume is received before pause is executed.
     if (!mStandby && (doHwPause || (mFlushPending && !mHwPaused && (count != 0)))) {
         mOutput->stream->pause(mOutput->stream);
-        if (!doHwPause) {
-            doHwResume = true;
-        }
     }
     if (mFlushPending) {
         flushHw_l();
@@ -4143,11 +4217,6 @@
     return mixerStatus;
 }
 
-void AudioFlinger::OffloadThread::flushOutput_l()
-{
-    mFlushPending = true;
-}
-
 // must be called with thread mutex locked
 bool AudioFlinger::OffloadThread::waitingAsyncCallback_l()
 {
@@ -4162,15 +4231,15 @@
 // must be called with thread mutex locked
 bool AudioFlinger::OffloadThread::shouldStandby_l()
 {
-    bool TrackPaused = false;
+    bool trackPaused = false;
 
     // do not put the HAL in standby when paused. AwesomePlayer clear the offloaded AudioTrack
     // after a timeout and we will enter standby then.
     if (mTracks.size() > 0) {
-        TrackPaused = mTracks[mTracks.size() - 1]->isPaused();
+        trackPaused = mTracks[mTracks.size() - 1]->isPaused();
     }
 
-    return !mStandby && !TrackPaused;
+    return !mStandby && !trackPaused;
 }
 
 
@@ -4188,6 +4257,8 @@
     mBytesRemaining = 0;
     mPausedWriteLength = 0;
     mPausedBytesRemaining = 0;
+    mHwPaused = false;
+
     if (mUseAsyncWrite) {
         // discard any pending drain or write ack by incrementing sequence
         mWriteAckSequence = (mWriteAckSequence + 2) & ~1;
@@ -4198,6 +4269,18 @@
     }
 }
 
+void AudioFlinger::OffloadThread::onAddNewTrack_l()
+{
+    sp<Track> previousTrack = mPreviousTrack.promote();
+    sp<Track> latestTrack = mLatestActiveTrack.promote();
+
+    if (previousTrack != 0 && latestTrack != 0 &&
+        (previousTrack->sessionId() != latestTrack->sessionId())) {
+        mFlushPending = true;
+    }
+    PlaybackThread::onAddNewTrack_l();
+}
+
 // ----------------------------------------------------------------------------
 
 AudioFlinger::DuplicatingThread::DuplicatingThread(const sp<AudioFlinger>& audioFlinger,
@@ -4377,8 +4460,10 @@
 #endif
                                          ) :
     ThreadBase(audioFlinger, id, outDevice, inDevice, RECORD),
-    mInput(input), mResampler(NULL), mRsmpOutBuffer(NULL), mRsmpInBuffer(NULL),
-    // mRsmpInIndex and mBufferSize set by readInputParameters()
+    mInput(input), mActiveTracksGen(0), mResampler(NULL), mRsmpOutBuffer(NULL), mRsmpInBuffer(NULL),
+    // mRsmpInFrames, mRsmpInFramesP2, mRsmpInUnrel, mRsmpInFront, and mRsmpInRear
+    //      are set by readInputParameters()
+    // mRsmpInIndex LEGACY
     mReqChannelCount(popcount(channelMask)),
     mReqSampleRate(sampleRate)
     // mBytesRead is only meaningful while active, and so is cleared in start()
@@ -4388,6 +4473,7 @@
 #endif
 {
     snprintf(mName, kNameLength, "AudioIn_%X", id);
+    mNBLogWriter = audioFlinger->newWriter_l(kLogSize, mName);
 
     readInputParameters();
 }
@@ -4395,6 +4481,7 @@
 
 AudioFlinger::RecordThread::~RecordThread()
 {
+    mAudioFlinger->unregisterWriter(mNBLogWriter);
     delete[] mRsmpInBuffer;
     delete mResampler;
     delete[] mRsmpOutBuffer;
@@ -4405,230 +4492,323 @@
     run(mName, PRIORITY_URGENT_AUDIO);
 }
 
-status_t AudioFlinger::RecordThread::readyToRun()
-{
-    status_t status = initCheck();
-    ALOGW_IF(status != NO_ERROR,"RecordThread %p could not initialize", this);
-    return status;
-}
-
 bool AudioFlinger::RecordThread::threadLoop()
 {
-    AudioBufferProvider::Buffer buffer;
-    sp<RecordTrack> activeTrack;
-    Vector< sp<EffectChain> > effectChains;
-
     nsecs_t lastWarning = 0;
 
     inputStandBy();
-    {
-        Mutex::Autolock _l(mLock);
-        activeTrack = mActiveTrack;
-        acquireWakeLock_l(activeTrack != 0 ? activeTrack->uid() : -1);
-    }
 
     // used to verify we've read at least once before evaluating how many bytes were read
     bool readOnce = false;
 
-    // start recording
-    while (!exitPending()) {
+    // used to request a deferred sleep, to be executed later while mutex is unlocked
+    bool doSleep = false;
 
-        processConfigEvents();
+reacquire_wakelock:
+    sp<RecordTrack> activeTrack;
+    int activeTracksGen;
+    {
+        Mutex::Autolock _l(mLock);
+        size_t size = mActiveTracks.size();
+        activeTracksGen = mActiveTracksGen;
+        if (size > 0) {
+            // FIXME an arbitrary choice
+            activeTrack = mActiveTracks[0];
+            acquireWakeLock_l(activeTrack->uid());
+            if (size > 1) {
+                SortedVector<int> tmp;
+                for (size_t i = 0; i < size; i++) {
+                    tmp.add(mActiveTracks[i]->uid());
+                }
+                updateWakeLockUids_l(tmp);
+            }
+        } else {
+            acquireWakeLock_l(-1);
+        }
+    }
+
+    // start recording
+    for (;;) {
+        TrackBase::track_state activeTrackState;
+        Vector< sp<EffectChain> > effectChains;
+
+        // sleep with mutex unlocked
+        if (doSleep) {
+            doSleep = false;
+            usleep(kRecordThreadSleepUs);
+        }
 
         { // scope for mLock
             Mutex::Autolock _l(mLock);
-            checkForNewParameters_l();
-            if (mActiveTrack != 0 && activeTrack != mActiveTrack) {
-                SortedVector<int> tmp;
-                tmp.add(mActiveTrack->uid());
-                updateWakeLockUids_l(tmp);
+
+            processConfigEvents_l();
+            // return value 'reconfig' is currently unused
+            bool reconfig = checkForNewParameters_l();
+
+            // check exitPending here because processConfigEvents_l() and
+            // checkForNewParameters_l() can temporarily release mLock
+            if (exitPending()) {
+                break;
             }
-            activeTrack = mActiveTrack;
-            if (mActiveTrack == 0 && mConfigEvents.isEmpty()) {
-                standby();
 
-                if (exitPending()) {
-                    break;
-                }
-
+            // if no active track(s), then standby and release wakelock
+            size_t size = mActiveTracks.size();
+            if (size == 0) {
+                standbyIfNotAlreadyInStandby();
+                // exitPending() can't become true here
                 releaseWakeLock_l();
                 ALOGV("RecordThread: loop stopping");
                 // go to sleep
                 mWaitWorkCV.wait(mLock);
                 ALOGV("RecordThread: loop starting");
-                acquireWakeLock_l(mActiveTrack != 0 ? mActiveTrack->uid() : -1);
+                goto reacquire_wakelock;
+            }
+
+            if (mActiveTracksGen != activeTracksGen) {
+                activeTracksGen = mActiveTracksGen;
+                SortedVector<int> tmp;
+                for (size_t i = 0; i < size; i++) {
+                    tmp.add(mActiveTracks[i]->uid());
+                }
+                updateWakeLockUids_l(tmp);
+                // FIXME an arbitrary choice
+                activeTrack = mActiveTracks[0];
+            }
+
+            if (activeTrack->isTerminated()) {
+                removeTrack_l(activeTrack);
+                mActiveTracks.remove(activeTrack);
+                mActiveTracksGen++;
                 continue;
             }
-            if (mActiveTrack != 0) {
-                if (mActiveTrack->isTerminated()) {
-                    removeTrack_l(mActiveTrack);
-                    mActiveTrack.clear();
-                } else if (mActiveTrack->mState == TrackBase::PAUSING) {
-                    standby();
-                    mActiveTrack.clear();
+
+            activeTrackState = activeTrack->mState;
+            switch (activeTrackState) {
+            case TrackBase::PAUSING:
+                standbyIfNotAlreadyInStandby();
+                mActiveTracks.remove(activeTrack);
+                mActiveTracksGen++;
+                mStartStopCond.broadcast();
+                doSleep = true;
+                continue;
+
+            case TrackBase::RESUMING:
+                mStandby = false;
+                if (mReqChannelCount != activeTrack->channelCount()) {
+                    mActiveTracks.remove(activeTrack);
+                    mActiveTracksGen++;
                     mStartStopCond.broadcast();
-                } else if (mActiveTrack->mState == TrackBase::RESUMING) {
-                    if (mReqChannelCount != mActiveTrack->channelCount()) {
-                        mActiveTrack.clear();
-                        mStartStopCond.broadcast();
-                    } else if (readOnce) {
-                        // record start succeeds only if first read from audio input
-                        // succeeds
-                        if (mBytesRead >= 0) {
-                            mActiveTrack->mState = TrackBase::ACTIVE;
-                        } else {
-                            mActiveTrack.clear();
-                        }
-                        mStartStopCond.broadcast();
-                    }
-                    mStandby = false;
+                    continue;
                 }
+                if (readOnce) {
+                    mStartStopCond.broadcast();
+                    // record start succeeds only if first read from audio input succeeds
+                    if (mBytesRead < 0) {
+                        mActiveTracks.remove(activeTrack);
+                        mActiveTracksGen++;
+                        continue;
+                    }
+                    activeTrack->mState = TrackBase::ACTIVE;
+                }
+                break;
+
+            case TrackBase::ACTIVE:
+                break;
+
+            case TrackBase::IDLE:
+                doSleep = true;
+                continue;
+
+            default:
+                LOG_FATAL("Unexpected activeTrackState %d", activeTrackState);
             }
 
             lockEffectChains_l(effectChains);
         }
 
-        if (mActiveTrack != 0) {
-            if (mActiveTrack->mState != TrackBase::ACTIVE &&
-                mActiveTrack->mState != TrackBase::RESUMING) {
-                unlockEffectChains(effectChains);
-                usleep(kRecordThreadSleepUs);
-                continue;
-            }
-            for (size_t i = 0; i < effectChains.size(); i ++) {
-                effectChains[i]->process_l();
-            }
+        // thread mutex is now unlocked, mActiveTracks unknown, activeTrack != 0, kept, immutable
+        // activeTrack->mState unknown, activeTrackState immutable and is ACTIVE or RESUMING
 
-            buffer.frameCount = mFrameCount;
-            status_t status = mActiveTrack->getNextBuffer(&buffer);
-            if (status == NO_ERROR) {
-                readOnce = true;
-                size_t framesOut = buffer.frameCount;
-                if (mResampler == NULL) {
-                    // no resampling
-                    while (framesOut) {
-                        size_t framesIn = mFrameCount - mRsmpInIndex;
-                        if (framesIn) {
-                            int8_t *src = (int8_t *)mRsmpInBuffer + mRsmpInIndex * mFrameSize;
-                            int8_t *dst = buffer.i8 + (buffer.frameCount - framesOut) *
-                                    mActiveTrack->mFrameSize;
-                            if (framesIn > framesOut)
-                                framesIn = framesOut;
-                            mRsmpInIndex += framesIn;
-                            framesOut -= framesIn;
-                            if (mChannelCount == mReqChannelCount) {
-                                memcpy(dst, src, framesIn * mFrameSize);
-                            } else {
-                                if (mChannelCount == 1) {
-                                    upmix_to_stereo_i16_from_mono_i16((int16_t *)dst,
-                                            (int16_t *)src, framesIn);
-                                } else {
-                                    downmix_to_mono_i16_from_stereo_i16((int16_t *)dst,
-                                            (int16_t *)src, framesIn);
-                                }
-                            }
-                        }
-                        if (framesOut && mFrameCount == mRsmpInIndex) {
-                            void *readInto;
-                            if (framesOut == mFrameCount && mChannelCount == mReqChannelCount) {
-                                readInto = buffer.raw;
-                                framesOut = 0;
-                            } else {
-                                readInto = mRsmpInBuffer;
-                                mRsmpInIndex = 0;
-                            }
-                            mBytesRead = mInput->stream->read(mInput->stream, readInto,
-                                    mBufferSize);
-                            if (mBytesRead <= 0) {
-                                if ((mBytesRead < 0) && (mActiveTrack->mState == TrackBase::ACTIVE))
-                                {
-                                    ALOGE("Error reading audio input");
-                                    // Force input into standby so that it tries to
-                                    // recover at next read attempt
-                                    inputStandBy();
-                                    usleep(kRecordThreadSleepUs);
-                                }
-                                mRsmpInIndex = mFrameCount;
-                                framesOut = 0;
-                                buffer.frameCount = 0;
-                            }
-#ifdef TEE_SINK
-                            else if (mTeeSink != 0) {
-                                (void) mTeeSink->write(readInto,
-                                        mBytesRead >> Format_frameBitShift(mTeeSink->format()));
-                            }
-#endif
-                        }
-                    }
-                } else {
-                    // resampling
-
-                    // resampler accumulates, but we only have one source track
-                    memset(mRsmpOutBuffer, 0, framesOut * FCC_2 * sizeof(int32_t));
-                    // alter output frame count as if we were expecting stereo samples
-                    if (mChannelCount == 1 && mReqChannelCount == 1) {
-                        framesOut >>= 1;
-                    }
-                    mResampler->resample(mRsmpOutBuffer, framesOut,
-                            this /* AudioBufferProvider* */);
-                    // ditherAndClamp() works as long as all buffers returned by
-                    // mActiveTrack->getNextBuffer() are 32 bit aligned which should be always true.
-                    if (mChannelCount == 2 && mReqChannelCount == 1) {
-                        // temporarily type pun mRsmpOutBuffer from Q19.12 to int16_t
-                        ditherAndClamp(mRsmpOutBuffer, mRsmpOutBuffer, framesOut);
-                        // the resampler always outputs stereo samples:
-                        // do post stereo to mono conversion
-                        downmix_to_mono_i16_from_stereo_i16(buffer.i16, (int16_t *)mRsmpOutBuffer,
-                                framesOut);
-                    } else {
-                        ditherAndClamp((int32_t *)buffer.raw, mRsmpOutBuffer, framesOut);
-                    }
-                    // now done with mRsmpOutBuffer
-
-                }
-                if (mFramestoDrop == 0) {
-                    mActiveTrack->releaseBuffer(&buffer);
-                } else {
-                    if (mFramestoDrop > 0) {
-                        mFramestoDrop -= buffer.frameCount;
-                        if (mFramestoDrop <= 0) {
-                            clearSyncStartEvent();
-                        }
-                    } else {
-                        mFramestoDrop += buffer.frameCount;
-                        if (mFramestoDrop >= 0 || mSyncStartEvent == 0 ||
-                                mSyncStartEvent->isCancelled()) {
-                            ALOGW("Synced record %s, session %d, trigger session %d",
-                                  (mFramestoDrop >= 0) ? "timed out" : "cancelled",
-                                  mActiveTrack->sessionId(),
-                                  (mSyncStartEvent != 0) ? mSyncStartEvent->triggerSession() : 0);
-                            clearSyncStartEvent();
-                        }
-                    }
-                }
-                mActiveTrack->clearOverflow();
-            }
-            // client isn't retrieving buffers fast enough
-            else {
-                if (!mActiveTrack->setOverflow()) {
-                    nsecs_t now = systemTime();
-                    if ((now - lastWarning) > kWarningThrottleNs) {
-                        ALOGW("RecordThread: buffer overflow");
-                        lastWarning = now;
-                    }
-                }
-                // Release the processor for a while before asking for a new buffer.
-                // This will give the application more chance to read from the buffer and
-                // clear the overflow.
-                usleep(kRecordThreadSleepUs);
-            }
+        for (size_t i = 0; i < effectChains.size(); i ++) {
+            // thread mutex is not locked, but effect chain is locked
+            effectChains[i]->process_l();
         }
+
+        AudioBufferProvider::Buffer buffer;
+        buffer.frameCount = mFrameCount;
+        status_t status = activeTrack->getNextBuffer(&buffer);
+        if (status == NO_ERROR) {
+            readOnce = true;
+            size_t framesOut = buffer.frameCount;
+            if (mResampler == NULL) {
+                // no resampling
+                while (framesOut) {
+                    size_t framesIn = mFrameCount - mRsmpInIndex;
+                    if (framesIn > 0) {
+                        int8_t *src = (int8_t *)mRsmpInBuffer + mRsmpInIndex * mFrameSize;
+                        int8_t *dst = buffer.i8 + (buffer.frameCount - framesOut) *
+                                activeTrack->mFrameSize;
+                        if (framesIn > framesOut) {
+                            framesIn = framesOut;
+                        }
+                        mRsmpInIndex += framesIn;
+                        framesOut -= framesIn;
+                        if (mChannelCount == mReqChannelCount) {
+                            memcpy(dst, src, framesIn * mFrameSize);
+                        } else {
+                            if (mChannelCount == 1) {
+                                upmix_to_stereo_i16_from_mono_i16((int16_t *)dst,
+                                        (int16_t *)src, framesIn);
+                            } else {
+                                downmix_to_mono_i16_from_stereo_i16((int16_t *)dst,
+                                        (int16_t *)src, framesIn);
+                            }
+                        }
+                    }
+                    if (framesOut > 0 && mFrameCount == mRsmpInIndex) {
+                        void *readInto;
+                        if (framesOut == mFrameCount && mChannelCount == mReqChannelCount) {
+                            readInto = buffer.raw;
+                            framesOut = 0;
+                        } else {
+                            readInto = mRsmpInBuffer;
+                            mRsmpInIndex = 0;
+                        }
+                        mBytesRead = mInput->stream->read(mInput->stream, readInto, mBufferSize);
+                        if (mBytesRead <= 0) {
+                            // TODO: verify that it's benign to use a stale track state
+                            if ((mBytesRead < 0) && (activeTrackState == TrackBase::ACTIVE))
+                            {
+                                ALOGE("Error reading audio input");
+                                // Force input into standby so that it tries to
+                                // recover at next read attempt
+                                inputStandBy();
+                                doSleep = true;
+                            }
+                            mRsmpInIndex = mFrameCount;
+                            framesOut = 0;
+                            buffer.frameCount = 0;
+                        }
+#ifdef TEE_SINK
+                        else if (mTeeSink != 0) {
+                            (void) mTeeSink->write(readInto,
+                                    mBytesRead >> Format_frameBitShift(mTeeSink->format()));
+                        }
+#endif
+                    }
+                }
+            } else {
+                // resampling
+
+                // avoid busy-waiting if client doesn't keep up
+                bool madeProgress = false;
+
+                // keep mRsmpInBuffer full so resampler always has sufficient input
+                for (;;) {
+                    int32_t rear = mRsmpInRear;
+                    ssize_t filled = rear - mRsmpInFront;
+                    ALOG_ASSERT(0 <= filled && (size_t) filled <= mRsmpInFramesP2);
+                    // exit once there is enough data in buffer for resampler
+                    if ((size_t) filled >= mRsmpInFrames) {
+                        break;
+                    }
+                    size_t avail = mRsmpInFramesP2 - filled;
+                    // Only try to read full HAL buffers.
+                    // But if the HAL read returns a partial buffer, use it.
+                    if (avail < mFrameCount) {
+                        ALOGE("insufficient space to read: avail %d < mFrameCount %d",
+                                avail, mFrameCount);
+                        break;
+                    }
+                    // If 'avail' is non-contiguous, first read past the nominal end of buffer, then
+                    // copy to the right place.  Permitted because mRsmpInBuffer was over-allocated.
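+                    // mRsmpInFramesP2 is a power of 2, so this mask wraps 'rear' into the buffer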
+                    rear &= mRsmpInFramesP2 - 1;
+                    mBytesRead = mInput->stream->read(mInput->stream,
+                            &mRsmpInBuffer[rear * mChannelCount], mBufferSize);
+                    if (mBytesRead <= 0) {
+                        ALOGE("read failed: mBytesRead=%d < %u", mBytesRead, mBufferSize);
+                        break;
+                    }
+                    ALOG_ASSERT((size_t) mBytesRead <= mBufferSize);
+                    size_t framesRead = mBytesRead / mFrameSize;
+                    ALOG_ASSERT(framesRead > 0);
+                    madeProgress = true;
+                    // If 'avail' was non-contiguous, we now correct for reading past end of buffer.
+                    size_t part1 = mRsmpInFramesP2 - rear;
+                    if (framesRead > part1) {
+                        memcpy(mRsmpInBuffer, &mRsmpInBuffer[mRsmpInFramesP2 * mChannelCount],
+                                (framesRead - part1) * mFrameSize);
+                    }
+                    mRsmpInRear += framesRead;
+                }
+
+                if (!madeProgress) {
+                    ALOGV("Did not make progress");
+                    usleep(((mFrameCount * 1000) / mSampleRate) * 1000);
+                }
+
+                // resampler accumulates, but we only have one source track
+                memset(mRsmpOutBuffer, 0, framesOut * FCC_2 * sizeof(int32_t));
+                mResampler->resample(mRsmpOutBuffer, framesOut,
+                        this /* AudioBufferProvider* */);
+                // ditherAndClamp() works as long as all buffers returned by
+                // activeTrack->getNextBuffer() are 32 bit aligned which should be always true.
+                if (mReqChannelCount == 1) {
+                    // temporarily type pun mRsmpOutBuffer from Q19.12 to int16_t
+                    ditherAndClamp(mRsmpOutBuffer, mRsmpOutBuffer, framesOut);
+                    // the resampler always outputs stereo samples:
+                    // do post stereo to mono conversion
+                    downmix_to_mono_i16_from_stereo_i16(buffer.i16, (int16_t *)mRsmpOutBuffer,
+                            framesOut);
+                } else {
+                    ditherAndClamp((int32_t *)buffer.raw, mRsmpOutBuffer, framesOut);
+                }
+                // now done with mRsmpOutBuffer
+
+            }
+            if (mFramestoDrop == 0) {
+                activeTrack->releaseBuffer(&buffer);
+            } else {
+                if (mFramestoDrop > 0) {
+                    mFramestoDrop -= buffer.frameCount;
+                    if (mFramestoDrop <= 0) {
+                        clearSyncStartEvent();
+                    }
+                } else {
+                    mFramestoDrop += buffer.frameCount;
+                    if (mFramestoDrop >= 0 || mSyncStartEvent == 0 ||
+                            mSyncStartEvent->isCancelled()) {
+                        ALOGW("Synced record %s, session %d, trigger session %d",
+                              (mFramestoDrop >= 0) ? "timed out" : "cancelled",
+                              activeTrack->sessionId(),
+                              (mSyncStartEvent != 0) ? mSyncStartEvent->triggerSession() : 0);
+                        clearSyncStartEvent();
+                    }
+                }
+            }
+            activeTrack->clearOverflow();
+        }
+        // client isn't retrieving buffers fast enough
+        else {
+            if (!activeTrack->setOverflow()) {
+                nsecs_t now = systemTime();
+                if ((now - lastWarning) > kWarningThrottleNs) {
+                    ALOGW("RecordThread: buffer overflow");
+                    lastWarning = now;
+                }
+            }
+            // Release the processor for a while before asking for a new buffer.
+            // This will give the application more chance to read from the buffer and
+            // clear the overflow.
+            doSleep = true;
+        }
+
         // enable changes in effect chain
         unlockEffectChains(effectChains);
-        effectChains.clear();
+        // effectChains doesn't need to be cleared, since it is cleared by destructor at scope end
     }
 
-    standby();
+    standbyIfNotAlreadyInStandby();
 
     {
         Mutex::Autolock _l(mLock);
@@ -4636,7 +4816,8 @@
             sp<RecordTrack> track = mTracks[i];
             track->invalidate();
         }
-        mActiveTrack.clear();
+        mActiveTracks.clear();
+        mActiveTracksGen++;
         mStartStopCond.broadcast();
     }
 
@@ -4646,7 +4827,7 @@
     return false;
 }
 
-void AudioFlinger::RecordThread::standby()
+void AudioFlinger::RecordThread::standbyIfNotAlreadyInStandby()
 {
     if (!mStandby) {
         inputStandBy();
@@ -4659,18 +4840,19 @@
     mInput->stream->common.standby(&mInput->stream->common);
 }
 
-sp<AudioFlinger::RecordThread::RecordTrack>  AudioFlinger::RecordThread::createRecordTrack_l(
+sp<AudioFlinger::RecordThread::RecordTrack> AudioFlinger::RecordThread::createRecordTrack_l(
         const sp<AudioFlinger::Client>& client,
         uint32_t sampleRate,
         audio_format_t format,
         audio_channel_mask_t channelMask,
-        size_t frameCount,
+        size_t *pFrameCount,
         int sessionId,
         int uid,
         IAudioFlinger::track_flags_t *flags,
         pid_t tid,
         status_t *status)
 {
+    size_t frameCount = *pFrameCount;
     sp<RecordTrack> track;
     status_t lStatus;
 
@@ -4679,6 +4861,7 @@
         ALOGE("createRecordTrack_l() audio driver not initialized");
         goto Exit;
     }
+
     // client expresses a preference for FAST, but we get the final say
     if (*flags & IAudioFlinger::TRACK_FAST) {
       if (
@@ -4729,6 +4912,7 @@
         }
       }
     }
+    *pFrameCount = frameCount;
 
     // FIXME use flags and tid similar to createTrack_l()
 
@@ -4738,10 +4922,10 @@
         track = new RecordTrack(this, client, sampleRate,
                       format, channelMask, frameCount, sessionId, uid);
 
-        if (track->getCblk() == 0) {
-            ALOGE("createRecordTrack_l() no control block");
-            lStatus = NO_MEMORY;
-            track.clear();
+        lStatus = track->initCheck();
+        if (lStatus != NO_ERROR) {
+            ALOGE("createRecordTrack_l() initCheck failed %d; no control block?", lStatus);
+            // track must be cleared by the caller, as the caller holds the AF lock
             goto Exit;
         }
         mTracks.add(track);
@@ -4762,9 +4946,7 @@
     lStatus = NO_ERROR;
 
 Exit:
-    if (status) {
-        *status = lStatus;
-    }
+    *status = lStatus;
     return track;
 }
 
@@ -4795,43 +4977,57 @@
     }
 
     {
+        // This section is a rendezvous between the binder thread executing start() and RecordThread
         AutoMutex lock(mLock);
-        if (mActiveTrack != 0) {
-            if (recordTrack != mActiveTrack.get()) {
+        if (mActiveTracks.size() > 0) {
+            // FIXME does not work for multiple active tracks
+            if (mActiveTracks.indexOf(recordTrack) != 0) {
                 status = -EBUSY;
-            } else if (mActiveTrack->mState == TrackBase::PAUSING) {
-                mActiveTrack->mState = TrackBase::ACTIVE;
+            } else if (recordTrack->mState == TrackBase::PAUSING) {
+                recordTrack->mState = TrackBase::ACTIVE;
             }
             return status;
         }
 
+        // FIXME why? already set in constructor, 'STARTING_1' would be more accurate
         recordTrack->mState = TrackBase::IDLE;
-        mActiveTrack = recordTrack;
+        mActiveTracks.add(recordTrack);
+        mActiveTracksGen++;
         mLock.unlock();
         status_t status = AudioSystem::startInput(mId);
         mLock.lock();
+        // FIXME should verify that mActiveTrack is still == recordTrack
         if (status != NO_ERROR) {
-            mActiveTrack.clear();
+            mActiveTracks.remove(recordTrack);
+            mActiveTracksGen++;
             clearSyncStartEvent();
             return status;
         }
+        // FIXME LEGACY
         mRsmpInIndex = mFrameCount;
+        mRsmpInFront = 0;
+        mRsmpInRear = 0;
+        mRsmpInUnrel = 0;
         mBytesRead = 0;
         if (mResampler != NULL) {
             mResampler->reset();
         }
-        mActiveTrack->mState = TrackBase::RESUMING;
+        // FIXME hijacking a playback track state name which was intended for start after pause;
+        //       here 'STARTING_2' would be more accurate
+        recordTrack->mState = TrackBase::RESUMING;
         // signal thread to start
         ALOGV("Signal record thread");
         mWaitWorkCV.broadcast();
         // do not wait for mStartStopCond if exiting
         if (exitPending()) {
-            mActiveTrack.clear();
+            mActiveTracks.remove(recordTrack);
+            mActiveTracksGen++;
             status = INVALID_OPERATION;
             goto startError;
         }
+        // FIXME incorrect usage of wait: no explicit predicate or loop
         mStartStopCond.wait(mLock);
-        if (mActiveTrack == 0) {
+        if (mActiveTracks.indexOf(recordTrack) < 0) {
             ALOGV("Record failed to start");
             status = BAD_VALUE;
             goto startError;
@@ -4877,29 +5073,31 @@
 bool AudioFlinger::RecordThread::stop(RecordThread::RecordTrack* recordTrack) {
     ALOGV("RecordThread::stop");
     AutoMutex _l(mLock);
-    if (recordTrack != mActiveTrack.get() || recordTrack->mState == TrackBase::PAUSING) {
+    if (mActiveTracks.indexOf(recordTrack) != 0 || recordTrack->mState == TrackBase::PAUSING) {
         return false;
     }
+    // note that threadLoop may still be processing the track at this point [without lock]
     recordTrack->mState = TrackBase::PAUSING;
     // do not wait for mStartStopCond if exiting
     if (exitPending()) {
         return true;
     }
+    // FIXME incorrect usage of wait: no explicit predicate or loop
     mStartStopCond.wait(mLock);
-    // if we have been restarted, recordTrack == mActiveTrack.get() here
-    if (exitPending() || recordTrack != mActiveTrack.get()) {
+    // if we have been restarted, recordTrack is in mActiveTracks here
+    if (exitPending() || mActiveTracks.indexOf(recordTrack) != 0) {
         ALOGV("Record stopped OK");
         return true;
     }
     return false;
 }
 
-bool AudioFlinger::RecordThread::isValidSyncEvent(const sp<SyncEvent>& event) const
+bool AudioFlinger::RecordThread::isValidSyncEvent(const sp<SyncEvent>& event __unused) const
 {
     return false;
 }
 
-status_t AudioFlinger::RecordThread::setSyncEvent(const sp<SyncEvent>& event)
+status_t AudioFlinger::RecordThread::setSyncEvent(const sp<SyncEvent>& event __unused)
 {
 #if 0   // This branch is currently dead code, but is preserved in case it will be needed in future
     if (!isValidSyncEvent(event)) {
@@ -4930,7 +5128,7 @@
     track->terminate();
     track->mState = TrackBase::STOPPED;
     // active tracks are removed by threadLoop()
-    if (mActiveTrack != track) {
+    if (mActiveTracks.indexOf(track) < 0) {
         removeTrack_l(track);
     }
 }
@@ -4950,104 +5148,110 @@
 
 void AudioFlinger::RecordThread::dumpInternals(int fd, const Vector<String16>& args)
 {
-    const size_t SIZE = 256;
-    char buffer[SIZE];
-    String8 result;
+    fdprintf(fd, "\nInput thread %p:\n", this);
 
-    snprintf(buffer, SIZE, "\nInput thread %p internals\n", this);
-    result.append(buffer);
-
-    if (mActiveTrack != 0) {
-        snprintf(buffer, SIZE, "In index: %zu\n", mRsmpInIndex);
-        result.append(buffer);
-        snprintf(buffer, SIZE, "Buffer size: %zu bytes\n", mBufferSize);
-        result.append(buffer);
-        snprintf(buffer, SIZE, "Resampling: %d\n", (mResampler != NULL));
-        result.append(buffer);
-        snprintf(buffer, SIZE, "Out channel count: %u\n", mReqChannelCount);
-        result.append(buffer);
-        snprintf(buffer, SIZE, "Out sample rate: %u\n", mReqSampleRate);
-        result.append(buffer);
+    if (mActiveTracks.size() > 0) {
+        fdprintf(fd, "  In index: %zu\n", mRsmpInIndex);
+        fdprintf(fd, "  Buffer size: %zu bytes\n", mBufferSize);
+        fdprintf(fd, "  Resampling: %d\n", (mResampler != NULL));
+        fdprintf(fd, "  Out channel count: %u\n", mReqChannelCount);
+        fdprintf(fd, "  Out sample rate: %u\n", mReqSampleRate);
     } else {
-        result.append("No active record client\n");
+        fdprintf(fd, "  No active record client\n");
     }
 
-    write(fd, result.string(), result.size());
-
     dumpBase(fd, args);
 }
 
-void AudioFlinger::RecordThread::dumpTracks(int fd, const Vector<String16>& args)
+void AudioFlinger::RecordThread::dumpTracks(int fd, const Vector<String16>& args __unused)
 {
     const size_t SIZE = 256;
     char buffer[SIZE];
     String8 result;
 
-    snprintf(buffer, SIZE, "Input thread %p tracks\n", this);
-    result.append(buffer);
-    RecordTrack::appendDumpHeader(result);
-    for (size_t i = 0; i < mTracks.size(); ++i) {
-        sp<RecordTrack> track = mTracks[i];
-        if (track != 0) {
-            track->dump(buffer, SIZE);
-            result.append(buffer);
+    size_t numtracks = mTracks.size();
+    size_t numactive = mActiveTracks.size();
+    size_t numactiveseen = 0;
+    fdprintf(fd, "  %d Tracks", numtracks);
+    if (numtracks) {
+        fdprintf(fd, " of which %d are active\n", numactive);
+        RecordTrack::appendDumpHeader(result);
+        for (size_t i = 0; i < numtracks ; ++i) {
+            sp<RecordTrack> track = mTracks[i];
+            if (track != 0) {
+                bool active = mActiveTracks.indexOf(track) >= 0;
+                if (active) {
+                    numactiveseen++;
+                }
+                track->dump(buffer, SIZE, active);
+                result.append(buffer);
+            }
         }
+    } else {
+        fdprintf(fd, "\n");
     }
 
-    if (mActiveTrack != 0) {
-        snprintf(buffer, SIZE, "\nInput thread %p active tracks\n", this);
+    if (numactiveseen != numactive) {
+        snprintf(buffer, SIZE, "  The following tracks are in the active list but"
+                " not in the track list\n");
         result.append(buffer);
         RecordTrack::appendDumpHeader(result);
-        mActiveTrack->dump(buffer, SIZE);
-        result.append(buffer);
+        for (size_t i = 0; i < numactive; ++i) {
+            sp<RecordTrack> track = mActiveTracks[i];
+            if (mTracks.indexOf(track) < 0) {
+                track->dump(buffer, SIZE, true);
+                result.append(buffer);
+            }
+        }
 
     }
     write(fd, result.string(), result.size());
 }
 
 // AudioBufferProvider interface
-status_t AudioFlinger::RecordThread::getNextBuffer(AudioBufferProvider::Buffer* buffer, int64_t pts)
+status_t AudioFlinger::RecordThread::getNextBuffer(AudioBufferProvider::Buffer* buffer, int64_t pts __unused)
 {
-    size_t framesReq = buffer->frameCount;
-    size_t framesReady = mFrameCount - mRsmpInIndex;
-    int channelCount;
-
-    if (framesReady == 0) {
-        mBytesRead = mInput->stream->read(mInput->stream, mRsmpInBuffer, mBufferSize);
-        if (mBytesRead <= 0) {
-            if ((mBytesRead < 0) && (mActiveTrack->mState == TrackBase::ACTIVE)) {
-                ALOGE("RecordThread::getNextBuffer() Error reading audio input");
-                // Force input into standby so that it tries to
-                // recover at next read attempt
-                inputStandBy();
-                usleep(kRecordThreadSleepUs);
-            }
-            buffer->raw = NULL;
-            buffer->frameCount = 0;
-            return NOT_ENOUGH_DATA;
-        }
-        mRsmpInIndex = 0;
-        framesReady = mFrameCount;
+    int32_t rear = mRsmpInRear;
+    int32_t front = mRsmpInFront;
+    ssize_t filled = rear - front;
+    ALOG_ASSERT(0 <= filled && (size_t) filled <= mRsmpInFramesP2);
+    // 'filled' may be non-contiguous, so return only the first contiguous chunk
+    front &= mRsmpInFramesP2 - 1;
+    size_t part1 = mRsmpInFramesP2 - front;
+    if (part1 > (size_t) filled) {
+        part1 = filled;
+    }
+    size_t ask = buffer->frameCount;
+    ALOG_ASSERT(ask > 0);
+    if (part1 > ask) {
+        part1 = ask;
+    }
+    if (part1 == 0) {
+        // Higher-level should keep mRsmpInBuffer full, and not call resampler if empty
+        ALOGE("RecordThread::getNextBuffer() starved");
+        buffer->raw = NULL;
+        buffer->frameCount = 0;
+        mRsmpInUnrel = 0;
+        return NOT_ENOUGH_DATA;
     }
 
-    if (framesReq > framesReady) {
-        framesReq = framesReady;
-    }
-
-    if (mChannelCount == 1 && mReqChannelCount == 2) {
-        channelCount = 1;
-    } else {
-        channelCount = 2;
-    }
-    buffer->raw = mRsmpInBuffer + mRsmpInIndex * channelCount;
-    buffer->frameCount = framesReq;
+    buffer->raw = mRsmpInBuffer + front * mChannelCount;
+    buffer->frameCount = part1;
+    mRsmpInUnrel = part1;
     return NO_ERROR;
 }
 
 // AudioBufferProvider interface
 void AudioFlinger::RecordThread::releaseBuffer(AudioBufferProvider::Buffer* buffer)
 {
-    mRsmpInIndex += buffer->frameCount;
+    size_t stepCount = buffer->frameCount;
+    if (stepCount == 0) {
+        return;
+    }
+    ALOG_ASSERT(stepCount <= mRsmpInUnrel);
+    mRsmpInUnrel -= stepCount;
+    mRsmpInFront += stepCount;
+    buffer->raw = NULL;
     buffer->frameCount = 0;
 }
 
@@ -5062,7 +5266,7 @@
         int value;
         audio_format_t reqFormat = mFormat;
         uint32_t reqSamplingRate = mReqSampleRate;
-        uint32_t reqChannelCount = mReqChannelCount;
+        audio_channel_mask_t reqChannelMask = audio_channel_in_mask_from_count(mReqChannelCount);
 
         if (param.getInt(String8(AudioParameter::keySamplingRate), value) == NO_ERROR) {
             reqSamplingRate = value;
@@ -5077,14 +5281,19 @@
             }
         }
         if (param.getInt(String8(AudioParameter::keyChannels), value) == NO_ERROR) {
-            reqChannelCount = popcount(value);
-            reconfig = true;
+            audio_channel_mask_t mask = (audio_channel_mask_t) value;
+            if (mask != AUDIO_CHANNEL_IN_MONO && mask != AUDIO_CHANNEL_IN_STEREO) {
+                status = BAD_VALUE;
+            } else {
+                reqChannelMask = mask;
+                reconfig = true;
+            }
         }
         if (param.getInt(String8(AudioParameter::keyFrameCount), value) == NO_ERROR) {
             // do not accept frame count changes if tracks are open as the track buffer
             // size depends on frame count and correct behavior would not be guaranteed
             // if frame count is changed after track creation
-            if (mActiveTrack != 0) {
+            if (mActiveTracks.size() > 0) {
                 status = INVALID_OPERATION;
             } else {
                 reconfig = true;
@@ -5127,6 +5336,7 @@
             }
             mAudioSource = (audio_source_t)value;
         }
+
         if (status == NO_ERROR) {
             status = mInput->stream->common.set_parameters(&mInput->stream->common,
                     keyValuePair.string());
@@ -5143,7 +5353,8 @@
                             <= (2 * reqSamplingRate)) &&
                     popcount(mInput->stream->common.get_channels(&mInput->stream->common))
                             <= FCC_2 &&
-                    (reqChannelCount <= FCC_2)) {
+                    (reqChannelMask == AUDIO_CHANNEL_IN_MONO ||
+                            reqChannelMask == AUDIO_CHANNEL_IN_STEREO)) {
                     status = NO_ERROR;
                 }
                 if (status == NO_ERROR) {
@@ -5177,9 +5388,9 @@
     return out_s8;
 }
 
-void AudioFlinger::RecordThread::audioConfigChanged_l(int event, int param) {
+void AudioFlinger::RecordThread::audioConfigChanged_l(int event, int param __unused) {
     AudioSystem::OutputDescriptor desc;
-    void *param2 = NULL;
+    const void *param2 = NULL;
 
     switch (event) {
     case AudioSystem::INPUT_OPENED:
@@ -5213,39 +5424,32 @@
     mChannelCount = popcount(mChannelMask);
     mFormat = mInput->stream->common.get_format(&mInput->stream->common);
     if (mFormat != AUDIO_FORMAT_PCM_16_BIT) {
-        ALOGE("HAL format %d not supported; must be AUDIO_FORMAT_PCM_16_BIT", mFormat);
+        ALOGE("HAL format %#x not supported; must be AUDIO_FORMAT_PCM_16_BIT", mFormat);
     }
     mFrameSize = audio_stream_frame_size(&mInput->stream->common);
     mBufferSize = mInput->stream->common.get_buffer_size(&mInput->stream->common);
     mFrameCount = mBufferSize / mFrameSize;
-    mRsmpInBuffer = new int16_t[mFrameCount * mChannelCount];
+    // With 3 HAL buffers, we can guarantee ability to down-sample the input by ratio of 2:1 to
+    // 1 full output buffer, regardless of the alignment of the available input.
+    mRsmpInFrames = mFrameCount * 3;
+    mRsmpInFramesP2 = roundup(mRsmpInFrames);
+    // Over-allocate beyond mRsmpInFramesP2 to permit a HAL read past end of buffer
+    mRsmpInBuffer = new int16_t[(mRsmpInFramesP2 + mFrameCount - 1) * mChannelCount];
+    mRsmpInFront = 0;
+    mRsmpInRear = 0;
+    mRsmpInUnrel = 0;
 
-    if (mSampleRate != mReqSampleRate && mChannelCount <= FCC_2 && mReqChannelCount <= FCC_2)
-    {
-        int channelCount;
-        // optimization: if mono to mono, use the resampler in stereo to stereo mode to avoid
-        // stereo to mono post process as the resampler always outputs stereo.
-        if (mChannelCount == 1 && mReqChannelCount == 2) {
-            channelCount = 1;
-        } else {
-            channelCount = 2;
-        }
-        mResampler = AudioResampler::create(16, channelCount, mReqSampleRate);
+    if (mSampleRate != mReqSampleRate && mChannelCount <= FCC_2 && mReqChannelCount <= FCC_2) {
+        mResampler = AudioResampler::create(16, (int) mChannelCount, mReqSampleRate);
         mResampler->setSampleRate(mSampleRate);
         mResampler->setVolume(AudioMixer::UNITY_GAIN, AudioMixer::UNITY_GAIN);
+        // resampler always outputs stereo
         mRsmpOutBuffer = new int32_t[mFrameCount * FCC_2];
-
-        // optmization: if mono to mono, alter input frame count as if we were inputing
-        // stereo samples
-        if (mChannelCount == 1 && mReqChannelCount == 1) {
-            mFrameCount >>= 1;
-        }
-
     }
     mRsmpInIndex = mFrameCount;
 }
 
-unsigned int AudioFlinger::RecordThread::getInputFramesLost()
+uint32_t AudioFlinger::RecordThread::getInputFramesLost()
 {
     Mutex::Autolock _l(mLock);
     if (initCheck() != NO_ERROR) {
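
readInputParameters() above sizes the resampler input buffer at three HAL buffers and rounds that capacity up to a power of two, so the rolling mRsmpInFront/mRsmpInRear counters can be reduced to buffer indices with a simple mask. A sketch of that arithmetic (roundup_pow2() here is only an illustration of the helper the code assumes; AOSP provides an equivalent in audio_utils):

    #include <stddef.h>
    #include <stdint.h>

    // Round v up to the next power of two (v > 0).
    static size_t roundup_pow2(size_t v) {
        size_t p = 1;
        while (p < v) p <<= 1;
        return p;
    }

    // With a power-of-two capacity, a rolling 32-bit counter maps to an index
    // with a mask, and rear - front remains the filled count even after the
    // counters wrap around.
    static size_t index_of(int32_t counter, size_t framesP2) {
        return (size_t)counter & (framesP2 - 1);
    }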
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index a2fb874..999fea3 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -36,6 +36,8 @@
                 audio_devices_t outDevice, audio_devices_t inDevice, type_t type);
     virtual             ~ThreadBase();
 
+    virtual status_t    readyToRun();
+
     void dumpBase(int fd, const Vector<String16>& args);
     void dumpEffectChains(int fd, const Vector<String16>& args);
 
@@ -63,7 +65,7 @@
     class IoConfigEvent : public ConfigEvent {
     public:
         IoConfigEvent(int event, int param) :
-            ConfigEvent(CFG_EVENT_IO), mEvent(event), mParam(event) {}
+            ConfigEvent(CFG_EVENT_IO), mEvent(event), mParam(param) {}
         virtual ~IoConfigEvent() {}
 
                 int event() const { return mEvent; }
@@ -141,6 +143,7 @@
                 void        sendIoConfigEvent_l(int event, int param = 0);
                 void        sendPrioConfigEvent_l(pid_t pid, pid_t tid, int32_t prio);
                 void        processConfigEvents();
+                void        processConfigEvents_l();
 
                 // see note at declaration of mStandby, mOutDevice and mInDevice
                 bool        standby() const { return mStandby; }
@@ -156,7 +159,7 @@
                                     int sessionId,
                                     effect_descriptor_t *desc,
                                     int *enabled,
-                                    status_t *status);
+                                    status_t *status /*non-NULL*/);
                 void disconnectEffect(const sp< EffectModule>& effect,
                                       EffectHandle *handle,
                                       bool unpinIfLast);
@@ -198,13 +201,13 @@
                 // effect
                 void removeEffect_l(const sp< EffectModule>& effect);
                 // detach all tracks connected to an auxiliary effect
-    virtual     void detachAuxEffect_l(int effectId) {}
+    virtual     void detachAuxEffect_l(int effectId __unused) {}
                 // returns either EFFECT_SESSION if effects on this audio session exist in one
                 // chain, or TRACK_SESSION if tracks on this audio session exist, or both
                 virtual uint32_t hasAudioSession(int sessionId) const = 0;
                 // the value returned by default implementation is not important as the
                 // strategy is only meaningful for PlaybackThread which implements this method
-                virtual uint32_t getStrategyForSession_l(int sessionId) { return 0; }
+                virtual uint32_t getStrategyForSession_l(int sessionId __unused) { return 0; }
 
                 // suspend or restore effect according to the type of effect passed. a NULL
                 // type pointer means suspend all effects in the session
@@ -275,6 +278,7 @@
                 uint32_t                mChannelCount;
                 size_t                  mFrameSize;
                 audio_format_t          mFormat;
+                size_t                  mBufferSize;       // HAL buffer size for read() or write()
 
                 // Parameter sequence by client: binder thread calling setParameters():
                 //  1. Lock mLock
@@ -303,12 +307,12 @@
                 Vector<ConfigEvent *>     mConfigEvents;
 
                 // These fields are written and read by thread itself without lock or barrier,
-                // and read by other threads without lock or barrier via standby() , outDevice()
+                // and read by other threads without lock or barrier via standby(), outDevice()
                 // and inDevice().
                 // Because of the absence of a lock or barrier, any other thread that reads
                 // these fields must use the information in isolation, or be prepared to deal
                 // with possibility that it might be inconsistent with other information.
-                bool                    mStandby;   // Whether thread is currently in standby.
+                bool                    mStandby;     // Whether thread is currently in standby.
                 audio_devices_t         mOutDevice;   // output device
                 audio_devices_t         mInDevice;    // input device
                 audio_source_t          mAudioSource; // (see audio.h, audio_source_t)
@@ -358,7 +362,6 @@
                 void        dump(int fd, const Vector<String16>& args);
 
     // Thread virtuals
-    virtual     status_t    readyToRun();
     virtual     bool        threadLoop();
 
     // RefBase
@@ -391,7 +394,7 @@
     virtual     bool        waitingAsyncCallback();
     virtual     bool        waitingAsyncCallback_l();
     virtual     bool        shouldStandby_l();
-
+    virtual     void        onAddNewTrack_l();
 
     // ThreadBase virtuals
     virtual     void        preExit();
@@ -419,13 +422,13 @@
                                 uint32_t sampleRate,
                                 audio_format_t format,
                                 audio_channel_mask_t channelMask,
-                                size_t frameCount,
+                                size_t *pFrameCount,
                                 const sp<IMemory>& sharedBuffer,
                                 int sessionId,
                                 IAudioFlinger::track_flags_t *flags,
                                 pid_t tid,
                                 int uid,
-                                status_t *status);
+                                status_t *status /*non-NULL*/);
 
                 AudioStreamOut* getOutput() const;
                 AudioStreamOut* clearOutput();
@@ -479,7 +482,6 @@
     size_t                          mNormalFrameCount;  // normal mixer and effects
 
     int16_t*                        mMixBuffer;         // frame size aligned mix buffer
-    int8_t*                         mAllocMixBuffer;    // mixer buffer allocation address
 
     // suspend count, > 0 means suspended.  While suspended, the thread continues to pull from
     // tracks and mix, but doesn't write to HAL.  A2DP and SCO HAL implementations can't handle
@@ -623,13 +625,12 @@
     sp<NBLog::Writer>       mFastMixerNBLogWriter;
 public:
     virtual     bool        hasFastMixer() const = 0;
-    virtual     FastTrackUnderruns getFastTrackUnderruns(size_t fastIndex) const
+    virtual     FastTrackUnderruns getFastTrackUnderruns(size_t fastIndex __unused) const
                                 { FastTrackUnderruns dummy; return dummy; }
 
 protected:
                 // accessed by both binder threads and within threadLoop(), lock on mutex needed
                 unsigned    mFastTrackAvailMask;    // bit i set if fast track [i] is available
-    virtual     void        flushOutput_l();
 
 private:
     // timestamp latch:
@@ -748,11 +749,11 @@
     // threadLoop snippets
     virtual     mixer_state prepareTracks_l(Vector< sp<Track> > *tracksToRemove);
     virtual     void        threadLoop_exit();
-    virtual     void        flushOutput_l();
 
     virtual     bool        waitingAsyncCallback();
     virtual     bool        waitingAsyncCallback_l();
     virtual     bool        shouldStandby_l();
+    virtual     void        onAddNewTrack_l();
 
 private:
                 void        flushHw_l();
@@ -867,23 +868,23 @@
 
     // Thread virtuals
     virtual bool        threadLoop();
-    virtual status_t    readyToRun();
 
     // RefBase
     virtual void        onFirstRef();
 
     virtual status_t    initCheck() const { return (mInput == NULL) ? NO_INIT : NO_ERROR; }
+
             sp<AudioFlinger::RecordThread::RecordTrack>  createRecordTrack_l(
                     const sp<AudioFlinger::Client>& client,
                     uint32_t sampleRate,
                     audio_format_t format,
                     audio_channel_mask_t channelMask,
-                    size_t frameCount,
+                    size_t *pFrameCount,
                     int sessionId,
                     int uid,
                     IAudioFlinger::track_flags_t *flags,
                     pid_t tid,
-                    status_t *status);
+                    status_t *status /*non-NULL*/);
 
             status_t    start(RecordTrack* recordTrack,
                               AudioSystem::sync_event_t event,
@@ -905,7 +906,7 @@
     virtual String8     getParameters(const String8& keys);
     virtual void        audioConfigChanged_l(int event, int param = 0);
             void        readInputParameters();
-    virtual unsigned int  getInputFramesLost();
+    virtual uint32_t    getInputFramesLost();
 
     virtual status_t addEffectChain_l(const sp<EffectChain>& chain);
     virtual size_t removeEffectChain_l(const sp<EffectChain>& chain);
@@ -926,30 +927,43 @@
             bool        hasFastRecorder() const { return false; }
 
 private:
-            void clearSyncStartEvent();
+            void    clearSyncStartEvent();
 
             // Enter standby if not already in standby, and set mStandby flag
-            void standby();
+            void    standbyIfNotAlreadyInStandby();
 
             // Call the HAL standby method unconditionally, and don't change mStandby flag
-            void inputStandBy();
+            void    inputStandBy();
 
             AudioStreamIn                       *mInput;
             SortedVector < sp<RecordTrack> >    mTracks;
-            // mActiveTrack has dual roles:  it indicates the current active track, and
+            // mActiveTracks has dual roles:  it indicates the current active track(s), and
             // is used together with mStartStopCond to indicate start()/stop() progress
-            sp<RecordTrack>                     mActiveTrack;
+            SortedVector< sp<RecordTrack> >     mActiveTracks;
+            // generation counter for mActiveTracks
+            int                                 mActiveTracksGen;
             Condition                           mStartStopCond;
 
             // updated by RecordThread::readInputParameters()
             AudioResampler                      *mResampler;
             // interleaved stereo pairs of fixed-point signed Q19.12
             int32_t                             *mRsmpOutBuffer;
-            int16_t                             *mRsmpInBuffer; // [mFrameCount * mChannelCount]
-            size_t                              mRsmpInIndex;
-            size_t                              mBufferSize;    // stream buffer size for read()
+
+            // resampler converts input at HAL Hz to output at AudioRecord client Hz
+            int16_t                             *mRsmpInBuffer; // see new[] for details on the size
+            size_t                              mRsmpInFrames;  // size of resampler input in frames
+            size_t                              mRsmpInFramesP2;// size rounded up to a power-of-2
+            size_t                              mRsmpInUnrel;   // unreleased frames remaining from
+                                                                // most recent getNextBuffer
+            // these are rolling counters that are never cleared
+            int32_t                             mRsmpInFront;   // next available frame
+            int32_t                             mRsmpInRear;    // last filled frame + 1
+            size_t                              mRsmpInIndex;   // FIXME legacy
+
+            // client's requested configuration, which may differ from the HAL configuration
             const uint32_t                      mReqChannelCount;
             const uint32_t                      mReqSampleRate;
+
             ssize_t                             mBytesRead;
             // sync event triggering actual audio capture. Frames read before this event will
             // be dropped and therefore not read by the application.
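
Threads.h above replaces the single mActiveTrack with mActiveTracks plus a generation counter, mActiveTracksGen. A minimal sketch of the generation-counter idea (illustrative types only, not the actual threadLoop() logic): the reader re-copies the list under the lock only when the generation has moved on since its last snapshot.

    #include <mutex>
    #include <vector>

    struct ActiveList {
        std::mutex lock;
        std::vector<int> tracks;   // stand-in for SortedVector< sp<RecordTrack> >
        int gen = 0;               // bumped on every membership change

        void add(int t) {
            std::lock_guard<std::mutex> _l(lock);
            tracks.push_back(t);
            gen++;
        }
        // Returns true and refreshes 'out' only if the list changed since cachedGen.
        bool snapshotIfChanged(int& cachedGen, std::vector<int>& out) {
            std::lock_guard<std::mutex> _l(lock);
            if (cachedGen == gen) return false;
            out = tracks;
            cachedGen = gen;
            return true;
        }
    };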
diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h
index cd201d9..05fde7c 100644
--- a/services/audioflinger/TrackBase.h
+++ b/services/audioflinger/TrackBase.h
@@ -48,6 +48,7 @@
                                 int uid,
                                 bool isOut);
     virtual             ~TrackBase();
+    virtual status_t    initCheck() const { return getCblk() != 0 ? NO_ERROR : NO_MEMORY; }
 
     virtual status_t    start(AudioSystem::sync_event_t event,
                              int triggerSession) = 0;
@@ -78,15 +79,6 @@
 
     virtual uint32_t sampleRate() const { return mSampleRate; }
 
-    // Return a pointer to the start of a contiguous slice of the track buffer.
-    // Parameter 'offset' is the requested start position, expressed in
-    // monotonically increasing frame units relative to the track epoch.
-    // Parameter 'frames' is the requested length, also in frame units.
-    // Always returns non-NULL.  It is the caller's responsibility to
-    // verify that this will be successful; the result of calling this
-    // function with invalid 'offset' or 'frames' is undefined.
-    void* getBuffer(uint32_t offset, uint32_t frames) const;
-
     bool isStopped() const {
         return (mState == STOPPED || mState == FLUSHED);
     }
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index fccc7b8..e5152b8 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -116,12 +116,11 @@
 
     if (client != 0) {
         mCblkMemory = client->heap()->allocate(size);
-        if (mCblkMemory != 0) {
-            mCblk = static_cast<audio_track_cblk_t *>(mCblkMemory->pointer());
-            // can't assume mCblk != NULL
-        } else {
+        if (mCblkMemory == 0 ||
+                (mCblk = static_cast<audio_track_cblk_t *>(mCblkMemory->pointer())) == NULL) {
             ALOGE("not enough memory for AudioTrack size=%u", size);
             client->heap()->dump("AudioTrack");
+            mCblkMemory.clear();
             return;
         }
     } else {
@@ -134,7 +133,6 @@
     if (mCblk != NULL) {
         new(mCblk) audio_track_cblk_t();
         // clear all buffers
-        mCblk->frameCount_ = frameCount;
         if (sharedBuffer == 0) {
             mBuffer = (char*)mCblk + sizeof(audio_track_cblk_t);
             memset(mBuffer, 0, bufferSize);
@@ -148,7 +146,7 @@
 #ifdef TEE_SINK
         if (mTeeSinkTrackEnabled) {
             NBAIO_Format pipeFormat = Format_from_SR_C(mSampleRate, mChannelCount);
-            if (pipeFormat != Format_Invalid) {
+            if (Format_isValid(pipeFormat)) {
                 Pipe *pipe = new Pipe(mTeeSinkTrackFrames, pipeFormat);
                 size_t numCounterOffers = 0;
                 const NBAIO_Format offers[1] = {pipeFormat};
@@ -275,6 +273,11 @@
     if (!mTrack->isTimedTrack())
         return INVALID_OPERATION;
 
+    if (buffer == 0 || buffer->pointer() == NULL) {
+        ALOGE("queueTimedBuffer() buffer is 0 or has NULL pointer()");
+        return BAD_VALUE;
+    }
+
     PlaybackThread::TimedTrack* tt =
             reinterpret_cast<PlaybackThread::TimedTrack*>(mTrack.get());
     return tt->queueTimedBuffer(buffer, pts);
@@ -344,7 +347,8 @@
     mCachedVolume(1.0),
     mIsInvalid(false),
     mAudioTrackServerProxy(NULL),
-    mResumeToStopping(false)
+    mResumeToStopping(false),
+    mFlushHwPending(false)
 {
     if (mCblk != NULL) {
         if (sharedBuffer == 0) {
@@ -396,6 +400,15 @@
     }
 }
 
+status_t AudioFlinger::PlaybackThread::Track::initCheck() const
+{
+    status_t status = TrackBase::initCheck();
+    if (status == NO_ERROR && mName < 0) {
+        status = NO_MEMORY;
+    }
+    return status;
+}
+
 void AudioFlinger::PlaybackThread::Track::destroy()
 {
     // NOTE: destroyTrack_l() can remove a strong reference to this Track
@@ -422,17 +435,19 @@
 
 /*static*/ void AudioFlinger::PlaybackThread::Track::appendDumpHeader(String8& result)
 {
-    result.append("   Name Client Type      Fmt Chn mask Session fCount S F SRate  "
+    result.append("    Name Active Client Type      Fmt Chn mask Session fCount S F SRate  "
                   "L dB  R dB    Server Main buf  Aux Buf Flags UndFrmCnt\n");
 }
 
-void AudioFlinger::PlaybackThread::Track::dump(char* buffer, size_t size)
+void AudioFlinger::PlaybackThread::Track::dump(char* buffer, size_t size, bool active)
 {
     uint32_t vlr = mAudioTrackServerProxy->getVolumeLR();
     if (isFastTrack()) {
-        sprintf(buffer, "   F %2d", mFastIndex);
+        sprintf(buffer, "    F %2d", mFastIndex);
+    } else if (mName >= AudioMixer::TRACK0) {
+        sprintf(buffer, "    %4d", mName - AudioMixer::TRACK0);
     } else {
-        sprintf(buffer, "   %4d", mName - AudioMixer::TRACK0);
+        sprintf(buffer, "    none");
     }
     track_state state = mState;
     char stateChar;
@@ -487,8 +502,9 @@
         nowInUnderrun = '?';
         break;
     }
-    snprintf(&buffer[7], size-7, " %6u %4u %08X %08X %7u %6zu %1c %1d %5u %5.2g %5.2g  "
+    snprintf(&buffer[8], size-8, " %6s %6u %4u %08X %08X %7u %6zu %1c %1d %5u %5.2g %5.2g  "
                                  "%08X %p %p 0x%03X %9u%c\n",
+            active ? "yes" : "no",
             (mClient == 0) ? getpid_cached : mClient->pid(),
             mStreamType,
             mFormat,
@@ -514,7 +530,7 @@
 
 // AudioBufferProvider interface
 status_t AudioFlinger::PlaybackThread::Track::getNextBuffer(
-        AudioBufferProvider::Buffer* buffer, int64_t pts)
+        AudioBufferProvider::Buffer* buffer, int64_t pts __unused)
 {
     ServerProxy::Buffer buf;
     size_t desiredFrames = buffer->frameCount;
@@ -551,7 +567,7 @@
 
 // Don't call for fast tracks; the framesReady() could result in priority inversion
 bool AudioFlinger::PlaybackThread::Track::isReady() const {
-    if (mFillingUpStatus != FS_FILLING || isStopped() || isPausing()) {
+    if (mFillingUpStatus != FS_FILLING || isStopped() || isPausing() || isStopping()) {
         return true;
     }
 
@@ -564,8 +580,8 @@
     return false;
 }
 
-status_t AudioFlinger::PlaybackThread::Track::start(AudioSystem::sync_event_t event,
-                                                    int triggerSession)
+status_t AudioFlinger::PlaybackThread::Track::start(AudioSystem::sync_event_t event __unused,
+                                                    int triggerSession __unused)
 {
     status_t status = NO_ERROR;
     ALOGV("start(%d), calling pid %d session %d",
@@ -719,6 +735,7 @@
                 mRetryCount = PlaybackThread::kMaxTrackRetriesOffload;
             }
 
+            mFlushHwPending = true;
             mResumeToStopping = false;
         } else {
             if (mState != STOPPING_1 && mState != STOPPING_2 && mState != STOPPED &&
@@ -739,11 +756,19 @@
         // Prevent flush being lost if the track is flushed and then resumed
         // before mixer thread can run. This is important when offloading
         // because the hardware buffer could hold a large amount of audio
-        playbackThread->flushOutput_l();
         playbackThread->broadcast_l();
     }
 }
 
+// must be called with thread lock held
+void AudioFlinger::PlaybackThread::Track::flushAck()
+{
+    if (!isOffloaded())
+        return;
+
+    mFlushHwPending = false;
+}
+
 void AudioFlinger::PlaybackThread::Track::reset()
 {
     // Do not reset twice to avoid discarding data written just after a flush and before
@@ -979,7 +1004,8 @@
             size_t frameCount,
             const sp<IMemory>& sharedBuffer,
             int sessionId,
-            int uid) {
+            int uid)
+{
     if (!client->reserveTimedTrack())
         return 0;
 
@@ -1045,15 +1071,14 @@
 
         mTimedMemoryDealer = new MemoryDealer(kTimedBufferHeapSize,
                                               "AudioFlingerTimed");
-        if (mTimedMemoryDealer == NULL)
+        if (mTimedMemoryDealer == NULL) {
             return NO_MEMORY;
+        }
     }
 
     sp<IMemory> newBuffer = mTimedMemoryDealer->allocate(size);
-    if (newBuffer == NULL) {
-        newBuffer = mTimedMemoryDealer->allocate(size);
-        if (newBuffer == NULL)
-            return NO_MEMORY;
+    if (newBuffer == 0 || newBuffer->pointer() == NULL) {
+        return NO_MEMORY;
     }
 
     *buffer = newBuffer;
@@ -1152,7 +1177,7 @@
 
 void AudioFlinger::PlaybackThread::TimedTrack::updateFramesPendingAfterTrim_l(
         const TimedBuffer& buf,
-        const char* logTag) {
+        const char* logTag __unused) {
     uint32_t bufBytes        = buf.buffer()->size();
     uint32_t consumedAlready = buf.position();
 
@@ -1504,9 +1529,9 @@
         mOutBuffer.frameCount = 0;
         playbackThread->mTracks.add(this);
         ALOGV("OutputTrack constructor mCblk %p, mBuffer %p, "
-                "mCblk->frameCount_ %u, mChannelMask 0x%08x",
+                "frameCount %u, mChannelMask 0x%08x",
                 mCblk, mBuffer,
-                mCblk->frameCount_, mChannelMask);
+                frameCount, mChannelMask);
         // since client and server are in the same process,
         // the buffer has the same virtual address on both sides
         mClientProxy = new AudioTrackClientProxy(mCblk, mBuffer, mFrameCount, mFrameSize);
@@ -1764,9 +1789,7 @@
 {
     ALOGV("RecordTrack constructor");
     if (mCblk != NULL) {
-        mAudioRecordServerProxy = new AudioRecordServerProxy(mCblk, mBuffer, frameCount,
-                mFrameSize);
-        mServerProxy = mAudioRecordServerProxy;
+        mServerProxy = new AudioRecordServerProxy(mCblk, mBuffer, frameCount, mFrameSize);
     }
 }
 
@@ -1777,7 +1800,7 @@
 
 // AudioBufferProvider interface
 status_t AudioFlinger::RecordThread::RecordTrack::getNextBuffer(AudioBufferProvider::Buffer* buffer,
-        int64_t pts)
+        int64_t pts __unused)
 {
     ServerProxy::Buffer buf;
     buf.mFrameCount = buffer->frameCount;
@@ -1845,12 +1868,13 @@
 
 /*static*/ void AudioFlinger::RecordThread::RecordTrack::appendDumpHeader(String8& result)
 {
-    result.append("Client Fmt Chn mask Session S   Server fCount\n");
+    result.append("    Active Client Fmt Chn mask Session S   Server fCount\n");
 }
 
-void AudioFlinger::RecordThread::RecordTrack::dump(char* buffer, size_t size)
+void AudioFlinger::RecordThread::RecordTrack::dump(char* buffer, size_t size, bool active)
 {
-    snprintf(buffer, size, "%6u %3u %08X %7u %1d %08X %6zu\n",
+    snprintf(buffer, size, "    %6s %6u %3u %08X %7u %1d %08X %6zu\n",
+            active ? "yes" : "no",
             (mClient == 0) ? getpid_cached : mClient->pid(),
             mFormat,
             mChannelMask,
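
Among the Tracks.cpp changes above, Track::initCheck() now also reports failure when no mixer name could be allocated (mName < 0), in addition to the missing-control-block case handled by TrackBase::initCheck(). A short sketch of the caller-side pattern this enables (assumed usage fragment, not the actual createTrack_l() code):

    // Assumed caller pattern; constructor arguments elided.
    sp<Track> track = new Track(/* ... constructor arguments ... */);
    status_t lStatus = track->initCheck();
    if (lStatus != NO_ERROR) {
        ALOGE("createTrack_l() initCheck failed %d; no control block?", lStatus);
        track.clear();   // drop the only strong reference
        // propagate lStatus through the non-NULL status out-parameter
    }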
diff --git a/services/audioflinger/test-resample.cpp b/services/audioflinger/test-resample.cpp
index 7a314cf..66fcd90 100644
--- a/services/audioflinger/test-resample.cpp
+++ b/services/audioflinger/test-resample.cpp
@@ -26,54 +26,30 @@
 #include <errno.h>
 #include <time.h>
 #include <math.h>
+#include <audio_utils/sndfile.h>
 
 using namespace android;
 
-struct HeaderWav {
-    HeaderWav(size_t size, int nc, int sr, int bits) {
-        strncpy(RIFF, "RIFF", 4);
-        chunkSize = size + sizeof(HeaderWav);
-        strncpy(WAVE, "WAVE", 4);
-        strncpy(fmt,  "fmt ", 4);
-        fmtSize = 16;
-        audioFormat = 1;
-        numChannels = nc;
-        samplesRate = sr;
-        byteRate = sr * numChannels * (bits/8);
-        align = nc*(bits/8);
-        bitsPerSample = bits;
-        strncpy(data, "data", 4);
-        dataSize = size;
-    }
-
-    char RIFF[4];           // RIFF
-    uint32_t chunkSize;     // File size
-    char WAVE[4];        // WAVE
-    char fmt[4];            // fmt\0
-    uint32_t fmtSize;       // fmt size
-    uint16_t audioFormat;   // 1=PCM
-    uint16_t numChannels;   // num channels
-    uint32_t samplesRate;   // sample rate in hz
-    uint32_t byteRate;      // Bps
-    uint16_t align;         // 2=16-bit mono, 4=16-bit stereo
-    uint16_t bitsPerSample; // bits per sample
-    char data[4];           // "data"
-    uint32_t dataSize;      // size
-};
+bool gVerbose = false;
 
 static int usage(const char* name) {
-    fprintf(stderr,"Usage: %s [-p] [-h] [-s] [-q {dq|lq|mq|hq|vhq}] [-i input-sample-rate] "
-                   "[-o output-sample-rate] [<input-file>] <output-file>\n", name);
+    fprintf(stderr,"Usage: %s [-p] [-h] [-v] [-s] [-q {dq|lq|mq|hq|vhq|dlq|dmq|dhq}]"
+                   " [-i input-sample-rate] [-o output-sample-rate] [<input-file>]"
+                   " <output-file>\n", name);
     fprintf(stderr,"    -p    enable profiling\n");
     fprintf(stderr,"    -h    create wav file\n");
-    fprintf(stderr,"    -s    stereo\n");
+    fprintf(stderr,"    -v    verbose : log buffer provider calls\n");
+    fprintf(stderr,"    -s    stereo (ignored if input file is specified)\n");
     fprintf(stderr,"    -q    resampler quality\n");
     fprintf(stderr,"              dq  : default quality\n");
     fprintf(stderr,"              lq  : low quality\n");
     fprintf(stderr,"              mq  : medium quality\n");
     fprintf(stderr,"              hq  : high quality\n");
     fprintf(stderr,"              vhq : very high quality\n");
-    fprintf(stderr,"    -i    input file sample rate\n");
+    fprintf(stderr,"              dlq : dynamic low quality\n");
+    fprintf(stderr,"              dmq : dynamic medium quality\n");
+    fprintf(stderr,"              dhq : dynamic high quality\n");
+    fprintf(stderr,"    -i    input file sample rate (ignored if input file is specified)\n");
     fprintf(stderr,"    -o    output file sample rate\n");
     return -1;
 }
@@ -81,7 +57,8 @@
 int main(int argc, char* argv[]) {
 
     const char* const progname = argv[0];
-    bool profiling = false;
+    bool profileResample = false;
+    bool profileFilter = false;
     bool writeHeader = false;
     int channels = 1;
     int input_freq = 0;
@@ -89,14 +66,20 @@
     AudioResampler::src_quality quality = AudioResampler::DEFAULT_QUALITY;
 
     int ch;
-    while ((ch = getopt(argc, argv, "phsq:i:o:")) != -1) {
+    while ((ch = getopt(argc, argv, "pfhvsq:i:o:")) != -1) {
         switch (ch) {
         case 'p':
-            profiling = true;
+            profileResample = true;
+            break;
+        case 'f':
+            profileFilter = true;
             break;
         case 'h':
             writeHeader = true;
             break;
+        case 'v':
+            gVerbose = true;
+            break;
         case 's':
             channels = 2;
             break;
@@ -111,6 +94,12 @@
                 quality = AudioResampler::HIGH_QUALITY;
             else if (!strcmp(optarg, "vhq"))
                 quality = AudioResampler::VERY_HIGH_QUALITY;
+            else if (!strcmp(optarg, "dlq"))
+                quality = AudioResampler::DYN_LOW_QUALITY;
+            else if (!strcmp(optarg, "dmq"))
+                quality = AudioResampler::DYN_MED_QUALITY;
+            else if (!strcmp(optarg, "dhq"))
+                quality = AudioResampler::DYN_HIGH_QUALITY;
             else {
                 usage(progname);
                 return -1;
@@ -148,25 +137,22 @@
     size_t input_size;
     void* input_vaddr;
     if (argc == 2) {
-        struct stat st;
-        if (stat(file_in, &st) < 0) {
-            fprintf(stderr, "stat: %s\n", strerror(errno));
-            return -1;
+        SF_INFO info;
+        info.format = 0;
+        SNDFILE *sf = sf_open(file_in, SFM_READ, &info);
+        if (sf == NULL) {
+            perror(file_in);
+            return EXIT_FAILURE;
         }
-
-        int input_fd = open(file_in, O_RDONLY);
-        if (input_fd < 0) {
-            fprintf(stderr, "open: %s\n", strerror(errno));
-            return -1;
-        }
-
-        input_size = st.st_size;
-        input_vaddr = mmap(0, input_size, PROT_READ, MAP_PRIVATE, input_fd, 0);
-        if (input_vaddr == MAP_FAILED ) {
-            fprintf(stderr, "mmap: %s\n", strerror(errno));
-            return -1;
-        }
+        input_size = info.frames * info.channels * sizeof(short);
+        input_vaddr = malloc(input_size);
+        (void) sf_readf_short(sf, (short *) input_vaddr, info.frames);
+        sf_close(sf);
+        channels = info.channels;
+        input_freq = info.samplerate;
     } else {
+        // data for testing is exactly (input sampling rate/1000)/2 seconds
+        // so a 44.1 kHz input is 22.05 seconds
         double k = 1000; // Hz / s
         double time = (input_freq / 2) / k;
         size_t input_frames = size_t(input_freq * time);
@@ -178,7 +164,7 @@
             double y = sin(M_PI * k * t * t);
             int16_t yi = floor(y * 32767.0 + 0.5);
             for (size_t j=0 ; j<(size_t)channels ; j++) {
-                in[i*channels + j] = yi / (1+j);
+                in[i*channels + j] = yi / (1+j); // right ch. 1/2 left ch.
             }
         }
     }
@@ -186,89 +172,238 @@
     // ----------------------------------------------------------
 
     class Provider: public AudioBufferProvider {
-        int16_t* mAddr;
-        size_t mNumFrames;
+        int16_t* const  mAddr;      // base address
+        const size_t    mNumFrames; // total frames
+        const int       mChannels;
+        size_t          mNextFrame; // index of next frame to provide
+        size_t          mUnrel;     // number of frames not yet released
     public:
-        Provider(const void* addr, size_t size, int channels) {
-            mAddr = (int16_t*) addr;
-            mNumFrames = size / (channels*sizeof(int16_t));
+        Provider(const void* addr, size_t size, int channels)
+          : mAddr((int16_t*) addr),
+            mNumFrames(size / (channels*sizeof(int16_t))),
+            mChannels(channels),
+            mNextFrame(0), mUnrel(0) {
         }
         virtual status_t getNextBuffer(Buffer* buffer,
                 int64_t pts = kInvalidPTS) {
-            buffer->frameCount = mNumFrames;
-            buffer->i16 = mAddr;
-            return NO_ERROR;
+            (void)pts; // suppress warning
+            size_t requestedFrames = buffer->frameCount;
+            if (requestedFrames > mNumFrames - mNextFrame) {
+                buffer->frameCount = mNumFrames - mNextFrame;
+            }
+            if (gVerbose) {
+                printf("getNextBuffer() requested %u frames out of %u frames available,"
+                        " and returned %u frames\n",
+                        requestedFrames, mNumFrames - mNextFrame, buffer->frameCount);
+            }
+            mUnrel = buffer->frameCount;
+            if (buffer->frameCount > 0) {
+                buffer->i16 = &mAddr[mChannels * mNextFrame];
+                return NO_ERROR;
+            } else {
+                buffer->i16 = NULL;
+                return NOT_ENOUGH_DATA;
+            }
         }
         virtual void releaseBuffer(Buffer* buffer) {
+            if (buffer->frameCount > mUnrel) {
+                fprintf(stderr, "ERROR releaseBuffer() released %u frames but only %u available "
+                        "to release\n", buffer->frameCount, mUnrel);
+                mNextFrame += mUnrel;
+                mUnrel = 0;
+            } else {
+                if (gVerbose) {
+                    printf("releaseBuffer() released %u frames out of %u frames available "
+                            "to release\n", buffer->frameCount, mUnrel);
+                }
+                mNextFrame += buffer->frameCount;
+                mUnrel -= buffer->frameCount;
+            }
+            buffer->frameCount = 0;
+            buffer->i16 = NULL;
+        }
+        void reset() {
+            mNextFrame = 0;
         }
     } provider(input_vaddr, input_size, channels);
 
     size_t input_frames = input_size / (channels * sizeof(int16_t));
+    if (gVerbose) {
+        printf("%u input frames\n", input_frames);
+    }
     size_t output_size = 2 * 4 * ((int64_t) input_frames * output_freq) / input_freq;
     output_size &= ~7; // always stereo, 32-bits
 
-    void* output_vaddr = malloc(output_size);
-
-    if (profiling) {
+    if (profileFilter) {
+        // Check how fast sample rate changes are that require filter changes.
+        // Each sample rate change must imply a downsampling ratio and must
+        // differ from the previous rate by more than 10%.
+        //
+        // On fast devices, filters should be generated between 0.1ms - 1ms.
+        // (single threaded).
         AudioResampler* resampler = AudioResampler::create(16, channels,
-                output_freq, quality);
-
-        size_t out_frames = output_size/8;
-        resampler->setSampleRate(input_freq);
-        resampler->setVolume(0x1000, 0x1000);
-
-        memset(output_vaddr, 0, output_size);
+                8000, quality);
+        int looplimit = 100;
         timespec start, end;
         clock_gettime(CLOCK_MONOTONIC, &start);
-        resampler->resample((int*) output_vaddr, out_frames, &provider);
-        resampler->resample((int*) output_vaddr, out_frames, &provider);
-        resampler->resample((int*) output_vaddr, out_frames, &provider);
-        resampler->resample((int*) output_vaddr, out_frames, &provider);
+        for (int i = 0; i < looplimit; ++i) {
+            resampler->setSampleRate(9000);
+            resampler->setSampleRate(12000);
+            resampler->setSampleRate(20000);
+            resampler->setSampleRate(30000);
+        }
         clock_gettime(CLOCK_MONOTONIC, &end);
         int64_t start_ns = start.tv_sec * 1000000000LL + start.tv_nsec;
         int64_t end_ns = end.tv_sec * 1000000000LL + end.tv_nsec;
-        int64_t time = (end_ns - start_ns)/4;
-        printf("%f Mspl/s\n", out_frames/(time/1e9)/1e6);
+        int64_t time = end_ns - start_ns;
+        printf("%.2f sample rate changes with filter calculation/sec\n",
+                looplimit * 4 / (time / 1e9));
 
+        // Check how fast sample rate changes are without filter changes.
+        // This should be very fast, probably 0.1us - 1us per sample rate
+        // change.
+        resampler->setSampleRate(1000);
+        looplimit = 1000;
+        clock_gettime(CLOCK_MONOTONIC, &start);
+        for (int i = 0; i < looplimit; ++i) {
+            resampler->setSampleRate(1000+i);
+        }
+        clock_gettime(CLOCK_MONOTONIC, &end);
+        start_ns = start.tv_sec * 1000000000LL + start.tv_nsec;
+        end_ns = end.tv_sec * 1000000000LL + end.tv_nsec;
+        time = end_ns - start_ns;
+        printf("%.2f sample rate changes without filter calculation/sec\n",
+                looplimit / (time / 1e9));
+        resampler->reset();
         delete resampler;
     }
 
+    void* output_vaddr = malloc(output_size);
     AudioResampler* resampler = AudioResampler::create(16, channels,
             output_freq, quality);
     size_t out_frames = output_size/8;
+
+    /* set volume precision to 12 bits, so the volume scale is 1<<12.
+     * This means the "integer" part fits in the Q19.12 precision
+     * representation of output int32_t.
+     *
+     * Generally 0 < volumePrecision <= 14 (due to the limits of
+     * int16_t values for Volume). volumePrecision cannot be 0 due
+     * to rounding and shifts.
+     */
+    const int volumePrecision = 12; // in bits
+
     resampler->setSampleRate(input_freq);
-    resampler->setVolume(0x1000, 0x1000);
+    resampler->setVolume(1 << volumePrecision, 1 << volumePrecision);
+
+    if (profileResample) {
+        /*
+         * For profiling on mobile devices, upon experimentation
+         * it is better to run a few trials with a shorter loop limit,
+         * and take the minimum time.
+         *
+         * Long tests can cause CPU temperature to build up and thermal throttling
+         * to reduce CPU frequency.
+         *
+         * For frequency checks (index=0, or 1, etc.):
+         * "cat /sys/devices/system/cpu/cpu${index}/cpufreq/scaling_*_freq"
+         *
+         * For temperature checks (index=0, or 1, etc.):
+         * "cat /sys/class/thermal/thermal_zone${index}/temp"
+         *
+         * Another way to avoid thermal throttling is to fix the CPU frequency
+         * at a lower level which prevents excessive temperatures.
+         */
+        const int trials = 4;
+        const int looplimit = 4;
+        timespec start, end;
+        int64_t time;
+
+        for (int n = 0; n < trials; ++n) {
+            clock_gettime(CLOCK_MONOTONIC, &start);
+            for (int i = 0; i < looplimit; ++i) {
+                resampler->resample((int*) output_vaddr, out_frames, &provider);
+                provider.reset(); //  during benchmarking reset only the provider
+            }
+            clock_gettime(CLOCK_MONOTONIC, &end);
+            int64_t start_ns = start.tv_sec * 1000000000LL + start.tv_nsec;
+            int64_t end_ns = end.tv_sec * 1000000000LL + end.tv_nsec;
+            int64_t diff_ns = end_ns - start_ns;
+            if (n == 0 || diff_ns < time) {
+                time = diff_ns;   // save the best out of our trials.
+            }
+        }
+        // Mfrms/s is "Millions of output frames per second".
+        printf("quality: %d  channels: %d  msec: %lld  Mfrms/s: %.2lf\n",
+                quality, channels, time/1000000, out_frames * looplimit / (time / 1e9) / 1e6);
+        resampler->reset();
+    }
 
     memset(output_vaddr, 0, output_size);
+    if (gVerbose) {
+        printf("resample() %u output frames\n", out_frames);
+    }
     resampler->resample((int*) output_vaddr, out_frames, &provider);
+    if (gVerbose) {
+        printf("resample() complete\n");
+    }
+    resampler->reset();
+    if (gVerbose) {
+        printf("reset() complete\n");
+    }
+    delete resampler;
+    resampler = NULL;
 
-    // down-mix (we just truncate and keep the left channel)
+    // mono takes left channel only
+    // stereo right channel is half the amplitude of the left channel (due to input creation)
     int32_t* out = (int32_t*) output_vaddr;
     int16_t* convert = (int16_t*) malloc(out_frames * channels * sizeof(int16_t));
+
+    // round to half towards zero and saturate at int16 (non-dithered)
+    const int roundVal = (1<<(volumePrecision-1)) - 1; // volumePrecision > 0
+
     for (size_t i = 0; i < out_frames; i++) {
-        for (int j=0 ; j<channels ; j++) {
-            int32_t s = out[i * 2 + j] >> 12;
-            if (s > 32767)       s =  32767;
-            else if (s < -32768) s = -32768;
+        for (int j = 0; j < channels; j++) {
+            int32_t s = out[i * 2 + j] + roundVal; // add offset here
+            if (s < 0) {
+                s = (s + 1) >> volumePrecision; // round to 0
+                if (s < -32768) {
+                    s = -32768;
+                }
+            } else {
+                s = s >> volumePrecision;
+                if (s > 32767) {
+                    s = 32767;
+                }
+            }
             convert[i * channels + j] = int16_t(s);
         }
     }
 
     // write output to disk
-    int output_fd = open(file_out, O_WRONLY | O_CREAT | O_TRUNC,
-            S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
-    if (output_fd < 0) {
-        fprintf(stderr, "open: %s\n", strerror(errno));
-        return -1;
-    }
-
     if (writeHeader) {
-        HeaderWav wav(out_frames * channels * sizeof(int16_t), channels, output_freq, 16);
-        write(output_fd, &wav, sizeof(wav));
+        SF_INFO info;
+        info.frames = 0;
+        info.samplerate = output_freq;
+        info.channels = channels;
+        info.format = SF_FORMAT_WAV | SF_FORMAT_PCM_16;
+        SNDFILE *sf = sf_open(file_out, SFM_WRITE, &info);
+        if (sf == NULL) {
+            perror(file_out);
+            return EXIT_FAILURE;
+        }
+        (void) sf_writef_short(sf, convert, out_frames);
+        sf_close(sf);
+    } else {
+        int output_fd = open(file_out, O_WRONLY | O_CREAT | O_TRUNC,
+                S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
+        if (output_fd < 0) {
+            perror(file_out);
+            return EXIT_FAILURE;
+        }
+        write(output_fd, convert, out_frames * channels * sizeof(int16_t));
+        close(output_fd);
     }
 
-    write(output_fd, convert, out_frames * channels * sizeof(int16_t));
-    close(output_fd);
-
-    return 0;
+    return EXIT_SUCCESS;
 }
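
The final conversion loop in test-resample.cpp rounds the Q19.12 resampler output half towards zero before saturating to 16 bits. The same arithmetic, isolated into a standalone helper with volumePrecision fixed at 12 as in the test:

    #include <stdint.h>

    static int16_t q12_to_i16(int32_t q)
    {
        const int volumePrecision = 12;
        const int32_t roundVal = (1 << (volumePrecision - 1)) - 1;  // 2047
        int32_t s = q + roundVal;
        if (s < 0) {
            s = (s + 1) >> volumePrecision;   // negative values round towards zero
            if (s < -32768) s = -32768;
        } else {
            s = s >> volumePrecision;
            if (s > 32767) s = 32767;
        }
        return (int16_t)s;
    }

    // e.g. q12_to_i16(1 << 12) == 1, q12_to_i16(-(1 << 12)) == -1, and both
    // q12_to_i16(2048) and q12_to_i16(-2048) (exactly +/-0.5) round to 0.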
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 87027f7..79fbf76 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -32,6 +32,7 @@
 #include <gui/Surface.h>
 #include <hardware/hardware.h>
 #include <media/AudioSystem.h>
+#include <media/IMediaHTTPService.h>
 #include <media/mediaplayer.h>
 #include <utils/Errors.h>
 #include <utils/Log.h>
@@ -866,7 +867,7 @@
 
 MediaPlayer* CameraService::newMediaPlayer(const char *file) {
     MediaPlayer* mp = new MediaPlayer();
-    if (mp->setDataSource(file, NULL) == NO_ERROR) {
+    if (mp->setDataSource(NULL /* httpService */, file, NULL) == NO_ERROR) {
         mp->setAudioStreamType(AUDIO_STREAM_ENFORCED_AUDIBLE);
         mp->prepare();
     } else {
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index ba1e772..0a88a75 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -238,7 +238,7 @@
 
     result.append("    Scene mode: ");
     switch (p.sceneMode) {
-        case ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED:
+        case ANDROID_CONTROL_SCENE_MODE_DISABLED:
             result.append("AUTO\n"); break;
         CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_ACTION)
         CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PORTRAIT)
@@ -732,6 +732,7 @@
         return OK;
     }
     params.state = Parameters::STOPPED;
+    int lastPreviewStreamId = mStreamingProcessor->getPreviewStreamId();
 
     res = mStreamingProcessor->updatePreviewStream(params);
     if (res != OK) {
@@ -740,6 +741,8 @@
         return res;
     }
 
+    bool previewStreamChanged = mStreamingProcessor->getPreviewStreamId() != lastPreviewStreamId;
+
     // We could wait to create the JPEG output stream until first actual use
     // (first takePicture call). However, this would substantially increase the
     // first capture latency on HAL3 devices, and potentially on some HAL2
@@ -789,6 +792,19 @@
             return res;
         }
         outputStreams.push(getCallbackStreamId());
+    } else if (previewStreamChanged && mCallbackProcessor->getStreamId() != NO_STREAM) {
+        /**
+         * Delete the unused callback stream when preview stream is changed and
+         * preview is not enabled. There is no need to stop the preview stream as
+         * preview is already in the STOPPED state at this point.
+         */
+        ALOGV("%s: Camera %d: Delete unused preview callback stream.",  __FUNCTION__, mCameraId);
+        res = mCallbackProcessor->deleteStream();
+        if (res != OK) {
+            ALOGE("%s: Camera %d: Unable to delete callback stream %s (%d)",
+                    __FUNCTION__, mCameraId, strerror(-res), res);
+            return res;
+        }
     }
     if (params.zslMode && !params.recordingHint) {
         res = updateProcessorStream(mZslProcessor, params);
@@ -798,6 +814,8 @@
             return res;
         }
         outputStreams.push(getZslStreamId());
+    } else {
+        mZslProcessor->deleteStream();
     }
 
     outputStreams.push(getPreviewStreamId());
@@ -1144,7 +1162,7 @@
          * Handle quirk mode for AF in scene modes
          */
         if (l.mParameters.quirks.triggerAfWithAuto &&
-                l.mParameters.sceneMode != ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED &&
+                l.mParameters.sceneMode != ANDROID_CONTROL_SCENE_MODE_DISABLED &&
                 l.mParameters.focusMode != Parameters::FOCUS_MODE_AUTO &&
                 !l.mParameters.focusingAreas[0].isEmpty()) {
             ALOGV("%s: Quirk: Switching from focusMode %d to AUTO",
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
index 77d5c8a..ec81456 100644
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
@@ -200,51 +200,60 @@
     ATRACE_CALL();
     status_t res;
     sp<Camera2Heap> captureHeap;
+    sp<MemoryBase> captureBuffer;
 
     CpuConsumer::LockedBuffer imgBuffer;
 
-    res = mCaptureConsumer->lockNextBuffer(&imgBuffer);
-    if (res != OK) {
-        if (res != BAD_VALUE) {
-            ALOGE("%s: Camera %d: Error receiving still image buffer: "
-                    "%s (%d)", __FUNCTION__,
-                    mId, strerror(-res), res);
+    {
+        Mutex::Autolock l(mInputMutex);
+        if (mCaptureStreamId == NO_STREAM) {
+            ALOGW("%s: Camera %d: No stream is available", __FUNCTION__, mId);
+            return INVALID_OPERATION;
         }
-        return res;
-    }
 
-    ALOGV("%s: Camera %d: Still capture available", __FUNCTION__,
-            mId);
+        res = mCaptureConsumer->lockNextBuffer(&imgBuffer);
+        if (res != OK) {
+            if (res != BAD_VALUE) {
+                ALOGE("%s: Camera %d: Error receiving still image buffer: "
+                        "%s (%d)", __FUNCTION__,
+                        mId, strerror(-res), res);
+            }
+            return res;
+        }
 
-    if (imgBuffer.format != HAL_PIXEL_FORMAT_BLOB) {
-        ALOGE("%s: Camera %d: Unexpected format for still image: "
-                "%x, expected %x", __FUNCTION__, mId,
-                imgBuffer.format,
-                HAL_PIXEL_FORMAT_BLOB);
+        ALOGV("%s: Camera %d: Still capture available", __FUNCTION__,
+                mId);
+
+        if (imgBuffer.format != HAL_PIXEL_FORMAT_BLOB) {
+            ALOGE("%s: Camera %d: Unexpected format for still image: "
+                    "%x, expected %x", __FUNCTION__, mId,
+                    imgBuffer.format,
+                    HAL_PIXEL_FORMAT_BLOB);
+            mCaptureConsumer->unlockBuffer(imgBuffer);
+            return OK;
+        }
+
+        // Find size of JPEG image
+        size_t jpegSize = findJpegSize(imgBuffer.data, imgBuffer.width);
+        if (jpegSize == 0) { // failed to find size, default to whole buffer
+            jpegSize = imgBuffer.width;
+        }
+        size_t heapSize = mCaptureHeap->getSize();
+        if (jpegSize > heapSize) {
+            ALOGW("%s: JPEG image is larger than expected, truncating "
+                    "(got %d, expected at most %d bytes)",
+                    __FUNCTION__, jpegSize, heapSize);
+            jpegSize = heapSize;
+        }
+
+        // TODO: Optimize this to avoid memcopy
+        captureBuffer = new MemoryBase(mCaptureHeap, 0, jpegSize);
+        void* captureMemory = mCaptureHeap->getBase();
+        memcpy(captureMemory, imgBuffer.data, jpegSize);
+
         mCaptureConsumer->unlockBuffer(imgBuffer);
-        return OK;
     }
 
-    // Find size of JPEG image
-    size_t jpegSize = findJpegSize(imgBuffer.data, imgBuffer.width);
-    if (jpegSize == 0) { // failed to find size, default to whole buffer
-        jpegSize = imgBuffer.width;
-    }
-    size_t heapSize = mCaptureHeap->getSize();
-    if (jpegSize > heapSize) {
-        ALOGW("%s: JPEG image is larger than expected, truncating "
-                "(got %d, expected at most %d bytes)",
-                __FUNCTION__, jpegSize, heapSize);
-        jpegSize = heapSize;
-    }
-
-    // TODO: Optimize this to avoid memcopy
-    sp<MemoryBase> captureBuffer = new MemoryBase(mCaptureHeap, 0, jpegSize);
-    void* captureMemory = mCaptureHeap->getBase();
-    memcpy(captureMemory, imgBuffer.data, jpegSize);
-
-    mCaptureConsumer->unlockBuffer(imgBuffer);
-
     sp<CaptureSequencer> sequencer = mSequencer.promote();
     if (sequencer != 0) {
         sequencer->onCaptureAvailable(imgBuffer.timestamp, captureBuffer);
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index 08af566..2cf0d29 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -466,7 +466,7 @@
                 supportedAntibanding);
     }
 
-    sceneMode = ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED;
+    sceneMode = ANDROID_CONTROL_SCENE_MODE_DISABLED;
     params.set(CameraParameters::KEY_SCENE_MODE,
             CameraParameters::SCENE_MODE_AUTO);
 
@@ -482,7 +482,7 @@
             if (addComma) supportedSceneModes += ",";
             addComma = true;
             switch (availableSceneModes.data.u8[i]) {
-                case ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED:
+                case ANDROID_CONTROL_SCENE_MODE_DISABLED:
                     noSceneModes = true;
                     break;
                 case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY:
@@ -664,13 +664,13 @@
     focusState = ANDROID_CONTROL_AF_STATE_INACTIVE;
     shadowFocusMode = FOCUS_MODE_INVALID;
 
-    camera_metadata_ro_entry_t max3aRegions =
-        staticInfo(ANDROID_CONTROL_MAX_REGIONS, 1, 1);
-    if (!max3aRegions.count) return NO_INIT;
+    camera_metadata_ro_entry_t max3aRegions = staticInfo(ANDROID_CONTROL_MAX_REGIONS,
+            Parameters::NUM_REGION, Parameters::NUM_REGION);
+    if (max3aRegions.count != Parameters::NUM_REGION) return NO_INIT;
 
     int32_t maxNumFocusAreas = 0;
     if (focusMode != Parameters::FOCUS_MODE_FIXED) {
-        maxNumFocusAreas = max3aRegions.data.i32[0];
+        maxNumFocusAreas = max3aRegions.data.i32[Parameters::REGION_AF];
     }
     params.set(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS, maxNumFocusAreas);
     params.set(CameraParameters::KEY_FOCUS_AREAS,
@@ -730,7 +730,7 @@
 
     meteringAreas.add(Parameters::Area(0, 0, 0, 0, 0));
     params.set(CameraParameters::KEY_MAX_NUM_METERING_AREAS,
-            max3aRegions.data.i32[0]);
+            max3aRegions.data.i32[Parameters::REGION_AE]);
     params.set(CameraParameters::KEY_METERING_AREAS,
             "(0,0,0,0,0)");
 
@@ -1446,7 +1446,7 @@
         newParams.get(CameraParameters::KEY_SCENE_MODE) );
     if (validatedParams.sceneMode != sceneMode &&
             validatedParams.sceneMode !=
-            ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED) {
+            ANDROID_CONTROL_SCENE_MODE_DISABLED) {
         camera_metadata_ro_entry_t availableSceneModes =
             staticInfo(ANDROID_CONTROL_AVAILABLE_SCENE_MODES);
         for (i = 0; i < availableSceneModes.count; i++) {
@@ -1461,7 +1461,7 @@
         }
     }
     bool sceneModeSet =
-            validatedParams.sceneMode != ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED;
+            validatedParams.sceneMode != ANDROID_CONTROL_SCENE_MODE_DISABLED;
 
     // FLASH_MODE
     if (sceneModeSet) {
@@ -1591,10 +1591,11 @@
     // FOCUS_AREAS
     res = parseAreas(newParams.get(CameraParameters::KEY_FOCUS_AREAS),
             &validatedParams.focusingAreas);
-    size_t max3aRegions =
-        (size_t)staticInfo(ANDROID_CONTROL_MAX_REGIONS, 1, 1).data.i32[0];
+    size_t maxAfRegions = (size_t)staticInfo(ANDROID_CONTROL_MAX_REGIONS,
+              Parameters::NUM_REGION, Parameters::NUM_REGION).
+              data.i32[Parameters::REGION_AF];
     if (res == OK) res = validateAreas(validatedParams.focusingAreas,
-            max3aRegions, AREA_KIND_FOCUS);
+            maxAfRegions, AREA_KIND_FOCUS);
     if (res != OK) {
         ALOGE("%s: Requested focus areas are malformed: %s",
                 __FUNCTION__, newParams.get(CameraParameters::KEY_FOCUS_AREAS));
@@ -1624,10 +1625,13 @@
         newParams.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK));
 
     // METERING_AREAS
+    size_t maxAeRegions = (size_t)staticInfo(ANDROID_CONTROL_MAX_REGIONS,
+            Parameters::NUM_REGION, Parameters::NUM_REGION).
+            data.i32[Parameters::REGION_AE];
     res = parseAreas(newParams.get(CameraParameters::KEY_METERING_AREAS),
             &validatedParams.meteringAreas);
     if (res == OK) {
-        res = validateAreas(validatedParams.meteringAreas, max3aRegions,
+        res = validateAreas(validatedParams.meteringAreas, maxAeRegions,
                             AREA_KIND_METERING);
     }
     if (res != OK) {
@@ -1776,7 +1780,7 @@
     // (face detection statistics and face priority scene mode). Map from other
     // to the other.
     bool sceneModeActive =
-            sceneMode != (uint8_t)ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED;
+            sceneMode != (uint8_t)ANDROID_CONTROL_SCENE_MODE_DISABLED;
     uint8_t reqControlMode = ANDROID_CONTROL_MODE_AUTO;
     if (enableFaceDetect || sceneModeActive) {
         reqControlMode = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
@@ -1788,7 +1792,7 @@
     uint8_t reqSceneMode =
             sceneModeActive ? sceneMode :
             enableFaceDetect ? (uint8_t)ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY :
-            (uint8_t)ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED;
+            (uint8_t)ANDROID_CONTROL_SCENE_MODE_DISABLED;
     res = request->update(ANDROID_CONTROL_SCENE_MODE,
             &reqSceneMode, 1);
     if (res != OK) return res;
@@ -1858,23 +1862,23 @@
 
     size_t reqFocusingAreasSize = focusingAreas.size() * 5;
     int32_t *reqFocusingAreas = new int32_t[reqFocusingAreasSize];
-    for (size_t i = 0; i < reqFocusingAreasSize; i += 5) {
-        if (focusingAreas[i].weight != 0) {
+    for (size_t i = 0, j = 0; i < reqFocusingAreasSize; i += 5, j++) {
+        if (focusingAreas[j].weight != 0) {
             reqFocusingAreas[i + 0] =
-                    normalizedXToArray(focusingAreas[i].left);
+                    normalizedXToArray(focusingAreas[j].left);
             reqFocusingAreas[i + 1] =
-                    normalizedYToArray(focusingAreas[i].top);
+                    normalizedYToArray(focusingAreas[j].top);
             reqFocusingAreas[i + 2] =
-                    normalizedXToArray(focusingAreas[i].right);
+                    normalizedXToArray(focusingAreas[j].right);
             reqFocusingAreas[i + 3] =
-                    normalizedYToArray(focusingAreas[i].bottom);
+                    normalizedYToArray(focusingAreas[j].bottom);
         } else {
             reqFocusingAreas[i + 0] = 0;
             reqFocusingAreas[i + 1] = 0;
             reqFocusingAreas[i + 2] = 0;
             reqFocusingAreas[i + 3] = 0;
         }
-        reqFocusingAreas[i + 4] = focusingAreas[i].weight;
+        reqFocusingAreas[i + 4] = focusingAreas[j].weight;
     }
     res = request->update(ANDROID_CONTROL_AF_REGIONS,
             reqFocusingAreas, reqFocusingAreasSize);
@@ -1887,28 +1891,45 @@
 
     size_t reqMeteringAreasSize = meteringAreas.size() * 5;
     int32_t *reqMeteringAreas = new int32_t[reqMeteringAreasSize];
-    for (size_t i = 0; i < reqMeteringAreasSize; i += 5) {
-        if (meteringAreas[i].weight != 0) {
+    for (size_t i = 0, j = 0; i < reqMeteringAreasSize; i += 5, j++) {
+        if (meteringAreas[j].weight != 0) {
             reqMeteringAreas[i + 0] =
-                normalizedXToArray(meteringAreas[i].left);
+                normalizedXToArray(meteringAreas[j].left);
             reqMeteringAreas[i + 1] =
-                normalizedYToArray(meteringAreas[i].top);
+                normalizedYToArray(meteringAreas[j].top);
             reqMeteringAreas[i + 2] =
-                normalizedXToArray(meteringAreas[i].right);
+                normalizedXToArray(meteringAreas[j].right);
             reqMeteringAreas[i + 3] =
-                normalizedYToArray(meteringAreas[i].bottom);
+                normalizedYToArray(meteringAreas[j].bottom);
         } else {
             reqMeteringAreas[i + 0] = 0;
             reqMeteringAreas[i + 1] = 0;
             reqMeteringAreas[i + 2] = 0;
             reqMeteringAreas[i + 3] = 0;
         }
-        reqMeteringAreas[i + 4] = meteringAreas[i].weight;
+        reqMeteringAreas[i + 4] = meteringAreas[j].weight;
     }
     res = request->update(ANDROID_CONTROL_AE_REGIONS,
             reqMeteringAreas, reqMeteringAreasSize);
     if (res != OK) return res;
 
+    // Set awb regions to be the same as the metering regions if allowed
+    size_t maxAwbRegions = (size_t)staticInfo(ANDROID_CONTROL_MAX_REGIONS,
+            Parameters::NUM_REGION, Parameters::NUM_REGION).
+            data.i32[Parameters::REGION_AWB];
+    if (maxAwbRegions > 0) {
+        if (maxAwbRegions >= meteringAreas.size()) {
+            res = request->update(ANDROID_CONTROL_AWB_REGIONS,
+                    reqMeteringAreas, reqMeteringAreasSize);
+        } else {
+            // More metering regions than the AWB limit allows; send a single zeroed AWB region instead.
+            int32_t zeroedAwbAreas[5] = {0, 0, 0, 0, 0};
+            res = request->update(ANDROID_CONTROL_AWB_REGIONS,
+                    zeroedAwbAreas, sizeof(zeroedAwbAreas)/sizeof(int32_t));
+        }
+        if (res != OK) return res;
+    }
+
     delete[] reqMeteringAreas;
 
     /* don't include jpeg thumbnail size - it's valid for
@@ -2149,9 +2170,9 @@
 int Parameters::sceneModeStringToEnum(const char *sceneMode) {
     return
         !sceneMode ?
-            ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED :
+            ANDROID_CONTROL_SCENE_MODE_DISABLED :
         !strcmp(sceneMode, CameraParameters::SCENE_MODE_AUTO) ?
-            ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED :
+            ANDROID_CONTROL_SCENE_MODE_DISABLED :
         !strcmp(sceneMode, CameraParameters::SCENE_MODE_ACTION) ?
             ANDROID_CONTROL_SCENE_MODE_ACTION :
         !strcmp(sceneMode, CameraParameters::SCENE_MODE_PORTRAIT) ?
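
The loops changed above pack each 3A area into five consecutive int32 values (left, top, right, bottom, weight), so the output index must advance by five while the area index advances by one; reusing a single index read past the areas vector after the first entry. A minimal standalone sketch of the corrected packing, assuming a hypothetical Area struct and an identity coordinate mapping in place of normalizedXToArray/normalizedYToArray:

    #include <cstdint>
    #include <vector>

    struct Area { int left, top, right, bottom, weight; }; // stand-in for Parameters::Area

    // Pack areas as [left, top, right, bottom, weight] quintuples; a zero weight
    // means "no region" and the rectangle is zeroed out as well.
    static std::vector<int32_t> packAreas(const std::vector<Area>& areas) {
        std::vector<int32_t> out(areas.size() * 5, 0);
        for (size_t i = 0, j = 0; i < out.size(); i += 5, j++) {
            if (areas[j].weight != 0) {
                out[i + 0] = areas[j].left;
                out[i + 1] = areas[j].top;
                out[i + 2] = areas[j].right;
                out[i + 3] = areas[j].bottom;
            }
            out[i + 4] = areas[j].weight;
        }
        return out;
    }
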
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h
index 32dbd42..60c4687 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.h
+++ b/services/camera/libcameraservice/api1/client2/Parameters.h
@@ -113,6 +113,14 @@
     bool autoExposureLock;
     bool autoWhiteBalanceLock;
 
+    // 3A region types, for use with ANDROID_CONTROL_MAX_REGIONS
+    enum region_t {
+        REGION_AE = 0,
+        REGION_AWB,
+        REGION_AF,
+        NUM_REGION // Number of region types
+    } region;
+
     Vector<Area> meteringAreas;
 
     int zoom;
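
ANDROID_CONTROL_MAX_REGIONS carries one limit per 3A routine rather than a single shared count, which is why Parameters.cpp above now reads separate AE and AF maxima (and an AWB maximum when populating AWB regions). A minimal sketch of the lookup, assuming the entry holds NUM_REGION int32 values in the (AE, AWB, AF) order declared by region_t:

    #include <cstddef>
    #include <cstdint>

    enum region_t { REGION_AE = 0, REGION_AWB, REGION_AF, NUM_REGION };

    // maxRegions points at the NUM_REGION values of ANDROID_CONTROL_MAX_REGIONS.
    static size_t maxRegionsFor(region_t type, const int32_t maxRegions[NUM_REGION]) {
        return static_cast<size_t>(maxRegions[type]);
    }
    // e.g. maxRegionsFor(REGION_AF, entry.data.i32) replaces the old single
    // max3aRegions value when validating focus areas.
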
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
index 6b4e57a..c1d0496 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
@@ -273,6 +273,15 @@
             return INVALID_OPERATION;
         }
 
+        // Flush device to clear out all in-flight requests pending in HAL.
+        res = client->getCameraDevice()->flush();
+        if (res != OK) {
+            ALOGE("%s: Camera %d: Failed to flush device: "
+                "%s (%d)",
+                __FUNCTION__, client->getCameraId(), strerror(-res), res);
+            return res;
+        }
+
         // Update JPEG settings
         {
             SharedParameters::Lock l(client->getParameters());
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 1cdf8dc..187220e 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -635,26 +635,56 @@
         return INVALID_OPERATION;
     }
 
+    camera_metadata_ro_entry_t entryFacing = staticInfo.find(ANDROID_LENS_FACING);
+    if (entryFacing.count == 0) {
+        ALOGE("%s: Camera %d: Can't find android.lens.facing in "
+                "static metadata!", __FUNCTION__, mCameraId);
+        return INVALID_OPERATION;
+    }
+
     int32_t& flags = *transform;
 
+    bool mirror = (entryFacing.data.u8[0] == ANDROID_LENS_FACING_FRONT);
     int orientation = entry.data.i32[0];
-    switch (orientation) {
-        case 0:
-            flags = 0;
-            break;
-        case 90:
-            flags = NATIVE_WINDOW_TRANSFORM_ROT_90;
-            break;
-        case 180:
-            flags = NATIVE_WINDOW_TRANSFORM_ROT_180;
-            break;
-        case 270:
-            flags = NATIVE_WINDOW_TRANSFORM_ROT_270;
-            break;
-        default:
-            ALOGE("%s: Invalid HAL android.sensor.orientation value: %d",
-                  __FUNCTION__, orientation);
-            return INVALID_OPERATION;
+    if (!mirror) {
+        switch (orientation) {
+            case 0:
+                flags = 0;
+                break;
+            case 90:
+                flags = NATIVE_WINDOW_TRANSFORM_ROT_90;
+                break;
+            case 180:
+                flags = NATIVE_WINDOW_TRANSFORM_ROT_180;
+                break;
+            case 270:
+                flags = NATIVE_WINDOW_TRANSFORM_ROT_270;
+                break;
+            default:
+                ALOGE("%s: Invalid HAL android.sensor.orientation value: %d",
+                      __FUNCTION__, orientation);
+                return INVALID_OPERATION;
+        }
+    } else {
+        switch (orientation) {
+            case 0:
+                flags = HAL_TRANSFORM_FLIP_H;
+                break;
+            case 90:
+                flags = HAL_TRANSFORM_FLIP_H | HAL_TRANSFORM_ROT_90;
+                break;
+            case 180:
+                flags = HAL_TRANSFORM_FLIP_V;
+                break;
+            case 270:
+                flags = HAL_TRANSFORM_FLIP_V | HAL_TRANSFORM_ROT_90;
+                break;
+            default:
+                ALOGE("%s: Invalid HAL android.sensor.orientation value: %d",
+                      __FUNCTION__, orientation);
+                return INVALID_OPERATION;
+        }
+
     }
 
     /**
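
The preview transform above now folds in a horizontal mirror for front-facing cameras before applying the sensor orientation, which is why the mirrored cases combine a flip with a rotation. A compact sketch of the resulting mapping, using placeholder flag values in place of the HAL_TRANSFORM_*/NATIVE_WINDOW_TRANSFORM_* constants the real code takes from the system headers:

    #include <cstdint>

    // Placeholder values for illustration only; the service uses the definitions
    // from the graphics/HAL headers.
    enum {
        FLIP_H  = 0x1,
        FLIP_V  = 0x2,
        ROT_90  = 0x4,
        ROT_180 = FLIP_H | FLIP_V,
        ROT_270 = ROT_180 | ROT_90,
    };

    // Display transform for a given sensor orientation; front-facing cameras are
    // mirrored horizontally first. Returns -1 for an invalid orientation.
    static int32_t previewTransform(bool facingFront, int orientation) {
        switch (orientation) {
            case 0:   return facingFront ? FLIP_H            : 0;
            case 90:  return facingFront ? (FLIP_H | ROT_90) : ROT_90;
            case 180: return facingFront ? FLIP_V            : ROT_180;
            case 270: return facingFront ? (FLIP_V | ROT_90) : ROT_270;
            default:  return -1; // invalid android.sensor.orientation value
        }
    }
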
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 3dbc1b0..da3e121 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -100,8 +100,10 @@
 
     camera3_device_t *device;
 
+    ATRACE_BEGIN("camera3->open");
     res = module->common.methods->open(&module->common, deviceName.string(),
             reinterpret_cast<hw_device_t**>(&device));
+    ATRACE_END();
 
     if (res != OK) {
         SET_ERR_L("Could not open camera: %s (%d)", strerror(-res), res);
@@ -269,7 +271,9 @@
         mStatusTracker.clear();
 
         if (mHal3Device != NULL) {
+            ATRACE_BEGIN("camera3->close");
             mHal3Device->common.close(&mHal3Device->common);
+            ATRACE_END();
             mHal3Device = NULL;
         }
 
@@ -836,16 +840,20 @@
     }
 
     sp<Camera3StreamInterface> deletedStream;
+    ssize_t outputStreamIdx = mOutputStreams.indexOfKey(id);
     if (mInputStream != NULL && id == mInputStream->getId()) {
         deletedStream = mInputStream;
         mInputStream.clear();
     } else {
-        ssize_t idx = mOutputStreams.indexOfKey(id);
-        if (idx == NAME_NOT_FOUND) {
+        if (outputStreamIdx == NAME_NOT_FOUND) {
             CLOGE("Stream %d does not exist", id);
             return BAD_VALUE;
         }
-        deletedStream = mOutputStreams.editValueAt(idx);
+    }
+
+    // Delete output stream or the output part of a bi-directional stream.
+    if (outputStreamIdx != NAME_NOT_FOUND) {
+        deletedStream = mOutputStreams.editValueAt(outputStreamIdx);
         mOutputStreams.removeItem(id);
     }
 
@@ -1120,7 +1128,14 @@
     Mutex::Autolock l(mLock);
 
     mRequestThread->clear();
-    return mHal3Device->ops->flush(mHal3Device);
+    status_t res;
+    if (mHal3Device->common.version >= CAMERA_DEVICE_API_VERSION_3_1) {
+        res = mHal3Device->ops->flush(mHal3Device);
+    } else {
+        res = waitUntilDrained();
+    }
+
+    return res;
 }
 
 /**
@@ -1664,8 +1679,10 @@
             return;
         }
 
-        // Check if everything has arrived for this result (buffers and metadata)
-        if (request.haveResultMetadata && request.numBuffersLeft == 0) {
+        // Check if everything has arrived for this result (buffers and metadata), remove it from
+        // InFlightMap if both arrived or HAL reports error for this request (i.e. during flush).
+        if ((request.requestStatus != OK) ||
+                (request.haveResultMetadata && request.numBuffersLeft == 0)) {
             ATRACE_ASYNC_END("frame capture", frameNumber);
             mInFlightMap.removeItemsAt(idx, 1);
         }
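
The last Camera3Device hunk retires an in-flight entry as soon as the HAL reports an error for it, so a flush() that aborts requests (now version-gated to HAL >= 3.1, with a waitUntilDrained() fallback for older devices) no longer leaves stale entries waiting for buffers that will never arrive. A self-contained sketch of the completion check, with fields modeled on the ones used above:

    #include <cstddef>

    // Per-capture bookkeeping, reduced to the fields the check needs.
    struct InFlightRequest {
        int    requestStatus;      // 0 (OK) unless the HAL reported an error
        bool   haveResultMetadata; // result metadata has arrived
        size_t numBuffersLeft;     // output buffers still outstanding
    };

    // An entry can be removed once everything has arrived, or immediately once
    // the request failed (e.g. when it was aborted by flush()).
    static bool canRemoveInFlight(const InFlightRequest& r) {
        return (r.requestStatus != 0) ||
               (r.haveResultMetadata && r.numBuffersLeft == 0);
    }
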
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
index 5aa9a3e..e1c492b 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
@@ -199,13 +199,33 @@
     assert(mMaxSize == 0);
     assert(camera3_stream::format != HAL_PIXEL_FORMAT_BLOB);
 
-    mTotalBufferCount = BufferQueue::MIN_UNDEQUEUED_BUFFERS +
-                        camera3_stream::max_buffers;
     mDequeuedBufferCount = 0;
     mFrameCount = 0;
 
     if (mConsumer.get() == 0) {
         sp<BufferQueue> bq = new BufferQueue();
+
+        int minUndequeuedBuffers = 0;
+        res = bq->query(NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBuffers);
+        if (res != OK || minUndequeuedBuffers < 0) {
+            ALOGE("%s: Stream %d: Could not query min undequeued buffers (error %d, bufCount %d)",
+                  __FUNCTION__, mId, res, minUndequeuedBuffers);
+            return res;
+        }
+        size_t minBufs = static_cast<size_t>(minUndequeuedBuffers);
+        /*
+         * We promise never to 'acquire' more than camera3_stream::max_buffers
+         * at any one time.
+         *
+         * Boost the number up to meet the minimum required buffer count.
+         *
+         * (Note that this sets consumer-side buffer count only,
+         * and not the sum of producer+consumer side as in other camera streams).
+         */
+        mTotalBufferCount = camera3_stream::max_buffers > minBufs ?
+            camera3_stream::max_buffers : minBufs;
+        // TODO: somehow set the total buffer count when producer connects?
+
         mConsumer = new BufferItemConsumer(bq, camera3_stream::usage,
                                            mTotalBufferCount);
         mConsumer->setName(String8::format("Camera3-InputStream-%d", mId));
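
The input stream used to size its consumer as MIN_UNDEQUEUED_BUFFERS plus max_buffers; it now queries the BufferQueue's actual minimum and keeps whichever of the two counts is larger, since the stream never acquires more than max_buffers at a time. A minimal sketch of that sizing rule (the integer argument stands in for the NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query above):

    #include <algorithm>
    #include <cstddef>

    // Consumer-side buffer count: at least the queue's minimum undequeued count,
    // but never less than the HAL's max_buffers bound on simultaneous acquires.
    static size_t inputStreamBufferCount(size_t halMaxBuffers, int minUndequeuedBuffers) {
        if (minUndequeuedBuffers < 0) minUndequeuedBuffers = 0; // defensive: bad query result
        return std::max(halMaxBuffers, static_cast<size_t>(minUndequeuedBuffers));
    }
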
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.h b/services/camera/libcameraservice/device3/Camera3InputStream.h
index 681d684..ae49467 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.h
@@ -44,6 +44,8 @@
 
     virtual void     dump(int fd, const Vector<String16> &args) const;
 
+    // TODO: expose an interface to get the IGraphicBufferProducer
+
   private:
 
     typedef BufferItemConsumer::BufferItem BufferItem;
diff --git a/services/camera/libcameraservice/device3/Camera3ZslStream.cpp b/services/camera/libcameraservice/device3/Camera3ZslStream.cpp
index 1a54923..e509350 100644
--- a/services/camera/libcameraservice/device3/Camera3ZslStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3ZslStream.cpp
@@ -109,14 +109,14 @@
 } // namespace anonymous
 
 Camera3ZslStream::Camera3ZslStream(int id, uint32_t width, uint32_t height,
-        int depth) :
+        int bufferCount) :
         Camera3OutputStream(id, CAMERA3_STREAM_BIDIRECTIONAL,
                             width, height,
                             HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED),
-        mDepth(depth) {
+        mDepth(bufferCount) {
 
     sp<BufferQueue> bq = new BufferQueue();
-    mProducer = new RingBufferConsumer(bq, GRALLOC_USAGE_HW_CAMERA_ZSL, depth);
+    mProducer = new RingBufferConsumer(bq, GRALLOC_USAGE_HW_CAMERA_ZSL, bufferCount);
     mConsumer = new Surface(bq);
 }
 
diff --git a/services/camera/libcameraservice/device3/Camera3ZslStream.h b/services/camera/libcameraservice/device3/Camera3ZslStream.h
index c7f4490..6721832 100644
--- a/services/camera/libcameraservice/device3/Camera3ZslStream.h
+++ b/services/camera/libcameraservice/device3/Camera3ZslStream.h
@@ -37,10 +37,10 @@
         public Camera3OutputStream {
   public:
     /**
-     * Set up a ZSL stream of a given resolution. Depth is the number of buffers
+     * Set up a ZSL stream of a given resolution. bufferCount is the number of buffers
      * cached within the stream that can be retrieved for input.
      */
-    Camera3ZslStream(int id, uint32_t width, uint32_t height, int depth);
+    Camera3ZslStream(int id, uint32_t width, uint32_t height, int bufferCount);
     ~Camera3ZslStream();
 
     virtual void     dump(int fd, const Vector<String16> &args) const;
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.h b/services/camera/libcameraservice/gui/RingBufferConsumer.h
index b4ad824..a03736d 100644
--- a/services/camera/libcameraservice/gui/RingBufferConsumer.h
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.h
@@ -64,7 +64,7 @@
     // bufferCount parameter specifies how many buffers can be pinned for user
     // access at the same time.
     RingBufferConsumer(const sp<IGraphicBufferConsumer>& consumer, uint32_t consumerUsage,
-            int bufferCount = BufferQueue::MIN_UNDEQUEUED_BUFFERS);
+            int bufferCount);
 
     virtual ~RingBufferConsumer();
 
diff --git a/services/medialog/MediaLogService.cpp b/services/medialog/MediaLogService.cpp
index 683fdf3..0c7fbbd 100644
--- a/services/medialog/MediaLogService.cpp
+++ b/services/medialog/MediaLogService.cpp
@@ -54,7 +54,7 @@
     }
 }
 
-status_t MediaLogService::dump(int fd, const Vector<String16>& args)
+status_t MediaLogService::dump(int fd, const Vector<String16>& args __unused)
 {
     // FIXME merge with similar but not identical code at services/audioflinger/ServiceUtilities.cpp
     static const String16 sDump("android.permission.DUMP");
diff --git a/tools/resampler_tools/fir.cpp b/tools/resampler_tools/fir.cpp
index cc3d509..3d6a74d 100644
--- a/tools/resampler_tools/fir.cpp
+++ b/tools/resampler_tools/fir.cpp
@@ -20,15 +20,25 @@
 #include <stdlib.h>
 #include <string.h>
 
-static double sinc(double x) {
+static inline double sinc(double x) {
     if (fabs(x) == 0.0f) return 1.0f;
     return sin(x) / x;
 }
 
-static double sqr(double x) {
+static inline double sqr(double x) {
     return x*x;
 }
 
+static inline int64_t toint(double x, int64_t maxval) {
+    int64_t v;
+
+    v = static_cast<int64_t>(floor(x * maxval + 0.5));
+    if (v >= maxval) {
+        return maxval - 1; // saturate at the largest representable value
+    }
+    return v;
+}
+
 static double I0(double x) {
     // from the Numerical Recipes in C p. 237
     double ax,ans,y;
@@ -54,11 +64,12 @@
     return I0(beta * sqrt(1.0 - sqr((2.0*k)/N - 1.0))) / I0(beta);
 }
 
-
 static void usage(char* name) {
     fprintf(stderr,
-            "usage: %s [-h] [-d] [-s sample_rate] [-c cut-off_frequency] [-n half_zero_crossings] [-f {float|fixed}] [-b beta] [-v dBFS] [-l lerp]\n"
-            "       %s [-h] [-d] [-s sample_rate] [-c cut-off_frequency] [-n half_zero_crossings] [-f {float|fixed}] [-b beta] [-v dBFS] -p M/N\n"
+            "usage: %s [-h] [-d] [-s sample_rate] [-c cut-off_frequency] [-n half_zero_crossings]"
+            " [-f {float|fixed|fixed16}] [-b beta] [-v dBFS] [-l lerp]\n"
+            "       %s [-h] [-d] [-s sample_rate] [-c cut-off_frequency] [-n half_zero_crossings]"
+            " [-f {float|fixed|fixed16}] [-b beta] [-v dBFS] -p M/N\n"
             "    -h    this help message\n"
             "    -d    debug, print comma-separated coefficient table\n"
             "    -p    generate poly-phase filter coefficients, with sample increment M/N\n"
@@ -66,6 +77,7 @@
             "    -c    cut-off frequency (20478)\n"
             "    -n    number of zero-crossings on one side (8)\n"
             "    -l    number of lerping bits (4)\n"
+            "    -m    number of polyphases (related to -l, default 16)\n"
             "    -f    output format, can be fixed-point or floating-point (fixed)\n"
             "    -b    kaiser window parameter beta (7.865 [-80dB])\n"
             "    -v    attenuation in dBFS (0)\n",
@@ -77,8 +89,7 @@
 int main(int argc, char** argv)
 {
     // nc is the number of bits to store the coefficients
-    const int nc = 32;
-
+    int nc = 32;
     bool polyphase = false;
     unsigned int polyM = 160;
     unsigned int polyN = 147;
@@ -88,7 +99,6 @@
     double atten = 1;
     int format = 0;
 
-
     // in order to keep the errors associated with the linear
     // interpolation of the coefficients below the quantization error
     // we must satisfy:
@@ -104,7 +114,6 @@
     // Smith, J.O. Digital Audio Resampling Home Page
     // https://ccrma.stanford.edu/~jos/resample/, 2011-03-29
     //
-    int nz = 4;
 
     //         | 0.1102*(A - 8.7)                         A > 50
     //  beta = | 0.5842*(A - 21)^0.4 + 0.07886*(A - 21)   21 <= A <= 50
@@ -123,7 +132,6 @@
     //   100 dB   10.056
     double beta = 7.865;
 
-
     // 2*nzc = (A - 8) / (2.285 * dw)
     //      with dw the transition width = 2*pi*dF/Fs
     //
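
The Kaiser design relations quoted in these comments are easy to sanity-check; a small standalone sketch of the beta formula (the attenuation values below are examples, and the table above rounds slightly differently):

    #include <cmath>
    #include <cstdio>

    // beta as a function of the desired stopband attenuation A in dB, following
    // the piecewise formula in the comments above.
    static double kaiserBeta(double A) {
        if (A > 50.0)  return 0.1102 * (A - 8.7);
        if (A >= 21.0) return 0.5842 * std::pow(A - 21.0, 0.4) + 0.07886 * (A - 21.0);
        return 0.0;
    }

    int main() {
        std::printf("beta(80 dB)  = %.3f\n", kaiserBeta(80.0));  // ~7.86, cf. the 7.865 default
        std::printf("beta(100 dB) = %.3f\n", kaiserBeta(100.0)); // ~10.06, cf. 10.056 above
        return 0;
    }
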
@@ -148,8 +156,9 @@
     // nzc  = 20
     //
 
+    int M = 1 << 4; // number of phases for interpolation
     int ch;
-    while ((ch = getopt(argc, argv, ":hds:c:n:f:l:b:p:v:")) != -1) {
+    while ((ch = getopt(argc, argv, ":hds:c:n:f:l:m:b:p:v:z:")) != -1) {
         switch (ch) {
             case 'd':
                 debug = true;
@@ -169,13 +178,26 @@
             case 'n':
                 nzc = atoi(optarg);
                 break;
+            case 'm':
+                M = atoi(optarg);
+                break;
             case 'l':
-                nz = atoi(optarg);
+                M = 1 << atoi(optarg);
                 break;
             case 'f':
-                if (!strcmp(optarg,"fixed")) format = 0;
-                else if (!strcmp(optarg,"float")) format = 1;
-                else usage(argv[0]);
+                if (!strcmp(optarg, "fixed")) {
+                    format = 0;
+                }
+                else if (!strcmp(optarg, "fixed16")) {
+                    format = 0;
+                    nc = 16;
+                }
+                else if (!strcmp(optarg, "float")) {
+                    format = 1;
+                }
+                else {
+                    usage(argv[0]);
+                }
                 break;
             case 'b':
                 beta = atof(optarg);
@@ -193,11 +215,14 @@
     // cut off frequency ratio Fc/Fs
     double Fcr = Fc / Fs;
 
-
     // total number of coefficients (one side)
-    const int M = (1 << nz);
+
     const int N = M * nzc;
 
+    // lerp (which is most useful if M is a power of 2)
+
+    int nz = 0; // recalculate nz as the bits needed to represent M
+    for (int i = M-1 ; i; i>>=1, nz++);
     // generate the right half of the filter
     if (!debug) {
         printf("// cmd-line: ");
@@ -207,7 +232,7 @@
         printf("\n");
         if (!polyphase) {
             printf("const int32_t RESAMPLE_FIR_SIZE           = %d;\n", N);
-            printf("const int32_t RESAMPLE_FIR_LERP_INT_BITS  = %d;\n", nz);
+            printf("const int32_t RESAMPLE_FIR_INT_PHASES     = %d;\n", M);
             printf("const int32_t RESAMPLE_FIR_NUM_COEF       = %d;\n", nzc);
         } else {
             printf("const int32_t RESAMPLE_FIR_SIZE           = %d;\n", 2*nzc*polyN);
@@ -224,7 +249,7 @@
         for (int i=0 ; i<=M ; i++) { // an extra set of coefs for interpolation
             for (int j=0 ; j<nzc ; j++) {
                 int ix = j*M + i;
-                double x = (2.0 * M_PI * ix * Fcr) / (1 << nz);
+                double x = (2.0 * M_PI * ix * Fcr) / M;
                 double y = kaiser(ix+N, 2*N, beta) * sinc(x) * 2.0 * Fcr;
                 y *= atten;
 
@@ -232,11 +257,13 @@
                     if (j == 0)
                         printf("\n    ");
                 }
-
                 if (!format) {
-                    int64_t yi = floor(y * ((1ULL<<(nc-1))) + 0.5);
-                    if (yi >= (1LL<<(nc-1))) yi = (1LL<<(nc-1))-1;
-                    printf("0x%08x, ", int32_t(yi));
+                    int64_t yi = toint(y, 1ULL<<(nc-1));
+                    if (nc > 16) {
+                        printf("0x%08x, ", int32_t(yi));
+                    } else {
+                        printf("0x%04x, ", int32_t(yi)&0xffff);
+                    }
                 } else {
                     printf("%.9g%s ", y, debug ? "," : "f,");
                 }
@@ -254,9 +281,12 @@
                 double y = kaiser(i+N, 2*N, beta) * sinc(x) * 2.0 * Fcr;
                 y *= atten;
                 if (!format) {
-                    int64_t yi = floor(y * ((1ULL<<(nc-1))) + 0.5);
-                    if (yi >= (1LL<<(nc-1))) yi = (1LL<<(nc-1))-1;
-                    printf("0x%08x", int32_t(yi));
+                    int64_t yi = toint(y, 1ULL<<(nc-1));
+                    if (nc > 16) {
+                        printf("0x%08x, ", int32_t(yi));
+                    } else {
+                        printf("0x%04x, ", int32_t(yi)&0xffff);
+                    }
                 } else {
                     printf("%.9g%s", y, debug ? "" : "f");
                 }
@@ -277,5 +307,3 @@
 }
 
 // http://www.csee.umbc.edu/help/sound/AFsp-V2R1/html/audio/ResampAudio.html
-
-
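
For reference, the quantization performed by the new toint() helper, and the fixed16 output format it enables, behaves like this standalone sketch (the sample coefficient is arbitrary):

    #include <cmath>
    #include <cstdint>
    #include <cstdio>

    // Round a coefficient in [-1, 1) to signed fixed point with maxval = 2^(nc-1),
    // saturating at maxval - 1 because +1.0 itself is not representable.
    static inline int64_t toint(double x, int64_t maxval) {
        int64_t v = static_cast<int64_t>(std::floor(x * maxval + 0.5));
        return v >= maxval ? maxval - 1 : v;
    }

    int main() {
        const double y = 0.999999;                                      // near-full-scale tap
        std::printf("0x%08x\n", int32_t(toint(y, 1LL << 31)));          // '-f fixed'   -> Q31
        std::printf("0x%04x\n", int32_t(toint(y, 1LL << 15)) & 0xffff); // '-f fixed16' -> Q15
        return 0;
    }
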