Make IGraphicBufferSource::setTimeLapseConfig take fps

Replace the timePerFrameUs/timePerCaptureUs (microsecond interval)
parameters of setTimeLapseConfig with fps/captureFps (frames per
second, as doubles) across the AIDL, HIDL and OMX plumbing, and track
the corresponding state in StagefrightRecorder, ACodec and
GraphicBufferSource as frame rates rather than frame intervals.

Test: Manual use of Camera, Movies, Photos and YouTube apps.

Test: With CtsMediaTestCases.apk installed,
adb shell am instrument -e size small -w
'android.media.cts/android.support.test.runner.AndroidJUnitRunner'

Bug: 37302530
Change-Id: Id0d4f7fed57349a2f5484f6b6264981471e8f022
diff --git a/include/media/omx/1.0/WGraphicBufferSource.h b/include/media/omx/1.0/WGraphicBufferSource.h
index 0ca5f44..397e576 100644
--- a/include/media/omx/1.0/WGraphicBufferSource.h
+++ b/include/media/omx/1.0/WGraphicBufferSource.h
@@ -67,14 +67,11 @@
 struct LWGraphicBufferSource : public BnGraphicBufferSource {
     sp<TGraphicBufferSource> mBase;
     LWGraphicBufferSource(sp<TGraphicBufferSource> const& base);
-    BnStatus configure(
-            const sp<IOMXNode>& omxNode, int32_t dataSpace) override;
+    BnStatus configure(const sp<IOMXNode>& omxNode, int32_t dataSpace) override;
     BnStatus setSuspend(bool suspend, int64_t timeUs) override;
-    BnStatus setRepeatPreviousFrameDelayUs(
-            int64_t repeatAfterUs) override;
+    BnStatus setRepeatPreviousFrameDelayUs(int64_t repeatAfterUs) override;
     BnStatus setMaxFps(float maxFps) override;
-    BnStatus setTimeLapseConfig(
-            int64_t timePerFrameUs, int64_t timePerCaptureUs) override;
+    BnStatus setTimeLapseConfig(double fps, double captureFps) override;
     BnStatus setStartTimeUs(int64_t startTimeUs) override;
     BnStatus setStopTimeUs(int64_t stopTimeUs) override;
     BnStatus setColorAspects(int32_t aspects) override;
diff --git a/media/libmedia/aidl/android/IGraphicBufferSource.aidl b/media/libmedia/aidl/android/IGraphicBufferSource.aidl
index 325c631..f3c7abc 100644
--- a/media/libmedia/aidl/android/IGraphicBufferSource.aidl
+++ b/media/libmedia/aidl/android/IGraphicBufferSource.aidl
@@ -28,10 +28,10 @@
     void setSuspend(boolean suspend, long suspendTimeUs);
     void setRepeatPreviousFrameDelayUs(long repeatAfterUs);
     void setMaxFps(float maxFps);
-    void setTimeLapseConfig(long timePerFrameUs, long timePerCaptureUs);
+    void setTimeLapseConfig(double fps, double captureFps);
     void setStartTimeUs(long startTimeUs);
     void setStopTimeUs(long stopTimeUs);
     void setColorAspects(int aspects);
     void setTimeOffsetUs(long timeOffsetsUs);
     void signalEndOfInputStream();
-}
\ No newline at end of file
+}
diff --git a/media/libmedia/omx/1.0/WGraphicBufferSource.cpp b/media/libmedia/omx/1.0/WGraphicBufferSource.cpp
index b4e2975..4c543fa 100644
--- a/media/libmedia/omx/1.0/WGraphicBufferSource.cpp
+++ b/media/libmedia/omx/1.0/WGraphicBufferSource.cpp
@@ -53,9 +53,8 @@
 }
 
 BnStatus LWGraphicBufferSource::setTimeLapseConfig(
-        int64_t timePerFrameUs, int64_t timePerCaptureUs) {
-    return toBinderStatus(mBase->setTimeLapseConfig(
-            timePerFrameUs, timePerCaptureUs));
+        double fps, double captureFps) {
+    return toBinderStatus(mBase->setTimeLapseConfig(fps, captureFps));
 }
 
 BnStatus LWGraphicBufferSource::setStartTimeUs(
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 3b7c61b..e1d762f 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -163,7 +163,7 @@
     // TBD mTrackEveryTimeDurationUs = 0;
     mAnalyticsItem->setInt32(kRecorderCaptureFpsEnable, mCaptureFpsEnable);
     mAnalyticsItem->setDouble(kRecorderCaptureFps, mCaptureFps);
-    // TBD mTimeBetweenCaptureUs = -1;
+    // TBD mCaptureFps = -1.0;
     // TBD mCameraSourceTimeLapse = NULL;
     // TBD mMetaDataStoredInVideoBuffers = kMetadataBufferTypeInvalid;
     // TBD mEncoderProfiles = MediaProfiles::getInstance();
@@ -709,18 +709,11 @@
 status_t StagefrightRecorder::setParamCaptureFps(double fps) {
     ALOGV("setParamCaptureFps: %.2f", fps);
 
-    // FPS value is from Java layer where double follows IEEE-754, which is not
-    // necessarily true for double here.
-    constexpr double kIeee754Epsilon = 2.220446049250313e-16;
-    constexpr double kEpsilon = std::max(std::numeric_limits<double>::epsilon(), kIeee754Epsilon);
-    // Not allowing fps less than 1 frame / day minus epsilon.
-    if (fps < 1.0 / 86400 - kEpsilon) {
-        ALOGE("fps (%lf) is out of range (>= 1 frame / day)", fps);
+    if (!(fps >= 1.0 / 86400)) {  // require at least 1 frame per day; NaN also fails this check
+        ALOGE("fps (%lf) is out of range (must be at least 1 frame per day)", fps);
         return BAD_VALUE;
     }
-
     mCaptureFps = fps;
-    mTimeBetweenCaptureUs = std::llround(1e6 / fps);
     return OK;
 }
 
@@ -1574,16 +1567,15 @@
     videoSize.width = mVideoWidth;
     videoSize.height = mVideoHeight;
     if (mCaptureFpsEnable) {
-        if (mTimeBetweenCaptureUs < 0) {
-            ALOGE("Invalid mTimeBetweenTimeLapseFrameCaptureUs value: %lld",
-                    (long long)mTimeBetweenCaptureUs);
+        if (!(mCaptureFps > 0.)) {
+            ALOGE("Invalid mCaptureFps value: %lf", mCaptureFps);
             return BAD_VALUE;
         }
 
         mCameraSourceTimeLapse = CameraSourceTimeLapse::CreateFromCamera(
                 mCamera, mCameraProxy, mCameraId, mClientName, mClientUid, mClientPid,
                 videoSize, mFrameRate, mPreviewSurface,
-                mTimeBetweenCaptureUs);
+                std::llround(1e6 / mCaptureFps));
         *cameraSource = mCameraSourceTimeLapse;
     } else {
         *cameraSource = CameraSource::CreateFromCamera(
@@ -1679,12 +1671,11 @@
 
         // set up time lapse/slow motion for surface source
         if (mCaptureFpsEnable) {
-            if (mTimeBetweenCaptureUs <= 0) {
-                ALOGE("Invalid mTimeBetweenCaptureUs value: %lld",
-                        (long long)mTimeBetweenCaptureUs);
+            if (!(mCaptureFps > 0.)) {
+                ALOGE("Invalid mCaptureFps value: %lf", mCaptureFps);
                 return BAD_VALUE;
             }
-            format->setInt64("time-lapse", mTimeBetweenCaptureUs);
+            format->setDouble("time-lapse-fps", mCaptureFps);
         }
     }
 
@@ -2075,8 +2066,7 @@
     mMaxFileSizeBytes = 0;
     mTrackEveryTimeDurationUs = 0;
     mCaptureFpsEnable = false;
-    mCaptureFps = 0.0;
-    mTimeBetweenCaptureUs = -1;
+    mCaptureFps = -1.0;
     mCameraSourceTimeLapse = NULL;
     mMetaDataStoredInVideoBuffers = kMetadataBufferTypeInvalid;
     mEncoderProfiles = MediaProfiles::getInstance();
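
StagefrightRecorder now validates and stores only the capture frame rate,
converting to a microsecond interval at the single CameraSourceTimeLapse
call site. A standalone sketch of that flow (names are illustrative, not
part of the patch):

    #include <cmath>
    #include <cstdint>
    #include <cstdio>

    // Mirrors the new lower bound (at least 1 frame per day; NaN fails the
    // comparison too) and the late fps -> interval conversion.
    static bool isValidCaptureFps(double fps) {
        return fps >= 1.0 / 86400;
    }

    int main() {
        const double captureFps = 0.1;  // one frame every 10 seconds
        if (!isValidCaptureFps(captureFps)) return 1;
        const int64_t timeBetweenCaptureUs = std::llround(1e6 / captureFps);
        std::printf("capture interval = %lld us\n",
                (long long)timeBetweenCaptureUs);  // prints 10000000
        return 0;
    }
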
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 63b9571..8b91541 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -546,8 +546,8 @@
       mRepeatFrameDelayUs(-1ll),
       mMaxPtsGapUs(-1ll),
       mMaxFps(-1),
-      mTimePerFrameUs(-1ll),
-      mTimePerCaptureUs(-1ll),
+      mFps(-1.0),
+      mCaptureFps(-1.0),
       mCreateInputBuffersSuspended(false),
       mLatency(0),
       mTunneled(false),
@@ -1802,8 +1802,8 @@
             mMaxFps = -1;
         }
 
-        if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) {
-            mTimePerCaptureUs = -1ll;
+        if (!msg->findDouble("time-lapse-fps", &mCaptureFps)) {
+            mCaptureFps = -1.0;
         }
 
         if (!msg->findInt32(
@@ -3739,17 +3739,18 @@
 
     def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2;
 
-    float frameRate;
-    if (!msg->findFloat("frame-rate", &frameRate)) {
+    float framerate;
+    if (!msg->findFloat("frame-rate", &framerate)) {
         int32_t tmp;
         if (!msg->findInt32("frame-rate", &tmp)) {
             return INVALID_OPERATION;
         }
-        frameRate = (float)tmp;
-        mTimePerFrameUs = (int64_t) (1000000.0f / frameRate);
+        mFps = (double)tmp;
+    } else {
+        mFps = (double)framerate;
     }
 
-    video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
+    video_def->xFramerate = (OMX_U32)(mFps * 65536);
     video_def->eCompressionFormat = OMX_VIDEO_CodingUnused;
     // this is redundant as it was already set up in setVideoPortFormatType
     // FIXME for now skip this only for flexible YUV formats
@@ -6597,11 +6598,10 @@
         }
     }
 
-    if (mCodec->mTimePerCaptureUs > 0ll
-            && mCodec->mTimePerFrameUs > 0ll) {
+    if (mCodec->mCaptureFps > 0. && mCodec->mFps > 0.) {
         err = statusFromBinderStatus(
                 mCodec->mGraphicBufferSource->setTimeLapseConfig(
-                        mCodec->mTimePerFrameUs, mCodec->mTimePerCaptureUs));
+                        mCodec->mFps, mCodec->mCaptureFps));
 
         if (err != OK) {
             ALOGE("[%s] Unable to configure time lapse (err %d)",
diff --git a/media/libstagefright/include/ACodec.h b/media/libstagefright/include/ACodec.h
index 6c1a5c6..06ee0e8 100644
--- a/media/libstagefright/include/ACodec.h
+++ b/media/libstagefright/include/ACodec.h
@@ -293,8 +293,8 @@
     int64_t mRepeatFrameDelayUs;
     int64_t mMaxPtsGapUs;
     float mMaxFps;
-    int64_t mTimePerFrameUs;
-    int64_t mTimePerCaptureUs;
+    double mFps;
+    double mCaptureFps;
     bool mCreateInputBuffersSuspended;
     uint32_t mLatency;
 
diff --git a/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp b/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp
index ebca879..e876306 100644
--- a/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp
+++ b/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp
@@ -192,8 +192,8 @@
 }
 
 Return<Status> TWGraphicBufferSource::setTimeLapseConfig(
-        int64_t timePerFrameUs, int64_t timePerCaptureUs) {
-    return toStatus(mBase->setTimeLapseConfig(timePerFrameUs, timePerCaptureUs));
+        double fps, double captureFps) {
+    return toStatus(mBase->setTimeLapseConfig(fps, captureFps));
 }
 
 Return<Status> TWGraphicBufferSource::setStartTimeUs(int64_t startTimeUs) {
diff --git a/media/libstagefright/omx/1.0/WGraphicBufferSource.h b/media/libstagefright/omx/1.0/WGraphicBufferSource.h
index 8f822b8..4549c97 100644
--- a/media/libstagefright/omx/1.0/WGraphicBufferSource.h
+++ b/media/libstagefright/omx/1.0/WGraphicBufferSource.h
@@ -78,8 +78,7 @@
     Return<Status> setSuspend(bool suspend, int64_t timeUs) override;
     Return<Status> setRepeatPreviousFrameDelayUs(int64_t repeatAfterUs) override;
     Return<Status> setMaxFps(float maxFps) override;
-    Return<Status> setTimeLapseConfig(
-            int64_t timePerFrameUs, int64_t timePerCaptureUs) override;
+    Return<Status> setTimeLapseConfig(double fps, double captureFps) override;
     Return<Status> setStartTimeUs(int64_t startTimeUs) override;
     Return<Status> setStopTimeUs(int64_t stopTimeUs) override;
     Return<void> getStopTimeOffsetUs(getStopTimeOffsetUs_cb _hidl_cb) override;
diff --git a/media/libstagefright/omx/BWGraphicBufferSource.cpp b/media/libstagefright/omx/BWGraphicBufferSource.cpp
index 4e0f6dd..f2a454f 100644
--- a/media/libstagefright/omx/BWGraphicBufferSource.cpp
+++ b/media/libstagefright/omx/BWGraphicBufferSource.cpp
@@ -145,9 +145,9 @@
 }
 
 ::android::binder::Status BWGraphicBufferSource::setTimeLapseConfig(
-        int64_t timePerFrameUs, int64_t timePerCaptureUs) {
+        double fps, double captureFps) {
     return Status::fromStatusT(mBase->setTimeLapseConfig(
-            timePerFrameUs, timePerCaptureUs));
+            fps, captureFps));
 }
 
 ::android::binder::Status BWGraphicBufferSource::setStartTimeUs(
diff --git a/media/libstagefright/omx/BWGraphicBufferSource.h b/media/libstagefright/omx/BWGraphicBufferSource.h
index f1ce2af..43763c2 100644
--- a/media/libstagefright/omx/BWGraphicBufferSource.h
+++ b/media/libstagefright/omx/BWGraphicBufferSource.h
@@ -50,7 +50,7 @@
             int64_t repeatAfterUs) override;
     Status setMaxFps(float maxFps) override;
     Status setTimeLapseConfig(
-            int64_t timePerFrameUs, int64_t timePerCaptureUs) override;
+            double fps, double captureFps) override;
     Status setStartTimeUs(int64_t startTimeUs) override;
     Status setStopTimeUs(int64_t stopTimeUs) override;
     Status setColorAspects(int32_t aspects) override;
diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp
index 5257b50..0521460 100644
--- a/media/libstagefright/omx/GraphicBufferSource.cpp
+++ b/media/libstagefright/omx/GraphicBufferSource.cpp
@@ -42,6 +42,7 @@
 
 #include <functional>
 #include <memory>
+#include <cmath>
 
 namespace android {
 
@@ -270,8 +271,11 @@
     mRepeatLastFrameGeneration(0),
     mOutstandingFrameRepeatCount(0),
     mFrameRepeatBlockedOnCodecBuffer(false),
-    mTimePerCaptureUs(-1ll),
-    mTimePerFrameUs(-1ll),
+    mFps(-1.0),
+    mCaptureFps(-1.0),
+    mBaseCaptureUs(-1ll),
+    mBaseFrameUs(-1ll),
+    mFrameCount(0),
     mPrevCaptureUs(-1ll),
     mPrevFrameUs(-1ll),
     mInputBufferTimeOffsetUs(0ll) {
@@ -713,26 +717,31 @@
     int64_t timeUs = bufferTimeNs / 1000;
     timeUs += mInputBufferTimeOffsetUs;
 
-    if (mTimePerCaptureUs > 0ll
-            && (mTimePerCaptureUs > 2 * mTimePerFrameUs
-            || mTimePerFrameUs > 2 * mTimePerCaptureUs)) {
+    if (mCaptureFps > 0.
+            && (mFps > 2 * mCaptureFps
+            || mCaptureFps > 2 * mFps)) {
         // Time lapse or slow motion mode
         if (mPrevCaptureUs < 0ll) {
             // first capture
-            mPrevCaptureUs = timeUs;
+            mPrevCaptureUs = mBaseCaptureUs = timeUs;
             // adjust the first sample timestamp.
-            mPrevFrameUs = (timeUs * mTimePerFrameUs) / mTimePerCaptureUs;
+            mPrevFrameUs = mBaseFrameUs =
+                    std::llround((timeUs * mCaptureFps) / mFps);
+            mFrameCount = 0;
         } else {
             // snap to nearest capture point
-            int64_t nFrames = (timeUs + mTimePerCaptureUs / 2 - mPrevCaptureUs)
-                    / mTimePerCaptureUs;
+            int64_t nFrames = std::llround(
+                    (timeUs - mPrevCaptureUs) * mCaptureFps / 1000000);
             if (nFrames <= 0) {
                 // skip this frame as it's too close to previous capture
                 ALOGV("skipping frame, timeUs %lld", static_cast<long long>(timeUs));
                 return false;
             }
-            mPrevCaptureUs += mTimePerCaptureUs * nFrames;
-            mPrevFrameUs += mTimePerFrameUs * nFrames;
+            mFrameCount += nFrames;
+            mPrevCaptureUs = mBaseCaptureUs + std::llround(
+                    mFrameCount * 1000000 / mCaptureFps);
+            mPrevFrameUs = mBaseFrameUs + std::llround(
+                    mFrameCount * 1000000 / mFps);
         }
 
         ALOGV("timeUs %lld, captureUs %lld, frameUs %lld",
@@ -1054,10 +1063,13 @@
         mOutstandingFrameRepeatCount = 0;
         mLatestBuffer.mBuffer.reset();
         mFrameRepeatBlockedOnCodecBuffer = false;
-        mTimePerCaptureUs = -1ll;
-        mTimePerFrameUs = -1ll;
+        mFps = -1.0;
+        mCaptureFps = -1.0;
+        mBaseCaptureUs = -1ll;
+        mBaseFrameUs = -1ll;
         mPrevCaptureUs = -1ll;
         mPrevFrameUs = -1ll;
+        mFrameCount = 0;
         mInputBufferTimeOffsetUs = 0;
         mStopTimeUs = -1;
         mActionQueue.clear();
@@ -1202,18 +1214,18 @@
     return OK;
 }
 
-status_t GraphicBufferSource::setTimeLapseConfig(int64_t timePerFrameUs, int64_t timePerCaptureUs) {
-    ALOGV("setTimeLapseConfig: timePerFrameUs=%lld, timePerCaptureUs=%lld",
-            (long long)timePerFrameUs, (long long)timePerCaptureUs);
+status_t GraphicBufferSource::setTimeLapseConfig(double fps, double captureFps) {
+    ALOGV("setTimeLapseConfig: fps=%lg, captureFps=%lg",
+            fps, captureFps);
 
     Mutex::Autolock autoLock(mMutex);
 
-    if (mExecuting || timePerFrameUs <= 0ll || timePerCaptureUs <= 0ll) {
+    if (mExecuting || !(fps > 0) || !(captureFps > 0)) {
         return INVALID_OPERATION;
     }
 
-    mTimePerFrameUs = timePerFrameUs;
-    mTimePerCaptureUs = timePerCaptureUs;
+    mFps = fps;
+    mCaptureFps = captureFps;
 
     return OK;
 }
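
The snapping arithmetic above is easiest to verify with concrete numbers.
The standalone sketch below replays the time-lapse branch with illustrative
inputs (0.1 fps capture, 30 fps encoding); local names stand in for the
member variables and the code is not part of the patch:

    #include <cmath>
    #include <cstdint>
    #include <cstdio>

    int main() {
        const double captureFps = 0.1;  // one capture every 10 s (time lapse)
        const double fps = 30.0;        // encoding/playback rate

        int64_t baseCaptureUs = -1, baseFrameUs = -1;
        int64_t prevCaptureUs = -1, prevFrameUs = -1;
        int64_t frameCount = 0;

        const int64_t inputUs[] = {0, 9900000, 10050000, 30200000};
        for (int64_t timeUs : inputUs) {
            if (prevCaptureUs < 0) {
                // first capture: establish base times
                prevCaptureUs = baseCaptureUs = timeUs;
                prevFrameUs = baseFrameUs = std::llround(timeUs * captureFps / fps);
                frameCount = 0;
            } else {
                // snap to the nearest point on the 1/captureFps grid
                int64_t nFrames = std::llround(
                        (timeUs - prevCaptureUs) * captureFps / 1000000);
                if (nFrames <= 0) {
                    std::printf("drop %lld us (too close to previous capture)\n",
                            (long long)timeUs);
                    continue;
                }
                frameCount += nFrames;
                prevCaptureUs = baseCaptureUs + std::llround(frameCount * 1000000 / captureFps);
                prevFrameUs = baseFrameUs + std::llround(frameCount * 1000000 / fps);
            }
            std::printf("in %lld us -> capture %lld us, encode %lld us\n",
                    (long long)timeUs, (long long)prevCaptureUs, (long long)prevFrameUs);
        }
        // Expected: 0 -> (0, 0); 9900000 -> (10000000, 33333);
        //           10050000 dropped; 30200000 -> (30000000, 100000).
        return 0;
    }
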
diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/GraphicBufferSource.h
index 635cfd6..3df1aa1 100644
--- a/media/libstagefright/omx/GraphicBufferSource.h
+++ b/media/libstagefright/omx/GraphicBufferSource.h
@@ -162,7 +162,7 @@
     // Sets the time lapse (or slow motion) parameters.
     // When set, the sample's timestamp will be modified to playback framerate,
     // and capture timestamp will be modified to capture rate.
-    status_t setTimeLapseConfig(int64_t timePerFrameUs, int64_t timePerCaptureUs);
+    status_t setTimeLapseConfig(double fps, double captureFps);
 
     // Sets the start time us (in system time), samples before which should
     // be dropped and not submitted to encoder
@@ -417,27 +417,39 @@
     // Time lapse / slow motion configuration
     // --------------------------------------
 
-    // desired time interval between captured frames (capture interval) - value <= 0 if undefined
-    int64_t mTimePerCaptureUs;
+    // desired frame rate for encoding - value <= 0 if undefined
+    double mFps;
 
-    // desired time interval between encoded frames (media time interval) - value <= 0 if undefined
-    int64_t mTimePerFrameUs;
+    // desired frame rate for capture - value <= 0 if undefined
+    double mCaptureFps;
 
-    // Time lapse mode is enabled if capture interval is defined and it is more than twice the
-    // media time interval (if defined). In this mode frames that come in between the capture
-    // interval are dropped and the media timestamp is adjusted to have exactly the desired
-    // media time interval.
+    // Time lapse mode is enabled if the capture frame rate is defined and it is
+    // smaller than half the encoding frame rate (if defined). In this mode, frames
+    // arriving less than half a capture interval (the reciprocal of the capture
+    // frame rate) after the previous capture point are dropped, and the encoding
+    // timestamps of the remaining frames are adjusted to the encoding frame rate.
     //
-    // Slow motion mode is enabled if both media and capture intervals are defined and the media
-    // time interval is more than twice the capture interval. In this mode frames that come in
-    // between the capture interval are dropped (though there isn't expected to be any, but there
-    // could eventually be a frame drop if the actual capture interval is smaller than the
-    // configured capture interval). The media timestamp is adjusted to have exactly the desired
-    // media time interval.
+    // Slow motion mode is enabled if both encoding and capture frame rates are
+    // defined and the encoding frame rate is less than half the capture frame
+    // rate. In this mode, the source is expected to produce frames at a roughly
+    // even interval matching the configured capture fps. The first source
+    // timestamp is taken as the base time; afterwards, the timestamp of each
+    // source frame is snapped to the nearest expected capture timestamp and
+    // rescaled to match the configured encoding frame rate.
 
     // These modes must be enabled before using this source.
 
-    // adjusted capture timestamp for previous frame
+    // adjusted capture timestamp of the base frame
+    int64_t mBaseCaptureUs;
+
+    // adjusted encoding timestamp of the base frame
+    int64_t mBaseFrameUs;
+
+    // number of frames from the base time
+    int64_t mFrameCount;
+
+    // adjusted capture timestamp for previous frame (negative if there were
+    // none)
     int64_t mPrevCaptureUs;
 
     // adjusted media timestamp for previous frame (negative if there were none)
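
A hypothetical helper restating the mode-selection rule documented above
(neither the enum nor the function exists in the source; it simply mirrors
the condition mCaptureFps > 0 && (mFps > 2 * mCaptureFps || mCaptureFps > 2 * mFps)
used in GraphicBufferSource):

    enum class RateMode { kRealtime, kTimeLapse, kSlowMotion };

    inline RateMode classifyRates(double fps, double captureFps) {
        if (captureFps > 0. && fps > 2. * captureFps) {
            return RateMode::kTimeLapse;   // e.g. 0.1 fps capture played back at 30 fps
        }
        if (fps > 0. && captureFps > 2. * fps) {
            return RateMode::kSlowMotion;  // e.g. 120 fps capture played back at 30 fps
        }
        return RateMode::kRealtime;        // timestamp adjustment disabled
    }
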