Merge "audio policy: binder calls for dynamic audio policy mixes" into lmp-mr1-dev
diff --git a/media/libstagefright/AACWriter.cpp b/media/libstagefright/AACWriter.cpp
index 353920e..7cc9430 100644
--- a/media/libstagefright/AACWriter.cpp
+++ b/media/libstagefright/AACWriter.cpp
@@ -61,7 +61,8 @@
mPaused(false),
mResumed(false),
mChannelCount(-1),
- mSampleRate(-1) {
+ mSampleRate(-1),
+ mAACProfile(OMX_AUDIO_AACObjectLC) {
}
AACWriter::~AACWriter() {
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index e3301aa..e6865bb 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -353,6 +353,14 @@
useAsync = true;
}
+ int32_t disallowedFlags = GraphicBuffer::USAGE_HW_VIDEO_ENCODER |
+ GRALLOC_USAGE_RENDERSCRIPT;
+ int32_t allowedFlags = GraphicBuffer::USAGE_SW_READ_MASK |
+ GraphicBuffer::USAGE_HW_TEXTURE |
+ GraphicBuffer::USAGE_HW_COMPOSER;
+ bool flexibleConsumer = (consumerUsage & disallowedFlags) == 0 &&
+ (consumerUsage & allowedFlags) != 0;
+
sp<IBinder> binder;
sp<ANativeWindow> anw;
if (bufferProducer != 0) {
@@ -382,14 +390,18 @@
// IMPLEMENTATION_DEFINED. b/9487482
if (format >= HAL_PIXEL_FORMAT_RGBA_8888 &&
format <= HAL_PIXEL_FORMAT_BGRA_8888) {
- ALOGW("%s: Camera %d: Overriding format 0x%x to IMPLEMENTATION_DEFINED",
+ ALOGW("%s: Camera %d: Overriding format %#x to IMPLEMENTATION_DEFINED",
__FUNCTION__, mCameraId, format);
format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
}
- // TODO: add startConfigure/stopConfigure call to CameraDeviceBase
- // this will make it so Camera3Device doesn't call configure_streams
- // after each call, but only once we are done with all.
+ // Round dimensions to the nearest dimensions available for this format
+ if (flexibleConsumer && !CameraDeviceClient::roundBufferDimensionNearest(width, height,
+ format, mDevice->info(), /*out*/&width, /*out*/&height)) {
+ ALOGE("%s: No stream configurations with the format %#x defined, failed to create stream.",
+ __FUNCTION__, format);
+ return BAD_VALUE;
+ }
int streamId = -1;
res = mDevice->createStream(anw, width, height, format, &streamId);
@@ -425,6 +437,62 @@
return res;
}
+
+bool CameraDeviceClient::roundBufferDimensionNearest(int32_t width, int32_t height,
+ int32_t format, const CameraMetadata& info,
+ /*out*/int32_t* outWidth, /*out*/int32_t* outHeight) {
+
+ camera_metadata_ro_entry streamConfigs =
+ info.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
+
+ int32_t bestWidth = -1;
+ int32_t bestHeight = -1;
+
+    // Iterate through listed stream configurations and find the one with the smallest Euclidean
+ // distance from the given dimensions for the given format.
+ for (size_t i = 0; i < streamConfigs.count; i += 4) {
+ int32_t fmt = streamConfigs.data.i32[i];
+ int32_t w = streamConfigs.data.i32[i + 1];
+ int32_t h = streamConfigs.data.i32[i + 2];
+
+ // Ignore input/output type for now
+ if (fmt == format) {
+ if (w == width && h == height) {
+ bestWidth = width;
+ bestHeight = height;
+ break;
+ } else if (w <= ROUNDING_WIDTH_CAP && (bestWidth == -1 ||
+ CameraDeviceClient::euclidDistSquare(w, h, width, height) <
+ CameraDeviceClient::euclidDistSquare(bestWidth, bestHeight, width, height))) {
+ bestWidth = w;
+ bestHeight = h;
+ }
+ }
+ }
+
+ if (bestWidth == -1) {
+ // Return false if no configurations for this format were listed
+ return false;
+ }
+
+    // Set the outputs to the closest width/height
+ if (outWidth != NULL) {
+ *outWidth = bestWidth;
+ }
+ if (outHeight != NULL) {
+ *outHeight = bestHeight;
+ }
+
+ // Return true if at least one configuration for this format was listed
+ return true;
+}
+
+int64_t CameraDeviceClient::euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1) {
+ int64_t d0 = x0 - x1;
+ int64_t d1 = y0 - y1;
+ return d0 * d0 + d1 * d1;
+}
+
// Create a request object from a template.
status_t CameraDeviceClient::createDefaultRequest(int templateId,
/*out*/
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 9981dfe..84e46b7 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -154,6 +154,15 @@
/** Utility members */
bool enforceRequestPermissions(CameraMetadata& metadata);
+    // Find the square of the Euclidean distance between two points
+ static int64_t euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1);
+
+ // Find the closest dimensions for a given format in available stream configurations with
+ // a width <= ROUNDING_WIDTH_CAP
+ static const int32_t ROUNDING_WIDTH_CAP = 1080;
+ static bool roundBufferDimensionNearest(int32_t width, int32_t height, int32_t format,
+ const CameraMetadata& info, /*out*/int32_t* outWidth, /*out*/int32_t* outHeight);
+
// IGraphicsBufferProducer binder -> Stream ID
KeyedVector<sp<IBinder>, int> mStreamMap;
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 5281ea6..bba3905 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -427,7 +427,7 @@
InFlightRequest r = mInFlightMap.valueAt(i);
lines.appendFormat(" Frame %d | Timestamp: %" PRId64 ", metadata"
" arrived: %s, buffers left: %d\n", mInFlightMap.keyAt(i),
- r.captureTimestamp, r.haveResultMetadata ? "true" : "false",
+ r.shutterTimestamp, r.haveResultMetadata ? "true" : "false",
r.numBuffersLeft);
}
}
@@ -1880,6 +1880,131 @@
return true;
}
+
+void Camera3Device::returnOutputBuffers(
+ const camera3_stream_buffer_t *outputBuffers, size_t numBuffers,
+ nsecs_t timestamp) {
+ for (size_t i = 0; i < numBuffers; i++)
+ {
+ Camera3Stream *stream = Camera3Stream::cast(outputBuffers[i].stream);
+ status_t res = stream->returnBuffer(outputBuffers[i], timestamp);
+ // Note: stream may be deallocated at this point, if this buffer was
+ // the last reference to it.
+ if (res != OK) {
+ ALOGE("Can't return buffer to its stream: %s (%d)",
+ strerror(-res), res);
+ }
+ }
+}
+
+
+void Camera3Device::removeInFlightRequestIfReadyLocked(int idx) {
+
+ const InFlightRequest &request = mInFlightMap.valueAt(idx);
+ const uint32_t frameNumber = mInFlightMap.keyAt(idx);
+
+ nsecs_t sensorTimestamp = request.sensorTimestamp;
+ nsecs_t shutterTimestamp = request.shutterTimestamp;
+
+ // Check if it's okay to remove the request from InFlightMap:
+ // In the case of a successful request:
+ // all input and output buffers, all result metadata, shutter callback
+ // arrived.
+    //    In the case of an unsuccessful request:
+ // all input and output buffers arrived.
+ if (request.numBuffersLeft == 0 &&
+ (request.requestStatus != OK ||
+ (request.haveResultMetadata && shutterTimestamp != 0))) {
+ ATRACE_ASYNC_END("frame capture", frameNumber);
+
+        // Sanity check - report an error if the sensor timestamp doesn't match the shutter timestamp
+ if (request.requestStatus == OK &&
+ sensorTimestamp != shutterTimestamp) {
+ SET_ERR("sensor timestamp (%" PRId64
+ ") for frame %d doesn't match shutter timestamp (%" PRId64 ")",
+ sensorTimestamp, frameNumber, shutterTimestamp);
+ }
+
+ // for an unsuccessful request, it may have pending output buffers to
+ // return.
+ assert(request.requestStatus != OK ||
+ request.pendingOutputBuffers.size() == 0);
+ returnOutputBuffers(request.pendingOutputBuffers.array(),
+ request.pendingOutputBuffers.size(), 0);
+
+ mInFlightMap.removeItemsAt(idx, 1);
+
+ ALOGVV("%s: removed frame %d from InFlightMap", __FUNCTION__, frameNumber);
+ }
+
+ // Sanity check - if we have too many in-flight frames, something has
+ // likely gone wrong
+ if (mInFlightMap.size() > kInFlightWarnLimit) {
+ CLOGE("In-flight list too large: %zu", mInFlightMap.size());
+ }
+}
+
+
+void Camera3Device::sendCaptureResult(CameraMetadata &pendingMetadata,
+ CaptureResultExtras &resultExtras,
+ CameraMetadata &collectedPartialResult,
+ uint32_t frameNumber) {
+ if (pendingMetadata.isEmpty())
+ return;
+
+ Mutex::Autolock l(mOutputLock);
+
+ // TODO: need to track errors for tighter bounds on expected frame number
+ if (frameNumber < mNextResultFrameNumber) {
+ SET_ERR("Out-of-order capture result metadata submitted! "
+ "(got frame number %d, expecting %d)",
+ frameNumber, mNextResultFrameNumber);
+ return;
+ }
+ mNextResultFrameNumber = frameNumber + 1;
+
+ CaptureResult captureResult;
+ captureResult.mResultExtras = resultExtras;
+ captureResult.mMetadata = pendingMetadata;
+
+ if (captureResult.mMetadata.update(ANDROID_REQUEST_FRAME_COUNT,
+ (int32_t*)&frameNumber, 1) != OK) {
+ SET_ERR("Failed to set frame# in metadata (%d)",
+ frameNumber);
+ return;
+ } else {
+ ALOGVV("%s: Camera %d: Set frame# in metadata (%d)",
+ __FUNCTION__, mId, frameNumber);
+ }
+
+ // Append any previous partials to form a complete result
+ if (mUsePartialResult && !collectedPartialResult.isEmpty()) {
+ captureResult.mMetadata.append(collectedPartialResult);
+ }
+
+ captureResult.mMetadata.sort();
+
+ // Check that there's a timestamp in the result metadata
+ camera_metadata_entry entry =
+ captureResult.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
+ if (entry.count == 0) {
+ SET_ERR("No timestamp provided by HAL for frame %d!",
+ frameNumber);
+ return;
+ }
+
+ // Valid result, insert into queue
+ List<CaptureResult>::iterator queuedResult =
+ mResultQueue.insert(mResultQueue.end(), CaptureResult(captureResult));
+ ALOGVV("%s: result requestId = %" PRId32 ", frameNumber = %" PRId64
+ ", burstId = %" PRId32, __FUNCTION__,
+ queuedResult->mResultExtras.requestId,
+ queuedResult->mResultExtras.frameNumber,
+ queuedResult->mResultExtras.burstId);
+
+ mResultSignal.signal();
+}
+
/**
* Camera HAL device callback methods
*/
@@ -1914,11 +2039,14 @@
CaptureResultExtras resultExtras;
bool hasInputBufferInRequest = false;
- // Get capture timestamp and resultExtras from list of in-flight requests,
- // where it was added by the shutter notification for this frame.
- // Then update the in-flight status and remove the in-flight entry if
- // all result data has been received.
- nsecs_t timestamp = 0;
+ // Get shutter timestamp and resultExtras from list of in-flight requests,
+ // where it was added by the shutter notification for this frame. If the
+ // shutter timestamp isn't received yet, append the output buffers to the
+ // in-flight request and they will be returned when the shutter timestamp
+ // arrives. Update the in-flight status and remove the in-flight entry if
+ // all result data and shutter timestamp have been received.
+ nsecs_t shutterTimestamp = 0;
+
{
Mutex::Autolock l(mInFlightLock);
ssize_t idx = mInFlightMap.indexOfKey(frameNumber);
@@ -1928,13 +2056,17 @@
return;
}
InFlightRequest &request = mInFlightMap.editValueAt(idx);
- ALOGVV("%s: got InFlightRequest requestId = %" PRId32 ", frameNumber = %" PRId64
- ", burstId = %" PRId32,
- __FUNCTION__, request.resultExtras.requestId, request.resultExtras.frameNumber,
- request.resultExtras.burstId);
- // Always update the partial count to the latest one. When framework aggregates adjacent
- // partial results into one, the latest partial count will be used.
- request.resultExtras.partialResultCount = result->partial_result;
+ ALOGVV("%s: got InFlightRequest requestId = %" PRId32
+ ", frameNumber = %" PRId64 ", burstId = %" PRId32
+ ", partialResultCount = %d",
+ __FUNCTION__, request.resultExtras.requestId,
+ request.resultExtras.frameNumber, request.resultExtras.burstId,
+ result->partial_result);
+ // Always update the partial count to the latest one if it's not 0
+ // (buffers only). When framework aggregates adjacent partial results
+ // into one, the latest partial count will be used.
+ if (result->partial_result != 0)
+ request.resultExtras.partialResultCount = result->partial_result;
// Check if this result carries only partial metadata
if (mUsePartialResult && result->result != NULL) {
@@ -1978,22 +2110,9 @@
}
}
- timestamp = request.captureTimestamp;
- resultExtras = request.resultExtras;
+ shutterTimestamp = request.shutterTimestamp;
hasInputBufferInRequest = request.hasInputBuffer;
- /**
- * One of the following must happen before it's legal to call process_capture_result,
- * unless partial metadata is being provided:
- * - CAMERA3_MSG_SHUTTER (expected during normal operation)
- * - CAMERA3_MSG_ERROR (expected during flush)
- */
- if (request.requestStatus == OK && timestamp == 0 && !isPartialResult) {
- SET_ERR("Called before shutter notify for frame %d",
- frameNumber);
- return;
- }
-
// Did we get the (final) result metadata for this capture?
if (result->result != NULL && !isPartialResult) {
if (request.haveResultMetadata) {
@@ -2026,103 +2145,38 @@
return;
}
- // Check if everything has arrived for this result (buffers and metadata), remove it from
- // InFlightMap if both arrived or HAL reports error for this request (i.e. during flush).
- // For per-frame error notifications, camera3.h requirements state that all the
- // buffer handles for a failed frame capture must be returned via process_capture_result()
- // call(s). Hence, Camera3Device needs to ensure that the frame entry is not deleted from
- // mInFlightMap until all buffers for that frame have been returned by HAL.
- if ((request.numBuffersLeft == 0) &&
- ((request.requestStatus != OK) || (request.haveResultMetadata))) {
- ATRACE_ASYNC_END("frame capture", frameNumber);
- mInFlightMap.removeItemsAt(idx, 1);
+ camera_metadata_ro_entry_t entry;
+ res = find_camera_metadata_ro_entry(result->result,
+ ANDROID_SENSOR_TIMESTAMP, &entry);
+ if (res == OK && entry.count == 1) {
+ request.sensorTimestamp = entry.data.i64[0];
}
- // Sanity check - if we have too many in-flight frames, something has
- // likely gone wrong
- if (mInFlightMap.size() > kInFlightWarnLimit) {
- CLOGE("In-flight list too large: %zu", mInFlightMap.size());
- }
-
- }
-
- // Process the result metadata, if provided
- bool gotResult = false;
- if (result->result != NULL && !isPartialResult) {
- Mutex::Autolock l(mOutputLock);
-
- gotResult = true;
-
- // TODO: need to track errors for tighter bounds on expected frame number
- if (frameNumber < mNextResultFrameNumber) {
- SET_ERR("Out-of-order capture result metadata submitted! "
- "(got frame number %d, expecting %d)",
- frameNumber, mNextResultFrameNumber);
- return;
- }
- mNextResultFrameNumber = frameNumber + 1;
-
- CaptureResult captureResult;
- captureResult.mResultExtras = resultExtras;
- captureResult.mMetadata = result->result;
-
- if (captureResult.mMetadata.update(ANDROID_REQUEST_FRAME_COUNT,
- (int32_t*)&frameNumber, 1) != OK) {
- SET_ERR("Failed to set frame# in metadata (%d)",
- frameNumber);
- gotResult = false;
+ // If shutter event isn't received yet, append the output buffers to
+ // the in-flight request. Otherwise, return the output buffers to
+ // streams.
+ if (shutterTimestamp == 0) {
+ request.pendingOutputBuffers.appendArray(result->output_buffers,
+ result->num_output_buffers);
} else {
- ALOGVV("%s: Camera %d: Set frame# in metadata (%d)",
- __FUNCTION__, mId, frameNumber);
+ returnOutputBuffers(result->output_buffers,
+ result->num_output_buffers, shutterTimestamp);
}
- // Append any previous partials to form a complete result
- if (mUsePartialResult && !collectedPartialResult.isEmpty()) {
- captureResult.mMetadata.append(collectedPartialResult);
+ if (result->result != NULL && !isPartialResult) {
+ if (shutterTimestamp == 0) {
+ request.pendingMetadata = result->result;
+ request.partialResult.collectedResult = collectedPartialResult;
+ } else {
+ CameraMetadata metadata;
+ metadata = result->result;
+ sendCaptureResult(metadata, request.resultExtras,
+ collectedPartialResult, frameNumber);
+ }
}
- captureResult.mMetadata.sort();
-
- // Check that there's a timestamp in the result metadata
-
- camera_metadata_entry entry =
- captureResult.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
- if (entry.count == 0) {
- SET_ERR("No timestamp provided by HAL for frame %d!",
- frameNumber);
- gotResult = false;
- } else if (timestamp != entry.data.i64[0]) {
- SET_ERR("Timestamp mismatch between shutter notify and result"
- " metadata for frame %d (%" PRId64 " vs %" PRId64 " respectively)",
- frameNumber, timestamp, entry.data.i64[0]);
- gotResult = false;
- }
-
- if (gotResult) {
- // Valid result, insert into queue
- List<CaptureResult>::iterator queuedResult =
- mResultQueue.insert(mResultQueue.end(), CaptureResult(captureResult));
- ALOGVV("%s: result requestId = %" PRId32 ", frameNumber = %" PRId64
- ", burstId = %" PRId32, __FUNCTION__,
- queuedResult->mResultExtras.requestId,
- queuedResult->mResultExtras.frameNumber,
- queuedResult->mResultExtras.burstId);
- }
- } // scope for mOutputLock
-
- // Return completed buffers to their streams with the timestamp
-
- for (size_t i = 0; i < result->num_output_buffers; i++) {
- Camera3Stream *stream =
- Camera3Stream::cast(result->output_buffers[i].stream);
- res = stream->returnBuffer(result->output_buffers[i], timestamp);
- // Note: stream may be deallocated at this point, if this buffer was the
- // last reference to it.
- if (res != OK) {
- ALOGE("Can't return buffer %zu for frame %d to its stream: "
- " %s (%d)", i, frameNumber, strerror(-res), res);
- }
- }
+ removeInFlightRequestIfReadyLocked(idx);
+ } // scope for mInFlightLock
if (result->input_buffer != NULL) {
if (hasInputBufferInRequest) {
@@ -2142,13 +2196,6 @@
__FUNCTION__);
}
}
-
- // Finally, signal any waiters for new frames
-
- if (gotResult) {
- mResultSignal.signal();
- }
-
}
void Camera3Device::notify(const camera3_notify_msg *msg) {
@@ -2266,8 +2313,6 @@
mNextShutterFrameNumber = msg.frame_number + 1;
}
- CaptureResultExtras resultExtras;
-
// Set timestamp for the request in the in-flight tracking
// and get the request ID to send upstream
{
@@ -2275,21 +2320,30 @@
idx = mInFlightMap.indexOfKey(msg.frame_number);
if (idx >= 0) {
InFlightRequest &r = mInFlightMap.editValueAt(idx);
- r.captureTimestamp = msg.timestamp;
- resultExtras = r.resultExtras;
+
+ ALOGVV("Camera %d: %s: Shutter fired for frame %d (id %d) at %" PRId64,
+ mId, __FUNCTION__,
+ msg.frame_number, r.resultExtras.requestId, msg.timestamp);
+ // Call listener, if any
+ if (listener != NULL) {
+ listener->notifyShutter(r.resultExtras, msg.timestamp);
+ }
+
+ r.shutterTimestamp = msg.timestamp;
+
+ // send pending result and buffers
+ sendCaptureResult(r.pendingMetadata, r.resultExtras,
+ r.partialResult.collectedResult, msg.frame_number);
+ returnOutputBuffers(r.pendingOutputBuffers.array(),
+ r.pendingOutputBuffers.size(), r.shutterTimestamp);
+ r.pendingOutputBuffers.clear();
+
+ removeInFlightRequestIfReadyLocked(idx);
}
}
if (idx < 0) {
SET_ERR("Shutter notification for non-existent frame number %d",
msg.frame_number);
- return;
- }
- ALOGVV("Camera %d: %s: Shutter fired for frame %d (id %d) at %" PRId64,
- mId, __FUNCTION__,
- msg.frame_number, resultExtras.requestId, msg.timestamp);
- // Call listener, if any
- if (listener != NULL) {
- listener->notifyShutter(resultExtras, msg.timestamp);
}
}
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index ec6bba1..ec8dc10 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -521,7 +521,9 @@
struct InFlightRequest {
// Set by notify() SHUTTER call.
- nsecs_t captureTimestamp;
+ nsecs_t shutterTimestamp;
+ // Set by process_capture_result().
+ nsecs_t sensorTimestamp;
int requestStatus;
// Set by process_capture_result call with valid metadata
bool haveResultMetadata;
@@ -532,6 +534,21 @@
// If this request has any input buffer
bool hasInputBuffer;
+
+        // The last metadata that framework received from HAL and has
+        // not yet sent out because the shutter event hasn't arrived.
+ // It's added by process_capture_result and sent when framework
+ // receives the shutter event.
+ CameraMetadata pendingMetadata;
+
+ // Buffers are added by process_capture_result when output buffers
+ // return from HAL but framework has not yet received the shutter
+ // event. They will be returned to the streams when framework receives
+ // the shutter event.
+ Vector<camera3_stream_buffer_t> pendingOutputBuffers;
+
+
+
// Fields used by the partial result only
struct PartialResultInFlight {
// Set by process_capture_result once 3A has been sent to clients
@@ -546,7 +563,8 @@
// Default constructor needed by KeyedVector
InFlightRequest() :
- captureTimestamp(0),
+ shutterTimestamp(0),
+ sensorTimestamp(0),
requestStatus(OK),
haveResultMetadata(false),
numBuffersLeft(0),
@@ -554,7 +572,8 @@
}
InFlightRequest(int numBuffers) :
- captureTimestamp(0),
+ shutterTimestamp(0),
+ sensorTimestamp(0),
requestStatus(OK),
haveResultMetadata(false),
numBuffersLeft(numBuffers),
@@ -562,7 +581,8 @@
}
InFlightRequest(int numBuffers, CaptureResultExtras extras) :
- captureTimestamp(0),
+ shutterTimestamp(0),
+ sensorTimestamp(0),
requestStatus(OK),
haveResultMetadata(false),
numBuffersLeft(numBuffers),
@@ -571,7 +591,8 @@
}
InFlightRequest(int numBuffers, CaptureResultExtras extras, bool hasInput) :
- captureTimestamp(0),
+ shutterTimestamp(0),
+ sensorTimestamp(0),
requestStatus(OK),
haveResultMetadata(false),
numBuffersLeft(numBuffers),
@@ -639,6 +660,24 @@
void notifyShutter(const camera3_shutter_msg_t &msg,
NotificationListener *listener);
+ // helper function to return the output buffers to the streams.
+ void returnOutputBuffers(const camera3_stream_buffer_t *outputBuffers,
+ size_t numBuffers, nsecs_t timestamp);
+
+ // Insert the capture result given the pending metadata, result extras,
+ // partial results, and the frame number to the result queue.
+ void sendCaptureResult(CameraMetadata &pendingMetadata,
+ CaptureResultExtras &resultExtras,
+ CameraMetadata &collectedPartialResult, uint32_t frameNumber);
+
+ /**** Scope for mInFlightLock ****/
+
+ // Remove the in-flight request of the given index from mInFlightMap
+ // if it's no longer needed. It must only be called with mInFlightLock held.
+ void removeInFlightRequestIfReadyLocked(int idx);
+
+ /**** End scope for mInFlightLock ****/
+
/**
* Static callback forwarding methods from HAL to instance
*/