/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera3-HeicCompositeStream"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <linux/memfd.h>
#include <pthread.h>
#include <sys/syscall.h>

#include <android/hardware/camera/device/3.5/types.h>
#include <libyuv.h>
#include <gui/Surface.h>
#include <utils/Log.h>
#include <utils/Trace.h>

#include <mediadrm/ICrypto.h>
#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/MediaDefs.h>
#include <media/stagefright/MediaCodecConstants.h>

#include "common/CameraDeviceBase.h"
#include "utils/ExifUtils.h"
#include "HeicEncoderInfoManager.h"
#include "HeicCompositeStream.h"

using android::hardware::camera::device::V3_5::CameraBlob;
using android::hardware::camera::device::V3_5::CameraBlobId;

namespace android {
namespace camera3 {

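// HeicCompositeStream assembles a HEIC capture from two internal streams: a
// BLOB stream carrying the JPEG APP segments (EXIF and other metadata)
// produced by the HAL, and a YUV/IMPLEMENTATION_DEFINED stream carrying the
// main image. The main image is encoded by a HEIC or HEVC MediaCodec, and the
// encoded tiles plus the APP segments are muxed into a HEIF container that is
// returned to the client as a single BLOB buffer with a CameraBlob trailer.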
HeicCompositeStream::HeicCompositeStream(wp<CameraDeviceBase> device,
        wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
        CompositeStream(device, cb),
        mUseHeic(false),
        mNumOutputTiles(1),
        mOutputWidth(0),
        mOutputHeight(0),
        mMaxHeicBufferSize(0),
        mGridWidth(HeicEncoderInfoManager::kGridWidth),
        mGridHeight(HeicEncoderInfoManager::kGridHeight),
        mGridRows(1),
        mGridCols(1),
        mUseGrid(false),
        mAppSegmentStreamId(-1),
        mAppSegmentSurfaceId(-1),
        mMainImageStreamId(-1),
        mMainImageSurfaceId(-1),
        mYuvBufferAcquired(false),
        mProducerListener(new ProducerListener()),
        mDequeuedOutputBufferCnt(0),
        mLockedAppSegmentBufferCnt(0),
        mCodecOutputCounter(0),
        mQuality(-1),
        mGridTimestampUs(0) {
}

HeicCompositeStream::~HeicCompositeStream() {
    // Call deinitCodec in case stream hasn't been deleted yet to avoid any
    // memory/resource leak.
    deinitCodec();

    mInputAppSegmentBuffers.clear();
    mCodecOutputBuffers.clear();

    mAppSegmentStreamId = -1;
    mAppSegmentSurfaceId = -1;
    mAppSegmentConsumer.clear();
    mAppSegmentSurface.clear();

    mMainImageStreamId = -1;
    mMainImageSurfaceId = -1;
    mMainImageConsumer.clear();
    mMainImageSurface.clear();
}

bool HeicCompositeStream::isHeicCompositeStream(const sp<Surface> &surface) {
    ANativeWindow *anw = surface.get();
    status_t err;
    int format;
    if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
        String8 msg = String8::format("Failed to query Surface format: %s (%d)", strerror(-err),
                err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return false;
    }

    int dataspace;
    if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
        String8 msg = String8::format("Failed to query Surface dataspace: %s (%d)", strerror(-err),
                err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return false;
    }

    return ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == HAL_DATASPACE_HEIF));
}

status_t HeicCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
        bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
        camera3_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
        std::vector<int> *surfaceIds, int /*streamSetId*/, bool /*isShared*/) {

    sp<CameraDeviceBase> device = mDevice.promote();
    if (!device.get()) {
        ALOGE("%s: Invalid camera device!", __FUNCTION__);
        return NO_INIT;
    }

    status_t res = initializeCodec(width, height, device);
    if (res != OK) {
        ALOGE("%s: Failed to initialize HEIC/HEVC codec: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return NO_INIT;
    }

    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    BufferQueue::createBufferQueue(&producer, &consumer);
    mAppSegmentConsumer = new CpuConsumer(consumer, kMaxAcquiredAppSegment);
    mAppSegmentConsumer->setFrameAvailableListener(this);
    mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
    mAppSegmentSurface = new Surface(producer);

    mStaticInfo = device->info();

    res = device->createStream(mAppSegmentSurface, mAppSegmentMaxSize, 1, format,
            kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId, surfaceIds);
    if (res == OK) {
        mAppSegmentSurfaceId = (*surfaceIds)[0];
    } else {
        ALOGE("%s: Failed to create JPEG App segment stream: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    if (!mUseGrid) {
        res = mCodec->createInputSurface(&producer);
        if (res != OK) {
            ALOGE("%s: Failed to create input surface for Heic codec: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    } else {
        BufferQueue::createBufferQueue(&producer, &consumer);
        mMainImageConsumer = new CpuConsumer(consumer, 1);
        mMainImageConsumer->setFrameAvailableListener(this);
        mMainImageConsumer->setName(String8("Camera3-HeicComposite-HevcInputYUVStream"));
    }
    mMainImageSurface = new Surface(producer);

    res = mCodec->start();
    if (res != OK) {
        ALOGE("%s: Failed to start codec: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    std::vector<int> sourceSurfaceId;
    //Use YUV_888 format if framework tiling is needed.
    int srcStreamFmt = mUseGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    res = device->createStream(mMainImageSurface, width, height, srcStreamFmt, kHeifDataSpace,
            rotation, id, physicalCameraId, &sourceSurfaceId);
    if (res == OK) {
        mMainImageSurfaceId = sourceSurfaceId[0];
        mMainImageStreamId = *id;
    } else {
        ALOGE("%s: Failed to create main image stream: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    mOutputSurface = consumers[0];
    res = registerCompositeStreamListener(getStreamId());
    if (res != OK) {
        ALOGE("%s: Failed to register HAL main image stream", __FUNCTION__);
        return res;
    }

    initCopyRowFunction(width);
    return res;
}

status_t HeicCompositeStream::deleteInternalStreams() {
    requestExit();
    auto res = join();
    if (res != OK) {
        ALOGE("%s: Failed to join with the main processing thread: %s (%d)", __FUNCTION__,
                strerror(-res), res);
    }

    deinitCodec();

    if (mAppSegmentStreamId >= 0) {
        sp<CameraDeviceBase> device = mDevice.promote();
        if (!device.get()) {
            ALOGE("%s: Invalid camera device!", __FUNCTION__);
            return NO_INIT;
        }

        res = device->deleteStream(mAppSegmentStreamId);
        mAppSegmentStreamId = -1;
    }

    if (mOutputSurface != nullptr) {
        mOutputSurface->disconnect(NATIVE_WINDOW_API_CAMERA);
        mOutputSurface.clear();
    }
    return res;
}

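// Buffers returned on the registered main image stream carry the capture
// timestamp of the frame being encoded. Record it here so that codec output
// buffers, which only carry a codec-side timestamp, can be matched back to a
// capture timestamp in compilePendingInputLocked().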
void HeicCompositeStream::onBufferReleased(const BufferInfo& bufferInfo) {
    Mutex::Autolock l(mMutex);

    if (bufferInfo.mError) return;

    mCodecOutputBufferTimestamps.push(bufferInfo.mTimestamp);
    ALOGV("%s: [%" PRId64 "]: Adding codecOutputBufferTimestamp (%zu timestamps in total)",
            __FUNCTION__, bufferInfo.mTimestamp, mCodecOutputBufferTimestamps.size());
}

// We need to get the settings early to handle the case where the codec output
// arrives earlier than result metadata.
void HeicCompositeStream::onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId,
        const CameraMetadata& settings) {
    ATRACE_ASYNC_BEGIN("HEIC capture", frameNumber);

    Mutex::Autolock l(mMutex);
    if (mErrorState || (streamId != getStreamId())) {
        return;
    }

    mPendingCaptureResults.emplace(frameNumber, CameraMetadata());

    camera_metadata_ro_entry entry;

    int32_t orientation = 0;
    entry = settings.find(ANDROID_JPEG_ORIENTATION);
    if (entry.count == 1) {
        orientation = entry.data.i32[0];
    }

    int32_t quality = kDefaultJpegQuality;
    entry = settings.find(ANDROID_JPEG_QUALITY);
    if (entry.count == 1) {
        quality = entry.data.i32[0];
    }

    mSettingsByFrameNumber[frameNumber] = std::make_pair(orientation, quality);
}

void HeicCompositeStream::onFrameAvailable(const BufferItem& item) {
    if (item.mDataSpace == static_cast<android_dataspace>(kAppSegmentDataSpace)) {
        ALOGV("%s: JPEG APP segments buffer with ts: %" PRIu64 " ms. arrived!",
                __func__, ns2ms(item.mTimestamp));

        Mutex::Autolock l(mMutex);
        if (!mErrorState) {
            mInputAppSegmentBuffers.push_back(item.mTimestamp);
            mInputReadyCondition.signal();
        }
    } else if (item.mDataSpace == kHeifDataSpace) {
        ALOGV("%s: YUV_888 buffer with ts: %" PRIu64 " ms. arrived!",
                __func__, ns2ms(item.mTimestamp));

        Mutex::Autolock l(mMutex);
        if (!mUseGrid) {
            ALOGE("%s: YUV_888 internal stream is only supported for HEVC tiling",
                    __FUNCTION__);
            return;
        }
        if (!mErrorState) {
            mInputYuvBuffers.push_back(item.mTimestamp);
            mInputReadyCondition.signal();
        }
    } else {
        ALOGE("%s: Unexpected data space: 0x%x", __FUNCTION__, item.mDataSpace);
    }
}

status_t HeicCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
        const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/) {
    if (compositeOutput == nullptr) {
        return BAD_VALUE;
    }

    compositeOutput->clear();

    bool useGrid, useHeic;
    bool isSizeSupported = isSizeSupportedByHeifEncoder(
            streamInfo.width, streamInfo.height, &useHeic, &useGrid, nullptr);
    if (!isSizeSupported) {
        // Size is not supported by either encoder.
        return OK;
    }

    compositeOutput->insert(compositeOutput->end(), 2, streamInfo);

    // JPEG APPS segments Blob stream info
    (*compositeOutput)[0].width = calcAppSegmentMaxSize(ch);
    (*compositeOutput)[0].height = 1;
    (*compositeOutput)[0].format = HAL_PIXEL_FORMAT_BLOB;
    (*compositeOutput)[0].dataSpace = kAppSegmentDataSpace;
    (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;

    // YUV/IMPLEMENTATION_DEFINED stream info
    (*compositeOutput)[1].width = streamInfo.width;
    (*compositeOutput)[1].height = streamInfo.height;
    (*compositeOutput)[1].format = useGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    (*compositeOutput)[1].dataSpace = kHeifDataSpace;
    (*compositeOutput)[1].consumerUsage = useHeic ? GRALLOC_USAGE_HW_IMAGE_ENCODER :
            useGrid ? GRALLOC_USAGE_SW_READ_OFTEN : GRALLOC_USAGE_HW_VIDEO_ENCODER;

    return NO_ERROR;
}

bool HeicCompositeStream::isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
        bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName) {
    static HeicEncoderInfoManager& heicManager = HeicEncoderInfoManager::getInstance();
    return heicManager.isSizeSupported(width, height, useHeic, useGrid, stall, hevcName);
}

bool HeicCompositeStream::isInMemoryTempFileSupported() {
    int memfd = syscall(__NR_memfd_create, "HEIF-try-memfd", MFD_CLOEXEC);
    if (memfd == -1) {
        if (errno != ENOSYS) {
            ALOGE("%s: Failed to create tmpfs file. errno %d", __FUNCTION__, errno);
        }
        return false;
    }
    close(memfd);
    return true;
}

void HeicCompositeStream::onHeicOutputFrameAvailable(
        const CodecOutputBufferInfo& outputBufferInfo) {
    Mutex::Autolock l(mMutex);

    ALOGV("%s: index %d, offset %d, size %d, time %" PRId64 ", flags 0x%x",
            __FUNCTION__, outputBufferInfo.index, outputBufferInfo.offset,
            outputBufferInfo.size, outputBufferInfo.timeUs, outputBufferInfo.flags);

    if (!mErrorState) {
        if ((outputBufferInfo.size > 0) &&
                ((outputBufferInfo.flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0)) {
            mCodecOutputBuffers.push_back(outputBufferInfo);
            mInputReadyCondition.signal();
        } else {
            ALOGV("%s: Releasing output buffer: size %d flags: 0x%x ", __FUNCTION__,
                outputBufferInfo.size, outputBufferInfo.flags);
            mCodec->releaseOutputBuffer(outputBufferInfo.index);
        }
    } else {
        mCodec->releaseOutputBuffer(outputBufferInfo.index);
    }
}

void HeicCompositeStream::onHeicInputFrameAvailable(int32_t index) {
    Mutex::Autolock l(mMutex);

    if (!mUseGrid) {
        ALOGE("%s: Codec YUV input mode must only be used for Hevc tiling mode", __FUNCTION__);
        return;
    }

    mCodecInputBuffers.push_back(index);
    mInputReadyCondition.signal();
}

void HeicCompositeStream::onHeicFormatChanged(sp<AMessage>& newFormat) {
    if (newFormat == nullptr) {
        ALOGE("%s: newFormat must not be null!", __FUNCTION__);
        return;
    }

    Mutex::Autolock l(mMutex);

    AString mime;
    AString mimeHeic(MIMETYPE_IMAGE_ANDROID_HEIC);
    newFormat->findString(KEY_MIME, &mime);
    if (mime != mimeHeic) {
        // For HEVC codec, below keys need to be filled out or overwritten so that the
        // muxer can handle them as HEIC output image.
        newFormat->setString(KEY_MIME, mimeHeic);
        newFormat->setInt32(KEY_WIDTH, mOutputWidth);
        newFormat->setInt32(KEY_HEIGHT, mOutputHeight);
        if (mUseGrid) {
            newFormat->setInt32(KEY_TILE_WIDTH, mGridWidth);
            newFormat->setInt32(KEY_TILE_HEIGHT, mGridHeight);
            newFormat->setInt32(KEY_GRID_ROWS, mGridRows);
            newFormat->setInt32(KEY_GRID_COLUMNS, mGridCols);
        }
    }
    newFormat->setInt32(KEY_IS_DEFAULT, 1 /*isPrimary*/);

    int32_t gridRows, gridCols;
    if (newFormat->findInt32(KEY_GRID_ROWS, &gridRows) &&
            newFormat->findInt32(KEY_GRID_COLUMNS, &gridCols)) {
        mNumOutputTiles = gridRows * gridCols;
    } else {
        mNumOutputTiles = 1;
    }

    mFormat = newFormat;

    ALOGV("%s: mNumOutputTiles is %zu", __FUNCTION__, mNumOutputTiles);
    mInputReadyCondition.signal();
}

void HeicCompositeStream::onHeicCodecError() {
    Mutex::Autolock l(mMutex);
    mErrorState = true;
}

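// Prepare the client-facing output surface: connect it, switch it to BLOB
// format, size its buffer count for the muxer producer plus the consumer's
// undequeued buffers, set its dimensions to mMaxHeicBufferSize x 1, and start
// the processing thread.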
status_t HeicCompositeStream::configureStream() {
    if (isRunning()) {
        // Processing thread is already running, nothing more to do.
        return NO_ERROR;
    }

    if (mOutputSurface.get() == nullptr) {
        ALOGE("%s: No valid output surface set!", __FUNCTION__);
        return NO_INIT;
    }

    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
    if (res != OK) {
        ALOGE("%s: Unable to connect to native window for stream %d",
                __FUNCTION__, mMainImageStreamId);
        return res;
    }

    if ((res = native_window_set_buffers_format(mOutputSurface.get(), HAL_PIXEL_FORMAT_BLOB))
            != OK) {
        ALOGE("%s: Unable to configure stream buffer format for stream %d", __FUNCTION__,
                mMainImageStreamId);
        return res;
    }

    ANativeWindow *anwConsumer = mOutputSurface.get();
    int maxConsumerBuffers;
    if ((res = anwConsumer->query(anwConsumer, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
                    &maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to query consumer undequeued"
                " buffer count for stream %d", __FUNCTION__, mMainImageStreamId);
        return res;
    }

    // Cannot use SourceSurface buffer count since it could be codec's 512*512 tile
    // buffer count.
    if ((res = native_window_set_buffer_count(
                    anwConsumer, kMaxOutputSurfaceProducerCount + maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to set buffer count for stream %d", __FUNCTION__, mMainImageStreamId);
        return res;
    }

    if ((res = native_window_set_buffers_dimensions(anwConsumer, mMaxHeicBufferSize, 1)) != OK) {
        ALOGE("%s: Unable to set buffer dimension %zu x 1 for stream %d: %s (%d)",
                __FUNCTION__, mMaxHeicBufferSize, mMainImageStreamId, strerror(-res), res);
        return res;
    }

    run("HeicCompositeStreamProc");

    return NO_ERROR;
}

status_t HeicCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
        Vector<int32_t>* /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
    if (outSurfaceMap->find(mAppSegmentStreamId) == outSurfaceMap->end()) {
        (*outSurfaceMap)[mAppSegmentStreamId] = std::vector<size_t>();
        outputStreamIds->push_back(mAppSegmentStreamId);
    }
    (*outSurfaceMap)[mAppSegmentStreamId].push_back(mAppSegmentSurfaceId);

    if (outSurfaceMap->find(mMainImageStreamId) == outSurfaceMap->end()) {
        (*outSurfaceMap)[mMainImageStreamId] = std::vector<size_t>();
        outputStreamIds->push_back(mMainImageStreamId);
    }
    (*outSurfaceMap)[mMainImageStreamId].push_back(mMainImageSurfaceId);

    if (currentStreamId != nullptr) {
        *currentStreamId = mMainImageStreamId;
    }

    return NO_ERROR;
}

status_t HeicCompositeStream::insertCompositeStreamIds(
        std::vector<int32_t>* compositeStreamIds /*out*/) {
    if (compositeStreamIds == nullptr) {
        return BAD_VALUE;
    }

    compositeStreamIds->push_back(mAppSegmentStreamId);
    compositeStreamIds->push_back(mMainImageStreamId);

    return OK;
}

void HeicCompositeStream::onShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) {
    Mutex::Autolock l(mMutex);
    if (mErrorState) {
        return;
    }

    if (mSettingsByFrameNumber.find(resultExtras.frameNumber) != mSettingsByFrameNumber.end()) {
        ALOGV("%s: [%" PRId64 "]: frameNumber %" PRId64, __FUNCTION__,
                timestamp, resultExtras.frameNumber);
        mFrameNumberMap.emplace(resultExtras.frameNumber, timestamp);
        mSettingsByTimestamp[timestamp] = mSettingsByFrameNumber[resultExtras.frameNumber];
        mSettingsByFrameNumber.erase(resultExtras.frameNumber);
        mInputReadyCondition.signal();
    }
}

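// Move all newly arrived per-capture state into mPendingInputFrames, keyed by
// capture timestamp: requested orientation/quality, locked JPEG APP segment
// buffers, the locked input YUV buffer, codec output buffers, frame numbers,
// capture results and error notifications, plus free codec input buffers for
// framework tiling.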
void HeicCompositeStream::compilePendingInputLocked() {
    while (!mSettingsByTimestamp.empty()) {
        auto it = mSettingsByTimestamp.begin();
        mPendingInputFrames[it->first].orientation = it->second.first;
        mPendingInputFrames[it->first].quality = it->second.second;
        mSettingsByTimestamp.erase(it);

        // Set encoder quality if no inflight encoding
        if (mPendingInputFrames.size() == 1) {
            int32_t newQuality = mPendingInputFrames.begin()->second.quality;
            updateCodecQualityLocked(newQuality);
        }
    }

    while (!mInputAppSegmentBuffers.empty()) {
        CpuConsumer::LockedBuffer imgBuffer;
        auto it = mInputAppSegmentBuffers.begin();
        auto res = mAppSegmentConsumer->lockNextBuffer(&imgBuffer);
        if (res == NOT_ENOUGH_DATA) {
            // Can not lock any more buffers.
            break;
        } else if ((res != OK) || (*it != imgBuffer.timestamp)) {
            if (res != OK) {
                ALOGE("%s: Error locking JPEG_APP_SEGMENTS image buffer: %s (%d)", __FUNCTION__,
                        strerror(-res), res);
            } else {
                ALOGE("%s: Expecting JPEG_APP_SEGMENTS buffer with time stamp: %" PRId64
                        " received buffer with time stamp: %" PRId64, __FUNCTION__,
                        *it, imgBuffer.timestamp);
                mAppSegmentConsumer->unlockBuffer(imgBuffer);
            }
            mPendingInputFrames[*it].error = true;
            mInputAppSegmentBuffers.erase(it);
            continue;
        }

        if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
                (mPendingInputFrames[imgBuffer.timestamp].error)) {
            mAppSegmentConsumer->unlockBuffer(imgBuffer);
        } else {
            mPendingInputFrames[imgBuffer.timestamp].appSegmentBuffer = imgBuffer;
            mLockedAppSegmentBufferCnt++;
        }
        mInputAppSegmentBuffers.erase(it);
    }

    while (!mInputYuvBuffers.empty() && !mYuvBufferAcquired) {
        CpuConsumer::LockedBuffer imgBuffer;
        auto it = mInputYuvBuffers.begin();
        auto res = mMainImageConsumer->lockNextBuffer(&imgBuffer);
        if (res == NOT_ENOUGH_DATA) {
            // Can not lock any more buffers.
            break;
        } else if (res != OK) {
            ALOGE("%s: Error locking YUV_888 image buffer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            mPendingInputFrames[*it].error = true;
            mInputYuvBuffers.erase(it);
            continue;
        } else if (*it != imgBuffer.timestamp) {
            ALOGW("%s: Expecting YUV_888 buffer with time stamp: %" PRId64 " received buffer with "
                    "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
            mPendingInputFrames[*it].error = true;
            mInputYuvBuffers.erase(it);
            continue;
        }

        if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
                (mPendingInputFrames[imgBuffer.timestamp].error)) {
            mMainImageConsumer->unlockBuffer(imgBuffer);
        } else {
            mPendingInputFrames[imgBuffer.timestamp].yuvBuffer = imgBuffer;
            mYuvBufferAcquired = true;
        }
        mInputYuvBuffers.erase(it);
    }

    while (!mCodecOutputBuffers.empty()) {
        auto it = mCodecOutputBuffers.begin();
        // Bitstream buffer timestamp doesn't necessarily directly correlate with input
        // buffer timestamp. Assume encoder input to output is FIFO, use a queue
        // to look up timestamp.
        int64_t bufferTime = -1;
        if (mCodecOutputBufferTimestamps.empty()) {
            ALOGV("%s: Failed to find buffer timestamp for codec output buffer!", __FUNCTION__);
            break;
        } else {
            // Direct mapping between camera timestamp (in ns) and codec timestamp (in us).
            bufferTime = mCodecOutputBufferTimestamps.front();
            mCodecOutputCounter++;
            if (mCodecOutputCounter == mNumOutputTiles) {
                mCodecOutputBufferTimestamps.pop();
                mCodecOutputCounter = 0;
            }

            mPendingInputFrames[bufferTime].codecOutputBuffers.push_back(*it);
            ALOGV("%s: [%" PRId64 "]: Pushing codecOutputBuffers (time %" PRId64 " us)",
                    __FUNCTION__, bufferTime, it->timeUs);
        }
        mCodecOutputBuffers.erase(it);
    }

    while (!mFrameNumberMap.empty()) {
        auto it = mFrameNumberMap.begin();
        mPendingInputFrames[it->second].frameNumber = it->first;
        ALOGV("%s: [%" PRId64 "]: frameNumber is %" PRId64, __FUNCTION__, it->second, it->first);
        mFrameNumberMap.erase(it);
    }

    while (!mCaptureResults.empty()) {
        auto it = mCaptureResults.begin();
        // Negative timestamp indicates that something went wrong during the capture result
        // collection process.
        if (it->first >= 0) {
            if (mPendingInputFrames[it->first].frameNumber == std::get<0>(it->second)) {
                mPendingInputFrames[it->first].result =
                        std::make_unique<CameraMetadata>(std::get<1>(it->second));
            } else {
                ALOGE("%s: Capture result frameNumber/timestamp mapping changed between "
                        "shutter and capture result!", __FUNCTION__);
            }
        }
        mCaptureResults.erase(it);
    }

    // mErrorFrameNumbers stores frame number of dropped buffers.
    auto it = mErrorFrameNumbers.begin();
    while (it != mErrorFrameNumbers.end()) {
        bool frameFound = false;
        for (auto &inputFrame : mPendingInputFrames) {
            if (inputFrame.second.frameNumber == *it) {
                inputFrame.second.error = true;
                frameFound = true;
                break;
            }
        }

        if (frameFound) {
            it = mErrorFrameNumbers.erase(it);
        } else {
            ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__,
                    *it);
            it++;
        }
    }

    // Distribute codec input buffers to be filled out from YUV output
    for (auto it = mPendingInputFrames.begin();
            it != mPendingInputFrames.end() && mCodecInputBuffers.size() > 0; it++) {
        InputFrame& inputFrame(it->second);
        if (inputFrame.codecInputCounter < mGridRows * mGridCols) {
            // Available input tiles that are required for the current input
            // image.
            size_t newInputTiles = std::min(mCodecInputBuffers.size(),
                    mGridRows * mGridCols - inputFrame.codecInputCounter);
            for (size_t i = 0; i < newInputTiles; i++) {
                CodecInputBufferInfo inputInfo =
                        { mCodecInputBuffers[0], mGridTimestampUs++, inputFrame.codecInputCounter };
                inputFrame.codecInputBuffers.push_back(inputInfo);

                mCodecInputBuffers.erase(mCodecInputBuffers.begin());
                inputFrame.codecInputCounter++;
            }
            break;
        }
    }
}

bool HeicCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*out*/) {
    if (currentTs == nullptr) {
        return false;
    }

    bool newInputAvailable = false;
    for (auto& it : mPendingInputFrames) {
        // New input is considered to be available only if:
        // 1. input buffers are ready, or
        // 2. App segment and muxer is created, or
        // 3. A codec output tile is ready, and an output buffer is available.
        // This makes sure that muxer gets created only when an output tile is
        // generated, because right now we only handle 1 HEIC output buffer at a
        // time (max dequeued buffer count is 1).
        bool appSegmentReady = (it.second.appSegmentBuffer.data != nullptr) &&
                !it.second.appSegmentWritten && it.second.result != nullptr &&
                it.second.muxer != nullptr;
        bool codecOutputReady = !it.second.codecOutputBuffers.empty();
        bool codecInputReady = (it.second.yuvBuffer.data != nullptr) &&
                (!it.second.codecInputBuffers.empty());
        bool hasOutputBuffer = it.second.muxer != nullptr ||
                (mDequeuedOutputBufferCnt < kMaxOutputSurfaceProducerCount);
        if ((!it.second.error) &&
                (it.first < *currentTs) &&
                (appSegmentReady || (codecOutputReady && hasOutputBuffer) || codecInputReady)) {
            *currentTs = it.first;
            if (it.second.format == nullptr && mFormat != nullptr) {
                it.second.format = mFormat->dup();
            }
            newInputAvailable = true;
            break;
        }
    }

    return newInputAvailable;
}

int64_t HeicCompositeStream::getNextFailingInputLocked(int64_t *currentTs /*out*/) {
    int64_t res = -1;
    if (currentTs == nullptr) {
        return res;
    }

    for (const auto& it : mPendingInputFrames) {
        if (it.second.error && !it.second.errorNotified && (it.first < *currentTs)) {
            *currentTs = it.first;
            res = it.second.frameNumber;
            break;
        }
    }

    return res;
}

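// Advance a single pending frame as far as the available inputs allow: feed
// YUV tiles to the codec if framework tiling is used, create and start the
// muxer once the first encoded tile and an output buffer are available, write
// the JPEG APP segments and the encoded tiles to the muxer, and finalize the
// output once every tile and the app segment have been written.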
status_t HeicCompositeStream::processInputFrame(nsecs_t timestamp,
        InputFrame &inputFrame) {
    ATRACE_CALL();
    status_t res = OK;

    bool appSegmentReady = inputFrame.appSegmentBuffer.data != nullptr &&
            !inputFrame.appSegmentWritten && inputFrame.result != nullptr &&
            inputFrame.muxer != nullptr;
    bool codecOutputReady = inputFrame.codecOutputBuffers.size() > 0;
    bool codecInputReady = inputFrame.yuvBuffer.data != nullptr &&
            !inputFrame.codecInputBuffers.empty();
    bool hasOutputBuffer = inputFrame.muxer != nullptr ||
            (mDequeuedOutputBufferCnt < kMaxOutputSurfaceProducerCount);

    ALOGV("%s: [%" PRId64 "]: appSegmentReady %d, codecOutputReady %d, codecInputReady %d,"
            " dequeuedOutputBuffer %d", __FUNCTION__, timestamp, appSegmentReady,
            codecOutputReady, codecInputReady, mDequeuedOutputBufferCnt);

    // Handle inputs for Hevc tiling
    if (codecInputReady) {
        res = processCodecInputFrame(inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process codec input frame: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    if (!(codecOutputReady && hasOutputBuffer) && !appSegmentReady) {
        return OK;
    }

    // Initialize and start muxer if not yet done so. In this case,
    // codecOutputReady must be true. Otherwise, appSegmentReady is guaranteed
    // to be false, and the function must have returned early.
    if (inputFrame.muxer == nullptr) {
        res = startMuxerForInputFrame(timestamp, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to create and start muxer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Write JPEG APP segments data to the muxer.
    if (appSegmentReady) {
        res = processAppSegment(timestamp, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process JPEG APP segments: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Write media codec bitstream buffers to muxer.
    while (!inputFrame.codecOutputBuffers.empty()) {
        res = processOneCodecOutputFrame(timestamp, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process codec output frame: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    if (inputFrame.pendingOutputTiles == 0) {
        if (inputFrame.appSegmentWritten) {
            res = processCompletedInputFrame(timestamp, inputFrame);
            if (res != OK) {
                ALOGE("%s: Failed to process completed input frame: %s (%d)", __FUNCTION__,
                        strerror(-res), res);
                return res;
            }
        } else if (mLockedAppSegmentBufferCnt == kMaxAcquiredAppSegment) {
            ALOGE("%s: Out-of-order app segment buffers reaches limit %u", __FUNCTION__,
                    kMaxAcquiredAppSegment);
            return INVALID_OPERATION;
        }
    }

    return res;
}

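// Dequeue a buffer from the output surface, back the HEIF container with an
// in-memory memfd file, and create and start a MediaMuxer in HEIF mode with a
// single image track based on the codec's output format.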
status_t HeicCompositeStream::startMuxerForInputFrame(nsecs_t timestamp, InputFrame &inputFrame) {
    sp<ANativeWindow> outputANW = mOutputSurface;

    auto res = outputANW->dequeueBuffer(mOutputSurface.get(), &inputFrame.anb, &inputFrame.fenceFd);
    if (res != OK) {
        ALOGE("%s: Error retrieving output buffer: %s (%d)", __FUNCTION__, strerror(-res),
                res);
        return res;
    }
    mDequeuedOutputBufferCnt++;

    // Combine current thread id, stream id and timestamp to uniquely identify image.
    std::ostringstream tempOutputFile;
    tempOutputFile << "HEIF-" << pthread_self() << "-"
            << getStreamId() << "-" << timestamp;
    inputFrame.fileFd = syscall(__NR_memfd_create, tempOutputFile.str().c_str(), MFD_CLOEXEC);
    if (inputFrame.fileFd < 0) {
        ALOGE("%s: Failed to create file %s. Error no is %d", __FUNCTION__,
                tempOutputFile.str().c_str(), errno);
        return NO_INIT;
    }
    inputFrame.muxer = new MediaMuxer(inputFrame.fileFd, MediaMuxer::OUTPUT_FORMAT_HEIF);
    if (inputFrame.muxer == nullptr) {
        ALOGE("%s: Failed to create MediaMuxer for file fd %d",
                __FUNCTION__, inputFrame.fileFd);
        return NO_INIT;
    }

    res = inputFrame.muxer->setOrientationHint(inputFrame.orientation);
    if (res != OK) {
        ALOGE("%s: Failed to setOrientationHint: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    ssize_t trackId = inputFrame.muxer->addTrack(inputFrame.format);
    if (trackId < 0) {
        ALOGE("%s: Failed to addTrack to the muxer: %zd", __FUNCTION__, trackId);
        return NO_INIT;
    }

    inputFrame.trackIndex = trackId;
    inputFrame.pendingOutputTiles = mNumOutputTiles;

    res = inputFrame.muxer->start();
    if (res != OK) {
        ALOGE("%s: Failed to start MediaMuxer: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    ALOGV("%s: [%" PRId64 "]: Muxer started for inputFrame", __FUNCTION__,
            timestamp);
    return OK;
}

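// Validate the JPEG APP segments blob delivered by the HAL, regenerate the
// APP1 (EXIF) segment from the capture result and static metadata, prepend the
// "Exif" transport header, and write the combined segments to the muxer with
// BUFFER_FLAG_MUXER_DATA.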
status_t HeicCompositeStream::processAppSegment(nsecs_t timestamp, InputFrame &inputFrame) {
    size_t app1Size = 0;
    auto appSegmentSize = findAppSegmentsSize(inputFrame.appSegmentBuffer.data,
            inputFrame.appSegmentBuffer.width * inputFrame.appSegmentBuffer.height,
            &app1Size);
    if (appSegmentSize == 0) {
        ALOGE("%s: Failed to find JPEG APP segment size", __FUNCTION__);
        return NO_INIT;
    }

    std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
    auto exifRes = exifUtils->initialize(inputFrame.appSegmentBuffer.data, app1Size);
    if (!exifRes) {
        ALOGE("%s: Failed to initialize ExifUtils object!", __FUNCTION__);
        return BAD_VALUE;
    }
    exifRes = exifUtils->setFromMetadata(*inputFrame.result, mStaticInfo,
            mOutputWidth, mOutputHeight);
    if (!exifRes) {
        ALOGE("%s: Failed to set Exif tags using metadata and main image sizes", __FUNCTION__);
        return BAD_VALUE;
    }
    exifRes = exifUtils->setOrientation(inputFrame.orientation);
    if (!exifRes) {
        ALOGE("%s: ExifUtils failed to set orientation", __FUNCTION__);
        return BAD_VALUE;
    }
    exifRes = exifUtils->generateApp1();
    if (!exifRes) {
        ALOGE("%s: ExifUtils failed to generate APP1 segment", __FUNCTION__);
        return BAD_VALUE;
    }

    unsigned int newApp1Length = exifUtils->getApp1Length();
    const uint8_t *newApp1Segment = exifUtils->getApp1Buffer();

    //Assemble the APP1 marker buffer required by MediaCodec
    uint8_t kExifApp1Marker[] = {'E', 'x', 'i', 'f', 0xFF, 0xE1, 0x00, 0x00};
    kExifApp1Marker[6] = static_cast<uint8_t>(newApp1Length >> 8);
    kExifApp1Marker[7] = static_cast<uint8_t>(newApp1Length & 0xFF);
    size_t appSegmentBufferSize = sizeof(kExifApp1Marker) +
            appSegmentSize - app1Size + newApp1Length;
    uint8_t* appSegmentBuffer = new uint8_t[appSegmentBufferSize];
    memcpy(appSegmentBuffer, kExifApp1Marker, sizeof(kExifApp1Marker));
    memcpy(appSegmentBuffer + sizeof(kExifApp1Marker), newApp1Segment, newApp1Length);
    if (appSegmentSize - app1Size > 0) {
        memcpy(appSegmentBuffer + sizeof(kExifApp1Marker) + newApp1Length,
                inputFrame.appSegmentBuffer.data + app1Size, appSegmentSize - app1Size);
    }

    sp<ABuffer> aBuffer = new ABuffer(appSegmentBuffer, appSegmentBufferSize);
    auto res = inputFrame.muxer->writeSampleData(aBuffer, inputFrame.trackIndex,
            timestamp, MediaCodec::BUFFER_FLAG_MUXER_DATA);
    delete[] appSegmentBuffer;

    if (res != OK) {
        ALOGE("%s: Failed to write JPEG APP segments to muxer: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    ALOGV("%s: [%" PRId64 "]: appSegmentSize is %zu, width %d, height %d, app1Size %zu",
            __FUNCTION__, timestamp, appSegmentSize, inputFrame.appSegmentBuffer.width,
            inputFrame.appSegmentBuffer.height, app1Size);

    inputFrame.appSegmentWritten = true;
    // Release the buffer now so any pending input app segments can be processed
    mAppSegmentConsumer->unlockBuffer(inputFrame.appSegmentBuffer);
    inputFrame.appSegmentBuffer.data = nullptr;
    mLockedAppSegmentBufferCnt--;

    return OK;
}

status_t HeicCompositeStream::processCodecInputFrame(InputFrame &inputFrame) {
    for (auto& inputBuffer : inputFrame.codecInputBuffers) {
        sp<MediaCodecBuffer> buffer;
        auto res = mCodec->getInputBuffer(inputBuffer.index, &buffer);
        if (res != OK) {
            ALOGE("%s: Error getting codec input buffer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }

        // Copy one tile from source to destination.
        size_t tileX = inputBuffer.tileIndex % mGridCols;
        size_t tileY = inputBuffer.tileIndex / mGridCols;
        size_t top = mGridHeight * tileY;
        size_t left = mGridWidth * tileX;
        size_t width = (tileX == static_cast<size_t>(mGridCols) - 1) ?
                mOutputWidth - tileX * mGridWidth : mGridWidth;
        size_t height = (tileY == static_cast<size_t>(mGridRows) - 1) ?
                mOutputHeight - tileY * mGridHeight : mGridHeight;
        ALOGV("%s: inputBuffer tileIndex [%zu, %zu], top %zu, left %zu, width %zu, height %zu,"
                " timeUs %" PRId64, __FUNCTION__, tileX, tileY, top, left, width, height,
                inputBuffer.timeUs);

        res = copyOneYuvTile(buffer, inputFrame.yuvBuffer, top, left, width, height);
        if (res != OK) {
            ALOGE("%s: Failed to copy YUV tile %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }

        res = mCodec->queueInputBuffer(inputBuffer.index, 0, buffer->capacity(),
                inputBuffer.timeUs, 0, nullptr /*errorDetailMsg*/);
        if (res != OK) {
            ALOGE("%s: Failed to queueInputBuffer to Codec: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    }

    inputFrame.codecInputBuffers.clear();
    return OK;
}

status_t HeicCompositeStream::processOneCodecOutputFrame(nsecs_t timestamp,
        InputFrame &inputFrame) {
    auto it = inputFrame.codecOutputBuffers.begin();
    sp<MediaCodecBuffer> buffer;
    status_t res = mCodec->getOutputBuffer(it->index, &buffer);
    if (res != OK) {
        ALOGE("%s: Error getting Heic codec output buffer at index %d: %s (%d)",
                __FUNCTION__, it->index, strerror(-res), res);
        return res;
    }
    if (buffer == nullptr) {
        ALOGE("%s: Invalid Heic codec output buffer at index %d",
                __FUNCTION__, it->index);
        return BAD_VALUE;
    }

    sp<ABuffer> aBuffer = new ABuffer(buffer->data(), buffer->size());
    res = inputFrame.muxer->writeSampleData(
            aBuffer, inputFrame.trackIndex, timestamp, 0 /*flags*/);
    if (res != OK) {
        ALOGE("%s: Failed to write buffer index %d to muxer: %s (%d)",
                __FUNCTION__, it->index, strerror(-res), res);
        return res;
    }

    mCodec->releaseOutputBuffer(it->index);
    if (inputFrame.pendingOutputTiles == 0) {
        ALOGW("%s: Codec generated more tiles than expected!", __FUNCTION__);
    } else {
        inputFrame.pendingOutputTiles--;
    }

    inputFrame.codecOutputBuffers.erase(inputFrame.codecOutputBuffers.begin());

    ALOGV("%s: [%" PRId64 "]: Output buffer index %d",
        __FUNCTION__, timestamp, it->index);
    return OK;
}

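// All tiles and the app segment are in the container: stop the muxer, copy the
// HEIF file from the memfd temp file into the locked output graphic buffer,
// append a CameraBlob trailer describing the payload size, and queue the
// buffer back to the client with the capture timestamp.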
status_t HeicCompositeStream::processCompletedInputFrame(nsecs_t timestamp,
        InputFrame &inputFrame) {
    sp<ANativeWindow> outputANW = mOutputSurface;
    inputFrame.muxer->stop();

    // Copy the content of the file to memory.
    sp<GraphicBuffer> gb = GraphicBuffer::from(inputFrame.anb);
    void* dstBuffer;
    auto res = gb->lockAsync(GRALLOC_USAGE_SW_WRITE_OFTEN, &dstBuffer, inputFrame.fenceFd);
    if (res != OK) {
        ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    off_t fSize = lseek(inputFrame.fileFd, 0, SEEK_END);
    if (static_cast<size_t>(fSize) > mMaxHeicBufferSize - sizeof(CameraBlob)) {
        ALOGE("%s: Error: MediaMuxer output size %ld is larger than buffer size %zu",
                __FUNCTION__, fSize, mMaxHeicBufferSize - sizeof(CameraBlob));
        return BAD_VALUE;
    }

    lseek(inputFrame.fileFd, 0, SEEK_SET);
    ssize_t bytesRead = read(inputFrame.fileFd, dstBuffer, fSize);
    if (bytesRead < fSize) {
        ALOGE("%s: Only %zd of %ld bytes read", __FUNCTION__, bytesRead, fSize);
        return BAD_VALUE;
    }

    close(inputFrame.fileFd);
    inputFrame.fileFd = -1;

    // Fill in HEIC header
    uint8_t *header = static_cast<uint8_t*>(dstBuffer) + mMaxHeicBufferSize - sizeof(CameraBlob);
    struct CameraBlob *blobHeader = (struct CameraBlob *)header;
    // Must be in sync with CAMERA3_HEIC_BLOB_ID in android_media_Utils.cpp
    blobHeader->blobId = static_cast<CameraBlobId>(0x00FE);
    blobHeader->blobSize = fSize;

    res = native_window_set_buffers_timestamp(mOutputSurface.get(), timestamp);
    if (res != OK) {
        ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
                __FUNCTION__, getStreamId(), strerror(-res), res);
        return res;
    }

    res = outputANW->queueBuffer(mOutputSurface.get(), inputFrame.anb, /*fence*/ -1);
    if (res != OK) {
        ALOGE("%s: Failed to queueBuffer to Heic stream: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }
    inputFrame.anb = nullptr;
    mDequeuedOutputBufferCnt--;

    ALOGV("%s: [%" PRId64 "]", __FUNCTION__, timestamp);
    ATRACE_ASYNC_END("HEIC capture", inputFrame.frameNumber);
    return OK;
}


void HeicCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/) {
    if (inputFrame == nullptr) {
        return;
    }

    if (inputFrame->appSegmentBuffer.data != nullptr) {
        mAppSegmentConsumer->unlockBuffer(inputFrame->appSegmentBuffer);
        inputFrame->appSegmentBuffer.data = nullptr;
    }

    while (!inputFrame->codecOutputBuffers.empty()) {
        auto it = inputFrame->codecOutputBuffers.begin();
        ALOGV("%s: releaseOutputBuffer index %d", __FUNCTION__, it->index);
        mCodec->releaseOutputBuffer(it->index);
        inputFrame->codecOutputBuffers.erase(it);
    }

    if (inputFrame->yuvBuffer.data != nullptr) {
        mMainImageConsumer->unlockBuffer(inputFrame->yuvBuffer);
        inputFrame->yuvBuffer.data = nullptr;
        mYuvBufferAcquired = false;
    }

    while (!inputFrame->codecInputBuffers.empty()) {
        auto it = inputFrame->codecInputBuffers.begin();
        inputFrame->codecInputBuffers.erase(it);
    }

    if ((inputFrame->error || mErrorState) && !inputFrame->errorNotified) {
        notifyError(inputFrame->frameNumber);
        inputFrame->errorNotified = true;
    }

    if (inputFrame->fileFd >= 0) {
        close(inputFrame->fileFd);
        inputFrame->fileFd = -1;
    }

    if (inputFrame->anb != nullptr) {
        sp<ANativeWindow> outputANW = mOutputSurface;
        outputANW->cancelBuffer(mOutputSurface.get(), inputFrame->anb, /*fence*/ -1);
        inputFrame->anb = nullptr;
    }
}

void HeicCompositeStream::releaseInputFramesLocked() {
    auto it = mPendingInputFrames.begin();
    bool inputFrameDone = false;
    while (it != mPendingInputFrames.end()) {
        auto& inputFrame = it->second;
        if (inputFrame.error ||
                (inputFrame.appSegmentWritten && inputFrame.pendingOutputTiles == 0)) {
            releaseInputFrameLocked(&inputFrame);
            it = mPendingInputFrames.erase(it);
            inputFrameDone = true;
        } else {
            it++;
        }
    }

    // Update codec quality based on first upcoming input frame.
    // Note that when encoding is in surface mode, currently there is no
    // way for camera service to synchronize quality setting on a per-frame
    // basis: we don't get notification when codec is ready to consume a new
    // input frame. So we update codec quality on a best-effort basis.
    if (inputFrameDone) {
        auto firstPendingFrame = mPendingInputFrames.begin();
        if (firstPendingFrame != mPendingInputFrames.end()) {
            updateCodecQualityLocked(firstPendingFrame->second.quality);
        }
    }
}

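// Choose between a dedicated HEIC encoder and an HEVC encoder driven with
// framework tiling, then configure it: constant-quality bitrate mode, grid and
// tile geometry when tiling applies, and surface input vs. flexible YUV input
// depending on whether the framework performs the tiling (mUseGrid).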
status_t HeicCompositeStream::initializeCodec(uint32_t width, uint32_t height,
        const sp<CameraDeviceBase>& cameraDevice) {
    ALOGV("%s", __FUNCTION__);

    bool useGrid = false;
    AString hevcName;
    bool isSizeSupported = isSizeSupportedByHeifEncoder(width, height,
            &mUseHeic, &useGrid, nullptr, &hevcName);
    if (!isSizeSupported) {
        ALOGE("%s: Encoder doesn't support size %u x %u!",
                __FUNCTION__, width, height);
        return BAD_VALUE;
    }

    // Create Looper for MediaCodec.
    auto desiredMime = mUseHeic ? MIMETYPE_IMAGE_ANDROID_HEIC : MIMETYPE_VIDEO_HEVC;
    mCodecLooper = new ALooper;
    mCodecLooper->setName("Camera3-HeicComposite-MediaCodecLooper");
    status_t res = mCodecLooper->start(
            false,   // runOnCallingThread
            false,   // canCallJava
            PRIORITY_AUDIO);
    if (res != OK) {
        ALOGE("%s: Failed to start codec looper: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return NO_INIT;
    }

    // Create HEIC/HEVC codec.
    if (mUseHeic) {
        mCodec = MediaCodec::CreateByType(mCodecLooper, desiredMime, true /*encoder*/);
    } else {
        mCodec = MediaCodec::CreateByComponentName(mCodecLooper, hevcName);
    }
    if (mCodec == nullptr) {
        ALOGE("%s: Failed to create codec for %s", __FUNCTION__, desiredMime);
        return NO_INIT;
    }

    // Create Looper and handler for Codec callback.
    mCodecCallbackHandler = new CodecCallbackHandler(this);
    if (mCodecCallbackHandler == nullptr) {
        ALOGE("%s: Failed to create codec callback handler", __FUNCTION__);
        return NO_MEMORY;
    }
    mCallbackLooper = new ALooper;
    mCallbackLooper->setName("Camera3-HeicComposite-MediaCodecCallbackLooper");
    res = mCallbackLooper->start(
            false,   // runOnCallingThread
            false,   // canCallJava
            PRIORITY_AUDIO);
    if (res != OK) {
        ALOGE("%s: Failed to start media callback looper: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return NO_INIT;
    }
    mCallbackLooper->registerHandler(mCodecCallbackHandler);

    mAsyncNotify = new AMessage(kWhatCallbackNotify, mCodecCallbackHandler);
    res = mCodec->setCallback(mAsyncNotify);
    if (res != OK) {
        ALOGE("%s: Failed to set MediaCodec callback: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    // Create output format and configure the Codec.
    sp<AMessage> outputFormat = new AMessage();
    outputFormat->setString(KEY_MIME, desiredMime);
    outputFormat->setInt32(KEY_BITRATE_MODE, BITRATE_MODE_CQ);
    outputFormat->setInt32(KEY_QUALITY, kDefaultJpegQuality);
    // Ask codec to skip timestamp check and encode all frames.
    outputFormat->setInt64(KEY_MAX_PTS_GAP_TO_ENCODER, kNoFrameDropMaxPtsGap);

    int32_t gridWidth, gridHeight, gridRows, gridCols;
    if (useGrid || mUseHeic) {
        gridWidth = HeicEncoderInfoManager::kGridWidth;
        gridHeight = HeicEncoderInfoManager::kGridHeight;
        gridRows = (height + gridHeight - 1)/gridHeight;
        gridCols = (width + gridWidth - 1)/gridWidth;

        if (mUseHeic) {
            outputFormat->setInt32(KEY_TILE_WIDTH, gridWidth);
            outputFormat->setInt32(KEY_TILE_HEIGHT, gridHeight);
            outputFormat->setInt32(KEY_GRID_COLUMNS, gridCols);
            outputFormat->setInt32(KEY_GRID_ROWS, gridRows);
        }

    } else {
        gridWidth = width;
        gridHeight = height;
        gridRows = 1;
        gridCols = 1;
    }

    outputFormat->setInt32(KEY_WIDTH, !useGrid ? width : gridWidth);
    outputFormat->setInt32(KEY_HEIGHT, !useGrid ? height : gridHeight);
    outputFormat->setInt32(KEY_I_FRAME_INTERVAL, 0);
    outputFormat->setInt32(KEY_COLOR_FORMAT,
            useGrid ? COLOR_FormatYUV420Flexible : COLOR_FormatSurface);
    outputFormat->setInt32(KEY_FRAME_RATE, useGrid ? gridRows * gridCols : kNoGridOpRate);
    // This only serves as a hint to encoder when encoding is not real-time.
    outputFormat->setInt32(KEY_OPERATING_RATE, useGrid ? kGridOpRate : kNoGridOpRate);

    res = mCodec->configure(outputFormat, nullptr /*nativeWindow*/,
            nullptr /*crypto*/, CONFIGURE_FLAG_ENCODE);
    if (res != OK) {
        ALOGE("%s: Failed to configure codec: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    mGridWidth = gridWidth;
    mGridHeight = gridHeight;
    mGridRows = gridRows;
    mGridCols = gridCols;
    mUseGrid = useGrid;
    mOutputWidth = width;
    mOutputHeight = height;
    mAppSegmentMaxSize = calcAppSegmentMaxSize(cameraDevice->info());
    mMaxHeicBufferSize = mOutputWidth * mOutputHeight * 3 / 2 + mAppSegmentMaxSize;

    return OK;
}

void HeicCompositeStream::deinitCodec() {
    ALOGV("%s", __FUNCTION__);
    if (mCodec != nullptr) {
        mCodec->stop();
        mCodec->release();
        mCodec.clear();
    }

    if (mCodecLooper != nullptr) {
        mCodecLooper->stop();
        mCodecLooper.clear();
    }

    if (mCallbackLooper != nullptr) {
        mCallbackLooper->stop();
        mCallbackLooper.clear();
    }

    mAsyncNotify.clear();
    mFormat.clear();
}

// Return the size of the complete list of app segment, 0 indicates failure
size_t HeicCompositeStream::findAppSegmentsSize(const uint8_t* appSegmentBuffer,
        size_t maxSize, size_t *app1SegmentSize) {
    if (appSegmentBuffer == nullptr || app1SegmentSize == nullptr) {
        ALOGE("%s: Invalid input appSegmentBuffer %p, app1SegmentSize %p",
                __FUNCTION__, appSegmentBuffer, app1SegmentSize);
        return 0;
    }

    size_t expectedSize = 0;
    // First check for EXIF transport header at the end of the buffer
    const uint8_t *header = appSegmentBuffer + (maxSize - sizeof(struct CameraBlob));
    const struct CameraBlob *blob = (const struct CameraBlob*)(header);
    if (blob->blobId != CameraBlobId::JPEG_APP_SEGMENTS) {
        ALOGE("%s: Invalid EXIF blobId %hu", __FUNCTION__, blob->blobId);
        return 0;
    }

    expectedSize = blob->blobSize;
    if (expectedSize == 0 || expectedSize > maxSize - sizeof(struct CameraBlob)) {
        ALOGE("%s: Invalid blobSize %zu.", __FUNCTION__, expectedSize);
        return 0;
    }

    uint32_t totalSize = 0;

    // Verify APP1 marker (mandatory)
    uint8_t app1Marker[] = {0xFF, 0xE1};
    if (memcmp(appSegmentBuffer, app1Marker, sizeof(app1Marker))) {
        ALOGE("%s: Invalid APP1 marker: %x, %x", __FUNCTION__,
                appSegmentBuffer[0], appSegmentBuffer[1]);
        return 0;
    }
    totalSize += sizeof(app1Marker);

    uint16_t app1Size = (static_cast<uint16_t>(appSegmentBuffer[totalSize]) << 8) +
            appSegmentBuffer[totalSize+1];
    totalSize += app1Size;

    ALOGV("%s: Expected APP segments size %zu, APP1 segment size %u",
            __FUNCTION__, expectedSize, app1Size);
    while (totalSize < expectedSize) {
        if (appSegmentBuffer[totalSize] != 0xFF ||
                appSegmentBuffer[totalSize+1] <= 0xE1 ||
                appSegmentBuffer[totalSize+1] > 0xEF) {
            // Invalid APPn marker
            ALOGE("%s: Invalid APPn marker: %x, %x", __FUNCTION__,
                    appSegmentBuffer[totalSize], appSegmentBuffer[totalSize+1]);
            return 0;
        }
        totalSize += 2;

        uint16_t appnSize = (static_cast<uint16_t>(appSegmentBuffer[totalSize]) << 8) +
                appSegmentBuffer[totalSize+1];
        totalSize += appnSize;
    }

    if (totalSize != expectedSize) {
        ALOGE("%s: Invalid JPEG APP segments: totalSize %u vs expected size %zu",
                __FUNCTION__, totalSize, expectedSize);
        return 0;
    }

    *app1SegmentSize = app1Size + sizeof(app1Marker);
    return expectedSize;
}

int64_t HeicCompositeStream::findTimestampInNsLocked(int64_t timeInUs) {
    for (const auto& fn : mFrameNumberMap) {
        if (timeInUs == ns2us(fn.second)) {
            return fn.second;
        }
    }
    for (const auto& inputFrame : mPendingInputFrames) {
        if (timeInUs == ns2us(inputFrame.first)) {
            return inputFrame.first;
        }
    }
    return -1;
}

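// Copy one grid tile from the locked camera YUV buffer into the codec's
// flexible YUV input buffer, using the MediaImage2 layout reported by the
// codec and handling both semi-planar and planar chroma arrangements.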
1413status_t HeicCompositeStream::copyOneYuvTile(sp<MediaCodecBuffer>& codecBuffer,
1414 const CpuConsumer::LockedBuffer& yuvBuffer,
1415 size_t top, size_t left, size_t width, size_t height) {
1416 ATRACE_CALL();
1417
1418 // Get stride information for codecBuffer
1419 sp<ABuffer> imageData;
1420 if (!codecBuffer->meta()->findBuffer("image-data", &imageData)) {
1421 ALOGE("%s: Codec input buffer is not for image data!", __FUNCTION__);
1422 return BAD_VALUE;
1423 }
1424 if (imageData->size() != sizeof(MediaImage2)) {
1425 ALOGE("%s: Invalid codec input image size %zu, expected %zu",
1426 __FUNCTION__, imageData->size(), sizeof(MediaImage2));
1427 return BAD_VALUE;
1428 }
1429 MediaImage2* imageInfo = reinterpret_cast<MediaImage2*>(imageData->data());
1430 if (imageInfo->mType != MediaImage2::MEDIA_IMAGE_TYPE_YUV ||
1431 imageInfo->mBitDepth != 8 ||
1432 imageInfo->mBitDepthAllocated != 8 ||
1433 imageInfo->mNumPlanes != 3) {
1434 ALOGE("%s: Invalid codec input image info: mType %d, mBitDepth %d, "
1435 "mBitDepthAllocated %d, mNumPlanes %d!", __FUNCTION__,
1436 imageInfo->mType, imageInfo->mBitDepth,
1437 imageInfo->mBitDepthAllocated, imageInfo->mNumPlanes);
1438 return BAD_VALUE;
1439 }
1440
1441 ALOGV("%s: yuvBuffer chromaStep %d, chromaStride %d",
1442 __FUNCTION__, yuvBuffer.chromaStep, yuvBuffer.chromaStride);
1443 ALOGV("%s: U offset %u, V offset %u, U rowInc %d, V rowInc %d, U colInc %d, V colInc %d",
1444 __FUNCTION__, imageInfo->mPlane[MediaImage2::U].mOffset,
1445 imageInfo->mPlane[MediaImage2::V].mOffset,
1446 imageInfo->mPlane[MediaImage2::U].mRowInc,
1447 imageInfo->mPlane[MediaImage2::V].mRowInc,
1448 imageInfo->mPlane[MediaImage2::U].mColInc,
1449 imageInfo->mPlane[MediaImage2::V].mColInc);
1450
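    // Copy the tile row by row; 'top' and 'left' are luma-pixel coordinates within the
    // full input frame.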
1451 // Y
1452 for (auto row = top; row < top+height; row++) {
1453 uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::Y].mOffset +
1454 imageInfo->mPlane[MediaImage2::Y].mRowInc * (row - top);
Shuzhen Wang219c2992019-02-15 17:24:28 -08001455 mFnCopyRow(yuvBuffer.data+row*yuvBuffer.stride+left, dst, width);
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -08001456 }
1457
1458 // U is Cb, V is Cr
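    // Determine the codec's chroma layout: semiplanar means interleaved U/V samples
    // (offsets differ by 1, column increment 2), planar means two separate chroma planes
    // (column increment 1).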
1459 bool codecUPlaneFirst = imageInfo->mPlane[MediaImage2::V].mOffset >
1460 imageInfo->mPlane[MediaImage2::U].mOffset;
1461 uint32_t codecUvOffsetDiff = codecUPlaneFirst ?
1462 imageInfo->mPlane[MediaImage2::V].mOffset - imageInfo->mPlane[MediaImage2::U].mOffset :
1463 imageInfo->mPlane[MediaImage2::U].mOffset - imageInfo->mPlane[MediaImage2::V].mOffset;
1464 bool isCodecUvSemiplanar = (codecUvOffsetDiff == 1) &&
1465 (imageInfo->mPlane[MediaImage2::U].mRowInc ==
1466 imageInfo->mPlane[MediaImage2::V].mRowInc) &&
1467 (imageInfo->mPlane[MediaImage2::U].mColInc == 2) &&
1468 (imageInfo->mPlane[MediaImage2::V].mColInc == 2);
1469 bool isCodecUvPlanar =
1470 ((codecUPlaneFirst && codecUvOffsetDiff >=
1471 imageInfo->mPlane[MediaImage2::U].mRowInc * imageInfo->mHeight/2) ||
1472 ((!codecUPlaneFirst && codecUvOffsetDiff >=
1473 imageInfo->mPlane[MediaImage2::V].mRowInc * imageInfo->mHeight/2))) &&
1474 imageInfo->mPlane[MediaImage2::U].mColInc == 1 &&
1475 imageInfo->mPlane[MediaImage2::V].mColInc == 1;
1476 bool cameraUPlaneFirst = yuvBuffer.dataCr > yuvBuffer.dataCb;
1477
1478 if (isCodecUvSemiplanar && yuvBuffer.chromaStep == 2 &&
1479 (codecUPlaneFirst == cameraUPlaneFirst)) {
1480 // UV semiplanar
1481 // The chroma plane could be either Cb first, or Cr first. Take the
1482 // smaller address.
1483 uint8_t *src = std::min(yuvBuffer.dataCb, yuvBuffer.dataCr);
1484 MediaImage2::PlaneIndex dstPlane = codecUPlaneFirst ? MediaImage2::U : MediaImage2::V;
1485 for (auto row = top/2; row < (top+height)/2; row++) {
1486 uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[dstPlane].mOffset +
1487 imageInfo->mPlane[dstPlane].mRowInc * (row - top/2);
Shuzhen Wang219c2992019-02-15 17:24:28 -08001488 mFnCopyRow(src+row*yuvBuffer.chromaStride+left, dst, width);
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -08001489 }
1490 } else if (isCodecUvPlanar && yuvBuffer.chromaStep == 1) {
1491 // U plane
1492 for (auto row = top/2; row < (top+height)/2; row++) {
1493 uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::U].mOffset +
1494 imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2);
Shuzhen Wang219c2992019-02-15 17:24:28 -08001495 mFnCopyRow(yuvBuffer.dataCb+row*yuvBuffer.chromaStride+left/2, dst, width/2);
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -08001496 }
1497
1498 // V plane
1499 for (auto row = top/2; row < (top+height)/2; row++) {
1500 uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::V].mOffset +
1501 imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2);
Shuzhen Wang219c2992019-02-15 17:24:28 -08001502 mFnCopyRow(yuvBuffer.dataCr+row*yuvBuffer.chromaStride+left/2, dst, width/2);
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -08001503 }
1504 } else {
Shuzhen Wang219c2992019-02-15 17:24:28 -08001505 // Convert between semiplanar and planar, or when UV orders are
1506 // different.
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -08001507 uint8_t *dst = codecBuffer->data();
1508 for (auto row = top/2; row < (top+height)/2; row++) {
1509 for (auto col = left/2; col < (left+width)/2; col++) {
1510 // U/Cb
1511 int32_t dstIndex = imageInfo->mPlane[MediaImage2::U].mOffset +
1512 imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2) +
1513 imageInfo->mPlane[MediaImage2::U].mColInc * (col - left/2);
1514 int32_t srcIndex = row * yuvBuffer.chromaStride + yuvBuffer.chromaStep * col;
1515 dst[dstIndex] = yuvBuffer.dataCb[srcIndex];
1516
1517 // V/Cr
1518 dstIndex = imageInfo->mPlane[MediaImage2::V].mOffset +
1519 imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2) +
1520 imageInfo->mPlane[MediaImage2::V].mColInc * (col - left/2);
1521 srcIndex = row * yuvBuffer.chromaStride + yuvBuffer.chromaStep * col;
1522 dst[dstIndex] = yuvBuffer.dataCr[srcIndex];
1523 }
1524 }
1525 }
1526 return OK;
1527}
1528
Shuzhen Wang219c2992019-02-15 17:24:28 -08001529void HeicCompositeStream::initCopyRowFunction(int32_t width)
1530{
1531 using namespace libyuv;
1532
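    // Start with the portable C row copier, then upgrade to the fastest SIMD variant the
    // CPU supports; later checks take precedence over earlier ones.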
1533 mFnCopyRow = CopyRow_C;
1534#if defined(HAS_COPYROW_SSE2)
1535 if (TestCpuFlag(kCpuHasSSE2)) {
1536 mFnCopyRow = IS_ALIGNED(width, 32) ? CopyRow_SSE2 : CopyRow_Any_SSE2;
1537 }
1538#endif
1539#if defined(HAS_COPYROW_AVX)
1540 if (TestCpuFlag(kCpuHasAVX)) {
1541 mFnCopyRow = IS_ALIGNED(width, 64) ? CopyRow_AVX : CopyRow_Any_AVX;
1542 }
1543#endif
1544#if defined(HAS_COPYROW_ERMS)
1545 if (TestCpuFlag(kCpuHasERMS)) {
1546 mFnCopyRow = CopyRow_ERMS;
1547 }
1548#endif
1549#if defined(HAS_COPYROW_NEON)
1550 if (TestCpuFlag(kCpuHasNEON)) {
1551 mFnCopyRow = IS_ALIGNED(width, 32) ? CopyRow_NEON : CopyRow_Any_NEON;
1552 }
1553#endif
1554#if defined(HAS_COPYROW_MIPS)
1555 if (TestCpuFlag(kCpuHasMIPS)) {
1556 mFnCopyRow = CopyRow_MIPS;
1557 }
1558#endif
1559}
1560
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -08001561size_t HeicCompositeStream::calcAppSegmentMaxSize(const CameraMetadata& info) {
1562 camera_metadata_ro_entry_t entry = info.find(ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT);
1563 size_t maxAppsSegment = 1;
1564 if (entry.count > 0) {
1565 maxAppsSegment = entry.data.u8[0] < 1 ? 1 :
1566 entry.data.u8[0] > 16 ? 16 : entry.data.u8[0];
1567 }
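    // Each APP segment is at most a 2-byte marker plus a 0xFFFF-byte length-delimited
    // payload; the CameraBlob transport header is appended at the end of the buffer.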
1568 return maxAppsSegment * (2 + 0xFFFF) + sizeof(struct CameraBlob);
1569}
1570
Shuzhen Wang62f49ed2019-09-04 14:07:53 -07001571void HeicCompositeStream::updateCodecQualityLocked(int32_t quality) {
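    // Only push a parameter update to the encoder when the requested quality changes.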
1572 if (quality != mQuality) {
1573 sp<AMessage> qualityParams = new AMessage;
1574 qualityParams->setInt32(PARAMETER_KEY_VIDEO_BITRATE, quality);
1575 status_t res = mCodec->setParameters(qualityParams);
1576 if (res != OK) {
1577 ALOGE("%s: Failed to set codec quality: %s (%d)",
1578 __FUNCTION__, strerror(-res), res);
1579 } else {
1580 mQuality = quality;
1581 }
1582 }
1583}
1584
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -08001585bool HeicCompositeStream::threadLoop() {
1586 int64_t currentTs = INT64_MAX;
1587 bool newInputAvailable = false;
1588
1589 {
1590 Mutex::Autolock l(mMutex);
1591 if (mErrorState) {
1592 // In case we landed in error state, return any pending buffers and
1593 // halt all further processing.
1594 compilePendingInputLocked();
Michael Gonzalezb5986a32019-10-09 15:38:17 -07001595 releaseInputFramesLocked();
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -08001596 return false;
1597 }
1598
1599
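        // Block until an input frame has all of its buffers and metadata available,
        // releasing any frames that have already failed.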
1600 while (!newInputAvailable) {
1601 compilePendingInputLocked();
1602 newInputAvailable = getNextReadyInputLocked(&currentTs);
1603
1604 if (!newInputAvailable) {
1605 auto failingFrameNumber = getNextFailingInputLocked(&currentTs);
1606 if (failingFrameNumber >= 0) {
1607 // We cannot erase 'mPendingInputFrames[currentTs]' at this point because it is
1608 // possible for two internal stream buffers to fail. In such scenario the
1609 // composite stream should notify the client about a stream buffer error only
1610 // once and this information is kept within 'errorNotified'.
1611 // Any present failed input frames will be removed on a subsequent call to
1612 // 'releaseInputFramesLocked()'.
1613 releaseInputFrameLocked(&mPendingInputFrames[currentTs]);
1614 currentTs = INT64_MAX;
1615 }
1616
1617 auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration);
1618 if (ret == TIMED_OUT) {
1619 return true;
1620 } else if (ret != OK) {
1621 ALOGE("%s: Timed wait on condition failed: %s (%d)", __FUNCTION__,
1622 strerror(-ret), ret);
1623 return false;
1624 }
1625 }
1626 }
1627 }
1628
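    // Process the frame (encoding and muxing) without holding the lock, then re-acquire
    // the lock to update state.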
1629 auto res = processInputFrame(currentTs, mPendingInputFrames[currentTs]);
1630 Mutex::Autolock l(mMutex);
1631 if (res != OK) {
1632 ALOGE("%s: Failed processing frame with timestamp: %" PRIu64 ": %s (%d)",
1633 __FUNCTION__, currentTs, strerror(-res), res);
1634 mPendingInputFrames[currentTs].error = true;
1635 }
1636
Michael Gonzalezb5986a32019-10-09 15:38:17 -07001637 releaseInputFramesLocked();
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -08001638
1639 return true;
1640}
1641
1642bool HeicCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
1643 bool res = false;
1644 // Buffer errors concerning internal composite streams should not be directly visible to
1645 // camera clients. They must only receive a single buffer error with the public composite
1646 // stream id.
1647 if ((resultExtras.errorStreamId == mAppSegmentStreamId) ||
1648 (resultExtras.errorStreamId == mMainImageStreamId)) {
1649 flagAnErrorFrameNumber(resultExtras.frameNumber);
1650 res = true;
1651 }
1652
1653 return res;
1654}
1655
Shuzhen Wange7f4b462019-02-12 08:43:07 -08001656void HeicCompositeStream::onResultError(const CaptureResultExtras& resultExtras) {
1657 // For result error, since the APPS_SEGMENT buffer already contains EXIF,
1658 // simply skip using the capture result metadata to override EXIF.
1659 Mutex::Autolock l(mMutex);
1660
1661 int64_t timestamp = -1;
1662 for (const auto& fn : mFrameNumberMap) {
1663 if (fn.first == resultExtras.frameNumber) {
1664 timestamp = fn.second;
1665 break;
1666 }
1667 }
1668 if (timestamp == -1) {
1669 for (const auto& inputFrame : mPendingInputFrames) {
1670 if (inputFrame.second.frameNumber == resultExtras.frameNumber) {
1671 timestamp = inputFrame.first;
1672 break;
1673 }
1674 }
1675 }
1676
1677 if (timestamp == -1) {
1678 ALOGE("%s: Failed to find shutter timestamp for result error!", __FUNCTION__);
1679 return;
1680 }
1681
1682 mCaptureResults.emplace(timestamp, std::make_tuple(resultExtras.frameNumber, CameraMetadata()));
1683 mInputReadyCondition.signal();
1684}
1685
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -08001686void HeicCompositeStream::CodecCallbackHandler::onMessageReceived(const sp<AMessage> &msg) {
1687 sp<HeicCompositeStream> parent = mParent.promote();
1688 if (parent == nullptr) return;
1689
1690 switch (msg->what()) {
1691 case kWhatCallbackNotify: {
1692 int32_t cbID;
1693 if (!msg->findInt32("callbackID", &cbID)) {
1694 ALOGE("kWhatCallbackNotify: callbackID is expected.");
1695 break;
1696 }
1697
1698 ALOGV("kWhatCallbackNotify: cbID = %d", cbID);
1699
1700 switch (cbID) {
1701 case MediaCodec::CB_INPUT_AVAILABLE: {
1702 int32_t index;
1703 if (!msg->findInt32("index", &index)) {
1704 ALOGE("CB_INPUT_AVAILABLE: index is expected.");
1705 break;
1706 }
1707 parent->onHeicInputFrameAvailable(index);
1708 break;
1709 }
1710
1711 case MediaCodec::CB_OUTPUT_AVAILABLE: {
1712 int32_t index;
1713 size_t offset;
1714 size_t size;
1715 int64_t timeUs;
1716 int32_t flags;
1717
1718 if (!msg->findInt32("index", &index)) {
1719 ALOGE("CB_OUTPUT_AVAILABLE: index is expected.");
1720 break;
1721 }
1722 if (!msg->findSize("offset", &offset)) {
1723 ALOGE("CB_OUTPUT_AVAILABLE: offset is expected.");
1724 break;
1725 }
1726 if (!msg->findSize("size", &size)) {
1727 ALOGE("CB_OUTPUT_AVAILABLE: size is expected.");
1728 break;
1729 }
1730 if (!msg->findInt64("timeUs", &timeUs)) {
1731 ALOGE("CB_OUTPUT_AVAILABLE: timeUs is expected.");
1732 break;
1733 }
1734 if (!msg->findInt32("flags", &flags)) {
1735 ALOGE("CB_OUTPUT_AVAILABLE: flags is expected.");
1736 break;
1737 }
1738
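                    // Package the output buffer metadata and hand it to the composite stream.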
1739 CodecOutputBufferInfo bufferInfo = {
1740 index,
1741 (int32_t)offset,
1742 (int32_t)size,
1743 timeUs,
1744 (uint32_t)flags};
1745
1746 parent->onHeicOutputFrameAvailable(bufferInfo);
1747 break;
1748 }
1749
1750 case MediaCodec::CB_OUTPUT_FORMAT_CHANGED: {
1751 sp<AMessage> format;
1752 if (!msg->findMessage("format", &format)) {
1753 ALOGE("CB_OUTPUT_FORMAT_CHANGED: format is expected.");
1754 break;
1755 }
Chong Zhang860eff12019-09-16 16:15:00 -07001756 // Here format is MediaCodec's internal copy of output format.
1757 // Make a copy since onHeicFormatChanged() might modify it.
1758 sp<AMessage> formatCopy;
1759 if (format != nullptr) {
1760 formatCopy = format->dup();
1761 }
1762 parent->onHeicFormatChanged(formatCopy);
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -08001763 break;
1764 }
1765
1766 case MediaCodec::CB_ERROR: {
1767 status_t err;
1768 int32_t actionCode;
1769 AString detail;
1770 if (!msg->findInt32("err", &err)) {
1771 ALOGE("CB_ERROR: err is expected.");
1772 break;
1773 }
1774 if (!msg->findInt32("action", &actionCode)) {
1775 ALOGE("CB_ERROR: action is expected.");
1776 break;
1777 }
1778 msg->findString("detail", &detail);
1779 ALOGE("Codec reported error(0x%x), actionCode(%d), detail(%s)",
1780 err, actionCode, detail.c_str());
1781
1782 parent->onHeicCodecError();
1783 break;
1784 }
1785
1786 default: {
1787 ALOGE("kWhatCallbackNotify: callbackID(%d) is unexpected.", cbID);
1788 break;
1789 }
1790 }
1791 break;
1792 }
1793
1794 default:
1795 ALOGE("shouldn't be here");
1796 break;
1797 }
1798}
1799
1800}; // namespace camera3
1801}; // namespace android