blob: 8e9c39ec00282dff5f241ef883958a2ee45fe970 [file] [log] [blame]
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -08001/*
2 * Copyright (C) 2019 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#define LOG_TAG "Camera3-HeicCompositeStream"
18#define ATRACE_TAG ATRACE_TAG_CAMERA
19//#define LOG_NDEBUG 0
20
21#include <linux/memfd.h>
22#include <pthread.h>
23#include <sys/syscall.h>
24
25#include <android/hardware/camera/device/3.5/types.h>
26#include <gui/Surface.h>
27#include <utils/Log.h>
28#include <utils/Trace.h>
29
30#include <media/ICrypto.h>
31#include <media/MediaCodecBuffer.h>
32#include <media/stagefright/foundation/ABuffer.h>
33#include <media/stagefright/foundation/AMessage.h>
34#include <media/stagefright/foundation/MediaDefs.h>
35#include <media/stagefright/MediaCodecConstants.h>
36
37#include "common/CameraDeviceBase.h"
38#include "utils/ExifUtils.h"
39#include "HeicEncoderInfoManager.h"
40#include "HeicCompositeStream.h"
41
42using android::hardware::camera::device::V3_5::CameraBlob;
43using android::hardware::camera::device::V3_5::CameraBlobId;
44
45namespace android {
46namespace camera3 {
47
// Constructor: initializes all bookkeeping to the "no streams configured"
// state. Stream/surface ids start at -1 (invalid) and are filled in by
// createInternalStreams(). Grid geometry defaults to a single 
// HeicEncoderInfoManager::kGridWidth x kGridHeight tile until
// initializeCodec() decides whether tiling is needed.
HeicCompositeStream::HeicCompositeStream(wp<CameraDeviceBase> device,
        wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
        CompositeStream(device, cb),
        mUseHeic(false),
        mNumOutputTiles(1),
        mOutputWidth(0),
        mOutputHeight(0),
        mMaxHeicBufferSize(0),
        mGridWidth(HeicEncoderInfoManager::kGridWidth),
        mGridHeight(HeicEncoderInfoManager::kGridHeight),
        mGridRows(1),
        mGridCols(1),
        mUseGrid(false),
        mAppSegmentStreamId(-1),
        mAppSegmentSurfaceId(-1),
        mAppSegmentBufferAcquired(false),
        mMainImageStreamId(-1),
        mMainImageSurfaceId(-1),
        mYuvBufferAcquired(false),
        mProducerListener(new ProducerListener()),
        mOutputBufferCounter(0),
        mGridTimestampUs(0) {
}
71
// Destructor: releases the codec and drops all pending buffers/consumers.
HeicCompositeStream::~HeicCompositeStream() {
    // Call deinitCodec in case stream hasn't been deleted yet to avoid any
    // memory/resource leak.
    deinitCodec();

    // Drop any input timestamps / codec output descriptors still queued.
    mInputAppSegmentBuffers.clear();
    mCodecOutputBuffers.clear();

    // Invalidate stream ids and release the APP-segment consumer/surface.
    mAppSegmentStreamId = -1;
    mAppSegmentSurfaceId = -1;
    mAppSegmentConsumer.clear();
    mAppSegmentSurface.clear();

    // Invalidate stream ids and release the main-image consumer/surface.
    mMainImageStreamId = -1;
    mMainImageSurfaceId = -1;
    mMainImageConsumer.clear();
    mMainImageSurface.clear();
}
90
// Returns true if the given surface is a HEIC composite stream target, i.e.
// it is configured as a BLOB stream with the HEIF dataspace. Queries the
// surface's format and default dataspace; returns false on query failure.
bool HeicCompositeStream::isHeicCompositeStream(const sp<Surface> &surface) {
    ANativeWindow *anw = surface.get();
    status_t err;
    int format;
    if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
        String8 msg = String8::format("Failed to query Surface format: %s (%d)", strerror(-err),
                err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return false;
    }

    int dataspace;
    if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
        String8 msg = String8::format("Failed to query Surface dataspace: %s (%d)", strerror(-err),
                err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return false;
    }

    // HEIC composite output is identified by BLOB format + HEIF dataspace.
    return ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == HAL_DATASPACE_HEIF));
}
112
// Creates the two internal camera streams backing the composite HEIC output:
//   1. A BLOB stream carrying the JPEG APP segments (Exif etc.).
//   2. A YUV (framework-tiled) or IMPLEMENTATION_DEFINED (codec input
//      surface) stream carrying the main image.
// Also initializes and starts the codec. On success *id/mMainImageStreamId
// hold the main image stream id, and the first consumer in 'consumers'
// becomes the final HEIF output surface.
status_t HeicCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
        bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
        camera3_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
        std::vector<int> *surfaceIds, int /*streamSetId*/, bool /*isShared*/) {

    sp<CameraDeviceBase> device = mDevice.promote();
    if (!device.get()) {
        ALOGE("%s: Invalid camera device!", __FUNCTION__);
        return NO_INIT;
    }

    // Decides HEIC vs HEVC-with-tiling and fills out grid/size members.
    status_t res = initializeCodec(width, height, device);
    if (res != OK) {
        ALOGE("%s: Failed to initialize HEIC/HEVC codec: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return NO_INIT;
    }

    // CPU consumer for the JPEG APP segments blob stream (1 buffer deep).
    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    BufferQueue::createBufferQueue(&producer, &consumer);
    mAppSegmentConsumer = new CpuConsumer(consumer, 1);
    mAppSegmentConsumer->setFrameAvailableListener(this);
    mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
    mAppSegmentSurface = new Surface(producer);

    // APP segment stream is a mAppSegmentMaxSize x 1 BLOB stream.
    res = device->createStream(mAppSegmentSurface, mAppSegmentMaxSize, 1, format,
            kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId, surfaceIds);
    if (res == OK) {
        mAppSegmentSurfaceId = (*surfaceIds)[0];
    } else {
        ALOGE("%s: Failed to create JPEG App segment stream: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    if (!mUseGrid) {
        // No framework tiling: feed the HAL output straight into the codec's
        // own input surface.
        res = mCodec->createInputSurface(&producer);
        if (res != OK) {
            ALOGE("%s: Failed to create input surface for Heic codec: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    } else {
        // Framework tiling: route YUV buffers through a CPU consumer so this
        // class can copy tiles into codec input buffers itself.
        BufferQueue::createBufferQueue(&producer, &consumer);
        mMainImageConsumer = new CpuConsumer(consumer, 1);
        mMainImageConsumer->setFrameAvailableListener(this);
        mMainImageConsumer->setName(String8("Camera3-HeicComposite-HevcInputYUVStream"));
    }
    mMainImageSurface = new Surface(producer);

    res = mCodec->start();
    if (res != OK) {
        ALOGE("%s: Failed to start codec: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    std::vector<int> sourceSurfaceId;
    //Use YUV_888 format if framework tiling is needed.
    int srcStreamFmt = mUseGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    res = device->createStream(mMainImageSurface, width, height, srcStreamFmt, kHeifDataSpace,
            rotation, id, physicalCameraId, &sourceSurfaceId);
    if (res == OK) {
        mMainImageSurfaceId = sourceSurfaceId[0];
        mMainImageStreamId = *id;
    } else {
        ALOGE("%s: Failed to create main image stream: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    // The app-provided surface receives the final muxed HEIF file.
    mOutputSurface = consumers[0];
    res = registerCompositeStreamListener(getStreamId());
    if (res != OK) {
        ALOGE("%s: Failed to register HAL main image stream", __FUNCTION__);
        return res;
    }

    return res;
}
195
// Tears down the internal streams: stops the processing thread, releases the
// codec, deletes the APP-segment HAL stream, and disconnects the output
// surface. The main image stream is deleted by the caller via its id.
status_t HeicCompositeStream::deleteInternalStreams() {
    // Stop and join the processing thread before touching shared state.
    requestExit();
    auto res = join();
    if (res != OK) {
        ALOGE("%s: Failed to join with the main processing thread: %s (%d)", __FUNCTION__,
                strerror(-res), res);
    }

    deinitCodec();

    if (mAppSegmentStreamId >= 0) {
        sp<CameraDeviceBase> device = mDevice.promote();
        if (!device.get()) {
            ALOGE("%s: Invalid camera device!", __FUNCTION__);
            return NO_INIT;
        }

        res = device->deleteStream(mAppSegmentStreamId);
        mAppSegmentStreamId = -1;
    }

    // Disconnect from the client-facing output surface so it can be reused.
    if (mOutputSurface != nullptr) {
        mOutputSurface->disconnect(NATIVE_WINDOW_API_CAMERA);
        mOutputSurface.clear();
    }
    return res;
}
223
224void HeicCompositeStream::onBufferReleased(const BufferInfo& bufferInfo) {
225 Mutex::Autolock l(mMutex);
226
227 if (bufferInfo.mError) return;
228
229 mCodecOutputBufferTimestamps.push(bufferInfo.mTimestamp);
230}
231
// We need to get the settings early to handle the case where the codec output
// arrives earlier than result metadata.
//
// Records the JPEG orientation/quality from the capture request settings,
// keyed by frame number, and creates a pending capture-result slot. Runs for
// the main image stream only; ignored in error state.
void HeicCompositeStream::onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId,
        const CameraMetadata& settings) {
    ATRACE_ASYNC_BEGIN("HEIC capture", frameNumber);

    Mutex::Autolock l(mMutex);
    if (mErrorState || (streamId != getStreamId())) {
        return;
    }

    mPendingCaptureResults.emplace(frameNumber, CameraMetadata());

    camera_metadata_ro_entry entry;

    // Orientation defaults to 0 if the request doesn't specify one.
    int32_t orientation = 0;
    entry = settings.find(ANDROID_JPEG_ORIENTATION);
    if (entry.count == 1) {
        orientation = entry.data.i32[0];
    }

    // Quality defaults to kDefaultJpegQuality if the request doesn't specify.
    int32_t quality = kDefaultJpegQuality;
    entry = settings.find(ANDROID_JPEG_QUALITY);
    if (entry.count == 1) {
        quality = entry.data.i32[0];
    }

    mSettingsByFrameNumber[frameNumber] = std::make_pair(orientation, quality);
}
261
262void HeicCompositeStream::onFrameAvailable(const BufferItem& item) {
263 if (item.mDataSpace == static_cast<android_dataspace>(kAppSegmentDataSpace)) {
264 ALOGV("%s: JPEG APP segments buffer with ts: %" PRIu64 " ms. arrived!",
265 __func__, ns2ms(item.mTimestamp));
266
267 Mutex::Autolock l(mMutex);
268 if (!mErrorState) {
269 mInputAppSegmentBuffers.push_back(item.mTimestamp);
270 mInputReadyCondition.signal();
271 }
272 } else if (item.mDataSpace == kHeifDataSpace) {
273 ALOGV("%s: YUV_888 buffer with ts: %" PRIu64 " ms. arrived!",
274 __func__, ns2ms(item.mTimestamp));
275
276 Mutex::Autolock l(mMutex);
277 if (!mUseGrid) {
278 ALOGE("%s: YUV_888 internal stream is only supported for HEVC tiling",
279 __FUNCTION__);
280 return;
281 }
282 if (!mErrorState) {
283 mInputYuvBuffers.push_back(item.mTimestamp);
284 mInputReadyCondition.signal();
285 }
286 } else {
287 ALOGE("%s: Unexpected data space: 0x%x", __FUNCTION__, item.mDataSpace);
288 }
289}
290
291status_t HeicCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
292 const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/) {
293 if (compositeOutput == nullptr) {
294 return BAD_VALUE;
295 }
296
297 compositeOutput->clear();
298
299 bool useGrid, useHeic;
300 bool isSizeSupported = isSizeSupportedByHeifEncoder(
301 streamInfo.width, streamInfo.height, &useHeic, &useGrid, nullptr);
302 if (!isSizeSupported) {
303 // Size is not supported by either encoder.
304 return OK;
305 }
306
307 compositeOutput->insert(compositeOutput->end(), 2, streamInfo);
308
309 // JPEG APPS segments Blob stream info
310 (*compositeOutput)[0].width = calcAppSegmentMaxSize(ch);
311 (*compositeOutput)[0].height = 1;
312 (*compositeOutput)[0].format = HAL_PIXEL_FORMAT_BLOB;
313 (*compositeOutput)[0].dataSpace = kAppSegmentDataSpace;
314 (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
315
316 // YUV/IMPLEMENTATION_DEFINED stream info
317 (*compositeOutput)[1].width = streamInfo.width;
318 (*compositeOutput)[1].height = streamInfo.height;
319 (*compositeOutput)[1].format = useGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
320 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
321 (*compositeOutput)[1].dataSpace = kHeifDataSpace;
322 (*compositeOutput)[1].consumerUsage = useHeic ? GRALLOC_USAGE_HW_IMAGE_ENCODER :
323 useGrid ? GRALLOC_USAGE_SW_READ_OFTEN : GRALLOC_USAGE_HW_VIDEO_ENCODER;
324
325 return NO_ERROR;
326}
327
328bool HeicCompositeStream::isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
329 bool* useHeic, bool* useGrid, int64_t* stall) {
330 static HeicEncoderInfoManager& heicManager = HeicEncoderInfoManager::getInstance();
331 return heicManager.isSizeSupported(width, height, useHeic, useGrid, stall);
332}
333
334bool HeicCompositeStream::isInMemoryTempFileSupported() {
335 int memfd = syscall(__NR_memfd_create, "HEIF-try-memfd", MFD_CLOEXEC);
336 if (memfd == -1) {
337 if (errno != ENOSYS) {
338 ALOGE("%s: Failed to create tmpfs file. errno %d", __FUNCTION__, errno);
339 }
340 return false;
341 }
342 close(memfd);
343 return true;
344}
345
346void HeicCompositeStream::onHeicOutputFrameAvailable(
347 const CodecOutputBufferInfo& outputBufferInfo) {
348 Mutex::Autolock l(mMutex);
349
350 ALOGV("%s: index %d, offset %d, size %d, time %" PRId64 ", flags 0x%x",
351 __FUNCTION__, outputBufferInfo.index, outputBufferInfo.offset,
352 outputBufferInfo.size, outputBufferInfo.timeUs, outputBufferInfo.flags);
353
354 if (!mErrorState) {
355 if ((outputBufferInfo.size > 0) &&
356 ((outputBufferInfo.flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0)) {
357 mCodecOutputBuffers.push_back(outputBufferInfo);
358 mInputReadyCondition.signal();
359 } else {
360 mCodec->releaseOutputBuffer(outputBufferInfo.index);
361 }
362 } else {
363 mCodec->releaseOutputBuffer(outputBufferInfo.index);
364 }
365}
366
// Codec callback: an input buffer slot is free for filling. Only meaningful
// in framework-tiling (HEVC grid) mode, where this class copies YUV tiles
// into codec input buffers itself.
void HeicCompositeStream::onHeicInputFrameAvailable(int32_t index) {
    Mutex::Autolock l(mMutex);

    if (!mUseGrid) {
        ALOGE("%s: Codec YUV input mode must only be used for Hevc tiling mode", __FUNCTION__);
        return;
    }

    // Queue the free slot and wake the processing thread.
    mCodecInputBuffers.push_back(index);
    mInputReadyCondition.signal();
}
378
// Codec callback: the output format is known. For the HEVC fallback codec,
// rewrite the format so the muxer treats the track as a HEIC image (mime,
// dimensions, and grid geometry). Caches the final format and the number of
// output tiles expected per capture.
void HeicCompositeStream::onHeicFormatChanged(sp<AMessage>& newFormat) {
    if (newFormat == nullptr) {
        ALOGE("%s: newFormat must not be null!", __FUNCTION__);
        return;
    }

    Mutex::Autolock l(mMutex);

    AString mime;
    AString mimeHeic(MIMETYPE_IMAGE_ANDROID_HEIC);
    newFormat->findString(KEY_MIME, &mime);
    if (mime != mimeHeic) {
        // For HEVC codec, below keys need to be filled out or overwritten so that the
        // muxer can handle them as HEIC output image.
        newFormat->setString(KEY_MIME, mimeHeic);
        newFormat->setInt32(KEY_WIDTH, mOutputWidth);
        newFormat->setInt32(KEY_HEIGHT, mOutputHeight);
        if (mUseGrid) {
            newFormat->setInt32(KEY_TILE_WIDTH, mGridWidth);
            newFormat->setInt32(KEY_TILE_HEIGHT, mGridHeight);
            newFormat->setInt32(KEY_GRID_ROWS, mGridRows);
            newFormat->setInt32(KEY_GRID_COLUMNS, mGridCols);
        }
    }
    newFormat->setInt32(KEY_IS_DEFAULT, 1 /*isPrimary*/);

    // One output buffer per grid tile; without a grid there's exactly one.
    int32_t gridRows, gridCols;
    if (newFormat->findInt32(KEY_GRID_ROWS, &gridRows) &&
            newFormat->findInt32(KEY_GRID_COLUMNS, &gridCols)) {
        mNumOutputTiles = gridRows * gridCols;
    } else {
        mNumOutputTiles = 1;
    }

    ALOGV("%s: mNumOutputTiles is %zu", __FUNCTION__, mNumOutputTiles);
    mFormat = newFormat;
}
416
// Codec callback: unrecoverable codec error. Latch the error state; the
// processing thread and other callbacks check mErrorState and bail out.
void HeicCompositeStream::onHeicCodecError() {
    Mutex::Autolock l(mMutex);
    mErrorState = true;
}
421
// Connects and configures the client-facing output surface (BLOB buffers of
// mMaxHeicBufferSize x 1 bytes holding the finished HEIF file), then starts
// the processing thread. Idempotent: returns early if already running.
status_t HeicCompositeStream::configureStream() {
    if (isRunning()) {
        // Processing thread is already running, nothing more to do.
        return NO_ERROR;
    }

    if (mOutputSurface.get() == nullptr) {
        ALOGE("%s: No valid output surface set!", __FUNCTION__);
        return NO_INIT;
    }

    // Connect as producer so onBufferReleased() callbacks are delivered.
    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
    if (res != OK) {
        ALOGE("%s: Unable to connect to native window for stream %d",
                __FUNCTION__, mMainImageStreamId);
        return res;
    }

    if ((res = native_window_set_buffers_format(mOutputSurface.get(), HAL_PIXEL_FORMAT_BLOB))
            != OK) {
        ALOGE("%s: Unable to configure stream buffer format for stream %d", __FUNCTION__,
                mMainImageStreamId);
        return res;
    }

    ANativeWindow *anwConsumer = mOutputSurface.get();
    int maxConsumerBuffers;
    if ((res = anwConsumer->query(anwConsumer, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
                    &maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to query consumer undequeued"
                " buffer count for stream %d", __FUNCTION__, mMainImageStreamId);
        return res;
    }

    // Cannot use SourceSurface buffer count since it could be codec's 512*512 tile
    // buffer count.
    int maxProducerBuffers = 1;
    if ((res = native_window_set_buffer_count(
                    anwConsumer, maxProducerBuffers + maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to set buffer count for stream %d", __FUNCTION__, mMainImageStreamId);
        return res;
    }

    // The HEIF file is delivered as a single mMaxHeicBufferSize x 1 blob.
    if ((res = native_window_set_buffers_dimensions(anwConsumer, mMaxHeicBufferSize, 1)) != OK) {
        ALOGE("%s: Unable to set buffer dimension %zu x 1 for stream %d: %s (%d)",
                __FUNCTION__, mMaxHeicBufferSize, mMainImageStreamId, strerror(-res), res);
        return res;
    }

    run("HeicCompositeStreamProc");

    return NO_ERROR;
}
475
476status_t HeicCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
477 Vector<int32_t>* /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
478 if (outSurfaceMap->find(mAppSegmentStreamId) == outSurfaceMap->end()) {
479 (*outSurfaceMap)[mAppSegmentStreamId] = std::vector<size_t>();
480 outputStreamIds->push_back(mAppSegmentStreamId);
481 }
482 (*outSurfaceMap)[mAppSegmentStreamId].push_back(mAppSegmentSurfaceId);
483
484 if (outSurfaceMap->find(mMainImageStreamId) == outSurfaceMap->end()) {
485 (*outSurfaceMap)[mMainImageStreamId] = std::vector<size_t>();
486 outputStreamIds->push_back(mMainImageStreamId);
487 }
488 (*outSurfaceMap)[mMainImageStreamId].push_back(mMainImageSurfaceId);
489
490 if (currentStreamId != nullptr) {
491 *currentStreamId = mMainImageStreamId;
492 }
493
494 return NO_ERROR;
495}
496
497void HeicCompositeStream::onShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) {
498 Mutex::Autolock l(mMutex);
499 if (mErrorState) {
500 return;
501 }
502
503 if (mSettingsByFrameNumber.find(resultExtras.frameNumber) != mSettingsByFrameNumber.end()) {
504 mFrameNumberMap.emplace(resultExtras.frameNumber, timestamp);
505 mSettingsByTimestamp[timestamp] = mSettingsByFrameNumber[resultExtras.frameNumber];
506 mSettingsByFrameNumber.erase(resultExtras.frameNumber);
507 mInputReadyCondition.signal();
508 }
509}
510
511void HeicCompositeStream::compilePendingInputLocked() {
512 while (!mSettingsByTimestamp.empty()) {
513 auto it = mSettingsByTimestamp.begin();
514 mPendingInputFrames[it->first].orientation = it->second.first;
515 mPendingInputFrames[it->first].quality = it->second.second;
516 mSettingsByTimestamp.erase(it);
517 }
518
519 while (!mInputAppSegmentBuffers.empty() && !mAppSegmentBufferAcquired) {
520 CpuConsumer::LockedBuffer imgBuffer;
521 auto it = mInputAppSegmentBuffers.begin();
522 auto res = mAppSegmentConsumer->lockNextBuffer(&imgBuffer);
523 if (res == NOT_ENOUGH_DATA) {
524 // Canot not lock any more buffers.
525 break;
526 } else if ((res != OK) || (*it != imgBuffer.timestamp)) {
527 if (res != OK) {
528 ALOGE("%s: Error locking JPEG_APP_SEGMENTS image buffer: %s (%d)", __FUNCTION__,
529 strerror(-res), res);
530 } else {
531 ALOGE("%s: Expecting JPEG_APP_SEGMENTS buffer with time stamp: %" PRId64
532 " received buffer with time stamp: %" PRId64, __FUNCTION__,
533 *it, imgBuffer.timestamp);
534 }
535 mPendingInputFrames[*it].error = true;
536 mInputAppSegmentBuffers.erase(it);
537 continue;
538 }
539
540 if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
541 (mPendingInputFrames[imgBuffer.timestamp].error)) {
542 mAppSegmentConsumer->unlockBuffer(imgBuffer);
543 } else {
544 mPendingInputFrames[imgBuffer.timestamp].appSegmentBuffer = imgBuffer;
545 mAppSegmentBufferAcquired = true;
546 }
547 mInputAppSegmentBuffers.erase(it);
548 }
549
550 while (!mInputYuvBuffers.empty() && !mYuvBufferAcquired) {
551 CpuConsumer::LockedBuffer imgBuffer;
552 auto it = mInputYuvBuffers.begin();
553 auto res = mMainImageConsumer->lockNextBuffer(&imgBuffer);
554 if (res == NOT_ENOUGH_DATA) {
555 // Canot not lock any more buffers.
556 break;
557 } else if (res != OK) {
558 ALOGE("%s: Error locking YUV_888 image buffer: %s (%d)", __FUNCTION__,
559 strerror(-res), res);
560 mPendingInputFrames[*it].error = true;
561 mInputYuvBuffers.erase(it);
562 continue;
563 } else if (*it != imgBuffer.timestamp) {
564 ALOGW("%s: Expecting YUV_888 buffer with time stamp: %" PRId64 " received buffer with "
565 "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
566 mPendingInputFrames[*it].error = true;
567 mInputYuvBuffers.erase(it);
568 continue;
569 }
570
571 if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
572 (mPendingInputFrames[imgBuffer.timestamp].error)) {
573 mMainImageConsumer->unlockBuffer(imgBuffer);
574 } else {
575 mPendingInputFrames[imgBuffer.timestamp].yuvBuffer = imgBuffer;
576 mYuvBufferAcquired = true;
577 }
578 mInputYuvBuffers.erase(it);
579 }
580
581 while (!mCodecOutputBuffers.empty()) {
582 auto it = mCodecOutputBuffers.begin();
583 // Bitstream buffer timestamp doesn't necessarily directly correlate with input
584 // buffer timestamp. Assume encoder input to output is FIFO, use a queue
585 // to look up timestamp.
586 int64_t bufferTime = -1;
587 if (mCodecOutputBufferTimestamps.empty()) {
588 ALOGE("%s: Failed to find buffer timestamp for codec output buffer!", __FUNCTION__);
589 } else {
590 // Direct mapping between camera timestamp (in ns) and codec timestamp (in us).
591 bufferTime = mCodecOutputBufferTimestamps.front();
592 mOutputBufferCounter++;
593 if (mOutputBufferCounter == mNumOutputTiles) {
594 mCodecOutputBufferTimestamps.pop();
595 mOutputBufferCounter = 0;
596 }
597
598 mPendingInputFrames[bufferTime].codecOutputBuffers.push_back(*it);
599 }
600 mCodecOutputBuffers.erase(it);
601 }
602
603 while (!mFrameNumberMap.empty()) {
604 auto it = mFrameNumberMap.begin();
605 mPendingInputFrames[it->second].frameNumber = it->first;
606 mFrameNumberMap.erase(it);
607 }
608
609 // Heic composition doesn't depend on capture result, so no need to check
610 // mErrorFrameNumbers. Just remove them.
611 mErrorFrameNumbers.clear();
612
613 // Distribute codec input buffers to be filled out from YUV output
614 for (auto it = mPendingInputFrames.begin();
615 it != mPendingInputFrames.end() && mCodecInputBuffers.size() > 0; it++) {
616 InputFrame& inputFrame(it->second);
617 if (inputFrame.codecInputCounter < mGridRows * mGridCols) {
618 // Available input tiles that are required for the current input
619 // image.
620 size_t newInputTiles = std::min(mCodecInputBuffers.size(),
621 mGridRows * mGridCols - inputFrame.codecInputCounter);
622 for (size_t i = 0; i < newInputTiles; i++) {
623 CodecInputBufferInfo inputInfo =
624 { mCodecInputBuffers[0], mGridTimestampUs++, inputFrame.codecInputCounter };
625 inputFrame.codecInputBuffers.push_back(inputInfo);
626
627 mCodecInputBuffers.erase(mCodecInputBuffers.begin());
628 inputFrame.codecInputCounter++;
629 }
630 break;
631 }
632 }
633}
634
635bool HeicCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*out*/) {
636 if (currentTs == nullptr) {
637 return false;
638 }
639
640 bool newInputAvailable = false;
641 for (const auto& it : mPendingInputFrames) {
642 bool appSegmentBufferReady = (it.second.appSegmentBuffer.data != nullptr) &&
643 !it.second.appSegmentWritten;
644 bool codecOutputReady = !it.second.codecOutputBuffers.empty();
645 bool codecInputReady = (it.second.yuvBuffer.data != nullptr) &&
646 (!it.second.codecInputBuffers.empty());
647 if ((!it.second.error) &&
648 (it.first < *currentTs) &&
649 (appSegmentBufferReady || codecOutputReady || codecInputReady)) {
650 *currentTs = it.first;
651 newInputAvailable = true;
652 break;
653 }
654 }
655
656 return newInputAvailable;
657}
658
659int64_t HeicCompositeStream::getNextFailingInputLocked(int64_t *currentTs /*out*/) {
660 int64_t res = -1;
661 if (currentTs == nullptr) {
662 return res;
663 }
664
665 for (const auto& it : mPendingInputFrames) {
666 if (it.second.error && !it.second.errorNotified && (it.first < *currentTs)) {
667 *currentTs = it.first;
668 res = it.second.frameNumber;
669 break;
670 }
671 }
672
673 return res;
674}
675
// Drives one pending capture through the pipeline: feeds YUV tiles to the
// codec, lazily starts the muxer once the first codec output exists, writes
// the APP segments and any encoded tiles to the muxer, and finalizes the
// output once all tiles and the APP segment have been written.
status_t HeicCompositeStream::processInputFrame(nsecs_t timestamp,
        InputFrame &inputFrame) {
    ATRACE_CALL();
    status_t res = OK;

    bool appSegmentBufferReady = inputFrame.appSegmentBuffer.data != nullptr &&
            !inputFrame.appSegmentWritten;
    bool codecOutputReady = inputFrame.codecOutputBuffers.size() > 0;
    bool codecInputReady = inputFrame.yuvBuffer.data != nullptr &&
            !inputFrame.codecInputBuffers.empty();

    if (!appSegmentBufferReady && !codecOutputReady && !codecInputReady) {
        ALOGW("%s: No valid appSegmentBuffer/codec input/outputBuffer available!", __FUNCTION__);
        return OK;
    }

    // Handle inputs for Hevc tiling
    if (codecInputReady) {
        res = processCodecInputFrame(inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process codec input frame: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Initialize and start muxer if not yet done so
    if (inputFrame.muxer == nullptr) {
        res = startMuxerForInputFrame(timestamp, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to create and start muxer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Write JPEG APP segments data to the muxer.
    if (appSegmentBufferReady && inputFrame.muxer != nullptr) {
        res = processAppSegment(timestamp, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process JPEG APP segments: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Write media codec bitstream buffers to muxer.
    while (!inputFrame.codecOutputBuffers.empty()) {
        res = processOneCodecOutputFrame(timestamp, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process codec output frame: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // All tiles written and APP segment in place: finish the HEIF file.
    if (inputFrame.appSegmentWritten && inputFrame.pendingOutputTiles == 0) {
        res = processCompletedInputFrame(timestamp, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process completed input frame: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    return res;
}
743
// Sets up the per-capture muxer: dequeues the client output buffer, creates
// an in-memory (memfd) temp file, wraps it in a MediaMuxer in HEIF mode,
// applies orientation/quality, adds the image track from the cached codec
// format, and starts the muxer. No-op (returns OK) until the first codec
// output buffer for this capture exists, since the track format is only
// final at that point.
status_t HeicCompositeStream::startMuxerForInputFrame(nsecs_t timestamp, InputFrame &inputFrame) {
    sp<ANativeWindow> outputANW = mOutputSurface;
    if (inputFrame.codecOutputBuffers.size() == 0) {
        // No single codec output buffer has been generated. Continue to
        // wait.
        return OK;
    }

    auto res = outputANW->dequeueBuffer(mOutputSurface.get(), &inputFrame.anb, &inputFrame.fenceFd);
    if (res != OK) {
        ALOGE("%s: Error retrieving output buffer: %s (%d)", __FUNCTION__, strerror(-res),
                res);
        return res;
    }

    // Combine current thread id, stream id and timestamp to uniquely identify image.
    std::ostringstream tempOutputFile;
    tempOutputFile << "HEIF-" << pthread_self() << "-"
            << getStreamId() << "-" << timestamp;
    inputFrame.fileFd = syscall(__NR_memfd_create, tempOutputFile.str().c_str(), MFD_CLOEXEC);
    if (inputFrame.fileFd < 0) {
        ALOGE("%s: Failed to create file %s. Error no is %d", __FUNCTION__,
                tempOutputFile.str().c_str(), errno);
        return NO_INIT;
    }
    inputFrame.muxer = new MediaMuxer(inputFrame.fileFd, MediaMuxer::OUTPUT_FORMAT_HEIF);
    if (inputFrame.muxer == nullptr) {
        ALOGE("%s: Failed to create MediaMuxer for file fd %d",
                __FUNCTION__, inputFrame.fileFd);
        return NO_INIT;
    }

    res = inputFrame.muxer->setOrientationHint(inputFrame.orientation);
    if (res != OK) {
        ALOGE("%s: Failed to setOrientationHint: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }
    // Set encoder quality
    {
        sp<AMessage> qualityParams = new AMessage;
        qualityParams->setInt32(PARAMETER_KEY_VIDEO_BITRATE, inputFrame.quality);
        res = mCodec->setParameters(qualityParams);
        if (res != OK) {
            ALOGE("%s: Failed to set codec quality: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    }

    // mFormat was cached by onHeicFormatChanged().
    ssize_t trackId = inputFrame.muxer->addTrack(mFormat);
    if (trackId < 0) {
        ALOGE("%s: Failed to addTrack to the muxer: %zd", __FUNCTION__, trackId);
        return NO_INIT;
    }

    inputFrame.trackIndex = trackId;
    inputFrame.pendingOutputTiles = mNumOutputTiles;

    res = inputFrame.muxer->start();
    if (res != OK) {
        ALOGE("%s: Failed to start MediaMuxer: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    return OK;
}
812
813status_t HeicCompositeStream::processAppSegment(nsecs_t timestamp, InputFrame &inputFrame) {
814 size_t app1Size = 0;
815 auto appSegmentSize = findAppSegmentsSize(inputFrame.appSegmentBuffer.data,
816 inputFrame.appSegmentBuffer.width * inputFrame.appSegmentBuffer.height,
817 &app1Size);
818 ALOGV("%s: appSegmentSize is %zu, width %d, height %d, app1Size %zu", __FUNCTION__,
819 appSegmentSize, inputFrame.appSegmentBuffer.width,
820 inputFrame.appSegmentBuffer.height, app1Size);
821 if (appSegmentSize == 0) {
822 ALOGE("%s: Failed to find JPEG APP segment size", __FUNCTION__);
823 return NO_INIT;
824 }
825
826 std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
827 auto exifRes = exifUtils->initialize(inputFrame.appSegmentBuffer.data, app1Size);
828 if (!exifRes) {
829 ALOGE("%s: Failed to initialize ExifUtils object!", __FUNCTION__);
830 return BAD_VALUE;
831 }
832 //TODO: Use capture result metadata and static metadata to fill out the
833 //rest.
834 CameraMetadata dummyMeta;
835 exifRes = exifUtils->setFromMetadata(dummyMeta, mOutputWidth, mOutputHeight);
836 if (!exifRes) {
837 ALOGE("%s: Failed to set Exif tags using metadata and main image sizes", __FUNCTION__);
838 return BAD_VALUE;
839 }
840 exifRes = exifUtils->setOrientation(inputFrame.orientation);
841 if (!exifRes) {
842 ALOGE("%s: ExifUtils failed to set orientation", __FUNCTION__);
843 return BAD_VALUE;
844 }
845 exifRes = exifUtils->generateApp1();
846 if (!exifRes) {
847 ALOGE("%s: ExifUtils failed to generate APP1 segment", __FUNCTION__);
848 return BAD_VALUE;
849 }
850
851 unsigned int newApp1Length = exifUtils->getApp1Length();
852 const uint8_t *newApp1Segment = exifUtils->getApp1Buffer();
853
854 //Assemble the APP1 marker buffer required by MediaCodec
855 uint8_t kExifApp1Marker[] = {'E', 'x', 'i', 'f', 0xFF, 0xE1, 0x00, 0x00};
856 kExifApp1Marker[6] = static_cast<uint8_t>(newApp1Length >> 8);
857 kExifApp1Marker[7] = static_cast<uint8_t>(newApp1Length & 0xFF);
858 size_t appSegmentBufferSize = sizeof(kExifApp1Marker) +
859 appSegmentSize - app1Size + newApp1Length;
860 uint8_t* appSegmentBuffer = new uint8_t[appSegmentBufferSize];
861 memcpy(appSegmentBuffer, kExifApp1Marker, sizeof(kExifApp1Marker));
862 memcpy(appSegmentBuffer + sizeof(kExifApp1Marker), newApp1Segment, newApp1Length);
863 if (appSegmentSize - app1Size > 0) {
864 memcpy(appSegmentBuffer + sizeof(kExifApp1Marker) + newApp1Length,
865 inputFrame.appSegmentBuffer.data + app1Size, appSegmentSize - app1Size);
866 }
867
868 sp<ABuffer> aBuffer = new ABuffer(appSegmentBuffer, appSegmentBufferSize);
869 auto res = inputFrame.muxer->writeSampleData(aBuffer, inputFrame.trackIndex,
870 timestamp, MediaCodec::BUFFER_FLAG_MUXER_DATA);
871 delete[] appSegmentBuffer;
872
873 if (res != OK) {
874 ALOGE("%s: Failed to write JPEG APP segments to muxer: %s (%d)",
875 __FUNCTION__, strerror(-res), res);
876 return res;
877 }
878 inputFrame.appSegmentWritten = true;
879
880 return OK;
881}
882
// Framework-tiling path: for each free codec input slot assigned to this
// frame, copies the corresponding tile out of the locked YUV buffer and
// queues it to the codec. Edge tiles are clamped to the output dimensions.
status_t HeicCompositeStream::processCodecInputFrame(InputFrame &inputFrame) {
    for (auto& inputBuffer : inputFrame.codecInputBuffers) {
        sp<MediaCodecBuffer> buffer;
        auto res = mCodec->getInputBuffer(inputBuffer.index, &buffer);
        if (res != OK) {
            ALOGE("%s: Error getting codec input buffer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }

        // Copy one tile from source to destination.
        size_t tileX = inputBuffer.tileIndex % mGridCols;
        size_t tileY = inputBuffer.tileIndex / mGridCols;
        size_t top = mGridHeight * tileY;
        size_t left = mGridWidth * tileX;
        // Right/bottom edge tiles may be narrower/shorter than the grid cell.
        size_t width = (tileX == static_cast<size_t>(mGridCols) - 1) ?
                mOutputWidth - tileX * mGridWidth : mGridWidth;
        size_t height = (tileY == static_cast<size_t>(mGridRows) - 1) ?
                mOutputHeight - tileY * mGridHeight : mGridHeight;
        ALOGV("%s: inputBuffer tileIndex [%zu, %zu], top %zu, left %zu, width %zu, height %zu",
                __FUNCTION__, tileX, tileY, top, left, width, height);

        res = copyOneYuvTile(buffer, inputFrame.yuvBuffer, top, left, width, height);
        if (res != OK) {
            ALOGE("%s: Failed to copy YUV tile %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }

        res = mCodec->queueInputBuffer(inputBuffer.index, 0, buffer->capacity(),
                inputBuffer.timeUs, 0, nullptr /*errorDetailMsg*/);
        if (res != OK) {
            ALOGE("%s: Failed to queueInputBuffer to Codec: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    }

    // All assigned slots consumed; they now belong to the codec again.
    inputFrame.codecInputBuffers.clear();
    return OK;
}
924
925status_t HeicCompositeStream::processOneCodecOutputFrame(nsecs_t timestamp,
926 InputFrame &inputFrame) {
927 auto it = inputFrame.codecOutputBuffers.begin();
928 sp<MediaCodecBuffer> buffer;
929 status_t res = mCodec->getOutputBuffer(it->index, &buffer);
930 if (res != OK) {
931 ALOGE("%s: Error getting Heic codec output buffer at index %d: %s (%d)",
932 __FUNCTION__, it->index, strerror(-res), res);
933 return res;
934 }
935 if (buffer == nullptr) {
936 ALOGE("%s: Invalid Heic codec output buffer at index %d",
937 __FUNCTION__, it->index);
938 return BAD_VALUE;
939 }
940
941 sp<ABuffer> aBuffer = new ABuffer(buffer->data(), buffer->size());
942 res = inputFrame.muxer->writeSampleData(
943 aBuffer, inputFrame.trackIndex, timestamp, 0 /*flags*/);
944 if (res != OK) {
945 ALOGE("%s: Failed to write buffer index %d to muxer: %s (%d)",
946 __FUNCTION__, it->index, strerror(-res), res);
947 return res;
948 }
949
950 mCodec->releaseOutputBuffer(it->index);
951 if (inputFrame.pendingOutputTiles == 0) {
952 ALOGW("%s: Codec generated more tiles than expected!", __FUNCTION__);
953 } else {
954 inputFrame.pendingOutputTiles--;
955 }
956
957 inputFrame.codecOutputBuffers.erase(inputFrame.codecOutputBuffers.begin());
958 return OK;
959}
960
961status_t HeicCompositeStream::processCompletedInputFrame(nsecs_t timestamp,
962 InputFrame &inputFrame) {
963 sp<ANativeWindow> outputANW = mOutputSurface;
964 inputFrame.muxer->stop();
965
966 // Copy the content of the file to memory.
967 sp<GraphicBuffer> gb = GraphicBuffer::from(inputFrame.anb);
968 void* dstBuffer;
969 auto res = gb->lockAsync(GRALLOC_USAGE_SW_WRITE_OFTEN, &dstBuffer, inputFrame.fenceFd);
970 if (res != OK) {
971 ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
972 strerror(-res), res);
973 return res;
974 }
975
976 off_t fSize = lseek(inputFrame.fileFd, 0, SEEK_END);
977 if (static_cast<size_t>(fSize) > mMaxHeicBufferSize - sizeof(CameraBlob)) {
978 ALOGE("%s: Error: MediaMuxer output size %ld is larger than buffer sizer %zu",
979 __FUNCTION__, fSize, mMaxHeicBufferSize - sizeof(CameraBlob));
980 return BAD_VALUE;
981 }
982
983 lseek(inputFrame.fileFd, 0, SEEK_SET);
984 ssize_t bytesRead = read(inputFrame.fileFd, dstBuffer, fSize);
985 if (bytesRead < fSize) {
986 ALOGE("%s: Only %zd of %ld bytes read", __FUNCTION__, bytesRead, fSize);
987 return BAD_VALUE;
988 }
989
990 close(inputFrame.fileFd);
991 inputFrame.fileFd = -1;
992
993 // Fill in HEIC header
994 uint8_t *header = static_cast<uint8_t*>(dstBuffer) + mMaxHeicBufferSize - sizeof(CameraBlob);
995 struct CameraBlob *blobHeader = (struct CameraBlob *)header;
996 // Must be in sync with CAMERA3_HEIC_BLOB_ID in android_media_Utils.cpp
997 blobHeader->blobId = static_cast<CameraBlobId>(0x00FE);
998 blobHeader->blobSize = fSize;
999
1000 res = native_window_set_buffers_timestamp(mOutputSurface.get(), timestamp);
1001 if (res != OK) {
1002 ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
1003 __FUNCTION__, getStreamId(), strerror(-res), res);
1004 return res;
1005 }
1006
1007 res = outputANW->queueBuffer(mOutputSurface.get(), inputFrame.anb, /*fence*/ -1);
1008 if (res != OK) {
1009 ALOGE("%s: Failed to queueBuffer to Heic stream: %s (%d)", __FUNCTION__,
1010 strerror(-res), res);
1011 return res;
1012 }
1013 inputFrame.anb = nullptr;
1014
1015 return OK;
1016}
1017
1018
1019void HeicCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/) {
1020 if (inputFrame == nullptr) {
1021 return;
1022 }
1023
1024 if (inputFrame->appSegmentBuffer.data != nullptr) {
1025 mAppSegmentConsumer->unlockBuffer(inputFrame->appSegmentBuffer);
1026 inputFrame->appSegmentBuffer.data = nullptr;
1027 mAppSegmentBufferAcquired = false;
1028 }
1029
1030 while (!inputFrame->codecOutputBuffers.empty()) {
1031 auto it = inputFrame->codecOutputBuffers.begin();
1032 ALOGV("%s: releaseOutputBuffer index %d", __FUNCTION__, it->index);
1033 mCodec->releaseOutputBuffer(it->index);
1034 inputFrame->codecOutputBuffers.erase(it);
1035 }
1036
1037 if (inputFrame->yuvBuffer.data != nullptr) {
1038 mMainImageConsumer->unlockBuffer(inputFrame->yuvBuffer);
1039 inputFrame->yuvBuffer.data = nullptr;
1040 mYuvBufferAcquired = false;
1041 }
1042
1043 while (!inputFrame->codecInputBuffers.empty()) {
1044 auto it = inputFrame->codecInputBuffers.begin();
1045 inputFrame->codecInputBuffers.erase(it);
1046 }
1047
1048 if ((inputFrame->error || mErrorState) && !inputFrame->errorNotified) {
1049 notifyError(inputFrame->frameNumber);
1050 inputFrame->errorNotified = true;
1051 }
1052
1053 if (inputFrame->fileFd >= 0) {
1054 close(inputFrame->fileFd);
1055 inputFrame->fileFd = -1;
1056 }
1057
1058 if (inputFrame->anb != nullptr) {
1059 sp<ANativeWindow> outputANW = mOutputSurface;
1060 outputANW->cancelBuffer(mOutputSurface.get(), inputFrame->anb, /*fence*/ -1);
1061 inputFrame->anb = nullptr;
1062 }
1063}
1064
1065void HeicCompositeStream::releaseInputFramesLocked(int64_t currentTs) {
1066 auto it = mPendingInputFrames.begin();
1067 while (it != mPendingInputFrames.end()) {
1068 if (it->first <= currentTs) {
1069 releaseInputFrameLocked(&it->second);
1070 it = mPendingInputFrames.erase(it);
1071 } else {
1072 it++;
1073 }
1074 }
1075}
1076
// Create and configure the HEIC (or HEVC fallback) encoder for the given
// output dimensions, including its dedicated loopers and async callback
// plumbing, and derive the grid/tile parameters used by the rest of the
// stream.
//
// On success the following members are initialized: mUseHeic, mUseGrid,
// mCodec, mCodecLooper, mCallbackLooper, mGridWidth/Height/Rows/Cols,
// mOutputWidth/Height, mAppSegmentMaxSize and mMaxHeicBufferSize.
// Returns BAD_VALUE if no encoder supports the size, NO_INIT/NO_MEMORY on
// setup failures, or the codec's own error code from setCallback/configure.
status_t HeicCompositeStream::initializeCodec(uint32_t width, uint32_t height,
        const sp<CameraDeviceBase>& cameraDevice) {
    ALOGV("%s", __FUNCTION__);

    // Query whether a HEIC or HEVC encoder supports this size, and whether
    // tiled (grid) encoding is required for it.
    bool useGrid = false;
    bool isSizeSupported = isSizeSupportedByHeifEncoder(width, height,
            &mUseHeic, &useGrid, nullptr);
    if (!isSizeSupported) {
        ALOGE("%s: Encoder doesnt' support size %u x %u!",
                __FUNCTION__, width, height);
        return BAD_VALUE;
    }

    // Create Looper for MediaCodec.
    auto desiredMime = mUseHeic ? MIMETYPE_IMAGE_ANDROID_HEIC : MIMETYPE_VIDEO_HEVC;
    mCodecLooper = new ALooper;
    mCodecLooper->setName("Camera3-HeicComposite-MediaCodecLooper");
    status_t res = mCodecLooper->start(
            false,   // runOnCallingThread
            false,    // canCallJava
            PRIORITY_AUDIO);
    if (res != OK) {
        ALOGE("%s: Failed to start codec looper: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return NO_INIT;
    }

    // Create HEIC/HEVC codec.
    mCodec = MediaCodec::CreateByType(mCodecLooper, desiredMime, true /*encoder*/);
    if (mCodec == nullptr) {
        ALOGE("%s: Failed to create codec for %s", __FUNCTION__, desiredMime);
        return NO_INIT;
    }

    // Create Looper and handler for Codec callback.
    mCodecCallbackHandler = new CodecCallbackHandler(this);
    if (mCodecCallbackHandler == nullptr) {
        ALOGE("%s: Failed to create codec callback handler", __FUNCTION__);
        return NO_MEMORY;
    }
    mCallbackLooper = new ALooper;
    mCallbackLooper->setName("Camera3-HeicComposite-MediaCodecCallbackLooper");
    res = mCallbackLooper->start(
            false,   // runOnCallingThread
            false,    // canCallJava
            PRIORITY_AUDIO);
    if (res != OK) {
        ALOGE("%s: Failed to start media callback looper: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return NO_INIT;
    }
    mCallbackLooper->registerHandler(mCodecCallbackHandler);

    // Route all codec notifications to CodecCallbackHandler::onMessageReceived.
    mAsyncNotify = new AMessage(kWhatCallbackNotify, mCodecCallbackHandler);
    res = mCodec->setCallback(mAsyncNotify);
    if (res != OK) {
        ALOGE("%s: Failed to set MediaCodec callback: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    // Create output format and configure the Codec.
    sp<AMessage> outputFormat = new AMessage();
    outputFormat->setString(KEY_MIME, desiredMime);
    outputFormat->setInt32(KEY_BITRATE_MODE, BITRATE_MODE_CQ);
    outputFormat->setInt32(KEY_QUALITY, kDefaultJpegQuality);
    // Ask codec to skip timestamp check and encode all frames.
    outputFormat->setInt64("max-pts-gap-to-encoder", kNoFrameDropMaxPtsGap);

    // With grid enabled (or HEIC output), the image is encoded as a grid of
    // kGridWidth x kGridHeight tiles; otherwise as a single frame.
    int32_t gridWidth, gridHeight, gridRows, gridCols;
    if (useGrid || mUseHeic) {
        gridWidth = HeicEncoderInfoManager::kGridWidth;
        gridHeight = HeicEncoderInfoManager::kGridHeight;
        gridRows = (height + gridHeight - 1)/gridHeight;
        gridCols = (width + gridWidth - 1)/gridWidth;

        if (mUseHeic) {
            outputFormat->setInt32(KEY_TILE_WIDTH, gridWidth);
            outputFormat->setInt32(KEY_TILE_HEIGHT, gridHeight);
            outputFormat->setInt32(KEY_GRID_COLUMNS, gridCols);
            outputFormat->setInt32(KEY_GRID_ROWS, gridRows);
        }

    } else {
        gridWidth = width;
        gridHeight = height;
        gridRows = 1;
        gridCols = 1;
    }

    // In grid mode the codec's frame size is one tile; each full image is
    // submitted as gridRows*gridCols input frames.
    outputFormat->setInt32(KEY_WIDTH, !useGrid ? width : gridWidth);
    outputFormat->setInt32(KEY_HEIGHT, !useGrid ? height : gridHeight);
    outputFormat->setInt32(KEY_I_FRAME_INTERVAL, 0);
    outputFormat->setInt32(KEY_COLOR_FORMAT,
            useGrid ? COLOR_FormatYUV420Flexible : COLOR_FormatSurface);
    outputFormat->setInt32(KEY_FRAME_RATE, gridRows * gridCols);
    // This only serves as a hint to encoder when encoding is not real-time.
    outputFormat->setInt32(KEY_OPERATING_RATE, useGrid ? kGridOpRate : kNoGridOpRate);

    res = mCodec->configure(outputFormat, nullptr /*nativeWindow*/,
            nullptr /*crypto*/, CONFIGURE_FLAG_ENCODE);
    if (res != OK) {
        ALOGE("%s: Failed to configure codec: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    // Commit the derived parameters only after the codec accepted the config.
    mGridWidth = gridWidth;
    mGridHeight = gridHeight;
    mGridRows = gridRows;
    mGridCols = gridCols;
    mUseGrid = useGrid;
    mOutputWidth = width;
    mOutputHeight = height;
    mAppSegmentMaxSize = calcAppSegmentMaxSize(cameraDevice->info());
    // Worst-case output: full YUV420 image plus all APP segments.
    mMaxHeicBufferSize = mOutputWidth * mOutputHeight * 3 / 2 + mAppSegmentMaxSize;

    return OK;
}
1196
// Tear down everything initializeCodec() created.
// Ordering matters: the codec is stopped and released before the loopers it
// is attached to are stopped, then the cached callback message and output
// format are dropped.
void HeicCompositeStream::deinitCodec() {
    ALOGV("%s", __FUNCTION__);
    if (mCodec != nullptr) {
        mCodec->stop();
        mCodec->release();
        mCodec.clear();
    }

    if (mCodecLooper != nullptr) {
        mCodecLooper->stop();
        mCodecLooper.clear();
    }

    if (mCallbackLooper != nullptr) {
        mCallbackLooper->stop();
        mCallbackLooper.clear();
    }

    mAsyncNotify.clear();
    mFormat.clear();
}
1218
// Return the size of the complete list of app segment, 0 indicates failure
//
// Layout expected in 'appSegmentBuffer' (total capacity 'maxSize'):
//   [APP1 segment][optional APP2..APPn segments][padding...][CameraBlob trailer]
// The CameraBlob trailer at the end of the buffer carries the total byte
// count of the segment list. On success '*app1SegmentSize' is set to the
// APP1 segment size including its 2-byte marker.
size_t HeicCompositeStream::findAppSegmentsSize(const uint8_t* appSegmentBuffer,
        size_t maxSize, size_t *app1SegmentSize) {
    if (appSegmentBuffer == nullptr || app1SegmentSize == nullptr) {
        ALOGE("%s: Invalid input appSegmentBuffer %p, app1SegmentSize %p",
                __FUNCTION__, appSegmentBuffer, app1SegmentSize);
        return 0;
    }

    size_t expectedSize = 0;
    // First check for EXIF transport header at the end of the buffer
    const uint8_t *header = appSegmentBuffer + (maxSize - sizeof(struct CameraBlob));
    const struct CameraBlob *blob = (const struct CameraBlob*)(header);
    if (blob->blobId != CameraBlobId::JPEG_APP_SEGMENTS) {
        ALOGE("%s: Invalid EXIF blobId %hu", __FUNCTION__, blob->blobId);
        return 0;
    }

    // Bounding expectedSize by (maxSize - sizeof(CameraBlob)) keeps all the
    // marker/length reads below inside the buffer.
    expectedSize = blob->blobSize;
    if (expectedSize == 0 || expectedSize > maxSize - sizeof(struct CameraBlob)) {
        ALOGE("%s: Invalid blobSize %zu.", __FUNCTION__, expectedSize);
        return 0;
    }

    uint32_t totalSize = 0;

    // Verify APP1 marker (mandatory)
    uint8_t app1Marker[] = {0xFF, 0xE1};
    if (memcmp(appSegmentBuffer, app1Marker, sizeof(app1Marker))) {
        ALOGE("%s: Invalid APP1 marker: %x, %x", __FUNCTION__,
                appSegmentBuffer[0], appSegmentBuffer[1]);
        return 0;
    }
    totalSize += sizeof(app1Marker);

    // JPEG segment length is big-endian and includes the two length bytes
    // themselves (but not the marker), so adding it lands on the next marker.
    uint16_t app1Size = (static_cast<uint16_t>(appSegmentBuffer[totalSize]) << 8) +
            appSegmentBuffer[totalSize+1];
    totalSize += app1Size;

    ALOGV("%s: Expected APP segments size %zu, APP1 segment size %u",
            __FUNCTION__, expectedSize, app1Size);
    // Walk any following APPn (n in [2, 15]) segments until the advertised
    // total is consumed.
    while (totalSize < expectedSize) {
        if (appSegmentBuffer[totalSize] != 0xFF ||
                appSegmentBuffer[totalSize+1] <= 0xE1 ||
                appSegmentBuffer[totalSize+1] > 0xEF) {
            // Invalid APPn marker
            ALOGE("%s: Invalid APPn marker: %x, %x", __FUNCTION__,
                    appSegmentBuffer[totalSize], appSegmentBuffer[totalSize+1]);
            return 0;
        }
        totalSize += 2;

        uint16_t appnSize = (static_cast<uint16_t>(appSegmentBuffer[totalSize]) << 8) +
                appSegmentBuffer[totalSize+1];
        totalSize += appnSize;
    }

    // The walk must land exactly on the advertised size; overshoot means a
    // corrupt segment list.
    if (totalSize != expectedSize) {
        ALOGE("%s: Invalid JPEG APP segments: totalSize %u vs expected size %zu",
                __FUNCTION__, totalSize, expectedSize);
        return 0;
    }

    *app1SegmentSize = app1Size + sizeof(app1Marker);
    return expectedSize;
}
1285
1286int64_t HeicCompositeStream::findTimestampInNsLocked(int64_t timeInUs) {
1287 for (const auto& fn : mFrameNumberMap) {
1288 if (timeInUs == ns2us(fn.second)) {
1289 return fn.second;
1290 }
1291 }
1292 for (const auto& inputFrame : mPendingInputFrames) {
1293 if (timeInUs == ns2us(inputFrame.first)) {
1294 return inputFrame.first;
1295 }
1296 }
1297 return -1;
1298}
1299
1300status_t HeicCompositeStream::copyOneYuvTile(sp<MediaCodecBuffer>& codecBuffer,
1301 const CpuConsumer::LockedBuffer& yuvBuffer,
1302 size_t top, size_t left, size_t width, size_t height) {
1303 ATRACE_CALL();
1304
1305 // Get stride information for codecBuffer
1306 sp<ABuffer> imageData;
1307 if (!codecBuffer->meta()->findBuffer("image-data", &imageData)) {
1308 ALOGE("%s: Codec input buffer is not for image data!", __FUNCTION__);
1309 return BAD_VALUE;
1310 }
1311 if (imageData->size() != sizeof(MediaImage2)) {
1312 ALOGE("%s: Invalid codec input image size %zu, expected %zu",
1313 __FUNCTION__, imageData->size(), sizeof(MediaImage2));
1314 return BAD_VALUE;
1315 }
1316 MediaImage2* imageInfo = reinterpret_cast<MediaImage2*>(imageData->data());
1317 if (imageInfo->mType != MediaImage2::MEDIA_IMAGE_TYPE_YUV ||
1318 imageInfo->mBitDepth != 8 ||
1319 imageInfo->mBitDepthAllocated != 8 ||
1320 imageInfo->mNumPlanes != 3) {
1321 ALOGE("%s: Invalid codec input image info: mType %d, mBitDepth %d, "
1322 "mBitDepthAllocated %d, mNumPlanes %d!", __FUNCTION__,
1323 imageInfo->mType, imageInfo->mBitDepth,
1324 imageInfo->mBitDepthAllocated, imageInfo->mNumPlanes);
1325 return BAD_VALUE;
1326 }
1327
1328 ALOGV("%s: yuvBuffer chromaStep %d, chromaStride %d",
1329 __FUNCTION__, yuvBuffer.chromaStep, yuvBuffer.chromaStride);
1330 ALOGV("%s: U offset %u, V offset %u, U rowInc %d, V rowInc %d, U colInc %d, V colInc %d",
1331 __FUNCTION__, imageInfo->mPlane[MediaImage2::U].mOffset,
1332 imageInfo->mPlane[MediaImage2::V].mOffset,
1333 imageInfo->mPlane[MediaImage2::U].mRowInc,
1334 imageInfo->mPlane[MediaImage2::V].mRowInc,
1335 imageInfo->mPlane[MediaImage2::U].mColInc,
1336 imageInfo->mPlane[MediaImage2::V].mColInc);
1337
1338 // Y
1339 for (auto row = top; row < top+height; row++) {
1340 uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::Y].mOffset +
1341 imageInfo->mPlane[MediaImage2::Y].mRowInc * (row - top);
1342 memcpy(dst, yuvBuffer.data+row*yuvBuffer.stride+left, width);
1343 }
1344
1345 // U is Cb, V is Cr
1346 bool codecUPlaneFirst = imageInfo->mPlane[MediaImage2::V].mOffset >
1347 imageInfo->mPlane[MediaImage2::U].mOffset;
1348 uint32_t codecUvOffsetDiff = codecUPlaneFirst ?
1349 imageInfo->mPlane[MediaImage2::V].mOffset - imageInfo->mPlane[MediaImage2::U].mOffset :
1350 imageInfo->mPlane[MediaImage2::U].mOffset - imageInfo->mPlane[MediaImage2::V].mOffset;
1351 bool isCodecUvSemiplannar = (codecUvOffsetDiff == 1) &&
1352 (imageInfo->mPlane[MediaImage2::U].mRowInc ==
1353 imageInfo->mPlane[MediaImage2::V].mRowInc) &&
1354 (imageInfo->mPlane[MediaImage2::U].mColInc == 2) &&
1355 (imageInfo->mPlane[MediaImage2::V].mColInc == 2);
1356 bool isCodecUvPlannar =
1357 ((codecUPlaneFirst && codecUvOffsetDiff >=
1358 imageInfo->mPlane[MediaImage2::U].mRowInc * imageInfo->mHeight/2) ||
1359 ((!codecUPlaneFirst && codecUvOffsetDiff >=
1360 imageInfo->mPlane[MediaImage2::V].mRowInc * imageInfo->mHeight/2))) &&
1361 imageInfo->mPlane[MediaImage2::U].mColInc == 1 &&
1362 imageInfo->mPlane[MediaImage2::V].mColInc == 1;
1363 bool cameraUPlaneFirst = yuvBuffer.dataCr > yuvBuffer.dataCb;
1364
1365 if (isCodecUvSemiplannar && yuvBuffer.chromaStep == 2 &&
1366 (codecUPlaneFirst == cameraUPlaneFirst)) {
1367 // UV semiplannar
1368 // The chrome plane could be either Cb first, or Cr first. Take the
1369 // smaller address.
1370 uint8_t *src = std::min(yuvBuffer.dataCb, yuvBuffer.dataCr);
1371 MediaImage2::PlaneIndex dstPlane = codecUvOffsetDiff > 0 ? MediaImage2::U : MediaImage2::V;
1372 for (auto row = top/2; row < (top+height)/2; row++) {
1373 uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[dstPlane].mOffset +
1374 imageInfo->mPlane[dstPlane].mRowInc * (row - top/2);
1375 memcpy(dst, src+row*yuvBuffer.chromaStride+left, width);
1376 }
1377 } else if (isCodecUvPlannar && yuvBuffer.chromaStep == 1) {
1378 // U plane
1379 for (auto row = top/2; row < (top+height)/2; row++) {
1380 uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::U].mOffset +
1381 imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2);
1382 memcpy(dst, yuvBuffer.dataCb+row*yuvBuffer.chromaStride+left/2, width/2);
1383 }
1384
1385 // V plane
1386 for (auto row = top/2; row < (top+height)/2; row++) {
1387 uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::V].mOffset +
1388 imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2);
1389 memcpy(dst, yuvBuffer.dataCr+row*yuvBuffer.chromaStride+left/2, width/2);
1390 }
1391 } else {
1392 // Convert between semiplannar and plannar
1393 uint8_t *dst = codecBuffer->data();
1394 for (auto row = top/2; row < (top+height)/2; row++) {
1395 for (auto col = left/2; col < (left+width)/2; col++) {
1396 // U/Cb
1397 int32_t dstIndex = imageInfo->mPlane[MediaImage2::U].mOffset +
1398 imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2) +
1399 imageInfo->mPlane[MediaImage2::U].mColInc * (col - left/2);
1400 int32_t srcIndex = row * yuvBuffer.chromaStride + yuvBuffer.chromaStep * col;
1401 dst[dstIndex] = yuvBuffer.dataCb[srcIndex];
1402
1403 // V/Cr
1404 dstIndex = imageInfo->mPlane[MediaImage2::V].mOffset +
1405 imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2) +
1406 imageInfo->mPlane[MediaImage2::V].mColInc * (col - left/2);
1407 srcIndex = row * yuvBuffer.chromaStride + yuvBuffer.chromaStep * col;
1408 dst[dstIndex] = yuvBuffer.dataCr[srcIndex];
1409 }
1410 }
1411 }
1412 return OK;
1413}
1414
1415size_t HeicCompositeStream::calcAppSegmentMaxSize(const CameraMetadata& info) {
1416 camera_metadata_ro_entry_t entry = info.find(ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT);
1417 size_t maxAppsSegment = 1;
1418 if (entry.count > 0) {
1419 maxAppsSegment = entry.data.u8[0] < 1 ? 1 :
1420 entry.data.u8[0] > 16 ? 16 : entry.data.u8[0];
1421 }
1422 return maxAppsSegment * (2 + 0xFFFF) + sizeof(struct CameraBlob);
1423}
1424
1425bool HeicCompositeStream::threadLoop() {
1426 int64_t currentTs = INT64_MAX;
1427 bool newInputAvailable = false;
1428
1429 {
1430 Mutex::Autolock l(mMutex);
1431 if (mErrorState) {
1432 // In case we landed in error state, return any pending buffers and
1433 // halt all further processing.
1434 compilePendingInputLocked();
1435 releaseInputFramesLocked(currentTs);
1436 return false;
1437 }
1438
1439
1440 while (!newInputAvailable) {
1441 compilePendingInputLocked();
1442 newInputAvailable = getNextReadyInputLocked(&currentTs);
1443
1444 if (!newInputAvailable) {
1445 auto failingFrameNumber = getNextFailingInputLocked(&currentTs);
1446 if (failingFrameNumber >= 0) {
1447 // We cannot erase 'mPendingInputFrames[currentTs]' at this point because it is
1448 // possible for two internal stream buffers to fail. In such scenario the
1449 // composite stream should notify the client about a stream buffer error only
1450 // once and this information is kept within 'errorNotified'.
1451 // Any present failed input frames will be removed on a subsequent call to
1452 // 'releaseInputFramesLocked()'.
1453 releaseInputFrameLocked(&mPendingInputFrames[currentTs]);
1454 currentTs = INT64_MAX;
1455 }
1456
1457 auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration);
1458 if (ret == TIMED_OUT) {
1459 return true;
1460 } else if (ret != OK) {
1461 ALOGE("%s: Timed wait on condition failed: %s (%d)", __FUNCTION__,
1462 strerror(-ret), ret);
1463 return false;
1464 }
1465 }
1466 }
1467 }
1468
1469 auto res = processInputFrame(currentTs, mPendingInputFrames[currentTs]);
1470 Mutex::Autolock l(mMutex);
1471 if (res != OK) {
1472 ALOGE("%s: Failed processing frame with timestamp: %" PRIu64 ": %s (%d)",
1473 __FUNCTION__, currentTs, strerror(-res), res);
1474 mPendingInputFrames[currentTs].error = true;
1475 }
1476
1477 if (mPendingInputFrames[currentTs].error ||
1478 (mPendingInputFrames[currentTs].appSegmentWritten &&
1479 mPendingInputFrames[currentTs].pendingOutputTiles == 0)) {
1480 releaseInputFramesLocked(currentTs);
1481 }
1482
1483 return true;
1484}
1485
1486bool HeicCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
1487 bool res = false;
1488 // Buffer errors concerning internal composite streams should not be directly visible to
1489 // camera clients. They must only receive a single buffer error with the public composite
1490 // stream id.
1491 if ((resultExtras.errorStreamId == mAppSegmentStreamId) ||
1492 (resultExtras.errorStreamId == mMainImageStreamId)) {
1493 flagAnErrorFrameNumber(resultExtras.frameNumber);
1494 res = true;
1495 }
1496
1497 return res;
1498}
1499
// Dispatch asynchronous MediaCodec notifications (posted via the mAsyncNotify
// message registered in initializeCodec()) to the owning HeicCompositeStream.
// The payload keys ("callbackID", "index", "offset", ...) follow MediaCodec's
// async-callback message protocol. Messages are dropped (with a log) when a
// field is missing or when the parent stream has already been destroyed.
void HeicCompositeStream::CodecCallbackHandler::onMessageReceived(const sp<AMessage> &msg) {
    // The handler only holds a weak reference; bail out if the stream is gone.
    sp<HeicCompositeStream> parent = mParent.promote();
    if (parent == nullptr) return;

    switch (msg->what()) {
        case kWhatCallbackNotify: {
            int32_t cbID;
            if (!msg->findInt32("callbackID", &cbID)) {
                ALOGE("kWhatCallbackNotify: callbackID is expected.");
                break;
            }

            ALOGV("kWhatCallbackNotify: cbID = %d", cbID);

            switch (cbID) {
                // An input buffer is free; the stream can fill it with a tile.
                case MediaCodec::CB_INPUT_AVAILABLE: {
                    int32_t index;
                    if (!msg->findInt32("index", &index)) {
                        ALOGE("CB_INPUT_AVAILABLE: index is expected.");
                        break;
                    }
                    parent->onHeicInputFrameAvailable(index);
                    break;
                }

                // An encoded output buffer is ready; forward its metadata so
                // the stream can write it to the muxer.
                case MediaCodec::CB_OUTPUT_AVAILABLE: {
                    int32_t index;
                    size_t offset;
                    size_t size;
                    int64_t timeUs;
                    int32_t flags;

                    if (!msg->findInt32("index", &index)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: index is expected.");
                        break;
                    }
                    if (!msg->findSize("offset", &offset)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: offset is expected.");
                        break;
                    }
                    if (!msg->findSize("size", &size)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: size is expected.");
                        break;
                    }
                    if (!msg->findInt64("timeUs", &timeUs)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: timeUs is expected.");
                        break;
                    }
                    if (!msg->findInt32("flags", &flags)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: flags is expected.");
                        break;
                    }

                    CodecOutputBufferInfo bufferInfo = {
                        index,
                        (int32_t)offset,
                        (int32_t)size,
                        timeUs,
                        (uint32_t)flags};

                    parent->onHeicOutputFrameAvailable(bufferInfo);
                    break;
                }

                // Output format is known (e.g. after configure/first output).
                case MediaCodec::CB_OUTPUT_FORMAT_CHANGED: {
                    sp<AMessage> format;
                    if (!msg->findMessage("format", &format)) {
                        ALOGE("CB_OUTPUT_FORMAT_CHANGED: format is expected.");
                        break;
                    }

                    parent->onHeicFormatChanged(format);
                    break;
                }

                // Codec hit an error; the stream transitions to error state.
                case MediaCodec::CB_ERROR: {
                    status_t err;
                    int32_t actionCode;
                    AString detail;
                    if (!msg->findInt32("err", &err)) {
                        ALOGE("CB_ERROR: err is expected.");
                        break;
                    }
                    if (!msg->findInt32("action", &actionCode)) {
                        ALOGE("CB_ERROR: action is expected.");
                        break;
                    }
                    // "detail" is optional; log whatever is present.
                    msg->findString("detail", &detail);
                    ALOGE("Codec reported error(0x%x), actionCode(%d), detail(%s)",
                            err, actionCode, detail.c_str());

                    parent->onHeicCodecError();
                    break;
                }

                default: {
                    ALOGE("kWhatCallbackNotify: callbackID(%d) is unexpected.", cbID);
                    break;
                }
            }
            break;
        }

        default:
            ALOGE("shouldn't be here");
            break;
    }
}
1608
1609}; // namespace camera3
1610}; // namespace android