/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#define LOG_TAG "Camera3-HeicCompositeStream"
18#define ATRACE_TAG ATRACE_TAG_CAMERA
19//#define LOG_NDEBUG 0
20
21#include <linux/memfd.h>
22#include <pthread.h>
23#include <sys/syscall.h>
24
25#include <android/hardware/camera/device/3.5/types.h>
26#include <gui/Surface.h>
27#include <utils/Log.h>
28#include <utils/Trace.h>
29
30#include <media/ICrypto.h>
31#include <media/MediaCodecBuffer.h>
32#include <media/stagefright/foundation/ABuffer.h>
33#include <media/stagefright/foundation/AMessage.h>
34#include <media/stagefright/foundation/MediaDefs.h>
35#include <media/stagefright/MediaCodecConstants.h>
36
37#include "common/CameraDeviceBase.h"
38#include "utils/ExifUtils.h"
39#include "HeicEncoderInfoManager.h"
40#include "HeicCompositeStream.h"
41
42using android::hardware::camera::device::V3_5::CameraBlob;
43using android::hardware::camera::device::V3_5::CameraBlobId;
44
45namespace android {
46namespace camera3 {
47
// Composite stream that combines a JPEG APP-segment blob stream with a main
// HEIC/HEVC-encoded image stream into a single HEIF output delivered to the
// client surface. All members start in their unconfigured state here; they
// are filled in by initializeCodec()/createInternalStreams() later.
HeicCompositeStream::HeicCompositeStream(wp<CameraDeviceBase> device,
        wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
        CompositeStream(device, cb),
        mUseHeic(false),
        mNumOutputTiles(1),
        mOutputWidth(0),
        mOutputHeight(0),
        mMaxHeicBufferSize(0),
        mGridWidth(HeicEncoderInfoManager::kGridWidth),
        mGridHeight(HeicEncoderInfoManager::kGridHeight),
        mGridRows(1),
        mGridCols(1),
        mUseGrid(false),                 // true when framework-side tiling is needed
        mAppSegmentStreamId(-1),         // -1 == internal stream not yet created
        mAppSegmentSurfaceId(-1),
        mAppSegmentBufferAcquired(false),
        mMainImageStreamId(-1),
        mMainImageSurfaceId(-1),
        mYuvBufferAcquired(false),
        mProducerListener(new ProducerListener()),
        mOutputBufferCounter(0),
        mGridTimestampUs(0) {
}
71
HeicCompositeStream::~HeicCompositeStream() {
    // Call deinitCodec in case stream hasn't been deleted yet to avoid any
    // memory/resource leak.
    deinitCodec();

    // Drop any buffer bookkeeping still pending at destruction time.
    mInputAppSegmentBuffers.clear();
    mCodecOutputBuffers.clear();

    // Reset the APP segment stream state and release its consumer/surface.
    mAppSegmentStreamId = -1;
    mAppSegmentSurfaceId = -1;
    mAppSegmentConsumer.clear();
    mAppSegmentSurface.clear();

    // Reset the main image stream state and release its consumer/surface.
    mMainImageStreamId = -1;
    mMainImageSurfaceId = -1;
    mMainImageConsumer.clear();
    mMainImageSurface.clear();
}
90
91bool HeicCompositeStream::isHeicCompositeStream(const sp<Surface> &surface) {
92 ANativeWindow *anw = surface.get();
93 status_t err;
94 int format;
95 if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
96 String8 msg = String8::format("Failed to query Surface format: %s (%d)", strerror(-err),
97 err);
98 ALOGE("%s: %s", __FUNCTION__, msg.string());
99 return false;
100 }
101
102 int dataspace;
103 if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
104 String8 msg = String8::format("Failed to query Surface dataspace: %s (%d)", strerror(-err),
105 err);
106 ALOGE("%s: %s", __FUNCTION__, msg.string());
107 return false;
108 }
109
110 return ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == HAL_DATASPACE_HEIF));
111}
112
// Creates the two internal camera streams backing this composite stream:
//   1) A small BLOB stream carrying JPEG APP segments (Exif etc.), read on
//      the CPU via a CpuConsumer owned by this class.
//   2) The main image stream feeding the HEIC/HEVC codec: either the codec's
//      own input surface (no framework tiling) or a YUV_888 CpuConsumer
//      stream when framework tiling is needed.
// On success, *id/*surfaceIds describe the main image stream, and
// consumers[0] is retained as the final HEIF output surface.
status_t HeicCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
        bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
        camera3_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
        std::vector<int> *surfaceIds, int /*streamSetId*/, bool /*isShared*/) {

    sp<CameraDeviceBase> device = mDevice.promote();
    if (!device.get()) {
        ALOGE("%s: Invalid camera device!", __FUNCTION__);
        return NO_INIT;
    }

    // Select the HEIC or HEVC encoder and configure grid/tiling parameters
    // for the requested output size.
    status_t res = initializeCodec(width, height, device);
    if (res != OK) {
        ALOGE("%s: Failed to initialize HEIC/HEVC codec: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return NO_INIT;
    }

    // APP segment stream: single-buffer CPU-readable queue.
    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    BufferQueue::createBufferQueue(&producer, &consumer);
    mAppSegmentConsumer = new CpuConsumer(consumer, 1);
    mAppSegmentConsumer->setFrameAvailableListener(this);
    mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
    mAppSegmentSurface = new Surface(producer);

    mStaticInfo = device->info();

    res = device->createStream(mAppSegmentSurface, mAppSegmentMaxSize, 1, format,
            kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId, surfaceIds);
    if (res == OK) {
        mAppSegmentSurfaceId = (*surfaceIds)[0];
    } else {
        ALOGE("%s: Failed to create JPEG App segment stream: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    // Main image source: feed the codec directly when it can encode the full
    // frame, otherwise stage through a CPU-readable YUV stream for tiling.
    if (!mUseGrid) {
        res = mCodec->createInputSurface(&producer);
        if (res != OK) {
            ALOGE("%s: Failed to create input surface for Heic codec: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    } else {
        BufferQueue::createBufferQueue(&producer, &consumer);
        mMainImageConsumer = new CpuConsumer(consumer, 1);
        mMainImageConsumer->setFrameAvailableListener(this);
        mMainImageConsumer->setName(String8("Camera3-HeicComposite-HevcInputYUVStream"));
    }
    mMainImageSurface = new Surface(producer);

    res = mCodec->start();
    if (res != OK) {
        ALOGE("%s: Failed to start codec: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    std::vector<int> sourceSurfaceId;
    //Use YUV_888 format if framework tiling is needed.
    int srcStreamFmt = mUseGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    res = device->createStream(mMainImageSurface, width, height, srcStreamFmt, kHeifDataSpace,
            rotation, id, physicalCameraId, &sourceSurfaceId);
    if (res == OK) {
        mMainImageSurfaceId = sourceSurfaceId[0];
        mMainImageStreamId = *id;
    } else {
        ALOGE("%s: Failed to create main image stream: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    // The first consumer surface is where the final muxed HEIF blob goes.
    mOutputSurface = consumers[0];
    res = registerCompositeStreamListener(getStreamId());
    if (res != OK) {
        ALOGE("%s: Failed to register HAL main image stream", __FUNCTION__);
        return res;
    }

    return res;
}
197
// Tears down this composite stream: stops the processing thread, shuts the
// codec down, deletes the internal APP segment stream and disconnects the
// client-facing output surface.
status_t HeicCompositeStream::deleteInternalStreams() {
    // Stop and join the processing thread before releasing its resources.
    requestExit();
    auto res = join();
    if (res != OK) {
        ALOGE("%s: Failed to join with the main processing thread: %s (%d)", __FUNCTION__,
                strerror(-res), res);
    }

    deinitCodec();

    // Only the APP segment stream is deleted here; the main image stream id
    // was handed back to the caller of createInternalStreams().
    if (mAppSegmentStreamId >= 0) {
        sp<CameraDeviceBase> device = mDevice.promote();
        if (!device.get()) {
            ALOGE("%s: Invalid camera device!", __FUNCTION__);
            return NO_INIT;
        }

        res = device->deleteStream(mAppSegmentStreamId);
        mAppSegmentStreamId = -1;
    }

    // Disconnect from the client output surface (connected in configureStream).
    if (mOutputSurface != nullptr) {
        mOutputSurface->disconnect(NATIVE_WINDOW_API_CAMERA);
        mOutputSurface.clear();
    }
    return res;
}
225
226void HeicCompositeStream::onBufferReleased(const BufferInfo& bufferInfo) {
227 Mutex::Autolock l(mMutex);
228
229 if (bufferInfo.mError) return;
230
231 mCodecOutputBufferTimestamps.push(bufferInfo.mTimestamp);
232}
233
234// We need to get the settings early to handle the case where the codec output
235// arrives earlier than result metadata.
236void HeicCompositeStream::onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId,
237 const CameraMetadata& settings) {
238 ATRACE_ASYNC_BEGIN("HEIC capture", frameNumber);
239
240 Mutex::Autolock l(mMutex);
241 if (mErrorState || (streamId != getStreamId())) {
242 return;
243 }
244
245 mPendingCaptureResults.emplace(frameNumber, CameraMetadata());
246
247 camera_metadata_ro_entry entry;
248
249 int32_t orientation = 0;
250 entry = settings.find(ANDROID_JPEG_ORIENTATION);
251 if (entry.count == 1) {
252 orientation = entry.data.i32[0];
253 }
254
255 int32_t quality = kDefaultJpegQuality;
256 entry = settings.find(ANDROID_JPEG_QUALITY);
257 if (entry.count == 1) {
258 quality = entry.data.i32[0];
259 }
260
261 mSettingsByFrameNumber[frameNumber] = std::make_pair(orientation, quality);
262}
263
264void HeicCompositeStream::onFrameAvailable(const BufferItem& item) {
265 if (item.mDataSpace == static_cast<android_dataspace>(kAppSegmentDataSpace)) {
266 ALOGV("%s: JPEG APP segments buffer with ts: %" PRIu64 " ms. arrived!",
267 __func__, ns2ms(item.mTimestamp));
268
269 Mutex::Autolock l(mMutex);
270 if (!mErrorState) {
271 mInputAppSegmentBuffers.push_back(item.mTimestamp);
272 mInputReadyCondition.signal();
273 }
274 } else if (item.mDataSpace == kHeifDataSpace) {
275 ALOGV("%s: YUV_888 buffer with ts: %" PRIu64 " ms. arrived!",
276 __func__, ns2ms(item.mTimestamp));
277
278 Mutex::Autolock l(mMutex);
279 if (!mUseGrid) {
280 ALOGE("%s: YUV_888 internal stream is only supported for HEVC tiling",
281 __FUNCTION__);
282 return;
283 }
284 if (!mErrorState) {
285 mInputYuvBuffers.push_back(item.mTimestamp);
286 mInputReadyCondition.signal();
287 }
288 } else {
289 ALOGE("%s: Unexpected data space: 0x%x", __FUNCTION__, item.mDataSpace);
290 }
291}
292
// Expands a client HEIC stream request into the two internal streams it is
// composed of: [0] the JPEG APP segment BLOB stream and [1] the main image
// stream (YUV when framework tiling is needed, implementation-defined
// otherwise). Leaves compositeOutput empty when neither the HEIC nor the
// HEVC encoder supports the requested size.
status_t HeicCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
        const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/) {
    if (compositeOutput == nullptr) {
        return BAD_VALUE;
    }

    compositeOutput->clear();

    bool useGrid, useHeic;
    bool isSizeSupported = isSizeSupportedByHeifEncoder(
            streamInfo.width, streamInfo.height, &useHeic, &useGrid, nullptr);
    if (!isSizeSupported) {
        // Size is not supported by either encoder.
        return OK;
    }

    compositeOutput->insert(compositeOutput->end(), 2, streamInfo);

    // JPEG APPS segments Blob stream info
    (*compositeOutput)[0].width = calcAppSegmentMaxSize(ch);
    (*compositeOutput)[0].height = 1;
    (*compositeOutput)[0].format = HAL_PIXEL_FORMAT_BLOB;
    (*compositeOutput)[0].dataSpace = kAppSegmentDataSpace;
    (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;

    // YUV/IMPLEMENTATION_DEFINED stream info
    (*compositeOutput)[1].width = streamInfo.width;
    (*compositeOutput)[1].height = streamInfo.height;
    (*compositeOutput)[1].format = useGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    (*compositeOutput)[1].dataSpace = kHeifDataSpace;
    // Usage depends on who consumes the main image: the HEIC image encoder,
    // the framework (CPU tiling), or the HEVC video encoder.
    (*compositeOutput)[1].consumerUsage = useHeic ? GRALLOC_USAGE_HW_IMAGE_ENCODER :
            useGrid ? GRALLOC_USAGE_SW_READ_OFTEN : GRALLOC_USAGE_HW_VIDEO_ENCODER;

    return NO_ERROR;
}
329
330bool HeicCompositeStream::isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
331 bool* useHeic, bool* useGrid, int64_t* stall) {
332 static HeicEncoderInfoManager& heicManager = HeicEncoderInfoManager::getInstance();
333 return heicManager.isSizeSupported(width, height, useHeic, useGrid, stall);
334}
335
336bool HeicCompositeStream::isInMemoryTempFileSupported() {
337 int memfd = syscall(__NR_memfd_create, "HEIF-try-memfd", MFD_CLOEXEC);
338 if (memfd == -1) {
339 if (errno != ENOSYS) {
340 ALOGE("%s: Failed to create tmpfs file. errno %d", __FUNCTION__, errno);
341 }
342 return false;
343 }
344 close(memfd);
345 return true;
346}
347
348void HeicCompositeStream::onHeicOutputFrameAvailable(
349 const CodecOutputBufferInfo& outputBufferInfo) {
350 Mutex::Autolock l(mMutex);
351
352 ALOGV("%s: index %d, offset %d, size %d, time %" PRId64 ", flags 0x%x",
353 __FUNCTION__, outputBufferInfo.index, outputBufferInfo.offset,
354 outputBufferInfo.size, outputBufferInfo.timeUs, outputBufferInfo.flags);
355
356 if (!mErrorState) {
357 if ((outputBufferInfo.size > 0) &&
358 ((outputBufferInfo.flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0)) {
359 mCodecOutputBuffers.push_back(outputBufferInfo);
360 mInputReadyCondition.signal();
361 } else {
362 mCodec->releaseOutputBuffer(outputBufferInfo.index);
363 }
364 } else {
365 mCodec->releaseOutputBuffer(outputBufferInfo.index);
366 }
367}
368
369void HeicCompositeStream::onHeicInputFrameAvailable(int32_t index) {
370 Mutex::Autolock l(mMutex);
371
372 if (!mUseGrid) {
373 ALOGE("%s: Codec YUV input mode must only be used for Hevc tiling mode", __FUNCTION__);
374 return;
375 }
376
377 mCodecInputBuffers.push_back(index);
378 mInputReadyCondition.signal();
379}
380
// Codec output-format callback. When the codec emits an HEVC (non-HEIC)
// format, rewrites the mime/size/grid keys so the muxer treats the track as
// a HEIC image. Also caches the per-image tile count (used to know when all
// output buffers of one image have arrived) and the final format.
void HeicCompositeStream::onHeicFormatChanged(sp<AMessage>& newFormat) {
    if (newFormat == nullptr) {
        ALOGE("%s: newFormat must not be null!", __FUNCTION__);
        return;
    }

    Mutex::Autolock l(mMutex);

    AString mime;
    AString mimeHeic(MIMETYPE_IMAGE_ANDROID_HEIC);
    newFormat->findString(KEY_MIME, &mime);
    if (mime != mimeHeic) {
        // For HEVC codec, below keys need to be filled out or overwritten so that the
        // muxer can handle them as HEIC output image.
        newFormat->setString(KEY_MIME, mimeHeic);
        newFormat->setInt32(KEY_WIDTH, mOutputWidth);
        newFormat->setInt32(KEY_HEIGHT, mOutputHeight);
        if (mUseGrid) {
            newFormat->setInt32(KEY_TILE_WIDTH, mGridWidth);
            newFormat->setInt32(KEY_TILE_HEIGHT, mGridHeight);
            newFormat->setInt32(KEY_GRID_ROWS, mGridRows);
            newFormat->setInt32(KEY_GRID_COLUMNS, mGridCols);
        }
    }
    newFormat->setInt32(KEY_IS_DEFAULT, 1 /*isPrimary*/);

    // One output buffer is expected per grid cell; a format without grid keys
    // produces a single output buffer per image.
    int32_t gridRows, gridCols;
    if (newFormat->findInt32(KEY_GRID_ROWS, &gridRows) &&
            newFormat->findInt32(KEY_GRID_COLUMNS, &gridCols)) {
        mNumOutputTiles = gridRows * gridCols;
    } else {
        mNumOutputTiles = 1;
    }

    ALOGV("%s: mNumOutputTiles is %zu", __FUNCTION__, mNumOutputTiles);
    mFormat = newFormat;
}
418
// Codec error callback: move the whole composite stream into the error state
// so no further input is queued or processed.
void HeicCompositeStream::onHeicCodecError() {
    Mutex::Autolock l(mMutex);
    mErrorState = true;
}
423
// Connects and configures the client-facing output surface — sized as a
// one-dimensional BLOB buffer large enough for the muxed HEIF file — and
// starts the processing thread. A no-op if the thread is already running.
status_t HeicCompositeStream::configureStream() {
    if (isRunning()) {
        // Processing thread is already running, nothing more to do.
        return NO_ERROR;
    }

    if (mOutputSurface.get() == nullptr) {
        ALOGE("%s: No valid output surface set!", __FUNCTION__);
        return NO_INIT;
    }

    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
    if (res != OK) {
        ALOGE("%s: Unable to connect to native window for stream %d",
                __FUNCTION__, mMainImageStreamId);
        return res;
    }

    // The muxed HEIF file is delivered as a BLOB-format buffer.
    if ((res = native_window_set_buffers_format(mOutputSurface.get(), HAL_PIXEL_FORMAT_BLOB))
            != OK) {
        ALOGE("%s: Unable to configure stream buffer format for stream %d", __FUNCTION__,
                mMainImageStreamId);
        return res;
    }

    ANativeWindow *anwConsumer = mOutputSurface.get();
    int maxConsumerBuffers;
    if ((res = anwConsumer->query(anwConsumer, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            &maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to query consumer undequeued"
                " buffer count for stream %d", __FUNCTION__, mMainImageStreamId);
        return res;
    }

    // Cannot use SourceSurface buffer count since it could be codec's 512*512 tile
    // buffer count.
    int maxProducerBuffers = 1;
    if ((res = native_window_set_buffer_count(
            anwConsumer, maxProducerBuffers + maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to set buffer count for stream %d", __FUNCTION__, mMainImageStreamId);
        return res;
    }

    // Width == max HEIF file size, height == 1 (BLOB convention).
    if ((res = native_window_set_buffers_dimensions(anwConsumer, mMaxHeicBufferSize, 1)) != OK) {
        ALOGE("%s: Unable to set buffer dimension %zu x 1 for stream %d: %s (%d)",
                __FUNCTION__, mMaxHeicBufferSize, mMainImageStreamId, strerror(-res), res);
        return res;
    }

    run("HeicCompositeStreamProc");

    return NO_ERROR;
}
477
478status_t HeicCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
479 Vector<int32_t>* /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
480 if (outSurfaceMap->find(mAppSegmentStreamId) == outSurfaceMap->end()) {
481 (*outSurfaceMap)[mAppSegmentStreamId] = std::vector<size_t>();
482 outputStreamIds->push_back(mAppSegmentStreamId);
483 }
484 (*outSurfaceMap)[mAppSegmentStreamId].push_back(mAppSegmentSurfaceId);
485
486 if (outSurfaceMap->find(mMainImageStreamId) == outSurfaceMap->end()) {
487 (*outSurfaceMap)[mMainImageStreamId] = std::vector<size_t>();
488 outputStreamIds->push_back(mMainImageStreamId);
489 }
490 (*outSurfaceMap)[mMainImageStreamId].push_back(mMainImageSurfaceId);
491
492 if (currentStreamId != nullptr) {
493 *currentStreamId = mMainImageStreamId;
494 }
495
496 return NO_ERROR;
497}
498
499void HeicCompositeStream::onShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) {
500 Mutex::Autolock l(mMutex);
501 if (mErrorState) {
502 return;
503 }
504
505 if (mSettingsByFrameNumber.find(resultExtras.frameNumber) != mSettingsByFrameNumber.end()) {
506 mFrameNumberMap.emplace(resultExtras.frameNumber, timestamp);
507 mSettingsByTimestamp[timestamp] = mSettingsByFrameNumber[resultExtras.frameNumber];
508 mSettingsByFrameNumber.erase(resultExtras.frameNumber);
509 mInputReadyCondition.signal();
510 }
511}
512
513void HeicCompositeStream::compilePendingInputLocked() {
514 while (!mSettingsByTimestamp.empty()) {
515 auto it = mSettingsByTimestamp.begin();
516 mPendingInputFrames[it->first].orientation = it->second.first;
517 mPendingInputFrames[it->first].quality = it->second.second;
518 mSettingsByTimestamp.erase(it);
519 }
520
521 while (!mInputAppSegmentBuffers.empty() && !mAppSegmentBufferAcquired) {
522 CpuConsumer::LockedBuffer imgBuffer;
523 auto it = mInputAppSegmentBuffers.begin();
524 auto res = mAppSegmentConsumer->lockNextBuffer(&imgBuffer);
525 if (res == NOT_ENOUGH_DATA) {
526 // Canot not lock any more buffers.
527 break;
528 } else if ((res != OK) || (*it != imgBuffer.timestamp)) {
529 if (res != OK) {
530 ALOGE("%s: Error locking JPEG_APP_SEGMENTS image buffer: %s (%d)", __FUNCTION__,
531 strerror(-res), res);
532 } else {
533 ALOGE("%s: Expecting JPEG_APP_SEGMENTS buffer with time stamp: %" PRId64
534 " received buffer with time stamp: %" PRId64, __FUNCTION__,
535 *it, imgBuffer.timestamp);
536 }
537 mPendingInputFrames[*it].error = true;
538 mInputAppSegmentBuffers.erase(it);
539 continue;
540 }
541
542 if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
543 (mPendingInputFrames[imgBuffer.timestamp].error)) {
544 mAppSegmentConsumer->unlockBuffer(imgBuffer);
545 } else {
546 mPendingInputFrames[imgBuffer.timestamp].appSegmentBuffer = imgBuffer;
547 mAppSegmentBufferAcquired = true;
548 }
549 mInputAppSegmentBuffers.erase(it);
550 }
551
552 while (!mInputYuvBuffers.empty() && !mYuvBufferAcquired) {
553 CpuConsumer::LockedBuffer imgBuffer;
554 auto it = mInputYuvBuffers.begin();
555 auto res = mMainImageConsumer->lockNextBuffer(&imgBuffer);
556 if (res == NOT_ENOUGH_DATA) {
557 // Canot not lock any more buffers.
558 break;
559 } else if (res != OK) {
560 ALOGE("%s: Error locking YUV_888 image buffer: %s (%d)", __FUNCTION__,
561 strerror(-res), res);
562 mPendingInputFrames[*it].error = true;
563 mInputYuvBuffers.erase(it);
564 continue;
565 } else if (*it != imgBuffer.timestamp) {
566 ALOGW("%s: Expecting YUV_888 buffer with time stamp: %" PRId64 " received buffer with "
567 "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
568 mPendingInputFrames[*it].error = true;
569 mInputYuvBuffers.erase(it);
570 continue;
571 }
572
573 if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
574 (mPendingInputFrames[imgBuffer.timestamp].error)) {
575 mMainImageConsumer->unlockBuffer(imgBuffer);
576 } else {
577 mPendingInputFrames[imgBuffer.timestamp].yuvBuffer = imgBuffer;
578 mYuvBufferAcquired = true;
579 }
580 mInputYuvBuffers.erase(it);
581 }
582
583 while (!mCodecOutputBuffers.empty()) {
584 auto it = mCodecOutputBuffers.begin();
585 // Bitstream buffer timestamp doesn't necessarily directly correlate with input
586 // buffer timestamp. Assume encoder input to output is FIFO, use a queue
587 // to look up timestamp.
588 int64_t bufferTime = -1;
589 if (mCodecOutputBufferTimestamps.empty()) {
590 ALOGE("%s: Failed to find buffer timestamp for codec output buffer!", __FUNCTION__);
591 } else {
592 // Direct mapping between camera timestamp (in ns) and codec timestamp (in us).
593 bufferTime = mCodecOutputBufferTimestamps.front();
594 mOutputBufferCounter++;
595 if (mOutputBufferCounter == mNumOutputTiles) {
596 mCodecOutputBufferTimestamps.pop();
597 mOutputBufferCounter = 0;
598 }
599
600 mPendingInputFrames[bufferTime].codecOutputBuffers.push_back(*it);
601 }
602 mCodecOutputBuffers.erase(it);
603 }
604
605 while (!mFrameNumberMap.empty()) {
606 auto it = mFrameNumberMap.begin();
607 mPendingInputFrames[it->second].frameNumber = it->first;
608 mFrameNumberMap.erase(it);
609 }
610
Shuzhen Wange7f4b462019-02-12 08:43:07 -0800611 while (!mCaptureResults.empty()) {
612 auto it = mCaptureResults.begin();
613 // Negative timestamp indicates that something went wrong during the capture result
614 // collection process.
615 if (it->first >= 0) {
616 if (mPendingInputFrames[it->first].frameNumber == std::get<0>(it->second)) {
617 mPendingInputFrames[it->first].result =
618 std::make_unique<CameraMetadata>(std::get<1>(it->second));
619 } else {
620 ALOGE("%s: Capture result frameNumber/timestamp mapping changed between "
621 "shutter and capture result!", __FUNCTION__);
622 }
623 }
624 mCaptureResults.erase(it);
625 }
626
627 // mErrorFrameNumbers stores frame number of dropped buffers.
628 auto it = mErrorFrameNumbers.begin();
629 while (it != mErrorFrameNumbers.end()) {
630 bool frameFound = false;
631 for (auto &inputFrame : mPendingInputFrames) {
632 if (inputFrame.second.frameNumber == *it) {
633 inputFrame.second.error = true;
634 frameFound = true;
635 break;
636 }
637 }
638
639 if (frameFound) {
640 it = mErrorFrameNumbers.erase(it);
641 } else {
642 ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__,
643 *it);
644 it++;
645 }
646 }
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -0800647
648 // Distribute codec input buffers to be filled out from YUV output
649 for (auto it = mPendingInputFrames.begin();
650 it != mPendingInputFrames.end() && mCodecInputBuffers.size() > 0; it++) {
651 InputFrame& inputFrame(it->second);
652 if (inputFrame.codecInputCounter < mGridRows * mGridCols) {
653 // Available input tiles that are required for the current input
654 // image.
655 size_t newInputTiles = std::min(mCodecInputBuffers.size(),
656 mGridRows * mGridCols - inputFrame.codecInputCounter);
657 for (size_t i = 0; i < newInputTiles; i++) {
658 CodecInputBufferInfo inputInfo =
659 { mCodecInputBuffers[0], mGridTimestampUs++, inputFrame.codecInputCounter };
660 inputFrame.codecInputBuffers.push_back(inputInfo);
661
662 mCodecInputBuffers.erase(mCodecInputBuffers.begin());
663 inputFrame.codecInputCounter++;
664 }
665 break;
666 }
667 }
668}
669
670bool HeicCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*out*/) {
671 if (currentTs == nullptr) {
672 return false;
673 }
674
675 bool newInputAvailable = false;
676 for (const auto& it : mPendingInputFrames) {
Shuzhen Wange7f4b462019-02-12 08:43:07 -0800677 bool appSegmentReady = (it.second.appSegmentBuffer.data != nullptr) &&
678 !it.second.appSegmentWritten && it.second.result != nullptr;
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -0800679 bool codecOutputReady = !it.second.codecOutputBuffers.empty();
680 bool codecInputReady = (it.second.yuvBuffer.data != nullptr) &&
681 (!it.second.codecInputBuffers.empty());
682 if ((!it.second.error) &&
683 (it.first < *currentTs) &&
Shuzhen Wange7f4b462019-02-12 08:43:07 -0800684 (appSegmentReady || codecOutputReady || codecInputReady)) {
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -0800685 *currentTs = it.first;
686 newInputAvailable = true;
687 break;
688 }
689 }
690
691 return newInputAvailable;
692}
693
694int64_t HeicCompositeStream::getNextFailingInputLocked(int64_t *currentTs /*out*/) {
695 int64_t res = -1;
696 if (currentTs == nullptr) {
697 return res;
698 }
699
700 for (const auto& it : mPendingInputFrames) {
701 if (it.second.error && !it.second.errorNotified && (it.first < *currentTs)) {
702 *currentTs = it.first;
703 res = it.second.frameNumber;
704 break;
705 }
706 }
707
708 return res;
709}
710
// Advances one pending input frame through the muxing pipeline:
// tile input copying -> lazy muxer startup -> APP segment write -> bitstream
// writes -> finalization. Each stage runs only once its prerequisites are
// present, so this is invoked repeatedly for the same frame until complete.
status_t HeicCompositeStream::processInputFrame(nsecs_t timestamp,
        InputFrame &inputFrame) {
    ATRACE_CALL();
    status_t res = OK;

    // A frame is actionable when the not-yet-written APP segment plus its
    // capture result is available, or codec output/input buffers are pending.
    bool appSegmentReady = inputFrame.appSegmentBuffer.data != nullptr &&
            !inputFrame.appSegmentWritten && inputFrame.result != nullptr;
    bool codecOutputReady = inputFrame.codecOutputBuffers.size() > 0;
    bool codecInputReady = inputFrame.yuvBuffer.data != nullptr &&
            !inputFrame.codecInputBuffers.empty();

    if (!appSegmentReady && !codecOutputReady && !codecInputReady) {
        ALOGW("%s: No valid appSegmentBuffer/codec input/outputBuffer available!", __FUNCTION__);
        return OK;
    }

    // Handle inputs for Hevc tiling
    if (codecInputReady) {
        res = processCodecInputFrame(inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process codec input frame: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Initialize and start muxer if not yet done so
    if (inputFrame.muxer == nullptr) {
        res = startMuxerForInputFrame(timestamp, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to create and start muxer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Write JPEG APP segments data to the muxer.
    if (appSegmentReady && inputFrame.muxer != nullptr) {
        res = processAppSegment(timestamp, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process JPEG APP segments: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Write media codec bitstream buffers to muxer.
    while (!inputFrame.codecOutputBuffers.empty()) {
        res = processOneCodecOutputFrame(timestamp, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process codec output frame: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Finalize once the APP segment is written and every tile has been muxed.
    if (inputFrame.appSegmentWritten && inputFrame.pendingOutputTiles == 0) {
        res = processCompletedInputFrame(timestamp, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process completed input frame: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    return res;
}
778
// Lazily creates and starts a MediaMuxer for one frame, once the first codec
// output buffer for it exists: dequeues the client output buffer, backs the
// muxer with an in-memory (memfd) temp file, applies orientation and encoder
// quality, then adds the image track and starts the muxer.
status_t HeicCompositeStream::startMuxerForInputFrame(nsecs_t timestamp, InputFrame &inputFrame) {
    sp<ANativeWindow> outputANW = mOutputSurface;
    if (inputFrame.codecOutputBuffers.size() == 0) {
        // No single codec output buffer has been generated. Continue to
        // wait.
        return OK;
    }

    auto res = outputANW->dequeueBuffer(mOutputSurface.get(), &inputFrame.anb, &inputFrame.fenceFd);
    if (res != OK) {
        ALOGE("%s: Error retrieving output buffer: %s (%d)", __FUNCTION__, strerror(-res),
                res);
        return res;
    }

    // Combine current thread id, stream id and timestamp to uniquely identify image.
    std::ostringstream tempOutputFile;
    tempOutputFile << "HEIF-" << pthread_self() << "-"
            << getStreamId() << "-" << timestamp;
    inputFrame.fileFd = syscall(__NR_memfd_create, tempOutputFile.str().c_str(), MFD_CLOEXEC);
    if (inputFrame.fileFd < 0) {
        ALOGE("%s: Failed to create file %s. Error no is %d", __FUNCTION__,
                tempOutputFile.str().c_str(), errno);
        return NO_INIT;
    }
    inputFrame.muxer = new MediaMuxer(inputFrame.fileFd, MediaMuxer::OUTPUT_FORMAT_HEIF);
    if (inputFrame.muxer == nullptr) {
        ALOGE("%s: Failed to create MediaMuxer for file fd %d",
                __FUNCTION__, inputFrame.fileFd);
        return NO_INIT;
    }

    res = inputFrame.muxer->setOrientationHint(inputFrame.orientation);
    if (res != OK) {
        ALOGE("%s: Failed to setOrientationHint: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }
    // Set encoder quality
    {
        sp<AMessage> qualityParams = new AMessage;
        qualityParams->setInt32(PARAMETER_KEY_VIDEO_BITRATE, inputFrame.quality);
        res = mCodec->setParameters(qualityParams);
        if (res != OK) {
            ALOGE("%s: Failed to set codec quality: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    }

    ssize_t trackId = inputFrame.muxer->addTrack(mFormat);
    if (trackId < 0) {
        ALOGE("%s: Failed to addTrack to the muxer: %zd", __FUNCTION__, trackId);
        return NO_INIT;
    }

    // pendingOutputTiles counts down as tiles are written; 0 means complete.
    inputFrame.trackIndex = trackId;
    inputFrame.pendingOutputTiles = mNumOutputTiles;

    res = inputFrame.muxer->start();
    if (res != OK) {
        ALOGE("%s: Failed to start MediaMuxer: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    // NOTE(review): on the error returns above, inputFrame.anb and
    // inputFrame.fileFd remain attached to the frame — presumably reclaimed
    // by the frame's error/cleanup path; verify.
    return OK;
}
847
848status_t HeicCompositeStream::processAppSegment(nsecs_t timestamp, InputFrame &inputFrame) {
849 size_t app1Size = 0;
850 auto appSegmentSize = findAppSegmentsSize(inputFrame.appSegmentBuffer.data,
851 inputFrame.appSegmentBuffer.width * inputFrame.appSegmentBuffer.height,
852 &app1Size);
853 ALOGV("%s: appSegmentSize is %zu, width %d, height %d, app1Size %zu", __FUNCTION__,
854 appSegmentSize, inputFrame.appSegmentBuffer.width,
855 inputFrame.appSegmentBuffer.height, app1Size);
856 if (appSegmentSize == 0) {
857 ALOGE("%s: Failed to find JPEG APP segment size", __FUNCTION__);
858 return NO_INIT;
859 }
860
861 std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
862 auto exifRes = exifUtils->initialize(inputFrame.appSegmentBuffer.data, app1Size);
863 if (!exifRes) {
864 ALOGE("%s: Failed to initialize ExifUtils object!", __FUNCTION__);
865 return BAD_VALUE;
866 }
Shuzhen Wange7f4b462019-02-12 08:43:07 -0800867 exifRes = exifUtils->setFromMetadata(*inputFrame.result, mStaticInfo,
868 mOutputWidth, mOutputHeight);
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -0800869 if (!exifRes) {
870 ALOGE("%s: Failed to set Exif tags using metadata and main image sizes", __FUNCTION__);
871 return BAD_VALUE;
872 }
873 exifRes = exifUtils->setOrientation(inputFrame.orientation);
874 if (!exifRes) {
875 ALOGE("%s: ExifUtils failed to set orientation", __FUNCTION__);
876 return BAD_VALUE;
877 }
878 exifRes = exifUtils->generateApp1();
879 if (!exifRes) {
880 ALOGE("%s: ExifUtils failed to generate APP1 segment", __FUNCTION__);
881 return BAD_VALUE;
882 }
883
884 unsigned int newApp1Length = exifUtils->getApp1Length();
885 const uint8_t *newApp1Segment = exifUtils->getApp1Buffer();
886
887 //Assemble the APP1 marker buffer required by MediaCodec
888 uint8_t kExifApp1Marker[] = {'E', 'x', 'i', 'f', 0xFF, 0xE1, 0x00, 0x00};
889 kExifApp1Marker[6] = static_cast<uint8_t>(newApp1Length >> 8);
890 kExifApp1Marker[7] = static_cast<uint8_t>(newApp1Length & 0xFF);
891 size_t appSegmentBufferSize = sizeof(kExifApp1Marker) +
892 appSegmentSize - app1Size + newApp1Length;
893 uint8_t* appSegmentBuffer = new uint8_t[appSegmentBufferSize];
894 memcpy(appSegmentBuffer, kExifApp1Marker, sizeof(kExifApp1Marker));
895 memcpy(appSegmentBuffer + sizeof(kExifApp1Marker), newApp1Segment, newApp1Length);
896 if (appSegmentSize - app1Size > 0) {
897 memcpy(appSegmentBuffer + sizeof(kExifApp1Marker) + newApp1Length,
898 inputFrame.appSegmentBuffer.data + app1Size, appSegmentSize - app1Size);
899 }
900
901 sp<ABuffer> aBuffer = new ABuffer(appSegmentBuffer, appSegmentBufferSize);
902 auto res = inputFrame.muxer->writeSampleData(aBuffer, inputFrame.trackIndex,
903 timestamp, MediaCodec::BUFFER_FLAG_MUXER_DATA);
904 delete[] appSegmentBuffer;
905
906 if (res != OK) {
907 ALOGE("%s: Failed to write JPEG APP segments to muxer: %s (%d)",
908 __FUNCTION__, strerror(-res), res);
909 return res;
910 }
911 inputFrame.appSegmentWritten = true;
912
913 return OK;
914}
915
916status_t HeicCompositeStream::processCodecInputFrame(InputFrame &inputFrame) {
917 for (auto& inputBuffer : inputFrame.codecInputBuffers) {
918 sp<MediaCodecBuffer> buffer;
919 auto res = mCodec->getInputBuffer(inputBuffer.index, &buffer);
920 if (res != OK) {
921 ALOGE("%s: Error getting codec input buffer: %s (%d)", __FUNCTION__,
922 strerror(-res), res);
923 return res;
924 }
925
926 // Copy one tile from source to destination.
927 size_t tileX = inputBuffer.tileIndex % mGridCols;
928 size_t tileY = inputBuffer.tileIndex / mGridCols;
929 size_t top = mGridHeight * tileY;
930 size_t left = mGridWidth * tileX;
931 size_t width = (tileX == static_cast<size_t>(mGridCols) - 1) ?
932 mOutputWidth - tileX * mGridWidth : mGridWidth;
933 size_t height = (tileY == static_cast<size_t>(mGridRows) - 1) ?
934 mOutputHeight - tileY * mGridHeight : mGridHeight;
935 ALOGV("%s: inputBuffer tileIndex [%zu, %zu], top %zu, left %zu, width %zu, height %zu",
936 __FUNCTION__, tileX, tileY, top, left, width, height);
937
938 res = copyOneYuvTile(buffer, inputFrame.yuvBuffer, top, left, width, height);
939 if (res != OK) {
940 ALOGE("%s: Failed to copy YUV tile %s (%d)", __FUNCTION__,
941 strerror(-res), res);
942 return res;
943 }
944
945 res = mCodec->queueInputBuffer(inputBuffer.index, 0, buffer->capacity(),
946 inputBuffer.timeUs, 0, nullptr /*errorDetailMsg*/);
947 if (res != OK) {
948 ALOGE("%s: Failed to queueInputBuffer to Codec: %s (%d)",
949 __FUNCTION__, strerror(-res), res);
950 return res;
951 }
952 }
953
954 inputFrame.codecInputBuffers.clear();
955 return OK;
956}
957
958status_t HeicCompositeStream::processOneCodecOutputFrame(nsecs_t timestamp,
959 InputFrame &inputFrame) {
960 auto it = inputFrame.codecOutputBuffers.begin();
961 sp<MediaCodecBuffer> buffer;
962 status_t res = mCodec->getOutputBuffer(it->index, &buffer);
963 if (res != OK) {
964 ALOGE("%s: Error getting Heic codec output buffer at index %d: %s (%d)",
965 __FUNCTION__, it->index, strerror(-res), res);
966 return res;
967 }
968 if (buffer == nullptr) {
969 ALOGE("%s: Invalid Heic codec output buffer at index %d",
970 __FUNCTION__, it->index);
971 return BAD_VALUE;
972 }
973
974 sp<ABuffer> aBuffer = new ABuffer(buffer->data(), buffer->size());
975 res = inputFrame.muxer->writeSampleData(
976 aBuffer, inputFrame.trackIndex, timestamp, 0 /*flags*/);
977 if (res != OK) {
978 ALOGE("%s: Failed to write buffer index %d to muxer: %s (%d)",
979 __FUNCTION__, it->index, strerror(-res), res);
980 return res;
981 }
982
983 mCodec->releaseOutputBuffer(it->index);
984 if (inputFrame.pendingOutputTiles == 0) {
985 ALOGW("%s: Codec generated more tiles than expected!", __FUNCTION__);
986 } else {
987 inputFrame.pendingOutputTiles--;
988 }
989
990 inputFrame.codecOutputBuffers.erase(inputFrame.codecOutputBuffers.begin());
991 return OK;
992}
993
994status_t HeicCompositeStream::processCompletedInputFrame(nsecs_t timestamp,
995 InputFrame &inputFrame) {
996 sp<ANativeWindow> outputANW = mOutputSurface;
997 inputFrame.muxer->stop();
998
999 // Copy the content of the file to memory.
1000 sp<GraphicBuffer> gb = GraphicBuffer::from(inputFrame.anb);
1001 void* dstBuffer;
1002 auto res = gb->lockAsync(GRALLOC_USAGE_SW_WRITE_OFTEN, &dstBuffer, inputFrame.fenceFd);
1003 if (res != OK) {
1004 ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
1005 strerror(-res), res);
1006 return res;
1007 }
1008
1009 off_t fSize = lseek(inputFrame.fileFd, 0, SEEK_END);
1010 if (static_cast<size_t>(fSize) > mMaxHeicBufferSize - sizeof(CameraBlob)) {
1011 ALOGE("%s: Error: MediaMuxer output size %ld is larger than buffer sizer %zu",
1012 __FUNCTION__, fSize, mMaxHeicBufferSize - sizeof(CameraBlob));
1013 return BAD_VALUE;
1014 }
1015
1016 lseek(inputFrame.fileFd, 0, SEEK_SET);
1017 ssize_t bytesRead = read(inputFrame.fileFd, dstBuffer, fSize);
1018 if (bytesRead < fSize) {
1019 ALOGE("%s: Only %zd of %ld bytes read", __FUNCTION__, bytesRead, fSize);
1020 return BAD_VALUE;
1021 }
1022
1023 close(inputFrame.fileFd);
1024 inputFrame.fileFd = -1;
1025
1026 // Fill in HEIC header
1027 uint8_t *header = static_cast<uint8_t*>(dstBuffer) + mMaxHeicBufferSize - sizeof(CameraBlob);
1028 struct CameraBlob *blobHeader = (struct CameraBlob *)header;
1029 // Must be in sync with CAMERA3_HEIC_BLOB_ID in android_media_Utils.cpp
1030 blobHeader->blobId = static_cast<CameraBlobId>(0x00FE);
1031 blobHeader->blobSize = fSize;
1032
1033 res = native_window_set_buffers_timestamp(mOutputSurface.get(), timestamp);
1034 if (res != OK) {
1035 ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
1036 __FUNCTION__, getStreamId(), strerror(-res), res);
1037 return res;
1038 }
1039
1040 res = outputANW->queueBuffer(mOutputSurface.get(), inputFrame.anb, /*fence*/ -1);
1041 if (res != OK) {
1042 ALOGE("%s: Failed to queueBuffer to Heic stream: %s (%d)", __FUNCTION__,
1043 strerror(-res), res);
1044 return res;
1045 }
1046 inputFrame.anb = nullptr;
1047
Shuzhen Wange7f4b462019-02-12 08:43:07 -08001048 ATRACE_ASYNC_END("HEIC capture", inputFrame.frameNumber);
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -08001049 return OK;
1050}
1051
1052
1053void HeicCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/) {
1054 if (inputFrame == nullptr) {
1055 return;
1056 }
1057
1058 if (inputFrame->appSegmentBuffer.data != nullptr) {
1059 mAppSegmentConsumer->unlockBuffer(inputFrame->appSegmentBuffer);
1060 inputFrame->appSegmentBuffer.data = nullptr;
1061 mAppSegmentBufferAcquired = false;
1062 }
1063
1064 while (!inputFrame->codecOutputBuffers.empty()) {
1065 auto it = inputFrame->codecOutputBuffers.begin();
1066 ALOGV("%s: releaseOutputBuffer index %d", __FUNCTION__, it->index);
1067 mCodec->releaseOutputBuffer(it->index);
1068 inputFrame->codecOutputBuffers.erase(it);
1069 }
1070
1071 if (inputFrame->yuvBuffer.data != nullptr) {
1072 mMainImageConsumer->unlockBuffer(inputFrame->yuvBuffer);
1073 inputFrame->yuvBuffer.data = nullptr;
1074 mYuvBufferAcquired = false;
1075 }
1076
1077 while (!inputFrame->codecInputBuffers.empty()) {
1078 auto it = inputFrame->codecInputBuffers.begin();
1079 inputFrame->codecInputBuffers.erase(it);
1080 }
1081
1082 if ((inputFrame->error || mErrorState) && !inputFrame->errorNotified) {
1083 notifyError(inputFrame->frameNumber);
1084 inputFrame->errorNotified = true;
1085 }
1086
1087 if (inputFrame->fileFd >= 0) {
1088 close(inputFrame->fileFd);
1089 inputFrame->fileFd = -1;
1090 }
1091
1092 if (inputFrame->anb != nullptr) {
1093 sp<ANativeWindow> outputANW = mOutputSurface;
1094 outputANW->cancelBuffer(mOutputSurface.get(), inputFrame->anb, /*fence*/ -1);
1095 inputFrame->anb = nullptr;
1096 }
1097}
1098
1099void HeicCompositeStream::releaseInputFramesLocked(int64_t currentTs) {
1100 auto it = mPendingInputFrames.begin();
1101 while (it != mPendingInputFrames.end()) {
1102 if (it->first <= currentTs) {
1103 releaseInputFrameLocked(&it->second);
1104 it = mPendingInputFrames.erase(it);
1105 } else {
1106 it++;
1107 }
1108 }
1109}
1110
/**
 * Create and configure the HEIC (or HEVC fallback) encoder for the given
 * output size, along with the loopers that drive the codec and its async
 * callbacks. Also derives the grid/tile geometry and the maximum output
 * buffer size used when assembling the final HEIC blob.
 *
 * @param width  output image width in pixels
 * @param height output image height in pixels
 * @param cameraDevice used to query static info for APP segment sizing
 * @return OK on success; BAD_VALUE for unsupported sizes; NO_INIT/NO_MEMORY
 *         or a codec error code on setup failure
 */
status_t HeicCompositeStream::initializeCodec(uint32_t width, uint32_t height,
        const sp<CameraDeviceBase>& cameraDevice) {
    ALOGV("%s", __FUNCTION__);

    // Query whether any encoder supports this size, and whether it must be
    // encoded as a tile grid. Sets mUseHeic (HEIC vs HEVC) as a side effect.
    bool useGrid = false;
    bool isSizeSupported = isSizeSupportedByHeifEncoder(width, height,
            &mUseHeic, &useGrid, nullptr);
    if (!isSizeSupported) {
        ALOGE("%s: Encoder doesnt' support size %u x %u!",
                __FUNCTION__, width, height);
        return BAD_VALUE;
    }

    // Create Looper for MediaCodec.
    auto desiredMime = mUseHeic ? MIMETYPE_IMAGE_ANDROID_HEIC : MIMETYPE_VIDEO_HEVC;
    mCodecLooper = new ALooper;
    mCodecLooper->setName("Camera3-HeicComposite-MediaCodecLooper");
    status_t res = mCodecLooper->start(
            false,   // runOnCallingThread
            false,    // canCallJava
            PRIORITY_AUDIO);
    if (res != OK) {
        ALOGE("%s: Failed to start codec looper: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return NO_INIT;
    }

    // Create HEIC/HEVC codec.
    mCodec = MediaCodec::CreateByType(mCodecLooper, desiredMime, true /*encoder*/);
    if (mCodec == nullptr) {
        ALOGE("%s: Failed to create codec for %s", __FUNCTION__, desiredMime);
        return NO_INIT;
    }

    // Create Looper and handler for Codec callback.
    mCodecCallbackHandler = new CodecCallbackHandler(this);
    if (mCodecCallbackHandler == nullptr) {
        ALOGE("%s: Failed to create codec callback handler", __FUNCTION__);
        return NO_MEMORY;
    }
    mCallbackLooper = new ALooper;
    mCallbackLooper->setName("Camera3-HeicComposite-MediaCodecCallbackLooper");
    res = mCallbackLooper->start(
            false,   // runOnCallingThread
            false,    // canCallJava
            PRIORITY_AUDIO);
    if (res != OK) {
        ALOGE("%s: Failed to start media callback looper: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return NO_INIT;
    }
    mCallbackLooper->registerHandler(mCodecCallbackHandler);

    // Route all codec events (input/output available, format change, errors)
    // to CodecCallbackHandler::onMessageReceived via kWhatCallbackNotify.
    mAsyncNotify = new AMessage(kWhatCallbackNotify, mCodecCallbackHandler);
    res = mCodec->setCallback(mAsyncNotify);
    if (res != OK) {
        ALOGE("%s: Failed to set MediaCodec callback: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    // Create output format and configure the Codec.
    sp<AMessage> outputFormat = new AMessage();
    outputFormat->setString(KEY_MIME, desiredMime);
    outputFormat->setInt32(KEY_BITRATE_MODE, BITRATE_MODE_CQ);
    outputFormat->setInt32(KEY_QUALITY, kDefaultJpegQuality);
    // Ask codec to skip timestamp check and encode all frames.
    outputFormat->setInt64("max-pts-gap-to-encoder", kNoFrameDropMaxPtsGap);

    // Derive tile geometry: when gridding, rows/cols are the number of grid
    // cells needed to cover the image (rounding up at the edges).
    int32_t gridWidth, gridHeight, gridRows, gridCols;
    if (useGrid || mUseHeic) {
        gridWidth = HeicEncoderInfoManager::kGridWidth;
        gridHeight = HeicEncoderInfoManager::kGridHeight;
        gridRows = (height + gridHeight - 1)/gridHeight;
        gridCols = (width + gridWidth - 1)/gridWidth;

        if (mUseHeic) {
            outputFormat->setInt32(KEY_TILE_WIDTH, gridWidth);
            outputFormat->setInt32(KEY_TILE_HEIGHT, gridHeight);
            outputFormat->setInt32(KEY_GRID_COLUMNS, gridCols);
            outputFormat->setInt32(KEY_GRID_ROWS, gridRows);
        }

    } else {
        // No grid: the whole image is a single "tile".
        gridWidth = width;
        gridHeight = height;
        gridRows = 1;
        gridCols = 1;
    }

    // When gridding, the codec sees one tile per frame (grid size); tiles
    // arrive via CPU-copied YUV buffers, otherwise via a Surface.
    outputFormat->setInt32(KEY_WIDTH, !useGrid ? width : gridWidth);
    outputFormat->setInt32(KEY_HEIGHT, !useGrid ? height : gridHeight);
    outputFormat->setInt32(KEY_I_FRAME_INTERVAL, 0);
    outputFormat->setInt32(KEY_COLOR_FORMAT,
            useGrid ? COLOR_FormatYUV420Flexible : COLOR_FormatSurface);
    outputFormat->setInt32(KEY_FRAME_RATE, gridRows * gridCols);
    // This only serves as a hint to encoder when encoding is not real-time.
    outputFormat->setInt32(KEY_OPERATING_RATE, useGrid ? kGridOpRate : kNoGridOpRate);

    res = mCodec->configure(outputFormat, nullptr /*nativeWindow*/,
            nullptr /*crypto*/, CONFIGURE_FLAG_ENCODE);
    if (res != OK) {
        ALOGE("%s: Failed to configure codec: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    // Cache the final geometry; mMaxHeicBufferSize bounds the muxed output:
    // full YUV420 image (w*h*3/2) plus the maximum APP segment payload.
    mGridWidth = gridWidth;
    mGridHeight = gridHeight;
    mGridRows = gridRows;
    mGridCols = gridCols;
    mUseGrid = useGrid;
    mOutputWidth = width;
    mOutputHeight = height;
    mAppSegmentMaxSize = calcAppSegmentMaxSize(cameraDevice->info());
    mMaxHeicBufferSize = mOutputWidth * mOutputHeight * 3 / 2 + mAppSegmentMaxSize;

    return OK;
}
1230
/**
 * Tear down the encoder and its loopers created by initializeCodec().
 * The codec is stopped/released before the loopers so no further callbacks
 * are dispatched during teardown.
 */
void HeicCompositeStream::deinitCodec() {
    ALOGV("%s", __FUNCTION__);
    if (mCodec != nullptr) {
        mCodec->stop();
        mCodec->release();
        mCodec.clear();
    }

    if (mCodecLooper != nullptr) {
        mCodecLooper->stop();
        mCodecLooper.clear();
    }

    if (mCallbackLooper != nullptr) {
        mCallbackLooper->stop();
        mCallbackLooper.clear();
    }

    // Drop cached callback message and output format.
    mAsyncNotify.clear();
    mFormat.clear();
}
1252
1253// Return the size of the complete list of app segment, 0 indicates failure
1254size_t HeicCompositeStream::findAppSegmentsSize(const uint8_t* appSegmentBuffer,
1255 size_t maxSize, size_t *app1SegmentSize) {
1256 if (appSegmentBuffer == nullptr || app1SegmentSize == nullptr) {
1257 ALOGE("%s: Invalid input appSegmentBuffer %p, app1SegmentSize %p",
1258 __FUNCTION__, appSegmentBuffer, app1SegmentSize);
1259 return 0;
1260 }
1261
1262 size_t expectedSize = 0;
1263 // First check for EXIF transport header at the end of the buffer
1264 const uint8_t *header = appSegmentBuffer + (maxSize - sizeof(struct CameraBlob));
1265 const struct CameraBlob *blob = (const struct CameraBlob*)(header);
1266 if (blob->blobId != CameraBlobId::JPEG_APP_SEGMENTS) {
1267 ALOGE("%s: Invalid EXIF blobId %hu", __FUNCTION__, blob->blobId);
1268 return 0;
1269 }
1270
1271 expectedSize = blob->blobSize;
1272 if (expectedSize == 0 || expectedSize > maxSize - sizeof(struct CameraBlob)) {
1273 ALOGE("%s: Invalid blobSize %zu.", __FUNCTION__, expectedSize);
1274 return 0;
1275 }
1276
1277 uint32_t totalSize = 0;
1278
1279 // Verify APP1 marker (mandatory)
1280 uint8_t app1Marker[] = {0xFF, 0xE1};
1281 if (memcmp(appSegmentBuffer, app1Marker, sizeof(app1Marker))) {
1282 ALOGE("%s: Invalid APP1 marker: %x, %x", __FUNCTION__,
1283 appSegmentBuffer[0], appSegmentBuffer[1]);
1284 return 0;
1285 }
1286 totalSize += sizeof(app1Marker);
1287
1288 uint16_t app1Size = (static_cast<uint16_t>(appSegmentBuffer[totalSize]) << 8) +
1289 appSegmentBuffer[totalSize+1];
1290 totalSize += app1Size;
1291
1292 ALOGV("%s: Expected APP segments size %zu, APP1 segment size %u",
1293 __FUNCTION__, expectedSize, app1Size);
1294 while (totalSize < expectedSize) {
1295 if (appSegmentBuffer[totalSize] != 0xFF ||
1296 appSegmentBuffer[totalSize+1] <= 0xE1 ||
1297 appSegmentBuffer[totalSize+1] > 0xEF) {
1298 // Invalid APPn marker
1299 ALOGE("%s: Invalid APPn marker: %x, %x", __FUNCTION__,
1300 appSegmentBuffer[totalSize], appSegmentBuffer[totalSize+1]);
1301 return 0;
1302 }
1303 totalSize += 2;
1304
1305 uint16_t appnSize = (static_cast<uint16_t>(appSegmentBuffer[totalSize]) << 8) +
1306 appSegmentBuffer[totalSize+1];
1307 totalSize += appnSize;
1308 }
1309
1310 if (totalSize != expectedSize) {
1311 ALOGE("%s: Invalid JPEG APP segments: totalSize %u vs expected size %zu",
1312 __FUNCTION__, totalSize, expectedSize);
1313 return 0;
1314 }
1315
1316 *app1SegmentSize = app1Size + sizeof(app1Marker);
1317 return expectedSize;
1318}
1319
1320int64_t HeicCompositeStream::findTimestampInNsLocked(int64_t timeInUs) {
1321 for (const auto& fn : mFrameNumberMap) {
1322 if (timeInUs == ns2us(fn.second)) {
1323 return fn.second;
1324 }
1325 }
1326 for (const auto& inputFrame : mPendingInputFrames) {
1327 if (timeInUs == ns2us(inputFrame.first)) {
1328 return inputFrame.first;
1329 }
1330 }
1331 return -1;
1332}
1333
/**
 * Copy one tile of the source YUV420 image into a codec input buffer.
 *
 * The codec buffer's plane layout is described by the MediaImage2 metadata
 * attached as "image-data". Depending on whether the codec and camera
 * buffers agree on chroma layout (semi-planar NV12/NV21 vs planar), the
 * chroma copy is done per-row (fast paths) or per-pixel (conversion path).
 *
 * @param codecBuffer destination codec input buffer (flexible YUV420)
 * @param yuvBuffer   locked source image from the main image consumer
 * @param top/left    tile origin within the source image, in pixels
 * @param width/height tile dimensions in pixels
 * @return OK on success; BAD_VALUE if the codec buffer layout is unusable
 */
status_t HeicCompositeStream::copyOneYuvTile(sp<MediaCodecBuffer>& codecBuffer,
        const CpuConsumer::LockedBuffer& yuvBuffer,
        size_t top, size_t left, size_t width, size_t height) {
    ATRACE_CALL();

    // Get stride information for codecBuffer
    sp<ABuffer> imageData;
    if (!codecBuffer->meta()->findBuffer("image-data", &imageData)) {
        ALOGE("%s: Codec input buffer is not for image data!", __FUNCTION__);
        return BAD_VALUE;
    }
    if (imageData->size() != sizeof(MediaImage2)) {
        ALOGE("%s: Invalid codec input image size %zu, expected %zu",
                __FUNCTION__, imageData->size(), sizeof(MediaImage2));
        return BAD_VALUE;
    }
    // Only 8-bit 3-plane YUV layouts are supported by the copy loops below.
    MediaImage2* imageInfo = reinterpret_cast<MediaImage2*>(imageData->data());
    if (imageInfo->mType != MediaImage2::MEDIA_IMAGE_TYPE_YUV ||
            imageInfo->mBitDepth != 8 ||
            imageInfo->mBitDepthAllocated != 8 ||
            imageInfo->mNumPlanes != 3) {
        ALOGE("%s: Invalid codec input image info: mType %d, mBitDepth %d, "
                "mBitDepthAllocated %d, mNumPlanes %d!", __FUNCTION__,
                imageInfo->mType, imageInfo->mBitDepth,
                imageInfo->mBitDepthAllocated, imageInfo->mNumPlanes);
        return BAD_VALUE;
    }

    ALOGV("%s: yuvBuffer chromaStep %d, chromaStride %d",
            __FUNCTION__, yuvBuffer.chromaStep, yuvBuffer.chromaStride);
    ALOGV("%s: U offset %u, V offset %u, U rowInc %d, V rowInc %d, U colInc %d, V colInc %d",
            __FUNCTION__, imageInfo->mPlane[MediaImage2::U].mOffset,
            imageInfo->mPlane[MediaImage2::V].mOffset,
            imageInfo->mPlane[MediaImage2::U].mRowInc,
            imageInfo->mPlane[MediaImage2::V].mRowInc,
            imageInfo->mPlane[MediaImage2::U].mColInc,
            imageInfo->mPlane[MediaImage2::V].mColInc);

    // Y
    // Full-resolution luma: one memcpy per tile row.
    for (auto row = top; row < top+height; row++) {
        uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::Y].mOffset +
                imageInfo->mPlane[MediaImage2::Y].mRowInc * (row - top);
        memcpy(dst, yuvBuffer.data+row*yuvBuffer.stride+left, width);
    }

    // U is Cb, V is Cr
    // Classify the codec's chroma layout from the MediaImage2 plane geometry:
    // interleaved (semi-planar) when U/V are 1 byte apart with colInc 2;
    // planar when the planes are at least half-height apart with colInc 1.
    bool codecUPlaneFirst = imageInfo->mPlane[MediaImage2::V].mOffset >
            imageInfo->mPlane[MediaImage2::U].mOffset;
    uint32_t codecUvOffsetDiff = codecUPlaneFirst ?
            imageInfo->mPlane[MediaImage2::V].mOffset - imageInfo->mPlane[MediaImage2::U].mOffset :
            imageInfo->mPlane[MediaImage2::U].mOffset - imageInfo->mPlane[MediaImage2::V].mOffset;
    bool isCodecUvSemiplannar = (codecUvOffsetDiff == 1) &&
            (imageInfo->mPlane[MediaImage2::U].mRowInc ==
            imageInfo->mPlane[MediaImage2::V].mRowInc) &&
            (imageInfo->mPlane[MediaImage2::U].mColInc == 2) &&
            (imageInfo->mPlane[MediaImage2::V].mColInc == 2);
    bool isCodecUvPlannar =
            ((codecUPlaneFirst && codecUvOffsetDiff >=
                    imageInfo->mPlane[MediaImage2::U].mRowInc * imageInfo->mHeight/2) ||
            ((!codecUPlaneFirst && codecUvOffsetDiff >=
                    imageInfo->mPlane[MediaImage2::V].mRowInc * imageInfo->mHeight/2))) &&
            imageInfo->mPlane[MediaImage2::U].mColInc == 1 &&
            imageInfo->mPlane[MediaImage2::V].mColInc == 1;
    bool cameraUPlaneFirst = yuvBuffer.dataCr > yuvBuffer.dataCb;

    if (isCodecUvSemiplannar && yuvBuffer.chromaStep == 2 &&
            (codecUPlaneFirst == cameraUPlaneFirst)) {
        // UV semiplannar
        // The chrome plane could be either Cb first, or Cr first. Take the
        // smaller address.
        uint8_t *src = std::min(yuvBuffer.dataCb, yuvBuffer.dataCr);
        MediaImage2::PlaneIndex dstPlane = codecUvOffsetDiff > 0 ? MediaImage2::U : MediaImage2::V;
        // Chroma is subsampled 2x vertically; copy interleaved rows whole.
        for (auto row = top/2; row < (top+height)/2; row++) {
            uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[dstPlane].mOffset +
                    imageInfo->mPlane[dstPlane].mRowInc * (row - top/2);
            memcpy(dst, src+row*yuvBuffer.chromaStride+left, width);
        }
    } else if (isCodecUvPlannar && yuvBuffer.chromaStep == 1) {
        // U plane
        for (auto row = top/2; row < (top+height)/2; row++) {
            uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::U].mOffset +
                    imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2);
            memcpy(dst, yuvBuffer.dataCb+row*yuvBuffer.chromaStride+left/2, width/2);
        }

        // V plane
        for (auto row = top/2; row < (top+height)/2; row++) {
            uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::V].mOffset +
                    imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2);
            memcpy(dst, yuvBuffer.dataCr+row*yuvBuffer.chromaStride+left/2, width/2);
        }
    } else {
        // Convert between semiplannar and plannar
        // Slow path: per-pixel copy driven by the declared col/row increments,
        // handling any mix of chroma layouts.
        uint8_t *dst = codecBuffer->data();
        for (auto row = top/2; row < (top+height)/2; row++) {
            for (auto col = left/2; col < (left+width)/2; col++) {
                // U/Cb
                int32_t dstIndex = imageInfo->mPlane[MediaImage2::U].mOffset +
                        imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2) +
                        imageInfo->mPlane[MediaImage2::U].mColInc * (col - left/2);
                int32_t srcIndex = row * yuvBuffer.chromaStride + yuvBuffer.chromaStep * col;
                dst[dstIndex] = yuvBuffer.dataCb[srcIndex];

                // V/Cr
                dstIndex = imageInfo->mPlane[MediaImage2::V].mOffset +
                        imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2) +
                        imageInfo->mPlane[MediaImage2::V].mColInc * (col - left/2);
                srcIndex = row * yuvBuffer.chromaStride + yuvBuffer.chromaStep * col;
                dst[dstIndex] = yuvBuffer.dataCr[srcIndex];
            }
        }
    }
    return OK;
}
1448
1449size_t HeicCompositeStream::calcAppSegmentMaxSize(const CameraMetadata& info) {
1450 camera_metadata_ro_entry_t entry = info.find(ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT);
1451 size_t maxAppsSegment = 1;
1452 if (entry.count > 0) {
1453 maxAppsSegment = entry.data.u8[0] < 1 ? 1 :
1454 entry.data.u8[0] > 16 ? 16 : entry.data.u8[0];
1455 }
1456 return maxAppsSegment * (2 + 0xFFFF) + sizeof(struct CameraBlob);
1457}
1458
1459bool HeicCompositeStream::threadLoop() {
1460 int64_t currentTs = INT64_MAX;
1461 bool newInputAvailable = false;
1462
1463 {
1464 Mutex::Autolock l(mMutex);
1465 if (mErrorState) {
1466 // In case we landed in error state, return any pending buffers and
1467 // halt all further processing.
1468 compilePendingInputLocked();
1469 releaseInputFramesLocked(currentTs);
1470 return false;
1471 }
1472
1473
1474 while (!newInputAvailable) {
1475 compilePendingInputLocked();
1476 newInputAvailable = getNextReadyInputLocked(&currentTs);
1477
1478 if (!newInputAvailable) {
1479 auto failingFrameNumber = getNextFailingInputLocked(&currentTs);
1480 if (failingFrameNumber >= 0) {
1481 // We cannot erase 'mPendingInputFrames[currentTs]' at this point because it is
1482 // possible for two internal stream buffers to fail. In such scenario the
1483 // composite stream should notify the client about a stream buffer error only
1484 // once and this information is kept within 'errorNotified'.
1485 // Any present failed input frames will be removed on a subsequent call to
1486 // 'releaseInputFramesLocked()'.
1487 releaseInputFrameLocked(&mPendingInputFrames[currentTs]);
1488 currentTs = INT64_MAX;
1489 }
1490
1491 auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration);
1492 if (ret == TIMED_OUT) {
1493 return true;
1494 } else if (ret != OK) {
1495 ALOGE("%s: Timed wait on condition failed: %s (%d)", __FUNCTION__,
1496 strerror(-ret), ret);
1497 return false;
1498 }
1499 }
1500 }
1501 }
1502
1503 auto res = processInputFrame(currentTs, mPendingInputFrames[currentTs]);
1504 Mutex::Autolock l(mMutex);
1505 if (res != OK) {
1506 ALOGE("%s: Failed processing frame with timestamp: %" PRIu64 ": %s (%d)",
1507 __FUNCTION__, currentTs, strerror(-res), res);
1508 mPendingInputFrames[currentTs].error = true;
1509 }
1510
1511 if (mPendingInputFrames[currentTs].error ||
1512 (mPendingInputFrames[currentTs].appSegmentWritten &&
1513 mPendingInputFrames[currentTs].pendingOutputTiles == 0)) {
1514 releaseInputFramesLocked(currentTs);
1515 }
1516
1517 return true;
1518}
1519
1520bool HeicCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
1521 bool res = false;
1522 // Buffer errors concerning internal composite streams should not be directly visible to
1523 // camera clients. They must only receive a single buffer error with the public composite
1524 // stream id.
1525 if ((resultExtras.errorStreamId == mAppSegmentStreamId) ||
1526 (resultExtras.errorStreamId == mMainImageStreamId)) {
1527 flagAnErrorFrameNumber(resultExtras.frameNumber);
1528 res = true;
1529 }
1530
1531 return res;
1532}
1533
Shuzhen Wange7f4b462019-02-12 08:43:07 -08001534void HeicCompositeStream::onResultError(const CaptureResultExtras& resultExtras) {
1535 // For result error, since the APPS_SEGMENT buffer already contains EXIF,
1536 // simply skip using the capture result metadata to override EXIF.
1537 Mutex::Autolock l(mMutex);
1538
1539 int64_t timestamp = -1;
1540 for (const auto& fn : mFrameNumberMap) {
1541 if (fn.first == resultExtras.frameNumber) {
1542 timestamp = fn.second;
1543 break;
1544 }
1545 }
1546 if (timestamp == -1) {
1547 for (const auto& inputFrame : mPendingInputFrames) {
1548 if (inputFrame.second.frameNumber == resultExtras.frameNumber) {
1549 timestamp = inputFrame.first;
1550 break;
1551 }
1552 }
1553 }
1554
1555 if (timestamp == -1) {
1556 ALOGE("%s: Failed to find shutter timestamp for result error!", __FUNCTION__);
1557 return;
1558 }
1559
1560 mCaptureResults.emplace(timestamp, std::make_tuple(resultExtras.frameNumber, CameraMetadata()));
1561 mInputReadyCondition.signal();
1562}
1563
/**
 * Dispatch asynchronous MediaCodec callbacks to the owning composite stream.
 *
 * All codec events arrive as a single kWhatCallbackNotify message whose
 * "callbackID" field selects the event type (input/output buffer available,
 * output format change, or codec error). Malformed messages are logged and
 * dropped; the parent is only notified for well-formed events. If the parent
 * stream has already been destroyed, the message is silently ignored.
 */
void HeicCompositeStream::CodecCallbackHandler::onMessageReceived(const sp<AMessage> &msg) {
    sp<HeicCompositeStream> parent = mParent.promote();
    if (parent == nullptr) return;

    switch (msg->what()) {
        case kWhatCallbackNotify: {
             int32_t cbID;
             if (!msg->findInt32("callbackID", &cbID)) {
                 ALOGE("kWhatCallbackNotify: callbackID is expected.");
                 break;
             }

             ALOGV("kWhatCallbackNotify: cbID = %d", cbID);

             switch (cbID) {
                 // A codec input slot is free; parent will fill it with a tile.
                 case MediaCodec::CB_INPUT_AVAILABLE: {
                     int32_t index;
                     if (!msg->findInt32("index", &index)) {
                         ALOGE("CB_INPUT_AVAILABLE: index is expected.");
                         break;
                     }
                     parent->onHeicInputFrameAvailable(index);
                     break;
                 }

                 // An encoded buffer is ready; repackage the message fields
                 // into a CodecOutputBufferInfo for the parent.
                 case MediaCodec::CB_OUTPUT_AVAILABLE: {
                     int32_t index;
                     size_t offset;
                     size_t size;
                     int64_t timeUs;
                     int32_t flags;

                     if (!msg->findInt32("index", &index)) {
                         ALOGE("CB_OUTPUT_AVAILABLE: index is expected.");
                         break;
                     }
                     if (!msg->findSize("offset", &offset)) {
                         ALOGE("CB_OUTPUT_AVAILABLE: offset is expected.");
                         break;
                     }
                     if (!msg->findSize("size", &size)) {
                         ALOGE("CB_OUTPUT_AVAILABLE: size is expected.");
                         break;
                     }
                     if (!msg->findInt64("timeUs", &timeUs)) {
                         ALOGE("CB_OUTPUT_AVAILABLE: timeUs is expected.");
                         break;
                     }
                     if (!msg->findInt32("flags", &flags)) {
                         ALOGE("CB_OUTPUT_AVAILABLE: flags is expected.");
                         break;
                     }

                     CodecOutputBufferInfo bufferInfo = {
                         index,
                         (int32_t)offset,
                         (int32_t)size,
                         timeUs,
                         (uint32_t)flags};

                     parent->onHeicOutputFrameAvailable(bufferInfo);
                     break;
                 }

                 // Encoder output format is known (e.g. after configure).
                 case MediaCodec::CB_OUTPUT_FORMAT_CHANGED: {
                     sp<AMessage> format;
                     if (!msg->findMessage("format", &format)) {
                         ALOGE("CB_OUTPUT_FORMAT_CHANGED: format is expected.");
                         break;
                     }

                     parent->onHeicFormatChanged(format);
                     break;
                 }

                 // Fatal codec error; "detail" is optional and best-effort.
                 case MediaCodec::CB_ERROR: {
                     status_t err;
                     int32_t actionCode;
                     AString detail;
                     if (!msg->findInt32("err", &err)) {
                         ALOGE("CB_ERROR: err is expected.");
                         break;
                     }
                     if (!msg->findInt32("action", &actionCode)) {
                         ALOGE("CB_ERROR: action is expected.");
                         break;
                     }
                     msg->findString("detail", &detail);
                     ALOGE("Codec reported error(0x%x), actionCode(%d), detail(%s)",
                             err, actionCode, detail.c_str());

                     parent->onHeicCodecError();
                     break;
                 }

                 default: {
                     ALOGE("kWhatCallbackNotify: callbackID(%d) is unexpected.", cbID);
                     break;
                 }
             }
             break;
        }

        default:
            ALOGE("shouldn't be here");
            break;
    }
}
1672
1673}; // namespace camera3
1674}; // namespace android