/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera3-HeicCompositeStream"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <linux/memfd.h>
#include <pthread.h>
#include <sys/syscall.h>

#include <android/hardware/camera/device/3.5/types.h>
#include <libyuv.h>
#include <gui/Surface.h>
#include <utils/Log.h>
#include <utils/Trace.h>

#include <media/ICrypto.h>
#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/MediaDefs.h>
#include <media/stagefright/MediaCodecConstants.h>

#include "common/CameraDeviceBase.h"
#include "utils/ExifUtils.h"
#include "HeicEncoderInfoManager.h"
#include "HeicCompositeStream.h"

using android::hardware::camera::device::V3_5::CameraBlob;
using android::hardware::camera::device::V3_5::CameraBlobId;

namespace android {
namespace camera3 {
47
48HeicCompositeStream::HeicCompositeStream(wp<CameraDeviceBase> device,
49 wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
50 CompositeStream(device, cb),
51 mUseHeic(false),
52 mNumOutputTiles(1),
53 mOutputWidth(0),
54 mOutputHeight(0),
55 mMaxHeicBufferSize(0),
56 mGridWidth(HeicEncoderInfoManager::kGridWidth),
57 mGridHeight(HeicEncoderInfoManager::kGridHeight),
58 mGridRows(1),
59 mGridCols(1),
60 mUseGrid(false),
61 mAppSegmentStreamId(-1),
62 mAppSegmentSurfaceId(-1),
63 mAppSegmentBufferAcquired(false),
64 mMainImageStreamId(-1),
65 mMainImageSurfaceId(-1),
66 mYuvBufferAcquired(false),
67 mProducerListener(new ProducerListener()),
        mDequeuedOutputBufferCnt(0),
        mCodecOutputCounter(0),
        mGridTimestampUs(0) {
71}
72
73HeicCompositeStream::~HeicCompositeStream() {
74 // Call deinitCodec in case stream hasn't been deleted yet to avoid any
75 // memory/resource leak.
76 deinitCodec();
77
78 mInputAppSegmentBuffers.clear();
79 mCodecOutputBuffers.clear();
80
81 mAppSegmentStreamId = -1;
82 mAppSegmentSurfaceId = -1;
83 mAppSegmentConsumer.clear();
84 mAppSegmentSurface.clear();
85
86 mMainImageStreamId = -1;
87 mMainImageSurfaceId = -1;
88 mMainImageConsumer.clear();
89 mMainImageSurface.clear();
90}
91
92bool HeicCompositeStream::isHeicCompositeStream(const sp<Surface> &surface) {
93 ANativeWindow *anw = surface.get();
94 status_t err;
95 int format;
96 if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
97 String8 msg = String8::format("Failed to query Surface format: %s (%d)", strerror(-err),
98 err);
99 ALOGE("%s: %s", __FUNCTION__, msg.string());
100 return false;
101 }
102
103 int dataspace;
104 if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
105 String8 msg = String8::format("Failed to query Surface dataspace: %s (%d)", strerror(-err),
106 err);
107 ALOGE("%s: %s", __FUNCTION__, msg.string());
108 return false;
109 }
110
111 return ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == HAL_DATASPACE_HEIF));
112}
113
114status_t HeicCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
115 bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
116 camera3_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
117 std::vector<int> *surfaceIds, int /*streamSetId*/, bool /*isShared*/) {
118
119 sp<CameraDeviceBase> device = mDevice.promote();
120 if (!device.get()) {
121 ALOGE("%s: Invalid camera device!", __FUNCTION__);
122 return NO_INIT;
123 }
124
125 status_t res = initializeCodec(width, height, device);
126 if (res != OK) {
127 ALOGE("%s: Failed to initialize HEIC/HEVC codec: %s (%d)",
128 __FUNCTION__, strerror(-res), res);
129 return NO_INIT;
130 }
131
132 sp<IGraphicBufferProducer> producer;
133 sp<IGraphicBufferConsumer> consumer;
134 BufferQueue::createBufferQueue(&producer, &consumer);
135 mAppSegmentConsumer = new CpuConsumer(consumer, 1);
136 mAppSegmentConsumer->setFrameAvailableListener(this);
137 mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
138 mAppSegmentSurface = new Surface(producer);
139
    mStaticInfo = device->info();

    res = device->createStream(mAppSegmentSurface, mAppSegmentMaxSize, 1, format,
            kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId, surfaceIds);
144 if (res == OK) {
145 mAppSegmentSurfaceId = (*surfaceIds)[0];
146 } else {
147 ALOGE("%s: Failed to create JPEG App segment stream: %s (%d)", __FUNCTION__,
148 strerror(-res), res);
149 return res;
150 }
151
152 if (!mUseGrid) {
153 res = mCodec->createInputSurface(&producer);
154 if (res != OK) {
155 ALOGE("%s: Failed to create input surface for Heic codec: %s (%d)",
156 __FUNCTION__, strerror(-res), res);
157 return res;
158 }
159 } else {
160 BufferQueue::createBufferQueue(&producer, &consumer);
161 mMainImageConsumer = new CpuConsumer(consumer, 1);
162 mMainImageConsumer->setFrameAvailableListener(this);
163 mMainImageConsumer->setName(String8("Camera3-HeicComposite-HevcInputYUVStream"));
164 }
165 mMainImageSurface = new Surface(producer);
166
167 res = mCodec->start();
168 if (res != OK) {
169 ALOGE("%s: Failed to start codec: %s (%d)", __FUNCTION__,
170 strerror(-res), res);
171 return res;
172 }
173
174 std::vector<int> sourceSurfaceId;
175 //Use YUV_888 format if framework tiling is needed.
176 int srcStreamFmt = mUseGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
177 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
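    // Note: with IMPLEMENTATION_DEFINED the codec consumes buffers directly from its input
    // surface, so no CPU-side copy is needed; YUV_888 is only required when the framework
    // copies tiles into the codec input buffers itself.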
178 res = device->createStream(mMainImageSurface, width, height, srcStreamFmt, kHeifDataSpace,
179 rotation, id, physicalCameraId, &sourceSurfaceId);
180 if (res == OK) {
181 mMainImageSurfaceId = sourceSurfaceId[0];
182 mMainImageStreamId = *id;
183 } else {
184 ALOGE("%s: Failed to create main image stream: %s (%d)", __FUNCTION__,
185 strerror(-res), res);
186 return res;
187 }
188
189 mOutputSurface = consumers[0];
190 res = registerCompositeStreamListener(getStreamId());
191 if (res != OK) {
192 ALOGE("%s: Failed to register HAL main image stream", __FUNCTION__);
193 return res;
194 }
195
    initCopyRowFunction(width);
    return res;
198}
199
200status_t HeicCompositeStream::deleteInternalStreams() {
201 requestExit();
202 auto res = join();
203 if (res != OK) {
204 ALOGE("%s: Failed to join with the main processing thread: %s (%d)", __FUNCTION__,
205 strerror(-res), res);
206 }
207
208 deinitCodec();
209
210 if (mAppSegmentStreamId >= 0) {
211 sp<CameraDeviceBase> device = mDevice.promote();
212 if (!device.get()) {
213 ALOGE("%s: Invalid camera device!", __FUNCTION__);
214 return NO_INIT;
215 }
216
217 res = device->deleteStream(mAppSegmentStreamId);
218 mAppSegmentStreamId = -1;
219 }
220
    if (mOutputSurface != nullptr) {
222 mOutputSurface->disconnect(NATIVE_WINDOW_API_CAMERA);
223 mOutputSurface.clear();
224 }
    return res;
226}
227
228void HeicCompositeStream::onBufferReleased(const BufferInfo& bufferInfo) {
229 Mutex::Autolock l(mMutex);
230
231 if (bufferInfo.mError) return;
232
233 mCodecOutputBufferTimestamps.push(bufferInfo.mTimestamp);
    ALOGV("%s: [%" PRId64 "]: Adding codecOutputBufferTimestamp (%zu timestamps in total)",
            __FUNCTION__, bufferInfo.mTimestamp, mCodecOutputBufferTimestamps.size());
}
237
238// We need to get the settings early to handle the case where the codec output
239// arrives earlier than result metadata.
240void HeicCompositeStream::onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId,
241 const CameraMetadata& settings) {
242 ATRACE_ASYNC_BEGIN("HEIC capture", frameNumber);
243
244 Mutex::Autolock l(mMutex);
245 if (mErrorState || (streamId != getStreamId())) {
246 return;
247 }
248
249 mPendingCaptureResults.emplace(frameNumber, CameraMetadata());
250
251 camera_metadata_ro_entry entry;
252
253 int32_t orientation = 0;
254 entry = settings.find(ANDROID_JPEG_ORIENTATION);
255 if (entry.count == 1) {
256 orientation = entry.data.i32[0];
257 }
258
259 int32_t quality = kDefaultJpegQuality;
260 entry = settings.find(ANDROID_JPEG_QUALITY);
261 if (entry.count == 1) {
262 quality = entry.data.i32[0];
263 }
264
265 mSettingsByFrameNumber[frameNumber] = std::make_pair(orientation, quality);
266}
267
268void HeicCompositeStream::onFrameAvailable(const BufferItem& item) {
269 if (item.mDataSpace == static_cast<android_dataspace>(kAppSegmentDataSpace)) {
270 ALOGV("%s: JPEG APP segments buffer with ts: %" PRIu64 " ms. arrived!",
271 __func__, ns2ms(item.mTimestamp));
272
273 Mutex::Autolock l(mMutex);
274 if (!mErrorState) {
275 mInputAppSegmentBuffers.push_back(item.mTimestamp);
276 mInputReadyCondition.signal();
277 }
278 } else if (item.mDataSpace == kHeifDataSpace) {
279 ALOGV("%s: YUV_888 buffer with ts: %" PRIu64 " ms. arrived!",
280 __func__, ns2ms(item.mTimestamp));
281
282 Mutex::Autolock l(mMutex);
283 if (!mUseGrid) {
284 ALOGE("%s: YUV_888 internal stream is only supported for HEVC tiling",
285 __FUNCTION__);
286 return;
287 }
288 if (!mErrorState) {
289 mInputYuvBuffers.push_back(item.mTimestamp);
290 mInputReadyCondition.signal();
291 }
292 } else {
293 ALOGE("%s: Unexpected data space: 0x%x", __FUNCTION__, item.mDataSpace);
294 }
295}
296
297status_t HeicCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
298 const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/) {
299 if (compositeOutput == nullptr) {
300 return BAD_VALUE;
301 }
302
303 compositeOutput->clear();
304
305 bool useGrid, useHeic;
306 bool isSizeSupported = isSizeSupportedByHeifEncoder(
307 streamInfo.width, streamInfo.height, &useHeic, &useGrid, nullptr);
308 if (!isSizeSupported) {
309 // Size is not supported by either encoder.
310 return OK;
311 }
312
313 compositeOutput->insert(compositeOutput->end(), 2, streamInfo);
314
315 // JPEG APPS segments Blob stream info
316 (*compositeOutput)[0].width = calcAppSegmentMaxSize(ch);
317 (*compositeOutput)[0].height = 1;
318 (*compositeOutput)[0].format = HAL_PIXEL_FORMAT_BLOB;
319 (*compositeOutput)[0].dataSpace = kAppSegmentDataSpace;
320 (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
321
322 // YUV/IMPLEMENTATION_DEFINED stream info
323 (*compositeOutput)[1].width = streamInfo.width;
324 (*compositeOutput)[1].height = streamInfo.height;
325 (*compositeOutput)[1].format = useGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
326 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
327 (*compositeOutput)[1].dataSpace = kHeifDataSpace;
328 (*compositeOutput)[1].consumerUsage = useHeic ? GRALLOC_USAGE_HW_IMAGE_ENCODER :
329 useGrid ? GRALLOC_USAGE_SW_READ_OFTEN : GRALLOC_USAGE_HW_VIDEO_ENCODER;
330
331 return NO_ERROR;
332}
333
334bool HeicCompositeStream::isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
        bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName) {
    static HeicEncoderInfoManager& heicManager = HeicEncoderInfoManager::getInstance();
    return heicManager.isSizeSupported(width, height, useHeic, useGrid, stall, hevcName);
}
339
340bool HeicCompositeStream::isInMemoryTempFileSupported() {
341 int memfd = syscall(__NR_memfd_create, "HEIF-try-memfd", MFD_CLOEXEC);
342 if (memfd == -1) {
343 if (errno != ENOSYS) {
344 ALOGE("%s: Failed to create tmpfs file. errno %d", __FUNCTION__, errno);
345 }
346 return false;
347 }
348 close(memfd);
349 return true;
350}
351
352void HeicCompositeStream::onHeicOutputFrameAvailable(
353 const CodecOutputBufferInfo& outputBufferInfo) {
354 Mutex::Autolock l(mMutex);
355
356 ALOGV("%s: index %d, offset %d, size %d, time %" PRId64 ", flags 0x%x",
357 __FUNCTION__, outputBufferInfo.index, outputBufferInfo.offset,
358 outputBufferInfo.size, outputBufferInfo.timeUs, outputBufferInfo.flags);
359
360 if (!mErrorState) {
361 if ((outputBufferInfo.size > 0) &&
362 ((outputBufferInfo.flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0)) {
363 mCodecOutputBuffers.push_back(outputBufferInfo);
364 mInputReadyCondition.signal();
365 } else {
            ALOGV("%s: Releasing output buffer: size %d flags: 0x%x ", __FUNCTION__,
                    outputBufferInfo.size, outputBufferInfo.flags);
            mCodec->releaseOutputBuffer(outputBufferInfo.index);
369 }
370 } else {
371 mCodec->releaseOutputBuffer(outputBufferInfo.index);
372 }
373}
374
375void HeicCompositeStream::onHeicInputFrameAvailable(int32_t index) {
376 Mutex::Autolock l(mMutex);
377
378 if (!mUseGrid) {
379 ALOGE("%s: Codec YUV input mode must only be used for Hevc tiling mode", __FUNCTION__);
380 return;
381 }
382
383 mCodecInputBuffers.push_back(index);
384 mInputReadyCondition.signal();
385}
386
387void HeicCompositeStream::onHeicFormatChanged(sp<AMessage>& newFormat) {
388 if (newFormat == nullptr) {
389 ALOGE("%s: newFormat must not be null!", __FUNCTION__);
390 return;
391 }
392
393 Mutex::Autolock l(mMutex);
394
395 AString mime;
396 AString mimeHeic(MIMETYPE_IMAGE_ANDROID_HEIC);
397 newFormat->findString(KEY_MIME, &mime);
398 if (mime != mimeHeic) {
        // For the HEVC codec, the keys below need to be filled in or overwritten so that the
        // muxer can handle the stream as a HEIC output image.
401 newFormat->setString(KEY_MIME, mimeHeic);
402 newFormat->setInt32(KEY_WIDTH, mOutputWidth);
403 newFormat->setInt32(KEY_HEIGHT, mOutputHeight);
404 if (mUseGrid) {
405 newFormat->setInt32(KEY_TILE_WIDTH, mGridWidth);
406 newFormat->setInt32(KEY_TILE_HEIGHT, mGridHeight);
407 newFormat->setInt32(KEY_GRID_ROWS, mGridRows);
408 newFormat->setInt32(KEY_GRID_COLUMNS, mGridCols);
409 }
410 }
411 newFormat->setInt32(KEY_IS_DEFAULT, 1 /*isPrimary*/);
412
413 int32_t gridRows, gridCols;
414 if (newFormat->findInt32(KEY_GRID_ROWS, &gridRows) &&
415 newFormat->findInt32(KEY_GRID_COLUMNS, &gridCols)) {
416 mNumOutputTiles = gridRows * gridCols;
417 } else {
418 mNumOutputTiles = 1;
419 }
420
    mFormat = newFormat;

    ALOGV("%s: mNumOutputTiles is %zu", __FUNCTION__, mNumOutputTiles);
    mInputReadyCondition.signal();
}
426
427void HeicCompositeStream::onHeicCodecError() {
428 Mutex::Autolock l(mMutex);
429 mErrorState = true;
430}
431
432status_t HeicCompositeStream::configureStream() {
433 if (isRunning()) {
434 // Processing thread is already running, nothing more to do.
435 return NO_ERROR;
436 }
437
438 if (mOutputSurface.get() == nullptr) {
439 ALOGE("%s: No valid output surface set!", __FUNCTION__);
440 return NO_INIT;
441 }
442
443 auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
444 if (res != OK) {
445 ALOGE("%s: Unable to connect to native window for stream %d",
446 __FUNCTION__, mMainImageStreamId);
447 return res;
448 }
449
450 if ((res = native_window_set_buffers_format(mOutputSurface.get(), HAL_PIXEL_FORMAT_BLOB))
451 != OK) {
452 ALOGE("%s: Unable to configure stream buffer format for stream %d", __FUNCTION__,
453 mMainImageStreamId);
454 return res;
455 }
456
457 ANativeWindow *anwConsumer = mOutputSurface.get();
458 int maxConsumerBuffers;
459 if ((res = anwConsumer->query(anwConsumer, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
460 &maxConsumerBuffers)) != OK) {
461 ALOGE("%s: Unable to query consumer undequeued"
462 " buffer count for stream %d", __FUNCTION__, mMainImageStreamId);
463 return res;
464 }
465
466 // Cannot use SourceSurface buffer count since it could be codec's 512*512 tile
467 // buffer count.
    if ((res = native_window_set_buffer_count(
            anwConsumer, kMaxOutputSurfaceProducerCount + maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to set buffer count for stream %d", __FUNCTION__, mMainImageStreamId);
471 return res;
472 }
473
474 if ((res = native_window_set_buffers_dimensions(anwConsumer, mMaxHeicBufferSize, 1)) != OK) {
475 ALOGE("%s: Unable to set buffer dimension %zu x 1 for stream %d: %s (%d)",
476 __FUNCTION__, mMaxHeicBufferSize, mMainImageStreamId, strerror(-res), res);
477 return res;
478 }
479
480 run("HeicCompositeStreamProc");
481
482 return NO_ERROR;
483}
484
485status_t HeicCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
486 Vector<int32_t>* /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
487 if (outSurfaceMap->find(mAppSegmentStreamId) == outSurfaceMap->end()) {
488 (*outSurfaceMap)[mAppSegmentStreamId] = std::vector<size_t>();
489 outputStreamIds->push_back(mAppSegmentStreamId);
490 }
491 (*outSurfaceMap)[mAppSegmentStreamId].push_back(mAppSegmentSurfaceId);
492
493 if (outSurfaceMap->find(mMainImageStreamId) == outSurfaceMap->end()) {
494 (*outSurfaceMap)[mMainImageStreamId] = std::vector<size_t>();
495 outputStreamIds->push_back(mMainImageStreamId);
496 }
497 (*outSurfaceMap)[mMainImageStreamId].push_back(mMainImageSurfaceId);
498
499 if (currentStreamId != nullptr) {
500 *currentStreamId = mMainImageStreamId;
501 }
502
503 return NO_ERROR;
504}
505
506void HeicCompositeStream::onShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) {
507 Mutex::Autolock l(mMutex);
508 if (mErrorState) {
509 return;
510 }
511
512 if (mSettingsByFrameNumber.find(resultExtras.frameNumber) != mSettingsByFrameNumber.end()) {
        ALOGV("%s: [%" PRId64 "]: frameNumber %" PRId64, __FUNCTION__,
                timestamp, resultExtras.frameNumber);
        mFrameNumberMap.emplace(resultExtras.frameNumber, timestamp);
516 mSettingsByTimestamp[timestamp] = mSettingsByFrameNumber[resultExtras.frameNumber];
517 mSettingsByFrameNumber.erase(resultExtras.frameNumber);
518 mInputReadyCondition.signal();
519 }
520}
521
522void HeicCompositeStream::compilePendingInputLocked() {
523 while (!mSettingsByTimestamp.empty()) {
524 auto it = mSettingsByTimestamp.begin();
525 mPendingInputFrames[it->first].orientation = it->second.first;
526 mPendingInputFrames[it->first].quality = it->second.second;
527 mSettingsByTimestamp.erase(it);
528 }
529
530 while (!mInputAppSegmentBuffers.empty() && !mAppSegmentBufferAcquired) {
531 CpuConsumer::LockedBuffer imgBuffer;
532 auto it = mInputAppSegmentBuffers.begin();
533 auto res = mAppSegmentConsumer->lockNextBuffer(&imgBuffer);
534 if (res == NOT_ENOUGH_DATA) {
            // Cannot lock any more buffers.
536 break;
537 } else if ((res != OK) || (*it != imgBuffer.timestamp)) {
538 if (res != OK) {
539 ALOGE("%s: Error locking JPEG_APP_SEGMENTS image buffer: %s (%d)", __FUNCTION__,
540 strerror(-res), res);
541 } else {
542 ALOGE("%s: Expecting JPEG_APP_SEGMENTS buffer with time stamp: %" PRId64
543 " received buffer with time stamp: %" PRId64, __FUNCTION__,
544 *it, imgBuffer.timestamp);
545 }
546 mPendingInputFrames[*it].error = true;
547 mInputAppSegmentBuffers.erase(it);
548 continue;
549 }
550
551 if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
552 (mPendingInputFrames[imgBuffer.timestamp].error)) {
553 mAppSegmentConsumer->unlockBuffer(imgBuffer);
554 } else {
555 mPendingInputFrames[imgBuffer.timestamp].appSegmentBuffer = imgBuffer;
556 mAppSegmentBufferAcquired = true;
557 }
558 mInputAppSegmentBuffers.erase(it);
559 }
560
561 while (!mInputYuvBuffers.empty() && !mYuvBufferAcquired) {
562 CpuConsumer::LockedBuffer imgBuffer;
563 auto it = mInputYuvBuffers.begin();
564 auto res = mMainImageConsumer->lockNextBuffer(&imgBuffer);
565 if (res == NOT_ENOUGH_DATA) {
            // Cannot lock any more buffers.
567 break;
568 } else if (res != OK) {
569 ALOGE("%s: Error locking YUV_888 image buffer: %s (%d)", __FUNCTION__,
570 strerror(-res), res);
571 mPendingInputFrames[*it].error = true;
572 mInputYuvBuffers.erase(it);
573 continue;
574 } else if (*it != imgBuffer.timestamp) {
575 ALOGW("%s: Expecting YUV_888 buffer with time stamp: %" PRId64 " received buffer with "
576 "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
577 mPendingInputFrames[*it].error = true;
578 mInputYuvBuffers.erase(it);
579 continue;
580 }
581
582 if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
583 (mPendingInputFrames[imgBuffer.timestamp].error)) {
584 mMainImageConsumer->unlockBuffer(imgBuffer);
585 } else {
586 mPendingInputFrames[imgBuffer.timestamp].yuvBuffer = imgBuffer;
587 mYuvBufferAcquired = true;
588 }
589 mInputYuvBuffers.erase(it);
590 }
591
592 while (!mCodecOutputBuffers.empty()) {
593 auto it = mCodecOutputBuffers.begin();
        // The bitstream buffer timestamp doesn't necessarily correlate directly with the input
        // buffer timestamp. Assume the encoder's input-to-output order is FIFO and use a queue
        // to look up the timestamp.
597 int64_t bufferTime = -1;
598 if (mCodecOutputBufferTimestamps.empty()) {
599 ALOGE("%s: Failed to find buffer timestamp for codec output buffer!", __FUNCTION__);
600 } else {
601 // Direct mapping between camera timestamp (in ns) and codec timestamp (in us).
602 bufferTime = mCodecOutputBufferTimestamps.front();
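            // Each HEIC image is made up of mNumOutputTiles codec output buffers, so keep
            // reusing the front timestamp until all tiles for the current image have been seen.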
            mCodecOutputCounter++;
            if (mCodecOutputCounter == mNumOutputTiles) {
                mCodecOutputBufferTimestamps.pop();
                mCodecOutputCounter = 0;
            }

            mPendingInputFrames[bufferTime].codecOutputBuffers.push_back(*it);
            ALOGV("%s: [%" PRId64 "]: Pushing codecOutputBuffers (time %" PRId64 " us)",
                    __FUNCTION__, bufferTime, it->timeUs);
        }
613 mCodecOutputBuffers.erase(it);
614 }
615
616 while (!mFrameNumberMap.empty()) {
617 auto it = mFrameNumberMap.begin();
618 mPendingInputFrames[it->second].frameNumber = it->first;
        ALOGV("%s: [%" PRId64 "]: frameNumber is %" PRId64, __FUNCTION__, it->second, it->first);
        mFrameNumberMap.erase(it);
621 }
622
    while (!mCaptureResults.empty()) {
624 auto it = mCaptureResults.begin();
625 // Negative timestamp indicates that something went wrong during the capture result
626 // collection process.
627 if (it->first >= 0) {
628 if (mPendingInputFrames[it->first].frameNumber == std::get<0>(it->second)) {
629 mPendingInputFrames[it->first].result =
630 std::make_unique<CameraMetadata>(std::get<1>(it->second));
631 } else {
632 ALOGE("%s: Capture result frameNumber/timestamp mapping changed between "
633 "shutter and capture result!", __FUNCTION__);
634 }
635 }
636 mCaptureResults.erase(it);
637 }
638
639 // mErrorFrameNumbers stores frame number of dropped buffers.
640 auto it = mErrorFrameNumbers.begin();
641 while (it != mErrorFrameNumbers.end()) {
642 bool frameFound = false;
643 for (auto &inputFrame : mPendingInputFrames) {
644 if (inputFrame.second.frameNumber == *it) {
645 inputFrame.second.error = true;
646 frameFound = true;
647 break;
648 }
649 }
650
651 if (frameFound) {
652 it = mErrorFrameNumbers.erase(it);
653 } else {
654 ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__,
655 *it);
656 it++;
657 }
658 }

    // Distribute codec input buffers to be filled out from YUV output
661 for (auto it = mPendingInputFrames.begin();
662 it != mPendingInputFrames.end() && mCodecInputBuffers.size() > 0; it++) {
663 InputFrame& inputFrame(it->second);
664 if (inputFrame.codecInputCounter < mGridRows * mGridCols) {
665 // Available input tiles that are required for the current input
666 // image.
667 size_t newInputTiles = std::min(mCodecInputBuffers.size(),
668 mGridRows * mGridCols - inputFrame.codecInputCounter);
669 for (size_t i = 0; i < newInputTiles; i++) {
670 CodecInputBufferInfo inputInfo =
671 { mCodecInputBuffers[0], mGridTimestampUs++, inputFrame.codecInputCounter };
672 inputFrame.codecInputBuffers.push_back(inputInfo);
673
674 mCodecInputBuffers.erase(mCodecInputBuffers.begin());
675 inputFrame.codecInputCounter++;
676 }
677 break;
678 }
679 }
680}
681
682bool HeicCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*out*/) {
683 if (currentTs == nullptr) {
684 return false;
685 }
686
687 bool newInputAvailable = false;
    for (auto& it : mPendingInputFrames) {
689 // New input is considered to be available only if:
690 // 1. input buffers are ready, or
691 // 2. App segment and muxer is created, or
692 // 3. A codec output tile is ready, and an output buffer is available.
693 // This makes sure that muxer gets created only when an output tile is
694 // generated, because right now we only handle 1 HEIC output buffer at a
695 // time (max dequeued buffer count is 1).
        bool appSegmentReady = (it.second.appSegmentBuffer.data != nullptr) &&
                !it.second.appSegmentWritten && it.second.result != nullptr &&
                it.second.muxer != nullptr;
        bool codecOutputReady = !it.second.codecOutputBuffers.empty();
        bool codecInputReady = (it.second.yuvBuffer.data != nullptr) &&
                (!it.second.codecInputBuffers.empty());
        bool hasOutputBuffer = it.second.muxer != nullptr ||
                (mDequeuedOutputBufferCnt < kMaxOutputSurfaceProducerCount);
        if ((!it.second.error) &&
                (it.first < *currentTs) &&
                (appSegmentReady || (codecOutputReady && hasOutputBuffer) || codecInputReady)) {
            *currentTs = it.first;
            if (it.second.format == nullptr && mFormat != nullptr) {
                it.second.format = mFormat->dup();
            }
            newInputAvailable = true;
            break;
        }
714 }
715
716 return newInputAvailable;
717}
718
719int64_t HeicCompositeStream::getNextFailingInputLocked(int64_t *currentTs /*out*/) {
720 int64_t res = -1;
721 if (currentTs == nullptr) {
722 return res;
723 }
724
725 for (const auto& it : mPendingInputFrames) {
726 if (it.second.error && !it.second.errorNotified && (it.first < *currentTs)) {
727 *currentTs = it.first;
728 res = it.second.frameNumber;
729 break;
730 }
731 }
732
733 return res;
734}
735
736status_t HeicCompositeStream::processInputFrame(nsecs_t timestamp,
737 InputFrame &inputFrame) {
738 ATRACE_CALL();
739 status_t res = OK;
740
    bool appSegmentReady = inputFrame.appSegmentBuffer.data != nullptr &&
            !inputFrame.appSegmentWritten && inputFrame.result != nullptr &&
            inputFrame.muxer != nullptr;
    bool codecOutputReady = inputFrame.codecOutputBuffers.size() > 0;
    bool codecInputReady = inputFrame.yuvBuffer.data != nullptr &&
            !inputFrame.codecInputBuffers.empty();
    bool hasOutputBuffer = inputFrame.muxer != nullptr ||
            (mDequeuedOutputBufferCnt < kMaxOutputSurfaceProducerCount);

    ALOGV("%s: [%" PRId64 "]: appSegmentReady %d, codecOutputReady %d, codecInputReady %d,"
            " dequeuedOutputBuffer %d", __FUNCTION__, timestamp, appSegmentReady,
            codecOutputReady, codecInputReady, mDequeuedOutputBufferCnt);
754 // Handle inputs for Hevc tiling
755 if (codecInputReady) {
756 res = processCodecInputFrame(inputFrame);
757 if (res != OK) {
758 ALOGE("%s: Failed to process codec input frame: %s (%d)", __FUNCTION__,
759 strerror(-res), res);
760 return res;
761 }
762 }
763
    if (!(codecOutputReady && hasOutputBuffer) && !appSegmentReady) {
765 return OK;
766 }
767
768 // Initialize and start muxer if not yet done so. In this case,
769 // codecOutputReady must be true. Otherwise, appSegmentReady is guaranteed
770 // to be false, and the function must have returned early.
    if (inputFrame.muxer == nullptr) {
772 res = startMuxerForInputFrame(timestamp, inputFrame);
773 if (res != OK) {
774 ALOGE("%s: Failed to create and start muxer: %s (%d)", __FUNCTION__,
775 strerror(-res), res);
776 return res;
777 }
778 }
779
780 // Write JPEG APP segments data to the muxer.
    if (appSegmentReady) {
        res = processAppSegment(timestamp, inputFrame);
783 if (res != OK) {
784 ALOGE("%s: Failed to process JPEG APP segments: %s (%d)", __FUNCTION__,
785 strerror(-res), res);
786 return res;
787 }
788 }
789
790 // Write media codec bitstream buffers to muxer.
791 while (!inputFrame.codecOutputBuffers.empty()) {
792 res = processOneCodecOutputFrame(timestamp, inputFrame);
793 if (res != OK) {
794 ALOGE("%s: Failed to process codec output frame: %s (%d)", __FUNCTION__,
795 strerror(-res), res);
796 return res;
797 }
798 }
799
800 if (inputFrame.appSegmentWritten && inputFrame.pendingOutputTiles == 0) {
801 res = processCompletedInputFrame(timestamp, inputFrame);
802 if (res != OK) {
803 ALOGE("%s: Failed to process completed input frame: %s (%d)", __FUNCTION__,
804 strerror(-res), res);
805 return res;
806 }
807 }
808
809 return res;
810}
811
812status_t HeicCompositeStream::startMuxerForInputFrame(nsecs_t timestamp, InputFrame &inputFrame) {
    sp<ANativeWindow> outputANW = mOutputSurface;

    auto res = outputANW->dequeueBuffer(mOutputSurface.get(), &inputFrame.anb, &inputFrame.fenceFd);
816 if (res != OK) {
817 ALOGE("%s: Error retrieving output buffer: %s (%d)", __FUNCTION__, strerror(-res),
818 res);
819 return res;
820 }
    mDequeuedOutputBufferCnt++;
823 // Combine current thread id, stream id and timestamp to uniquely identify image.
824 std::ostringstream tempOutputFile;
825 tempOutputFile << "HEIF-" << pthread_self() << "-"
826 << getStreamId() << "-" << timestamp;
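    // The muxer writes into an in-memory (memfd) file; its contents are copied into the
    // client-facing BLOB buffer in processCompletedInputFrame() once muxing finishes.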
827 inputFrame.fileFd = syscall(__NR_memfd_create, tempOutputFile.str().c_str(), MFD_CLOEXEC);
828 if (inputFrame.fileFd < 0) {
829 ALOGE("%s: Failed to create file %s. Error no is %d", __FUNCTION__,
830 tempOutputFile.str().c_str(), errno);
831 return NO_INIT;
832 }
833 inputFrame.muxer = new MediaMuxer(inputFrame.fileFd, MediaMuxer::OUTPUT_FORMAT_HEIF);
834 if (inputFrame.muxer == nullptr) {
835 ALOGE("%s: Failed to create MediaMuxer for file fd %d",
836 __FUNCTION__, inputFrame.fileFd);
837 return NO_INIT;
838 }
839
840 res = inputFrame.muxer->setOrientationHint(inputFrame.orientation);
841 if (res != OK) {
842 ALOGE("%s: Failed to setOrientationHint: %s (%d)", __FUNCTION__,
843 strerror(-res), res);
844 return res;
845 }
846 // Set encoder quality
847 {
848 sp<AMessage> qualityParams = new AMessage;
849 qualityParams->setInt32(PARAMETER_KEY_VIDEO_BITRATE, inputFrame.quality);
850 res = mCodec->setParameters(qualityParams);
851 if (res != OK) {
852 ALOGE("%s: Failed to set codec quality: %s (%d)",
853 __FUNCTION__, strerror(-res), res);
854 return res;
855 }
856 }
857
    ssize_t trackId = inputFrame.muxer->addTrack(inputFrame.format);
    if (trackId < 0) {
860 ALOGE("%s: Failed to addTrack to the muxer: %zd", __FUNCTION__, trackId);
861 return NO_INIT;
862 }
863
864 inputFrame.trackIndex = trackId;
865 inputFrame.pendingOutputTiles = mNumOutputTiles;
866
867 res = inputFrame.muxer->start();
868 if (res != OK) {
869 ALOGE("%s: Failed to start MediaMuxer: %s (%d)",
870 __FUNCTION__, strerror(-res), res);
871 return res;
872 }
873
    ALOGV("%s: [%" PRId64 "]: Muxer started for inputFrame", __FUNCTION__,
            timestamp);
    return OK;
877}
878
879status_t HeicCompositeStream::processAppSegment(nsecs_t timestamp, InputFrame &inputFrame) {
880 size_t app1Size = 0;
881 auto appSegmentSize = findAppSegmentsSize(inputFrame.appSegmentBuffer.data,
882 inputFrame.appSegmentBuffer.width * inputFrame.appSegmentBuffer.height,
883 &app1Size);
    if (appSegmentSize == 0) {
885 ALOGE("%s: Failed to find JPEG APP segment size", __FUNCTION__);
886 return NO_INIT;
887 }
888
889 std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
890 auto exifRes = exifUtils->initialize(inputFrame.appSegmentBuffer.data, app1Size);
891 if (!exifRes) {
892 ALOGE("%s: Failed to initialize ExifUtils object!", __FUNCTION__);
893 return BAD_VALUE;
894 }
    exifRes = exifUtils->setFromMetadata(*inputFrame.result, mStaticInfo,
            mOutputWidth, mOutputHeight);
    if (!exifRes) {
898 ALOGE("%s: Failed to set Exif tags using metadata and main image sizes", __FUNCTION__);
899 return BAD_VALUE;
900 }
901 exifRes = exifUtils->setOrientation(inputFrame.orientation);
902 if (!exifRes) {
903 ALOGE("%s: ExifUtils failed to set orientation", __FUNCTION__);
904 return BAD_VALUE;
905 }
906 exifRes = exifUtils->generateApp1();
907 if (!exifRes) {
908 ALOGE("%s: ExifUtils failed to generate APP1 segment", __FUNCTION__);
909 return BAD_VALUE;
910 }
911
912 unsigned int newApp1Length = exifUtils->getApp1Length();
913 const uint8_t *newApp1Segment = exifUtils->getApp1Buffer();
914
915 //Assemble the APP1 marker buffer required by MediaCodec
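    // Bytes 0-3 hold the "Exif" identifier, bytes 4-5 the APP1 marker (0xFF 0xE1), and
    // bytes 6-7 the regenerated APP1 length in big-endian order, patched in below.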
916 uint8_t kExifApp1Marker[] = {'E', 'x', 'i', 'f', 0xFF, 0xE1, 0x00, 0x00};
917 kExifApp1Marker[6] = static_cast<uint8_t>(newApp1Length >> 8);
918 kExifApp1Marker[7] = static_cast<uint8_t>(newApp1Length & 0xFF);
919 size_t appSegmentBufferSize = sizeof(kExifApp1Marker) +
920 appSegmentSize - app1Size + newApp1Length;
921 uint8_t* appSegmentBuffer = new uint8_t[appSegmentBufferSize];
922 memcpy(appSegmentBuffer, kExifApp1Marker, sizeof(kExifApp1Marker));
923 memcpy(appSegmentBuffer + sizeof(kExifApp1Marker), newApp1Segment, newApp1Length);
924 if (appSegmentSize - app1Size > 0) {
925 memcpy(appSegmentBuffer + sizeof(kExifApp1Marker) + newApp1Length,
926 inputFrame.appSegmentBuffer.data + app1Size, appSegmentSize - app1Size);
927 }
928
929 sp<ABuffer> aBuffer = new ABuffer(appSegmentBuffer, appSegmentBufferSize);
930 auto res = inputFrame.muxer->writeSampleData(aBuffer, inputFrame.trackIndex,
931 timestamp, MediaCodec::BUFFER_FLAG_MUXER_DATA);
932 delete[] appSegmentBuffer;
933
934 if (res != OK) {
935 ALOGE("%s: Failed to write JPEG APP segments to muxer: %s (%d)",
936 __FUNCTION__, strerror(-res), res);
937 return res;
938 }
939 inputFrame.appSegmentWritten = true;
940
    ALOGV("%s: [%" PRId64 "]: appSegmentSize is %zu, width %d, height %d, app1Size %zu",
            __FUNCTION__, timestamp, appSegmentSize, inputFrame.appSegmentBuffer.width,
            inputFrame.appSegmentBuffer.height, app1Size);
    return OK;
945}
946
947status_t HeicCompositeStream::processCodecInputFrame(InputFrame &inputFrame) {
948 for (auto& inputBuffer : inputFrame.codecInputBuffers) {
949 sp<MediaCodecBuffer> buffer;
950 auto res = mCodec->getInputBuffer(inputBuffer.index, &buffer);
951 if (res != OK) {
952 ALOGE("%s: Error getting codec input buffer: %s (%d)", __FUNCTION__,
953 strerror(-res), res);
954 return res;
955 }
956
957 // Copy one tile from source to destination.
958 size_t tileX = inputBuffer.tileIndex % mGridCols;
959 size_t tileY = inputBuffer.tileIndex / mGridCols;
960 size_t top = mGridHeight * tileY;
961 size_t left = mGridWidth * tileX;
962 size_t width = (tileX == static_cast<size_t>(mGridCols) - 1) ?
963 mOutputWidth - tileX * mGridWidth : mGridWidth;
964 size_t height = (tileY == static_cast<size_t>(mGridRows) - 1) ?
965 mOutputHeight - tileY * mGridHeight : mGridHeight;
        ALOGV("%s: inputBuffer tileIndex [%zu, %zu], top %zu, left %zu, width %zu, height %zu,"
                " timeUs %" PRId64, __FUNCTION__, tileX, tileY, top, left, width, height,
                inputBuffer.timeUs);

        res = copyOneYuvTile(buffer, inputFrame.yuvBuffer, top, left, width, height);
971 if (res != OK) {
972 ALOGE("%s: Failed to copy YUV tile %s (%d)", __FUNCTION__,
973 strerror(-res), res);
974 return res;
975 }
976
977 res = mCodec->queueInputBuffer(inputBuffer.index, 0, buffer->capacity(),
978 inputBuffer.timeUs, 0, nullptr /*errorDetailMsg*/);
979 if (res != OK) {
980 ALOGE("%s: Failed to queueInputBuffer to Codec: %s (%d)",
981 __FUNCTION__, strerror(-res), res);
982 return res;
983 }
984 }
985
986 inputFrame.codecInputBuffers.clear();
987 return OK;
988}
989
990status_t HeicCompositeStream::processOneCodecOutputFrame(nsecs_t timestamp,
991 InputFrame &inputFrame) {
992 auto it = inputFrame.codecOutputBuffers.begin();
993 sp<MediaCodecBuffer> buffer;
994 status_t res = mCodec->getOutputBuffer(it->index, &buffer);
995 if (res != OK) {
996 ALOGE("%s: Error getting Heic codec output buffer at index %d: %s (%d)",
997 __FUNCTION__, it->index, strerror(-res), res);
998 return res;
999 }
1000 if (buffer == nullptr) {
1001 ALOGE("%s: Invalid Heic codec output buffer at index %d",
1002 __FUNCTION__, it->index);
1003 return BAD_VALUE;
1004 }
1005
1006 sp<ABuffer> aBuffer = new ABuffer(buffer->data(), buffer->size());
1007 res = inputFrame.muxer->writeSampleData(
1008 aBuffer, inputFrame.trackIndex, timestamp, 0 /*flags*/);
1009 if (res != OK) {
1010 ALOGE("%s: Failed to write buffer index %d to muxer: %s (%d)",
1011 __FUNCTION__, it->index, strerror(-res), res);
1012 return res;
1013 }
1014
1015 mCodec->releaseOutputBuffer(it->index);
1016 if (inputFrame.pendingOutputTiles == 0) {
1017 ALOGW("%s: Codec generated more tiles than expected!", __FUNCTION__);
1018 } else {
1019 inputFrame.pendingOutputTiles--;
1020 }
1021
    inputFrame.codecOutputBuffers.erase(inputFrame.codecOutputBuffers.begin());

    ALOGV("%s: [%" PRId64 "]: Output buffer index %d",
            __FUNCTION__, timestamp, it->index);
    return OK;
1027}
1028
1029status_t HeicCompositeStream::processCompletedInputFrame(nsecs_t timestamp,
1030 InputFrame &inputFrame) {
1031 sp<ANativeWindow> outputANW = mOutputSurface;
1032 inputFrame.muxer->stop();
1033
1034 // Copy the content of the file to memory.
1035 sp<GraphicBuffer> gb = GraphicBuffer::from(inputFrame.anb);
1036 void* dstBuffer;
1037 auto res = gb->lockAsync(GRALLOC_USAGE_SW_WRITE_OFTEN, &dstBuffer, inputFrame.fenceFd);
1038 if (res != OK) {
1039 ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
1040 strerror(-res), res);
1041 return res;
1042 }
1043
1044 off_t fSize = lseek(inputFrame.fileFd, 0, SEEK_END);
1045 if (static_cast<size_t>(fSize) > mMaxHeicBufferSize - sizeof(CameraBlob)) {
        ALOGE("%s: Error: MediaMuxer output size %ld is larger than buffer size %zu",
1047 __FUNCTION__, fSize, mMaxHeicBufferSize - sizeof(CameraBlob));
1048 return BAD_VALUE;
1049 }
1050
1051 lseek(inputFrame.fileFd, 0, SEEK_SET);
1052 ssize_t bytesRead = read(inputFrame.fileFd, dstBuffer, fSize);
1053 if (bytesRead < fSize) {
1054 ALOGE("%s: Only %zd of %ld bytes read", __FUNCTION__, bytesRead, fSize);
1055 return BAD_VALUE;
1056 }
1057
1058 close(inputFrame.fileFd);
1059 inputFrame.fileFd = -1;
1060
1061 // Fill in HEIC header
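    // The CameraBlob transport header sits at the very end of the fixed-size BLOB buffer,
    // which is where consumers look up the actual size (fSize) of the muxed HEIC data.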
1062 uint8_t *header = static_cast<uint8_t*>(dstBuffer) + mMaxHeicBufferSize - sizeof(CameraBlob);
1063 struct CameraBlob *blobHeader = (struct CameraBlob *)header;
1064 // Must be in sync with CAMERA3_HEIC_BLOB_ID in android_media_Utils.cpp
1065 blobHeader->blobId = static_cast<CameraBlobId>(0x00FE);
1066 blobHeader->blobSize = fSize;
1067
1068 res = native_window_set_buffers_timestamp(mOutputSurface.get(), timestamp);
1069 if (res != OK) {
1070 ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
1071 __FUNCTION__, getStreamId(), strerror(-res), res);
1072 return res;
1073 }
1074
1075 res = outputANW->queueBuffer(mOutputSurface.get(), inputFrame.anb, /*fence*/ -1);
1076 if (res != OK) {
1077 ALOGE("%s: Failed to queueBuffer to Heic stream: %s (%d)", __FUNCTION__,
1078 strerror(-res), res);
1079 return res;
1080 }
1081 inputFrame.anb = nullptr;
    mDequeuedOutputBufferCnt--;

    ALOGV("%s: [%" PRId64 "]", __FUNCTION__, timestamp);
    ATRACE_ASYNC_END("HEIC capture", inputFrame.frameNumber);
    return OK;
1087}
1088
1089
1090void HeicCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/) {
1091 if (inputFrame == nullptr) {
1092 return;
1093 }
1094
1095 if (inputFrame->appSegmentBuffer.data != nullptr) {
1096 mAppSegmentConsumer->unlockBuffer(inputFrame->appSegmentBuffer);
1097 inputFrame->appSegmentBuffer.data = nullptr;
1098 mAppSegmentBufferAcquired = false;
1099 }
1100
1101 while (!inputFrame->codecOutputBuffers.empty()) {
1102 auto it = inputFrame->codecOutputBuffers.begin();
1103 ALOGV("%s: releaseOutputBuffer index %d", __FUNCTION__, it->index);
1104 mCodec->releaseOutputBuffer(it->index);
1105 inputFrame->codecOutputBuffers.erase(it);
1106 }
1107
1108 if (inputFrame->yuvBuffer.data != nullptr) {
1109 mMainImageConsumer->unlockBuffer(inputFrame->yuvBuffer);
1110 inputFrame->yuvBuffer.data = nullptr;
1111 mYuvBufferAcquired = false;
1112 }
1113
1114 while (!inputFrame->codecInputBuffers.empty()) {
1115 auto it = inputFrame->codecInputBuffers.begin();
1116 inputFrame->codecInputBuffers.erase(it);
1117 }
1118
1119 if ((inputFrame->error || mErrorState) && !inputFrame->errorNotified) {
1120 notifyError(inputFrame->frameNumber);
1121 inputFrame->errorNotified = true;
1122 }
1123
1124 if (inputFrame->fileFd >= 0) {
1125 close(inputFrame->fileFd);
1126 inputFrame->fileFd = -1;
1127 }
1128
1129 if (inputFrame->anb != nullptr) {
1130 sp<ANativeWindow> outputANW = mOutputSurface;
1131 outputANW->cancelBuffer(mOutputSurface.get(), inputFrame->anb, /*fence*/ -1);
1132 inputFrame->anb = nullptr;
1133 }
1134}
1135
1136void HeicCompositeStream::releaseInputFramesLocked(int64_t currentTs) {
1137 auto it = mPendingInputFrames.begin();
1138 while (it != mPendingInputFrames.end()) {
1139 if (it->first <= currentTs) {
1140 releaseInputFrameLocked(&it->second);
1141 it = mPendingInputFrames.erase(it);
1142 } else {
1143 it++;
1144 }
1145 }
1146}
1147
1148status_t HeicCompositeStream::initializeCodec(uint32_t width, uint32_t height,
1149 const sp<CameraDeviceBase>& cameraDevice) {
1150 ALOGV("%s", __FUNCTION__);
1151
1152 bool useGrid = false;
    AString hevcName;
    bool isSizeSupported = isSizeSupportedByHeifEncoder(width, height,
            &mUseHeic, &useGrid, nullptr, &hevcName);
    if (!isSizeSupported) {
        ALOGE("%s: Encoder doesn't support size %u x %u!",
                __FUNCTION__, width, height);
1159 return BAD_VALUE;
1160 }
1161
1162 // Create Looper for MediaCodec.
1163 auto desiredMime = mUseHeic ? MIMETYPE_IMAGE_ANDROID_HEIC : MIMETYPE_VIDEO_HEVC;
1164 mCodecLooper = new ALooper;
1165 mCodecLooper->setName("Camera3-HeicComposite-MediaCodecLooper");
1166 status_t res = mCodecLooper->start(
1167 false, // runOnCallingThread
1168 false, // canCallJava
1169 PRIORITY_AUDIO);
1170 if (res != OK) {
1171 ALOGE("%s: Failed to start codec looper: %s (%d)",
1172 __FUNCTION__, strerror(-res), res);
1173 return NO_INIT;
1174 }
1175
1176 // Create HEIC/HEVC codec.
    if (mUseHeic) {
        mCodec = MediaCodec::CreateByType(mCodecLooper, desiredMime, true /*encoder*/);
    } else {
        mCodec = MediaCodec::CreateByComponentName(mCodecLooper, hevcName);
    }
    if (mCodec == nullptr) {
1183 ALOGE("%s: Failed to create codec for %s", __FUNCTION__, desiredMime);
1184 return NO_INIT;
1185 }
1186
1187 // Create Looper and handler for Codec callback.
1188 mCodecCallbackHandler = new CodecCallbackHandler(this);
1189 if (mCodecCallbackHandler == nullptr) {
1190 ALOGE("%s: Failed to create codec callback handler", __FUNCTION__);
1191 return NO_MEMORY;
1192 }
1193 mCallbackLooper = new ALooper;
1194 mCallbackLooper->setName("Camera3-HeicComposite-MediaCodecCallbackLooper");
1195 res = mCallbackLooper->start(
1196 false, // runOnCallingThread
1197 false, // canCallJava
1198 PRIORITY_AUDIO);
1199 if (res != OK) {
1200 ALOGE("%s: Failed to start media callback looper: %s (%d)",
1201 __FUNCTION__, strerror(-res), res);
1202 return NO_INIT;
1203 }
1204 mCallbackLooper->registerHandler(mCodecCallbackHandler);
1205
1206 mAsyncNotify = new AMessage(kWhatCallbackNotify, mCodecCallbackHandler);
1207 res = mCodec->setCallback(mAsyncNotify);
1208 if (res != OK) {
1209 ALOGE("%s: Failed to set MediaCodec callback: %s (%d)", __FUNCTION__,
1210 strerror(-res), res);
1211 return res;
1212 }
1213
1214 // Create output format and configure the Codec.
1215 sp<AMessage> outputFormat = new AMessage();
1216 outputFormat->setString(KEY_MIME, desiredMime);
1217 outputFormat->setInt32(KEY_BITRATE_MODE, BITRATE_MODE_CQ);
1218 outputFormat->setInt32(KEY_QUALITY, kDefaultJpegQuality);
1219 // Ask codec to skip timestamp check and encode all frames.
    outputFormat->setInt64(KEY_MAX_PTS_GAP_TO_ENCODER, kNoFrameDropMaxPtsGap);
1222 int32_t gridWidth, gridHeight, gridRows, gridCols;
1223 if (useGrid || mUseHeic) {
1224 gridWidth = HeicEncoderInfoManager::kGridWidth;
1225 gridHeight = HeicEncoderInfoManager::kGridHeight;
1226 gridRows = (height + gridHeight - 1)/gridHeight;
1227 gridCols = (width + gridWidth - 1)/gridWidth;
1228
1229 if (mUseHeic) {
1230 outputFormat->setInt32(KEY_TILE_WIDTH, gridWidth);
1231 outputFormat->setInt32(KEY_TILE_HEIGHT, gridHeight);
1232 outputFormat->setInt32(KEY_GRID_COLUMNS, gridCols);
1233 outputFormat->setInt32(KEY_GRID_ROWS, gridRows);
1234 }
1235
1236 } else {
1237 gridWidth = width;
1238 gridHeight = height;
1239 gridRows = 1;
1240 gridCols = 1;
1241 }
1242
1243 outputFormat->setInt32(KEY_WIDTH, !useGrid ? width : gridWidth);
1244 outputFormat->setInt32(KEY_HEIGHT, !useGrid ? height : gridHeight);
1245 outputFormat->setInt32(KEY_I_FRAME_INTERVAL, 0);
1246 outputFormat->setInt32(KEY_COLOR_FORMAT,
1247 useGrid ? COLOR_FormatYUV420Flexible : COLOR_FormatSurface);
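    // With framework tiling the YUV tiles are copied into flexible codec input buffers;
    // otherwise the codec reads frames directly from the input surface created earlier.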
1248 outputFormat->setInt32(KEY_FRAME_RATE, gridRows * gridCols);
1249 // This only serves as a hint to encoder when encoding is not real-time.
1250 outputFormat->setInt32(KEY_OPERATING_RATE, useGrid ? kGridOpRate : kNoGridOpRate);
1251
1252 res = mCodec->configure(outputFormat, nullptr /*nativeWindow*/,
1253 nullptr /*crypto*/, CONFIGURE_FLAG_ENCODE);
1254 if (res != OK) {
1255 ALOGE("%s: Failed to configure codec: %s (%d)", __FUNCTION__,
1256 strerror(-res), res);
1257 return res;
1258 }
1259
1260 mGridWidth = gridWidth;
1261 mGridHeight = gridHeight;
1262 mGridRows = gridRows;
1263 mGridCols = gridCols;
1264 mUseGrid = useGrid;
1265 mOutputWidth = width;
1266 mOutputHeight = height;
1267 mAppSegmentMaxSize = calcAppSegmentMaxSize(cameraDevice->info());
1268 mMaxHeicBufferSize = mOutputWidth * mOutputHeight * 3 / 2 + mAppSegmentMaxSize;
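    // Sized as one uncompressed YUV420 frame (3/2 bytes per pixel) plus the maximum APP
    // segments size, as an upper bound for the muxed output copied back in
    // processCompletedInputFrame().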
1269
1270 return OK;
1271}
1272
1273void HeicCompositeStream::deinitCodec() {
1274 ALOGV("%s", __FUNCTION__);
1275 if (mCodec != nullptr) {
1276 mCodec->stop();
1277 mCodec->release();
1278 mCodec.clear();
1279 }
1280
1281 if (mCodecLooper != nullptr) {
1282 mCodecLooper->stop();
1283 mCodecLooper.clear();
1284 }
1285
1286 if (mCallbackLooper != nullptr) {
1287 mCallbackLooper->stop();
1288 mCallbackLooper.clear();
1289 }
1290
1291 mAsyncNotify.clear();
1292 mFormat.clear();
1293}
1294
// Return the size of the complete list of APP segments; 0 indicates failure
1296size_t HeicCompositeStream::findAppSegmentsSize(const uint8_t* appSegmentBuffer,
1297 size_t maxSize, size_t *app1SegmentSize) {
1298 if (appSegmentBuffer == nullptr || app1SegmentSize == nullptr) {
1299 ALOGE("%s: Invalid input appSegmentBuffer %p, app1SegmentSize %p",
1300 __FUNCTION__, appSegmentBuffer, app1SegmentSize);
1301 return 0;
1302 }
1303
1304 size_t expectedSize = 0;
1305 // First check for EXIF transport header at the end of the buffer
1306 const uint8_t *header = appSegmentBuffer + (maxSize - sizeof(struct CameraBlob));
1307 const struct CameraBlob *blob = (const struct CameraBlob*)(header);
1308 if (blob->blobId != CameraBlobId::JPEG_APP_SEGMENTS) {
1309 ALOGE("%s: Invalid EXIF blobId %hu", __FUNCTION__, blob->blobId);
1310 return 0;
1311 }
1312
1313 expectedSize = blob->blobSize;
1314 if (expectedSize == 0 || expectedSize > maxSize - sizeof(struct CameraBlob)) {
1315 ALOGE("%s: Invalid blobSize %zu.", __FUNCTION__, expectedSize);
1316 return 0;
1317 }
1318
1319 uint32_t totalSize = 0;
1320
1321 // Verify APP1 marker (mandatory)
1322 uint8_t app1Marker[] = {0xFF, 0xE1};
1323 if (memcmp(appSegmentBuffer, app1Marker, sizeof(app1Marker))) {
1324 ALOGE("%s: Invalid APP1 marker: %x, %x", __FUNCTION__,
1325 appSegmentBuffer[0], appSegmentBuffer[1]);
1326 return 0;
1327 }
1328 totalSize += sizeof(app1Marker);
1329
1330 uint16_t app1Size = (static_cast<uint16_t>(appSegmentBuffer[totalSize]) << 8) +
1331 appSegmentBuffer[totalSize+1];
1332 totalSize += app1Size;
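    // Per the JPEG spec the 2-byte segment length includes the length field itself, so
    // adding app1Size right after the marker bytes covers the complete APP1 segment.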
1333
1334 ALOGV("%s: Expected APP segments size %zu, APP1 segment size %u",
1335 __FUNCTION__, expectedSize, app1Size);
1336 while (totalSize < expectedSize) {
1337 if (appSegmentBuffer[totalSize] != 0xFF ||
1338 appSegmentBuffer[totalSize+1] <= 0xE1 ||
1339 appSegmentBuffer[totalSize+1] > 0xEF) {
1340 // Invalid APPn marker
1341 ALOGE("%s: Invalid APPn marker: %x, %x", __FUNCTION__,
1342 appSegmentBuffer[totalSize], appSegmentBuffer[totalSize+1]);
1343 return 0;
1344 }
1345 totalSize += 2;
1346
1347 uint16_t appnSize = (static_cast<uint16_t>(appSegmentBuffer[totalSize]) << 8) +
1348 appSegmentBuffer[totalSize+1];
1349 totalSize += appnSize;
1350 }
1351
1352 if (totalSize != expectedSize) {
1353 ALOGE("%s: Invalid JPEG APP segments: totalSize %u vs expected size %zu",
1354 __FUNCTION__, totalSize, expectedSize);
1355 return 0;
1356 }
1357
1358 *app1SegmentSize = app1Size + sizeof(app1Marker);
1359 return expectedSize;
1360}
1361
1362int64_t HeicCompositeStream::findTimestampInNsLocked(int64_t timeInUs) {
1363 for (const auto& fn : mFrameNumberMap) {
1364 if (timeInUs == ns2us(fn.second)) {
1365 return fn.second;
1366 }
1367 }
1368 for (const auto& inputFrame : mPendingInputFrames) {
1369 if (timeInUs == ns2us(inputFrame.first)) {
1370 return inputFrame.first;
1371 }
1372 }
1373 return -1;
1374}
1375
1376status_t HeicCompositeStream::copyOneYuvTile(sp<MediaCodecBuffer>& codecBuffer,
1377 const CpuConsumer::LockedBuffer& yuvBuffer,
1378 size_t top, size_t left, size_t width, size_t height) {
1379 ATRACE_CALL();
1380
1381 // Get stride information for codecBuffer
1382 sp<ABuffer> imageData;
1383 if (!codecBuffer->meta()->findBuffer("image-data", &imageData)) {
1384 ALOGE("%s: Codec input buffer is not for image data!", __FUNCTION__);
1385 return BAD_VALUE;
1386 }
1387 if (imageData->size() != sizeof(MediaImage2)) {
1388 ALOGE("%s: Invalid codec input image size %zu, expected %zu",
1389 __FUNCTION__, imageData->size(), sizeof(MediaImage2));
1390 return BAD_VALUE;
1391 }
1392 MediaImage2* imageInfo = reinterpret_cast<MediaImage2*>(imageData->data());
1393 if (imageInfo->mType != MediaImage2::MEDIA_IMAGE_TYPE_YUV ||
1394 imageInfo->mBitDepth != 8 ||
1395 imageInfo->mBitDepthAllocated != 8 ||
1396 imageInfo->mNumPlanes != 3) {
1397 ALOGE("%s: Invalid codec input image info: mType %d, mBitDepth %d, "
1398 "mBitDepthAllocated %d, mNumPlanes %d!", __FUNCTION__,
1399 imageInfo->mType, imageInfo->mBitDepth,
1400 imageInfo->mBitDepthAllocated, imageInfo->mNumPlanes);
1401 return BAD_VALUE;
1402 }
1403
1404 ALOGV("%s: yuvBuffer chromaStep %d, chromaStride %d",
1405 __FUNCTION__, yuvBuffer.chromaStep, yuvBuffer.chromaStride);
1406 ALOGV("%s: U offset %u, V offset %u, U rowInc %d, V rowInc %d, U colInc %d, V colInc %d",
1407 __FUNCTION__, imageInfo->mPlane[MediaImage2::U].mOffset,
1408 imageInfo->mPlane[MediaImage2::V].mOffset,
1409 imageInfo->mPlane[MediaImage2::U].mRowInc,
1410 imageInfo->mPlane[MediaImage2::V].mRowInc,
1411 imageInfo->mPlane[MediaImage2::U].mColInc,
1412 imageInfo->mPlane[MediaImage2::V].mColInc);
1413
1414 // Y
1415 for (auto row = top; row < top+height; row++) {
1416 uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::Y].mOffset +
1417 imageInfo->mPlane[MediaImage2::Y].mRowInc * (row - top);
        mFnCopyRow(yuvBuffer.data+row*yuvBuffer.stride+left, dst, width);
    }
1420
1421 // U is Cb, V is Cr
1422 bool codecUPlaneFirst = imageInfo->mPlane[MediaImage2::V].mOffset >
1423 imageInfo->mPlane[MediaImage2::U].mOffset;
1424 uint32_t codecUvOffsetDiff = codecUPlaneFirst ?
1425 imageInfo->mPlane[MediaImage2::V].mOffset - imageInfo->mPlane[MediaImage2::U].mOffset :
1426 imageInfo->mPlane[MediaImage2::U].mOffset - imageInfo->mPlane[MediaImage2::V].mOffset;
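    // Classify the codec chroma layout: semi-planar means interleaved Cb/Cr with a column
    // increment of 2; planar means separate Cb and Cr planes with a column increment of 1.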
1427 bool isCodecUvSemiplannar = (codecUvOffsetDiff == 1) &&
1428 (imageInfo->mPlane[MediaImage2::U].mRowInc ==
1429 imageInfo->mPlane[MediaImage2::V].mRowInc) &&
1430 (imageInfo->mPlane[MediaImage2::U].mColInc == 2) &&
1431 (imageInfo->mPlane[MediaImage2::V].mColInc == 2);
1432 bool isCodecUvPlannar =
1433 ((codecUPlaneFirst && codecUvOffsetDiff >=
1434 imageInfo->mPlane[MediaImage2::U].mRowInc * imageInfo->mHeight/2) ||
1435 ((!codecUPlaneFirst && codecUvOffsetDiff >=
1436 imageInfo->mPlane[MediaImage2::V].mRowInc * imageInfo->mHeight/2))) &&
1437 imageInfo->mPlane[MediaImage2::U].mColInc == 1 &&
1438 imageInfo->mPlane[MediaImage2::V].mColInc == 1;
1439 bool cameraUPlaneFirst = yuvBuffer.dataCr > yuvBuffer.dataCb;
1440
1441 if (isCodecUvSemiplannar && yuvBuffer.chromaStep == 2 &&
1442 (codecUPlaneFirst == cameraUPlaneFirst)) {
        // UV semi-planar
        // The chroma plane could be either Cb first or Cr first. Take the
        // smaller address.
1446 uint8_t *src = std::min(yuvBuffer.dataCb, yuvBuffer.dataCr);
1447 MediaImage2::PlaneIndex dstPlane = codecUvOffsetDiff > 0 ? MediaImage2::U : MediaImage2::V;
1448 for (auto row = top/2; row < (top+height)/2; row++) {
1449 uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[dstPlane].mOffset +
1450 imageInfo->mPlane[dstPlane].mRowInc * (row - top/2);
            mFnCopyRow(src+row*yuvBuffer.chromaStride+left, dst, width);
        }
1453 } else if (isCodecUvPlannar && yuvBuffer.chromaStep == 1) {
1454 // U plane
1455 for (auto row = top/2; row < (top+height)/2; row++) {
1456 uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::U].mOffset +
1457 imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2);
1458             mFnCopyRow(yuvBuffer.dataCb+row*yuvBuffer.chromaStride+left/2, dst, width/2);
1459         }
1460
1461 // V plane
1462 for (auto row = top/2; row < (top+height)/2; row++) {
1463 uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::V].mOffset +
1464 imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2);
1465             mFnCopyRow(yuvBuffer.dataCr+row*yuvBuffer.chromaStride+left/2, dst, width/2);
1466         }
1467 } else {
1468         // Convert between semiplanar and planar, or when UV orders are
1469         // different.
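        // Slow path: copy chroma sample by sample, honoring both sides' row
        // and column increments.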
1470         uint8_t *dst = codecBuffer->data();
1471 for (auto row = top/2; row < (top+height)/2; row++) {
1472 for (auto col = left/2; col < (left+width)/2; col++) {
1473 // U/Cb
1474 int32_t dstIndex = imageInfo->mPlane[MediaImage2::U].mOffset +
1475 imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2) +
1476 imageInfo->mPlane[MediaImage2::U].mColInc * (col - left/2);
1477 int32_t srcIndex = row * yuvBuffer.chromaStride + yuvBuffer.chromaStep * col;
1478 dst[dstIndex] = yuvBuffer.dataCb[srcIndex];
1479
1480 // V/Cr
1481 dstIndex = imageInfo->mPlane[MediaImage2::V].mOffset +
1482 imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2) +
1483 imageInfo->mPlane[MediaImage2::V].mColInc * (col - left/2);
1484 srcIndex = row * yuvBuffer.chromaStride + yuvBuffer.chromaStep * col;
1485 dst[dstIndex] = yuvBuffer.dataCr[srcIndex];
1486 }
1487 }
1488 }
1489 return OK;
1490}
1491
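// Pick the fastest libyuv row-copy routine supported by the CPU. The aligned
// variants require the row width to meet the stated alignment; otherwise the
// *_Any_* versions handle the leftover bytes.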
1492 void HeicCompositeStream::initCopyRowFunction(int32_t width)
1493{
1494 using namespace libyuv;
1495
1496 mFnCopyRow = CopyRow_C;
1497#if defined(HAS_COPYROW_SSE2)
1498 if (TestCpuFlag(kCpuHasSSE2)) {
1499 mFnCopyRow = IS_ALIGNED(width, 32) ? CopyRow_SSE2 : CopyRow_Any_SSE2;
1500 }
1501#endif
1502#if defined(HAS_COPYROW_AVX)
1503 if (TestCpuFlag(kCpuHasAVX)) {
1504 mFnCopyRow = IS_ALIGNED(width, 64) ? CopyRow_AVX : CopyRow_Any_AVX;
1505 }
1506#endif
1507#if defined(HAS_COPYROW_ERMS)
1508 if (TestCpuFlag(kCpuHasERMS)) {
1509 mFnCopyRow = CopyRow_ERMS;
1510 }
1511#endif
1512#if defined(HAS_COPYROW_NEON)
1513 if (TestCpuFlag(kCpuHasNEON)) {
1514 mFnCopyRow = IS_ALIGNED(width, 32) ? CopyRow_NEON : CopyRow_Any_NEON;
1515 }
1516#endif
1517#if defined(HAS_COPYROW_MIPS)
1518 if (TestCpuFlag(kCpuHasMIPS)) {
1519 mFnCopyRow = CopyRow_MIPS;
1520 }
1521#endif
1522}
1523
1524 size_t HeicCompositeStream::calcAppSegmentMaxSize(const CameraMetadata& info) {
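    // A JPEG application segment is at most 2 marker bytes plus 0xFFFF bytes of
    // length field and payload. Reserve room for the maximum advertised number
    // of segments plus sizeof(CameraBlob) for the transport header appended to
    // the buffer.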
1525 camera_metadata_ro_entry_t entry = info.find(ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT);
1526 size_t maxAppsSegment = 1;
1527 if (entry.count > 0) {
1528 maxAppsSegment = entry.data.u8[0] < 1 ? 1 :
1529 entry.data.u8[0] > 16 ? 16 : entry.data.u8[0];
1530 }
1531 return maxAppsSegment * (2 + 0xFFFF) + sizeof(struct CameraBlob);
1532}
1533
1534bool HeicCompositeStream::threadLoop() {
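    // currentTs is the shutter timestamp that keys mPendingInputFrames;
    // INT64_MAX means no frame has been selected yet.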
1535 int64_t currentTs = INT64_MAX;
1536 bool newInputAvailable = false;
1537
1538 {
1539 Mutex::Autolock l(mMutex);
1540 if (mErrorState) {
1541 // In case we landed in error state, return any pending buffers and
1542 // halt all further processing.
1543 compilePendingInputLocked();
1544 releaseInputFramesLocked(currentTs);
1545 return false;
1546 }
1547
1548
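        // Keep compiling pending buffers and results until one input frame is
        // fully ready (or has failed). Time out periodically so the loop can
        // also notice exit requests.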
1549 while (!newInputAvailable) {
1550 compilePendingInputLocked();
1551 newInputAvailable = getNextReadyInputLocked(&currentTs);
1552
1553 if (!newInputAvailable) {
1554 auto failingFrameNumber = getNextFailingInputLocked(&currentTs);
1555 if (failingFrameNumber >= 0) {
1556 // We cannot erase 'mPendingInputFrames[currentTs]' at this point because it is
1557                     // possible for two internal stream buffers to fail. In such a scenario the
1558 // composite stream should notify the client about a stream buffer error only
1559 // once and this information is kept within 'errorNotified'.
1560 // Any present failed input frames will be removed on a subsequent call to
1561 // 'releaseInputFramesLocked()'.
1562 releaseInputFrameLocked(&mPendingInputFrames[currentTs]);
1563 currentTs = INT64_MAX;
1564 }
1565
1566 auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration);
1567 if (ret == TIMED_OUT) {
1568 return true;
1569 } else if (ret != OK) {
1570 ALOGE("%s: Timed wait on condition failed: %s (%d)", __FUNCTION__,
1571 strerror(-ret), ret);
1572 return false;
1573 }
1574 }
1575 }
1576 }
1577
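    // Process the frame without holding mMutex so the codec and camera
    // callbacks can keep queueing new buffers and results.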
1578 auto res = processInputFrame(currentTs, mPendingInputFrames[currentTs]);
1579 Mutex::Autolock l(mMutex);
1580 if (res != OK) {
1581         ALOGE("%s: Failed processing frame with timestamp: %" PRId64 ": %s (%d)",
1582 __FUNCTION__, currentTs, strerror(-res), res);
1583 mPendingInputFrames[currentTs].error = true;
1584 }
1585
1586 if (mPendingInputFrames[currentTs].error ||
1587 (mPendingInputFrames[currentTs].appSegmentWritten &&
1588 mPendingInputFrames[currentTs].pendingOutputTiles == 0)) {
1589 releaseInputFramesLocked(currentTs);
1590 }
1591
1592 return true;
1593}
1594
1595bool HeicCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
1596 bool res = false;
1597 // Buffer errors concerning internal composite streams should not be directly visible to
1598 // camera clients. They must only receive a single buffer error with the public composite
1599 // stream id.
1600 if ((resultExtras.errorStreamId == mAppSegmentStreamId) ||
1601 (resultExtras.errorStreamId == mMainImageStreamId)) {
1602 flagAnErrorFrameNumber(resultExtras.frameNumber);
1603 res = true;
1604 }
1605
1606 return res;
1607}
1608
1609 void HeicCompositeStream::onResultError(const CaptureResultExtras& resultExtras) {
1610 // For result error, since the APPS_SEGMENT buffer already contains EXIF,
1611 // simply skip using the capture result metadata to override EXIF.
1612 Mutex::Autolock l(mMutex);
1613
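    // Look up the shutter timestamp for this frame number, first in
    // mFrameNumberMap and then among the inputs already pending processing.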
1614 int64_t timestamp = -1;
1615 for (const auto& fn : mFrameNumberMap) {
1616 if (fn.first == resultExtras.frameNumber) {
1617 timestamp = fn.second;
1618 break;
1619 }
1620 }
1621 if (timestamp == -1) {
1622 for (const auto& inputFrame : mPendingInputFrames) {
1623 if (inputFrame.second.frameNumber == resultExtras.frameNumber) {
1624 timestamp = inputFrame.first;
1625 break;
1626 }
1627 }
1628 }
1629
1630 if (timestamp == -1) {
1631 ALOGE("%s: Failed to find shutter timestamp for result error!", __FUNCTION__);
1632 return;
1633 }
1634
1635 mCaptureResults.emplace(timestamp, std::make_tuple(resultExtras.frameNumber, CameraMetadata()));
1636 mInputReadyCondition.signal();
1637}
1638
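// The codec is used in asynchronous mode, so every MediaCodec event arrives
// here as an AMessage and is forwarded to the parent HeicCompositeStream.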
1639 void HeicCompositeStream::CodecCallbackHandler::onMessageReceived(const sp<AMessage> &msg) {
1640 sp<HeicCompositeStream> parent = mParent.promote();
1641 if (parent == nullptr) return;
1642
1643 switch (msg->what()) {
1644 case kWhatCallbackNotify: {
1645 int32_t cbID;
1646 if (!msg->findInt32("callbackID", &cbID)) {
1647 ALOGE("kWhatCallbackNotify: callbackID is expected.");
1648 break;
1649 }
1650
1651 ALOGV("kWhatCallbackNotify: cbID = %d", cbID);
1652
1653 switch (cbID) {
1654 case MediaCodec::CB_INPUT_AVAILABLE: {
1655 int32_t index;
1656 if (!msg->findInt32("index", &index)) {
1657 ALOGE("CB_INPUT_AVAILABLE: index is expected.");
1658 break;
1659 }
1660 parent->onHeicInputFrameAvailable(index);
1661 break;
1662 }
1663
1664 case MediaCodec::CB_OUTPUT_AVAILABLE: {
1665 int32_t index;
1666 size_t offset;
1667 size_t size;
1668 int64_t timeUs;
1669 int32_t flags;
1670
1671 if (!msg->findInt32("index", &index)) {
1672 ALOGE("CB_OUTPUT_AVAILABLE: index is expected.");
1673 break;
1674 }
1675 if (!msg->findSize("offset", &offset)) {
1676 ALOGE("CB_OUTPUT_AVAILABLE: offset is expected.");
1677 break;
1678 }
1679 if (!msg->findSize("size", &size)) {
1680 ALOGE("CB_OUTPUT_AVAILABLE: size is expected.");
1681 break;
1682 }
1683 if (!msg->findInt64("timeUs", &timeUs)) {
1684 ALOGE("CB_OUTPUT_AVAILABLE: timeUs is expected.");
1685 break;
1686 }
1687 if (!msg->findInt32("flags", &flags)) {
1688 ALOGE("CB_OUTPUT_AVAILABLE: flags is expected.");
1689 break;
1690 }
1691
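                    // Bundle the callback fields into a CodecOutputBufferInfo
                    // for the parent stream to consume.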
1692 CodecOutputBufferInfo bufferInfo = {
1693 index,
1694 (int32_t)offset,
1695 (int32_t)size,
1696 timeUs,
1697 (uint32_t)flags};
1698
1699 parent->onHeicOutputFrameAvailable(bufferInfo);
1700 break;
1701 }
1702
1703 case MediaCodec::CB_OUTPUT_FORMAT_CHANGED: {
1704 sp<AMessage> format;
1705 if (!msg->findMessage("format", &format)) {
1706 ALOGE("CB_OUTPUT_FORMAT_CHANGED: format is expected.");
1707 break;
1708 }
Chong Zhang1fa26862019-09-16 16:15:00 -07001709 // Here format is MediaCodec's internal copy of output format.
1710 // Make a copy since onHeicFormatChanged() might modify it.
1711 sp<AMessage> formatCopy;
1712 if (format != nullptr) {
1713 formatCopy = format->dup();
1714 }
1715 parent->onHeicFormatChanged(formatCopy);
1716                     break;
1717 }
1718
1719 case MediaCodec::CB_ERROR: {
1720 status_t err;
1721 int32_t actionCode;
1722 AString detail;
1723 if (!msg->findInt32("err", &err)) {
1724 ALOGE("CB_ERROR: err is expected.");
1725 break;
1726 }
1727 if (!msg->findInt32("action", &actionCode)) {
1728 ALOGE("CB_ERROR: action is expected.");
1729 break;
1730 }
1731 msg->findString("detail", &detail);
1732 ALOGE("Codec reported error(0x%x), actionCode(%d), detail(%s)",
1733 err, actionCode, detail.c_str());
1734
1735 parent->onHeicCodecError();
1736 break;
1737 }
1738
1739 default: {
1740 ALOGE("kWhatCallbackNotify: callbackID(%d) is unexpected.", cbID);
1741 break;
1742 }
1743 }
1744 break;
1745 }
1746
1747 default:
1748             ALOGE("%s: Unexpected message type: %u", __FUNCTION__, msg->what());
1749 break;
1750 }
1751}
1752
1753}; // namespace camera3
1754}; // namespace android