/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera3-HeicCompositeStream"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <linux/memfd.h>
#include <pthread.h>
#include <sys/syscall.h>

#include <android/hardware/camera/device/3.5/types.h>
#include <libyuv.h>
#include <gui/Surface.h>
#include <utils/Log.h>
#include <utils/Trace.h>

#include <mediadrm/ICrypto.h>
#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/MediaDefs.h>
#include <media/stagefright/MediaCodecConstants.h>

#include "common/CameraDeviceBase.h"
#include "utils/ExifUtils.h"
#include "HeicEncoderInfoManager.h"
#include "HeicCompositeStream.h"

using android::hardware::camera::device::V3_5::CameraBlob;
using android::hardware::camera::device::V3_5::CameraBlobId;

namespace android {
namespace camera3 {

HeicCompositeStream::HeicCompositeStream(wp<CameraDeviceBase> device,
        wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
        CompositeStream(device, cb),
        mUseHeic(false),
        mNumOutputTiles(1),
        mOutputWidth(0),
        mOutputHeight(0),
        mMaxHeicBufferSize(0),
        mGridWidth(HeicEncoderInfoManager::kGridWidth),
        mGridHeight(HeicEncoderInfoManager::kGridHeight),
        mGridRows(1),
        mGridCols(1),
        mUseGrid(false),
        mAppSegmentStreamId(-1),
        mAppSegmentSurfaceId(-1),
        mMainImageStreamId(-1),
        mMainImageSurfaceId(-1),
        mYuvBufferAcquired(false),
        mProducerListener(new ProducerListener()),
        mDequeuedOutputBufferCnt(0),
        mLockedAppSegmentBufferCnt(0),
        mCodecOutputCounter(0),
        mGridTimestampUs(0) {
}

HeicCompositeStream::~HeicCompositeStream() {
    // Call deinitCodec in case stream hasn't been deleted yet to avoid any
    // memory/resource leak.
    deinitCodec();

    mInputAppSegmentBuffers.clear();
    mCodecOutputBuffers.clear();

    mAppSegmentStreamId = -1;
    mAppSegmentSurfaceId = -1;
    mAppSegmentConsumer.clear();
    mAppSegmentSurface.clear();

    mMainImageStreamId = -1;
    mMainImageSurfaceId = -1;
    mMainImageConsumer.clear();
    mMainImageSurface.clear();
}

bool HeicCompositeStream::isHeicCompositeStream(const sp<Surface> &surface) {
    ANativeWindow *anw = surface.get();
    status_t err;
    int format;
    if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
        String8 msg = String8::format("Failed to query Surface format: %s (%d)", strerror(-err),
                err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return false;
    }

    int dataspace;
    if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
        String8 msg = String8::format("Failed to query Surface dataspace: %s (%d)", strerror(-err),
                err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return false;
    }

    return ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == HAL_DATASPACE_HEIF));
}

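// Set up the two internal streams that feed this composite stream: a BLOB
// stream carrying the JPEG APP segments (consumed by a CpuConsumer), and the
// main image stream that feeds the HEIC/HEVC encoder either through the
// codec's input surface (no tiling) or through a YUV_888 CpuConsumer
// (framework tiling).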
status_t HeicCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
        bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
        camera3_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
        std::vector<int> *surfaceIds, int /*streamSetId*/, bool /*isShared*/) {

    sp<CameraDeviceBase> device = mDevice.promote();
    if (!device.get()) {
        ALOGE("%s: Invalid camera device!", __FUNCTION__);
        return NO_INIT;
    }

    status_t res = initializeCodec(width, height, device);
    if (res != OK) {
        ALOGE("%s: Failed to initialize HEIC/HEVC codec: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return NO_INIT;
    }

    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    BufferQueue::createBufferQueue(&producer, &consumer);
    mAppSegmentConsumer = new CpuConsumer(consumer, kMaxAcquiredAppSegment);
    mAppSegmentConsumer->setFrameAvailableListener(this);
    mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
    mAppSegmentSurface = new Surface(producer);

    mStaticInfo = device->info();

    res = device->createStream(mAppSegmentSurface, mAppSegmentMaxSize, 1, format,
            kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId, surfaceIds);
    if (res == OK) {
        mAppSegmentSurfaceId = (*surfaceIds)[0];
    } else {
        ALOGE("%s: Failed to create JPEG App segment stream: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    if (!mUseGrid) {
        res = mCodec->createInputSurface(&producer);
        if (res != OK) {
            ALOGE("%s: Failed to create input surface for Heic codec: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    } else {
        BufferQueue::createBufferQueue(&producer, &consumer);
        mMainImageConsumer = new CpuConsumer(consumer, 1);
        mMainImageConsumer->setFrameAvailableListener(this);
        mMainImageConsumer->setName(String8("Camera3-HeicComposite-HevcInputYUVStream"));
    }
    mMainImageSurface = new Surface(producer);

    res = mCodec->start();
    if (res != OK) {
        ALOGE("%s: Failed to start codec: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    std::vector<int> sourceSurfaceId;
    // Use YUV_888 format if framework tiling is needed.
    int srcStreamFmt = mUseGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    res = device->createStream(mMainImageSurface, width, height, srcStreamFmt, kHeifDataSpace,
            rotation, id, physicalCameraId, &sourceSurfaceId);
    if (res == OK) {
        mMainImageSurfaceId = sourceSurfaceId[0];
        mMainImageStreamId = *id;
    } else {
        ALOGE("%s: Failed to create main image stream: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    mOutputSurface = consumers[0];
    res = registerCompositeStreamListener(getStreamId());
    if (res != OK) {
        ALOGE("%s: Failed to register HAL main image stream", __FUNCTION__);
        return res;
    }

    initCopyRowFunction(width);
    return res;
}

status_t HeicCompositeStream::deleteInternalStreams() {
    requestExit();
    auto res = join();
    if (res != OK) {
        ALOGE("%s: Failed to join with the main processing thread: %s (%d)", __FUNCTION__,
                strerror(-res), res);
    }

    deinitCodec();

    if (mAppSegmentStreamId >= 0) {
        sp<CameraDeviceBase> device = mDevice.promote();
        if (!device.get()) {
            ALOGE("%s: Invalid camera device!", __FUNCTION__);
            return NO_INIT;
        }

        res = device->deleteStream(mAppSegmentStreamId);
        mAppSegmentStreamId = -1;
    }

    if (mOutputSurface != nullptr) {
        mOutputSurface->disconnect(NATIVE_WINDOW_API_CAMERA);
        mOutputSurface.clear();
    }
    return res;
}

void HeicCompositeStream::onBufferReleased(const BufferInfo& bufferInfo) {
    Mutex::Autolock l(mMutex);

    if (bufferInfo.mError) return;

    mCodecOutputBufferTimestamps.push(bufferInfo.mTimestamp);
    ALOGV("%s: [%" PRId64 "]: Adding codecOutputBufferTimestamp (%zu timestamps in total)",
            __FUNCTION__, bufferInfo.mTimestamp, mCodecOutputBufferTimestamps.size());
}

// We need to get the settings early to handle the case where the codec output
// arrives earlier than result metadata.
void HeicCompositeStream::onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId,
        const CameraMetadata& settings) {
    ATRACE_ASYNC_BEGIN("HEIC capture", frameNumber);

    Mutex::Autolock l(mMutex);
    if (mErrorState || (streamId != getStreamId())) {
        return;
    }

    mPendingCaptureResults.emplace(frameNumber, CameraMetadata());

    camera_metadata_ro_entry entry;

    int32_t orientation = 0;
    entry = settings.find(ANDROID_JPEG_ORIENTATION);
    if (entry.count == 1) {
        orientation = entry.data.i32[0];
    }

    int32_t quality = kDefaultJpegQuality;
    entry = settings.find(ANDROID_JPEG_QUALITY);
    if (entry.count == 1) {
        quality = entry.data.i32[0];
    }

    mSettingsByFrameNumber[frameNumber] = std::make_pair(orientation, quality);
}

void HeicCompositeStream::onFrameAvailable(const BufferItem& item) {
    if (item.mDataSpace == static_cast<android_dataspace>(kAppSegmentDataSpace)) {
        ALOGV("%s: JPEG APP segments buffer with ts: %" PRIu64 " ms. arrived!",
                __func__, ns2ms(item.mTimestamp));

        Mutex::Autolock l(mMutex);
        if (!mErrorState) {
            mInputAppSegmentBuffers.push_back(item.mTimestamp);
            mInputReadyCondition.signal();
        }
    } else if (item.mDataSpace == kHeifDataSpace) {
        ALOGV("%s: YUV_888 buffer with ts: %" PRIu64 " ms. arrived!",
                __func__, ns2ms(item.mTimestamp));

        Mutex::Autolock l(mMutex);
        if (!mUseGrid) {
            ALOGE("%s: YUV_888 internal stream is only supported for HEVC tiling",
                    __FUNCTION__);
            return;
        }
        if (!mErrorState) {
            mInputYuvBuffers.push_back(item.mTimestamp);
            mInputReadyCondition.signal();
        }
    } else {
        ALOGE("%s: Unexpected data space: 0x%x", __FUNCTION__, item.mDataSpace);
    }
}

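// Report the internal streams this composite stream expands into: index 0 is
// the JPEG APP segments BLOB stream (its width carries the maximum blob size),
// index 1 is the main image stream (YUV_888 when framework tiling is used,
// otherwise IMPLEMENTATION_DEFINED for the codec input surface).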
status_t HeicCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
        const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/) {
    if (compositeOutput == nullptr) {
        return BAD_VALUE;
    }

    compositeOutput->clear();

    bool useGrid, useHeic;
    bool isSizeSupported = isSizeSupportedByHeifEncoder(
            streamInfo.width, streamInfo.height, &useHeic, &useGrid, nullptr);
    if (!isSizeSupported) {
        // Size is not supported by either encoder.
        return OK;
    }

    compositeOutput->insert(compositeOutput->end(), 2, streamInfo);

    // JPEG APP segments Blob stream info
    (*compositeOutput)[0].width = calcAppSegmentMaxSize(ch);
    (*compositeOutput)[0].height = 1;
    (*compositeOutput)[0].format = HAL_PIXEL_FORMAT_BLOB;
    (*compositeOutput)[0].dataSpace = kAppSegmentDataSpace;
    (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;

    // YUV/IMPLEMENTATION_DEFINED stream info
    (*compositeOutput)[1].width = streamInfo.width;
    (*compositeOutput)[1].height = streamInfo.height;
    (*compositeOutput)[1].format = useGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    (*compositeOutput)[1].dataSpace = kHeifDataSpace;
    (*compositeOutput)[1].consumerUsage = useHeic ? GRALLOC_USAGE_HW_IMAGE_ENCODER :
            useGrid ? GRALLOC_USAGE_SW_READ_OFTEN : GRALLOC_USAGE_HW_VIDEO_ENCODER;

    return NO_ERROR;
}

bool HeicCompositeStream::isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
        bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName) {
    static HeicEncoderInfoManager& heicManager = HeicEncoderInfoManager::getInstance();
    return heicManager.isSizeSupported(width, height, useHeic, useGrid, stall, hevcName);
}

bool HeicCompositeStream::isInMemoryTempFileSupported() {
    int memfd = syscall(__NR_memfd_create, "HEIF-try-memfd", MFD_CLOEXEC);
    if (memfd == -1) {
        if (errno != ENOSYS) {
            ALOGE("%s: Failed to create tmpfs file. errno %d", __FUNCTION__, errno);
        }
        return false;
    }
    close(memfd);
    return true;
}

void HeicCompositeStream::onHeicOutputFrameAvailable(
        const CodecOutputBufferInfo& outputBufferInfo) {
    Mutex::Autolock l(mMutex);

    ALOGV("%s: index %d, offset %d, size %d, time %" PRId64 ", flags 0x%x",
            __FUNCTION__, outputBufferInfo.index, outputBufferInfo.offset,
            outputBufferInfo.size, outputBufferInfo.timeUs, outputBufferInfo.flags);

    if (!mErrorState) {
        if ((outputBufferInfo.size > 0) &&
                ((outputBufferInfo.flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0)) {
            mCodecOutputBuffers.push_back(outputBufferInfo);
            mInputReadyCondition.signal();
        } else {
            ALOGV("%s: Releasing output buffer: size %d flags: 0x%x", __FUNCTION__,
                    outputBufferInfo.size, outputBufferInfo.flags);
            mCodec->releaseOutputBuffer(outputBufferInfo.index);
        }
    } else {
        mCodec->releaseOutputBuffer(outputBufferInfo.index);
    }
}

void HeicCompositeStream::onHeicInputFrameAvailable(int32_t index) {
    Mutex::Autolock l(mMutex);

    if (!mUseGrid) {
        ALOGE("%s: Codec YUV input mode must only be used for Hevc tiling mode", __FUNCTION__);
        return;
    }

    mCodecInputBuffers.push_back(index);
    mInputReadyCondition.signal();
}

void HeicCompositeStream::onHeicFormatChanged(sp<AMessage>& newFormat) {
    if (newFormat == nullptr) {
        ALOGE("%s: newFormat must not be null!", __FUNCTION__);
        return;
    }

    Mutex::Autolock l(mMutex);

    AString mime;
    AString mimeHeic(MIMETYPE_IMAGE_ANDROID_HEIC);
    newFormat->findString(KEY_MIME, &mime);
    if (mime != mimeHeic) {
        // For HEVC codec, below keys need to be filled out or overwritten so that the
        // muxer can handle them as HEIC output image.
        newFormat->setString(KEY_MIME, mimeHeic);
        newFormat->setInt32(KEY_WIDTH, mOutputWidth);
        newFormat->setInt32(KEY_HEIGHT, mOutputHeight);
        if (mUseGrid) {
            newFormat->setInt32(KEY_TILE_WIDTH, mGridWidth);
            newFormat->setInt32(KEY_TILE_HEIGHT, mGridHeight);
            newFormat->setInt32(KEY_GRID_ROWS, mGridRows);
            newFormat->setInt32(KEY_GRID_COLUMNS, mGridCols);
        }
    }
    newFormat->setInt32(KEY_IS_DEFAULT, 1 /*isPrimary*/);

    int32_t gridRows, gridCols;
    if (newFormat->findInt32(KEY_GRID_ROWS, &gridRows) &&
            newFormat->findInt32(KEY_GRID_COLUMNS, &gridCols)) {
        mNumOutputTiles = gridRows * gridCols;
    } else {
        mNumOutputTiles = 1;
    }

    mFormat = newFormat;

    ALOGV("%s: mNumOutputTiles is %zu", __FUNCTION__, mNumOutputTiles);
    mInputReadyCondition.signal();
}

void HeicCompositeStream::onHeicCodecError() {
    Mutex::Autolock l(mMutex);
    mErrorState = true;
}

status_t HeicCompositeStream::configureStream() {
    if (isRunning()) {
        // Processing thread is already running, nothing more to do.
        return NO_ERROR;
    }

    if (mOutputSurface.get() == nullptr) {
        ALOGE("%s: No valid output surface set!", __FUNCTION__);
        return NO_INIT;
    }

    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
    if (res != OK) {
        ALOGE("%s: Unable to connect to native window for stream %d",
                __FUNCTION__, mMainImageStreamId);
        return res;
    }

    if ((res = native_window_set_buffers_format(mOutputSurface.get(), HAL_PIXEL_FORMAT_BLOB))
            != OK) {
        ALOGE("%s: Unable to configure stream buffer format for stream %d", __FUNCTION__,
                mMainImageStreamId);
        return res;
    }

    ANativeWindow *anwConsumer = mOutputSurface.get();
    int maxConsumerBuffers;
    if ((res = anwConsumer->query(anwConsumer, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
                    &maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to query consumer undequeued"
                " buffer count for stream %d", __FUNCTION__, mMainImageStreamId);
        return res;
    }

    // Cannot use SourceSurface buffer count since it could be codec's 512*512 tile
    // buffer count.
    if ((res = native_window_set_buffer_count(
                    anwConsumer, kMaxOutputSurfaceProducerCount + maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to set buffer count for stream %d", __FUNCTION__, mMainImageStreamId);
        return res;
    }

    if ((res = native_window_set_buffers_dimensions(anwConsumer, mMaxHeicBufferSize, 1)) != OK) {
        ALOGE("%s: Unable to set buffer dimension %zu x 1 for stream %d: %s (%d)",
                __FUNCTION__, mMaxHeicBufferSize, mMainImageStreamId, strerror(-res), res);
        return res;
    }

    run("HeicCompositeStreamProc");

    return NO_ERROR;
}

status_t HeicCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
        Vector<int32_t>* /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
    if (outSurfaceMap->find(mAppSegmentStreamId) == outSurfaceMap->end()) {
        (*outSurfaceMap)[mAppSegmentStreamId] = std::vector<size_t>();
        outputStreamIds->push_back(mAppSegmentStreamId);
    }
    (*outSurfaceMap)[mAppSegmentStreamId].push_back(mAppSegmentSurfaceId);

    if (outSurfaceMap->find(mMainImageStreamId) == outSurfaceMap->end()) {
        (*outSurfaceMap)[mMainImageStreamId] = std::vector<size_t>();
        outputStreamIds->push_back(mMainImageStreamId);
    }
    (*outSurfaceMap)[mMainImageStreamId].push_back(mMainImageSurfaceId);

    if (currentStreamId != nullptr) {
        *currentStreamId = mMainImageStreamId;
    }

    return NO_ERROR;
}

void HeicCompositeStream::onShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) {
    Mutex::Autolock l(mMutex);
    if (mErrorState) {
        return;
    }

    if (mSettingsByFrameNumber.find(resultExtras.frameNumber) != mSettingsByFrameNumber.end()) {
        ALOGV("%s: [%" PRId64 "]: frameNumber %" PRId64, __FUNCTION__,
                timestamp, resultExtras.frameNumber);
        mFrameNumberMap.emplace(resultExtras.frameNumber, timestamp);
        mSettingsByTimestamp[timestamp] = mSettingsByFrameNumber[resultExtras.frameNumber];
        mSettingsByFrameNumber.erase(resultExtras.frameNumber);
        mInputReadyCondition.signal();
    }
}

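// Move everything that has arrived asynchronously (shutter settings, JPEG APP
// segment buffers, YUV buffers, codec output/input buffers, frame numbers,
// capture results, and error notifications) into mPendingInputFrames, keyed by
// capture timestamp, so processInputFrame() can consume them in order.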
void HeicCompositeStream::compilePendingInputLocked() {
    while (!mSettingsByTimestamp.empty()) {
        auto it = mSettingsByTimestamp.begin();
        mPendingInputFrames[it->first].orientation = it->second.first;
        mPendingInputFrames[it->first].quality = it->second.second;
        mSettingsByTimestamp.erase(it);
    }

    while (!mInputAppSegmentBuffers.empty()) {
        CpuConsumer::LockedBuffer imgBuffer;
        auto it = mInputAppSegmentBuffers.begin();
        auto res = mAppSegmentConsumer->lockNextBuffer(&imgBuffer);
        if (res == NOT_ENOUGH_DATA) {
            // Can not lock any more buffers.
            break;
        } else if ((res != OK) || (*it != imgBuffer.timestamp)) {
            if (res != OK) {
                ALOGE("%s: Error locking JPEG_APP_SEGMENTS image buffer: %s (%d)", __FUNCTION__,
                        strerror(-res), res);
            } else {
                ALOGE("%s: Expecting JPEG_APP_SEGMENTS buffer with time stamp: %" PRId64
                        " received buffer with time stamp: %" PRId64, __FUNCTION__,
                        *it, imgBuffer.timestamp);
                mAppSegmentConsumer->unlockBuffer(imgBuffer);
            }
            mPendingInputFrames[*it].error = true;
            mInputAppSegmentBuffers.erase(it);
            continue;
        }

        if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
                (mPendingInputFrames[imgBuffer.timestamp].error)) {
            mAppSegmentConsumer->unlockBuffer(imgBuffer);
        } else {
            mPendingInputFrames[imgBuffer.timestamp].appSegmentBuffer = imgBuffer;
            mLockedAppSegmentBufferCnt++;
        }
        mInputAppSegmentBuffers.erase(it);
    }

    while (!mInputYuvBuffers.empty() && !mYuvBufferAcquired) {
        CpuConsumer::LockedBuffer imgBuffer;
        auto it = mInputYuvBuffers.begin();
        auto res = mMainImageConsumer->lockNextBuffer(&imgBuffer);
        if (res == NOT_ENOUGH_DATA) {
            // Can not lock any more buffers.
            break;
        } else if (res != OK) {
            ALOGE("%s: Error locking YUV_888 image buffer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            mPendingInputFrames[*it].error = true;
            mInputYuvBuffers.erase(it);
            continue;
        } else if (*it != imgBuffer.timestamp) {
            ALOGW("%s: Expecting YUV_888 buffer with time stamp: %" PRId64 " received buffer with "
                    "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
            mPendingInputFrames[*it].error = true;
            mInputYuvBuffers.erase(it);
            continue;
        }

        if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
                (mPendingInputFrames[imgBuffer.timestamp].error)) {
            mMainImageConsumer->unlockBuffer(imgBuffer);
        } else {
            mPendingInputFrames[imgBuffer.timestamp].yuvBuffer = imgBuffer;
            mYuvBufferAcquired = true;
        }
        mInputYuvBuffers.erase(it);
    }

    while (!mCodecOutputBuffers.empty()) {
        auto it = mCodecOutputBuffers.begin();
        // Bitstream buffer timestamp doesn't necessarily directly correlate with input
        // buffer timestamp. Assume encoder input to output is FIFO, use a queue
        // to look up timestamp.
        int64_t bufferTime = -1;
        if (mCodecOutputBufferTimestamps.empty()) {
            ALOGE("%s: Failed to find buffer timestamp for codec output buffer!", __FUNCTION__);
        } else {
            // Direct mapping between camera timestamp (in ns) and codec timestamp (in us).
            bufferTime = mCodecOutputBufferTimestamps.front();
            mCodecOutputCounter++;
            if (mCodecOutputCounter == mNumOutputTiles) {
                mCodecOutputBufferTimestamps.pop();
                mCodecOutputCounter = 0;
            }

            mPendingInputFrames[bufferTime].codecOutputBuffers.push_back(*it);
            ALOGV("%s: [%" PRId64 "]: Pushing codecOutputBuffers (time %" PRId64 " us)",
                    __FUNCTION__, bufferTime, it->timeUs);
        }
        mCodecOutputBuffers.erase(it);
    }

    while (!mFrameNumberMap.empty()) {
        auto it = mFrameNumberMap.begin();
        mPendingInputFrames[it->second].frameNumber = it->first;
        ALOGV("%s: [%" PRId64 "]: frameNumber is %" PRId64, __FUNCTION__, it->second, it->first);
        mFrameNumberMap.erase(it);
    }

    while (!mCaptureResults.empty()) {
        auto it = mCaptureResults.begin();
        // Negative timestamp indicates that something went wrong during the capture result
        // collection process.
        if (it->first >= 0) {
            if (mPendingInputFrames[it->first].frameNumber == std::get<0>(it->second)) {
                mPendingInputFrames[it->first].result =
                        std::make_unique<CameraMetadata>(std::get<1>(it->second));
            } else {
                ALOGE("%s: Capture result frameNumber/timestamp mapping changed between "
                        "shutter and capture result!", __FUNCTION__);
            }
        }
        mCaptureResults.erase(it);
    }

    // mErrorFrameNumbers stores frame number of dropped buffers.
    auto it = mErrorFrameNumbers.begin();
    while (it != mErrorFrameNumbers.end()) {
        bool frameFound = false;
        for (auto &inputFrame : mPendingInputFrames) {
            if (inputFrame.second.frameNumber == *it) {
                inputFrame.second.error = true;
                frameFound = true;
                break;
            }
        }

        if (frameFound) {
            it = mErrorFrameNumbers.erase(it);
        } else {
            ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__,
                    *it);
            it++;
        }
    }

    // Distribute codec input buffers to be filled out from YUV output
    for (auto it = mPendingInputFrames.begin();
            it != mPendingInputFrames.end() && mCodecInputBuffers.size() > 0; it++) {
        InputFrame& inputFrame(it->second);
        if (inputFrame.codecInputCounter < mGridRows * mGridCols) {
            // Available input tiles that are required for the current input
            // image.
            size_t newInputTiles = std::min(mCodecInputBuffers.size(),
                    mGridRows * mGridCols - inputFrame.codecInputCounter);
            for (size_t i = 0; i < newInputTiles; i++) {
                CodecInputBufferInfo inputInfo =
                        { mCodecInputBuffers[0], mGridTimestampUs++, inputFrame.codecInputCounter };
                inputFrame.codecInputBuffers.push_back(inputInfo);

                mCodecInputBuffers.erase(mCodecInputBuffers.begin());
                inputFrame.codecInputCounter++;
            }
            break;
        }
    }
}

bool HeicCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*out*/) {
    if (currentTs == nullptr) {
        return false;
    }

    bool newInputAvailable = false;
    for (auto& it : mPendingInputFrames) {
        // New input is considered to be available only if:
        // 1. input buffers are ready, or
        // 2. App segment and muxer is created, or
        // 3. A codec output tile is ready, and an output buffer is available.
        // This makes sure that muxer gets created only when an output tile is
        // generated, because right now we only handle 1 HEIC output buffer at a
        // time (max dequeued buffer count is 1).
        bool appSegmentReady = (it.second.appSegmentBuffer.data != nullptr) &&
                !it.second.appSegmentWritten && it.second.result != nullptr &&
                it.second.muxer != nullptr;
        bool codecOutputReady = !it.second.codecOutputBuffers.empty();
        bool codecInputReady = (it.second.yuvBuffer.data != nullptr) &&
                (!it.second.codecInputBuffers.empty());
        bool hasOutputBuffer = it.second.muxer != nullptr ||
                (mDequeuedOutputBufferCnt < kMaxOutputSurfaceProducerCount);
        if ((!it.second.error) &&
                (it.first < *currentTs) &&
                (appSegmentReady || (codecOutputReady && hasOutputBuffer) || codecInputReady)) {
            *currentTs = it.first;
            if (it.second.format == nullptr && mFormat != nullptr) {
                it.second.format = mFormat->dup();
            }
            newInputAvailable = true;
            break;
        }
    }

    return newInputAvailable;
}

int64_t HeicCompositeStream::getNextFailingInputLocked(int64_t *currentTs /*out*/) {
    int64_t res = -1;
    if (currentTs == nullptr) {
        return res;
    }

    for (const auto& it : mPendingInputFrames) {
        if (it.second.error && !it.second.errorNotified && (it.first < *currentTs)) {
            *currentTs = it.first;
            res = it.second.frameNumber;
            break;
        }
    }

    return res;
}

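// Process one pending input frame: feed any ready YUV tiles to the codec,
// start the muxer once the first encoded tile and an output buffer are
// available, write the JPEG APP segments and encoded tiles to the muxer, and
// finalize the output buffer once all tiles and the app segments are written.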
status_t HeicCompositeStream::processInputFrame(nsecs_t timestamp,
        InputFrame &inputFrame) {
    ATRACE_CALL();
    status_t res = OK;

    bool appSegmentReady = inputFrame.appSegmentBuffer.data != nullptr &&
            !inputFrame.appSegmentWritten && inputFrame.result != nullptr &&
            inputFrame.muxer != nullptr;
    bool codecOutputReady = inputFrame.codecOutputBuffers.size() > 0;
    bool codecInputReady = inputFrame.yuvBuffer.data != nullptr &&
            !inputFrame.codecInputBuffers.empty();
    bool hasOutputBuffer = inputFrame.muxer != nullptr ||
            (mDequeuedOutputBufferCnt < kMaxOutputSurfaceProducerCount);

    ALOGV("%s: [%" PRId64 "]: appSegmentReady %d, codecOutputReady %d, codecInputReady %d,"
            " dequeuedOutputBuffer %d", __FUNCTION__, timestamp, appSegmentReady,
            codecOutputReady, codecInputReady, mDequeuedOutputBufferCnt);

    // Handle inputs for Hevc tiling
    if (codecInputReady) {
        res = processCodecInputFrame(inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process codec input frame: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    if (!(codecOutputReady && hasOutputBuffer) && !appSegmentReady) {
        return OK;
    }

    // Initialize and start muxer if not yet done so. In this case,
    // codecOutputReady must be true. Otherwise, appSegmentReady is guaranteed
    // to be false, and the function must have returned early.
    if (inputFrame.muxer == nullptr) {
        res = startMuxerForInputFrame(timestamp, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to create and start muxer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Write JPEG APP segments data to the muxer.
    if (appSegmentReady) {
        res = processAppSegment(timestamp, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process JPEG APP segments: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Write media codec bitstream buffers to muxer.
    while (!inputFrame.codecOutputBuffers.empty()) {
        res = processOneCodecOutputFrame(timestamp, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process codec output frame: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    if (inputFrame.pendingOutputTiles == 0) {
        if (inputFrame.appSegmentWritten) {
            res = processCompletedInputFrame(timestamp, inputFrame);
            if (res != OK) {
                ALOGE("%s: Failed to process completed input frame: %s (%d)", __FUNCTION__,
                        strerror(-res), res);
                return res;
            }
        } else if (mLockedAppSegmentBufferCnt == kMaxAcquiredAppSegment) {
            ALOGE("%s: Out-of-order app segment buffers reaches limit %u", __FUNCTION__,
                    kMaxAcquiredAppSegment);
            return INVALID_OPERATION;
        }
    }

    return res;
}

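// Dequeue a HEIC output buffer and start a MediaMuxer that writes the HEIF
// container to an in-memory temp file (memfd); the finished file is copied
// into the output buffer by processCompletedInputFrame().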
status_t HeicCompositeStream::startMuxerForInputFrame(nsecs_t timestamp, InputFrame &inputFrame) {
    sp<ANativeWindow> outputANW = mOutputSurface;

    auto res = outputANW->dequeueBuffer(mOutputSurface.get(), &inputFrame.anb, &inputFrame.fenceFd);
    if (res != OK) {
        ALOGE("%s: Error retrieving output buffer: %s (%d)", __FUNCTION__, strerror(-res),
                res);
        return res;
    }
    mDequeuedOutputBufferCnt++;

    // Combine current thread id, stream id and timestamp to uniquely identify image.
    std::ostringstream tempOutputFile;
    tempOutputFile << "HEIF-" << pthread_self() << "-"
            << getStreamId() << "-" << timestamp;
    inputFrame.fileFd = syscall(__NR_memfd_create, tempOutputFile.str().c_str(), MFD_CLOEXEC);
    if (inputFrame.fileFd < 0) {
        ALOGE("%s: Failed to create file %s. Error no is %d", __FUNCTION__,
                tempOutputFile.str().c_str(), errno);
        return NO_INIT;
    }
    inputFrame.muxer = new MediaMuxer(inputFrame.fileFd, MediaMuxer::OUTPUT_FORMAT_HEIF);
    if (inputFrame.muxer == nullptr) {
        ALOGE("%s: Failed to create MediaMuxer for file fd %d",
                __FUNCTION__, inputFrame.fileFd);
        return NO_INIT;
    }

    res = inputFrame.muxer->setOrientationHint(inputFrame.orientation);
    if (res != OK) {
        ALOGE("%s: Failed to setOrientationHint: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }
    // Set encoder quality
    {
        sp<AMessage> qualityParams = new AMessage;
        qualityParams->setInt32(PARAMETER_KEY_VIDEO_BITRATE, inputFrame.quality);
        res = mCodec->setParameters(qualityParams);
        if (res != OK) {
            ALOGE("%s: Failed to set codec quality: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    }

    ssize_t trackId = inputFrame.muxer->addTrack(inputFrame.format);
    if (trackId < 0) {
        ALOGE("%s: Failed to addTrack to the muxer: %zd", __FUNCTION__, trackId);
        return NO_INIT;
    }

    inputFrame.trackIndex = trackId;
    inputFrame.pendingOutputTiles = mNumOutputTiles;

    res = inputFrame.muxer->start();
    if (res != OK) {
        ALOGE("%s: Failed to start MediaMuxer: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    ALOGV("%s: [%" PRId64 "]: Muxer started for inputFrame", __FUNCTION__,
            timestamp);
    return OK;
}

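// Replace the APP1 (EXIF) segment delivered by the HAL with one regenerated
// from the capture result metadata, keep any trailing APPn segments as-is, and
// write the assembled blob to the muxer as MUXER_DATA. The assembled buffer is
// laid out as:
//   'E' 'x' 'i' 'f' | 0xFF 0xE1 | APP1 length (2 bytes, big endian) |
//   new APP1 payload | remaining APPn segments copied from the HAL buffer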
status_t HeicCompositeStream::processAppSegment(nsecs_t timestamp, InputFrame &inputFrame) {
    size_t app1Size = 0;
    auto appSegmentSize = findAppSegmentsSize(inputFrame.appSegmentBuffer.data,
            inputFrame.appSegmentBuffer.width * inputFrame.appSegmentBuffer.height,
            &app1Size);
    if (appSegmentSize == 0) {
        ALOGE("%s: Failed to find JPEG APP segment size", __FUNCTION__);
        return NO_INIT;
    }

    std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
    auto exifRes = exifUtils->initialize(inputFrame.appSegmentBuffer.data, app1Size);
    if (!exifRes) {
        ALOGE("%s: Failed to initialize ExifUtils object!", __FUNCTION__);
        return BAD_VALUE;
    }
    exifRes = exifUtils->setFromMetadata(*inputFrame.result, mStaticInfo,
            mOutputWidth, mOutputHeight);
    if (!exifRes) {
        ALOGE("%s: Failed to set Exif tags using metadata and main image sizes", __FUNCTION__);
        return BAD_VALUE;
    }
    exifRes = exifUtils->setOrientation(inputFrame.orientation);
    if (!exifRes) {
        ALOGE("%s: ExifUtils failed to set orientation", __FUNCTION__);
        return BAD_VALUE;
    }
    exifRes = exifUtils->generateApp1();
    if (!exifRes) {
        ALOGE("%s: ExifUtils failed to generate APP1 segment", __FUNCTION__);
        return BAD_VALUE;
    }

    unsigned int newApp1Length = exifUtils->getApp1Length();
    const uint8_t *newApp1Segment = exifUtils->getApp1Buffer();

    // Assemble the APP1 marker buffer required by MediaCodec
    uint8_t kExifApp1Marker[] = {'E', 'x', 'i', 'f', 0xFF, 0xE1, 0x00, 0x00};
    kExifApp1Marker[6] = static_cast<uint8_t>(newApp1Length >> 8);
    kExifApp1Marker[7] = static_cast<uint8_t>(newApp1Length & 0xFF);
    size_t appSegmentBufferSize = sizeof(kExifApp1Marker) +
            appSegmentSize - app1Size + newApp1Length;
    uint8_t* appSegmentBuffer = new uint8_t[appSegmentBufferSize];
    memcpy(appSegmentBuffer, kExifApp1Marker, sizeof(kExifApp1Marker));
    memcpy(appSegmentBuffer + sizeof(kExifApp1Marker), newApp1Segment, newApp1Length);
    if (appSegmentSize - app1Size > 0) {
        memcpy(appSegmentBuffer + sizeof(kExifApp1Marker) + newApp1Length,
                inputFrame.appSegmentBuffer.data + app1Size, appSegmentSize - app1Size);
    }

    sp<ABuffer> aBuffer = new ABuffer(appSegmentBuffer, appSegmentBufferSize);
    auto res = inputFrame.muxer->writeSampleData(aBuffer, inputFrame.trackIndex,
            timestamp, MediaCodec::BUFFER_FLAG_MUXER_DATA);
    delete[] appSegmentBuffer;

    if (res != OK) {
        ALOGE("%s: Failed to write JPEG APP segments to muxer: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    ALOGV("%s: [%" PRId64 "]: appSegmentSize is %zu, width %d, height %d, app1Size %zu",
            __FUNCTION__, timestamp, appSegmentSize, inputFrame.appSegmentBuffer.width,
            inputFrame.appSegmentBuffer.height, app1Size);

    inputFrame.appSegmentWritten = true;
    // Release the buffer now so any pending input app segments can be processed
    mAppSegmentConsumer->unlockBuffer(inputFrame.appSegmentBuffer);
    inputFrame.appSegmentBuffer.data = nullptr;
    mLockedAppSegmentBufferCnt--;

    return OK;
}

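// Copy each pending grid tile from the locked YUV buffer into a codec input
// buffer and queue it to the encoder. Edge tiles are clamped so the copy never
// reads past the output width/height.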
status_t HeicCompositeStream::processCodecInputFrame(InputFrame &inputFrame) {
    for (auto& inputBuffer : inputFrame.codecInputBuffers) {
        sp<MediaCodecBuffer> buffer;
        auto res = mCodec->getInputBuffer(inputBuffer.index, &buffer);
        if (res != OK) {
            ALOGE("%s: Error getting codec input buffer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }

        // Copy one tile from source to destination.
        size_t tileX = inputBuffer.tileIndex % mGridCols;
        size_t tileY = inputBuffer.tileIndex / mGridCols;
        size_t top = mGridHeight * tileY;
        size_t left = mGridWidth * tileX;
        size_t width = (tileX == static_cast<size_t>(mGridCols) - 1) ?
                mOutputWidth - tileX * mGridWidth : mGridWidth;
        size_t height = (tileY == static_cast<size_t>(mGridRows) - 1) ?
                mOutputHeight - tileY * mGridHeight : mGridHeight;
        ALOGV("%s: inputBuffer tileIndex [%zu, %zu], top %zu, left %zu, width %zu, height %zu,"
                " timeUs %" PRId64, __FUNCTION__, tileX, tileY, top, left, width, height,
                inputBuffer.timeUs);

        res = copyOneYuvTile(buffer, inputFrame.yuvBuffer, top, left, width, height);
        if (res != OK) {
            ALOGE("%s: Failed to copy YUV tile %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }

        res = mCodec->queueInputBuffer(inputBuffer.index, 0, buffer->capacity(),
                inputBuffer.timeUs, 0, nullptr /*errorDetailMsg*/);
        if (res != OK) {
            ALOGE("%s: Failed to queueInputBuffer to Codec: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    }

    inputFrame.codecInputBuffers.clear();
    return OK;
}

status_t HeicCompositeStream::processOneCodecOutputFrame(nsecs_t timestamp,
        InputFrame &inputFrame) {
    auto it = inputFrame.codecOutputBuffers.begin();
    sp<MediaCodecBuffer> buffer;
    status_t res = mCodec->getOutputBuffer(it->index, &buffer);
    if (res != OK) {
        ALOGE("%s: Error getting Heic codec output buffer at index %d: %s (%d)",
                __FUNCTION__, it->index, strerror(-res), res);
        return res;
    }
    if (buffer == nullptr) {
        ALOGE("%s: Invalid Heic codec output buffer at index %d",
                __FUNCTION__, it->index);
        return BAD_VALUE;
    }

    sp<ABuffer> aBuffer = new ABuffer(buffer->data(), buffer->size());
    res = inputFrame.muxer->writeSampleData(
            aBuffer, inputFrame.trackIndex, timestamp, 0 /*flags*/);
    if (res != OK) {
        ALOGE("%s: Failed to write buffer index %d to muxer: %s (%d)",
                __FUNCTION__, it->index, strerror(-res), res);
        return res;
    }

    mCodec->releaseOutputBuffer(it->index);
    if (inputFrame.pendingOutputTiles == 0) {
        ALOGW("%s: Codec generated more tiles than expected!", __FUNCTION__);
    } else {
        inputFrame.pendingOutputTiles--;
    }

    ALOGV("%s: [%" PRId64 "]: Output buffer index %d",
            __FUNCTION__, timestamp, it->index);

    inputFrame.codecOutputBuffers.erase(inputFrame.codecOutputBuffers.begin());
    return OK;
}

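// All tiles and the app segments have been written: stop the muxer, copy the
// finished HEIF file from the memfd temp file into the graphic buffer, fill in
// the CameraBlob header describing the payload size, and queue the buffer to
// the client-facing output surface.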
status_t HeicCompositeStream::processCompletedInputFrame(nsecs_t timestamp,
        InputFrame &inputFrame) {
    sp<ANativeWindow> outputANW = mOutputSurface;
    inputFrame.muxer->stop();

    // Copy the content of the file to memory.
    sp<GraphicBuffer> gb = GraphicBuffer::from(inputFrame.anb);
    void* dstBuffer;
    auto res = gb->lockAsync(GRALLOC_USAGE_SW_WRITE_OFTEN, &dstBuffer, inputFrame.fenceFd);
    if (res != OK) {
        ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    off_t fSize = lseek(inputFrame.fileFd, 0, SEEK_END);
    if (static_cast<size_t>(fSize) > mMaxHeicBufferSize - sizeof(CameraBlob)) {
        ALOGE("%s: Error: MediaMuxer output size %ld is larger than buffer size %zu",
                __FUNCTION__, fSize, mMaxHeicBufferSize - sizeof(CameraBlob));
        return BAD_VALUE;
    }

    lseek(inputFrame.fileFd, 0, SEEK_SET);
    ssize_t bytesRead = read(inputFrame.fileFd, dstBuffer, fSize);
    if (bytesRead < fSize) {
        ALOGE("%s: Only %zd of %ld bytes read", __FUNCTION__, bytesRead, fSize);
        return BAD_VALUE;
    }

    close(inputFrame.fileFd);
    inputFrame.fileFd = -1;

    // Fill in HEIC header
    uint8_t *header = static_cast<uint8_t*>(dstBuffer) + mMaxHeicBufferSize - sizeof(CameraBlob);
    struct CameraBlob *blobHeader = (struct CameraBlob *)header;
    // Must be in sync with CAMERA3_HEIC_BLOB_ID in android_media_Utils.cpp
    blobHeader->blobId = static_cast<CameraBlobId>(0x00FE);
    blobHeader->blobSize = fSize;

    res = native_window_set_buffers_timestamp(mOutputSurface.get(), timestamp);
    if (res != OK) {
        ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
                __FUNCTION__, getStreamId(), strerror(-res), res);
        return res;
    }

    res = outputANW->queueBuffer(mOutputSurface.get(), inputFrame.anb, /*fence*/ -1);
    if (res != OK) {
        ALOGE("%s: Failed to queueBuffer to Heic stream: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }
    inputFrame.anb = nullptr;
    mDequeuedOutputBufferCnt--;

    ALOGV("%s: [%" PRId64 "]", __FUNCTION__, timestamp);
    ATRACE_ASYNC_END("HEIC capture", inputFrame.frameNumber);
    return OK;
}


void HeicCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/) {
    if (inputFrame == nullptr) {
        return;
    }

    if (inputFrame->appSegmentBuffer.data != nullptr) {
        mAppSegmentConsumer->unlockBuffer(inputFrame->appSegmentBuffer);
        inputFrame->appSegmentBuffer.data = nullptr;
    }

    while (!inputFrame->codecOutputBuffers.empty()) {
        auto it = inputFrame->codecOutputBuffers.begin();
        ALOGV("%s: releaseOutputBuffer index %d", __FUNCTION__, it->index);
        mCodec->releaseOutputBuffer(it->index);
        inputFrame->codecOutputBuffers.erase(it);
    }

    if (inputFrame->yuvBuffer.data != nullptr) {
        mMainImageConsumer->unlockBuffer(inputFrame->yuvBuffer);
        inputFrame->yuvBuffer.data = nullptr;
        mYuvBufferAcquired = false;
    }

    while (!inputFrame->codecInputBuffers.empty()) {
        auto it = inputFrame->codecInputBuffers.begin();
        inputFrame->codecInputBuffers.erase(it);
    }

    if ((inputFrame->error || mErrorState) && !inputFrame->errorNotified) {
        notifyError(inputFrame->frameNumber);
        inputFrame->errorNotified = true;
    }

    if (inputFrame->fileFd >= 0) {
        close(inputFrame->fileFd);
        inputFrame->fileFd = -1;
    }

    if (inputFrame->anb != nullptr) {
        sp<ANativeWindow> outputANW = mOutputSurface;
        outputANW->cancelBuffer(mOutputSurface.get(), inputFrame->anb, /*fence*/ -1);
        inputFrame->anb = nullptr;
    }
}

void HeicCompositeStream::releaseInputFramesLocked() {
    auto it = mPendingInputFrames.begin();
    while (it != mPendingInputFrames.end()) {
        auto& inputFrame = it->second;
        if (inputFrame.error ||
                (inputFrame.appSegmentWritten && inputFrame.pendingOutputTiles == 0)) {
            releaseInputFrameLocked(&inputFrame);
            it = mPendingInputFrames.erase(it);
        } else {
            it++;
        }
    }
}

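// Pick a HEIC encoder if one supports this size, otherwise fall back to an
// HEVC encoder with framework tiling, and configure it (grid geometry, CQ
// bitrate mode, color format) before the internal streams are created.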
status_t HeicCompositeStream::initializeCodec(uint32_t width, uint32_t height,
        const sp<CameraDeviceBase>& cameraDevice) {
    ALOGV("%s", __FUNCTION__);

    bool useGrid = false;
    AString hevcName;
    bool isSizeSupported = isSizeSupportedByHeifEncoder(width, height,
            &mUseHeic, &useGrid, nullptr, &hevcName);
    if (!isSizeSupported) {
        ALOGE("%s: Encoder doesn't support size %u x %u!",
                __FUNCTION__, width, height);
        return BAD_VALUE;
    }

    // Create Looper for MediaCodec.
    auto desiredMime = mUseHeic ? MIMETYPE_IMAGE_ANDROID_HEIC : MIMETYPE_VIDEO_HEVC;
    mCodecLooper = new ALooper;
    mCodecLooper->setName("Camera3-HeicComposite-MediaCodecLooper");
    status_t res = mCodecLooper->start(
            false,   // runOnCallingThread
            false,   // canCallJava
            PRIORITY_AUDIO);
    if (res != OK) {
        ALOGE("%s: Failed to start codec looper: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return NO_INIT;
    }

    // Create HEIC/HEVC codec.
    if (mUseHeic) {
        mCodec = MediaCodec::CreateByType(mCodecLooper, desiredMime, true /*encoder*/);
    } else {
        mCodec = MediaCodec::CreateByComponentName(mCodecLooper, hevcName);
    }
    if (mCodec == nullptr) {
        ALOGE("%s: Failed to create codec for %s", __FUNCTION__, desiredMime);
        return NO_INIT;
    }

    // Create Looper and handler for Codec callback.
    mCodecCallbackHandler = new CodecCallbackHandler(this);
    if (mCodecCallbackHandler == nullptr) {
        ALOGE("%s: Failed to create codec callback handler", __FUNCTION__);
        return NO_MEMORY;
    }
    mCallbackLooper = new ALooper;
    mCallbackLooper->setName("Camera3-HeicComposite-MediaCodecCallbackLooper");
    res = mCallbackLooper->start(
            false,   // runOnCallingThread
            false,   // canCallJava
            PRIORITY_AUDIO);
    if (res != OK) {
        ALOGE("%s: Failed to start media callback looper: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return NO_INIT;
    }
    mCallbackLooper->registerHandler(mCodecCallbackHandler);

    mAsyncNotify = new AMessage(kWhatCallbackNotify, mCodecCallbackHandler);
    res = mCodec->setCallback(mAsyncNotify);
    if (res != OK) {
        ALOGE("%s: Failed to set MediaCodec callback: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    // Create output format and configure the Codec.
    sp<AMessage> outputFormat = new AMessage();
    outputFormat->setString(KEY_MIME, desiredMime);
    outputFormat->setInt32(KEY_BITRATE_MODE, BITRATE_MODE_CQ);
    outputFormat->setInt32(KEY_QUALITY, kDefaultJpegQuality);
    // Ask codec to skip timestamp check and encode all frames.
    outputFormat->setInt64(KEY_MAX_PTS_GAP_TO_ENCODER, kNoFrameDropMaxPtsGap);

    int32_t gridWidth, gridHeight, gridRows, gridCols;
    if (useGrid || mUseHeic) {
        gridWidth = HeicEncoderInfoManager::kGridWidth;
        gridHeight = HeicEncoderInfoManager::kGridHeight;
        gridRows = (height + gridHeight - 1)/gridHeight;
        gridCols = (width + gridWidth - 1)/gridWidth;

        if (mUseHeic) {
            outputFormat->setInt32(KEY_TILE_WIDTH, gridWidth);
            outputFormat->setInt32(KEY_TILE_HEIGHT, gridHeight);
            outputFormat->setInt32(KEY_GRID_COLUMNS, gridCols);
            outputFormat->setInt32(KEY_GRID_ROWS, gridRows);
        }

    } else {
        gridWidth = width;
        gridHeight = height;
        gridRows = 1;
        gridCols = 1;
    }

    outputFormat->setInt32(KEY_WIDTH, !useGrid ? width : gridWidth);
    outputFormat->setInt32(KEY_HEIGHT, !useGrid ? height : gridHeight);
    outputFormat->setInt32(KEY_I_FRAME_INTERVAL, 0);
    outputFormat->setInt32(KEY_COLOR_FORMAT,
            useGrid ? COLOR_FormatYUV420Flexible : COLOR_FormatSurface);
    outputFormat->setInt32(KEY_FRAME_RATE, useGrid ? gridRows * gridCols : kNoGridOpRate);
    // This only serves as a hint to encoder when encoding is not real-time.
    outputFormat->setInt32(KEY_OPERATING_RATE, useGrid ? kGridOpRate : kNoGridOpRate);

    res = mCodec->configure(outputFormat, nullptr /*nativeWindow*/,
            nullptr /*crypto*/, CONFIGURE_FLAG_ENCODE);
    if (res != OK) {
        ALOGE("%s: Failed to configure codec: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    mGridWidth = gridWidth;
    mGridHeight = gridHeight;
    mGridRows = gridRows;
    mGridCols = gridCols;
    mUseGrid = useGrid;
    mOutputWidth = width;
    mOutputHeight = height;
    mAppSegmentMaxSize = calcAppSegmentMaxSize(cameraDevice->info());
    mMaxHeicBufferSize = mOutputWidth * mOutputHeight * 3 / 2 + mAppSegmentMaxSize;

    return OK;
}

void HeicCompositeStream::deinitCodec() {
    ALOGV("%s", __FUNCTION__);
    if (mCodec != nullptr) {
        mCodec->stop();
        mCodec->release();
        mCodec.clear();
    }

    if (mCodecLooper != nullptr) {
        mCodecLooper->stop();
        mCodecLooper.clear();
    }

    if (mCallbackLooper != nullptr) {
        mCallbackLooper->stop();
        mCallbackLooper.clear();
    }

    mAsyncNotify.clear();
    mFormat.clear();
}

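// The HAL delivers the JPEG APP segments as a BLOB buffer laid out roughly as
// (derived from the parsing below):
//   APP1 (0xFF 0xE1 + 2-byte size + payload) | APPn (0xFF 0xE2..0xEF) ... |
//   possibly unused space | CameraBlob { blobId = JPEG_APP_SEGMENTS, blobSize }
//   placed at the very end of the buffer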
// Return the size of the complete list of app segments, 0 indicates failure
size_t HeicCompositeStream::findAppSegmentsSize(const uint8_t* appSegmentBuffer,
        size_t maxSize, size_t *app1SegmentSize) {
    if (appSegmentBuffer == nullptr || app1SegmentSize == nullptr) {
        ALOGE("%s: Invalid input appSegmentBuffer %p, app1SegmentSize %p",
                __FUNCTION__, appSegmentBuffer, app1SegmentSize);
        return 0;
    }

    size_t expectedSize = 0;
    // First check for EXIF transport header at the end of the buffer
    const uint8_t *header = appSegmentBuffer + (maxSize - sizeof(struct CameraBlob));
    const struct CameraBlob *blob = (const struct CameraBlob*)(header);
    if (blob->blobId != CameraBlobId::JPEG_APP_SEGMENTS) {
        ALOGE("%s: Invalid EXIF blobId %hu", __FUNCTION__, blob->blobId);
        return 0;
    }

    expectedSize = blob->blobSize;
    if (expectedSize == 0 || expectedSize > maxSize - sizeof(struct CameraBlob)) {
        ALOGE("%s: Invalid blobSize %zu.", __FUNCTION__, expectedSize);
        return 0;
    }

    uint32_t totalSize = 0;

    // Verify APP1 marker (mandatory)
    uint8_t app1Marker[] = {0xFF, 0xE1};
    if (memcmp(appSegmentBuffer, app1Marker, sizeof(app1Marker))) {
        ALOGE("%s: Invalid APP1 marker: %x, %x", __FUNCTION__,
                appSegmentBuffer[0], appSegmentBuffer[1]);
        return 0;
    }
    totalSize += sizeof(app1Marker);

    uint16_t app1Size = (static_cast<uint16_t>(appSegmentBuffer[totalSize]) << 8) +
            appSegmentBuffer[totalSize+1];
    totalSize += app1Size;

    ALOGV("%s: Expected APP segments size %zu, APP1 segment size %u",
            __FUNCTION__, expectedSize, app1Size);
    while (totalSize < expectedSize) {
        if (appSegmentBuffer[totalSize] != 0xFF ||
                appSegmentBuffer[totalSize+1] <= 0xE1 ||
                appSegmentBuffer[totalSize+1] > 0xEF) {
            // Invalid APPn marker
            ALOGE("%s: Invalid APPn marker: %x, %x", __FUNCTION__,
                    appSegmentBuffer[totalSize], appSegmentBuffer[totalSize+1]);
            return 0;
        }
        totalSize += 2;

        uint16_t appnSize = (static_cast<uint16_t>(appSegmentBuffer[totalSize]) << 8) +
                appSegmentBuffer[totalSize+1];
        totalSize += appnSize;
    }

    if (totalSize != expectedSize) {
        ALOGE("%s: Invalid JPEG APP segments: totalSize %u vs expected size %zu",
                __FUNCTION__, totalSize, expectedSize);
        return 0;
    }

    *app1SegmentSize = app1Size + sizeof(app1Marker);
    return expectedSize;
}

int64_t HeicCompositeStream::findTimestampInNsLocked(int64_t timeInUs) {
    for (const auto& fn : mFrameNumberMap) {
        if (timeInUs == ns2us(fn.second)) {
            return fn.second;
        }
    }
    for (const auto& inputFrame : mPendingInputFrames) {
        if (timeInUs == ns2us(inputFrame.first)) {
            return inputFrame.first;
        }
    }
    return -1;
}

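// Copy one tile of the YUV_888 source into the codec's flexible YUV input
// buffer. Three paths are used depending on the codec's MediaImage2 layout:
// a row-wise copy when both sides are semiplanar with matching Cb/Cr order,
// per-plane row copies when the codec buffer is planar, and a generic
// per-pixel copy otherwise.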
1390status_t HeicCompositeStream::copyOneYuvTile(sp<MediaCodecBuffer>& codecBuffer,
1391 const CpuConsumer::LockedBuffer& yuvBuffer,
1392 size_t top, size_t left, size_t width, size_t height) {
1393 ATRACE_CALL();
1394
1395 // Get stride information for codecBuffer
1396 sp<ABuffer> imageData;
1397 if (!codecBuffer->meta()->findBuffer("image-data", &imageData)) {
1398 ALOGE("%s: Codec input buffer is not for image data!", __FUNCTION__);
1399 return BAD_VALUE;
1400 }
1401 if (imageData->size() != sizeof(MediaImage2)) {
1402 ALOGE("%s: Invalid codec input image size %zu, expected %zu",
1403 __FUNCTION__, imageData->size(), sizeof(MediaImage2));
1404 return BAD_VALUE;
1405 }
1406 MediaImage2* imageInfo = reinterpret_cast<MediaImage2*>(imageData->data());
1407 if (imageInfo->mType != MediaImage2::MEDIA_IMAGE_TYPE_YUV ||
1408 imageInfo->mBitDepth != 8 ||
1409 imageInfo->mBitDepthAllocated != 8 ||
1410 imageInfo->mNumPlanes != 3) {
1411 ALOGE("%s: Invalid codec input image info: mType %d, mBitDepth %d, "
1412 "mBitDepthAllocated %d, mNumPlanes %d!", __FUNCTION__,
1413 imageInfo->mType, imageInfo->mBitDepth,
1414 imageInfo->mBitDepthAllocated, imageInfo->mNumPlanes);
1415 return BAD_VALUE;
1416 }
1417
1418 ALOGV("%s: yuvBuffer chromaStep %d, chromaStride %d",
1419 __FUNCTION__, yuvBuffer.chromaStep, yuvBuffer.chromaStride);
1420 ALOGV("%s: U offset %u, V offset %u, U rowInc %d, V rowInc %d, U colInc %d, V colInc %d",
1421 __FUNCTION__, imageInfo->mPlane[MediaImage2::U].mOffset,
1422 imageInfo->mPlane[MediaImage2::V].mOffset,
1423 imageInfo->mPlane[MediaImage2::U].mRowInc,
1424 imageInfo->mPlane[MediaImage2::V].mRowInc,
1425 imageInfo->mPlane[MediaImage2::U].mColInc,
1426 imageInfo->mPlane[MediaImage2::V].mColInc);
1427
1428 // Y
1429 for (auto row = top; row < top+height; row++) {
1430 uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::Y].mOffset +
1431 imageInfo->mPlane[MediaImage2::Y].mRowInc * (row - top);
Shuzhen Wang219c2992019-02-15 17:24:28 -08001432 mFnCopyRow(yuvBuffer.data+row*yuvBuffer.stride+left, dst, width);
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -08001433 }
1434
    // U is Cb, V is Cr
    bool codecUPlaneFirst = imageInfo->mPlane[MediaImage2::V].mOffset >
            imageInfo->mPlane[MediaImage2::U].mOffset;
    uint32_t codecUvOffsetDiff = codecUPlaneFirst ?
            imageInfo->mPlane[MediaImage2::V].mOffset - imageInfo->mPlane[MediaImage2::U].mOffset :
            imageInfo->mPlane[MediaImage2::U].mOffset - imageInfo->mPlane[MediaImage2::V].mOffset;
    bool isCodecUvSemiplannar = (codecUvOffsetDiff == 1) &&
            (imageInfo->mPlane[MediaImage2::U].mRowInc ==
            imageInfo->mPlane[MediaImage2::V].mRowInc) &&
            (imageInfo->mPlane[MediaImage2::U].mColInc == 2) &&
            (imageInfo->mPlane[MediaImage2::V].mColInc == 2);
    bool isCodecUvPlannar =
            ((codecUPlaneFirst && codecUvOffsetDiff >=
                    imageInfo->mPlane[MediaImage2::U].mRowInc * imageInfo->mHeight/2) ||
            ((!codecUPlaneFirst && codecUvOffsetDiff >=
                    imageInfo->mPlane[MediaImage2::V].mRowInc * imageInfo->mHeight/2))) &&
            imageInfo->mPlane[MediaImage2::U].mColInc == 1 &&
            imageInfo->mPlane[MediaImage2::V].mColInc == 1;
    bool cameraUPlaneFirst = yuvBuffer.dataCr > yuvBuffer.dataCb;

    if (isCodecUvSemiplannar && yuvBuffer.chromaStep == 2 &&
            (codecUPlaneFirst == cameraUPlaneFirst)) {
        // UV semi-planar
        // The chroma plane could be either Cb first, or Cr first. Take the
        // smaller address.
        uint8_t *src = std::min(yuvBuffer.dataCb, yuvBuffer.dataCr);
        MediaImage2::PlaneIndex dstPlane = codecUPlaneFirst ? MediaImage2::U : MediaImage2::V;
        for (auto row = top/2; row < (top+height)/2; row++) {
            uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[dstPlane].mOffset +
                    imageInfo->mPlane[dstPlane].mRowInc * (row - top/2);
            mFnCopyRow(src+row*yuvBuffer.chromaStride+left, dst, width);
        }
    } else if (isCodecUvPlannar && yuvBuffer.chromaStep == 1) {
        // U plane
        for (auto row = top/2; row < (top+height)/2; row++) {
            uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::U].mOffset +
                    imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2);
            mFnCopyRow(yuvBuffer.dataCb+row*yuvBuffer.chromaStride+left/2, dst, width/2);
        }

        // V plane
        for (auto row = top/2; row < (top+height)/2; row++) {
            uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::V].mOffset +
                    imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2);
            mFnCopyRow(yuvBuffer.dataCr+row*yuvBuffer.chromaStride+left/2, dst, width/2);
        }
    } else {
        // Convert between semi-planar and planar layouts, or handle the case
        // where the UV orders differ.
        uint8_t *dst = codecBuffer->data();
        for (auto row = top/2; row < (top+height)/2; row++) {
            for (auto col = left/2; col < (left+width)/2; col++) {
                // U/Cb
                int32_t dstIndex = imageInfo->mPlane[MediaImage2::U].mOffset +
                        imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2) +
                        imageInfo->mPlane[MediaImage2::U].mColInc * (col - left/2);
                int32_t srcIndex = row * yuvBuffer.chromaStride + yuvBuffer.chromaStep * col;
                dst[dstIndex] = yuvBuffer.dataCb[srcIndex];

                // V/Cr
                dstIndex = imageInfo->mPlane[MediaImage2::V].mOffset +
                        imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2) +
                        imageInfo->mPlane[MediaImage2::V].mColInc * (col - left/2);
                srcIndex = row * yuvBuffer.chromaStride + yuvBuffer.chromaStep * col;
                dst[dstIndex] = yuvBuffer.dataCr[srcIndex];
            }
        }
    }
    return OK;
}
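
// Worked example of the layout detection above (illustrative only, hypothetical
// 512x512 tile): an NV12-style codec buffer has U and V offsets that differ by
// exactly 1 with chroma column increments of 2, so it takes the semi-planar
// path; an I420-style buffer has offsets that differ by at least one full
// chroma plane (chroma rowInc * height/2) with column increments of 1, so it
// takes the planar path. Anything else falls back to the per-pixel loop.
#if 0
static void exampleChromaLayouts() {
    constexpr uint32_t kTileWidth = 512, kTileHeight = 512;

    // NV12-style: interleaved UV directly after the Y plane.
    constexpr uint32_t kNv12UOffset = kTileWidth * kTileHeight;
    constexpr uint32_t kNv12VOffset = kNv12UOffset + 1;
    static_assert(kNv12VOffset - kNv12UOffset == 1, "semi-planar: offsets differ by 1");

    // I420-style: full U plane followed by full V plane.
    constexpr uint32_t kI420UOffset = kTileWidth * kTileHeight;
    constexpr uint32_t kI420VOffset = kI420UOffset + (kTileWidth / 2) * (kTileHeight / 2);
    static_assert(kI420VOffset - kI420UOffset >= (kTileWidth / 2) * (kTileHeight / 2),
            "planar: offsets differ by at least one chroma plane");
}
#endif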

void HeicCompositeStream::initCopyRowFunction(int32_t width)
{
    using namespace libyuv;

    mFnCopyRow = CopyRow_C;
#if defined(HAS_COPYROW_SSE2)
    if (TestCpuFlag(kCpuHasSSE2)) {
        mFnCopyRow = IS_ALIGNED(width, 32) ? CopyRow_SSE2 : CopyRow_Any_SSE2;
    }
#endif
#if defined(HAS_COPYROW_AVX)
    if (TestCpuFlag(kCpuHasAVX)) {
        mFnCopyRow = IS_ALIGNED(width, 64) ? CopyRow_AVX : CopyRow_Any_AVX;
    }
#endif
#if defined(HAS_COPYROW_ERMS)
    if (TestCpuFlag(kCpuHasERMS)) {
        mFnCopyRow = CopyRow_ERMS;
    }
#endif
#if defined(HAS_COPYROW_NEON)
    if (TestCpuFlag(kCpuHasNEON)) {
        mFnCopyRow = IS_ALIGNED(width, 32) ? CopyRow_NEON : CopyRow_Any_NEON;
    }
#endif
#if defined(HAS_COPYROW_MIPS)
    if (TestCpuFlag(kCpuHasMIPS)) {
        mFnCopyRow = CopyRow_MIPS;
    }
#endif
}
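
// The routine selected above is invoked as mFnCopyRow(src, dst, count), matching
// libyuv's CopyRow signature, so each tile row becomes a single optimized copy.
// A minimal standalone sketch of the same selection-and-use pattern (assumed
// helper name; libyuv.h is already included at the top of this file):
#if 0
static void exampleCopyRows(const uint8_t* src, int srcStride,
        uint8_t* dst, int dstStride, int width, int height) {
    using namespace libyuv;

    // Pick a row copier the way initCopyRowFunction() does; the "Any" variants
    // tolerate widths that are not SIMD-aligned.
    void (*copyRow)(const uint8_t*, uint8_t*, int) = CopyRow_C;
#if defined(HAS_COPYROW_NEON)
    if (TestCpuFlag(kCpuHasNEON)) {
        copyRow = IS_ALIGNED(width, 32) ? CopyRow_NEON : CopyRow_Any_NEON;
    }
#endif

    for (int row = 0; row < height; row++) {
        copyRow(src + row * srcStride, dst + row * dstStride, width);
    }
}
#endif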

size_t HeicCompositeStream::calcAppSegmentMaxSize(const CameraMetadata& info) {
    camera_metadata_ro_entry_t entry = info.find(ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT);
    size_t maxAppsSegment = 1;
    if (entry.count > 0) {
        maxAppsSegment = entry.data.u8[0] < 1 ? 1 :
                entry.data.u8[0] > 16 ? 16 : entry.data.u8[0];
    }
    return maxAppsSegment * (2 + 0xFFFF) + sizeof(struct CameraBlob);
}
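
// Worked sizing example (illustrative only): one APP segment is bounded by a
// 2-byte 0xFFEn marker plus a length field that maxes out at 0xFFFF and already
// counts itself, so 2 + 0xFFFF bytes per segment. With, say, a reported count
// of 4 the buffer is sized as 4 * (2 + 0xFFFF) + sizeof(CameraBlob), the
// trailing CameraBlob being the transport header that findAppSegmentsSize()
// validates.
#if 0
static_assert(4 * (2 + 0xFFFF) == 262148, "four maximum-size APP segments");
#endif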

bool HeicCompositeStream::threadLoop() {
    int64_t currentTs = INT64_MAX;
    bool newInputAvailable = false;

    {
        Mutex::Autolock l(mMutex);
        if (mErrorState) {
            // In case we landed in error state, return any pending buffers and
            // halt all further processing.
            compilePendingInputLocked();
            releaseInputFramesLocked();
            return false;
        }

        while (!newInputAvailable) {
            compilePendingInputLocked();
            newInputAvailable = getNextReadyInputLocked(&currentTs);

            if (!newInputAvailable) {
                auto failingFrameNumber = getNextFailingInputLocked(&currentTs);
                if (failingFrameNumber >= 0) {
                    // We cannot erase 'mPendingInputFrames[currentTs]' at this point because it is
                    // possible for two internal stream buffers to fail. In such a scenario the
                    // composite stream should notify the client about a stream buffer error only
                    // once, and this information is kept within 'errorNotified'.
                    // Any present failed input frames will be removed on a subsequent call to
                    // 'releaseInputFramesLocked()'.
                    releaseInputFrameLocked(&mPendingInputFrames[currentTs]);
                    currentTs = INT64_MAX;
                }

                auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration);
                if (ret == TIMED_OUT) {
                    return true;
                } else if (ret != OK) {
                    ALOGE("%s: Timed wait on condition failed: %s (%d)", __FUNCTION__,
                            strerror(-ret), ret);
                    return false;
                }
            }
        }
    }

    auto res = processInputFrame(currentTs, mPendingInputFrames[currentTs]);
    Mutex::Autolock l(mMutex);
    if (res != OK) {
        ALOGE("%s: Failed processing frame with timestamp: %" PRIu64 ": %s (%d)",
                __FUNCTION__, currentTs, strerror(-res), res);
        mPendingInputFrames[currentTs].error = true;
    }

    releaseInputFramesLocked();

    return true;
}
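
// threadLoop() follows the android::Thread contract: returning true schedules
// another iteration and returning false (error state, or a failed condition
// wait) stops the worker for good. A condensed sketch of one iteration of the
// loop above (control flow only, names as used in this class):
#if 0
bool exampleIteration() {
    // 1. Under mMutex, fold newly arrived buffers and capture results into
    //    mPendingInputFrames via compilePendingInputLocked().
    // 2. If some frame has all of its inputs, getNextReadyInputLocked() yields its
    //    timestamp; otherwise release any failed frame and block on
    //    mInputReadyCondition (a timeout simply retries on the next iteration).
    // 3. Outside the critical section, encode the ready frame with processInputFrame().
    // 4. Re-acquire mMutex, flag the frame on error, and reclaim finished frames
    //    with releaseInputFramesLocked().
    return true;  // keep the worker thread alive
}
#endif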

bool HeicCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
    bool res = false;
    // Buffer errors concerning internal composite streams should not be directly visible to
    // camera clients. They must only receive a single buffer error with the public composite
    // stream id.
    if ((resultExtras.errorStreamId == mAppSegmentStreamId) ||
            (resultExtras.errorStreamId == mMainImageStreamId)) {
        flagAnErrorFrameNumber(resultExtras.frameNumber);
        res = true;
    }

    return res;
}

void HeicCompositeStream::onResultError(const CaptureResultExtras& resultExtras) {
    // For result error, since the APPS_SEGMENT buffer already contains EXIF,
    // simply skip using the capture result metadata to override EXIF.
    Mutex::Autolock l(mMutex);

    int64_t timestamp = -1;
    for (const auto& fn : mFrameNumberMap) {
        if (fn.first == resultExtras.frameNumber) {
            timestamp = fn.second;
            break;
        }
    }
    if (timestamp == -1) {
        for (const auto& inputFrame : mPendingInputFrames) {
            if (inputFrame.second.frameNumber == resultExtras.frameNumber) {
                timestamp = inputFrame.first;
                break;
            }
        }
    }

    if (timestamp == -1) {
        ALOGE("%s: Failed to find shutter timestamp for result error!", __FUNCTION__);
        return;
    }

    mCaptureResults.emplace(timestamp, std::make_tuple(resultExtras.frameNumber, CameraMetadata()));
    mInputReadyCondition.signal();
}

void HeicCompositeStream::CodecCallbackHandler::onMessageReceived(const sp<AMessage> &msg) {
    sp<HeicCompositeStream> parent = mParent.promote();
    if (parent == nullptr) return;

    switch (msg->what()) {
        case kWhatCallbackNotify: {
            int32_t cbID;
            if (!msg->findInt32("callbackID", &cbID)) {
                ALOGE("kWhatCallbackNotify: callbackID is expected.");
                break;
            }

            ALOGV("kWhatCallbackNotify: cbID = %d", cbID);

            switch (cbID) {
                case MediaCodec::CB_INPUT_AVAILABLE: {
                    int32_t index;
                    if (!msg->findInt32("index", &index)) {
                        ALOGE("CB_INPUT_AVAILABLE: index is expected.");
                        break;
                    }
                    parent->onHeicInputFrameAvailable(index);
                    break;
                }

                case MediaCodec::CB_OUTPUT_AVAILABLE: {
                    int32_t index;
                    size_t offset;
                    size_t size;
                    int64_t timeUs;
                    int32_t flags;

                    if (!msg->findInt32("index", &index)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: index is expected.");
                        break;
                    }
                    if (!msg->findSize("offset", &offset)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: offset is expected.");
                        break;
                    }
                    if (!msg->findSize("size", &size)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: size is expected.");
                        break;
                    }
                    if (!msg->findInt64("timeUs", &timeUs)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: timeUs is expected.");
                        break;
                    }
                    if (!msg->findInt32("flags", &flags)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: flags is expected.");
                        break;
                    }

                    CodecOutputBufferInfo bufferInfo = {
                        index,
                        (int32_t)offset,
                        (int32_t)size,
                        timeUs,
                        (uint32_t)flags};

                    parent->onHeicOutputFrameAvailable(bufferInfo);
                    break;
                }

                case MediaCodec::CB_OUTPUT_FORMAT_CHANGED: {
                    sp<AMessage> format;
                    if (!msg->findMessage("format", &format)) {
                        ALOGE("CB_OUTPUT_FORMAT_CHANGED: format is expected.");
                        break;
                    }
                    // Here format is MediaCodec's internal copy of output format.
                    // Make a copy since onHeicFormatChanged() might modify it.
                    sp<AMessage> formatCopy;
                    if (format != nullptr) {
                        formatCopy = format->dup();
                    }
                    parent->onHeicFormatChanged(formatCopy);
                    break;
                }

                case MediaCodec::CB_ERROR: {
                    status_t err;
                    int32_t actionCode;
                    AString detail;
                    if (!msg->findInt32("err", &err)) {
                        ALOGE("CB_ERROR: err is expected.");
                        break;
                    }
                    if (!msg->findInt32("action", &actionCode)) {
                        ALOGE("CB_ERROR: action is expected.");
                        break;
                    }
                    msg->findString("detail", &detail);
                    ALOGE("Codec reported error(0x%x), actionCode(%d), detail(%s)",
                            err, actionCode, detail.c_str());

                    parent->onHeicCodecError();
                    break;
                }

                default: {
                    ALOGE("kWhatCallbackNotify: callbackID(%d) is unexpected.", cbID);
                    break;
                }
            }
            break;
        }

        default:
            ALOGE("shouldn't be here");
            break;
    }
}
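
// The handler above receives the notifications MediaCodec posts once it has been
// put into asynchronous mode with setCallback(): every event arrives as a
// kWhatCallbackNotify message whose "callbackID" selects CB_INPUT_AVAILABLE,
// CB_OUTPUT_AVAILABLE, CB_OUTPUT_FORMAT_CHANGED or CB_ERROR. A minimal sketch of
// the wiring (the statements would live in this class's codec initialization
// path; 'mCallbackLooper', 'mCodecCallbackHandler' and 'mCodec' stand for the
// members this class uses for the HEIC encoder):
#if 0
    // Handlers only receive messages after being registered with a looper.
    mCallbackLooper->registerHandler(mCodecCallbackHandler);

    // Route all codec notifications to CodecCallbackHandler::onMessageReceived().
    sp<AMessage> asyncNotify = new AMessage(kWhatCallbackNotify, mCodecCallbackHandler);
    mCodec->setCallback(asyncNotify);
#endif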

}; // namespace camera3
}; // namespace android