/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera3-HeicCompositeStream"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <linux/memfd.h>
#include <pthread.h>
#include <sys/syscall.h>

#include <android/hardware/camera/device/3.5/types.h>
#include <libyuv.h>
#include <gui/Surface.h>
#include <utils/Log.h>
#include <utils/Trace.h>

#include <mediadrm/ICrypto.h>
#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/MediaDefs.h>
#include <media/stagefright/MediaCodecConstants.h>

#include "common/CameraDeviceBase.h"
#include "utils/ExifUtils.h"
#include "HeicEncoderInfoManager.h"
#include "HeicCompositeStream.h"

using android::hardware::camera::device::V3_5::CameraBlob;
using android::hardware::camera::device::V3_5::CameraBlobId;

namespace android {
namespace camera3 {

HeicCompositeStream::HeicCompositeStream(wp<CameraDeviceBase> device,
        wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
        CompositeStream(device, cb),
        mUseHeic(false),
        mNumOutputTiles(1),
        mOutputWidth(0),
        mOutputHeight(0),
        mMaxHeicBufferSize(0),
        mGridWidth(HeicEncoderInfoManager::kGridWidth),
        mGridHeight(HeicEncoderInfoManager::kGridHeight),
        mGridRows(1),
        mGridCols(1),
        mUseGrid(false),
        mAppSegmentStreamId(-1),
        mAppSegmentSurfaceId(-1),
        mMainImageStreamId(-1),
        mMainImageSurfaceId(-1),
        mYuvBufferAcquired(false),
        mProducerListener(new ProducerListener()),
        mDequeuedOutputBufferCnt(0),
        mLockedAppSegmentBufferCnt(0),
        mCodecOutputCounter(0),
        mQuality(-1),
        mGridTimestampUs(0) {
}

HeicCompositeStream::~HeicCompositeStream() {
    // Call deinitCodec in case stream hasn't been deleted yet to avoid any
    // memory/resource leak.
    deinitCodec();

    mInputAppSegmentBuffers.clear();
    mCodecOutputBuffers.clear();

    mAppSegmentStreamId = -1;
    mAppSegmentSurfaceId = -1;
    mAppSegmentConsumer.clear();
    mAppSegmentSurface.clear();

    mMainImageStreamId = -1;
    mMainImageSurfaceId = -1;
    mMainImageConsumer.clear();
    mMainImageSurface.clear();
}

bool HeicCompositeStream::isHeicCompositeStream(const sp<Surface> &surface) {
    ANativeWindow *anw = surface.get();
    status_t err;
    int format;
    if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
        String8 msg = String8::format("Failed to query Surface format: %s (%d)", strerror(-err),
                err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return false;
    }

    int dataspace;
    if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
        String8 msg = String8::format("Failed to query Surface dataspace: %s (%d)", strerror(-err),
                err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return false;
    }

    return ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == HAL_DATASPACE_HEIF));
}

status_t HeicCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
        bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
        camera3_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
        std::vector<int> *surfaceIds, int /*streamSetId*/, bool /*isShared*/) {

    sp<CameraDeviceBase> device = mDevice.promote();
    if (!device.get()) {
        ALOGE("%s: Invalid camera device!", __FUNCTION__);
        return NO_INIT;
    }

    status_t res = initializeCodec(width, height, device);
    if (res != OK) {
        ALOGE("%s: Failed to initialize HEIC/HEVC codec: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return NO_INIT;
    }

    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    BufferQueue::createBufferQueue(&producer, &consumer);
    mAppSegmentConsumer = new CpuConsumer(consumer, kMaxAcquiredAppSegment);
    mAppSegmentConsumer->setFrameAvailableListener(this);
    mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
    mAppSegmentSurface = new Surface(producer);

    mStaticInfo = device->info();

    res = device->createStream(mAppSegmentSurface, mAppSegmentMaxSize, 1, format,
            kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId, surfaceIds);
    if (res == OK) {
        mAppSegmentSurfaceId = (*surfaceIds)[0];
    } else {
        ALOGE("%s: Failed to create JPEG App segment stream: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    if (!mUseGrid) {
        res = mCodec->createInputSurface(&producer);
        if (res != OK) {
            ALOGE("%s: Failed to create input surface for Heic codec: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    } else {
        BufferQueue::createBufferQueue(&producer, &consumer);
        mMainImageConsumer = new CpuConsumer(consumer, 1);
        mMainImageConsumer->setFrameAvailableListener(this);
        mMainImageConsumer->setName(String8("Camera3-HeicComposite-HevcInputYUVStream"));
    }
    mMainImageSurface = new Surface(producer);

    res = mCodec->start();
    if (res != OK) {
        ALOGE("%s: Failed to start codec: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    std::vector<int> sourceSurfaceId;
    // Use YUV_888 format if framework tiling is needed.
    int srcStreamFmt = mUseGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    res = device->createStream(mMainImageSurface, width, height, srcStreamFmt, kHeifDataSpace,
            rotation, id, physicalCameraId, &sourceSurfaceId);
    if (res == OK) {
        mMainImageSurfaceId = sourceSurfaceId[0];
        mMainImageStreamId = *id;
    } else {
        ALOGE("%s: Failed to create main image stream: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    mOutputSurface = consumers[0];
    res = registerCompositeStreamListener(getStreamId());
    if (res != OK) {
        ALOGE("%s: Failed to register HAL main image stream", __FUNCTION__);
        return res;
    }

    initCopyRowFunction(width);
    return res;
}

status_t HeicCompositeStream::deleteInternalStreams() {
    requestExit();
    auto res = join();
    if (res != OK) {
        ALOGE("%s: Failed to join with the main processing thread: %s (%d)", __FUNCTION__,
                strerror(-res), res);
    }

    deinitCodec();

    if (mAppSegmentStreamId >= 0) {
        sp<CameraDeviceBase> device = mDevice.promote();
        if (!device.get()) {
            ALOGE("%s: Invalid camera device!", __FUNCTION__);
            return NO_INIT;
        }

        res = device->deleteStream(mAppSegmentStreamId);
        mAppSegmentStreamId = -1;
    }

    if (mOutputSurface != nullptr) {
        mOutputSurface->disconnect(NATIVE_WINDOW_API_CAMERA);
        mOutputSurface.clear();
    }
    return res;
}

void HeicCompositeStream::onBufferReleased(const BufferInfo& bufferInfo) {
    Mutex::Autolock l(mMutex);

    if (bufferInfo.mError) return;

    mCodecOutputBufferTimestamps.push(bufferInfo.mTimestamp);
    ALOGV("%s: [%" PRId64 "]: Adding codecOutputBufferTimestamp (%zu timestamps in total)",
            __FUNCTION__, bufferInfo.mTimestamp, mCodecOutputBufferTimestamps.size());
}

// We need to get the settings early to handle the case where the codec output
// arrives earlier than result metadata.
void HeicCompositeStream::onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId,
        const CameraMetadata& settings) {
    ATRACE_ASYNC_BEGIN("HEIC capture", frameNumber);

    Mutex::Autolock l(mMutex);
    if (mErrorState || (streamId != getStreamId())) {
        return;
    }

    mPendingCaptureResults.emplace(frameNumber, CameraMetadata());

    camera_metadata_ro_entry entry;

    int32_t orientation = 0;
    entry = settings.find(ANDROID_JPEG_ORIENTATION);
    if (entry.count == 1) {
        orientation = entry.data.i32[0];
    }

    int32_t quality = kDefaultJpegQuality;
    entry = settings.find(ANDROID_JPEG_QUALITY);
    if (entry.count == 1) {
        quality = entry.data.i32[0];
    }

    mSettingsByFrameNumber[frameNumber] = std::make_pair(orientation, quality);
}

void HeicCompositeStream::onFrameAvailable(const BufferItem& item) {
    if (item.mDataSpace == static_cast<android_dataspace>(kAppSegmentDataSpace)) {
        ALOGV("%s: JPEG APP segments buffer with ts: %" PRIu64 " ms. arrived!",
                __func__, ns2ms(item.mTimestamp));

        Mutex::Autolock l(mMutex);
        if (!mErrorState) {
            mInputAppSegmentBuffers.push_back(item.mTimestamp);
            mInputReadyCondition.signal();
        }
    } else if (item.mDataSpace == kHeifDataSpace) {
        ALOGV("%s: YUV_888 buffer with ts: %" PRIu64 " ms. arrived!",
                __func__, ns2ms(item.mTimestamp));

        Mutex::Autolock l(mMutex);
        if (!mUseGrid) {
            ALOGE("%s: YUV_888 internal stream is only supported for HEVC tiling",
                    __FUNCTION__);
            return;
        }
        if (!mErrorState) {
            mInputYuvBuffers.push_back(item.mTimestamp);
            mInputReadyCondition.signal();
        }
    } else {
        ALOGE("%s: Unexpected data space: 0x%x", __FUNCTION__, item.mDataSpace);
    }
}
297
298status_t HeicCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
299 const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/) {
300 if (compositeOutput == nullptr) {
301 return BAD_VALUE;
302 }
303
304 compositeOutput->clear();
305
306 bool useGrid, useHeic;
307 bool isSizeSupported = isSizeSupportedByHeifEncoder(
308 streamInfo.width, streamInfo.height, &useHeic, &useGrid, nullptr);
309 if (!isSizeSupported) {
310 // Size is not supported by either encoder.
311 return OK;
312 }
313
314 compositeOutput->insert(compositeOutput->end(), 2, streamInfo);
315
316 // JPEG APPS segments Blob stream info
317 (*compositeOutput)[0].width = calcAppSegmentMaxSize(ch);
318 (*compositeOutput)[0].height = 1;
319 (*compositeOutput)[0].format = HAL_PIXEL_FORMAT_BLOB;
320 (*compositeOutput)[0].dataSpace = kAppSegmentDataSpace;
321 (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
322
323 // YUV/IMPLEMENTATION_DEFINED stream info
324 (*compositeOutput)[1].width = streamInfo.width;
325 (*compositeOutput)[1].height = streamInfo.height;
326 (*compositeOutput)[1].format = useGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
327 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
328 (*compositeOutput)[1].dataSpace = kHeifDataSpace;
329 (*compositeOutput)[1].consumerUsage = useHeic ? GRALLOC_USAGE_HW_IMAGE_ENCODER :
330 useGrid ? GRALLOC_USAGE_SW_READ_OFTEN : GRALLOC_USAGE_HW_VIDEO_ENCODER;
331
332 return NO_ERROR;
333}
334
335bool HeicCompositeStream::isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
Chong Zhang688abaa2019-05-17 16:32:23 -0700336 bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName) {
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -0800337 static HeicEncoderInfoManager& heicManager = HeicEncoderInfoManager::getInstance();
Chong Zhang688abaa2019-05-17 16:32:23 -0700338 return heicManager.isSizeSupported(width, height, useHeic, useGrid, stall, hevcName);
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -0800339}
340
341bool HeicCompositeStream::isInMemoryTempFileSupported() {
342 int memfd = syscall(__NR_memfd_create, "HEIF-try-memfd", MFD_CLOEXEC);
343 if (memfd == -1) {
344 if (errno != ENOSYS) {
345 ALOGE("%s: Failed to create tmpfs file. errno %d", __FUNCTION__, errno);
346 }
347 return false;
348 }
349 close(memfd);
350 return true;
351}
352
353void HeicCompositeStream::onHeicOutputFrameAvailable(
354 const CodecOutputBufferInfo& outputBufferInfo) {
355 Mutex::Autolock l(mMutex);
356
357 ALOGV("%s: index %d, offset %d, size %d, time %" PRId64 ", flags 0x%x",
358 __FUNCTION__, outputBufferInfo.index, outputBufferInfo.offset,
359 outputBufferInfo.size, outputBufferInfo.timeUs, outputBufferInfo.flags);
360
361 if (!mErrorState) {
362 if ((outputBufferInfo.size > 0) &&
363 ((outputBufferInfo.flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0)) {
364 mCodecOutputBuffers.push_back(outputBufferInfo);
365 mInputReadyCondition.signal();
366 } else {
Shuzhen Wang3d00ee52019-09-25 14:19:28 -0700367 ALOGV("%s: Releasing output buffer: size %d flags: 0x%x ", __FUNCTION__,
368 outputBufferInfo.size, outputBufferInfo.flags);
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -0800369 mCodec->releaseOutputBuffer(outputBufferInfo.index);
370 }
371 } else {
372 mCodec->releaseOutputBuffer(outputBufferInfo.index);
373 }
374}
375
376void HeicCompositeStream::onHeicInputFrameAvailable(int32_t index) {
377 Mutex::Autolock l(mMutex);
378
379 if (!mUseGrid) {
380 ALOGE("%s: Codec YUV input mode must only be used for Hevc tiling mode", __FUNCTION__);
381 return;
382 }
383
384 mCodecInputBuffers.push_back(index);
385 mInputReadyCondition.signal();
386}
387
388void HeicCompositeStream::onHeicFormatChanged(sp<AMessage>& newFormat) {
389 if (newFormat == nullptr) {
390 ALOGE("%s: newFormat must not be null!", __FUNCTION__);
391 return;
392 }
393
394 Mutex::Autolock l(mMutex);
395
396 AString mime;
397 AString mimeHeic(MIMETYPE_IMAGE_ANDROID_HEIC);
398 newFormat->findString(KEY_MIME, &mime);
399 if (mime != mimeHeic) {
400 // For HEVC codec, below keys need to be filled out or overwritten so that the
401 // muxer can handle them as HEIC output image.
402 newFormat->setString(KEY_MIME, mimeHeic);
403 newFormat->setInt32(KEY_WIDTH, mOutputWidth);
404 newFormat->setInt32(KEY_HEIGHT, mOutputHeight);
405 if (mUseGrid) {
406 newFormat->setInt32(KEY_TILE_WIDTH, mGridWidth);
407 newFormat->setInt32(KEY_TILE_HEIGHT, mGridHeight);
408 newFormat->setInt32(KEY_GRID_ROWS, mGridRows);
409 newFormat->setInt32(KEY_GRID_COLUMNS, mGridCols);
410 }
411 }
412 newFormat->setInt32(KEY_IS_DEFAULT, 1 /*isPrimary*/);
413
414 int32_t gridRows, gridCols;
415 if (newFormat->findInt32(KEY_GRID_ROWS, &gridRows) &&
416 newFormat->findInt32(KEY_GRID_COLUMNS, &gridCols)) {
417 mNumOutputTiles = gridRows * gridCols;
418 } else {
419 mNumOutputTiles = 1;
420 }
421
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -0800422 mFormat = newFormat;
Shuzhen Wang3d00ee52019-09-25 14:19:28 -0700423
424 ALOGV("%s: mNumOutputTiles is %zu", __FUNCTION__, mNumOutputTiles);
425 mInputReadyCondition.signal();
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -0800426}
427
428void HeicCompositeStream::onHeicCodecError() {
429 Mutex::Autolock l(mMutex);
430 mErrorState = true;
431}
432
433status_t HeicCompositeStream::configureStream() {
434 if (isRunning()) {
435 // Processing thread is already running, nothing more to do.
436 return NO_ERROR;
437 }
438
439 if (mOutputSurface.get() == nullptr) {
440 ALOGE("%s: No valid output surface set!", __FUNCTION__);
441 return NO_INIT;
442 }
443
444 auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
445 if (res != OK) {
446 ALOGE("%s: Unable to connect to native window for stream %d",
447 __FUNCTION__, mMainImageStreamId);
448 return res;
449 }
450
451 if ((res = native_window_set_buffers_format(mOutputSurface.get(), HAL_PIXEL_FORMAT_BLOB))
452 != OK) {
453 ALOGE("%s: Unable to configure stream buffer format for stream %d", __FUNCTION__,
454 mMainImageStreamId);
455 return res;
456 }
457
458 ANativeWindow *anwConsumer = mOutputSurface.get();
459 int maxConsumerBuffers;
460 if ((res = anwConsumer->query(anwConsumer, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
461 &maxConsumerBuffers)) != OK) {
462 ALOGE("%s: Unable to query consumer undequeued"
463 " buffer count for stream %d", __FUNCTION__, mMainImageStreamId);
464 return res;
465 }
466
467 // Cannot use SourceSurface buffer count since it could be codec's 512*512 tile
468 // buffer count.
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -0800469 if ((res = native_window_set_buffer_count(
Shuzhen Wang3d00ee52019-09-25 14:19:28 -0700470 anwConsumer, kMaxOutputSurfaceProducerCount + maxConsumerBuffers)) != OK) {
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -0800471 ALOGE("%s: Unable to set buffer count for stream %d", __FUNCTION__, mMainImageStreamId);
472 return res;
473 }
474
475 if ((res = native_window_set_buffers_dimensions(anwConsumer, mMaxHeicBufferSize, 1)) != OK) {
476 ALOGE("%s: Unable to set buffer dimension %zu x 1 for stream %d: %s (%d)",
477 __FUNCTION__, mMaxHeicBufferSize, mMainImageStreamId, strerror(-res), res);
478 return res;
479 }
480
481 run("HeicCompositeStreamProc");
482
483 return NO_ERROR;
484}
485
486status_t HeicCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
487 Vector<int32_t>* /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
488 if (outSurfaceMap->find(mAppSegmentStreamId) == outSurfaceMap->end()) {
489 (*outSurfaceMap)[mAppSegmentStreamId] = std::vector<size_t>();
490 outputStreamIds->push_back(mAppSegmentStreamId);
491 }
492 (*outSurfaceMap)[mAppSegmentStreamId].push_back(mAppSegmentSurfaceId);
493
494 if (outSurfaceMap->find(mMainImageStreamId) == outSurfaceMap->end()) {
495 (*outSurfaceMap)[mMainImageStreamId] = std::vector<size_t>();
496 outputStreamIds->push_back(mMainImageStreamId);
497 }
498 (*outSurfaceMap)[mMainImageStreamId].push_back(mMainImageSurfaceId);
499
500 if (currentStreamId != nullptr) {
501 *currentStreamId = mMainImageStreamId;
502 }
503
504 return NO_ERROR;
505}
506
507void HeicCompositeStream::onShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) {
508 Mutex::Autolock l(mMutex);
509 if (mErrorState) {
510 return;
511 }
512
513 if (mSettingsByFrameNumber.find(resultExtras.frameNumber) != mSettingsByFrameNumber.end()) {
Shuzhen Wang3d00ee52019-09-25 14:19:28 -0700514 ALOGV("%s: [%" PRId64 "]: frameNumber %" PRId64, __FUNCTION__,
515 timestamp, resultExtras.frameNumber);
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -0800516 mFrameNumberMap.emplace(resultExtras.frameNumber, timestamp);
517 mSettingsByTimestamp[timestamp] = mSettingsByFrameNumber[resultExtras.frameNumber];
518 mSettingsByFrameNumber.erase(resultExtras.frameNumber);
519 mInputReadyCondition.signal();
520 }
521}
522
523void HeicCompositeStream::compilePendingInputLocked() {
524 while (!mSettingsByTimestamp.empty()) {
525 auto it = mSettingsByTimestamp.begin();
526 mPendingInputFrames[it->first].orientation = it->second.first;
527 mPendingInputFrames[it->first].quality = it->second.second;
528 mSettingsByTimestamp.erase(it);
Shuzhen Wang62f49ed2019-09-04 14:07:53 -0700529
530 // Set encoder quality if no inflight encoding
531 if (mPendingInputFrames.size() == 1) {
532 int32_t newQuality = mPendingInputFrames.begin()->second.quality;
533 updateCodecQualityLocked(newQuality);
534 }
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -0800535 }
536
Michael Gonzalezb5986a32019-10-09 15:38:17 -0700537 while (!mInputAppSegmentBuffers.empty()) {
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -0800538 CpuConsumer::LockedBuffer imgBuffer;
539 auto it = mInputAppSegmentBuffers.begin();
540 auto res = mAppSegmentConsumer->lockNextBuffer(&imgBuffer);
541 if (res == NOT_ENOUGH_DATA) {
Michael Gonzalezb5986a32019-10-09 15:38:17 -0700542 // Can not lock any more buffers.
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -0800543 break;
544 } else if ((res != OK) || (*it != imgBuffer.timestamp)) {
545 if (res != OK) {
546 ALOGE("%s: Error locking JPEG_APP_SEGMENTS image buffer: %s (%d)", __FUNCTION__,
547 strerror(-res), res);
548 } else {
549 ALOGE("%s: Expecting JPEG_APP_SEGMENTS buffer with time stamp: %" PRId64
550 " received buffer with time stamp: %" PRId64, __FUNCTION__,
551 *it, imgBuffer.timestamp);
Michael Gonzalezb5986a32019-10-09 15:38:17 -0700552 mAppSegmentConsumer->unlockBuffer(imgBuffer);
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -0800553 }
554 mPendingInputFrames[*it].error = true;
555 mInputAppSegmentBuffers.erase(it);
556 continue;
557 }
558
559 if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
560 (mPendingInputFrames[imgBuffer.timestamp].error)) {
561 mAppSegmentConsumer->unlockBuffer(imgBuffer);
562 } else {
563 mPendingInputFrames[imgBuffer.timestamp].appSegmentBuffer = imgBuffer;
Michael Gonzalezb5986a32019-10-09 15:38:17 -0700564 mLockedAppSegmentBufferCnt++;
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -0800565 }
566 mInputAppSegmentBuffers.erase(it);
567 }
568
569 while (!mInputYuvBuffers.empty() && !mYuvBufferAcquired) {
570 CpuConsumer::LockedBuffer imgBuffer;
571 auto it = mInputYuvBuffers.begin();
572 auto res = mMainImageConsumer->lockNextBuffer(&imgBuffer);
573 if (res == NOT_ENOUGH_DATA) {
Michael Gonzalezb5986a32019-10-09 15:38:17 -0700574 // Can not lock any more buffers.
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -0800575 break;
576 } else if (res != OK) {
577 ALOGE("%s: Error locking YUV_888 image buffer: %s (%d)", __FUNCTION__,
578 strerror(-res), res);
579 mPendingInputFrames[*it].error = true;
580 mInputYuvBuffers.erase(it);
581 continue;
582 } else if (*it != imgBuffer.timestamp) {
583 ALOGW("%s: Expecting YUV_888 buffer with time stamp: %" PRId64 " received buffer with "
584 "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
585 mPendingInputFrames[*it].error = true;
586 mInputYuvBuffers.erase(it);
587 continue;
588 }
589
590 if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
591 (mPendingInputFrames[imgBuffer.timestamp].error)) {
592 mMainImageConsumer->unlockBuffer(imgBuffer);
593 } else {
594 mPendingInputFrames[imgBuffer.timestamp].yuvBuffer = imgBuffer;
595 mYuvBufferAcquired = true;
596 }
597 mInputYuvBuffers.erase(it);
598 }
599
600 while (!mCodecOutputBuffers.empty()) {
601 auto it = mCodecOutputBuffers.begin();
602 // Bitstream buffer timestamp doesn't necessarily directly correlate with input
603 // buffer timestamp. Assume encoder input to output is FIFO, use a queue
604 // to look up timestamp.
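        // As an illustration: with a 2x2 grid (mNumOutputTiles == 4), four
        // consecutive bitstream buffers are attributed to the timestamp at the
        // front of the queue, and that entry is popped only after the fourth
        // tile has been counted below.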
        int64_t bufferTime = -1;
        if (mCodecOutputBufferTimestamps.empty()) {
            ALOGV("%s: Failed to find buffer timestamp for codec output buffer!", __FUNCTION__);
            break;
        } else {
            // Direct mapping between camera timestamp (in ns) and codec timestamp (in us).
            bufferTime = mCodecOutputBufferTimestamps.front();
            mCodecOutputCounter++;
            if (mCodecOutputCounter == mNumOutputTiles) {
                mCodecOutputBufferTimestamps.pop();
                mCodecOutputCounter = 0;
            }

            mPendingInputFrames[bufferTime].codecOutputBuffers.push_back(*it);
            ALOGV("%s: [%" PRId64 "]: Pushing codecOutputBuffers (time %" PRId64 " us)",
                    __FUNCTION__, bufferTime, it->timeUs);
        }
        mCodecOutputBuffers.erase(it);
    }

    while (!mFrameNumberMap.empty()) {
        auto it = mFrameNumberMap.begin();
        mPendingInputFrames[it->second].frameNumber = it->first;
        ALOGV("%s: [%" PRId64 "]: frameNumber is %" PRId64, __FUNCTION__, it->second, it->first);
        mFrameNumberMap.erase(it);
    }

    while (!mCaptureResults.empty()) {
        auto it = mCaptureResults.begin();
        // Negative timestamp indicates that something went wrong during the capture result
        // collection process.
        if (it->first >= 0) {
            if (mPendingInputFrames[it->first].frameNumber == std::get<0>(it->second)) {
                mPendingInputFrames[it->first].result =
                        std::make_unique<CameraMetadata>(std::get<1>(it->second));
            } else {
                ALOGE("%s: Capture result frameNumber/timestamp mapping changed between "
                        "shutter and capture result!", __FUNCTION__);
            }
        }
        mCaptureResults.erase(it);
    }

    // mErrorFrameNumbers stores frame number of dropped buffers.
    auto it = mErrorFrameNumbers.begin();
    while (it != mErrorFrameNumbers.end()) {
        bool frameFound = false;
        for (auto &inputFrame : mPendingInputFrames) {
            if (inputFrame.second.frameNumber == *it) {
                inputFrame.second.error = true;
                frameFound = true;
                break;
            }
        }

        if (frameFound) {
            it = mErrorFrameNumbers.erase(it);
        } else {
            ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__,
                    *it);
            it++;
        }
    }

    // Distribute codec input buffers to be filled out from YUV output
    for (auto it = mPendingInputFrames.begin();
            it != mPendingInputFrames.end() && mCodecInputBuffers.size() > 0; it++) {
        InputFrame& inputFrame(it->second);
        if (inputFrame.codecInputCounter < mGridRows * mGridCols) {
            // Available input tiles that are required for the current input
            // image.
            size_t newInputTiles = std::min(mCodecInputBuffers.size(),
                    mGridRows * mGridCols - inputFrame.codecInputCounter);
            for (size_t i = 0; i < newInputTiles; i++) {
                CodecInputBufferInfo inputInfo =
                        { mCodecInputBuffers[0], mGridTimestampUs++, inputFrame.codecInputCounter };
                inputFrame.codecInputBuffers.push_back(inputInfo);

                mCodecInputBuffers.erase(mCodecInputBuffers.begin());
                inputFrame.codecInputCounter++;
            }
            break;
        }
    }
}

bool HeicCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*out*/) {
    if (currentTs == nullptr) {
        return false;
    }

    bool newInputAvailable = false;
    for (auto& it : mPendingInputFrames) {
        // New input is considered to be available only if:
        // 1. input buffers are ready, or
        // 2. App segment and muxer is created, or
        // 3. A codec output tile is ready, and an output buffer is available.
        // This makes sure that muxer gets created only when an output tile is
        // generated, because right now we only handle 1 HEIC output buffer at a
        // time (max dequeued buffer count is 1).
        bool appSegmentReady = (it.second.appSegmentBuffer.data != nullptr) &&
                !it.second.appSegmentWritten && it.second.result != nullptr &&
                it.second.muxer != nullptr;
        bool codecOutputReady = !it.second.codecOutputBuffers.empty();
        bool codecInputReady = (it.second.yuvBuffer.data != nullptr) &&
                (!it.second.codecInputBuffers.empty());
        bool hasOutputBuffer = it.second.muxer != nullptr ||
                (mDequeuedOutputBufferCnt < kMaxOutputSurfaceProducerCount);
        if ((!it.second.error) &&
                (it.first < *currentTs) &&
                (appSegmentReady || (codecOutputReady && hasOutputBuffer) || codecInputReady)) {
            *currentTs = it.first;
            if (it.second.format == nullptr && mFormat != nullptr) {
                it.second.format = mFormat->dup();
            }
            newInputAvailable = true;
            break;
        }
    }

    return newInputAvailable;
}
727
728int64_t HeicCompositeStream::getNextFailingInputLocked(int64_t *currentTs /*out*/) {
729 int64_t res = -1;
730 if (currentTs == nullptr) {
731 return res;
732 }
733
734 for (const auto& it : mPendingInputFrames) {
735 if (it.second.error && !it.second.errorNotified && (it.first < *currentTs)) {
736 *currentTs = it.first;
737 res = it.second.frameNumber;
738 break;
739 }
740 }
741
742 return res;
743}
744
745status_t HeicCompositeStream::processInputFrame(nsecs_t timestamp,
746 InputFrame &inputFrame) {
747 ATRACE_CALL();
748 status_t res = OK;
749
Shuzhen Wange7f4b462019-02-12 08:43:07 -0800750 bool appSegmentReady = inputFrame.appSegmentBuffer.data != nullptr &&
Shuzhen Wang3d00ee52019-09-25 14:19:28 -0700751 !inputFrame.appSegmentWritten && inputFrame.result != nullptr &&
752 inputFrame.muxer != nullptr;
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -0800753 bool codecOutputReady = inputFrame.codecOutputBuffers.size() > 0;
754 bool codecInputReady = inputFrame.yuvBuffer.data != nullptr &&
Shuzhen Wang3d00ee52019-09-25 14:19:28 -0700755 !inputFrame.codecInputBuffers.empty();
756 bool hasOutputBuffer = inputFrame.muxer != nullptr ||
757 (mDequeuedOutputBufferCnt < kMaxOutputSurfaceProducerCount);
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -0800758
Shuzhen Wang3d00ee52019-09-25 14:19:28 -0700759 ALOGV("%s: [%" PRId64 "]: appSegmentReady %d, codecOutputReady %d, codecInputReady %d,"
760 " dequeuedOutputBuffer %d", __FUNCTION__, timestamp, appSegmentReady,
761 codecOutputReady, codecInputReady, mDequeuedOutputBufferCnt);
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -0800762
763 // Handle inputs for Hevc tiling
764 if (codecInputReady) {
765 res = processCodecInputFrame(inputFrame);
766 if (res != OK) {
767 ALOGE("%s: Failed to process codec input frame: %s (%d)", __FUNCTION__,
768 strerror(-res), res);
769 return res;
770 }
771 }
772
Shuzhen Wang3d00ee52019-09-25 14:19:28 -0700773 if (!(codecOutputReady && hasOutputBuffer) && !appSegmentReady) {
774 return OK;
775 }
776
777 // Initialize and start muxer if not yet done so. In this case,
778 // codecOutputReady must be true. Otherwise, appSegmentReady is guaranteed
779 // to be false, and the function must have returned early.
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -0800780 if (inputFrame.muxer == nullptr) {
781 res = startMuxerForInputFrame(timestamp, inputFrame);
782 if (res != OK) {
783 ALOGE("%s: Failed to create and start muxer: %s (%d)", __FUNCTION__,
784 strerror(-res), res);
785 return res;
786 }
787 }
788
789 // Write JPEG APP segments data to the muxer.
Shuzhen Wang3d00ee52019-09-25 14:19:28 -0700790 if (appSegmentReady) {
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -0800791 res = processAppSegment(timestamp, inputFrame);
792 if (res != OK) {
793 ALOGE("%s: Failed to process JPEG APP segments: %s (%d)", __FUNCTION__,
794 strerror(-res), res);
795 return res;
796 }
797 }
798
799 // Write media codec bitstream buffers to muxer.
800 while (!inputFrame.codecOutputBuffers.empty()) {
801 res = processOneCodecOutputFrame(timestamp, inputFrame);
802 if (res != OK) {
803 ALOGE("%s: Failed to process codec output frame: %s (%d)", __FUNCTION__,
804 strerror(-res), res);
805 return res;
806 }
807 }
808
Michael Gonzalezb5986a32019-10-09 15:38:17 -0700809 if (inputFrame.pendingOutputTiles == 0) {
810 if (inputFrame.appSegmentWritten) {
811 res = processCompletedInputFrame(timestamp, inputFrame);
812 if (res != OK) {
813 ALOGE("%s: Failed to process completed input frame: %s (%d)", __FUNCTION__,
814 strerror(-res), res);
815 return res;
816 }
817 } else if (mLockedAppSegmentBufferCnt == kMaxAcquiredAppSegment) {
818 ALOGE("%s: Out-of-order app segment buffers reaches limit %u", __FUNCTION__,
819 kMaxAcquiredAppSegment);
820 return INVALID_OPERATION;
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -0800821 }
822 }
823
824 return res;
825}
826
827status_t HeicCompositeStream::startMuxerForInputFrame(nsecs_t timestamp, InputFrame &inputFrame) {
828 sp<ANativeWindow> outputANW = mOutputSurface;
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -0800829
830 auto res = outputANW->dequeueBuffer(mOutputSurface.get(), &inputFrame.anb, &inputFrame.fenceFd);
831 if (res != OK) {
832 ALOGE("%s: Error retrieving output buffer: %s (%d)", __FUNCTION__, strerror(-res),
833 res);
834 return res;
835 }
Shuzhen Wang3d00ee52019-09-25 14:19:28 -0700836 mDequeuedOutputBufferCnt++;
Shuzhen Wang68ac7ad2019-01-30 14:03:28 -0800837
838 // Combine current thread id, stream id and timestamp to uniquely identify image.
839 std::ostringstream tempOutputFile;
840 tempOutputFile << "HEIF-" << pthread_self() << "-"
841 << getStreamId() << "-" << timestamp;
842 inputFrame.fileFd = syscall(__NR_memfd_create, tempOutputFile.str().c_str(), MFD_CLOEXEC);
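    // memfd_create(2) returns an anonymous, memory-backed file descriptor, so the
    // muxer can write to a regular, seekable "file" without touching persistent
    // storage (see isInMemoryTempFileSupported() above).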
    if (inputFrame.fileFd < 0) {
        ALOGE("%s: Failed to create file %s. Error no is %d", __FUNCTION__,
                tempOutputFile.str().c_str(), errno);
        return NO_INIT;
    }
    inputFrame.muxer = new MediaMuxer(inputFrame.fileFd, MediaMuxer::OUTPUT_FORMAT_HEIF);
    if (inputFrame.muxer == nullptr) {
        ALOGE("%s: Failed to create MediaMuxer for file fd %d",
                __FUNCTION__, inputFrame.fileFd);
        return NO_INIT;
    }

    res = inputFrame.muxer->setOrientationHint(inputFrame.orientation);
    if (res != OK) {
        ALOGE("%s: Failed to setOrientationHint: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    ssize_t trackId = inputFrame.muxer->addTrack(inputFrame.format);
    if (trackId < 0) {
        ALOGE("%s: Failed to addTrack to the muxer: %zd", __FUNCTION__, trackId);
        return NO_INIT;
    }

    inputFrame.trackIndex = trackId;
    inputFrame.pendingOutputTiles = mNumOutputTiles;

    res = inputFrame.muxer->start();
    if (res != OK) {
        ALOGE("%s: Failed to start MediaMuxer: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    ALOGV("%s: [%" PRId64 "]: Muxer started for inputFrame", __FUNCTION__,
            timestamp);
    return OK;
}

status_t HeicCompositeStream::processAppSegment(nsecs_t timestamp, InputFrame &inputFrame) {
    size_t app1Size = 0;
    auto appSegmentSize = findAppSegmentsSize(inputFrame.appSegmentBuffer.data,
            inputFrame.appSegmentBuffer.width * inputFrame.appSegmentBuffer.height,
            &app1Size);
    if (appSegmentSize == 0) {
        ALOGE("%s: Failed to find JPEG APP segment size", __FUNCTION__);
        return NO_INIT;
    }

    std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
    auto exifRes = exifUtils->initialize(inputFrame.appSegmentBuffer.data, app1Size);
    if (!exifRes) {
        ALOGE("%s: Failed to initialize ExifUtils object!", __FUNCTION__);
        return BAD_VALUE;
    }
    exifRes = exifUtils->setFromMetadata(*inputFrame.result, mStaticInfo,
            mOutputWidth, mOutputHeight);
    if (!exifRes) {
        ALOGE("%s: Failed to set Exif tags using metadata and main image sizes", __FUNCTION__);
        return BAD_VALUE;
    }
    exifRes = exifUtils->setOrientation(inputFrame.orientation);
    if (!exifRes) {
        ALOGE("%s: ExifUtils failed to set orientation", __FUNCTION__);
        return BAD_VALUE;
    }
    exifRes = exifUtils->generateApp1();
    if (!exifRes) {
        ALOGE("%s: ExifUtils failed to generate APP1 segment", __FUNCTION__);
        return BAD_VALUE;
    }

    unsigned int newApp1Length = exifUtils->getApp1Length();
    const uint8_t *newApp1Segment = exifUtils->getApp1Buffer();

    // Assemble the APP1 marker buffer required by MediaCodec
    uint8_t kExifApp1Marker[] = {'E', 'x', 'i', 'f', 0xFF, 0xE1, 0x00, 0x00};
    kExifApp1Marker[6] = static_cast<uint8_t>(newApp1Length >> 8);
    kExifApp1Marker[7] = static_cast<uint8_t>(newApp1Length & 0xFF);
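    // Bytes 6-7 of the marker hold the APP1 payload length in big-endian order;
    // as an illustration, a newApp1Length of 0x1234 is stored as 0x12 then 0x34.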
    size_t appSegmentBufferSize = sizeof(kExifApp1Marker) +
            appSegmentSize - app1Size + newApp1Length;
    uint8_t* appSegmentBuffer = new uint8_t[appSegmentBufferSize];
    memcpy(appSegmentBuffer, kExifApp1Marker, sizeof(kExifApp1Marker));
    memcpy(appSegmentBuffer + sizeof(kExifApp1Marker), newApp1Segment, newApp1Length);
    if (appSegmentSize - app1Size > 0) {
        memcpy(appSegmentBuffer + sizeof(kExifApp1Marker) + newApp1Length,
                inputFrame.appSegmentBuffer.data + app1Size, appSegmentSize - app1Size);
    }

    sp<ABuffer> aBuffer = new ABuffer(appSegmentBuffer, appSegmentBufferSize);
    auto res = inputFrame.muxer->writeSampleData(aBuffer, inputFrame.trackIndex,
            timestamp, MediaCodec::BUFFER_FLAG_MUXER_DATA);
    delete[] appSegmentBuffer;

    if (res != OK) {
        ALOGE("%s: Failed to write JPEG APP segments to muxer: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    ALOGV("%s: [%" PRId64 "]: appSegmentSize is %zu, width %d, height %d, app1Size %zu",
            __FUNCTION__, timestamp, appSegmentSize, inputFrame.appSegmentBuffer.width,
            inputFrame.appSegmentBuffer.height, app1Size);

    inputFrame.appSegmentWritten = true;
    // Release the buffer now so any pending input app segments can be processed
    mAppSegmentConsumer->unlockBuffer(inputFrame.appSegmentBuffer);
    inputFrame.appSegmentBuffer.data = nullptr;
    mLockedAppSegmentBufferCnt--;

    return OK;
}

status_t HeicCompositeStream::processCodecInputFrame(InputFrame &inputFrame) {
    for (auto& inputBuffer : inputFrame.codecInputBuffers) {
        sp<MediaCodecBuffer> buffer;
        auto res = mCodec->getInputBuffer(inputBuffer.index, &buffer);
        if (res != OK) {
            ALOGE("%s: Error getting codec input buffer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }

        // Copy one tile from source to destination.
        size_t tileX = inputBuffer.tileIndex % mGridCols;
        size_t tileY = inputBuffer.tileIndex / mGridCols;
        size_t top = mGridHeight * tileY;
        size_t left = mGridWidth * tileX;
        size_t width = (tileX == static_cast<size_t>(mGridCols) - 1) ?
                mOutputWidth - tileX * mGridWidth : mGridWidth;
        size_t height = (tileY == static_cast<size_t>(mGridRows) - 1) ?
                mOutputHeight - tileY * mGridHeight : mGridHeight;
        ALOGV("%s: inputBuffer tileIndex [%zu, %zu], top %zu, left %zu, width %zu, height %zu,"
                " timeUs %" PRId64, __FUNCTION__, tileX, tileY, top, left, width, height,
                inputBuffer.timeUs);

        res = copyOneYuvTile(buffer, inputFrame.yuvBuffer, top, left, width, height);
        if (res != OK) {
            ALOGE("%s: Failed to copy YUV tile %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }

        res = mCodec->queueInputBuffer(inputBuffer.index, 0, buffer->capacity(),
                inputBuffer.timeUs, 0, nullptr /*errorDetailMsg*/);
        if (res != OK) {
            ALOGE("%s: Failed to queueInputBuffer to Codec: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    }

    inputFrame.codecInputBuffers.clear();
    return OK;
}

status_t HeicCompositeStream::processOneCodecOutputFrame(nsecs_t timestamp,
        InputFrame &inputFrame) {
    auto it = inputFrame.codecOutputBuffers.begin();
    sp<MediaCodecBuffer> buffer;
    status_t res = mCodec->getOutputBuffer(it->index, &buffer);
    if (res != OK) {
        ALOGE("%s: Error getting Heic codec output buffer at index %d: %s (%d)",
                __FUNCTION__, it->index, strerror(-res), res);
        return res;
    }
    if (buffer == nullptr) {
        ALOGE("%s: Invalid Heic codec output buffer at index %d",
                __FUNCTION__, it->index);
        return BAD_VALUE;
    }

    sp<ABuffer> aBuffer = new ABuffer(buffer->data(), buffer->size());
    res = inputFrame.muxer->writeSampleData(
            aBuffer, inputFrame.trackIndex, timestamp, 0 /*flags*/);
    if (res != OK) {
        ALOGE("%s: Failed to write buffer index %d to muxer: %s (%d)",
                __FUNCTION__, it->index, strerror(-res), res);
        return res;
    }

    mCodec->releaseOutputBuffer(it->index);
    if (inputFrame.pendingOutputTiles == 0) {
        ALOGW("%s: Codec generated more tiles than expected!", __FUNCTION__);
    } else {
        inputFrame.pendingOutputTiles--;
    }

    // Log before erasing the entry: erase() invalidates 'it'.
    ALOGV("%s: [%" PRId64 "]: Output buffer index %d",
            __FUNCTION__, timestamp, it->index);
    inputFrame.codecOutputBuffers.erase(inputFrame.codecOutputBuffers.begin());

    return OK;
}

status_t HeicCompositeStream::processCompletedInputFrame(nsecs_t timestamp,
        InputFrame &inputFrame) {
    sp<ANativeWindow> outputANW = mOutputSurface;
    inputFrame.muxer->stop();

    // Copy the content of the file to memory.
    sp<GraphicBuffer> gb = GraphicBuffer::from(inputFrame.anb);
    void* dstBuffer;
    auto res = gb->lockAsync(GRALLOC_USAGE_SW_WRITE_OFTEN, &dstBuffer, inputFrame.fenceFd);
    if (res != OK) {
        ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    off_t fSize = lseek(inputFrame.fileFd, 0, SEEK_END);
    if (static_cast<size_t>(fSize) > mMaxHeicBufferSize - sizeof(CameraBlob)) {
        ALOGE("%s: Error: MediaMuxer output size %ld is larger than buffer size %zu",
                __FUNCTION__, fSize, mMaxHeicBufferSize - sizeof(CameraBlob));
        return BAD_VALUE;
    }

    lseek(inputFrame.fileFd, 0, SEEK_SET);
    ssize_t bytesRead = read(inputFrame.fileFd, dstBuffer, fSize);
    if (bytesRead < fSize) {
        ALOGE("%s: Only %zd of %ld bytes read", __FUNCTION__, bytesRead, fSize);
        return BAD_VALUE;
    }

    close(inputFrame.fileFd);
    inputFrame.fileFd = -1;

    // Fill in HEIC header
    uint8_t *header = static_cast<uint8_t*>(dstBuffer) + mMaxHeicBufferSize - sizeof(CameraBlob);
    struct CameraBlob *blobHeader = (struct CameraBlob *)header;
    // Must be in sync with CAMERA3_HEIC_BLOB_ID in android_media_Utils.cpp
    blobHeader->blobId = static_cast<CameraBlobId>(0x00FE);
    blobHeader->blobSize = fSize;
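    // Per the camera BLOB transport convention, the HEIF bitstream sits at the
    // start of the buffer while this CameraBlob trailer occupies the last
    // sizeof(CameraBlob) bytes; consumers locate the payload via blobSize.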

    res = native_window_set_buffers_timestamp(mOutputSurface.get(), timestamp);
    if (res != OK) {
        ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
                __FUNCTION__, getStreamId(), strerror(-res), res);
        return res;
    }

    res = outputANW->queueBuffer(mOutputSurface.get(), inputFrame.anb, /*fence*/ -1);
    if (res != OK) {
        ALOGE("%s: Failed to queueBuffer to Heic stream: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }
    inputFrame.anb = nullptr;
    mDequeuedOutputBufferCnt--;

    ALOGV("%s: [%" PRId64 "]", __FUNCTION__, timestamp);
    ATRACE_ASYNC_END("HEIC capture", inputFrame.frameNumber);
    return OK;
}


void HeicCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/) {
    if (inputFrame == nullptr) {
        return;
    }

    if (inputFrame->appSegmentBuffer.data != nullptr) {
        mAppSegmentConsumer->unlockBuffer(inputFrame->appSegmentBuffer);
        inputFrame->appSegmentBuffer.data = nullptr;
    }

    while (!inputFrame->codecOutputBuffers.empty()) {
        auto it = inputFrame->codecOutputBuffers.begin();
        ALOGV("%s: releaseOutputBuffer index %d", __FUNCTION__, it->index);
        mCodec->releaseOutputBuffer(it->index);
        inputFrame->codecOutputBuffers.erase(it);
    }

    if (inputFrame->yuvBuffer.data != nullptr) {
        mMainImageConsumer->unlockBuffer(inputFrame->yuvBuffer);
        inputFrame->yuvBuffer.data = nullptr;
        mYuvBufferAcquired = false;
    }

    while (!inputFrame->codecInputBuffers.empty()) {
        auto it = inputFrame->codecInputBuffers.begin();
        inputFrame->codecInputBuffers.erase(it);
    }

    if ((inputFrame->error || mErrorState) && !inputFrame->errorNotified) {
        notifyError(inputFrame->frameNumber);
        inputFrame->errorNotified = true;
    }

    if (inputFrame->fileFd >= 0) {
        close(inputFrame->fileFd);
        inputFrame->fileFd = -1;
    }

    if (inputFrame->anb != nullptr) {
        sp<ANativeWindow> outputANW = mOutputSurface;
        outputANW->cancelBuffer(mOutputSurface.get(), inputFrame->anb, /*fence*/ -1);
        inputFrame->anb = nullptr;
    }
}

void HeicCompositeStream::releaseInputFramesLocked() {
    auto it = mPendingInputFrames.begin();
    bool inputFrameDone = false;
    while (it != mPendingInputFrames.end()) {
        auto& inputFrame = it->second;
        if (inputFrame.error ||
                (inputFrame.appSegmentWritten && inputFrame.pendingOutputTiles == 0)) {
            releaseInputFrameLocked(&inputFrame);
            it = mPendingInputFrames.erase(it);
            inputFrameDone = true;
        } else {
            it++;
        }
    }

    // Update codec quality based on first upcoming input frame.
    // Note that when encoding is in surface mode, currently there is no
    // way for camera service to synchronize quality setting on a per-frame
    // basis: we don't get notification when codec is ready to consume a new
    // input frame. So we update codec quality on a best-effort basis.
    if (inputFrameDone) {
        auto firstPendingFrame = mPendingInputFrames.begin();
        if (firstPendingFrame != mPendingInputFrames.end()) {
            updateCodecQualityLocked(firstPendingFrame->second.quality);
        }
    }
}

status_t HeicCompositeStream::initializeCodec(uint32_t width, uint32_t height,
        const sp<CameraDeviceBase>& cameraDevice) {
    ALOGV("%s", __FUNCTION__);

    bool useGrid = false;
    AString hevcName;
    bool isSizeSupported = isSizeSupportedByHeifEncoder(width, height,
            &mUseHeic, &useGrid, nullptr, &hevcName);
    if (!isSizeSupported) {
        ALOGE("%s: Encoder doesn't support size %u x %u!",
                __FUNCTION__, width, height);
        return BAD_VALUE;
    }

    // Create Looper for MediaCodec.
    auto desiredMime = mUseHeic ? MIMETYPE_IMAGE_ANDROID_HEIC : MIMETYPE_VIDEO_HEVC;
    mCodecLooper = new ALooper;
    mCodecLooper->setName("Camera3-HeicComposite-MediaCodecLooper");
    status_t res = mCodecLooper->start(
            false,   // runOnCallingThread
            false,   // canCallJava
            PRIORITY_AUDIO);
    if (res != OK) {
        ALOGE("%s: Failed to start codec looper: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return NO_INIT;
    }

    // Create HEIC/HEVC codec.
    if (mUseHeic) {
        mCodec = MediaCodec::CreateByType(mCodecLooper, desiredMime, true /*encoder*/);
    } else {
        mCodec = MediaCodec::CreateByComponentName(mCodecLooper, hevcName);
    }
    if (mCodec == nullptr) {
        ALOGE("%s: Failed to create codec for %s", __FUNCTION__, desiredMime);
        return NO_INIT;
    }

    // Create Looper and handler for Codec callback.
    mCodecCallbackHandler = new CodecCallbackHandler(this);
    if (mCodecCallbackHandler == nullptr) {
        ALOGE("%s: Failed to create codec callback handler", __FUNCTION__);
        return NO_MEMORY;
    }
    mCallbackLooper = new ALooper;
    mCallbackLooper->setName("Camera3-HeicComposite-MediaCodecCallbackLooper");
    res = mCallbackLooper->start(
            false,   // runOnCallingThread
            false,   // canCallJava
            PRIORITY_AUDIO);
    if (res != OK) {
        ALOGE("%s: Failed to start media callback looper: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return NO_INIT;
    }
    mCallbackLooper->registerHandler(mCodecCallbackHandler);

    mAsyncNotify = new AMessage(kWhatCallbackNotify, mCodecCallbackHandler);
    res = mCodec->setCallback(mAsyncNotify);
    if (res != OK) {
        ALOGE("%s: Failed to set MediaCodec callback: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    // Create output format and configure the Codec.
    sp<AMessage> outputFormat = new AMessage();
    outputFormat->setString(KEY_MIME, desiredMime);
    outputFormat->setInt32(KEY_BITRATE_MODE, BITRATE_MODE_CQ);
    outputFormat->setInt32(KEY_QUALITY, kDefaultJpegQuality);
    // Ask codec to skip timestamp check and encode all frames.
    outputFormat->setInt64(KEY_MAX_PTS_GAP_TO_ENCODER, kNoFrameDropMaxPtsGap);

    int32_t gridWidth, gridHeight, gridRows, gridCols;
    if (useGrid || mUseHeic) {
        gridWidth = HeicEncoderInfoManager::kGridWidth;
        gridHeight = HeicEncoderInfoManager::kGridHeight;
        gridRows = (height + gridHeight - 1)/gridHeight;
        gridCols = (width + gridWidth - 1)/gridWidth;
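        // Ceiling division, so partial tiles along the right and bottom edges
        // still get a grid cell. As a purely illustrative example, assuming a
        // hypothetical 512x512 grid, a 4032x3024 image maps to 8 columns and
        // 6 rows.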

        if (mUseHeic) {
            outputFormat->setInt32(KEY_TILE_WIDTH, gridWidth);
            outputFormat->setInt32(KEY_TILE_HEIGHT, gridHeight);
            outputFormat->setInt32(KEY_GRID_COLUMNS, gridCols);
            outputFormat->setInt32(KEY_GRID_ROWS, gridRows);
        }

    } else {
        gridWidth = width;
        gridHeight = height;
        gridRows = 1;
        gridCols = 1;
    }

    outputFormat->setInt32(KEY_WIDTH, !useGrid ? width : gridWidth);
    outputFormat->setInt32(KEY_HEIGHT, !useGrid ? height : gridHeight);
    outputFormat->setInt32(KEY_I_FRAME_INTERVAL, 0);
    outputFormat->setInt32(KEY_COLOR_FORMAT,
            useGrid ? COLOR_FormatYUV420Flexible : COLOR_FormatSurface);
    outputFormat->setInt32(KEY_FRAME_RATE, useGrid ? gridRows * gridCols : kNoGridOpRate);
    // This only serves as a hint to encoder when encoding is not real-time.
    outputFormat->setInt32(KEY_OPERATING_RATE, useGrid ? kGridOpRate : kNoGridOpRate);

    res = mCodec->configure(outputFormat, nullptr /*nativeWindow*/,
            nullptr /*crypto*/, CONFIGURE_FLAG_ENCODE);
    if (res != OK) {
        ALOGE("%s: Failed to configure codec: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    mGridWidth = gridWidth;
    mGridHeight = gridHeight;
    mGridRows = gridRows;
    mGridCols = gridCols;
    mUseGrid = useGrid;
    mOutputWidth = width;
    mOutputHeight = height;
    mAppSegmentMaxSize = calcAppSegmentMaxSize(cameraDevice->info());
    mMaxHeicBufferSize = mOutputWidth * mOutputHeight * 3 / 2 + mAppSegmentMaxSize;

    return OK;
}

void HeicCompositeStream::deinitCodec() {
    ALOGV("%s", __FUNCTION__);
    if (mCodec != nullptr) {
        mCodec->stop();
        mCodec->release();
        mCodec.clear();
    }

    if (mCodecLooper != nullptr) {
        mCodecLooper->stop();
        mCodecLooper.clear();
    }

    if (mCallbackLooper != nullptr) {
        mCallbackLooper->stop();
        mCallbackLooper.clear();
    }

    mAsyncNotify.clear();
    mFormat.clear();
}

// Return the size of the complete list of APP segments; 0 indicates failure.
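// The buffer is expected to hold a sequence of JPEG APPn segments: each starts
// with a 0xFF 0xEn marker followed by a 2-byte big-endian length that counts
// the length field itself but not the marker, and a CameraBlob transport
// header at the very end of the buffer records the total segments size.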
1321size_t HeicCompositeStream::findAppSegmentsSize(const uint8_t* appSegmentBuffer,
1322 size_t maxSize, size_t *app1SegmentSize) {
1323 if (appSegmentBuffer == nullptr || app1SegmentSize == nullptr) {
1324 ALOGE("%s: Invalid input appSegmentBuffer %p, app1SegmentSize %p",
1325 __FUNCTION__, appSegmentBuffer, app1SegmentSize);
1326 return 0;
1327 }
1328
1329 size_t expectedSize = 0;
1330 // First check for EXIF transport header at the end of the buffer
1331 const uint8_t *header = appSegmentBuffer + (maxSize - sizeof(struct CameraBlob));
1332 const struct CameraBlob *blob = (const struct CameraBlob*)(header);
1333 if (blob->blobId != CameraBlobId::JPEG_APP_SEGMENTS) {
1334 ALOGE("%s: Invalid EXIF blobId %hu", __FUNCTION__, blob->blobId);
1335 return 0;
1336 }
1337
1338 expectedSize = blob->blobSize;
1339 if (expectedSize == 0 || expectedSize > maxSize - sizeof(struct CameraBlob)) {
1340 ALOGE("%s: Invalid blobSize %zu.", __FUNCTION__, expectedSize);
1341 return 0;
1342 }
1343
1344 uint32_t totalSize = 0;
1345
1346 // Verify APP1 marker (mandatory)
1347 uint8_t app1Marker[] = {0xFF, 0xE1};
1348 if (memcmp(appSegmentBuffer, app1Marker, sizeof(app1Marker))) {
1349 ALOGE("%s: Invalid APP1 marker: %x, %x", __FUNCTION__,
1350 appSegmentBuffer[0], appSegmentBuffer[1]);
1351 return 0;
1352 }
1353 totalSize += sizeof(app1Marker);
1354
1355 uint16_t app1Size = (static_cast<uint16_t>(appSegmentBuffer[totalSize]) << 8) +
1356 appSegmentBuffer[totalSize+1];
1357 totalSize += app1Size;
1358
1359 ALOGV("%s: Expected APP segments size %zu, APP1 segment size %u",
1360 __FUNCTION__, expectedSize, app1Size);
1361 while (totalSize < expectedSize) {
1362 if (appSegmentBuffer[totalSize] != 0xFF ||
1363 appSegmentBuffer[totalSize+1] <= 0xE1 ||
1364 appSegmentBuffer[totalSize+1] > 0xEF) {
1365 // Invalid APPn marker
1366 ALOGE("%s: Invalid APPn marker: %x, %x", __FUNCTION__,
1367 appSegmentBuffer[totalSize], appSegmentBuffer[totalSize+1]);
1368 return 0;
1369 }
1370 totalSize += 2;
1371
1372 uint16_t appnSize = (static_cast<uint16_t>(appSegmentBuffer[totalSize]) << 8) +
1373 appSegmentBuffer[totalSize+1];
1374 totalSize += appnSize;
1375 }
1376
1377 if (totalSize != expectedSize) {
1378 ALOGE("%s: Invalid JPEG APP segments: totalSize %u vs expected size %zu",
1379 __FUNCTION__, totalSize, expectedSize);
1380 return 0;
1381 }
1382
1383 *app1SegmentSize = app1Size + sizeof(app1Marker);
1384 return expectedSize;
1385}
1386
1387int64_t HeicCompositeStream::findTimestampInNsLocked(int64_t timeInUs) {
1388 for (const auto& fn : mFrameNumberMap) {
1389 if (timeInUs == ns2us(fn.second)) {
1390 return fn.second;
1391 }
1392 }
1393 for (const auto& inputFrame : mPendingInputFrames) {
1394 if (timeInUs == ns2us(inputFrame.first)) {
1395 return inputFrame.first;
1396 }
1397 }
1398 return -1;
1399}
1400
status_t HeicCompositeStream::copyOneYuvTile(sp<MediaCodecBuffer>& codecBuffer,
        const CpuConsumer::LockedBuffer& yuvBuffer,
        size_t top, size_t left, size_t width, size_t height) {
    ATRACE_CALL();

    // Get stride information for codecBuffer
    sp<ABuffer> imageData;
    if (!codecBuffer->meta()->findBuffer("image-data", &imageData)) {
        ALOGE("%s: Codec input buffer is not for image data!", __FUNCTION__);
        return BAD_VALUE;
    }
    if (imageData->size() != sizeof(MediaImage2)) {
        ALOGE("%s: Invalid codec input image size %zu, expected %zu",
                __FUNCTION__, imageData->size(), sizeof(MediaImage2));
        return BAD_VALUE;
    }
    MediaImage2* imageInfo = reinterpret_cast<MediaImage2*>(imageData->data());
    if (imageInfo->mType != MediaImage2::MEDIA_IMAGE_TYPE_YUV ||
            imageInfo->mBitDepth != 8 ||
            imageInfo->mBitDepthAllocated != 8 ||
            imageInfo->mNumPlanes != 3) {
        ALOGE("%s: Invalid codec input image info: mType %d, mBitDepth %d, "
                "mBitDepthAllocated %d, mNumPlanes %d!", __FUNCTION__,
                imageInfo->mType, imageInfo->mBitDepth,
                imageInfo->mBitDepthAllocated, imageInfo->mNumPlanes);
        return BAD_VALUE;
    }

    ALOGV("%s: yuvBuffer chromaStep %d, chromaStride %d",
            __FUNCTION__, yuvBuffer.chromaStep, yuvBuffer.chromaStride);
    ALOGV("%s: U offset %u, V offset %u, U rowInc %d, V rowInc %d, U colInc %d, V colInc %d",
            __FUNCTION__, imageInfo->mPlane[MediaImage2::U].mOffset,
            imageInfo->mPlane[MediaImage2::V].mOffset,
            imageInfo->mPlane[MediaImage2::U].mRowInc,
            imageInfo->mPlane[MediaImage2::V].mRowInc,
            imageInfo->mPlane[MediaImage2::U].mColInc,
            imageInfo->mPlane[MediaImage2::V].mColInc);

    // Y
    for (auto row = top; row < top+height; row++) {
        uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::Y].mOffset +
                imageInfo->mPlane[MediaImage2::Y].mRowInc * (row - top);
        mFnCopyRow(yuvBuffer.data+row*yuvBuffer.stride+left, dst, width);
    }

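    // Determine the chroma layout of the codec input buffer so the fastest
    // copy path can be used: whole-row copies when both buffers share the
    // same semi-planar layout, per-plane row copies when both are planar,
    // and a per-pixel fallback for everything else.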
    // U is Cb, V is Cr
    bool codecUPlaneFirst = imageInfo->mPlane[MediaImage2::V].mOffset >
            imageInfo->mPlane[MediaImage2::U].mOffset;
    uint32_t codecUvOffsetDiff = codecUPlaneFirst ?
            imageInfo->mPlane[MediaImage2::V].mOffset - imageInfo->mPlane[MediaImage2::U].mOffset :
            imageInfo->mPlane[MediaImage2::U].mOffset - imageInfo->mPlane[MediaImage2::V].mOffset;
    bool isCodecUvSemiplannar = (codecUvOffsetDiff == 1) &&
            (imageInfo->mPlane[MediaImage2::U].mRowInc ==
            imageInfo->mPlane[MediaImage2::V].mRowInc) &&
            (imageInfo->mPlane[MediaImage2::U].mColInc == 2) &&
            (imageInfo->mPlane[MediaImage2::V].mColInc == 2);
    bool isCodecUvPlannar =
            ((codecUPlaneFirst && codecUvOffsetDiff >=
                    imageInfo->mPlane[MediaImage2::U].mRowInc * imageInfo->mHeight/2) ||
            ((!codecUPlaneFirst && codecUvOffsetDiff >=
                    imageInfo->mPlane[MediaImage2::V].mRowInc * imageInfo->mHeight/2))) &&
            imageInfo->mPlane[MediaImage2::U].mColInc == 1 &&
            imageInfo->mPlane[MediaImage2::V].mColInc == 1;
    bool cameraUPlaneFirst = yuvBuffer.dataCr > yuvBuffer.dataCb;

    if (isCodecUvSemiplannar && yuvBuffer.chromaStep == 2 &&
            (codecUPlaneFirst == cameraUPlaneFirst)) {
        // UV semi-planar
        // The chroma plane could be either Cb first or Cr first. Take the
        // smaller address.
        uint8_t *src = std::min(yuvBuffer.dataCb, yuvBuffer.dataCr);
        MediaImage2::PlaneIndex dstPlane = codecUPlaneFirst ? MediaImage2::U : MediaImage2::V;
        for (auto row = top/2; row < (top+height)/2; row++) {
            uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[dstPlane].mOffset +
                    imageInfo->mPlane[dstPlane].mRowInc * (row - top/2);
            mFnCopyRow(src+row*yuvBuffer.chromaStride+left, dst, width);
        }
    } else if (isCodecUvPlannar && yuvBuffer.chromaStep == 1) {
        // U plane
        for (auto row = top/2; row < (top+height)/2; row++) {
            uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::U].mOffset +
                    imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2);
            mFnCopyRow(yuvBuffer.dataCb+row*yuvBuffer.chromaStride+left/2, dst, width/2);
        }

        // V plane
        for (auto row = top/2; row < (top+height)/2; row++) {
            uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::V].mOffset +
                    imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2);
            mFnCopyRow(yuvBuffer.dataCr+row*yuvBuffer.chromaStride+left/2, dst, width/2);
        }
    } else {
        // Convert between semi-planar and planar layouts, or handle the case
        // where the UV orders differ.
        uint8_t *dst = codecBuffer->data();
        for (auto row = top/2; row < (top+height)/2; row++) {
            for (auto col = left/2; col < (left+width)/2; col++) {
                // U/Cb
                int32_t dstIndex = imageInfo->mPlane[MediaImage2::U].mOffset +
                        imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2) +
                        imageInfo->mPlane[MediaImage2::U].mColInc * (col - left/2);
                int32_t srcIndex = row * yuvBuffer.chromaStride + yuvBuffer.chromaStep * col;
                dst[dstIndex] = yuvBuffer.dataCb[srcIndex];

                // V/Cr
                dstIndex = imageInfo->mPlane[MediaImage2::V].mOffset +
                        imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2) +
                        imageInfo->mPlane[MediaImage2::V].mColInc * (col - left/2);
                srcIndex = row * yuvBuffer.chromaStride + yuvBuffer.chromaStep * col;
                dst[dstIndex] = yuvBuffer.dataCr[srcIndex];
            }
        }
    }
    return OK;
}

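// Select the fastest libyuv row-copy routine supported by the CPU
// (SSE2/AVX/ERMS/NEON/MIPS when available), falling back to the portable C
// implementation.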
void HeicCompositeStream::initCopyRowFunction(int32_t width)
{
    using namespace libyuv;

    mFnCopyRow = CopyRow_C;
#if defined(HAS_COPYROW_SSE2)
    if (TestCpuFlag(kCpuHasSSE2)) {
        mFnCopyRow = IS_ALIGNED(width, 32) ? CopyRow_SSE2 : CopyRow_Any_SSE2;
    }
#endif
#if defined(HAS_COPYROW_AVX)
    if (TestCpuFlag(kCpuHasAVX)) {
        mFnCopyRow = IS_ALIGNED(width, 64) ? CopyRow_AVX : CopyRow_Any_AVX;
    }
#endif
#if defined(HAS_COPYROW_ERMS)
    if (TestCpuFlag(kCpuHasERMS)) {
        mFnCopyRow = CopyRow_ERMS;
    }
#endif
#if defined(HAS_COPYROW_NEON)
    if (TestCpuFlag(kCpuHasNEON)) {
        mFnCopyRow = IS_ALIGNED(width, 32) ? CopyRow_NEON : CopyRow_Any_NEON;
    }
#endif
#if defined(HAS_COPYROW_MIPS)
    if (TestCpuFlag(kCpuHasMIPS)) {
        mFnCopyRow = CopyRow_MIPS;
    }
#endif
}

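// Worst-case size of the JPEG APP segments buffer: each of the (at most 16)
// APPn segments occupies 2 marker bytes plus up to 0xFFFF bytes of length
// field and payload, and a CameraBlob header is appended at the end.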
size_t HeicCompositeStream::calcAppSegmentMaxSize(const CameraMetadata& info) {
    camera_metadata_ro_entry_t entry = info.find(ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT);
    size_t maxAppsSegment = 1;
    if (entry.count > 0) {
        maxAppsSegment = entry.data.u8[0] < 1 ? 1 :
                entry.data.u8[0] > 16 ? 16 : entry.data.u8[0];
    }
    return maxAppsSegment * (2 + 0xFFFF) + sizeof(struct CameraBlob);
}

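// Update the codec's compression quality if it has changed. The video bitrate
// parameter key is used here to carry the quality value to the encoder.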
void HeicCompositeStream::updateCodecQualityLocked(int32_t quality) {
    if (quality != mQuality) {
        sp<AMessage> qualityParams = new AMessage;
        qualityParams->setInt32(PARAMETER_KEY_VIDEO_BITRATE, quality);
        status_t res = mCodec->setParameters(qualityParams);
        if (res != OK) {
            ALOGE("%s: Failed to set codec quality: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
        } else {
            mQuality = quality;
        }
    }
}

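// Main processing loop: collect newly arrived buffers and results, wait until
// the next input frame is ready (or has failed), process it via
// processInputFrame(), and release frames that have completed or errored out.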
bool HeicCompositeStream::threadLoop() {
    int64_t currentTs = INT64_MAX;
    bool newInputAvailable = false;

    {
        Mutex::Autolock l(mMutex);
        if (mErrorState) {
            // In case we landed in error state, return any pending buffers and
            // halt all further processing.
            compilePendingInputLocked();
            releaseInputFramesLocked();
            return false;
        }

        while (!newInputAvailable) {
            compilePendingInputLocked();
            newInputAvailable = getNextReadyInputLocked(&currentTs);

            if (!newInputAvailable) {
                auto failingFrameNumber = getNextFailingInputLocked(&currentTs);
                if (failingFrameNumber >= 0) {
                    // We cannot erase 'mPendingInputFrames[currentTs]' at this point because it is
                    // possible for two internal stream buffers to fail. In such a scenario the
                    // composite stream should notify the client about a stream buffer error only
                    // once and this information is kept within 'errorNotified'.
                    // Any present failed input frames will be removed on a subsequent call to
                    // 'releaseInputFramesLocked()'.
                    releaseInputFrameLocked(&mPendingInputFrames[currentTs]);
                    currentTs = INT64_MAX;
                }

                auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration);
                if (ret == TIMED_OUT) {
                    return true;
                } else if (ret != OK) {
                    ALOGE("%s: Timed wait on condition failed: %s (%d)", __FUNCTION__,
                            strerror(-ret), ret);
                    return false;
                }
            }
        }
    }

    auto res = processInputFrame(currentTs, mPendingInputFrames[currentTs]);
    Mutex::Autolock l(mMutex);
    if (res != OK) {
        ALOGE("%s: Failed processing frame with timestamp: %" PRId64 ": %s (%d)",
                __FUNCTION__, currentTs, strerror(-res), res);
        mPendingInputFrames[currentTs].error = true;
    }

    releaseInputFramesLocked();

    return true;
}

bool HeicCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
    bool res = false;
    // Buffer errors concerning internal composite streams should not be directly visible to
    // camera clients. They must only receive a single buffer error with the public composite
    // stream id.
    if ((resultExtras.errorStreamId == mAppSegmentStreamId) ||
            (resultExtras.errorStreamId == mMainImageStreamId)) {
        flagAnErrorFrameNumber(resultExtras.frameNumber);
        res = true;
    }

    return res;
}

void HeicCompositeStream::onResultError(const CaptureResultExtras& resultExtras) {
    // For result error, since the APP segments buffer already contains EXIF,
    // simply skip using the capture result metadata to override EXIF.
    Mutex::Autolock l(mMutex);

    int64_t timestamp = -1;
    for (const auto& fn : mFrameNumberMap) {
        if (fn.first == resultExtras.frameNumber) {
            timestamp = fn.second;
            break;
        }
    }
    if (timestamp == -1) {
        for (const auto& inputFrame : mPendingInputFrames) {
            if (inputFrame.second.frameNumber == resultExtras.frameNumber) {
                timestamp = inputFrame.first;
                break;
            }
        }
    }

    if (timestamp == -1) {
        ALOGE("%s: Failed to find shutter timestamp for result error!", __FUNCTION__);
        return;
    }

    mCaptureResults.emplace(timestamp, std::make_tuple(resultExtras.frameNumber, CameraMetadata()));
    mInputReadyCondition.signal();
}

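// Dispatch asynchronous MediaCodec callbacks (input/output buffer
// availability, output format changes, and codec errors) to the owning
// HeicCompositeStream.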
void HeicCompositeStream::CodecCallbackHandler::onMessageReceived(const sp<AMessage> &msg) {
    sp<HeicCompositeStream> parent = mParent.promote();
    if (parent == nullptr) return;

    switch (msg->what()) {
        case kWhatCallbackNotify: {
            int32_t cbID;
            if (!msg->findInt32("callbackID", &cbID)) {
                ALOGE("kWhatCallbackNotify: callbackID is expected.");
                break;
            }

            ALOGV("kWhatCallbackNotify: cbID = %d", cbID);

            switch (cbID) {
                case MediaCodec::CB_INPUT_AVAILABLE: {
                    int32_t index;
                    if (!msg->findInt32("index", &index)) {
                        ALOGE("CB_INPUT_AVAILABLE: index is expected.");
                        break;
                    }
                    parent->onHeicInputFrameAvailable(index);
                    break;
                }

                case MediaCodec::CB_OUTPUT_AVAILABLE: {
                    int32_t index;
                    size_t offset;
                    size_t size;
                    int64_t timeUs;
                    int32_t flags;

                    if (!msg->findInt32("index", &index)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: index is expected.");
                        break;
                    }
                    if (!msg->findSize("offset", &offset)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: offset is expected.");
                        break;
                    }
                    if (!msg->findSize("size", &size)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: size is expected.");
                        break;
                    }
                    if (!msg->findInt64("timeUs", &timeUs)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: timeUs is expected.");
                        break;
                    }
                    if (!msg->findInt32("flags", &flags)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: flags is expected.");
                        break;
                    }

                    CodecOutputBufferInfo bufferInfo = {
                        index,
                        (int32_t)offset,
                        (int32_t)size,
                        timeUs,
                        (uint32_t)flags};

                    parent->onHeicOutputFrameAvailable(bufferInfo);
                    break;
                }

                case MediaCodec::CB_OUTPUT_FORMAT_CHANGED: {
                    sp<AMessage> format;
                    if (!msg->findMessage("format", &format)) {
                        ALOGE("CB_OUTPUT_FORMAT_CHANGED: format is expected.");
                        break;
                    }
                    // Here format is MediaCodec's internal copy of output format.
                    // Make a copy since onHeicFormatChanged() might modify it.
                    sp<AMessage> formatCopy;
                    if (format != nullptr) {
                        formatCopy = format->dup();
                    }
                    parent->onHeicFormatChanged(formatCopy);
                    break;
                }

                case MediaCodec::CB_ERROR: {
                    status_t err;
                    int32_t actionCode;
                    AString detail;
                    if (!msg->findInt32("err", &err)) {
                        ALOGE("CB_ERROR: err is expected.");
                        break;
                    }
                    if (!msg->findInt32("action", &actionCode)) {
                        ALOGE("CB_ERROR: action is expected.");
                        break;
                    }
                    msg->findString("detail", &detail);
                    ALOGE("Codec reported error(0x%x), actionCode(%d), detail(%s)",
                            err, actionCode, detail.c_str());

                    parent->onHeicCodecError();
                    break;
                }

                default: {
                    ALOGE("kWhatCallbackNotify: callbackID(%d) is unexpected.", cbID);
                    break;
                }
            }
            break;
        }

        default:
            ALOGE("shouldn't be here");
            break;
    }
}

}; // namespace camera3
}; // namespace android