/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera3-HeicCompositeStream"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <linux/memfd.h>
#include <pthread.h>
#include <sys/syscall.h>

#include <android/hardware/camera/device/3.5/types.h>
#include <libyuv.h>
#include <gui/Surface.h>
#include <utils/Log.h>
#include <utils/Trace.h>

#include <mediadrm/ICrypto.h>
#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/MediaDefs.h>
#include <media/stagefright/MediaCodecConstants.h>

#include "common/CameraDeviceBase.h"
#include "utils/ExifUtils.h"
#include "HeicEncoderInfoManager.h"
#include "HeicCompositeStream.h"

using android::hardware::camera::device::V3_5::CameraBlob;
using android::hardware::camera::device::V3_5::CameraBlobId;

namespace android {
namespace camera3 {

HeicCompositeStream::HeicCompositeStream(wp<CameraDeviceBase> device,
        wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
        CompositeStream(device, cb),
        mUseHeic(false),
        mNumOutputTiles(1),
        mOutputWidth(0),
        mOutputHeight(0),
        mMaxHeicBufferSize(0),
        mGridWidth(HeicEncoderInfoManager::kGridWidth),
        mGridHeight(HeicEncoderInfoManager::kGridHeight),
        mGridRows(1),
        mGridCols(1),
        mUseGrid(false),
        mAppSegmentStreamId(-1),
        mAppSegmentSurfaceId(-1),
        mMainImageStreamId(-1),
        mMainImageSurfaceId(-1),
        mYuvBufferAcquired(false),
        mProducerListener(new ProducerListener()),
        mDequeuedOutputBufferCnt(0),
        mLockedAppSegmentBufferCnt(0),
        mCodecOutputCounter(0),
        mQuality(-1),
        mGridTimestampUs(0) {
}

HeicCompositeStream::~HeicCompositeStream() {
    // Call deinitCodec in case stream hasn't been deleted yet to avoid any
    // memory/resource leak.
    deinitCodec();

    mInputAppSegmentBuffers.clear();
    mCodecOutputBuffers.clear();

    mAppSegmentStreamId = -1;
    mAppSegmentSurfaceId = -1;
    mAppSegmentConsumer.clear();
    mAppSegmentSurface.clear();

    mMainImageStreamId = -1;
    mMainImageSurfaceId = -1;
    mMainImageConsumer.clear();
    mMainImageSurface.clear();
}

bool HeicCompositeStream::isHeicCompositeStream(const sp<Surface> &surface) {
    ANativeWindow *anw = surface.get();
    status_t err;
    int format;
    if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
        String8 msg = String8::format("Failed to query Surface format: %s (%d)", strerror(-err),
                err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return false;
    }

    int dataspace;
    if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
        String8 msg = String8::format("Failed to query Surface dataspace: %s (%d)", strerror(-err),
                err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return false;
    }

    return ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == HAL_DATASPACE_HEIF));
}

status_t HeicCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
        bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
        camera3_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
        std::vector<int> *surfaceIds, int /*streamSetId*/, bool /*isShared*/) {

    sp<CameraDeviceBase> device = mDevice.promote();
    if (!device.get()) {
        ALOGE("%s: Invalid camera device!", __FUNCTION__);
        return NO_INIT;
    }

    status_t res = initializeCodec(width, height, device);
    if (res != OK) {
        ALOGE("%s: Failed to initialize HEIC/HEVC codec: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return NO_INIT;
    }

    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    BufferQueue::createBufferQueue(&producer, &consumer);
    mAppSegmentConsumer = new CpuConsumer(consumer, kMaxAcquiredAppSegment);
    mAppSegmentConsumer->setFrameAvailableListener(this);
    mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
    mAppSegmentSurface = new Surface(producer);

    mStaticInfo = device->info();

    res = device->createStream(mAppSegmentSurface, mAppSegmentMaxSize, 1, format,
            kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId, surfaceIds);
    if (res == OK) {
        mAppSegmentSurfaceId = (*surfaceIds)[0];
    } else {
        ALOGE("%s: Failed to create JPEG App segment stream: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    if (!mUseGrid) {
        res = mCodec->createInputSurface(&producer);
        if (res != OK) {
            ALOGE("%s: Failed to create input surface for Heic codec: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    } else {
        BufferQueue::createBufferQueue(&producer, &consumer);
        mMainImageConsumer = new CpuConsumer(consumer, 1);
        mMainImageConsumer->setFrameAvailableListener(this);
        mMainImageConsumer->setName(String8("Camera3-HeicComposite-HevcInputYUVStream"));
    }
    mMainImageSurface = new Surface(producer);

    res = mCodec->start();
    if (res != OK) {
        ALOGE("%s: Failed to start codec: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    std::vector<int> sourceSurfaceId;
    // Use YUV_888 format if framework tiling is needed.
    int srcStreamFmt = mUseGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    res = device->createStream(mMainImageSurface, width, height, srcStreamFmt, kHeifDataSpace,
            rotation, id, physicalCameraId, &sourceSurfaceId);
    if (res == OK) {
        mMainImageSurfaceId = sourceSurfaceId[0];
        mMainImageStreamId = *id;
    } else {
        ALOGE("%s: Failed to create main image stream: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    mOutputSurface = consumers[0];
    res = registerCompositeStreamListener(getStreamId());
    if (res != OK) {
        ALOGE("%s: Failed to register HAL main image stream", __FUNCTION__);
        return res;
    }

    initCopyRowFunction(width);
    return res;
}

status_t HeicCompositeStream::deleteInternalStreams() {
    requestExit();
    auto res = join();
    if (res != OK) {
        ALOGE("%s: Failed to join with the main processing thread: %s (%d)", __FUNCTION__,
                strerror(-res), res);
    }

    deinitCodec();

    if (mAppSegmentStreamId >= 0) {
        // Camera devices may not be valid after switching to offline mode.
        // In this case, all offline streams including internal composite streams
        // are managed and released by the offline session.
        sp<CameraDeviceBase> device = mDevice.promote();
        if (device.get() != nullptr) {
            res = device->deleteStream(mAppSegmentStreamId);
        }

        mAppSegmentStreamId = -1;
    }

    if (mOutputSurface != nullptr) {
        mOutputSurface->disconnect(NATIVE_WINDOW_API_CAMERA);
        mOutputSurface.clear();
    }
    return res;
}

void HeicCompositeStream::onBufferReleased(const BufferInfo& bufferInfo) {
    Mutex::Autolock l(mMutex);

    if (bufferInfo.mError) return;

    mCodecOutputBufferTimestamps.push(bufferInfo.mTimestamp);
    ALOGV("%s: [%" PRId64 "]: Adding codecOutputBufferTimestamp (%zu timestamps in total)",
            __FUNCTION__, bufferInfo.mTimestamp, mCodecOutputBufferTimestamps.size());
}

// We need to get the settings early to handle the case where the codec output
// arrives earlier than result metadata.
void HeicCompositeStream::onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId,
        const CameraMetadata& settings) {
    ATRACE_ASYNC_BEGIN("HEIC capture", frameNumber);

    Mutex::Autolock l(mMutex);
    if (mErrorState || (streamId != getStreamId())) {
        return;
    }

    mPendingCaptureResults.emplace(frameNumber, CameraMetadata());

    camera_metadata_ro_entry entry;

    int32_t orientation = 0;
    entry = settings.find(ANDROID_JPEG_ORIENTATION);
    if (entry.count == 1) {
        orientation = entry.data.i32[0];
    }

    int32_t quality = kDefaultJpegQuality;
    entry = settings.find(ANDROID_JPEG_QUALITY);
    if (entry.count == 1) {
        quality = entry.data.i32[0];
    }

    mSettingsByFrameNumber[frameNumber] = std::make_pair(orientation, quality);
}

void HeicCompositeStream::onFrameAvailable(const BufferItem& item) {
    if (item.mDataSpace == static_cast<android_dataspace>(kAppSegmentDataSpace)) {
        ALOGV("%s: JPEG APP segments buffer with ts: %" PRIu64 " ms. arrived!",
                __func__, ns2ms(item.mTimestamp));

        Mutex::Autolock l(mMutex);
        if (!mErrorState) {
            mInputAppSegmentBuffers.push_back(item.mTimestamp);
            mInputReadyCondition.signal();
        }
    } else if (item.mDataSpace == kHeifDataSpace) {
        ALOGV("%s: YUV_888 buffer with ts: %" PRIu64 " ms. arrived!",
                __func__, ns2ms(item.mTimestamp));

        Mutex::Autolock l(mMutex);
        if (!mUseGrid) {
            ALOGE("%s: YUV_888 internal stream is only supported for HEVC tiling",
                    __FUNCTION__);
            return;
        }
        if (!mErrorState) {
            mInputYuvBuffers.push_back(item.mTimestamp);
            mInputReadyCondition.signal();
        }
    } else {
        ALOGE("%s: Unexpected data space: 0x%x", __FUNCTION__, item.mDataSpace);
    }
}

status_t HeicCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
        const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/) {
    if (compositeOutput == nullptr) {
        return BAD_VALUE;
    }

    compositeOutput->clear();

    bool useGrid, useHeic;
    bool isSizeSupported = isSizeSupportedByHeifEncoder(
            streamInfo.width, streamInfo.height, &useHeic, &useGrid, nullptr);
    if (!isSizeSupported) {
        // Size is not supported by either encoder.
        return OK;
    }

    compositeOutput->insert(compositeOutput->end(), 2, streamInfo);

    // JPEG APPS segments Blob stream info
    (*compositeOutput)[0].width = calcAppSegmentMaxSize(ch);
    (*compositeOutput)[0].height = 1;
    (*compositeOutput)[0].format = HAL_PIXEL_FORMAT_BLOB;
    (*compositeOutput)[0].dataSpace = kAppSegmentDataSpace;
    (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;

    // YUV/IMPLEMENTATION_DEFINED stream info
    (*compositeOutput)[1].width = streamInfo.width;
    (*compositeOutput)[1].height = streamInfo.height;
    (*compositeOutput)[1].format = useGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    (*compositeOutput)[1].dataSpace = kHeifDataSpace;
    (*compositeOutput)[1].consumerUsage = useHeic ? GRALLOC_USAGE_HW_IMAGE_ENCODER :
            useGrid ? GRALLOC_USAGE_SW_READ_OFTEN : GRALLOC_USAGE_HW_VIDEO_ENCODER;

    return NO_ERROR;
}

bool HeicCompositeStream::isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
        bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName) {
    static HeicEncoderInfoManager& heicManager = HeicEncoderInfoManager::getInstance();
    return heicManager.isSizeSupported(width, height, useHeic, useGrid, stall, hevcName);
}

bool HeicCompositeStream::isInMemoryTempFileSupported() {
    int memfd = syscall(__NR_memfd_create, "HEIF-try-memfd", MFD_CLOEXEC);
    if (memfd == -1) {
        if (errno != ENOSYS) {
            ALOGE("%s: Failed to create tmpfs file. errno %d", __FUNCTION__, errno);
        }
        return false;
    }
    close(memfd);
    return true;
}

void HeicCompositeStream::onHeicOutputFrameAvailable(
        const CodecOutputBufferInfo& outputBufferInfo) {
    Mutex::Autolock l(mMutex);

    ALOGV("%s: index %d, offset %d, size %d, time %" PRId64 ", flags 0x%x",
            __FUNCTION__, outputBufferInfo.index, outputBufferInfo.offset,
            outputBufferInfo.size, outputBufferInfo.timeUs, outputBufferInfo.flags);

    if (!mErrorState) {
        if ((outputBufferInfo.size > 0) &&
                ((outputBufferInfo.flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0)) {
            mCodecOutputBuffers.push_back(outputBufferInfo);
            mInputReadyCondition.signal();
        } else {
            ALOGV("%s: Releasing output buffer: size %d flags: 0x%x ", __FUNCTION__,
                    outputBufferInfo.size, outputBufferInfo.flags);
            mCodec->releaseOutputBuffer(outputBufferInfo.index);
        }
    } else {
        mCodec->releaseOutputBuffer(outputBufferInfo.index);
    }
}

void HeicCompositeStream::onHeicInputFrameAvailable(int32_t index) {
    Mutex::Autolock l(mMutex);

    if (!mUseGrid) {
        ALOGE("%s: Codec YUV input mode must only be used for Hevc tiling mode", __FUNCTION__);
        return;
    }

    mCodecInputBuffers.push_back(index);
    mInputReadyCondition.signal();
}

void HeicCompositeStream::onHeicFormatChanged(sp<AMessage>& newFormat) {
    if (newFormat == nullptr) {
        ALOGE("%s: newFormat must not be null!", __FUNCTION__);
        return;
    }

    Mutex::Autolock l(mMutex);

    AString mime;
    AString mimeHeic(MIMETYPE_IMAGE_ANDROID_HEIC);
    newFormat->findString(KEY_MIME, &mime);
    if (mime != mimeHeic) {
        // For the HEVC codec, the keys below need to be filled out or overwritten so
        // that the muxer can handle the stream as a HEIC output image.
        newFormat->setString(KEY_MIME, mimeHeic);
        newFormat->setInt32(KEY_WIDTH, mOutputWidth);
        newFormat->setInt32(KEY_HEIGHT, mOutputHeight);
        if (mUseGrid) {
            newFormat->setInt32(KEY_TILE_WIDTH, mGridWidth);
            newFormat->setInt32(KEY_TILE_HEIGHT, mGridHeight);
            newFormat->setInt32(KEY_GRID_ROWS, mGridRows);
            newFormat->setInt32(KEY_GRID_COLUMNS, mGridCols);
        }
    }
    newFormat->setInt32(KEY_IS_DEFAULT, 1 /*isPrimary*/);

    int32_t gridRows, gridCols;
    if (newFormat->findInt32(KEY_GRID_ROWS, &gridRows) &&
            newFormat->findInt32(KEY_GRID_COLUMNS, &gridCols)) {
        mNumOutputTiles = gridRows * gridCols;
    } else {
        mNumOutputTiles = 1;
    }

    mFormat = newFormat;

    ALOGV("%s: mNumOutputTiles is %zu", __FUNCTION__, mNumOutputTiles);
    mInputReadyCondition.signal();
}

void HeicCompositeStream::onHeicCodecError() {
    Mutex::Autolock l(mMutex);
    mErrorState = true;
}

status_t HeicCompositeStream::configureStream() {
    if (isRunning()) {
        // Processing thread is already running, nothing more to do.
        return NO_ERROR;
    }

    if (mOutputSurface.get() == nullptr) {
        ALOGE("%s: No valid output surface set!", __FUNCTION__);
        return NO_INIT;
    }

    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
    if (res != OK) {
        ALOGE("%s: Unable to connect to native window for stream %d",
                __FUNCTION__, mMainImageStreamId);
        return res;
    }

    if ((res = native_window_set_buffers_format(mOutputSurface.get(), HAL_PIXEL_FORMAT_BLOB))
            != OK) {
        ALOGE("%s: Unable to configure stream buffer format for stream %d", __FUNCTION__,
                mMainImageStreamId);
        return res;
    }

    ANativeWindow *anwConsumer = mOutputSurface.get();
    int maxConsumerBuffers;
    if ((res = anwConsumer->query(anwConsumer, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
                    &maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to query consumer undequeued"
                " buffer count for stream %d", __FUNCTION__, mMainImageStreamId);
        return res;
    }

    // Cannot use SourceSurface buffer count since it could be codec's 512*512 tile
    // buffer count.
    if ((res = native_window_set_buffer_count(
                    anwConsumer, kMaxOutputSurfaceProducerCount + maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to set buffer count for stream %d", __FUNCTION__, mMainImageStreamId);
        return res;
    }

    if ((res = native_window_set_buffers_dimensions(anwConsumer, mMaxHeicBufferSize, 1)) != OK) {
        ALOGE("%s: Unable to set buffer dimension %zu x 1 for stream %d: %s (%d)",
                __FUNCTION__, mMaxHeicBufferSize, mMainImageStreamId, strerror(-res), res);
        return res;
    }

    run("HeicCompositeStreamProc");

    return NO_ERROR;
}

status_t HeicCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
        Vector<int32_t>* /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
    if (outSurfaceMap->find(mAppSegmentStreamId) == outSurfaceMap->end()) {
        (*outSurfaceMap)[mAppSegmentStreamId] = std::vector<size_t>();
        outputStreamIds->push_back(mAppSegmentStreamId);
    }
    (*outSurfaceMap)[mAppSegmentStreamId].push_back(mAppSegmentSurfaceId);

    if (outSurfaceMap->find(mMainImageStreamId) == outSurfaceMap->end()) {
        (*outSurfaceMap)[mMainImageStreamId] = std::vector<size_t>();
        outputStreamIds->push_back(mMainImageStreamId);
    }
    (*outSurfaceMap)[mMainImageStreamId].push_back(mMainImageSurfaceId);

    if (currentStreamId != nullptr) {
        *currentStreamId = mMainImageStreamId;
    }

    return NO_ERROR;
}

status_t HeicCompositeStream::insertCompositeStreamIds(
        std::vector<int32_t>* compositeStreamIds /*out*/) {
    if (compositeStreamIds == nullptr) {
        return BAD_VALUE;
    }

    compositeStreamIds->push_back(mAppSegmentStreamId);
    compositeStreamIds->push_back(mMainImageStreamId);

    return OK;
}

void HeicCompositeStream::onShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) {
    Mutex::Autolock l(mMutex);
    if (mErrorState) {
        return;
    }

    if (mSettingsByFrameNumber.find(resultExtras.frameNumber) != mSettingsByFrameNumber.end()) {
        ALOGV("%s: [%" PRId64 "]: frameNumber %" PRId64, __FUNCTION__,
                timestamp, resultExtras.frameNumber);
        mFrameNumberMap.emplace(resultExtras.frameNumber, timestamp);
        mSettingsByTimestamp[timestamp] = mSettingsByFrameNumber[resultExtras.frameNumber];
        mSettingsByFrameNumber.erase(resultExtras.frameNumber);
        mInputReadyCondition.signal();
    }
}

void HeicCompositeStream::compilePendingInputLocked() {
    while (!mSettingsByTimestamp.empty()) {
        auto it = mSettingsByTimestamp.begin();
        mPendingInputFrames[it->first].orientation = it->second.first;
        mPendingInputFrames[it->first].quality = it->second.second;
        mSettingsByTimestamp.erase(it);

        // Set encoder quality if no inflight encoding
        if (mPendingInputFrames.size() == 1) {
            int32_t newQuality = mPendingInputFrames.begin()->second.quality;
            updateCodecQualityLocked(newQuality);
        }
    }

    while (!mInputAppSegmentBuffers.empty()) {
        CpuConsumer::LockedBuffer imgBuffer;
        auto it = mInputAppSegmentBuffers.begin();
        auto res = mAppSegmentConsumer->lockNextBuffer(&imgBuffer);
        if (res == NOT_ENOUGH_DATA) {
            // Can not lock any more buffers.
            break;
        } else if ((res != OK) || (*it != imgBuffer.timestamp)) {
            if (res != OK) {
                ALOGE("%s: Error locking JPEG_APP_SEGMENTS image buffer: %s (%d)", __FUNCTION__,
                        strerror(-res), res);
            } else {
                ALOGE("%s: Expecting JPEG_APP_SEGMENTS buffer with time stamp: %" PRId64
                        " received buffer with time stamp: %" PRId64, __FUNCTION__,
                        *it, imgBuffer.timestamp);
                mAppSegmentConsumer->unlockBuffer(imgBuffer);
            }
            mPendingInputFrames[*it].error = true;
            mInputAppSegmentBuffers.erase(it);
            continue;
        }

        if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
                (mPendingInputFrames[imgBuffer.timestamp].error)) {
            mAppSegmentConsumer->unlockBuffer(imgBuffer);
        } else {
            mPendingInputFrames[imgBuffer.timestamp].appSegmentBuffer = imgBuffer;
            mLockedAppSegmentBufferCnt++;
        }
        mInputAppSegmentBuffers.erase(it);
    }

    while (!mInputYuvBuffers.empty() && !mYuvBufferAcquired) {
        CpuConsumer::LockedBuffer imgBuffer;
        auto it = mInputYuvBuffers.begin();
        auto res = mMainImageConsumer->lockNextBuffer(&imgBuffer);
        if (res == NOT_ENOUGH_DATA) {
            // Can not lock any more buffers.
            break;
        } else if (res != OK) {
            ALOGE("%s: Error locking YUV_888 image buffer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            mPendingInputFrames[*it].error = true;
            mInputYuvBuffers.erase(it);
            continue;
        } else if (*it != imgBuffer.timestamp) {
            ALOGW("%s: Expecting YUV_888 buffer with time stamp: %" PRId64 " received buffer with "
                    "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
            mPendingInputFrames[*it].error = true;
            mInputYuvBuffers.erase(it);
            continue;
        }

        if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
                (mPendingInputFrames[imgBuffer.timestamp].error)) {
            mMainImageConsumer->unlockBuffer(imgBuffer);
        } else {
            mPendingInputFrames[imgBuffer.timestamp].yuvBuffer = imgBuffer;
            mYuvBufferAcquired = true;
        }
        mInputYuvBuffers.erase(it);
    }

    while (!mCodecOutputBuffers.empty()) {
        auto it = mCodecOutputBuffers.begin();
        // Bitstream buffer timestamp doesn't necessarily directly correlate with input
        // buffer timestamp. Assume encoder input to output is FIFO, use a queue
        // to look up timestamp.
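        // For example, with a 2x2 grid one capture timestamp maps to four bitstream
        // buffers: the front of the queue is reused until mCodecOutputCounter reaches
        // mNumOutputTiles, and only then is the timestamp popped below.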
        int64_t bufferTime = -1;
        if (mCodecOutputBufferTimestamps.empty()) {
            ALOGV("%s: Failed to find buffer timestamp for codec output buffer!", __FUNCTION__);
            break;
        } else {
            // Direct mapping between camera timestamp (in ns) and codec timestamp (in us).
            bufferTime = mCodecOutputBufferTimestamps.front();
            mCodecOutputCounter++;
            if (mCodecOutputCounter == mNumOutputTiles) {
                mCodecOutputBufferTimestamps.pop();
                mCodecOutputCounter = 0;
            }

            mPendingInputFrames[bufferTime].codecOutputBuffers.push_back(*it);
            ALOGV("%s: [%" PRId64 "]: Pushing codecOutputBuffers (time %" PRId64 " us)",
                    __FUNCTION__, bufferTime, it->timeUs);
        }
        mCodecOutputBuffers.erase(it);
    }

    while (!mFrameNumberMap.empty()) {
        auto it = mFrameNumberMap.begin();
        mPendingInputFrames[it->second].frameNumber = it->first;
        ALOGV("%s: [%" PRId64 "]: frameNumber is %" PRId64, __FUNCTION__, it->second, it->first);
        mFrameNumberMap.erase(it);
    }

    while (!mCaptureResults.empty()) {
        auto it = mCaptureResults.begin();
        // Negative timestamp indicates that something went wrong during the capture result
        // collection process.
        if (it->first >= 0) {
            if (mPendingInputFrames[it->first].frameNumber == std::get<0>(it->second)) {
                mPendingInputFrames[it->first].result =
                        std::make_unique<CameraMetadata>(std::get<1>(it->second));
            } else {
                ALOGE("%s: Capture result frameNumber/timestamp mapping changed between "
                        "shutter and capture result!", __FUNCTION__);
            }
        }
        mCaptureResults.erase(it);
    }

    // mErrorFrameNumbers stores frame number of dropped buffers.
    auto it = mErrorFrameNumbers.begin();
    while (it != mErrorFrameNumbers.end()) {
        bool frameFound = false;
        for (auto &inputFrame : mPendingInputFrames) {
            if (inputFrame.second.frameNumber == *it) {
                inputFrame.second.error = true;
                frameFound = true;
                break;
            }
        }

        if (frameFound) {
            it = mErrorFrameNumbers.erase(it);
        } else {
            ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__,
                    *it);
            it++;
        }
    }

    // Distribute codec input buffers to be filled out from YUV output
    for (auto it = mPendingInputFrames.begin();
            it != mPendingInputFrames.end() && mCodecInputBuffers.size() > 0; it++) {
        InputFrame& inputFrame(it->second);
        if (inputFrame.codecInputCounter < mGridRows * mGridCols) {
            // Available input tiles that are required for the current input
            // image.
            size_t newInputTiles = std::min(mCodecInputBuffers.size(),
                    mGridRows * mGridCols - inputFrame.codecInputCounter);
            for (size_t i = 0; i < newInputTiles; i++) {
                CodecInputBufferInfo inputInfo =
                        { mCodecInputBuffers[0], mGridTimestampUs++, inputFrame.codecInputCounter };
                inputFrame.codecInputBuffers.push_back(inputInfo);

                mCodecInputBuffers.erase(mCodecInputBuffers.begin());
                inputFrame.codecInputCounter++;
            }
            break;
        }
    }
}

bool HeicCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*out*/) {
    if (currentTs == nullptr) {
        return false;
    }

    bool newInputAvailable = false;
    for (auto& it : mPendingInputFrames) {
        // New input is considered to be available only if:
        // 1. A codec input tile is ready (YUV buffer plus codec input buffers), or
        // 2. The JPEG APP segments and capture result are ready and the muxer has
        //    been created, or
        // 3. A codec output tile is ready and an output buffer is available.
        // This makes sure that the muxer gets created only when an output tile is
        // generated, because right now we only handle 1 HEIC output buffer at a
        // time (max dequeued buffer count is 1).
        bool appSegmentReady = (it.second.appSegmentBuffer.data != nullptr) &&
                !it.second.appSegmentWritten && it.second.result != nullptr &&
                it.second.muxer != nullptr;
        bool codecOutputReady = !it.second.codecOutputBuffers.empty();
        bool codecInputReady = (it.second.yuvBuffer.data != nullptr) &&
                (!it.second.codecInputBuffers.empty());
        bool hasOutputBuffer = it.second.muxer != nullptr ||
                (mDequeuedOutputBufferCnt < kMaxOutputSurfaceProducerCount);
        if ((!it.second.error) &&
                (it.first < *currentTs) &&
                (appSegmentReady || (codecOutputReady && hasOutputBuffer) || codecInputReady)) {
            *currentTs = it.first;
            if (it.second.format == nullptr && mFormat != nullptr) {
                it.second.format = mFormat->dup();
            }
            newInputAvailable = true;
            break;
        }
    }

    return newInputAvailable;
}

int64_t HeicCompositeStream::getNextFailingInputLocked(int64_t *currentTs /*out*/) {
    int64_t res = -1;
    if (currentTs == nullptr) {
        return res;
    }

    for (const auto& it : mPendingInputFrames) {
        if (it.second.error && !it.second.errorNotified && (it.first < *currentTs)) {
            *currentTs = it.first;
            res = it.second.frameNumber;
            break;
        }
    }

    return res;
}

status_t HeicCompositeStream::processInputFrame(nsecs_t timestamp,
        InputFrame &inputFrame) {
    ATRACE_CALL();
    status_t res = OK;

    bool appSegmentReady = inputFrame.appSegmentBuffer.data != nullptr &&
            !inputFrame.appSegmentWritten && inputFrame.result != nullptr &&
            inputFrame.muxer != nullptr;
    bool codecOutputReady = inputFrame.codecOutputBuffers.size() > 0;
    bool codecInputReady = inputFrame.yuvBuffer.data != nullptr &&
            !inputFrame.codecInputBuffers.empty();
    bool hasOutputBuffer = inputFrame.muxer != nullptr ||
            (mDequeuedOutputBufferCnt < kMaxOutputSurfaceProducerCount);

    ALOGV("%s: [%" PRId64 "]: appSegmentReady %d, codecOutputReady %d, codecInputReady %d,"
            " dequeuedOutputBuffer %d", __FUNCTION__, timestamp, appSegmentReady,
            codecOutputReady, codecInputReady, mDequeuedOutputBufferCnt);

    // Handle inputs for Hevc tiling
    if (codecInputReady) {
        res = processCodecInputFrame(inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process codec input frame: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    if (!(codecOutputReady && hasOutputBuffer) && !appSegmentReady) {
        return OK;
    }

    // Initialize and start muxer if not yet done so. In this case,
    // codecOutputReady must be true. Otherwise, appSegmentReady is guaranteed
    // to be false, and the function must have returned early.
    if (inputFrame.muxer == nullptr) {
        res = startMuxerForInputFrame(timestamp, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to create and start muxer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Write JPEG APP segments data to the muxer.
    if (appSegmentReady) {
        res = processAppSegment(timestamp, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process JPEG APP segments: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Write media codec bitstream buffers to muxer.
    while (!inputFrame.codecOutputBuffers.empty()) {
        res = processOneCodecOutputFrame(timestamp, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process codec output frame: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    if (inputFrame.pendingOutputTiles == 0) {
        if (inputFrame.appSegmentWritten) {
            res = processCompletedInputFrame(timestamp, inputFrame);
            if (res != OK) {
                ALOGE("%s: Failed to process completed input frame: %s (%d)", __FUNCTION__,
                        strerror(-res), res);
                return res;
            }
        } else if (mLockedAppSegmentBufferCnt == kMaxAcquiredAppSegment) {
            ALOGE("%s: Out-of-order app segment buffers reaches limit %u", __FUNCTION__,
                    kMaxAcquiredAppSegment);
            return INVALID_OPERATION;
        }
    }

    return res;
}

status_t HeicCompositeStream::startMuxerForInputFrame(nsecs_t timestamp, InputFrame &inputFrame) {
    sp<ANativeWindow> outputANW = mOutputSurface;

    auto res = outputANW->dequeueBuffer(mOutputSurface.get(), &inputFrame.anb, &inputFrame.fenceFd);
    if (res != OK) {
        ALOGE("%s: Error retrieving output buffer: %s (%d)", __FUNCTION__, strerror(-res),
                res);
        return res;
    }
    mDequeuedOutputBufferCnt++;

    // Combine current thread id, stream id and timestamp to uniquely identify image.
    std::ostringstream tempOutputFile;
    tempOutputFile << "HEIF-" << pthread_self() << "-"
            << getStreamId() << "-" << timestamp;
    inputFrame.fileFd = syscall(__NR_memfd_create, tempOutputFile.str().c_str(), MFD_CLOEXEC);
    if (inputFrame.fileFd < 0) {
        ALOGE("%s: Failed to create file %s: errno %d", __FUNCTION__,
                tempOutputFile.str().c_str(), errno);
        return NO_INIT;
    }
    inputFrame.muxer = new MediaMuxer(inputFrame.fileFd, MediaMuxer::OUTPUT_FORMAT_HEIF);
    if (inputFrame.muxer == nullptr) {
        ALOGE("%s: Failed to create MediaMuxer for file fd %d",
                __FUNCTION__, inputFrame.fileFd);
        return NO_INIT;
    }

    res = inputFrame.muxer->setOrientationHint(inputFrame.orientation);
    if (res != OK) {
        ALOGE("%s: Failed to setOrientationHint: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    ssize_t trackId = inputFrame.muxer->addTrack(inputFrame.format);
    if (trackId < 0) {
        ALOGE("%s: Failed to addTrack to the muxer: %zd", __FUNCTION__, trackId);
        return NO_INIT;
    }

    inputFrame.trackIndex = trackId;
    inputFrame.pendingOutputTiles = mNumOutputTiles;

    res = inputFrame.muxer->start();
    if (res != OK) {
        ALOGE("%s: Failed to start MediaMuxer: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    ALOGV("%s: [%" PRId64 "]: Muxer started for inputFrame", __FUNCTION__,
            timestamp);
    return OK;
}

status_t HeicCompositeStream::processAppSegment(nsecs_t timestamp, InputFrame &inputFrame) {
    size_t app1Size = 0;
    auto appSegmentSize = findAppSegmentsSize(inputFrame.appSegmentBuffer.data,
            inputFrame.appSegmentBuffer.width * inputFrame.appSegmentBuffer.height,
            &app1Size);
    if (appSegmentSize == 0) {
        ALOGE("%s: Failed to find JPEG APP segment size", __FUNCTION__);
        return NO_INIT;
    }

    std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
    auto exifRes = exifUtils->initialize(inputFrame.appSegmentBuffer.data, app1Size);
    if (!exifRes) {
        ALOGE("%s: Failed to initialize ExifUtils object!", __FUNCTION__);
        return BAD_VALUE;
    }
    exifRes = exifUtils->setFromMetadata(*inputFrame.result, mStaticInfo,
            mOutputWidth, mOutputHeight);
    if (!exifRes) {
        ALOGE("%s: Failed to set Exif tags using metadata and main image sizes", __FUNCTION__);
        return BAD_VALUE;
    }
    exifRes = exifUtils->setOrientation(inputFrame.orientation);
    if (!exifRes) {
        ALOGE("%s: ExifUtils failed to set orientation", __FUNCTION__);
        return BAD_VALUE;
    }
    exifRes = exifUtils->generateApp1();
    if (!exifRes) {
        ALOGE("%s: ExifUtils failed to generate APP1 segment", __FUNCTION__);
        return BAD_VALUE;
    }

    unsigned int newApp1Length = exifUtils->getApp1Length();
    const uint8_t *newApp1Segment = exifUtils->getApp1Buffer();

    // Assemble the APP1 marker buffer required by MediaCodec
    uint8_t kExifApp1Marker[] = {'E', 'x', 'i', 'f', 0xFF, 0xE1, 0x00, 0x00};
    kExifApp1Marker[6] = static_cast<uint8_t>(newApp1Length >> 8);
    kExifApp1Marker[7] = static_cast<uint8_t>(newApp1Length & 0xFF);
    size_t appSegmentBufferSize = sizeof(kExifApp1Marker) +
            appSegmentSize - app1Size + newApp1Length;
    uint8_t* appSegmentBuffer = new uint8_t[appSegmentBufferSize];
    memcpy(appSegmentBuffer, kExifApp1Marker, sizeof(kExifApp1Marker));
    memcpy(appSegmentBuffer + sizeof(kExifApp1Marker), newApp1Segment, newApp1Length);
    if (appSegmentSize - app1Size > 0) {
        memcpy(appSegmentBuffer + sizeof(kExifApp1Marker) + newApp1Length,
                inputFrame.appSegmentBuffer.data + app1Size, appSegmentSize - app1Size);
    }
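    // Layout of the assembled buffer handed to the muxer (as read from the code
    // above; 0xFF 0xE1 is the standard JPEG APP1 marker):
    //   'E' 'x' 'i' 'f' | 0xFF 0xE1 | newApp1Length (2 bytes, big endian)
    //   | regenerated APP1 payload | remaining APPn segments copied verbatim.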

    sp<ABuffer> aBuffer = new ABuffer(appSegmentBuffer, appSegmentBufferSize);
    auto res = inputFrame.muxer->writeSampleData(aBuffer, inputFrame.trackIndex,
            timestamp, MediaCodec::BUFFER_FLAG_MUXER_DATA);
    delete[] appSegmentBuffer;

    if (res != OK) {
        ALOGE("%s: Failed to write JPEG APP segments to muxer: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    ALOGV("%s: [%" PRId64 "]: appSegmentSize is %zu, width %d, height %d, app1Size %zu",
            __FUNCTION__, timestamp, appSegmentSize, inputFrame.appSegmentBuffer.width,
            inputFrame.appSegmentBuffer.height, app1Size);

    inputFrame.appSegmentWritten = true;
    // Release the buffer now so any pending input app segments can be processed
    mAppSegmentConsumer->unlockBuffer(inputFrame.appSegmentBuffer);
    inputFrame.appSegmentBuffer.data = nullptr;
    mLockedAppSegmentBufferCnt--;

    return OK;
}

status_t HeicCompositeStream::processCodecInputFrame(InputFrame &inputFrame) {
    for (auto& inputBuffer : inputFrame.codecInputBuffers) {
        sp<MediaCodecBuffer> buffer;
        auto res = mCodec->getInputBuffer(inputBuffer.index, &buffer);
        if (res != OK) {
            ALOGE("%s: Error getting codec input buffer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }

        // Copy one tile from source to destination.
        size_t tileX = inputBuffer.tileIndex % mGridCols;
        size_t tileY = inputBuffer.tileIndex / mGridCols;
        size_t top = mGridHeight * tileY;
        size_t left = mGridWidth * tileX;
        size_t width = (tileX == static_cast<size_t>(mGridCols) - 1) ?
                mOutputWidth - tileX * mGridWidth : mGridWidth;
        size_t height = (tileY == static_cast<size_t>(mGridRows) - 1) ?
                mOutputHeight - tileY * mGridHeight : mGridHeight;
        ALOGV("%s: inputBuffer tileIndex [%zu, %zu], top %zu, left %zu, width %zu, height %zu,"
                " timeUs %" PRId64, __FUNCTION__, tileX, tileY, top, left, width, height,
                inputBuffer.timeUs);

        res = copyOneYuvTile(buffer, inputFrame.yuvBuffer, top, left, width, height);
        if (res != OK) {
            ALOGE("%s: Failed to copy YUV tile %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }

        res = mCodec->queueInputBuffer(inputBuffer.index, 0, buffer->capacity(),
                inputBuffer.timeUs, 0, nullptr /*errorDetailMsg*/);
        if (res != OK) {
            ALOGE("%s: Failed to queueInputBuffer to Codec: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    }

    inputFrame.codecInputBuffers.clear();
    return OK;
}

status_t HeicCompositeStream::processOneCodecOutputFrame(nsecs_t timestamp,
        InputFrame &inputFrame) {
    auto it = inputFrame.codecOutputBuffers.begin();
    sp<MediaCodecBuffer> buffer;
    status_t res = mCodec->getOutputBuffer(it->index, &buffer);
    if (res != OK) {
        ALOGE("%s: Error getting Heic codec output buffer at index %d: %s (%d)",
                __FUNCTION__, it->index, strerror(-res), res);
        return res;
    }
    if (buffer == nullptr) {
        ALOGE("%s: Invalid Heic codec output buffer at index %d",
                __FUNCTION__, it->index);
        return BAD_VALUE;
    }

    sp<ABuffer> aBuffer = new ABuffer(buffer->data(), buffer->size());
    res = inputFrame.muxer->writeSampleData(
            aBuffer, inputFrame.trackIndex, timestamp, 0 /*flags*/);
    if (res != OK) {
        ALOGE("%s: Failed to write buffer index %d to muxer: %s (%d)",
                __FUNCTION__, it->index, strerror(-res), res);
        return res;
    }

    mCodec->releaseOutputBuffer(it->index);
    if (inputFrame.pendingOutputTiles == 0) {
        ALOGW("%s: Codec generated more tiles than expected!", __FUNCTION__);
    } else {
        inputFrame.pendingOutputTiles--;
    }

    inputFrame.codecOutputBuffers.erase(inputFrame.codecOutputBuffers.begin());

    ALOGV("%s: [%" PRId64 "]: Output buffer index %d",
            __FUNCTION__, timestamp, it->index);
    return OK;
}

status_t HeicCompositeStream::processCompletedInputFrame(nsecs_t timestamp,
        InputFrame &inputFrame) {
    sp<ANativeWindow> outputANW = mOutputSurface;
    inputFrame.muxer->stop();

    // Copy the content of the file to memory.
    sp<GraphicBuffer> gb = GraphicBuffer::from(inputFrame.anb);
    void* dstBuffer;
    auto res = gb->lockAsync(GRALLOC_USAGE_SW_WRITE_OFTEN, &dstBuffer, inputFrame.fenceFd);
    if (res != OK) {
        ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    off_t fSize = lseek(inputFrame.fileFd, 0, SEEK_END);
    if (static_cast<size_t>(fSize) > mMaxHeicBufferSize - sizeof(CameraBlob)) {
        ALOGE("%s: Error: MediaMuxer output size %ld is larger than buffer size %zu",
                __FUNCTION__, fSize, mMaxHeicBufferSize - sizeof(CameraBlob));
        return BAD_VALUE;
    }

    lseek(inputFrame.fileFd, 0, SEEK_SET);
    ssize_t bytesRead = read(inputFrame.fileFd, dstBuffer, fSize);
    if (bytesRead < fSize) {
        ALOGE("%s: Only %zd of %ld bytes read", __FUNCTION__, bytesRead, fSize);
        return BAD_VALUE;
    }

    close(inputFrame.fileFd);
    inputFrame.fileFd = -1;

    // Fill in HEIC header
    uint8_t *header = static_cast<uint8_t*>(dstBuffer) + mMaxHeicBufferSize - sizeof(CameraBlob);
    struct CameraBlob *blobHeader = (struct CameraBlob *)header;
    // Must be in sync with CAMERA3_HEIC_BLOB_ID in android_media_Utils.cpp
    blobHeader->blobId = static_cast<CameraBlobId>(0x00FE);
    blobHeader->blobSize = fSize;

    res = native_window_set_buffers_timestamp(mOutputSurface.get(), timestamp);
    if (res != OK) {
        ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
                __FUNCTION__, getStreamId(), strerror(-res), res);
        return res;
    }

    res = outputANW->queueBuffer(mOutputSurface.get(), inputFrame.anb, /*fence*/ -1);
    if (res != OK) {
        ALOGE("%s: Failed to queueBuffer to Heic stream: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }
    inputFrame.anb = nullptr;
    mDequeuedOutputBufferCnt--;

    ALOGV("%s: [%" PRId64 "]", __FUNCTION__, timestamp);
    ATRACE_ASYNC_END("HEIC capture", inputFrame.frameNumber);
    return OK;
}


void HeicCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/) {
    if (inputFrame == nullptr) {
        return;
    }

    if (inputFrame->appSegmentBuffer.data != nullptr) {
        mAppSegmentConsumer->unlockBuffer(inputFrame->appSegmentBuffer);
        inputFrame->appSegmentBuffer.data = nullptr;
    }

    while (!inputFrame->codecOutputBuffers.empty()) {
        auto it = inputFrame->codecOutputBuffers.begin();
        ALOGV("%s: releaseOutputBuffer index %d", __FUNCTION__, it->index);
        mCodec->releaseOutputBuffer(it->index);
        inputFrame->codecOutputBuffers.erase(it);
    }

    if (inputFrame->yuvBuffer.data != nullptr) {
        mMainImageConsumer->unlockBuffer(inputFrame->yuvBuffer);
        inputFrame->yuvBuffer.data = nullptr;
        mYuvBufferAcquired = false;
    }

    while (!inputFrame->codecInputBuffers.empty()) {
        auto it = inputFrame->codecInputBuffers.begin();
        inputFrame->codecInputBuffers.erase(it);
    }

    if ((inputFrame->error || mErrorState) && !inputFrame->errorNotified) {
        notifyError(inputFrame->frameNumber);
        inputFrame->errorNotified = true;
    }

    if (inputFrame->fileFd >= 0) {
        close(inputFrame->fileFd);
        inputFrame->fileFd = -1;
    }

    if (inputFrame->anb != nullptr) {
        sp<ANativeWindow> outputANW = mOutputSurface;
        outputANW->cancelBuffer(mOutputSurface.get(), inputFrame->anb, /*fence*/ -1);
        inputFrame->anb = nullptr;
    }
}

void HeicCompositeStream::releaseInputFramesLocked() {
    auto it = mPendingInputFrames.begin();
    bool inputFrameDone = false;
    while (it != mPendingInputFrames.end()) {
        auto& inputFrame = it->second;
        if (inputFrame.error ||
                (inputFrame.appSegmentWritten && inputFrame.pendingOutputTiles == 0)) {
            releaseInputFrameLocked(&inputFrame);
            it = mPendingInputFrames.erase(it);
            inputFrameDone = true;
        } else {
            it++;
        }
    }

    // Update codec quality based on first upcoming input frame.
    // Note that when encoding is in surface mode, currently there is no
    // way for camera service to synchronize quality setting on a per-frame
    // basis: we don't get notification when codec is ready to consume a new
    // input frame. So we update codec quality on a best-effort basis.
    if (inputFrameDone) {
        auto firstPendingFrame = mPendingInputFrames.begin();
        if (firstPendingFrame != mPendingInputFrames.end()) {
            updateCodecQualityLocked(firstPendingFrame->second.quality);
        }
    }
}

status_t HeicCompositeStream::initializeCodec(uint32_t width, uint32_t height,
        const sp<CameraDeviceBase>& cameraDevice) {
    ALOGV("%s", __FUNCTION__);

    bool useGrid = false;
    AString hevcName;
    bool isSizeSupported = isSizeSupportedByHeifEncoder(width, height,
            &mUseHeic, &useGrid, nullptr, &hevcName);
    if (!isSizeSupported) {
        ALOGE("%s: Encoder doesn't support size %u x %u!",
                __FUNCTION__, width, height);
        return BAD_VALUE;
    }

    // Create Looper for MediaCodec.
    auto desiredMime = mUseHeic ? MIMETYPE_IMAGE_ANDROID_HEIC : MIMETYPE_VIDEO_HEVC;
    mCodecLooper = new ALooper;
    mCodecLooper->setName("Camera3-HeicComposite-MediaCodecLooper");
    status_t res = mCodecLooper->start(
            false,   // runOnCallingThread
            false,   // canCallJava
            PRIORITY_AUDIO);
    if (res != OK) {
        ALOGE("%s: Failed to start codec looper: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return NO_INIT;
    }

    // Create HEIC/HEVC codec.
    if (mUseHeic) {
        mCodec = MediaCodec::CreateByType(mCodecLooper, desiredMime, true /*encoder*/);
    } else {
        mCodec = MediaCodec::CreateByComponentName(mCodecLooper, hevcName);
    }
    if (mCodec == nullptr) {
        ALOGE("%s: Failed to create codec for %s", __FUNCTION__, desiredMime);
        return NO_INIT;
    }

    // Create Looper and handler for Codec callback.
    mCodecCallbackHandler = new CodecCallbackHandler(this);
    if (mCodecCallbackHandler == nullptr) {
        ALOGE("%s: Failed to create codec callback handler", __FUNCTION__);
        return NO_MEMORY;
    }
    mCallbackLooper = new ALooper;
    mCallbackLooper->setName("Camera3-HeicComposite-MediaCodecCallbackLooper");
    res = mCallbackLooper->start(
            false,   // runOnCallingThread
            false,   // canCallJava
            PRIORITY_AUDIO);
    if (res != OK) {
        ALOGE("%s: Failed to start media callback looper: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return NO_INIT;
    }
    mCallbackLooper->registerHandler(mCodecCallbackHandler);

    mAsyncNotify = new AMessage(kWhatCallbackNotify, mCodecCallbackHandler);
    res = mCodec->setCallback(mAsyncNotify);
    if (res != OK) {
        ALOGE("%s: Failed to set MediaCodec callback: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    // Create output format and configure the Codec.
    sp<AMessage> outputFormat = new AMessage();
    outputFormat->setString(KEY_MIME, desiredMime);
    outputFormat->setInt32(KEY_BITRATE_MODE, BITRATE_MODE_CQ);
    outputFormat->setInt32(KEY_QUALITY, kDefaultJpegQuality);
    // Ask codec to skip timestamp check and encode all frames.
    outputFormat->setInt64(KEY_MAX_PTS_GAP_TO_ENCODER, kNoFrameDropMaxPtsGap);

    int32_t gridWidth, gridHeight, gridRows, gridCols;
    if (useGrid || mUseHeic) {
        gridWidth = HeicEncoderInfoManager::kGridWidth;
        gridHeight = HeicEncoderInfoManager::kGridHeight;
        gridRows = (height + gridHeight - 1)/gridHeight;
        gridCols = (width + gridWidth - 1)/gridWidth;

        if (mUseHeic) {
            outputFormat->setInt32(KEY_TILE_WIDTH, gridWidth);
            outputFormat->setInt32(KEY_TILE_HEIGHT, gridHeight);
            outputFormat->setInt32(KEY_GRID_COLUMNS, gridCols);
            outputFormat->setInt32(KEY_GRID_ROWS, gridRows);
        }

    } else {
        gridWidth = width;
        gridHeight = height;
        gridRows = 1;
        gridCols = 1;
    }

    outputFormat->setInt32(KEY_WIDTH, !useGrid ? width : gridWidth);
    outputFormat->setInt32(KEY_HEIGHT, !useGrid ? height : gridHeight);
    outputFormat->setInt32(KEY_I_FRAME_INTERVAL, 0);
    outputFormat->setInt32(KEY_COLOR_FORMAT,
            useGrid ? COLOR_FormatYUV420Flexible : COLOR_FormatSurface);
    outputFormat->setInt32(KEY_FRAME_RATE, useGrid ? gridRows * gridCols : kNoGridOpRate);
    // This only serves as a hint to encoder when encoding is not real-time.
    outputFormat->setInt32(KEY_OPERATING_RATE, useGrid ? kGridOpRate : kNoGridOpRate);

    res = mCodec->configure(outputFormat, nullptr /*nativeWindow*/,
            nullptr /*crypto*/, CONFIGURE_FLAG_ENCODE);
    if (res != OK) {
        ALOGE("%s: Failed to configure codec: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    mGridWidth = gridWidth;
    mGridHeight = gridHeight;
    mGridRows = gridRows;
    mGridCols = gridCols;
    mUseGrid = useGrid;
    mOutputWidth = width;
    mOutputHeight = height;
    mAppSegmentMaxSize = calcAppSegmentMaxSize(cameraDevice->info());
    mMaxHeicBufferSize = mOutputWidth * mOutputHeight * 3 / 2 + mAppSegmentMaxSize;
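    // Sizing note: mMaxHeicBufferSize bounds the muxed HEIC output with the
    // uncompressed YUV420 size (3/2 bytes per pixel) plus the worst-case APP segment
    // size; processCompletedInputFrame() checks that the muxed file plus the trailing
    // CameraBlob header fits within this buffer.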

    return OK;
}

void HeicCompositeStream::deinitCodec() {
    ALOGV("%s", __FUNCTION__);
    if (mCodec != nullptr) {
        mCodec->stop();
        mCodec->release();
        mCodec.clear();
    }

    if (mCodecLooper != nullptr) {
        mCodecLooper->stop();
        mCodecLooper.clear();
    }

    if (mCallbackLooper != nullptr) {
        mCallbackLooper->stop();
        mCallbackLooper.clear();
    }

    mAsyncNotify.clear();
    mFormat.clear();
}

// Return the size of the complete list of APP segments; 0 indicates failure.
size_t HeicCompositeStream::findAppSegmentsSize(const uint8_t* appSegmentBuffer,
        size_t maxSize, size_t *app1SegmentSize) {
    if (appSegmentBuffer == nullptr || app1SegmentSize == nullptr) {
        ALOGE("%s: Invalid input appSegmentBuffer %p, app1SegmentSize %p",
                __FUNCTION__, appSegmentBuffer, app1SegmentSize);
        return 0;
    }

    size_t expectedSize = 0;
    // First check for EXIF transport header at the end of the buffer
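    // Expected buffer layout (as parsed below): the APP1..APPn segments start at
    // offset 0, followed by unused padding, with a CameraBlob{blobId, blobSize}
    // transport header occupying the last sizeof(CameraBlob) bytes of the buffer;
    // blobSize gives the total size of the segment data at the front.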
    const uint8_t *header = appSegmentBuffer + (maxSize - sizeof(struct CameraBlob));
    const struct CameraBlob *blob = (const struct CameraBlob*)(header);
    if (blob->blobId != CameraBlobId::JPEG_APP_SEGMENTS) {
        ALOGE("%s: Invalid EXIF blobId %hu", __FUNCTION__, blob->blobId);
        return 0;
    }

    expectedSize = blob->blobSize;
    if (expectedSize == 0 || expectedSize > maxSize - sizeof(struct CameraBlob)) {
        ALOGE("%s: Invalid blobSize %zu.", __FUNCTION__, expectedSize);
        return 0;
    }

    uint32_t totalSize = 0;

    // Verify APP1 marker (mandatory)
    uint8_t app1Marker[] = {0xFF, 0xE1};
    if (memcmp(appSegmentBuffer, app1Marker, sizeof(app1Marker))) {
        ALOGE("%s: Invalid APP1 marker: %x, %x", __FUNCTION__,
                appSegmentBuffer[0], appSegmentBuffer[1]);
        return 0;
    }
    totalSize += sizeof(app1Marker);

    uint16_t app1Size = (static_cast<uint16_t>(appSegmentBuffer[totalSize]) << 8) +
            appSegmentBuffer[totalSize+1];
    totalSize += app1Size;

    ALOGV("%s: Expected APP segments size %zu, APP1 segment size %u",
            __FUNCTION__, expectedSize, app1Size);
    while (totalSize < expectedSize) {
        if (appSegmentBuffer[totalSize] != 0xFF ||
                appSegmentBuffer[totalSize+1] <= 0xE1 ||
                appSegmentBuffer[totalSize+1] > 0xEF) {
            // Invalid APPn marker
            ALOGE("%s: Invalid APPn marker: %x, %x", __FUNCTION__,
                    appSegmentBuffer[totalSize], appSegmentBuffer[totalSize+1]);
            return 0;
        }
        totalSize += 2;

        uint16_t appnSize = (static_cast<uint16_t>(appSegmentBuffer[totalSize]) << 8) +
                appSegmentBuffer[totalSize+1];
        totalSize += appnSize;
    }

    if (totalSize != expectedSize) {
        ALOGE("%s: Invalid JPEG APP segments: totalSize %u vs expected size %zu",
                __FUNCTION__, totalSize, expectedSize);
        return 0;
    }

    *app1SegmentSize = app1Size + sizeof(app1Marker);
    return expectedSize;
}

int64_t HeicCompositeStream::findTimestampInNsLocked(int64_t timeInUs) {
    for (const auto& fn : mFrameNumberMap) {
        if (timeInUs == ns2us(fn.second)) {
            return fn.second;
        }
    }
    for (const auto& inputFrame : mPendingInputFrames) {
        if (timeInUs == ns2us(inputFrame.first)) {
            return inputFrame.first;
        }
    }
    return -1;
}

status_t HeicCompositeStream::copyOneYuvTile(sp<MediaCodecBuffer>& codecBuffer,
        const CpuConsumer::LockedBuffer& yuvBuffer,
        size_t top, size_t left, size_t width, size_t height) {
    ATRACE_CALL();

    // Get stride information for codecBuffer
    sp<ABuffer> imageData;
    if (!codecBuffer->meta()->findBuffer("image-data", &imageData)) {
        ALOGE("%s: Codec input buffer is not for image data!", __FUNCTION__);
        return BAD_VALUE;
    }
    if (imageData->size() != sizeof(MediaImage2)) {
        ALOGE("%s: Invalid codec input image size %zu, expected %zu",
                __FUNCTION__, imageData->size(), sizeof(MediaImage2));
        return BAD_VALUE;
    }
    MediaImage2* imageInfo = reinterpret_cast<MediaImage2*>(imageData->data());
    if (imageInfo->mType != MediaImage2::MEDIA_IMAGE_TYPE_YUV ||
            imageInfo->mBitDepth != 8 ||
            imageInfo->mBitDepthAllocated != 8 ||
            imageInfo->mNumPlanes != 3) {
        ALOGE("%s: Invalid codec input image info: mType %d, mBitDepth %d, "
                "mBitDepthAllocated %d, mNumPlanes %d!", __FUNCTION__,
                imageInfo->mType, imageInfo->mBitDepth,
                imageInfo->mBitDepthAllocated, imageInfo->mNumPlanes);
        return BAD_VALUE;
    }

    ALOGV("%s: yuvBuffer chromaStep %d, chromaStride %d",
            __FUNCTION__, yuvBuffer.chromaStep, yuvBuffer.chromaStride);
    ALOGV("%s: U offset %u, V offset %u, U rowInc %d, V rowInc %d, U colInc %d, V colInc %d",
            __FUNCTION__, imageInfo->mPlane[MediaImage2::U].mOffset,
            imageInfo->mPlane[MediaImage2::V].mOffset,
            imageInfo->mPlane[MediaImage2::U].mRowInc,
            imageInfo->mPlane[MediaImage2::V].mRowInc,
            imageInfo->mPlane[MediaImage2::U].mColInc,
            imageInfo->mPlane[MediaImage2::V].mColInc);

    // Y
    for (auto row = top; row < top+height; row++) {
        uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::Y].mOffset +
                imageInfo->mPlane[MediaImage2::Y].mRowInc * (row - top);
        mFnCopyRow(yuvBuffer.data+row*yuvBuffer.stride+left, dst, width);
    }

    // U is Cb, V is Cr
    bool codecUPlaneFirst = imageInfo->mPlane[MediaImage2::V].mOffset >
            imageInfo->mPlane[MediaImage2::U].mOffset;
    uint32_t codecUvOffsetDiff = codecUPlaneFirst ?
            imageInfo->mPlane[MediaImage2::V].mOffset - imageInfo->mPlane[MediaImage2::U].mOffset :
            imageInfo->mPlane[MediaImage2::U].mOffset - imageInfo->mPlane[MediaImage2::V].mOffset;
    bool isCodecUvSemiplannar = (codecUvOffsetDiff == 1) &&
            (imageInfo->mPlane[MediaImage2::U].mRowInc ==
            imageInfo->mPlane[MediaImage2::V].mRowInc) &&
            (imageInfo->mPlane[MediaImage2::U].mColInc == 2) &&
            (imageInfo->mPlane[MediaImage2::V].mColInc == 2);
    bool isCodecUvPlannar =
            ((codecUPlaneFirst && codecUvOffsetDiff >=
                    imageInfo->mPlane[MediaImage2::U].mRowInc * imageInfo->mHeight/2) ||
            ((!codecUPlaneFirst && codecUvOffsetDiff >=
                    imageInfo->mPlane[MediaImage2::V].mRowInc * imageInfo->mHeight/2))) &&
            imageInfo->mPlane[MediaImage2::U].mColInc == 1 &&
            imageInfo->mPlane[MediaImage2::V].mColInc == 1;
    bool cameraUPlaneFirst = yuvBuffer.dataCr > yuvBuffer.dataCb;
1478
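    // Choose a chroma copy strategy: interleaved (semiplanar) destination chroma
    // with matching UV order can be copied row by row; fully planar destination
    // chroma with unit column increments can be copied plane by plane; anything
    // else falls back to a per-pixel copy that also reorders UV as needed.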
    if (isCodecUvSemiplannar && yuvBuffer.chromaStep == 2 &&
            (codecUPlaneFirst == cameraUPlaneFirst)) {
        // UV semiplanar
        // The chroma plane could be either Cb first, or Cr first. Take the
        // smaller address.
        uint8_t *src = std::min(yuvBuffer.dataCb, yuvBuffer.dataCr);
        MediaImage2::PlaneIndex dstPlane = codecUPlaneFirst ? MediaImage2::U : MediaImage2::V;
        for (auto row = top/2; row < (top+height)/2; row++) {
            uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[dstPlane].mOffset +
                    imageInfo->mPlane[dstPlane].mRowInc * (row - top/2);
            mFnCopyRow(src+row*yuvBuffer.chromaStride+left, dst, width);
        }
    } else if (isCodecUvPlannar && yuvBuffer.chromaStep == 1) {
        // U plane
        for (auto row = top/2; row < (top+height)/2; row++) {
            uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::U].mOffset +
                    imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2);
            mFnCopyRow(yuvBuffer.dataCb+row*yuvBuffer.chromaStride+left/2, dst, width/2);
        }

        // V plane
        for (auto row = top/2; row < (top+height)/2; row++) {
            uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::V].mOffset +
                    imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2);
            mFnCopyRow(yuvBuffer.dataCr+row*yuvBuffer.chromaStride+left/2, dst, width/2);
        }
    } else {
        // Convert between semiplanar and planar, or when UV orders are
        // different.
        uint8_t *dst = codecBuffer->data();
        for (auto row = top/2; row < (top+height)/2; row++) {
            for (auto col = left/2; col < (left+width)/2; col++) {
                // U/Cb
                int32_t dstIndex = imageInfo->mPlane[MediaImage2::U].mOffset +
                        imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2) +
                        imageInfo->mPlane[MediaImage2::U].mColInc * (col - left/2);
                int32_t srcIndex = row * yuvBuffer.chromaStride + yuvBuffer.chromaStep * col;
                dst[dstIndex] = yuvBuffer.dataCb[srcIndex];

                // V/Cr
                dstIndex = imageInfo->mPlane[MediaImage2::V].mOffset +
                        imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2) +
                        imageInfo->mPlane[MediaImage2::V].mColInc * (col - left/2);
                srcIndex = row * yuvBuffer.chromaStride + yuvBuffer.chromaStep * col;
                dst[dstIndex] = yuvBuffer.dataCr[srcIndex];
            }
        }
    }
    return OK;
}

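// Pick the fastest libyuv row-copy kernel available for this build and CPU; the
// HAS_COPYROW_* macros are only defined for targets where the corresponding
// optimized routine exists, and the plain C implementation remains the fallback.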
void HeicCompositeStream::initCopyRowFunction(int32_t width)
{
    using namespace libyuv;

    mFnCopyRow = CopyRow_C;
#if defined(HAS_COPYROW_SSE2)
    if (TestCpuFlag(kCpuHasSSE2)) {
        mFnCopyRow = IS_ALIGNED(width, 32) ? CopyRow_SSE2 : CopyRow_Any_SSE2;
    }
#endif
#if defined(HAS_COPYROW_AVX)
    if (TestCpuFlag(kCpuHasAVX)) {
        mFnCopyRow = IS_ALIGNED(width, 64) ? CopyRow_AVX : CopyRow_Any_AVX;
    }
#endif
#if defined(HAS_COPYROW_ERMS)
    if (TestCpuFlag(kCpuHasERMS)) {
        mFnCopyRow = CopyRow_ERMS;
    }
#endif
#if defined(HAS_COPYROW_NEON)
    if (TestCpuFlag(kCpuHasNEON)) {
        mFnCopyRow = IS_ALIGNED(width, 32) ? CopyRow_NEON : CopyRow_Any_NEON;
    }
#endif
#if defined(HAS_COPYROW_MIPS)
    if (TestCpuFlag(kCpuHasMIPS)) {
        mFnCopyRow = CopyRow_MIPS;
    }
#endif
}

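// Worst-case size of the APP segments buffer: each JPEG APPn segment contributes
// at most a 2-byte marker plus a 0xFFFF-byte length-and-payload field, and a
// CameraBlob transport header is appended at the end. With the default of a
// single segment, for example, this evaluates to 2 + 65535 + sizeof(CameraBlob).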
size_t HeicCompositeStream::calcAppSegmentMaxSize(const CameraMetadata& info) {
    camera_metadata_ro_entry_t entry = info.find(ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT);
    size_t maxAppsSegment = 1;
    if (entry.count > 0) {
        maxAppsSegment = entry.data.u8[0] < 1 ? 1 :
                entry.data.u8[0] > 16 ? 16 : entry.data.u8[0];
    }
    return maxAppsSegment * (2 + 0xFFFF) + sizeof(struct CameraBlob);
}

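// Update the compression quality used for subsequent codec inputs. The value is
// handed to the codec via setParameters() with PARAMETER_KEY_VIDEO_BITRATE and
// cached in mQuality so the codec is only poked when the quality actually changes.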
void HeicCompositeStream::updateCodecQualityLocked(int32_t quality) {
    if (quality != mQuality) {
        sp<AMessage> qualityParams = new AMessage;
        qualityParams->setInt32(PARAMETER_KEY_VIDEO_BITRATE, quality);
        status_t res = mCodec->setParameters(qualityParams);
        if (res != OK) {
            ALOGE("%s: Failed to set codec quality: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
        } else {
            mQuality = quality;
        }
    }
}

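// Main processing loop: gather newly arrived app segment buffers, YUV buffers,
// codec output buffers and capture results, then process the next frame whose
// inputs are complete. Failed frames are flagged and released so that the client
// observes at most one buffer error per capture.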
bool HeicCompositeStream::threadLoop() {
    int64_t currentTs = INT64_MAX;
    bool newInputAvailable = false;

    {
        Mutex::Autolock l(mMutex);
        if (mErrorState) {
            // In case we landed in error state, return any pending buffers and
            // halt all further processing.
            compilePendingInputLocked();
            releaseInputFramesLocked();
            return false;
        }

        while (!newInputAvailable) {
            compilePendingInputLocked();
            newInputAvailable = getNextReadyInputLocked(&currentTs);

            if (!newInputAvailable) {
                auto failingFrameNumber = getNextFailingInputLocked(&currentTs);
                if (failingFrameNumber >= 0) {
                    // We cannot erase 'mPendingInputFrames[currentTs]' at this point because it is
                    // possible for two internal stream buffers to fail. In such a scenario the
                    // composite stream should notify the client about a stream buffer error only
                    // once and this information is kept within 'errorNotified'.
                    // Any present failed input frames will be removed on a subsequent call to
                    // 'releaseInputFramesLocked()'.
                    releaseInputFrameLocked(&mPendingInputFrames[currentTs]);
                    currentTs = INT64_MAX;
                }

                auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration);
                if (ret == TIMED_OUT) {
                    return true;
                } else if (ret != OK) {
                    ALOGE("%s: Timed wait on condition failed: %s (%d)", __FUNCTION__,
                            strerror(-ret), ret);
                    return false;
                }
            }
        }
    }

    auto res = processInputFrame(currentTs, mPendingInputFrames[currentTs]);
    Mutex::Autolock l(mMutex);
    if (res != OK) {
        ALOGE("%s: Failed processing frame with timestamp: %" PRId64 ": %s (%d)",
                __FUNCTION__, currentTs, strerror(-res), res);
        mPendingInputFrames[currentTs].error = true;
    }

    releaseInputFramesLocked();

    return true;
}

bool HeicCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
    bool res = false;
    // Buffer errors concerning internal composite streams should not be directly visible to
    // camera clients. They must only receive a single buffer error with the public composite
    // stream id.
    if ((resultExtras.errorStreamId == mAppSegmentStreamId) ||
            (resultExtras.errorStreamId == mMainImageStreamId)) {
        flagAnErrorFrameNumber(resultExtras.frameNumber);
        res = true;
    }

    return res;
}

void HeicCompositeStream::onResultError(const CaptureResultExtras& resultExtras) {
    // For a result error, since the APP_SEGMENT buffer already contains EXIF,
    // simply skip using the capture result metadata to override EXIF.
    Mutex::Autolock l(mMutex);

    int64_t timestamp = -1;
    for (const auto& fn : mFrameNumberMap) {
        if (fn.first == resultExtras.frameNumber) {
            timestamp = fn.second;
            break;
        }
    }
    if (timestamp == -1) {
        for (const auto& inputFrame : mPendingInputFrames) {
            if (inputFrame.second.frameNumber == resultExtras.frameNumber) {
                timestamp = inputFrame.first;
                break;
            }
        }
    }

    if (timestamp == -1) {
        ALOGE("%s: Failed to find shutter timestamp for result error!", __FUNCTION__);
        return;
    }

    mCaptureResults.emplace(timestamp, std::make_tuple(resultExtras.frameNumber, CameraMetadata()));
    mInputReadyCondition.signal();
}

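// Dispatch asynchronous MediaCodec callbacks (input/output buffer availability,
// output format changes, and codec errors) back to the owning HeicCompositeStream.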
void HeicCompositeStream::CodecCallbackHandler::onMessageReceived(const sp<AMessage> &msg) {
    sp<HeicCompositeStream> parent = mParent.promote();
    if (parent == nullptr) return;

    switch (msg->what()) {
        case kWhatCallbackNotify: {
            int32_t cbID;
            if (!msg->findInt32("callbackID", &cbID)) {
                ALOGE("kWhatCallbackNotify: callbackID is expected.");
                break;
            }

            ALOGV("kWhatCallbackNotify: cbID = %d", cbID);

            switch (cbID) {
                case MediaCodec::CB_INPUT_AVAILABLE: {
                    int32_t index;
                    if (!msg->findInt32("index", &index)) {
                        ALOGE("CB_INPUT_AVAILABLE: index is expected.");
                        break;
                    }
                    parent->onHeicInputFrameAvailable(index);
                    break;
                }

                case MediaCodec::CB_OUTPUT_AVAILABLE: {
                    int32_t index;
                    size_t offset;
                    size_t size;
                    int64_t timeUs;
                    int32_t flags;

                    if (!msg->findInt32("index", &index)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: index is expected.");
                        break;
                    }
                    if (!msg->findSize("offset", &offset)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: offset is expected.");
                        break;
                    }
                    if (!msg->findSize("size", &size)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: size is expected.");
                        break;
                    }
                    if (!msg->findInt64("timeUs", &timeUs)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: timeUs is expected.");
                        break;
                    }
                    if (!msg->findInt32("flags", &flags)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: flags is expected.");
                        break;
                    }

                    CodecOutputBufferInfo bufferInfo = {
                        index,
                        (int32_t)offset,
                        (int32_t)size,
                        timeUs,
                        (uint32_t)flags};

                    parent->onHeicOutputFrameAvailable(bufferInfo);
                    break;
                }

                case MediaCodec::CB_OUTPUT_FORMAT_CHANGED: {
                    sp<AMessage> format;
                    if (!msg->findMessage("format", &format)) {
                        ALOGE("CB_OUTPUT_FORMAT_CHANGED: format is expected.");
                        break;
                    }
                    // Here format is MediaCodec's internal copy of output format.
                    // Make a copy since onHeicFormatChanged() might modify it.
                    sp<AMessage> formatCopy;
                    if (format != nullptr) {
                        formatCopy = format->dup();
                    }
                    parent->onHeicFormatChanged(formatCopy);
                    break;
                }

                case MediaCodec::CB_ERROR: {
                    status_t err;
                    int32_t actionCode;
                    AString detail;
                    if (!msg->findInt32("err", &err)) {
                        ALOGE("CB_ERROR: err is expected.");
                        break;
                    }
                    if (!msg->findInt32("action", &actionCode)) {
                        ALOGE("CB_ERROR: action is expected.");
                        break;
                    }
                    msg->findString("detail", &detail);
                    ALOGE("Codec reported error(0x%x), actionCode(%d), detail(%s)",
                            err, actionCode, detail.c_str());

                    parent->onHeicCodecError();
                    break;
                }

                default: {
                    ALOGE("kWhatCallbackNotify: callbackID(%d) is unexpected.", cbID);
                    break;
                }
            }
            break;
        }

        default:
            ALOGE("shouldn't be here");
            break;
    }
}

}; // namespace camera3
}; // namespace android