/*
 * Copyright (C) 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "SessionConfigurationUtils.h"
#include "../api2/DepthCompositeStream.h"
#include "../api2/HeicCompositeStream.h"
#include "common/CameraDeviceBase.h"
#include "../CameraService.h"
#include "device3/Camera3Device.h"
#include "device3/Camera3OutputStream.h"

// Convenience methods for constructing binder::Status objects for error returns

#define STATUS_ERROR(errorCode, errorString) \
    binder::Status::fromServiceSpecificError(errorCode, \
            String8::format("%s:%d: %s", __FUNCTION__, __LINE__, errorString))

#define STATUS_ERROR_FMT(errorCode, errorString, ...) \
    binder::Status::fromServiceSpecificError(errorCode, \
            String8::format("%s:%d: " errorString, __FUNCTION__, __LINE__, \
                    __VA_ARGS__))

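// Illustrative use of these macros (values hypothetical):
//     return STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
//             "Unsupported stream format %#x", format);
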
using android::camera3::OutputStreamInfo;
using android::hardware::camera2::ICameraDeviceUser;

namespace android {

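// Returns the squared Euclidean distance between (x0, y0) and (x1, y1). Used below to
// pick the supported stream size closest to a requested size.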
int64_t SessionConfigurationUtils::euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1) {
    int64_t d0 = x0 - x1;
    int64_t d1 = y0 - y1;
    return d0 * d0 + d1 * d1;
}

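// Rounds (width, height) to the nearest supported dimensions listed for the given
// format and dataspace in the static metadata, writing the result to *outWidth and
// *outHeight when those pointers are non-null. Returns false if the format has no
// listed stream configurations.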
bool SessionConfigurationUtils::roundBufferDimensionNearest(int32_t width, int32_t height,
        int32_t format, android_dataspace dataSpace, const CameraMetadata& info,
        /*out*/int32_t* outWidth, /*out*/int32_t* outHeight) {

    camera_metadata_ro_entry streamConfigs =
            (dataSpace == HAL_DATASPACE_DEPTH) ?
            info.find(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS) :
            (dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) ?
            info.find(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS) :
            info.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);

    int32_t bestWidth = -1;
    int32_t bestHeight = -1;

    // Iterate through listed stream configurations and find the one with the smallest
    // Euclidean distance from the given dimensions for the given format.
    for (size_t i = 0; i < streamConfigs.count; i += 4) {
        int32_t fmt = streamConfigs.data.i32[i];
        int32_t w = streamConfigs.data.i32[i + 1];
        int32_t h = streamConfigs.data.i32[i + 2];

        // Ignore input/output type for now
        if (fmt == format) {
            if (w == width && h == height) {
                bestWidth = width;
                bestHeight = height;
                break;
            } else if (w <= ROUNDING_WIDTH_CAP && (bestWidth == -1 ||
                    SessionConfigurationUtils::euclidDistSquare(w, h, width, height) <
                    SessionConfigurationUtils::euclidDistSquare(bestWidth, bestHeight, width,
                            height))) {
                bestWidth = w;
                bestHeight = h;
            }
        }
    }

    if (bestWidth == -1) {
        // Return false if no configurations for this format were listed
        return false;
    }

    // Set the outputs to the closest width/height
    if (outWidth != NULL) {
        *outWidth = bestWidth;
    }
    if (outHeight != NULL) {
        *outHeight = bestHeight;
    }

    // Return true if at least one configuration for this format was listed
    return true;
}

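// Illustrative usage of roundBufferDimensionNearest (values are hypothetical): snap a
// requested 1920x1080 BLOB (JPEG) output to the closest size the device actually lists:
//     int32_t w, h;
//     if (SessionConfigurationUtils::roundBufferDimensionNearest(1920, 1080,
//             HAL_PIXEL_FORMAT_BLOB, HAL_DATASPACE_V0_JFIF, staticInfo,
//             /*out*/&w, /*out*/&h)) {
//         // (w, h) now holds a supported JPEG size.
//     }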
bool SessionConfigurationUtils::isPublicFormat(int32_t format)
{
    switch(format) {
        case HAL_PIXEL_FORMAT_RGBA_8888:
        case HAL_PIXEL_FORMAT_RGBX_8888:
        case HAL_PIXEL_FORMAT_RGB_888:
        case HAL_PIXEL_FORMAT_RGB_565:
        case HAL_PIXEL_FORMAT_BGRA_8888:
        case HAL_PIXEL_FORMAT_YV12:
        case HAL_PIXEL_FORMAT_Y8:
        case HAL_PIXEL_FORMAT_Y16:
        case HAL_PIXEL_FORMAT_RAW16:
        case HAL_PIXEL_FORMAT_RAW10:
        case HAL_PIXEL_FORMAT_RAW12:
        case HAL_PIXEL_FORMAT_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_BLOB:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_YCbCr_422_SP:
        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
        case HAL_PIXEL_FORMAT_YCbCr_422_I:
            return true;
        default:
            return false;
    }
}

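// Constructs a Surface from the given IGraphicBufferProducer and validates it against
// the (physical) camera's static metadata. When isStreamInfoValid is false, fills in
// streamInfo from the Surface; otherwise verifies that the Surface matches the
// existing streamInfo.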
binder::Status SessionConfigurationUtils::createSurfaceFromGbp(
        OutputStreamInfo& streamInfo, bool isStreamInfoValid,
        sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
        const String8 &cameraId, const CameraMetadata &physicalCameraMetadata) {

    // bufferProducer must be non-null
    if (gbp == nullptr) {
        String8 msg = String8::format("Camera %s: Surface is NULL", cameraId.string());
        ALOGW("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    // HACK b/10949105
    // Query consumer usage bits to set async operation mode for
    // GLConsumer using controlledByApp parameter.
    bool useAsync = false;
    uint64_t consumerUsage = 0;
    status_t err;
    if ((err = gbp->getConsumerUsage(&consumerUsage)) != OK) {
        String8 msg = String8::format("Camera %s: Failed to query Surface consumer usage: %s (%d)",
                cameraId.string(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
    }
    if (consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) {
        ALOGW("%s: Camera %s with consumer usage flag: %" PRIu64 ": Forcing asynchronous mode"
                " for stream", __FUNCTION__, cameraId.string(), consumerUsage);
        useAsync = true;
    }

    uint64_t disallowedFlags = GraphicBuffer::USAGE_HW_VIDEO_ENCODER |
            GRALLOC_USAGE_RENDERSCRIPT;
    uint64_t allowedFlags = GraphicBuffer::USAGE_SW_READ_MASK |
            GraphicBuffer::USAGE_HW_TEXTURE |
            GraphicBuffer::USAGE_HW_COMPOSER;
    bool flexibleConsumer = (consumerUsage & disallowedFlags) == 0 &&
            (consumerUsage & allowedFlags) != 0;

    surface = new Surface(gbp, useAsync);
    ANativeWindow *anw = surface.get();

    int width, height, format;
    android_dataspace dataSpace;
    if ((err = anw->query(anw, NATIVE_WINDOW_WIDTH, &width)) != OK) {
        String8 msg = String8::format("Camera %s: Failed to query Surface width: %s (%d)",
                cameraId.string(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
    }
    if ((err = anw->query(anw, NATIVE_WINDOW_HEIGHT, &height)) != OK) {
        String8 msg = String8::format("Camera %s: Failed to query Surface height: %s (%d)",
                cameraId.string(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
    }
    if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
        String8 msg = String8::format("Camera %s: Failed to query Surface format: %s (%d)",
                cameraId.string(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
    }
    if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE,
            reinterpret_cast<int*>(&dataSpace))) != OK) {
        String8 msg = String8::format("Camera %s: Failed to query Surface dataspace: %s (%d)",
                cameraId.string(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
    }

    // FIXME: remove this override since the default format should be
    // IMPLEMENTATION_DEFINED. b/9487482 & b/35317944
    if ((format >= HAL_PIXEL_FORMAT_RGBA_8888 && format <= HAL_PIXEL_FORMAT_BGRA_8888) &&
            ((consumerUsage & GRALLOC_USAGE_HW_MASK) &&
             ((consumerUsage & GRALLOC_USAGE_SW_READ_MASK) == 0))) {
        ALOGW("%s: Camera %s: Overriding format %#x to IMPLEMENTATION_DEFINED",
                __FUNCTION__, cameraId.string(), format);
        format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    }
    // Round dimensions to the nearest dimensions available for this format
    if (flexibleConsumer && isPublicFormat(format) &&
            !SessionConfigurationUtils::roundBufferDimensionNearest(width, height,
            format, dataSpace, physicalCameraMetadata, /*out*/&width, /*out*/&height)) {
        String8 msg = String8::format("Camera %s: No supported stream configurations with "
                "format %#x defined, failed to create output stream",
                cameraId.string(), format);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }

    if (!isStreamInfoValid) {
        streamInfo.width = width;
        streamInfo.height = height;
        streamInfo.format = format;
        streamInfo.dataSpace = dataSpace;
        streamInfo.consumerUsage = consumerUsage;
        return binder::Status::ok();
    }
    if (width != streamInfo.width) {
        String8 msg = String8::format("Camera %s: Surface width doesn't match: %d vs %d",
                cameraId.string(), width, streamInfo.width);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    if (height != streamInfo.height) {
        String8 msg = String8::format("Camera %s: Surface height doesn't match: %d vs %d",
                cameraId.string(), height, streamInfo.height);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    if (format != streamInfo.format) {
        String8 msg = String8::format("Camera %s: Surface format doesn't match: %d vs %d",
                cameraId.string(), format, streamInfo.format);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
        if (dataSpace != streamInfo.dataSpace) {
            String8 msg = String8::format("Camera %s: Surface dataSpace doesn't match: %d vs %d",
                    cameraId.string(), dataSpace, streamInfo.dataSpace);
            ALOGE("%s: %s", __FUNCTION__, msg.string());
            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
        }
        // At the native level there isn't a way to check whether two surfaces come from the
        // same Surface class type, so use the usage flags to approximate the comparison.
        if (consumerUsage != streamInfo.consumerUsage) {
            String8 msg = String8::format(
                    "Camera %s: Surface usage flag doesn't match %" PRIu64 " vs %" PRIu64 "",
                    cameraId.string(), consumerUsage, streamInfo.consumerUsage);
            ALOGE("%s: %s", __FUNCTION__, msg.string());
            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
        }
    }
    return binder::Status::ok();
}

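// Translates an OutputStreamInfo, rotation, and physical camera id into the HIDL
// V3_4::Stream representation used when querying the HAL about stream combinations.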
void SessionConfigurationUtils::mapStreamInfo(const OutputStreamInfo &streamInfo,
        camera3_stream_rotation_t rotation, String8 physicalId,
        hardware::camera::device::V3_4::Stream *stream /*out*/) {
    if (stream == nullptr) {
        return;
    }

    stream->v3_2.streamType = hardware::camera::device::V3_2::StreamType::OUTPUT;
    stream->v3_2.width = streamInfo.width;
    stream->v3_2.height = streamInfo.height;
    stream->v3_2.format = Camera3Device::mapToPixelFormat(streamInfo.format);
    auto u = streamInfo.consumerUsage;
    camera3::Camera3OutputStream::applyZSLUsageQuirk(streamInfo.format, &u);
    stream->v3_2.usage = Camera3Device::mapToConsumerUsage(u);
    stream->v3_2.dataSpace = Camera3Device::mapToHidlDataspace(streamInfo.dataSpace);
    stream->v3_2.rotation = Camera3Device::mapToStreamRotation(rotation);
    stream->v3_2.id = -1; // Invalid stream id
    stream->physicalCameraId = std::string(physicalId.string());
    stream->bufferSize = 0;
}

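// Verifies that physicalCameraId (when non-empty) is one of the physical cameras
// backing the given logical camera; returns ERROR_ILLEGAL_ARGUMENT otherwise.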
binder::Status SessionConfigurationUtils::checkPhysicalCameraId(
        const std::vector<std::string> &physicalCameraIds, const String8 &physicalCameraId,
        const String8 &logicalCameraId) {
    if (physicalCameraId.size() == 0) {
        return binder::Status::ok();
    }
    if (std::find(physicalCameraIds.begin(), physicalCameraIds.end(),
            physicalCameraId.string()) == physicalCameraIds.end()) {
        String8 msg = String8::format("Camera %s: Camera doesn't support physicalCameraId %s.",
                logicalCameraId.string(), physicalCameraId.string());
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    return binder::Status::ok();
}

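// Validates the surface count and surface type of an output configuration: at most
// MAX_SURFACES_PER_STREAM producers, at least one consumer unless the output is
// deferred, and a valid surface type (SurfaceView or SurfaceTexture) for deferred
// outputs.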
binder::Status SessionConfigurationUtils::checkSurfaceType(size_t numBufferProducers,
        bool deferredConsumer, int surfaceType) {
    if (numBufferProducers > MAX_SURFACES_PER_STREAM) {
        ALOGE("%s: GraphicBufferProducer count %zu for stream exceeds limit of %d",
                __FUNCTION__, numBufferProducers, MAX_SURFACES_PER_STREAM);
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Surface count is too high");
    } else if ((numBufferProducers == 0) && (!deferredConsumer)) {
        ALOGE("%s: Number of consumers cannot be smaller than 1", __FUNCTION__);
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "No valid consumers.");
    }

    bool validSurfaceType = ((surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) ||
            (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE));

    if (deferredConsumer && !validSurfaceType) {
        ALOGE("%s: Target surface has invalid surfaceType = %d.", __FUNCTION__, surfaceType);
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Target Surface is invalid");
    }

    return binder::Status::ok();
}

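// Checks that the requested session operating mode is non-negative and, for constrained
// high speed sessions, that the device advertises the CONSTRAINED_HIGH_SPEED_VIDEO
// capability in its static metadata.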
binder::Status SessionConfigurationUtils::checkOperatingMode(int operatingMode,
        const CameraMetadata &staticInfo, const String8 &cameraId) {
    if (operatingMode < 0) {
        String8 msg = String8::format(
                "Camera %s: Invalid operating mode %d requested", cameraId.string(), operatingMode);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                msg.string());
    }

    bool isConstrainedHighSpeed = (operatingMode == ICameraDeviceUser::CONSTRAINED_HIGH_SPEED_MODE);
    if (isConstrainedHighSpeed) {
        camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
        bool isConstrainedHighSpeedSupported = false;
        for (size_t i = 0; i < entry.count; ++i) {
            uint8_t capability = entry.data.u8[i];
            if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO) {
                isConstrainedHighSpeedSupported = true;
                break;
            }
        }
        if (!isConstrainedHighSpeedSupported) {
            String8 msg = String8::format(
                    "Camera %s: Attempt to create a constrained high speed configuration on a"
                    " device that doesn't support it.", cameraId.string());
            ALOGE("%s: %s", __FUNCTION__, msg.string());
            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                    msg.string());
        }
    }

    return binder::Status::ok();
}

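// Converts a SessionConfiguration received over binder into the HIDL
// V3_4::StreamConfiguration that can be passed to the camera HAL. getMetadata is used
// to fetch per-physical-camera static metadata; *earlyExit is set to true when a
// requested composite stream (depth/HEIC) reports no internal streams, i.e. is not
// supported on this device.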
binder::Status
SessionConfigurationUtils::convertToHALStreamCombination(
        const SessionConfiguration& sessionConfiguration,
        const String8 &logicalCameraId, const CameraMetadata &deviceInfo,
        metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
        hardware::camera::device::V3_4::StreamConfiguration &streamConfiguration, bool *earlyExit) {

    auto operatingMode = sessionConfiguration.getOperatingMode();
    binder::Status res = checkOperatingMode(operatingMode, deviceInfo, logicalCameraId);
    if (!res.isOk()) {
        return res;
    }

    if (earlyExit == nullptr) {
        String8 msg("earlyExit nullptr");
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    *earlyExit = false;
    auto ret = Camera3Device::mapToStreamConfigurationMode(
            static_cast<camera3_stream_configuration_mode_t> (operatingMode),
            /*out*/ &streamConfiguration.operationMode);
    if (ret != OK) {
        String8 msg = String8::format(
                "Camera %s: Failed mapping operating mode %d requested: %s (%d)",
                logicalCameraId.string(), operatingMode, strerror(-ret), ret);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                msg.string());
    }

    bool isInputValid = (sessionConfiguration.getInputWidth() > 0) &&
            (sessionConfiguration.getInputHeight() > 0) &&
            (sessionConfiguration.getInputFormat() > 0);
    auto outputConfigs = sessionConfiguration.getOutputConfigurations();
    size_t streamCount = outputConfigs.size();
    streamCount = isInputValid ? streamCount + 1 : streamCount;
    streamConfiguration.streams.resize(streamCount);
    size_t streamIdx = 0;
    if (isInputValid) {
        streamConfiguration.streams[streamIdx++] = {{/*streamId*/0,
                hardware::camera::device::V3_2::StreamType::INPUT,
                static_cast<uint32_t> (sessionConfiguration.getInputWidth()),
                static_cast<uint32_t> (sessionConfiguration.getInputHeight()),
                Camera3Device::mapToPixelFormat(sessionConfiguration.getInputFormat()),
                /*usage*/ 0, HAL_DATASPACE_UNKNOWN,
                hardware::camera::device::V3_2::StreamRotation::ROTATION_0},
                /*physicalId*/ nullptr, /*bufferSize*/0};
    }

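    // For each output configuration: validate the surface list, then map it either to a
    // deferred placeholder stream, to the internal streams of a composite (depth/HEIC)
    // stream, or to a regular output stream.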
    for (const auto &it : outputConfigs) {
        const std::vector<sp<IGraphicBufferProducer>>& bufferProducers =
            it.getGraphicBufferProducers();
        bool deferredConsumer = it.isDeferred();
        String8 physicalCameraId = String8(it.getPhysicalCameraId());
        size_t numBufferProducers = bufferProducers.size();
        bool isStreamInfoValid = false;
        OutputStreamInfo streamInfo;

        res = checkSurfaceType(numBufferProducers, deferredConsumer, it.getSurfaceType());
        if (!res.isOk()) {
            return res;
        }
        res = checkPhysicalCameraId(physicalCameraIds, physicalCameraId,
                logicalCameraId);
        if (!res.isOk()) {
            return res;
        }

        if (deferredConsumer) {
            streamInfo.width = it.getWidth();
            streamInfo.height = it.getHeight();
            streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
            streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
            auto surfaceType = it.getSurfaceType();
            streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
            if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
                streamInfo.consumerUsage |= GraphicBuffer::USAGE_HW_COMPOSER;
            }
            mapStreamInfo(streamInfo, CAMERA3_STREAM_ROTATION_0, physicalCameraId,
                    &streamConfiguration.streams[streamIdx++]);
            isStreamInfoValid = true;

            if (numBufferProducers == 0) {
                continue;
            }
        }

        for (auto& bufferProducer : bufferProducers) {
            sp<Surface> surface;
            const CameraMetadata &physicalDeviceInfo = getMetadata(physicalCameraId);
            res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
                    logicalCameraId,
                    physicalCameraId.size() > 0 ? physicalDeviceInfo : deviceInfo);

            if (!res.isOk())
                return res;

            if (!isStreamInfoValid) {
                bool isDepthCompositeStream =
                        camera3::DepthCompositeStream::isDepthCompositeStream(surface);
                bool isHeicCompositeStream =
                        camera3::HeicCompositeStream::isHeicCompositeStream(surface);
                if (isDepthCompositeStream || isHeicCompositeStream) {
                    // We need to take into account that composite streams can have
                    // additional internal camera streams.
                    std::vector<OutputStreamInfo> compositeStreams;
                    if (isDepthCompositeStream) {
                        ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
                                deviceInfo, &compositeStreams);
                    } else {
                        ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
                                deviceInfo, &compositeStreams);
                    }
                    if (ret != OK) {
                        String8 msg = String8::format(
                                "Camera %s: Failed adding composite streams: %s (%d)",
                                logicalCameraId.string(), strerror(-ret), ret);
                        ALOGE("%s: %s", __FUNCTION__, msg.string());
                        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
                    }

                    if (compositeStreams.size() == 0) {
                        // No internal streams means the composite stream is not supported.
                        *earlyExit = true;
                        return binder::Status::ok();
                    } else if (compositeStreams.size() > 1) {
                        streamCount += compositeStreams.size() - 1;
                        streamConfiguration.streams.resize(streamCount);
                    }

                    for (const auto& compositeStream : compositeStreams) {
                        mapStreamInfo(compositeStream,
                                static_cast<camera3_stream_rotation_t> (it.getRotation()),
                                physicalCameraId, &streamConfiguration.streams[streamIdx++]);
                    }
                } else {
                    mapStreamInfo(streamInfo,
                            static_cast<camera3_stream_rotation_t> (it.getRotation()),
                            physicalCameraId, &streamConfiguration.streams[streamIdx++]);
                }
                isStreamInfoValid = true;
            }
        }
    }
    return binder::Status::ok();
}

} // namespace android