Merge "codec2: add C2SoftHevcEnc"
diff --git a/apex/Android.bp b/apex/Android.bp
index c077a77..9455290 100644
--- a/apex/Android.bp
+++ b/apex/Android.bp
@@ -12,9 +12,8 @@
// See the License for the specific language governing permissions and
// limitations under the License.
-apex {
- name: "com.android.media",
- manifest: "manifest.json",
+apex_defaults {
+ name: "com.android.media-defaults",
java_libs: ["updatable-media"],
compile_multilib: "both",
multilib: {
@@ -42,16 +41,29 @@
},
},
key: "com.android.media.key",
+ certificate: ":com.android.media.certificate",
}
apex {
- name: "com.android.media.swcodec",
- manifest: "manifest_codec.json",
+ name: "com.android.media",
+ manifest: "manifest.json",
+ defaults: ["com.android.media-defaults"],
+}
+
+apex_defaults {
+ name: "com.android.media.swcodec-defaults",
native_shared_libs: [
"libmedia_codecserviceregistrant",
],
use_vendor: true,
key: "com.android.media.swcodec.key",
+ certificate: ":com.android.media.swcodec.certificate",
+}
+
+apex {
+ name: "com.android.media.swcodec",
+ manifest: "manifest_codec.json",
+ defaults: ["com.android.media.swcodec-defaults"],
}
apex_key {
@@ -65,3 +77,13 @@
public_key: "com.android.media.swcodec.avbpubkey",
private_key: "com.android.media.swcodec.pem",
}
+
+android_app_certificate {
+ name: "com.android.media.certificate",
+ certificate: "com.android.media",
+}
+
+android_app_certificate {
+ name: "com.android.media.swcodec.certificate",
+ certificate: "com.android.media.swcodec",
+}
diff --git a/apex/com.android.media.pk8 b/apex/com.android.media.pk8
new file mode 100644
index 0000000..6df741e
--- /dev/null
+++ b/apex/com.android.media.pk8
Binary files differ
diff --git a/apex/com.android.media.swcodec.pk8 b/apex/com.android.media.swcodec.pk8
new file mode 100644
index 0000000..05a4216
--- /dev/null
+++ b/apex/com.android.media.swcodec.pk8
Binary files differ
diff --git a/apex/com.android.media.swcodec.x509.pem b/apex/com.android.media.swcodec.x509.pem
new file mode 100644
index 0000000..67b9b4f
--- /dev/null
+++ b/apex/com.android.media.swcodec.x509.pem
@@ -0,0 +1,34 @@
+-----BEGIN CERTIFICATE-----
+MIIF3jCCA8agAwIBAgIJAIM72JpD4v6XMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD
+VQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4g
+VmlldzEQMA4GA1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEiMCAGA1UE
+AwwZY29tLmFuZHJvaWQubWVkaWEuc3djb2RlYzAgFw0xOTAyMTEwMjExMTFaGA80
+NzU3MDEwNzAyMTExMVowgYIxCzAJBgNVBAYTAlVTMRMwEQYDVQQIDApDYWxpZm9y
+bmlhMRYwFAYDVQQHDA1Nb3VudGFpbiBWaWV3MRAwDgYDVQQKDAdBbmRyb2lkMRAw
+DgYDVQQLDAdBbmRyb2lkMSIwIAYDVQQDDBljb20uYW5kcm9pZC5tZWRpYS5zd2Nv
+ZGVjMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAsqXE0AIWpLW9Tgq2
+nQGph7KZ6L2Q9oxviqCVHxIaPqfhM2SwTbycADIQeqrrlRxhddVkjLuMUkJa7mev
+fERmgpiOfnPIlGK6PTs2gljCkskZhF3bgfeyuHt0tsYO+UaN8MVoZD7/QdiE46w2
+OMDClG1UqgiqOBhLTEN/cHXObnUiiVXUYqN8aYZf6L6Fs3yQi2ZZgfbxTVFewqdv
+aLLOqCYnVYXZH+ZxbXESA0M+WXKgRKsYTj2GYs3eko1rFi4Y6uHVLx45yaoT5u/i
+SxPEkocyMCKvGJWu4XlSOd3EjSOMaqCOYVyGLxdlnQWQU7PZDqBSJ0SysWgpFHpB
+I15c2jhRdXOCfQ9ZtDfPZkE0a2A8kJDAoF1mzTp6IvBAWUsl5nHPw5CWkFpNad/h
+tqqGCScWbiKZuvrQ4/RQNm3f1K+mxX9TrjFigpqNO6d4pGAo1fa6sHR3xWPw/myq
+h5ZJjVnXU5Yq64S4xWOssfjpOg7RfNuvzuk3ok3MYs1mbx3vhZOj5km1f3qrgX9c
+mXjYnyXD0jJBm4uAJWXLdK9PlZvlXbztMCzYj832Io4pFLCtSxkzX75t1em36Nv0
+mNp6NtSSy6SFSq8l7IsXV2FNyUiyHWxS/UQm8pYg5Q5dWHvEEF78P6lV0wRa6FQl
+BBSgpqTAI092KIjDDtB7GQCgV5ECAwEAAaNTMFEwHQYDVR0OBBYEFAFIdFTDEDft
+ewSSAS7Fa3OZ5TXzMB8GA1UdIwQYMBaAFAFIdFTDEDftewSSAS7Fa3OZ5TXzMA8G
+A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggIBAC5e3zXythJCGmz1FmAV
+8Y/UI+Glg6G0x/k04WaRG0DPLLjlJ1F0LM1/IReBSgXcYAL0CAgPycf/rGPOgMFm
+tQxYyjBUxKdjpIqU5DJoV1feanGveIRpto1YRKNgHuzG9rZGR4AgPnt6X4Yxlq04
+lI7QpWadXe1myARJhj3niSNY9+2wEInkx4ZuCO1LtIGqnbdc8jQ8YoVqIE5N4kuM
+ccyPYgsdABtopbjN92rueu8sfF8R6ROy+tNgb6OjpAAevtnBfZ2LXqfObKirHCK+
+k6w4WSB1UUoZ3Xgz8sJtXgokvYeInkN8tHuTagHYU2VQTcA0rdBGMN/1OljJpWlN
+0UUq4fAYU6cN4lHxr2LM9If4WvAzdLAWvaIZrDqaU4i/zYT9l6rR4lC2KW3EHWov
+nPXfgEJJ8AP1iRGibvew3i3SB6XTWFQYTUIBeJfDz/KDXQabP+yzXWISdZCUMUpx
+f+Raqsb5MoKaJdVgnSL0mBunjCyJDzzg34J7oGx6/BnwoiOrwLN4Qaz5U8jbrPSx
+p9LfleCcO7ZdeE8GKqx0X1T4d7tradtmxOS8Iwr4niskkHGRkzozvVvuyGKmoN2k
+162Vfjq+ddj7qEpSh3BS6hHU+vlMbC9L0trGxPxFEAHDrwu0KwGNduTkiu/3jvfB
+JTgH8P9mD1loYxRdo+vet8eQ
+-----END CERTIFICATE-----
diff --git a/apex/com.android.media.x509.pem b/apex/com.android.media.x509.pem
new file mode 100644
index 0000000..e7908fa
--- /dev/null
+++ b/apex/com.android.media.x509.pem
@@ -0,0 +1,33 @@
+-----BEGIN CERTIFICATE-----
+MIIFzDCCA7SgAwIBAgIJAO05DBBusaaLMA0GCSqGSIb3DQEBCwUAMHoxCzAJBgNV
+BAYTAlVTMRMwEQYDVQQIDApDYWxpZm9ybmlhMRYwFAYDVQQHDA1Nb3VudGFpbiBW
+aWV3MRAwDgYDVQQKDAdBbmRyb2lkMRAwDgYDVQQLDAdBbmRyb2lkMRowGAYDVQQD
+DBFjb20uYW5kcm9pZC5tZWRpYTAgFw0xOTAxMjUxNzE3MTdaGA80NzU2MTIyMTE3
+MTcxN1owejELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNV
+BAcMDU1vdW50YWluIFZpZXcxEDAOBgNVBAoMB0FuZHJvaWQxEDAOBgNVBAsMB0Fu
+ZHJvaWQxGjAYBgNVBAMMEWNvbS5hbmRyb2lkLm1lZGlhMIICIjANBgkqhkiG9w0B
+AQEFAAOCAg8AMIICCgKCAgEAmNkVxUbp/bLbeGbvKqYXzwBycSDpmOhh///lNGYQ
+/AMUD0q6EaZzU2bd4aL0rOGqfoYlhKd0kMVmMUmfdE9ODAfKxleEeEaRl2GJS8a9
+ABi770l3GHbB2xMI2sEWeOD9xsPFF6+ByPZmoUuNhMr4pUbXsDpE3h8ljrgXHtIg
+bh7ofbvddruwBV0lS1k9OZ9jPVGhEKkJnhgQa67cwgdjizAMbI0Dcz9gtMMawsDj
+Z2aQd1r+vxgh1/XkI/NMmXCnG2ERytXcJeC5S4gEtHfTTPoP0FuVgSB6y6dalMuZ
+F0NBZw8Mvgdy3QJip0uNa36J63CMZKTJWbTdlFpPL2hk0PgaYvje8C5Xtk5282wT
+dMocc8n2zIXbzbnSXGvjcNZib3Pfu55YUnX6eTqZ1BxlJ0FHZAsC4quFFWXxYBYD
+LCRoNNFEtIDQpuvuHF2DuHNDULpAQjy2y6+7eot0KEsVoDmZ4H8BpuAVVu2SxYNb
+gYflR9SmM0tmYeAcRT48q3xrocGyEHMqvgQRUpPfvct/8l8xVcDzOI/sJVDqmYzM
+u0Cj3fkSypGDJOMF/esFSmVvoI01tS7kaNS5vvtKYib//xqKRC9f0dCsGfFLnuUK
+o4KYbYWYwMyJqEd/5/ZvXyKIPAEeJL174L9+wTkc3cQpoBwJN4t+2E5MnhOEq6do
+5L0CAwEAAaNTMFEwHQYDVR0OBBYEFHjNK/GZko1RdZp+8iavWXL5xz9wMB8GA1Ud
+IwQYMBaAFHjNK/GZko1RdZp+8iavWXL5xz9wMA8GA1UdEwEB/wQFMAMBAf8wDQYJ
+KoZIhvcNAQELBQADggIBACmPQMksuLrNV1vbI44S1f70I0FHdBxchFGB39zuLbcn
+SsYom/LPtYJiD0Dl4bB4eb+ZnxkQP2XeP6pycmUH2j1EWexFwvdUvlfe8Qz+wAec
+ap4AxiX4Z2Ke2ivYotIZFUHdZOLkX20js8Wex1mzY43MLQn5APl9gK1VZTxDggeR
+EObH1S+JVjGwQqYZj2e6gNZH34Q25NQ698RL85GDkYtSISAifJtaJsU/B3vKm82I
+k9xMiCooCH6bRdGHG1jze4SRpidjxEm8cxkiaQagfcuXeCLziXJr3qAMKYiEY6bp
+0+bAqCt3S8OrrN3RQZfQrnlwitsM1jJJ/+C+WoDg4eY5AFrXDLvNeKh1qO/f8xv+
+fCXkQPcVVphLfRH9oxNrSgOWBP5/qIDH4s1YUL9luGT6H+08dlue3RkbzDbBqsQu
+7fQ/BbrIG/GuVKgyEM+a7C9gv7zc86YlueVYJEyxKidnn7RxOqyDBqyyfXA3zvme
+Rro7xIrMHPL7Nu3AWjwjXzbp/w0z+tEFPsfVB+OOHKsWPcUG0HUTJGkyeO/uHRjN
+qPEkkf7BHHUO4V2gjOIdCsELxKwHf7vsZTOk40EV751fZ7FDHMr1eddQkgH4eqAb
+DB79uP+SLfUo+42n4q6eMmoqw8d76bBXRoUhIo/Ms4sebhV0sRtAS67OQioc9UUg
+-----END CERTIFICATE-----
diff --git a/apex/testing/Android.bp b/apex/testing/Android.bp
new file mode 100644
index 0000000..701ced7
--- /dev/null
+++ b/apex/testing/Android.bp
@@ -0,0 +1,29 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+apex {
+ name: "test_com.android.media",
+ manifest: "test_manifest.json",
+ file_contexts: "com.android.media",
+ defaults: ["com.android.media-defaults"],
+ installable: false,
+}
+
+apex {
+ name: "test_com.android.media.swcodec",
+ manifest: "test_manifest_codec.json",
+ file_contexts: "com.android.media.swcodec",
+ defaults: ["com.android.media.swcodec-defaults"],
+ installable: false,
+}
diff --git a/apex/testing/test_manifest.json b/apex/testing/test_manifest.json
new file mode 100644
index 0000000..9f81f9f
--- /dev/null
+++ b/apex/testing/test_manifest.json
@@ -0,0 +1,4 @@
+{
+ "name": "com.android.media",
+ "version": 2
+}
diff --git a/apex/testing/test_manifest_codec.json b/apex/testing/test_manifest_codec.json
new file mode 100644
index 0000000..c956454
--- /dev/null
+++ b/apex/testing/test_manifest_codec.json
@@ -0,0 +1,4 @@
+{
+ "name": "com.android.media.swcodec",
+ "version": 2
+}
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index b8baec8..b158f8f 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -151,7 +151,7 @@
c2_status_t status;
do {
status = mBase->fetchLinearBlock(capacity, usage, block);
- } while (status == C2_TIMED_OUT);
+ } while (status == C2_BLOCKING);
return status;
}
@@ -162,7 +162,7 @@
c2_status_t status;
do {
status = mBase->fetchCircularBlock(capacity, usage, block);
- } while (status == C2_TIMED_OUT);
+ } while (status == C2_BLOCKING);
return status;
}
@@ -174,7 +174,7 @@
do {
status = mBase->fetchGraphicBlock(width, height, format, usage,
block);
- } while (status == C2_TIMED_OUT);
+ } while (status == C2_BLOCKING);
return status;
}
diff --git a/media/codec2/core/include/C2Buffer.h b/media/codec2/core/include/C2Buffer.h
index 2997f6e..c428122 100644
--- a/media/codec2/core/include/C2Buffer.h
+++ b/media/codec2/core/include/C2Buffer.h
@@ -888,6 +888,7 @@
* \retval C2_OK the operation was successful
* \retval C2_NO_MEMORY not enough memory to complete any required allocation
* \retval C2_TIMED_OUT the operation timed out
+ * \retval C2_BLOCKING the operation is blocked
* \retval C2_REFUSED no permission to complete any required allocation
* \retval C2_BAD_VALUE capacity or usage are not supported (invalid) (caller error)
* \retval C2_OMITTED this pool does not support linear blocks
@@ -916,6 +917,7 @@
* \retval C2_OK the operation was successful
* \retval C2_NO_MEMORY not enough memory to complete any required allocation
* \retval C2_TIMED_OUT the operation timed out
+ * \retval C2_BLOCKING the operation is blocked
* \retval C2_REFUSED no permission to complete any required allocation
* \retval C2_BAD_VALUE capacity or usage are not supported (invalid) (caller error)
* \retval C2_OMITTED this pool does not support circular blocks
@@ -946,6 +948,7 @@
* \retval C2_OK the operation was successful
* \retval C2_NO_MEMORY not enough memory to complete any required allocation
* \retval C2_TIMED_OUT the operation timed out
+ * \retval C2_BLOCKING the operation is blocked
* \retval C2_REFUSED no permission to complete any required allocation
* \retval C2_BAD_VALUE width, height, format or usage are not supported (invalid) (caller
* error)
diff --git a/media/codec2/hidl/1.0/utils/types.cpp b/media/codec2/hidl/1.0/utils/types.cpp
index 02cdc23..343bcb5 100644
--- a/media/codec2/hidl/1.0/utils/types.cpp
+++ b/media/codec2/hidl/1.0/utils/types.cpp
@@ -1803,7 +1803,8 @@
}
sp<HGraphicBufferProducer> getHgbp(const sp<IGraphicBufferProducer>& igbp) {
- sp<HGraphicBufferProducer> hgbp = igbp->getHalInterface();
+ sp<HGraphicBufferProducer> hgbp =
+ igbp->getHalInterface<HGraphicBufferProducer>();
return hgbp ? hgbp :
new TWGraphicBufferProducer<HGraphicBufferProducer>(igbp);
}
diff --git a/media/codec2/hidl/client/client.cpp b/media/codec2/hidl/client/client.cpp
index 3808be5..7a2e549 100644
--- a/media/codec2/hidl/client/client.cpp
+++ b/media/codec2/hidl/client/client.cpp
@@ -1064,7 +1064,9 @@
C2BlockPool::local_id_t blockPoolId,
const sp<IGraphicBufferProducer>& surface,
uint32_t generation) {
- sp<HGraphicBufferProducer> igbp = surface->getHalInterface();
+ sp<HGraphicBufferProducer> igbp =
+ surface->getHalInterface<HGraphicBufferProducer>();
+
if (!igbp) {
igbp = new TWGraphicBufferProducer<HGraphicBufferProducer>(surface);
}
diff --git a/media/codec2/sfplugin/PipelineWatcher.cpp b/media/codec2/sfplugin/PipelineWatcher.cpp
index cdcc41b..df81d49 100644
--- a/media/codec2/sfplugin/PipelineWatcher.cpp
+++ b/media/codec2/sfplugin/PipelineWatcher.cpp
@@ -139,8 +139,8 @@
std::chrono::duration_cast<std::chrono::milliseconds>(elapsed).count());
durations.push_back(elapsed);
}
- nth_element(durations.begin(), durations.end(), durations.begin() + n,
- std::greater<Clock::duration>());
+ std::nth_element(durations.begin(), durations.end(), durations.begin() + n,
+ std::greater<Clock::duration>());
return durations[n];
}
diff --git a/media/codec2/sfplugin/PipelineWatcher.h b/media/codec2/sfplugin/PipelineWatcher.h
index 1c127e4..1e23147 100644
--- a/media/codec2/sfplugin/PipelineWatcher.h
+++ b/media/codec2/sfplugin/PipelineWatcher.h
@@ -26,7 +26,8 @@
namespace android {
/**
- * PipelineWatcher watches the status of the work.
+ * PipelineWatcher watches the pipeline and infers the status of work items from
+ * events.
*/
class PipelineWatcher {
public:
@@ -39,21 +40,81 @@
mSmoothnessFactor(0) {}
~PipelineWatcher() = default;
+ /**
+ * \param value the new input delay value
+ * \return this object
+ */
PipelineWatcher &inputDelay(uint32_t value);
+
+ /**
+ * \param value the new pipeline delay value
+ * \return this object
+ */
PipelineWatcher &pipelineDelay(uint32_t value);
+
+ /**
+ * \param value the new output delay value
+ * \return this object
+ */
PipelineWatcher &outputDelay(uint32_t value);
+
+ /**
+ * \param value the new smoothness factor value
+ * \return this object
+ */
PipelineWatcher &smoothnessFactor(uint32_t value);
+ /**
+ * Client queued a work item to the component.
+ *
+ * \param frameIndex input frame index of this work
+ * \param buffers input buffers of the queued work item
+ * \param queuedAt time when the client queued the buffer
+ */
void onWorkQueued(
uint64_t frameIndex,
std::vector<std::shared_ptr<C2Buffer>> &&buffers,
const Clock::time_point &queuedAt);
+
+ /**
+ * The component released input buffers from a work item.
+ *
+ * \param frameIndex input frame index
+     * \param arrayIndex index of the buffer in the original |buffers| passed to
+     *                   onWorkQueued().
+ * \return buffers[arrayIndex]
+ */
std::shared_ptr<C2Buffer> onInputBufferReleased(
uint64_t frameIndex, size_t arrayIndex);
+
+ /**
+ * The component finished processing a work item.
+ *
+ * \param frameIndex input frame index
+ */
void onWorkDone(uint64_t frameIndex);
+
+ /**
+ * Flush the pipeline.
+ */
void flush();
+ /**
+ * \return true if pipeline does not need more work items to proceed
+ * smoothly, considering delays and smoothness factor;
+ * false otherwise.
+ */
bool pipelineFull() const;
+
+ /**
+ * Return elapsed processing time of a work item, nth from the longest
+ * processing time to the shortest.
+ *
+ * \param now current timestamp
+ * \param n nth work item, from the longest processing time to the
+ * shortest. It's a 0-based index.
+ * \return elapsed processing time of nth work item.
+ */
Clock::duration elapsed(const Clock::time_point &now, size_t n) const;
private:
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index 7bf3d64..41a5b3f 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -207,12 +207,16 @@
// dequeueBuffer returns flag.
if (!transStatus.isOk() || status < android::OK) {
ALOGD("cannot dequeue buffer %d", status);
- if (transStatus.isOk() && status == android::INVALID_OPERATION) {
- // Too many buffer dequeued. retrying after some time is required.
- return C2_TIMED_OUT;
- } else {
- return C2_BAD_VALUE;
+ if (transStatus.isOk()) {
+ if (status == android::INVALID_OPERATION ||
+ status == android::TIMED_OUT ||
+ status == android::WOULD_BLOCK) {
+ // Dequeue buffer is blocked temporarily. Retrying is
+ // required.
+ return C2_BLOCKING;
+ }
}
+ return C2_BAD_VALUE;
}
ALOGV("dequeued a buffer successfully");
native_handle_t* nh = nullptr;
@@ -227,7 +231,7 @@
if (status == -ETIME) {
// fence is not signalled yet.
(void)mProducer->cancelBuffer(slot, fenceHandle).isOk();
- return C2_TIMED_OUT;
+ return C2_BLOCKING;
}
if (status != android::NO_ERROR) {
ALOGD("buffer fence wait error %d", status);
@@ -353,14 +357,14 @@
return C2_OK;
}
c2_status_t status = fetchFromIgbp_l(width, height, format, usage, block);
- if (status == C2_TIMED_OUT) {
+ if (status == C2_BLOCKING) {
lock.unlock();
::usleep(kMaxIgbpRetryDelayUs);
continue;
}
return status;
}
- return C2_TIMED_OUT;
+ return C2_BLOCKING;
}
void setRenderCallback(const OnRenderCallback &renderCallback) {
diff --git a/media/libaudioclient/IAudioPolicyService.cpp b/media/libaudioclient/IAudioPolicyService.cpp
index d9f6e36..1bce16f 100644
--- a/media/libaudioclient/IAudioPolicyService.cpp
+++ b/media/libaudioclient/IAudioPolicyService.cpp
@@ -1167,8 +1167,6 @@
case SET_STREAM_VOLUME:
case REGISTER_POLICY_MIXES:
case SET_MASTER_MONO:
- case START_AUDIO_SOURCE:
- case STOP_AUDIO_SOURCE:
case GET_SURROUND_FORMATS:
case SET_SURROUND_FORMAT_ENABLED:
case SET_ASSISTANT_UID:
diff --git a/media/libaudioprocessing/AudioMixer.cpp b/media/libaudioprocessing/AudioMixer.cpp
index 86777d6..2c57db7 100644
--- a/media/libaudioprocessing/AudioMixer.cpp
+++ b/media/libaudioprocessing/AudioMixer.cpp
@@ -113,10 +113,10 @@
// Integer volume.
// Currently integer volume is kept for the legacy integer mixer.
// Will be removed when the legacy mixer path is removed.
- t->volume[0] = UNITY_GAIN_INT;
- t->volume[1] = UNITY_GAIN_INT;
- t->prevVolume[0] = UNITY_GAIN_INT << 16;
- t->prevVolume[1] = UNITY_GAIN_INT << 16;
+ t->volume[0] = 0;
+ t->volume[1] = 0;
+ t->prevVolume[0] = 0 << 16;
+ t->prevVolume[1] = 0 << 16;
t->volumeInc[0] = 0;
t->volumeInc[1] = 0;
t->auxLevel = 0;
@@ -124,10 +124,10 @@
t->prevAuxLevel = 0;
// Floating point volume.
- t->mVolume[0] = UNITY_GAIN_FLOAT;
- t->mVolume[1] = UNITY_GAIN_FLOAT;
- t->mPrevVolume[0] = UNITY_GAIN_FLOAT;
- t->mPrevVolume[1] = UNITY_GAIN_FLOAT;
+ t->mVolume[0] = 0.f;
+ t->mVolume[1] = 0.f;
+ t->mPrevVolume[0] = 0.f;
+ t->mPrevVolume[1] = 0.f;
t->mVolumeInc[0] = 0.;
t->mVolumeInc[1] = 0.;
t->mAuxLevel = 0.;
diff --git a/media/libmedia/IOMX.cpp b/media/libmedia/IOMX.cpp
index a073081..747b88f 100644
--- a/media/libmedia/IOMX.cpp
+++ b/media/libmedia/IOMX.cpp
@@ -552,7 +552,7 @@
};
IMPLEMENT_META_INTERFACE(OMX, "android.hardware.IOMX");
-IMPLEMENT_HYBRID_META_INTERFACE(OMXNode, IOmxNode, "android.hardware.IOMXNode");
+IMPLEMENT_HYBRID_META_INTERFACE(OMXNode, "android.hardware.IOMXNode");
////////////////////////////////////////////////////////////////////////////////
diff --git a/media/libmedia/MediaProfiles.cpp b/media/libmedia/MediaProfiles.cpp
index 08c6a50..98c5497 100644
--- a/media/libmedia/MediaProfiles.cpp
+++ b/media/libmedia/MediaProfiles.cpp
@@ -92,6 +92,19 @@
{"highspeed720p", CAMCORDER_QUALITY_HIGH_SPEED_720P},
{"highspeed1080p", CAMCORDER_QUALITY_HIGH_SPEED_1080P},
{"highspeed2160p", CAMCORDER_QUALITY_HIGH_SPEED_2160P},
+
+ // Vendor-specific profiles
+ {"vga", CAMCORDER_QUALITY_VGA},
+ {"4kdci", CAMCORDER_QUALITY_4KDCI},
+ {"timelapsevga", CAMCORDER_QUALITY_TIME_LAPSE_VGA},
+ {"timelapse4kdci", CAMCORDER_QUALITY_TIME_LAPSE_4KDCI},
+ {"highspeedcif", CAMCORDER_QUALITY_HIGH_SPEED_CIF},
+ {"highspeedvga", CAMCORDER_QUALITY_HIGH_SPEED_VGA},
+ {"highspeed4kdci", CAMCORDER_QUALITY_HIGH_SPEED_4KDCI},
+ {"qhd", CAMCORDER_QUALITY_QHD},
+ {"2k", CAMCORDER_QUALITY_2k},
+ {"timelapseqhd", CAMCORDER_QUALITY_TIME_LAPSE_QHD},
+ {"timelapse2k", CAMCORDER_QUALITY_TIME_LAPSE_2k},
};
#if LOG_NDEBUG
diff --git a/media/libmedia/include/media/MediaProfiles.h b/media/libmedia/include/media/MediaProfiles.h
index 0feb4f3..3e8e7c8 100644
--- a/media/libmedia/include/media/MediaProfiles.h
+++ b/media/libmedia/include/media/MediaProfiles.h
@@ -34,7 +34,11 @@
CAMCORDER_QUALITY_1080P = 6,
CAMCORDER_QUALITY_QVGA = 7,
CAMCORDER_QUALITY_2160P = 8,
- CAMCORDER_QUALITY_LIST_END = 8,
+ CAMCORDER_QUALITY_VGA = 9,
+ CAMCORDER_QUALITY_4KDCI = 10,
+ CAMCORDER_QUALITY_QHD = 11,
+ CAMCORDER_QUALITY_2k = 12,
+ CAMCORDER_QUALITY_LIST_END = 12,
CAMCORDER_QUALITY_TIME_LAPSE_LIST_START = 1000,
CAMCORDER_QUALITY_TIME_LAPSE_LOW = 1000,
@@ -46,7 +50,11 @@
CAMCORDER_QUALITY_TIME_LAPSE_1080P = 1006,
CAMCORDER_QUALITY_TIME_LAPSE_QVGA = 1007,
CAMCORDER_QUALITY_TIME_LAPSE_2160P = 1008,
- CAMCORDER_QUALITY_TIME_LAPSE_LIST_END = 1008,
+ CAMCORDER_QUALITY_TIME_LAPSE_VGA = 1009,
+ CAMCORDER_QUALITY_TIME_LAPSE_4KDCI = 1010,
+ CAMCORDER_QUALITY_TIME_LAPSE_QHD = 1011,
+ CAMCORDER_QUALITY_TIME_LAPSE_2k = 1012,
+ CAMCORDER_QUALITY_TIME_LAPSE_LIST_END = 1012,
CAMCORDER_QUALITY_HIGH_SPEED_LIST_START = 2000,
CAMCORDER_QUALITY_HIGH_SPEED_LOW = 2000,
@@ -55,7 +63,10 @@
CAMCORDER_QUALITY_HIGH_SPEED_720P = 2003,
CAMCORDER_QUALITY_HIGH_SPEED_1080P = 2004,
CAMCORDER_QUALITY_HIGH_SPEED_2160P = 2005,
- CAMCORDER_QUALITY_HIGH_SPEED_LIST_END = 2005,
+ CAMCORDER_QUALITY_HIGH_SPEED_CIF = 2006,
+ CAMCORDER_QUALITY_HIGH_SPEED_VGA = 2007,
+ CAMCORDER_QUALITY_HIGH_SPEED_4KDCI = 2008,
+ CAMCORDER_QUALITY_HIGH_SPEED_LIST_END = 2008,
};
enum video_decoder {
diff --git a/media/libmedia/include/media/omx/1.0/WOmxNode.h b/media/libmedia/include/media/omx/1.0/WOmxNode.h
index eebc8c6..1db4248 100644
--- a/media/libmedia/include/media/omx/1.0/WOmxNode.h
+++ b/media/libmedia/include/media/omx/1.0/WOmxNode.h
@@ -59,7 +59,7 @@
* - TW = Treble Wrapper --- It wraps a legacy object inside a Treble object.
*/
-struct LWOmxNode : public H2BConverter<IOmxNode, IOMXNode, BnOMXNode> {
+struct LWOmxNode : public H2BConverter<IOmxNode, BnOMXNode> {
LWOmxNode(sp<IOmxNode> const& base) : CBase(base) {}
status_t freeNode() override;
status_t sendCommand(
diff --git a/media/libmedia/omx/1.0/WGraphicBufferSource.cpp b/media/libmedia/omx/1.0/WGraphicBufferSource.cpp
index 31d1df9..1ed1d07 100644
--- a/media/libmedia/omx/1.0/WGraphicBufferSource.cpp
+++ b/media/libmedia/omx/1.0/WGraphicBufferSource.cpp
@@ -32,7 +32,7 @@
BnStatus LWGraphicBufferSource::configure(
const sp<IOMXNode>& omxNode, int32_t dataSpace) {
- sp<IOmxNode> hOmxNode = omxNode->getHalInterface();
+ sp<IOmxNode> hOmxNode = omxNode->getHalInterface<IOmxNode>();
return toBinderStatus(mBase->configure(
hOmxNode == nullptr ? new TWOmxNode(omxNode) : hOmxNode,
toHardwareDataspace(dataSpace)));
diff --git a/media/libmediaplayerservice/include/MediaPlayerInterface.h b/media/libmediaplayerservice/include/MediaPlayerInterface.h
index 3119950..0ad4d04 100644
--- a/media/libmediaplayerservice/include/MediaPlayerInterface.h
+++ b/media/libmediaplayerservice/include/MediaPlayerInterface.h
@@ -151,13 +151,13 @@
virtual media::VolumeShaper::Status applyVolumeShaper(
const sp<media::VolumeShaper::Configuration>& configuration,
- const sp<media::VolumeShaper::Operation>& operation);
- virtual sp<media::VolumeShaper::State> getVolumeShaperState(int id);
+ const sp<media::VolumeShaper::Operation>& operation) = 0;
+ virtual sp<media::VolumeShaper::State> getVolumeShaperState(int id) = 0;
// AudioRouting
- virtual status_t setOutputDevice(audio_port_handle_t deviceId);
- virtual status_t getRoutedDeviceId(audio_port_handle_t* deviceId);
- virtual status_t enableAudioDeviceCallback(bool enabled);
+ virtual status_t setOutputDevice(audio_port_handle_t deviceId) = 0;
+ virtual status_t getRoutedDeviceId(audio_port_handle_t* deviceId) = 0;
+ virtual status_t enableAudioDeviceCallback(bool enabled) = 0;
};
MediaPlayerBase() {}
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 52cb5fa..9d3338b 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -171,11 +171,7 @@
}
struct CodecObserver : public BnOMXObserver {
- CodecObserver() {}
-
- void setNotificationMessage(const sp<AMessage> &msg) {
- mNotify = msg;
- }
+ explicit CodecObserver(const sp<AMessage> &msg) : mNotify(msg) {}
// from IOMXObserver
virtual void onMessages(const std::list<omx_message> &messages) {
@@ -251,7 +247,7 @@
virtual ~CodecObserver() {}
private:
- sp<AMessage> mNotify;
+ const sp<AMessage> mNotify;
DISALLOW_EVIL_CONSTRUCTORS(CodecObserver);
};
@@ -1248,6 +1244,7 @@
info.mRenderInfo = NULL;
info.mGraphicBuffer = graphicBuffer;
info.mNewGraphicBuffer = false;
+ info.mDequeuedAt = mDequeueCounter;
         // TODO: We shouldn't need to create MediaCodecBuffer. In metadata mode
// OMX doesn't use the shared memory buffer, but some code still
@@ -4431,9 +4428,9 @@
h264type.nRefFrames = 2;
h264type.nBFrames = mLatency == 0 ? 1 : std::min(1U, mLatency - 1);
- // disable B-frames until MPEG4Writer can guarantee finalizing files with B-frames
- // h264type.nRefFrames = 1;
- // h264type.nBFrames = 0;
+ // disable B-frames until we have explicit settings for enabling the feature.
+ h264type.nRefFrames = 1;
+ h264type.nBFrames = 0;
h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames);
h264type.nAllowedPictureTypes =
@@ -6539,8 +6536,10 @@
if (mDeathNotifier != NULL) {
if (mCodec->mOMXNode != NULL) {
- auto tOmxNode = mCodec->mOMXNode->getHalInterface();
- tOmxNode->unlinkToDeath(mDeathNotifier);
+ auto tOmxNode = mCodec->mOMXNode->getHalInterface<IOmxNode>();
+ if (tOmxNode) {
+ tOmxNode->unlinkToDeath(mDeathNotifier);
+ }
}
mDeathNotifier.clear();
}
@@ -6627,7 +6626,8 @@
CHECK(mCodec->mOMXNode == NULL);
- sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec);
+ sp<AMessage> notify = new AMessage(kWhatOMXMessageList, mCodec);
+ notify->setInt32("generation", mCodec->mNodeGeneration + 1);
sp<RefBase> obj;
CHECK(msg->findObject("codecInfo", &obj));
@@ -6642,7 +6642,7 @@
AString componentName;
CHECK(msg->findString("componentName", &componentName));
- sp<CodecObserver> observer = new CodecObserver;
+ sp<CodecObserver> observer = new CodecObserver(notify);
sp<IOMX> omx;
sp<IOMXNode> omxNode;
@@ -6668,14 +6668,12 @@
}
mDeathNotifier = new DeathNotifier(notify);
- auto tOmxNode = omxNode->getHalInterface();
- if (!tOmxNode->linkToDeath(mDeathNotifier, 0)) {
+ auto tOmxNode = omxNode->getHalInterface<IOmxNode>();
+ if (tOmxNode && !tOmxNode->linkToDeath(mDeathNotifier, 0)) {
mDeathNotifier.clear();
}
- notify = new AMessage(kWhatOMXMessageList, mCodec);
- notify->setInt32("generation", ++mCodec->mNodeGeneration);
- observer->setNotificationMessage(notify);
+ ++mCodec->mNodeGeneration;
mCodec->mComponentName = componentName;
mCodec->mRenderTracker.setComponentName(componentName);
@@ -8165,6 +8163,10 @@
OMX_CommandPortEnable, kPortIndexOutput);
}
+ // Clear the RenderQueue in which queued GraphicBuffers hold the
+ // actual buffer references in order to free them early.
+ mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
+
if (err == OK) {
err = mCodec->allocateBuffersOnPort(kPortIndexOutput);
ALOGE_IF(err != OK, "Failed to allocate output port buffers after port "
@@ -8570,7 +8572,7 @@
}
sp<IOMX> omx = client.interface();
- sp<CodecObserver> observer = new CodecObserver;
+ sp<CodecObserver> observer = new CodecObserver(new AMessage);
sp<IOMXNode> omxNode;
err = omx->allocateNode(name, observer, &omxNode);
diff --git a/media/libstagefright/StagefrightMetadataRetriever.cpp b/media/libstagefright/StagefrightMetadataRetriever.cpp
index f34d54c..fa3d372 100644
--- a/media/libstagefright/StagefrightMetadataRetriever.cpp
+++ b/media/libstagefright/StagefrightMetadataRetriever.cpp
@@ -165,6 +165,9 @@
for (i = 0; i < n; ++i) {
sp<MetaData> meta = mExtractor->getTrackMetaData(i);
+ if (!meta) {
+ continue;
+ }
ALOGV("getting track %zu of %zu, meta=%s", i, n, meta->toString().c_str());
const char *mime;
@@ -186,6 +189,9 @@
}
sp<MetaData> trackMeta = mExtractor->getTrackMetaData(i);
+ if (!trackMeta) {
+ return NULL;
+ }
if (metaOnly) {
return FrameDecoder::getMetadataOnly(trackMeta, colorFormat, thumbnail);
@@ -280,6 +286,9 @@
size_t i;
for (i = 0; i < n; ++i) {
sp<MetaData> meta = mExtractor->getTrackMetaData(i);
+ if (!meta) {
+ continue;
+ }
const char *mime;
CHECK(meta->findCString(kKeyMIMEType, &mime));
@@ -296,6 +305,9 @@
sp<MetaData> trackMeta = mExtractor->getTrackMetaData(
i, MediaExtractor::kIncludeExtensiveMetaData);
+ if (!trackMeta) {
+ return UNKNOWN_ERROR;
+ }
if (metaOnly) {
if (outFrame != NULL) {
@@ -529,6 +541,9 @@
String8 timedTextLang;
for (size_t i = 0; i < numTracks; ++i) {
sp<MetaData> trackMeta = mExtractor->getTrackMetaData(i);
+ if (!trackMeta) {
+ continue;
+ }
int64_t durationUs;
if (trackMeta->findInt64(kKeyDuration, &durationUs)) {
@@ -667,8 +682,9 @@
!strcasecmp(fileMIME, "video/x-matroska")) {
sp<MetaData> trackMeta = mExtractor->getTrackMetaData(0);
const char *trackMIME;
- CHECK(trackMeta->findCString(kKeyMIMEType, &trackMIME));
-
+ if (trackMeta != nullptr) {
+ CHECK(trackMeta->findCString(kKeyMIMEType, &trackMIME));
+ }
if (!strncasecmp("audio/", trackMIME, 6)) {
// The matroska file only contains a single audio track,
// rewrite its mime type.
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 09424b8..16b3319 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -588,6 +588,7 @@
{ "genre", kKeyGenre },
{ "location", kKeyLocation },
{ "lyricist", kKeyWriter },
+ { "manufacturer", kKeyManufacturer },
{ "title", kKeyTitle },
{ "year", kKeyYear },
}
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index 437bdb7..a0407af 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -144,6 +144,9 @@
// The language code for this media
kKeyMediaLanguage = 'lang', // cstring
+ // The manufacturer code for this media
+ kKeyManufacturer = 'manu', // cstring
+
// To store the timed text format data
kKeyTextFormatData = 'text', // raw data
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index 74754ea..339f622 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -106,6 +106,10 @@
symbol_file: "libmediandk.map.txt",
versions: ["29"],
},
+
+ // Bug: http://b/124522995 libmediandk has linker errors when built with
+ // coverage
+ native_coverage: false,
}
llndk_library {
diff --git a/media/ndk/NdkMediaFormat.cpp b/media/ndk/NdkMediaFormat.cpp
index cd8ecb5..26a6238 100644
--- a/media/ndk/NdkMediaFormat.cpp
+++ b/media/ndk/NdkMediaFormat.cpp
@@ -342,6 +342,7 @@
EXPORT const char* AMEDIAFORMAT_KEY_LOCATION = "location";
EXPORT const char* AMEDIAFORMAT_KEY_LOOP = "loop";
EXPORT const char* AMEDIAFORMAT_KEY_LYRICIST = "lyricist";
+EXPORT const char* AMEDIAFORMAT_KEY_MANUFACTURER = "manufacturer";
EXPORT const char* AMEDIAFORMAT_KEY_MAX_BIT_RATE = "max-bitrate";
EXPORT const char* AMEDIAFORMAT_KEY_MAX_HEIGHT = "max-height";
EXPORT const char* AMEDIAFORMAT_KEY_MAX_INPUT_SIZE = "max-input-size";
diff --git a/media/ndk/include/media/NdkMediaFormat.h b/media/ndk/include/media/NdkMediaFormat.h
index cc1d9ef..ddf5291 100644
--- a/media/ndk/include/media/NdkMediaFormat.h
+++ b/media/ndk/include/media/NdkMediaFormat.h
@@ -214,6 +214,7 @@
extern const char* AMEDIAFORMAT_KEY_LOCATION __INTRODUCED_IN(29);
extern const char* AMEDIAFORMAT_KEY_LOOP __INTRODUCED_IN(29);
extern const char* AMEDIAFORMAT_KEY_LYRICIST __INTRODUCED_IN(29);
+extern const char* AMEDIAFORMAT_KEY_MANUFACTURER __INTRODUCED_IN(29);
extern const char* AMEDIAFORMAT_KEY_MAX_BIT_RATE __INTRODUCED_IN(29);
extern const char* AMEDIAFORMAT_KEY_MPEG2_STREAM_HEADER __INTRODUCED_IN(29);
extern const char* AMEDIAFORMAT_KEY_PCM_BIG_ENDIAN __INTRODUCED_IN(29);
diff --git a/media/ndk/libmediandk.map.txt b/media/ndk/libmediandk.map.txt
index 171167d..7bdd3ad 100644
--- a/media/ndk/libmediandk.map.txt
+++ b/media/ndk/libmediandk.map.txt
@@ -105,6 +105,7 @@
AMEDIAFORMAT_KEY_LOCATION; # var introduced=29
AMEDIAFORMAT_KEY_LOOP; # var introduced=29
AMEDIAFORMAT_KEY_LYRICIST; # var introduced=29
+ AMEDIAFORMAT_KEY_MANUFACTURER; # var introduced=29
AMEDIAFORMAT_KEY_MAX_BIT_RATE; # var introduced=29
AMEDIAFORMAT_KEY_MAX_HEIGHT; # var introduced=21
AMEDIAFORMAT_KEY_MAX_INPUT_SIZE; # var introduced=21
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index 1c54aec..599c446 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -85,7 +85,7 @@
return false;
}
} else {
- if (appOps.noteOp(op, uid, resolvedOpPackageName) != AppOpsManager::MODE_ALLOWED) {
+ if (appOps.checkOp(op, uid, resolvedOpPackageName) != AppOpsManager::MODE_ALLOWED) {
ALOGE("Request denied by app op: %d", op);
return false;
}
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 922547d..65f799e 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -698,6 +698,7 @@
// TODO: compensate for time shift between HW modules.
void AudioFlinger::PlaybackThread::Track::interceptBuffer(
const AudioBufferProvider::Buffer& sourceBuffer) {
+ auto start = std::chrono::steady_clock::now();
const size_t frameCount = sourceBuffer.frameCount;
for (auto& sink : mTeePatches) {
RecordThread::PatchRecord* patchRecord = sink.patchRecord.get();
@@ -715,6 +716,11 @@
"buffer %zu/%zu, dropping %zu frames", __func__, mId, patchRecord->mId,
framesWritten, frameCount, framesLeft);
}
+ auto spent = ceil<std::chrono::microseconds>(std::chrono::steady_clock::now() - start);
+ using namespace std::chrono_literals;
+ // Average is ~20us per track, this should virtually never be logged (Logging takes >200us)
+ ALOGD_IF(spent > 200us, "%s: took %lldus to intercept %zu tracks", __func__,
+ spent.count(), mTeePatches.size());
}
size_t AudioFlinger::PlaybackThread::Track::writeFrames(AudioBufferProvider* dest,
diff --git a/services/audiopolicy/config/hearing_aid_audio_policy_configuration.xml b/services/audiopolicy/config/hearing_aid_audio_policy_configuration.xml
index 3c48e88..e6e6bdb 100644
--- a/services/audiopolicy/config/hearing_aid_audio_policy_configuration.xml
+++ b/services/audiopolicy/config/hearing_aid_audio_policy_configuration.xml
@@ -2,7 +2,7 @@
<!-- Hearing aid Audio HAL Audio Policy Configuration file -->
<module name="hearing_aid" halVersion="2.0">
<mixPorts>
- <mixPort name="hearing aid output" role="source" flags="AUDIO_OUTPUT_FLAG_PRIMARY">
+ <mixPort name="hearing aid output" role="source">
<profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
samplingRates="24000,16000"
channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 32cc380..b1aa92d 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -4048,14 +4048,18 @@
std::vector<const char*> fileNames;
status_t ret;
- if (property_get_bool("ro.bluetooth.a2dp_offload.supported", false) &&
- property_get_bool("persist.bluetooth.a2dp_offload.disabled", false)) {
- // A2DP offload supported but disabled: try to use special XML file
- if (property_get_bool("persist.bluetooth.bluetooth_audio_hal.enabled", false)) {
- fileNames.push_back(AUDIO_POLICY_BLUETOOTH_HAL_ENABLED_XML_CONFIG_FILE_NAME);
- } else {
+ if (property_get_bool("ro.bluetooth.a2dp_offload.supported", false)) {
+ if (property_get_bool("persist.bluetooth.a2dp_offload.disabled", false)) {
fileNames.push_back(AUDIO_POLICY_A2DP_OFFLOAD_DISABLED_XML_CONFIG_FILE_NAME);
+ } else if (property_get_bool("persist.bluetooth.bluetooth_audio_hal.enabled", false)) {
+            // The property persist.bluetooth.bluetooth_audio_hal.enabled is temporary only.
+            // The xml files named AUDIO_POLICY_BLUETOOTH_HAL_ENABLED_XML_CONFIG_FILE_NAME,
+            // although sharing the same name, must differ between the offload and non-offload
+            // cases in the device-specific configuration.
+ fileNames.push_back(AUDIO_POLICY_BLUETOOTH_HAL_ENABLED_XML_CONFIG_FILE_NAME);
}
+ } else if (property_get_bool("persist.bluetooth.bluetooth_audio_hal.enabled", false)) {
+ fileNames.push_back(AUDIO_POLICY_BLUETOOTH_HAL_ENABLED_XML_CONFIG_FILE_NAME);
}
fileNames.push_back(AUDIO_POLICY_XML_CONFIG_FILE_NAME);
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index a39477d..76ac191 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -383,6 +383,8 @@
// OR The client is an accessibility service
// AND is on TOP OR latest started
// AND the source is VOICE_RECOGNITION or HOTWORD
+// OR the source is one of: AUDIO_SOURCE_VOICE_DOWNLINK, AUDIO_SOURCE_VOICE_UPLINK,
+// AUDIO_SOURCE_VOICE_CALL
// OR Any other client
// AND The assistant is not on TOP
// AND is on TOP OR latest started
@@ -463,6 +465,10 @@
(source == AUDIO_SOURCE_VOICE_RECOGNITION || source == AUDIO_SOURCE_HOTWORD)) {
forceIdle = false;
}
+ } else if (source == AUDIO_SOURCE_VOICE_DOWNLINK ||
+ source == AUDIO_SOURCE_VOICE_CALL ||
+ (source == AUDIO_SOURCE_VOICE_UPLINK)) {
+ forceIdle = false;
} else {
if (!isAssistantOnTop && (isOnTop || isLatest) &&
(!isSensitiveActive || isLatestSensitive)) {
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 2ca8356..7ec0e4c 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -94,6 +94,7 @@
"libsensorprivacy",
"libstagefright",
"libstagefright_foundation",
+ "libyuv",
"android.frameworks.cameraservice.common@2.0",
"android.frameworks.cameraservice.service@2.0",
"android.frameworks.cameraservice.device@2.0",
@@ -137,6 +138,7 @@
name: "libdepthphoto",
srcs: [
+ "utils/ExifUtils.cpp",
"common/DepthPhotoProcessor.cpp",
],
@@ -150,6 +152,8 @@
"libcutils",
"libjpeg",
"libmemunreachable",
+ "libexif",
+ "libcamera_client",
],
include_dirs: [
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index 2eec0f7..9525ad2 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -339,6 +339,21 @@
} else {
depthPhoto.mIsLensDistortionValid = 0;
}
+ entry = inputFrame.result.find(ANDROID_JPEG_ORIENTATION);
+ if (entry.count > 0) {
+ // The camera jpeg orientation values must be within [0, 90, 180, 270].
+ switch (entry.data.i32[0]) {
+ case 0:
+ case 90:
+ case 180:
+ case 270:
+ depthPhoto.mOrientation = static_cast<DepthPhotoOrientation> (entry.data.i32[0]);
+ break;
+ default:
+ ALOGE("%s: Unexpected jpeg orientation value: %d, default to 0 degrees",
+ __FUNCTION__, entry.data.i32[0]);
+ }
+ }
size_t actualJpegSize = 0;
res = mDepthPhotoProcess(depthPhoto, finalJpegBufferSize, dstBuffer, &actualJpegSize);
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 8e9c39e..9fd0e8b 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -23,6 +23,7 @@
#include <sys/syscall.h>
#include <android/hardware/camera/device/3.5/types.h>
+#include <libyuv.h>
#include <gui/Surface.h>
#include <utils/Log.h>
#include <utils/Trace.h>
@@ -136,6 +137,8 @@
mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
mAppSegmentSurface = new Surface(producer);
+ mStaticInfo = device->info();
+
res = device->createStream(mAppSegmentSurface, mAppSegmentMaxSize, 1, format,
kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId, surfaceIds);
if (res == OK) {
@@ -190,6 +193,7 @@
return res;
}
+ initCopyRowFunction(width);
return res;
}
@@ -606,9 +610,42 @@
mFrameNumberMap.erase(it);
}
- // Heic composition doesn't depend on capture result, so no need to check
- // mErrorFrameNumbers. Just remove them.
- mErrorFrameNumbers.clear();
+ while (!mCaptureResults.empty()) {
+ auto it = mCaptureResults.begin();
+ // Negative timestamp indicates that something went wrong during the capture result
+ // collection process.
+ if (it->first >= 0) {
+ if (mPendingInputFrames[it->first].frameNumber == std::get<0>(it->second)) {
+ mPendingInputFrames[it->first].result =
+ std::make_unique<CameraMetadata>(std::get<1>(it->second));
+ } else {
+ ALOGE("%s: Capture result frameNumber/timestamp mapping changed between "
+ "shutter and capture result!", __FUNCTION__);
+ }
+ }
+ mCaptureResults.erase(it);
+ }
+
+ // mErrorFrameNumbers stores frame number of dropped buffers.
+ auto it = mErrorFrameNumbers.begin();
+ while (it != mErrorFrameNumbers.end()) {
+ bool frameFound = false;
+ for (auto &inputFrame : mPendingInputFrames) {
+ if (inputFrame.second.frameNumber == *it) {
+ inputFrame.second.error = true;
+ frameFound = true;
+ break;
+ }
+ }
+
+ if (frameFound) {
+ it = mErrorFrameNumbers.erase(it);
+ } else {
+ ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__,
+ *it);
+ it++;
+ }
+ }
// Distribute codec input buffers to be filled out from YUV output
for (auto it = mPendingInputFrames.begin();
@@ -639,14 +676,14 @@
bool newInputAvailable = false;
for (const auto& it : mPendingInputFrames) {
- bool appSegmentBufferReady = (it.second.appSegmentBuffer.data != nullptr) &&
- !it.second.appSegmentWritten;
+ bool appSegmentReady = (it.second.appSegmentBuffer.data != nullptr) &&
+ !it.second.appSegmentWritten && it.second.result != nullptr;
bool codecOutputReady = !it.second.codecOutputBuffers.empty();
bool codecInputReady = (it.second.yuvBuffer.data != nullptr) &&
(!it.second.codecInputBuffers.empty());
if ((!it.second.error) &&
(it.first < *currentTs) &&
- (appSegmentBufferReady || codecOutputReady || codecInputReady)) {
+ (appSegmentReady || codecOutputReady || codecInputReady)) {
*currentTs = it.first;
newInputAvailable = true;
break;
@@ -678,13 +715,13 @@
ATRACE_CALL();
status_t res = OK;
- bool appSegmentBufferReady = inputFrame.appSegmentBuffer.data != nullptr &&
- !inputFrame.appSegmentWritten;
+ bool appSegmentReady = inputFrame.appSegmentBuffer.data != nullptr &&
+ !inputFrame.appSegmentWritten && inputFrame.result != nullptr;
bool codecOutputReady = inputFrame.codecOutputBuffers.size() > 0;
bool codecInputReady = inputFrame.yuvBuffer.data != nullptr &&
!inputFrame.codecInputBuffers.empty();
- if (!appSegmentBufferReady && !codecOutputReady && !codecInputReady) {
+ if (!appSegmentReady && !codecOutputReady && !codecInputReady) {
ALOGW("%s: No valid appSegmentBuffer/codec input/outputBuffer available!", __FUNCTION__);
return OK;
}
@@ -710,7 +747,7 @@
}
// Write JPEG APP segments data to the muxer.
- if (appSegmentBufferReady && inputFrame.muxer != nullptr) {
+ if (appSegmentReady && inputFrame.muxer != nullptr) {
res = processAppSegment(timestamp, inputFrame);
if (res != OK) {
ALOGE("%s: Failed to process JPEG APP segments: %s (%d)", __FUNCTION__,
@@ -829,10 +866,8 @@
ALOGE("%s: Failed to initialize ExifUtils object!", __FUNCTION__);
return BAD_VALUE;
}
- //TODO: Use capture result metadata and static metadata to fill out the
- //rest.
- CameraMetadata dummyMeta;
- exifRes = exifUtils->setFromMetadata(dummyMeta, mOutputWidth, mOutputHeight);
+ exifRes = exifUtils->setFromMetadata(*inputFrame.result, mStaticInfo,
+ mOutputWidth, mOutputHeight);
if (!exifRes) {
ALOGE("%s: Failed to set Exif tags using metadata and main image sizes", __FUNCTION__);
return BAD_VALUE;
@@ -1012,6 +1047,7 @@
}
inputFrame.anb = nullptr;
+ ATRACE_ASYNC_END("HEIC capture", inputFrame.frameNumber);
return OK;
}
@@ -1339,7 +1375,7 @@
for (auto row = top; row < top+height; row++) {
uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::Y].mOffset +
imageInfo->mPlane[MediaImage2::Y].mRowInc * (row - top);
- memcpy(dst, yuvBuffer.data+row*yuvBuffer.stride+left, width);
+ mFnCopyRow(yuvBuffer.data+row*yuvBuffer.stride+left, dst, width);
}
// U is Cb, V is Cr
@@ -1372,24 +1408,25 @@
for (auto row = top/2; row < (top+height)/2; row++) {
uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[dstPlane].mOffset +
imageInfo->mPlane[dstPlane].mRowInc * (row - top/2);
- memcpy(dst, src+row*yuvBuffer.chromaStride+left, width);
+ mFnCopyRow(src+row*yuvBuffer.chromaStride+left, dst, width);
}
} else if (isCodecUvPlannar && yuvBuffer.chromaStep == 1) {
// U plane
for (auto row = top/2; row < (top+height)/2; row++) {
uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::U].mOffset +
imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2);
- memcpy(dst, yuvBuffer.dataCb+row*yuvBuffer.chromaStride+left/2, width/2);
+ mFnCopyRow(yuvBuffer.dataCb+row*yuvBuffer.chromaStride+left/2, dst, width/2);
}
// V plane
for (auto row = top/2; row < (top+height)/2; row++) {
uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::V].mOffset +
imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2);
- memcpy(dst, yuvBuffer.dataCr+row*yuvBuffer.chromaStride+left/2, width/2);
+ mFnCopyRow(yuvBuffer.dataCr+row*yuvBuffer.chromaStride+left/2, dst, width/2);
}
} else {
- // Convert between semiplannar and plannar
+            // Convert between semiplanar and planar, or when UV orders are
+            // different.
uint8_t *dst = codecBuffer->data();
for (auto row = top/2; row < (top+height)/2; row++) {
for (auto col = left/2; col < (left+width)/2; col++) {
@@ -1412,6 +1449,38 @@
return OK;
}
+void HeicCompositeStream::initCopyRowFunction(int32_t width)
+{
+ using namespace libyuv;
+
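+    // Start with libyuv's portable C row copy; the checks below switch to a
+    // SIMD variant based on detected CPU features and row-width alignment.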
+ mFnCopyRow = CopyRow_C;
+#if defined(HAS_COPYROW_SSE2)
+ if (TestCpuFlag(kCpuHasSSE2)) {
+ mFnCopyRow = IS_ALIGNED(width, 32) ? CopyRow_SSE2 : CopyRow_Any_SSE2;
+ }
+#endif
+#if defined(HAS_COPYROW_AVX)
+ if (TestCpuFlag(kCpuHasAVX)) {
+ mFnCopyRow = IS_ALIGNED(width, 64) ? CopyRow_AVX : CopyRow_Any_AVX;
+ }
+#endif
+#if defined(HAS_COPYROW_ERMS)
+ if (TestCpuFlag(kCpuHasERMS)) {
+ mFnCopyRow = CopyRow_ERMS;
+ }
+#endif
+#if defined(HAS_COPYROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ mFnCopyRow = IS_ALIGNED(width, 32) ? CopyRow_NEON : CopyRow_Any_NEON;
+ }
+#endif
+#if defined(HAS_COPYROW_MIPS)
+ if (TestCpuFlag(kCpuHasMIPS)) {
+ mFnCopyRow = CopyRow_MIPS;
+ }
+#endif
+}
+
size_t HeicCompositeStream::calcAppSegmentMaxSize(const CameraMetadata& info) {
camera_metadata_ro_entry_t entry = info.find(ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT);
size_t maxAppsSegment = 1;
@@ -1497,6 +1566,36 @@
return res;
}
+void HeicCompositeStream::onResultError(const CaptureResultExtras& resultExtras) {
+ // For result error, since the APPS_SEGMENT buffer already contains EXIF,
+ // simply skip using the capture result metadata to override EXIF.
+ Mutex::Autolock l(mMutex);
+
+ int64_t timestamp = -1;
+ for (const auto& fn : mFrameNumberMap) {
+ if (fn.first == resultExtras.frameNumber) {
+ timestamp = fn.second;
+ break;
+ }
+ }
+ if (timestamp == -1) {
+ for (const auto& inputFrame : mPendingInputFrames) {
+ if (inputFrame.second.frameNumber == resultExtras.frameNumber) {
+ timestamp = inputFrame.first;
+ break;
+ }
+ }
+ }
+
+ if (timestamp == -1) {
+ ALOGE("%s: Failed to find shutter timestamp for result error!", __FUNCTION__);
+ return;
+ }
+
+ mCaptureResults.emplace(timestamp, std::make_tuple(resultExtras.frameNumber, CameraMetadata()));
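+    // Queue an empty capture result for this timestamp so the pending input frame
+    // does not stall waiting for a result; its APP segment EXIF is kept as-is.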
+ mInputReadyCondition.signal();
+}
+
void HeicCompositeStream::CodecCallbackHandler::onMessageReceived(const sp<AMessage> &msg) {
sp<HeicCompositeStream> parent = mParent.promote();
if (parent == nullptr) return;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index 0a76256..2aa3c38 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -77,7 +77,7 @@
bool threadLoop() override;
bool onStreamBufferError(const CaptureResultExtras& resultExtras) override;
- void onResultError(const CaptureResultExtras& /*resultExtras*/) override {}
+ void onResultError(const CaptureResultExtras& resultExtras) override;
private:
//
@@ -145,12 +145,13 @@
int32_t orientation;
int32_t quality;
- CpuConsumer::LockedBuffer appSegmentBuffer;
+ CpuConsumer::LockedBuffer appSegmentBuffer;
std::vector<CodecOutputBufferInfo> codecOutputBuffers;
+ std::unique_ptr<CameraMetadata> result;
// Fields that are only applicable to HEVC tiling.
- CpuConsumer::LockedBuffer yuvBuffer;
- std::vector<CodecInputBufferInfo> codecInputBuffers;
+ CpuConsumer::LockedBuffer yuvBuffer;
+ std::vector<CodecInputBufferInfo> codecInputBuffers;
bool error;
bool errorNotified;
@@ -194,6 +195,7 @@
status_t copyOneYuvTile(sp<MediaCodecBuffer>& codecBuffer,
const CpuConsumer::LockedBuffer& yuvBuffer,
size_t top, size_t left, size_t width, size_t height);
+ void initCopyRowFunction(int32_t width);
static size_t calcAppSegmentMaxSize(const CameraMetadata& info);
static const nsecs_t kWaitDuration = 10000000; // 10 ms
@@ -209,6 +211,7 @@
sp<Surface> mAppSegmentSurface;
bool mAppSegmentBufferAcquired;
size_t mAppSegmentMaxSize;
+ CameraMetadata mStaticInfo;
int mMainImageStreamId, mMainImageSurfaceId;
sp<Surface> mMainImageSurface;
@@ -242,6 +245,9 @@
// In most common use case, entries are accessed in order.
std::map<int64_t, InputFrame> mPendingInputFrames;
+
+ // Function pointer of libyuv row copy.
+ void (*mFnCopyRow)(const uint8_t* src, uint8_t* dst, int width);
};
}; // namespace camera3
diff --git a/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp b/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp
index a945aca..6d96163 100644
--- a/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp
+++ b/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp
@@ -32,9 +32,12 @@
#include <dynamic_depth/profile.h>
#include <dynamic_depth/profiles.h>
#include <jpeglib.h>
+#include <libexif/exif-data.h>
+#include <libexif/exif-system.h>
#include <math.h>
#include <sstream>
#include <utils/Errors.h>
+#include <utils/ExifUtils.h>
#include <utils/Log.h>
#include <xmpmeta/xmp_data.h>
#include <xmpmeta/xmp_writer.h>
@@ -61,8 +64,44 @@
namespace android {
namespace camera3 {
+ExifOrientation getExifOrientation(const unsigned char *jpegBuffer, size_t jpegBufferSize) {
+ if ((jpegBuffer == nullptr) || (jpegBufferSize == 0)) {
+ return ExifOrientation::ORIENTATION_UNDEFINED;
+ }
+
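+    // Parse the EXIF block embedded in the main JPEG; the ExifData reference is
+    // released with exif_data_unref() on every return path below.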
+ auto exifData = exif_data_new();
+ exif_data_load_data(exifData, jpegBuffer, jpegBufferSize);
+ ExifEntry *orientation = exif_content_get_entry(exifData->ifd[EXIF_IFD_0],
+ EXIF_TAG_ORIENTATION);
+ if ((orientation == nullptr) || (orientation->size != sizeof(ExifShort))) {
+ ALOGV("%s: Orientation EXIF entry invalid!", __FUNCTION__);
+ exif_data_unref(exifData);
+ return ExifOrientation::ORIENTATION_0_DEGREES;
+ }
+
+ auto orientationValue = exif_get_short(orientation->data, exif_data_get_byte_order(exifData));
+ ExifOrientation ret;
+ switch (orientationValue) {
+ case ExifOrientation::ORIENTATION_0_DEGREES:
+ case ExifOrientation::ORIENTATION_90_DEGREES:
+ case ExifOrientation::ORIENTATION_180_DEGREES:
+ case ExifOrientation::ORIENTATION_270_DEGREES:
+ ret = static_cast<ExifOrientation> (orientationValue);
+ break;
+ default:
+ ALOGE("%s: Unexpected EXIF orientation value: %d, defaulting to 0 degrees",
+ __FUNCTION__, orientationValue);
+ ret = ExifOrientation::ORIENTATION_0_DEGREES;
+ }
+
+ exif_data_unref(exifData);
+
+ return ret;
+}
+
status_t encodeGrayscaleJpeg(size_t width, size_t height, uint8_t *in, void *out,
- const size_t maxOutSize, uint8_t jpegQuality, size_t &actualSize) {
+ const size_t maxOutSize, uint8_t jpegQuality, ExifOrientation exifOrientation,
+ size_t &actualSize) {
status_t ret;
// libjpeg is a C library so we use C-style "inheritance" by
// putting libjpeg's jpeg_destination_mgr first in our custom
@@ -151,6 +190,23 @@
jpeg_start_compress(&cinfo, TRUE);
+ if (exifOrientation != ExifOrientation::ORIENTATION_UNDEFINED) {
+ std::unique_ptr<ExifUtils> utils(ExifUtils::create());
+ utils->initializeEmpty();
+ utils->setImageWidth(width);
+ utils->setImageHeight(height);
+ utils->setOrientationValue(exifOrientation);
+
+ if (utils->generateApp1()) {
+ const uint8_t* exifBuffer = utils->getApp1Buffer();
+ size_t exifBufferSize = utils->getApp1Length();
+ jpeg_write_marker(&cinfo, JPEG_APP0 + 1, static_cast<const JOCTET*>(exifBuffer),
+ exifBufferSize);
+ } else {
+ ALOGE("%s: Unable to generate App1 buffer", __FUNCTION__);
+ }
+ }
+
for (size_t i = 0; i < cinfo.image_height; i++) {
auto currentRow = static_cast<JSAMPROW>(in + i*width);
         jpeg_write_scanlines(&cinfo, &currentRow, /*num_lines*/1);
@@ -168,8 +224,106 @@
return ret;
}
+inline void unpackDepth16(uint16_t value, std::vector<float> *points /*out*/,
+ std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
+ // Android densely packed depth map. The units for the range are in
+ // millimeters and need to be scaled to meters.
+ // The confidence value is encoded in the 3 most significant bits.
+ // The confidence data needs to be additionally normalized with
+ // values 1.0f, 0.0f representing maximum and minimum confidence
+ // respectively.
+ auto point = static_cast<float>(value & 0x1FFF) / 1000.f;
+ points->push_back(point);
+
+ auto conf = (value >> 13) & 0x7;
+ float normConfidence = (conf == 0) ? 1.f : (static_cast<float>(conf) - 1) / 7.f;
+ confidence->push_back(normConfidence);
+
+ if (*near > point) {
+ *near = point;
+ }
+ if (*far < point) {
+ *far = point;
+ }
+}
+
+// Trivial case: read forward from the top-left corner.
+void rotate0AndUnpack(DepthPhotoInputFrame inputFrame, std::vector<float> *points /*out*/,
+ std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
+ for (size_t i = 0; i < inputFrame.mDepthMapHeight; i++) {
+ for (size_t j = 0; j < inputFrame.mDepthMapWidth; j++) {
+ unpackDepth16(inputFrame.mDepthMapBuffer[i*inputFrame.mDepthMapStride + j], points,
+ confidence, near, far);
+ }
+ }
+}
+
+// A 90-degree CW rotation can be applied by starting to read from the bottom-left corner,
+// transposing rows and columns.
+void rotate90AndUnpack(DepthPhotoInputFrame inputFrame, std::vector<float> *points /*out*/,
+ std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
+ for (size_t i = 0; i < inputFrame.mDepthMapWidth; i++) {
+ for (ssize_t j = inputFrame.mDepthMapHeight-1; j >= 0; j--) {
+ unpackDepth16(inputFrame.mDepthMapBuffer[j*inputFrame.mDepthMapStride + i], points,
+ confidence, near, far);
+ }
+ }
+}
+
+// A 180-degree CW rotation can be applied by starting to read backwards from the bottom-right corner.
+void rotate180AndUnpack(DepthPhotoInputFrame inputFrame, std::vector<float> *points /*out*/,
+ std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
+ for (ssize_t i = inputFrame.mDepthMapHeight-1; i >= 0; i--) {
+ for (ssize_t j = inputFrame.mDepthMapWidth-1; j >= 0; j--) {
+ unpackDepth16(inputFrame.mDepthMapBuffer[i*inputFrame.mDepthMapStride + j], points,
+ confidence, near, far);
+ }
+ }
+}
+
+// A 270-degree CW rotation can be applied by starting to read from the top-right corner,
+// transposing rows and columns.
+void rotate270AndUnpack(DepthPhotoInputFrame inputFrame, std::vector<float> *points /*out*/,
+ std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
+ for (ssize_t i = inputFrame.mDepthMapWidth-1; i >= 0; i--) {
+ for (size_t j = 0; j < inputFrame.mDepthMapHeight; j++) {
+ unpackDepth16(inputFrame.mDepthMapBuffer[j*inputFrame.mDepthMapStride + i], points,
+ confidence, near, far);
+ }
+ }
+}
+
+bool rotateAndUnpack(DepthPhotoInputFrame inputFrame, std::vector<float> *points /*out*/,
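+// Dispatches to the unpack routine matching the depth photo orientation. Returns
+// true for 90/270 degree rotations, where rows and columns are transposed and the
+// caller needs to switch the output width and height.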
+ std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
+ switch (inputFrame.mOrientation) {
+ case DepthPhotoOrientation::DEPTH_ORIENTATION_0_DEGREES:
+ rotate0AndUnpack(inputFrame, points, confidence, near, far);
+ return false;
+ case DepthPhotoOrientation::DEPTH_ORIENTATION_90_DEGREES:
+ rotate90AndUnpack(inputFrame, points, confidence, near, far);
+ return true;
+ case DepthPhotoOrientation::DEPTH_ORIENTATION_180_DEGREES:
+ rotate180AndUnpack(inputFrame, points, confidence, near, far);
+ return false;
+ case DepthPhotoOrientation::DEPTH_ORIENTATION_270_DEGREES:
+ rotate270AndUnpack(inputFrame, points, confidence, near, far);
+ return true;
+ default:
+ ALOGE("%s: Unsupported depth photo rotation: %d, default to 0", __FUNCTION__,
+ inputFrame.mOrientation);
+ rotate0AndUnpack(inputFrame, points, confidence, near, far);
+ }
+
+ return false;
+}
+
std::unique_ptr<dynamic_depth::DepthMap> processDepthMapFrame(DepthPhotoInputFrame inputFrame,
- std::vector<std::unique_ptr<Item>> *items /*out*/) {
+ ExifOrientation exifOrientation, std::vector<std::unique_ptr<Item>> *items /*out*/,
+ bool *switchDimensions /*out*/) {
+ if ((items == nullptr) || (switchDimensions == nullptr)) {
+ return nullptr;
+ }
+
std::vector<float> points, confidence;
size_t pointCount = inputFrame.mDepthMapWidth * inputFrame.mDepthMapHeight;
@@ -177,29 +331,21 @@
confidence.reserve(pointCount);
float near = UINT16_MAX;
float far = .0f;
- for (size_t i = 0; i < inputFrame.mDepthMapHeight; i++) {
- for (size_t j = 0; j < inputFrame.mDepthMapWidth; j++) {
- // Android densely packed depth map. The units for the range are in
- // millimeters and need to be scaled to meters.
- // The confidence value is encoded in the 3 most significant bits.
- // The confidence data needs to be additionally normalized with
- // values 1.0f, 0.0f representing maximum and minimum confidence
- // respectively.
- auto value = inputFrame.mDepthMapBuffer[i*inputFrame.mDepthMapStride + j];
- auto point = static_cast<float>(value & 0x1FFF) / 1000.f;
- points.push_back(point);
+ *switchDimensions = false;
+ // Physical rotation of the depth and confidence maps may be needed when the
+ // EXIF orientation is set to 0 degrees while the depth photo orientation
+ // (taken from the source color image) has a different value.
+ if (exifOrientation == ExifOrientation::ORIENTATION_0_DEGREES) {
+ *switchDimensions = rotateAndUnpack(inputFrame, &points, &confidence, &near, &far);
+ } else {
+ rotate0AndUnpack(inputFrame, &points, &confidence, &near, &far);
+ }
- auto conf = (value >> 13) & 0x7;
- float normConfidence = (conf == 0) ? 1.f : (static_cast<float>(conf) - 1) / 7.f;
- confidence.push_back(normConfidence);
-
- if (near > point) {
- near = point;
- }
- if (far < point) {
- far = point;
- }
- }
+ size_t width = inputFrame.mDepthMapWidth;
+ size_t height = inputFrame.mDepthMapHeight;
+ if (*switchDimensions) {
+ width = inputFrame.mDepthMapHeight;
+ height = inputFrame.mDepthMapWidth;
}
if (near == far) {
@@ -225,18 +371,18 @@
depthParams.depth_image_data.resize(inputFrame.mMaxJpegSize);
depthParams.confidence_data.resize(inputFrame.mMaxJpegSize);
size_t actualJpegSize;
- auto ret = encodeGrayscaleJpeg(inputFrame.mDepthMapWidth, inputFrame.mDepthMapHeight,
- pointsQuantized.data(), depthParams.depth_image_data.data(), inputFrame.mMaxJpegSize,
- inputFrame.mJpegQuality, actualJpegSize);
+ auto ret = encodeGrayscaleJpeg(width, height, pointsQuantized.data(),
+ depthParams.depth_image_data.data(), inputFrame.mMaxJpegSize,
+ inputFrame.mJpegQuality, exifOrientation, actualJpegSize);
if (ret != NO_ERROR) {
ALOGE("%s: Depth map compression failed!", __FUNCTION__);
return nullptr;
}
depthParams.depth_image_data.resize(actualJpegSize);
- ret = encodeGrayscaleJpeg(inputFrame.mDepthMapWidth, inputFrame.mDepthMapHeight,
- confidenceQuantized.data(), depthParams.confidence_data.data(), inputFrame.mMaxJpegSize,
- inputFrame.mJpegQuality, actualJpegSize);
+ ret = encodeGrayscaleJpeg(width, height, confidenceQuantized.data(),
+ depthParams.confidence_data.data(), inputFrame.mMaxJpegSize,
+ inputFrame.mJpegQuality, exifOrientation, actualJpegSize);
if (ret != NO_ERROR) {
ALOGE("%s: Confidence map compression failed!", __FUNCTION__);
return nullptr;
@@ -262,7 +408,12 @@
return BAD_VALUE;
}
- cameraParams->depth_map = processDepthMapFrame(inputFrame, &items);
+ ExifOrientation exifOrientation = getExifOrientation(
+ reinterpret_cast<const unsigned char*> (inputFrame.mMainJpegBuffer),
+ inputFrame.mMainJpegSize);
+ bool switchDimensions;
+ cameraParams->depth_map = processDepthMapFrame(inputFrame, exifOrientation, &items,
+ &switchDimensions);
if (cameraParams->depth_map == nullptr) {
ALOGE("%s: Depth map processing failed!", __FUNCTION__);
return BAD_VALUE;
@@ -274,7 +425,13 @@
// [focalLengthX, focalLengthY, opticalCenterX, opticalCenterY, skew]
const dynamic_depth::Point<double> focalLength(inputFrame.mInstrinsicCalibration[0],
inputFrame.mInstrinsicCalibration[1]);
- const Dimension imageSize(inputFrame.mMainJpegWidth, inputFrame.mMainJpegHeight);
+ size_t width = inputFrame.mMainJpegWidth;
+ size_t height = inputFrame.mMainJpegHeight;
+ if (switchDimensions) {
+ width = inputFrame.mMainJpegHeight;
+ height = inputFrame.mMainJpegWidth;
+ }
+ const Dimension imageSize(width, height);
ImagingModelParams imagingParams(focalLength, imageSize);
imagingParams.principal_point.x = inputFrame.mInstrinsicCalibration[2];
imagingParams.principal_point.y = inputFrame.mInstrinsicCalibration[3];
diff --git a/services/camera/libcameraservice/common/DepthPhotoProcessor.h b/services/camera/libcameraservice/common/DepthPhotoProcessor.h
index 19889a1..6a2fbff 100644
--- a/services/camera/libcameraservice/common/DepthPhotoProcessor.h
+++ b/services/camera/libcameraservice/common/DepthPhotoProcessor.h
@@ -23,19 +23,27 @@
namespace android {
namespace camera3 {
+enum DepthPhotoOrientation {
+ DEPTH_ORIENTATION_0_DEGREES = 0,
+ DEPTH_ORIENTATION_90_DEGREES = 90,
+ DEPTH_ORIENTATION_180_DEGREES = 180,
+ DEPTH_ORIENTATION_270_DEGREES = 270,
+};
+
struct DepthPhotoInputFrame {
- const char* mMainJpegBuffer;
- size_t mMainJpegSize;
- size_t mMainJpegWidth, mMainJpegHeight;
- uint16_t* mDepthMapBuffer;
- size_t mDepthMapWidth, mDepthMapHeight, mDepthMapStride;
- size_t mMaxJpegSize;
- uint8_t mJpegQuality;
- uint8_t mIsLogical;
- float mInstrinsicCalibration[5];
- uint8_t mIsInstrinsicCalibrationValid;
- float mLensDistortion[5];
- uint8_t mIsLensDistortionValid;
+ const char* mMainJpegBuffer;
+ size_t mMainJpegSize;
+ size_t mMainJpegWidth, mMainJpegHeight;
+ uint16_t* mDepthMapBuffer;
+ size_t mDepthMapWidth, mDepthMapHeight, mDepthMapStride;
+ size_t mMaxJpegSize;
+ uint8_t mJpegQuality;
+ uint8_t mIsLogical;
+ float mInstrinsicCalibration[5];
+ uint8_t mIsInstrinsicCalibrationValid;
+ float mLensDistortion[5];
+ uint8_t mIsLensDistortionValid;
+ DepthPhotoOrientation mOrientation;
DepthPhotoInputFrame() :
mMainJpegBuffer(nullptr),
@@ -52,7 +60,8 @@
mInstrinsicCalibration{0.f},
mIsInstrinsicCalibrationValid(0),
mLensDistortion{0.f},
- mIsLensDistortionValid(0) {}
+ mIsLensDistortionValid(0),
+ mOrientation(DepthPhotoOrientation::DEPTH_ORIENTATION_0_DEGREES) {}
};
static const char *kDepthPhotoLibrary = "libdepthphoto.so";
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index f9ef996..923d17a 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -2151,7 +2151,11 @@
// Pause to reconfigure
status_t Camera3Device::internalPauseAndWaitLocked(nsecs_t maxExpectedDuration) {
- mRequestThread->setPaused(true);
+ if (mRequestThread.get() != nullptr) {
+ mRequestThread->setPaused(true);
+ } else {
+ return NO_INIT;
+ }
ALOGV("%s: Camera %s: Internal wait until idle (% " PRIi64 " ns)", __FUNCTION__, mId.string(),
maxExpectedDuration);
@@ -4558,7 +4562,7 @@
return;
}
- auto err = mHidlSession_3_5->signalStreamFlush(streamIds, mNextStreamConfigCounter);
+ auto err = mHidlSession_3_5->signalStreamFlush(streamIds, mNextStreamConfigCounter - 1);
if (!err.isOk()) {
ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
return;
@@ -5890,16 +5894,16 @@
if (mPaused == false) {
ALOGV("%s: RequestThread: Going idle", __FUNCTION__);
mPaused = true;
- // Let the tracker know
- sp<StatusTracker> statusTracker = mStatusTracker.promote();
- if (statusTracker != 0) {
- statusTracker->markComponentIdle(mStatusId, Fence::NO_FENCE);
- }
if (mNotifyPipelineDrain) {
mInterface->signalPipelineDrain(mStreamIdsToBeDrained);
mNotifyPipelineDrain = false;
mStreamIdsToBeDrained.clear();
}
+ // Let the tracker know
+ sp<StatusTracker> statusTracker = mStatusTracker.promote();
+ if (statusTracker != 0) {
+ statusTracker->markComponentIdle(mStatusId, Fence::NO_FENCE);
+ }
sp<Camera3Device> parent = mParent.promote();
if (parent != nullptr) {
parent->mRequestBufferSM.onRequestThreadPaused();
@@ -5983,16 +5987,16 @@
if (mPaused == false) {
mPaused = true;
ALOGV("%s: RequestThread: Paused", __FUNCTION__);
- // Let the tracker know
- sp<StatusTracker> statusTracker = mStatusTracker.promote();
- if (statusTracker != 0) {
- statusTracker->markComponentIdle(mStatusId, Fence::NO_FENCE);
- }
if (mNotifyPipelineDrain) {
mInterface->signalPipelineDrain(mStreamIdsToBeDrained);
mNotifyPipelineDrain = false;
mStreamIdsToBeDrained.clear();
}
+ // Let the tracker know
+ sp<StatusTracker> statusTracker = mStatusTracker.promote();
+ if (statusTracker != 0) {
+ statusTracker->markComponentIdle(mStatusId, Fence::NO_FENCE);
+ }
sp<Camera3Device> parent = mParent.promote();
if (parent != nullptr) {
parent->mRequestBufferSM.onRequestThreadPaused();
diff --git a/services/camera/libcameraservice/tests/Android.mk b/services/camera/libcameraservice/tests/Android.mk
index d777ca1..b4e7c32 100644
--- a/services/camera/libcameraservice/tests/Android.mk
+++ b/services/camera/libcameraservice/tests/Android.mk
@@ -27,6 +27,8 @@
libcamera_client \
libcamera_metadata \
libutils \
+ libjpeg \
+ libexif \
android.hardware.camera.common@1.0 \
android.hardware.camera.provider@2.4 \
android.hardware.camera.provider@2.5 \
@@ -36,6 +38,8 @@
LOCAL_C_INCLUDES += \
system/media/private/camera/include \
+ external/dynamic_depth/includes \
+ external/dynamic_depth/internal \
LOCAL_CFLAGS += -Wall -Wextra -Werror
diff --git a/services/camera/libcameraservice/tests/DepthProcessorTest.cpp b/services/camera/libcameraservice/tests/DepthProcessorTest.cpp
new file mode 100644
index 0000000..2162514
--- /dev/null
+++ b/services/camera/libcameraservice/tests/DepthProcessorTest.cpp
@@ -0,0 +1,382 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "DepthProcessorTest"
+
+#include <array>
+#include <random>
+
+#include <dlfcn.h>
+#include <gtest/gtest.h>
+
+#include "../common/DepthPhotoProcessor.h"
+#include "../utils/ExifUtils.h"
+#include "NV12Compressor.h"
+
+using namespace android;
+using namespace android::camera3;
+
+static const size_t kTestBufferWidth = 640;
+static const size_t kTestBufferHeight = 480;
+static const size_t kTestBufferNV12Size ((((kTestBufferWidth) * (kTestBufferHeight)) * 3) / 2);
+static const size_t kTestBufferDepthSize (kTestBufferWidth * kTestBufferHeight);
+static const size_t kSeed = 1234;
+
+void linkToDepthPhotoLibrary(void **libHandle /*out*/,
+ process_depth_photo_frame *processFrameFunc /*out*/) {
+ ASSERT_NE(libHandle, nullptr);
+ ASSERT_NE(processFrameFunc, nullptr);
+
+ *libHandle = dlopen(kDepthPhotoLibrary, RTLD_NOW | RTLD_LOCAL);
+ if (*libHandle != nullptr) {
+ *processFrameFunc = reinterpret_cast<camera3::process_depth_photo_frame> (
+ dlsym(*libHandle, kDepthPhotoProcessFunction));
+ ASSERT_NE(*processFrameFunc, nullptr);
+ }
+}
+
+void generateColorJpegBuffer(int jpegQuality, ExifOrientation orientationValue, bool includeExif,
+ bool switchDimensions, std::vector<uint8_t> *colorJpegBuffer /*out*/) {
+ ASSERT_NE(colorJpegBuffer, nullptr);
+
+ std::array<uint8_t, kTestBufferNV12Size> colorSourceBuffer;
+ std::default_random_engine gen(kSeed);
+ std::uniform_int_distribution<int> uniDist(0, UINT8_MAX - 1);
+ for (size_t i = 0; i < colorSourceBuffer.size(); i++) {
+ colorSourceBuffer[i] = uniDist(gen);
+ }
+
+ size_t width = kTestBufferWidth;
+ size_t height = kTestBufferHeight;
+ if (switchDimensions) {
+ width = kTestBufferHeight;
+ height = kTestBufferWidth;
+ }
+
+ NV12Compressor jpegCompressor;
+ if (includeExif) {
+ ASSERT_TRUE(jpegCompressor.compressWithExifOrientation(
+ reinterpret_cast<const unsigned char*> (colorSourceBuffer.data()), width, height,
+ jpegQuality, orientationValue));
+ } else {
+ ASSERT_TRUE(jpegCompressor.compress(
+ reinterpret_cast<const unsigned char*> (colorSourceBuffer.data()), width, height,
+ jpegQuality));
+ }
+
+ *colorJpegBuffer = std::move(jpegCompressor.getCompressedData());
+ ASSERT_FALSE(colorJpegBuffer->empty());
+}
+
+void generateDepth16Buffer(std::array<uint16_t, kTestBufferDepthSize> *depth16Buffer /*out*/) {
+ ASSERT_NE(depth16Buffer, nullptr);
+ std::default_random_engine gen(kSeed+1);
+ std::uniform_int_distribution<int> uniDist(0, UINT16_MAX - 1);
+ for (size_t i = 0; i < depth16Buffer->size(); i++) {
+ (*depth16Buffer)[i] = uniDist(gen);
+ }
+}
+
+TEST(DepthProcessorTest, LinkToLibrary) {
+ void *libHandle;
+ process_depth_photo_frame processFunc;
+ linkToDepthPhotoLibrary(&libHandle, &processFunc);
+ if (libHandle != nullptr) {
+ dlclose(libHandle);
+ }
+}
+
+TEST(DepthProcessorTest, BadInput) {
+ void *libHandle;
+ int jpegQuality = 95;
+
+ process_depth_photo_frame processFunc;
+ linkToDepthPhotoLibrary(&libHandle, &processFunc);
+ if (libHandle == nullptr) {
+ // Depth library not present, nothing more to test.
+ return;
+ }
+
+ DepthPhotoInputFrame inputFrame;
+ // Worst case both depth and confidence maps have the same size as the main color image.
+ inputFrame.mMaxJpegSize = inputFrame.mMainJpegSize * 3;
+
+ std::vector<uint8_t> colorJpegBuffer;
+ generateColorJpegBuffer(jpegQuality, ExifOrientation::ORIENTATION_UNDEFINED,
+ /*includeExif*/ false, /*switchDimensions*/ false, &colorJpegBuffer);
+
+ std::array<uint16_t, kTestBufferDepthSize> depth16Buffer;
+ generateDepth16Buffer(&depth16Buffer);
+
+ std::vector<uint8_t> depthPhotoBuffer(inputFrame.mMaxJpegSize);
+ size_t actualDepthPhotoSize = 0;
+
+ inputFrame.mMainJpegWidth = kTestBufferWidth;
+ inputFrame.mMainJpegHeight = kTestBufferHeight;
+ inputFrame.mJpegQuality = jpegQuality;
+ ASSERT_NE(processFunc(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
+ &actualDepthPhotoSize), 0);
+
+ inputFrame.mMainJpegBuffer = reinterpret_cast<const char*> (colorJpegBuffer.data());
+ inputFrame.mMainJpegSize = colorJpegBuffer.size();
+ ASSERT_NE(processFunc(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
+ &actualDepthPhotoSize), 0);
+
+ inputFrame.mDepthMapBuffer = depth16Buffer.data();
+ inputFrame.mDepthMapWidth = inputFrame.mDepthMapStride = kTestBufferWidth;
+ inputFrame.mDepthMapHeight = kTestBufferHeight;
+ ASSERT_NE(processFunc(inputFrame, depthPhotoBuffer.size(), nullptr,
+ &actualDepthPhotoSize), 0);
+
+ ASSERT_NE(processFunc(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(), nullptr),
+ 0);
+
+ dlclose(libHandle);
+}
+
+TEST(DepthProcessorTest, BasicDepthPhotoValidation) {
+ void *libHandle;
+ int jpegQuality = 95;
+
+ process_depth_photo_frame processFunc;
+ linkToDepthPhotoLibrary(&libHandle, &processFunc);
+ if (libHandle == nullptr) {
+ // Depth library not present, nothing more to test.
+ return;
+ }
+
+ std::vector<uint8_t> colorJpegBuffer;
+ generateColorJpegBuffer(jpegQuality, ExifOrientation::ORIENTATION_UNDEFINED,
+ /*includeExif*/ false, /*switchDimensions*/ false, &colorJpegBuffer);
+
+ std::array<uint16_t, kTestBufferDepthSize> depth16Buffer;
+ generateDepth16Buffer(&depth16Buffer);
+
+ DepthPhotoInputFrame inputFrame;
+ inputFrame.mMainJpegBuffer = reinterpret_cast<const char*> (colorJpegBuffer.data());
+ inputFrame.mMainJpegSize = colorJpegBuffer.size();
+ // Worst case both depth and confidence maps have the same size as the main color image.
+ inputFrame.mMaxJpegSize = inputFrame.mMainJpegSize * 3;
+ inputFrame.mMainJpegWidth = kTestBufferWidth;
+ inputFrame.mMainJpegHeight = kTestBufferHeight;
+ inputFrame.mJpegQuality = jpegQuality;
+ inputFrame.mDepthMapBuffer = depth16Buffer.data();
+ inputFrame.mDepthMapWidth = inputFrame.mDepthMapStride = kTestBufferWidth;
+ inputFrame.mDepthMapHeight = kTestBufferHeight;
+
+ std::vector<uint8_t> depthPhotoBuffer(inputFrame.mMaxJpegSize);
+ size_t actualDepthPhotoSize = 0;
+ ASSERT_EQ(processFunc(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
+ &actualDepthPhotoSize), 0);
+ ASSERT_TRUE((actualDepthPhotoSize > 0) && (depthPhotoBuffer.size() >= actualDepthPhotoSize));
+
+ // The final depth photo must consist of three jpeg images:
+ // - the main color image
+ // - the depth map image
+ // - the confidence map image
+ size_t mainJpegSize = 0;
+ ASSERT_EQ(NV12Compressor::findJpegSize(depthPhotoBuffer.data(), actualDepthPhotoSize,
+ &mainJpegSize), OK);
+ ASSERT_TRUE((mainJpegSize > 0) && (mainJpegSize < actualDepthPhotoSize));
+ size_t depthMapSize = 0;
+ ASSERT_EQ(NV12Compressor::findJpegSize(depthPhotoBuffer.data() + mainJpegSize,
+ actualDepthPhotoSize - mainJpegSize, &depthMapSize), OK);
+ ASSERT_TRUE((depthMapSize > 0) && (depthMapSize < (actualDepthPhotoSize - mainJpegSize)));
+
+ dlclose(libHandle);
+}
+
+TEST(DepthProcessorTest, TestDepthPhotoExifOrientation) {
+ void *libHandle;
+ int jpegQuality = 95;
+
+ process_depth_photo_frame processFunc;
+ linkToDepthPhotoLibrary(&libHandle, &processFunc);
+ if (libHandle == nullptr) {
+ // Depth library not present, nothing more to test.
+ return;
+ }
+
+ ExifOrientation exifOrientations[] = { ExifOrientation::ORIENTATION_UNDEFINED,
+ ExifOrientation::ORIENTATION_0_DEGREES, ExifOrientation::ORIENTATION_90_DEGREES,
+ ExifOrientation::ORIENTATION_180_DEGREES, ExifOrientation::ORIENTATION_270_DEGREES };
+ for (auto exifOrientation : exifOrientations) {
+ std::vector<uint8_t> colorJpegBuffer;
+ generateColorJpegBuffer(jpegQuality, exifOrientation, /*includeExif*/ true,
+ /*switchDimensions*/ false, &colorJpegBuffer);
+ if (exifOrientation != ExifOrientation::ORIENTATION_UNDEFINED) {
+ auto jpegExifOrientation = ExifOrientation::ORIENTATION_UNDEFINED;
+ ASSERT_EQ(NV12Compressor::getExifOrientation(colorJpegBuffer.data(),
+ colorJpegBuffer.size(), &jpegExifOrientation), OK);
+ ASSERT_EQ(exifOrientation, jpegExifOrientation);
+ }
+
+ std::array<uint16_t, kTestBufferDepthSize> depth16Buffer;
+ generateDepth16Buffer(&depth16Buffer);
+
+ DepthPhotoInputFrame inputFrame;
+ inputFrame.mMainJpegBuffer = reinterpret_cast<const char*> (colorJpegBuffer.data());
+ inputFrame.mMainJpegSize = colorJpegBuffer.size();
+ // Worst case both depth and confidence maps have the same size as the main color image.
+ inputFrame.mMaxJpegSize = inputFrame.mMainJpegSize * 3;
+ inputFrame.mMainJpegWidth = kTestBufferWidth;
+ inputFrame.mMainJpegHeight = kTestBufferHeight;
+ inputFrame.mJpegQuality = jpegQuality;
+ inputFrame.mDepthMapBuffer = depth16Buffer.data();
+ inputFrame.mDepthMapWidth = inputFrame.mDepthMapStride = kTestBufferWidth;
+ inputFrame.mDepthMapHeight = kTestBufferHeight;
+
+ std::vector<uint8_t> depthPhotoBuffer(inputFrame.mMaxJpegSize);
+ size_t actualDepthPhotoSize = 0;
+ ASSERT_EQ(processFunc(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
+ &actualDepthPhotoSize), 0);
+ ASSERT_TRUE((actualDepthPhotoSize > 0) &&
+ (depthPhotoBuffer.size() >= actualDepthPhotoSize));
+
+ size_t mainJpegSize = 0;
+ ASSERT_EQ(NV12Compressor::findJpegSize(depthPhotoBuffer.data(), actualDepthPhotoSize,
+ &mainJpegSize), OK);
+ ASSERT_TRUE((mainJpegSize > 0) && (mainJpegSize < actualDepthPhotoSize));
+ size_t depthMapSize = 0;
+ ASSERT_EQ(NV12Compressor::findJpegSize(depthPhotoBuffer.data() + mainJpegSize,
+ actualDepthPhotoSize - mainJpegSize, &depthMapSize), OK);
+ ASSERT_TRUE((depthMapSize > 0) && (depthMapSize < (actualDepthPhotoSize - mainJpegSize)));
+ size_t confidenceMapSize = actualDepthPhotoSize - (mainJpegSize + depthMapSize);
+
+ // Depth and confidence images must have the same EXIF orientation as the source.
+ auto depthJpegExifOrientation = ExifOrientation::ORIENTATION_UNDEFINED;
+ ASSERT_EQ(NV12Compressor::getExifOrientation(depthPhotoBuffer.data() + mainJpegSize,
+ depthMapSize, &depthJpegExifOrientation), OK);
+ if (exifOrientation == ExifOrientation::ORIENTATION_UNDEFINED) {
+ // In case of undefined or missing EXIF orientation, always expect 0 degrees in the
+ // depth map.
+ ASSERT_EQ(depthJpegExifOrientation, ExifOrientation::ORIENTATION_0_DEGREES);
+ } else {
+ ASSERT_EQ(depthJpegExifOrientation, exifOrientation);
+ }
+
+ auto confidenceJpegExifOrientation = ExifOrientation::ORIENTATION_UNDEFINED;
+ ASSERT_EQ(NV12Compressor::getExifOrientation(
+ depthPhotoBuffer.data() + mainJpegSize + depthMapSize,
+ confidenceMapSize, &confidenceJpegExifOrientation), OK);
+ if (exifOrientation == ExifOrientation::ORIENTATION_UNDEFINED) {
+ // In case of undefined or missing EXIF orientation, always expect 0 degrees in the
+ // confidence map.
+ ASSERT_EQ(confidenceJpegExifOrientation, ExifOrientation::ORIENTATION_0_DEGREES);
+ } else {
+ ASSERT_EQ(confidenceJpegExifOrientation, exifOrientation);
+ }
+ }
+
+ dlclose(libHandle);
+}
+
+TEST(DepthProcessorTest, TestDepthPhotoPhysicalRotation) {
+ void *libHandle;
+ int jpegQuality = 95;
+
+ process_depth_photo_frame processFunc;
+ linkToDepthPhotoLibrary(&libHandle, &processFunc);
+ if (libHandle == nullptr) {
+ // Depth library not present, nothing more to test.
+ return;
+ }
+
+ // In case of physical rotation, the EXIF orientation must always be 0.
+ auto exifOrientation = ExifOrientation::ORIENTATION_0_DEGREES;
+ DepthPhotoOrientation depthOrientations[] = {
+ DepthPhotoOrientation::DEPTH_ORIENTATION_0_DEGREES,
+ DepthPhotoOrientation::DEPTH_ORIENTATION_90_DEGREES,
+ DepthPhotoOrientation::DEPTH_ORIENTATION_180_DEGREES,
+ DepthPhotoOrientation::DEPTH_ORIENTATION_270_DEGREES };
+ for (auto depthOrientation : depthOrientations) {
+ std::vector<uint8_t> colorJpegBuffer;
+ bool switchDimensions = false;
+ size_t expectedWidth = kTestBufferWidth;
+ size_t expectedHeight = kTestBufferHeight;
+ if ((depthOrientation == DepthPhotoOrientation::DEPTH_ORIENTATION_90_DEGREES) ||
+ (depthOrientation == DepthPhotoOrientation::DEPTH_ORIENTATION_270_DEGREES)) {
+ switchDimensions = true;
+ expectedWidth = kTestBufferHeight;
+ expectedHeight = kTestBufferWidth;
+ }
+ generateColorJpegBuffer(jpegQuality, exifOrientation, /*includeExif*/ true,
+ switchDimensions, &colorJpegBuffer);
+ auto jpegExifOrientation = ExifOrientation::ORIENTATION_UNDEFINED;
+ ASSERT_EQ(NV12Compressor::getExifOrientation(colorJpegBuffer.data(), colorJpegBuffer.size(),
+ &jpegExifOrientation), OK);
+ ASSERT_EQ(exifOrientation, jpegExifOrientation);
+
+ std::array<uint16_t, kTestBufferDepthSize> depth16Buffer;
+ generateDepth16Buffer(&depth16Buffer);
+
+ DepthPhotoInputFrame inputFrame;
+ inputFrame.mMainJpegBuffer = reinterpret_cast<const char*> (colorJpegBuffer.data());
+ inputFrame.mMainJpegSize = colorJpegBuffer.size();
+ // Worst case both depth and confidence maps have the same size as the main color image.
+ inputFrame.mMaxJpegSize = inputFrame.mMainJpegSize * 3;
+ inputFrame.mMainJpegWidth = kTestBufferWidth;
+ inputFrame.mMainJpegHeight = kTestBufferHeight;
+ inputFrame.mJpegQuality = jpegQuality;
+ inputFrame.mDepthMapBuffer = depth16Buffer.data();
+ inputFrame.mDepthMapWidth = inputFrame.mDepthMapStride = kTestBufferWidth;
+ inputFrame.mDepthMapHeight = kTestBufferHeight;
+ inputFrame.mOrientation = depthOrientation;
+
+ std::vector<uint8_t> depthPhotoBuffer(inputFrame.mMaxJpegSize);
+ size_t actualDepthPhotoSize = 0;
+ ASSERT_EQ(processFunc(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
+ &actualDepthPhotoSize), 0);
+ ASSERT_TRUE((actualDepthPhotoSize > 0) &&
+ (depthPhotoBuffer.size() >= actualDepthPhotoSize));
+
+ size_t mainJpegSize = 0;
+ ASSERT_EQ(NV12Compressor::findJpegSize(depthPhotoBuffer.data(), actualDepthPhotoSize,
+ &mainJpegSize), OK);
+ ASSERT_TRUE((mainJpegSize > 0) && (mainJpegSize < actualDepthPhotoSize));
+ size_t depthMapSize = 0;
+ ASSERT_EQ(NV12Compressor::findJpegSize(depthPhotoBuffer.data() + mainJpegSize,
+ actualDepthPhotoSize - mainJpegSize, &depthMapSize), OK);
+ ASSERT_TRUE((depthMapSize > 0) && (depthMapSize < (actualDepthPhotoSize - mainJpegSize)));
+ size_t confidenceMapSize = actualDepthPhotoSize - (mainJpegSize + depthMapSize);
+
+ // Depth and confidence images must have the same EXIF orientation as the source.
+ auto depthJpegExifOrientation = ExifOrientation::ORIENTATION_UNDEFINED;
+ ASSERT_EQ(NV12Compressor::getExifOrientation(depthPhotoBuffer.data() + mainJpegSize,
+ depthMapSize, &depthJpegExifOrientation), OK);
+ ASSERT_EQ(depthJpegExifOrientation, exifOrientation);
+ size_t depthMapWidth, depthMapHeight;
+ ASSERT_EQ(NV12Compressor::getJpegImageDimensions(depthPhotoBuffer.data() + mainJpegSize,
+ depthMapSize, &depthMapWidth, &depthMapHeight), OK);
+ ASSERT_EQ(depthMapWidth, expectedWidth);
+ ASSERT_EQ(depthMapHeight, expectedHeight);
+
+ auto confidenceJpegExifOrientation = ExifOrientation::ORIENTATION_UNDEFINED;
+ ASSERT_EQ(NV12Compressor::getExifOrientation(
+ depthPhotoBuffer.data() + mainJpegSize + depthMapSize, confidenceMapSize,
+ &confidenceJpegExifOrientation), OK);
+ ASSERT_EQ(confidenceJpegExifOrientation, exifOrientation);
+ size_t confidenceMapWidth, confidenceMapHeight;
+ ASSERT_EQ(NV12Compressor::getJpegImageDimensions(
+ depthPhotoBuffer.data() + mainJpegSize + depthMapSize, confidenceMapSize,
+ &confidenceMapWidth, &confidenceMapHeight), OK);
+ ASSERT_EQ(confidenceMapWidth, expectedWidth);
+ ASSERT_EQ(confidenceMapHeight, expectedHeight);
+ }
+
+ dlclose(libHandle);
+}
diff --git a/services/camera/libcameraservice/tests/NV12Compressor.cpp b/services/camera/libcameraservice/tests/NV12Compressor.cpp
new file mode 100644
index 0000000..0a41a1f
--- /dev/null
+++ b/services/camera/libcameraservice/tests/NV12Compressor.cpp
@@ -0,0 +1,379 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "Test_NV12Compressor"
+
+#include "NV12Compressor.h"
+
+#include <libexif/exif-data.h>
+#include <netinet/in.h>
+
+using namespace android;
+using namespace android::camera3;
+
+namespace std {
+template <>
+struct default_delete<ExifEntry> {
+ inline void operator()(ExifEntry* entry) const { exif_entry_unref(entry); }
+};
+
+template <>
+struct default_delete<ExifData> {
+ inline void operator()(ExifData* data) const { exif_data_unref(data); }
+};
+
+} // namespace std
+
+bool NV12Compressor::compress(const unsigned char* data, int width, int height, int quality) {
+ if (!configureCompressor(width, height, quality)) {
+ // the method will have logged a more detailed error message than we can
+ // provide here so just return.
+ return false;
+ }
+
+ return compressData(data, /*exifData*/ nullptr);
+}
+
+bool NV12Compressor::compressWithExifOrientation(const unsigned char* data, int width, int height,
+ int quality, android::camera3::ExifOrientation exifValue) {
+ std::unique_ptr<ExifData> exifData(exif_data_new());
+ if (exifData.get() == nullptr) {
+ return false;
+ }
+
+ exif_data_set_option(exifData.get(), EXIF_DATA_OPTION_FOLLOW_SPECIFICATION);
+ exif_data_set_data_type(exifData.get(), EXIF_DATA_TYPE_COMPRESSED);
+ exif_data_set_byte_order(exifData.get(), EXIF_BYTE_ORDER_INTEL);
+ std::unique_ptr<ExifEntry> exifEntry(exif_entry_new());
+ if (exifEntry.get() == nullptr) {
+ return false;
+ }
+
+ exifEntry->tag = EXIF_TAG_ORIENTATION;
+ exif_content_add_entry(exifData->ifd[EXIF_IFD_0], exifEntry.get());
+ exif_entry_initialize(exifEntry.get(), exifEntry->tag);
+ exif_set_short(exifEntry->data, EXIF_BYTE_ORDER_INTEL, exifValue);
+
+ if (!configureCompressor(width, height, quality)) {
+ return false;
+ }
+
+ return compressData(data, exifData.get());
+}
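+
+// e.g. (illustrative) compressWithExifOrientation(nv21Data, 640, 480, 95,
+// ExifOrientation::ORIENTATION_90_DEGREES) emits a JPEG whose APP1 segment holds
+// the requested orientation, which getExifOrientation() below reads back.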
+
+const std::vector<uint8_t>& NV12Compressor::getCompressedData() const {
+ return mDestManager.mBuffer;
+}
+
+bool NV12Compressor::configureCompressor(int width, int height, int quality) {
+ mCompressInfo.err = jpeg_std_error(&mErrorManager);
+ // NOTE! DANGER! Do not construct any non-trivial objects below setjmp!
+ // The compiler will not generate code to destroy them during the return
+ // below so they will leak. Additionally, do not place any calls to libjpeg
+ // that can fail above this line or any error will cause undefined behavior.
+ if (setjmp(mErrorManager.mJumpBuffer)) {
+ // This is where the error handler will jump in case setup fails
+ // The error manager will ALOG an appropriate error message
+ return false;
+ }
+
+ jpeg_create_compress(&mCompressInfo);
+
+ mCompressInfo.image_width = width;
+ mCompressInfo.image_height = height;
+ mCompressInfo.input_components = 3;
+ mCompressInfo.in_color_space = JCS_YCbCr;
+ jpeg_set_defaults(&mCompressInfo);
+
+ jpeg_set_quality(&mCompressInfo, quality, TRUE);
+ // It may seem weird to set color space here again but this will also set
+ // other fields. These fields might be overwritten by jpeg_set_defaults
+ jpeg_set_colorspace(&mCompressInfo, JCS_YCbCr);
+ mCompressInfo.raw_data_in = TRUE;
+ mCompressInfo.dct_method = JDCT_IFAST;
+ // Set sampling factors
+ mCompressInfo.comp_info[0].h_samp_factor = 2;
+ mCompressInfo.comp_info[0].v_samp_factor = 2;
+ mCompressInfo.comp_info[1].h_samp_factor = 1;
+ mCompressInfo.comp_info[1].v_samp_factor = 1;
+ mCompressInfo.comp_info[2].h_samp_factor = 1;
+ mCompressInfo.comp_info[2].v_samp_factor = 1;
+
+ mCompressInfo.dest = &mDestManager;
+
+ return true;
+}
+
+static void deinterleave(const uint8_t* vuPlanar, std::vector<uint8_t>& uRows,
+ std::vector<uint8_t>& vRows, int rowIndex, int width, int height, int stride) {
+ int numRows = (height - rowIndex) / 2;
+ if (numRows > 8) numRows = 8;
+ for (int row = 0; row < numRows; ++row) {
+ int offset = ((rowIndex >> 1) + row) * stride;
+ const uint8_t* vu = vuPlanar + offset;
+ for (int i = 0; i < (width >> 1); ++i) {
+ int index = row * (width >> 1) + i;
+ uRows[index] = vu[1];
+ vRows[index] = vu[0];
+ vu += 2;
+ }
+ }
+}
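+
+// Note on ordering: the interleaved chroma plane is stored V/U (NV21-style), so
+// vu[0] is the Cr sample and vu[1] the Cb sample of each pair; e.g. a hypothetical
+// plane V0 U0 V1 U1 is split into uRows = {U0, U1} and vRows = {V0, V1}.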
+
+bool NV12Compressor::compressData(const unsigned char* data, ExifData* exifData) {
+ const uint8_t* y[16];
+ const uint8_t* cb[8];
+ const uint8_t* cr[8];
+ const uint8_t** planes[3] = { y, cb, cr };
+
+ int i, offset;
+ int width = mCompressInfo.image_width;
+ int height = mCompressInfo.image_height;
+ const uint8_t* yPlanar = data;
+ const uint8_t* vuPlanar = data + (width * height);
+ std::vector<uint8_t> uRows(8 * (width >> 1));
+ std::vector<uint8_t> vRows(8 * (width >> 1));
+
+ // NOTE! DANGER! Do not construct any non-trivial objects below setjmp!
+ // The compiler will not generate code to destroy them during the return
+ // below so they will leak. Additionally, do not place any calls to libjpeg
+ // that can fail above this line or any error will cause undefined behavior.
+ if (setjmp(mErrorManager.mJumpBuffer)) {
+ // This is where the error handler will jump in case compression fails
+ // The error manager will ALOG an appropriate error message
+ return false;
+ }
+
+ jpeg_start_compress(&mCompressInfo, TRUE);
+
+ attachExifData(exifData);
+
+ // process 16 lines of Y and 8 lines of U/V each time.
+ while (mCompressInfo.next_scanline < mCompressInfo.image_height) {
+ //deinterleave u and v
+ deinterleave(vuPlanar, uRows, vRows, mCompressInfo.next_scanline,
+ width, height, width);
+
+ // Jpeg library ignores the rows whose indices are greater than height.
+ for (i = 0; i < 16; i++) {
+ // y row
+ y[i] = yPlanar + (mCompressInfo.next_scanline + i) * width;
+
+ // construct u row and v row
+ if ((i & 1) == 0) {
+ // height and width are both halved because of downsampling
+ offset = (i >> 1) * (width >> 1);
+ cb[i/2] = &uRows[offset];
+ cr[i/2] = &vRows[offset];
+ }
+ }
+ jpeg_write_raw_data(&mCompressInfo, const_cast<JSAMPIMAGE>(planes), 16);
+ }
+
+ jpeg_finish_compress(&mCompressInfo);
+ jpeg_destroy_compress(&mCompressInfo);
+
+ return true;
+}
+
+bool NV12Compressor::attachExifData(ExifData* exifData) {
+ if (exifData == nullptr) {
+ // This is not an error, we don't require EXIF data
+ return true;
+ }
+
+ // Save the EXIF data to memory
+ unsigned char* rawData = nullptr;
+ unsigned int size = 0;
+ exif_data_save_data(exifData, &rawData, &size);
+ if (rawData == nullptr) {
+ ALOGE("Failed to create EXIF data block");
+ return false;
+ }
+
+ jpeg_write_marker(&mCompressInfo, JPEG_APP0 + 1, rawData, size);
+ free(rawData);
+ return true;
+}
+
+NV12Compressor::ErrorManager::ErrorManager() {
+ error_exit = &onJpegError;
+}
+
+void NV12Compressor::ErrorManager::onJpegError(j_common_ptr cinfo) {
+ // NOTE! Do not construct any non-trivial objects in this method at the top
+ // scope. Their destructors will not be called. If you do need such an
+ // object create a local scope that does not include the longjmp call,
+ // that ensures the object is destroyed before longjmp is called.
+ ErrorManager* errorManager = reinterpret_cast<ErrorManager*>(cinfo->err);
+
+ // Format and log error message
+ char errorMessage[JMSG_LENGTH_MAX];
+ (*errorManager->format_message)(cinfo, errorMessage);
+ errorMessage[sizeof(errorMessage) - 1] = '\0';
+ ALOGE("JPEG compression error: %s", errorMessage);
+ jpeg_destroy(cinfo);
+
+ // And through the looking glass we go
+ longjmp(errorManager->mJumpBuffer, 1);
+}
+
+NV12Compressor::DestinationManager::DestinationManager() {
+ init_destination = &initDestination;
+ empty_output_buffer = &emptyOutputBuffer;
+ term_destination = &termDestination;
+}
+
+void NV12Compressor::DestinationManager::initDestination(j_compress_ptr cinfo) {
+ auto manager = reinterpret_cast<DestinationManager*>(cinfo->dest);
+
+ // Start out with some arbitrary but not too large buffer size
+ manager->mBuffer.resize(16 * 1024);
+ manager->next_output_byte = &manager->mBuffer[0];
+ manager->free_in_buffer = manager->mBuffer.size();
+}
+
+boolean NV12Compressor::DestinationManager::emptyOutputBuffer(
+ j_compress_ptr cinfo) {
+ auto manager = reinterpret_cast<DestinationManager*>(cinfo->dest);
+
+ // Keep doubling the size of the buffer for a very low, amortized
+ // performance cost of the allocations
+ size_t oldSize = manager->mBuffer.size();
+ manager->mBuffer.resize(oldSize * 2);
+ manager->next_output_byte = &manager->mBuffer[oldSize];
+ manager->free_in_buffer = manager->mBuffer.size() - oldSize;
+ return manager->free_in_buffer != 0;
+}
+
+void NV12Compressor::DestinationManager::termDestination(j_compress_ptr cinfo) {
+ auto manager = reinterpret_cast<DestinationManager*>(cinfo->dest);
+
+ // Resize down to the exact size of the output, i.e. remove as many
+ // bytes as are left unused in the buffer.
+ manager->mBuffer.resize(manager->mBuffer.size() - manager->free_in_buffer);
+}
+
+status_t NV12Compressor::findJpegSize(uint8_t *jpegBuffer, size_t maxSize, size_t *size /*out*/) {
+ if ((size == nullptr) || (jpegBuffer == nullptr)) {
+ return BAD_VALUE;
+ }
+
+ if (checkJpegStart(jpegBuffer) == 0) {
+ return BAD_VALUE;
+ }
+
+ // Read JFIF segment markers, skip over segment data
+ *size = kMarkerLength; // skip past the Start Of Image marker
+ while (*size <= maxSize - kMarkerLength) {
+ segment_t *segment = (segment_t*)(jpegBuffer + *size);
+ uint8_t type = checkJpegMarker(segment->marker);
+ if (type == 0) { // invalid marker, no more segments, begin JPEG data
+ break;
+ }
+ if (type == kEndOfImage || *size > maxSize - sizeof(segment_t)) {
+ return BAD_VALUE;
+ }
+
+ size_t length = ntohs(segment->length);
+ *size += length + kMarkerLength;
+ }
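+
+ // e.g. for a hypothetical buffer FF D8 | FF E1 00 04 xx xx | FF DA ... the scan
+ // starts at offset 2, reads the APP1 segment length (4), jumps 4 + 2 bytes to
+ // offset 8 and keeps walking segments until the data no longer starts with a
+ // 0xFF marker byte; the loop below then searches for the FF D9 End of Image
+ // marker.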
+
+ // Find End of Image
+ // Scan JPEG buffer until End of Image
+ bool foundEnd = false;
+ for ( ; *size <= maxSize - kMarkerLength; (*size)++) {
+ if (checkJpegEnd(jpegBuffer + *size)) {
+ foundEnd = true;
+ *size += kMarkerLength;
+ break;
+ }
+ }
+
+ if (!foundEnd) {
+ return BAD_VALUE;
+ }
+
+ if (*size > maxSize) {
+ *size = maxSize;
+ }
+
+ return OK;
+}
+
+status_t NV12Compressor::getJpegImageDimensions(uint8_t *jpegBuffer,
+ size_t jpegBufferSize, size_t *width /*out*/, size_t *height /*out*/) {
+ if ((jpegBuffer == nullptr) || (width == nullptr) || (height == nullptr) ||
+ (jpegBufferSize == 0u)) {
+ return BAD_VALUE;
+ }
+
+ // Scan JPEG buffer until Start of Frame
+ bool foundSOF = false;
+ size_t currentPos;
+ for (currentPos = 0; currentPos <= jpegBufferSize - kMarkerLength; currentPos++) {
+ if (checkStartOfFrame(jpegBuffer + currentPos)) {
+ foundSOF = true;
+ currentPos += kMarkerLength;
+ break;
+ }
+ }
+
+ if (!foundSOF) {
+ ALOGE("%s: Start of Frame not found", __func__);
+ return BAD_VALUE;
+ }
+
+ sof_t *startOfFrame = reinterpret_cast<sof_t *> (jpegBuffer + currentPos);
+ *width = ntohs(startOfFrame->width);
+ *height = ntohs(startOfFrame->height);
+
+ return OK;
+}
+
+status_t NV12Compressor::getExifOrientation(uint8_t *jpegBuffer, size_t jpegBufferSize,
+ ExifOrientation *exifValue /*out*/) {
+ if ((jpegBuffer == nullptr) || (exifValue == nullptr) || (jpegBufferSize == 0u)) {
+ return BAD_VALUE;
+ }
+
+ std::unique_ptr<ExifData> exifData(exif_data_new());
+ exif_data_load_data(exifData.get(), jpegBuffer, jpegBufferSize);
+ ExifEntry *orientation = exif_content_get_entry(exifData->ifd[EXIF_IFD_0],
+ EXIF_TAG_ORIENTATION);
+ if ((orientation == nullptr) || (orientation->size != sizeof(ExifShort))) {
+ return BAD_VALUE;
+ }
+
+ auto orientationValue = exif_get_short(orientation->data,
+ exif_data_get_byte_order(exifData.get()));
+ status_t ret;
+ switch (orientationValue) {
+ case ExifOrientation::ORIENTATION_0_DEGREES:
+ case ExifOrientation::ORIENTATION_90_DEGREES:
+ case ExifOrientation::ORIENTATION_180_DEGREES:
+ case ExifOrientation::ORIENTATION_270_DEGREES:
+ *exifValue = static_cast<ExifOrientation> (orientationValue);
+ ret = OK;
+ break;
+ default:
+ ALOGE("%s: Unexpected EXIF orientation value: %u", __FUNCTION__, orientationValue);
+ ret = BAD_VALUE;
+ }
+
+ return ret;
+}
diff --git a/services/camera/libcameraservice/tests/NV12Compressor.h b/services/camera/libcameraservice/tests/NV12Compressor.h
new file mode 100644
index 0000000..ee22d5e
--- /dev/null
+++ b/services/camera/libcameraservice/tests/NV12Compressor.h
@@ -0,0 +1,134 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef TEST_CAMERA_JPEG_STUB_NV12_COMPRESSOR_H
+#define TEST_CAMERA_JPEG_STUB_NV12_COMPRESSOR_H
+
+#include <setjmp.h>
+#include <stdlib.h>
+extern "C" {
+#include <jpeglib.h>
+#include <jerror.h>
+}
+
+#include <utils/Errors.h>
+#include <vector>
+
+#include "../utils/ExifUtils.h"
+
+struct _ExifData;
+typedef _ExifData ExifData;
+
+class NV12Compressor {
+public:
+ NV12Compressor() {}
+
+ /* Compress |data| which represents raw NV21 encoded data of dimensions
+ * |width| * |height|.
+ */
+ bool compress(const unsigned char* data, int width, int height, int quality);
+ bool compressWithExifOrientation(const unsigned char* data, int width, int height, int quality,
+ android::camera3::ExifOrientation exifValue);
+
+ /* Get a reference to the compressed data; this will return an empty vector
+ * if compress has not been called yet.
+ */
+ const std::vector<unsigned char>& getCompressedData() const;
+
+ // Utility methods
+ static android::status_t findJpegSize(uint8_t *jpegBuffer, size_t maxSize,
+ size_t *size /*out*/);
+
+ static android::status_t getExifOrientation(uint8_t *jpegBuffer,
+ size_t jpegBufferSize, android::camera3::ExifOrientation *exifValue /*out*/);
+
+ /* Get Jpeg image dimensions from the first Start Of Frame. Please note that, due to
+ * the way the jpeg buffer is scanned, if the image contains a thumbnail then the
+ * returned size will be that of the thumbnail and not the main image.
+ */
+ static android::status_t getJpegImageDimensions(uint8_t *jpegBuffer, size_t jpegBufferSize,
+ size_t *width /*out*/, size_t *height /*out*/);
+
+private:
+
+ struct DestinationManager : jpeg_destination_mgr {
+ DestinationManager();
+
+ static void initDestination(j_compress_ptr cinfo);
+ static boolean emptyOutputBuffer(j_compress_ptr cinfo);
+ static void termDestination(j_compress_ptr cinfo);
+
+ std::vector<unsigned char> mBuffer;
+ };
+
+ struct ErrorManager : jpeg_error_mgr {
+ ErrorManager();
+
+ static void onJpegError(j_common_ptr cinfo);
+
+ jmp_buf mJumpBuffer;
+ };
+
+ static const size_t kMarkerLength = 2; // length of a marker
+ static const uint8_t kMarker = 0xFF; // First byte of marker
+ static const uint8_t kStartOfImage = 0xD8; // Start of Image
+ static const uint8_t kEndOfImage = 0xD9; // End of Image
+ static const uint8_t kStartOfFrame = 0xC0; // Start of Frame
+
+ struct __attribute__((packed)) segment_t {
+ uint8_t marker[kMarkerLength];
+ uint16_t length;
+ };
+
+ struct __attribute__((packed)) sof_t {
+ uint16_t length;
+ uint8_t precision;
+ uint16_t height;
+ uint16_t width;
+ };
+
+ // check for start of image marker
+ static bool checkStartOfFrame(uint8_t* buf) {
+ return buf[0] == kMarker && buf[1] == kStartOfFrame;
+ }
+
+ // check for start of image marker
+ static bool checkJpegStart(uint8_t* buf) {
+ return buf[0] == kMarker && buf[1] == kStartOfImage;
+ }
+
+ // check for End of Image marker
+ static bool checkJpegEnd(uint8_t *buf) {
+ return buf[0] == kMarker && buf[1] == kEndOfImage;
+ }
+
+ // check for arbitrary marker, returns marker type (second byte)
+ // returns 0 if no marker found. Note: 0x00 is not a valid marker type
+ static uint8_t checkJpegMarker(uint8_t *buf) {
+ return (buf[0] == kMarker) ? buf[1] : 0;
+ }
+
+ jpeg_compress_struct mCompressInfo;
+ DestinationManager mDestManager;
+ ErrorManager mErrorManager;
+
+ bool configureCompressor(int width, int height, int quality);
+ bool compressData(const unsigned char* data, ExifData* exifData);
+ bool attachExifData(ExifData* exifData);
+};
+
+#endif // TEST_CAMERA_JPEG_STUB_NV12_COMPRESSOR_H
+
diff --git a/services/camera/libcameraservice/utils/ExifUtils.cpp b/services/camera/libcameraservice/utils/ExifUtils.cpp
index a4027cc..c0afdc1 100644
--- a/services/camera/libcameraservice/utils/ExifUtils.cpp
+++ b/services/camera/libcameraservice/utils/ExifUtils.cpp
@@ -55,82 +55,63 @@
// Initialize() can be called multiple times. The setting of Exif tags will be
// cleared.
virtual bool initialize(const unsigned char *app1Segment, size_t app1SegmentSize);
+ virtual bool initializeEmpty();
// set all known fields from a metadata structure
virtual bool setFromMetadata(const CameraMetadata& metadata,
+ const CameraMetadata& staticInfo,
const size_t imageWidth,
const size_t imageHeight);
// sets the len aperture.
// Returns false if memory allocation fails.
- virtual bool setAperture(uint32_t numerator, uint32_t denominator);
-
- // sets the value of brightness.
- // Returns false if memory allocation fails.
- virtual bool setBrightness(int32_t numerator, int32_t denominator);
+ virtual bool setAperture(float aperture);
// sets the color space.
// Returns false if memory allocation fails.
virtual bool setColorSpace(uint16_t color_space);
- // sets the information to compressed data.
- // Returns false if memory allocation fails.
- virtual bool setComponentsConfiguration(const std::string& components_configuration);
-
- // sets the compression scheme used for the image data.
- // Returns false if memory allocation fails.
- virtual bool setCompression(uint16_t compression);
-
- // sets image contrast.
- // Returns false if memory allocation fails.
- virtual bool setContrast(uint16_t contrast);
-
// sets the date and time of image last modified. It takes local time. The
// name of the tag is DateTime in IFD0.
// Returns false if memory allocation fails.
virtual bool setDateTime(const struct tm& t);
- // sets the image description.
- // Returns false if memory allocation fails.
- virtual bool setDescription(const std::string& description);
-
// sets the digital zoom ratio. If the numerator is 0, it means digital zoom
// was not used.
// Returns false if memory allocation fails.
- virtual bool setDigitalZoomRatio(uint32_t numerator, uint32_t denominator);
+ virtual bool setDigitalZoomRatio(
+ uint32_t crop_width, uint32_t crop_height,
+ uint32_t sensor_width, uint32_t sensor_height);
- // sets the exposure bias.
+ // Sets the exposure bias.
// Returns false if memory allocation fails.
- virtual bool setExposureBias(int32_t numerator, int32_t denominator);
+ virtual bool setExposureBias(int32_t ev,
+ uint32_t ev_step_numerator, uint32_t ev_step_denominator);
// sets the exposure mode set when the image was shot.
// Returns false if memory allocation fails.
- virtual bool setExposureMode(uint16_t exposure_mode);
-
- // sets the program used by the camera to set exposure when the picture is
- // taken.
- // Returns false if memory allocation fails.
- virtual bool setExposureProgram(uint16_t exposure_program);
+ virtual bool setExposureMode(uint8_t exposure_mode);
// sets the exposure time, given in seconds.
// Returns false if memory allocation fails.
- virtual bool setExposureTime(uint32_t numerator, uint32_t denominator);
+ virtual bool setExposureTime(float exposure_time);
// sets the status of flash.
// Returns false if memory allocation fails.
- virtual bool setFlash(uint16_t flash);
+ virtual bool setFlash(uint8_t flash_available, uint8_t flash_state, uint8_t ae_mode);
// sets the F number.
// Returns false if memory allocation fails.
- virtual bool setFNumber(uint32_t numerator, uint32_t denominator);
+ virtual bool setFNumber(float f_number);
// sets the focal length of lens used to take the image in millimeters.
// Returns false if memory allocation fails.
- virtual bool setFocalLength(uint32_t numerator, uint32_t denominator);
+ virtual bool setFocalLength(float focal_length);
- // sets the degree of overall image gain adjustment.
+ // sets the 35mm film equivalent focal length of the lens, in millimeters.
// Returns false if memory allocation fails.
- virtual bool setGainControl(uint16_t gain_control);
+ virtual bool setFocalLengthIn35mmFilm(float focal_length,
+ float sensor_size_x, float sensor_size_y);
// sets the altitude in meters.
// Returns false if memory allocation fails.
@@ -164,45 +145,25 @@
// Returns false if memory allocation fails.
virtual bool setIsoSpeedRating(uint16_t iso_speed_ratings);
- // sets the kind of light source.
- // Returns false if memory allocation fails.
- virtual bool setLightSource(uint16_t light_source);
-
// sets the smallest F number of the lens.
// Returns false if memory allocation fails.
- virtual bool setMaxAperture(uint32_t numerator, uint32_t denominator);
-
- // sets the metering mode.
- // Returns false if memory allocation fails.
- virtual bool setMeteringMode(uint16_t metering_mode);
+ virtual bool setMaxAperture(float aperture);
// sets image orientation.
// Returns false if memory allocation fails.
- virtual bool setOrientation(uint16_t orientation);
+ virtual bool setOrientation(uint16_t degrees);
- // sets the unit for measuring XResolution and YResolution.
+ // sets image orientation from an EXIF orientation value.
// Returns false if memory allocation fails.
- virtual bool setResolutionUnit(uint16_t resolution_unit);
-
- // sets image saturation.
- // Returns false if memory allocation fails.
- virtual bool setSaturation(uint16_t saturation);
-
- // sets the type of scene that was shot.
- // Returns false if memory allocation fails.
- virtual bool setSceneCaptureType(uint16_t type);
-
- // sets image sharpness.
- // Returns false if memory allocation fails.
- virtual bool setSharpness(uint16_t sharpness);
+ virtual bool setOrientationValue(ExifOrientation orientationValue);
// sets the shutter speed.
// Returns false if memory allocation fails.
- virtual bool setShutterSpeed(int32_t numerator, int32_t denominator);
+ virtual bool setShutterSpeed(float exposure_time);
// sets the distance to the subject, given in meters.
// Returns false if memory allocation fails.
- virtual bool setSubjectDistance(uint32_t numerator, uint32_t denominator);
+ virtual bool setSubjectDistance(float diopters);
// sets the fractions of seconds for the <DateTime> tag.
// Returns false if memory allocation fails.
@@ -210,28 +171,7 @@
// sets the white balance mode set when the image was shot.
// Returns false if memory allocation fails.
- virtual bool setWhiteBalance(uint16_t white_balance);
-
- // sets the number of pixels per resolution unit in the image width.
- // Returns false if memory allocation fails.
- virtual bool setXResolution(uint32_t numerator, uint32_t denominator);
-
- // sets the position of chrominance components in relation to the luminance
- // component.
- // Returns false if memory allocation fails.
- virtual bool setYCbCrPositioning(uint16_t ycbcr_positioning);
-
- // sets the number of pixels per resolution unit in the image length.
- // Returns false if memory allocation fails.
- virtual bool setYResolution(uint32_t numerator, uint32_t denominator);
-
- // sets the manufacturer of camera.
- // Returns false if memory allocation fails.
- virtual bool setMake(const std::string& make);
-
- // sets the model number of camera.
- // Returns false if memory allocation fails.
- virtual bool setModel(const std::string& model);
+ virtual bool setWhiteBalance(uint8_t white_balance);
// Generates APP1 segment.
// Returns false if generating APP1 segment fails.
@@ -280,6 +220,10 @@
virtual bool setString(ExifIfd ifd, ExifTag tag, ExifFormat format,
const std::string& buffer, const std::string& msg);
+ float convertToApex(float val) {
+ return 2.0f * log2f(val);
+ }
+
// Destroys the buffer of APP1 segment if exists.
virtual void destroyApp1();
@@ -291,6 +235,8 @@
// The length of |app1_buffer_|.
unsigned int app1_length_;
+ // How precise the float-to-rational conversion for EXIF tags would be.
+ const static int kRationalPrecision = 10000;
};
#define SET_SHORT(ifd, tag, value) \
@@ -373,13 +319,31 @@
return true;
}
-bool ExifUtilsImpl::setAperture(uint32_t numerator, uint32_t denominator) {
- SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_APERTURE_VALUE, numerator, denominator);
+bool ExifUtilsImpl::initializeEmpty() {
+ reset();
+ exif_data_ = exif_data_new();
+ if (exif_data_ == nullptr) {
+ ALOGE("%s: allocate memory for exif_data_ failed", __FUNCTION__);
+ return false;
+ }
+ // set the image options.
+ exif_data_set_option(exif_data_, EXIF_DATA_OPTION_FOLLOW_SPECIFICATION);
+ exif_data_set_data_type(exif_data_, EXIF_DATA_TYPE_COMPRESSED);
+ exif_data_set_byte_order(exif_data_, EXIF_BYTE_ORDER_INTEL);
+
+ // set exif version to 2.2.
+ if (!setExifVersion("0220")) {
+ return false;
+ }
+
return true;
}
-bool ExifUtilsImpl::setBrightness(int32_t numerator, int32_t denominator) {
- SET_SRATIONAL(EXIF_IFD_EXIF, EXIF_TAG_BRIGHTNESS_VALUE, numerator, denominator);
+bool ExifUtilsImpl::setAperture(float aperture) {
+ float apexValue = convertToApex(aperture);
+ SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_APERTURE_VALUE,
+ static_cast<uint32_t>(std::round(apexValue * kRationalPrecision)),
+ kRationalPrecision);
return true;
}
@@ -388,23 +352,6 @@
return true;
}
-bool ExifUtilsImpl::setComponentsConfiguration(
- const std::string& components_configuration) {
- SET_STRING(EXIF_IFD_EXIF, EXIF_TAG_COMPONENTS_CONFIGURATION,
- EXIF_FORMAT_UNDEFINED, components_configuration);
- return true;
-}
-
-bool ExifUtilsImpl::setCompression(uint16_t compression) {
- SET_SHORT(EXIF_IFD_0, EXIF_TAG_COMPRESSION, compression);
- return true;
-}
-
-bool ExifUtilsImpl::setContrast(uint16_t contrast) {
- SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_CONTRAST, contrast);
- return true;
-}
-
bool ExifUtilsImpl::setDateTime(const struct tm& t) {
// The length is 20 bytes including NULL for termination in Exif standard.
char str[20];
@@ -421,53 +368,111 @@
return true;
}
-bool ExifUtilsImpl::setDescription(const std::string& description) {
- SET_STRING(EXIF_IFD_0, EXIF_TAG_IMAGE_DESCRIPTION, EXIF_FORMAT_ASCII, description);
+bool ExifUtilsImpl::setDigitalZoomRatio(
+ uint32_t crop_width, uint32_t crop_height,
+ uint32_t sensor_width, uint32_t sensor_height) {
+ float zoomRatioX = (crop_width == 0) ? 1.0 : 1.0 * sensor_width / crop_width;
+ float zoomRatioY = (crop_height == 0) ? 1.0 : 1.0 * sensor_height / crop_height;
+ float zoomRatio = std::max(zoomRatioX, zoomRatioY);
+ const static float noZoomThreshold = 1.02f;
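+ // e.g. (hypothetical values) a 4000x3000 sensor cropped to 2000x1500 gives
+ // zoomRatio = 2.0 and is written as 20000/10000, while a 3960x2970 crop
+ // (~1.01x) falls below the threshold and is reported as 0/1.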
+
+ if (zoomRatio <= noZoomThreshold) {
+ SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_DIGITAL_ZOOM_RATIO, 0, 1);
+ } else {
+ SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_DIGITAL_ZOOM_RATIO,
+ static_cast<uint32_t>(std::round(zoomRatio * kRationalPrecision)),
+ kRationalPrecision);
+ }
return true;
}
-bool ExifUtilsImpl::setDigitalZoomRatio(uint32_t numerator, uint32_t denominator) {
- SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_DIGITAL_ZOOM_RATIO, numerator, denominator);
+bool ExifUtilsImpl::setExposureMode(uint8_t exposure_mode) {
+ uint16_t exposureMode = (exposure_mode == ANDROID_CONTROL_AE_MODE_OFF) ? 1 : 0;
+ SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_EXPOSURE_MODE, exposureMode);
return true;
}
-bool ExifUtilsImpl::setExposureBias(int32_t numerator, int32_t denominator) {
- SET_SRATIONAL(EXIF_IFD_EXIF, EXIF_TAG_EXPOSURE_BIAS_VALUE, numerator, denominator);
+bool ExifUtilsImpl::setExposureTime(float exposure_time) {
+ SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_EXPOSURE_TIME,
+ static_cast<uint32_t>(std::round(exposure_time * kRationalPrecision)),
+ kRationalPrecision);
return true;
}
-bool ExifUtilsImpl::setExposureMode(uint16_t exposure_mode) {
- SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_EXPOSURE_MODE, exposure_mode);
- return true;
-}
+bool ExifUtilsImpl::setFlash(uint8_t flash_available, uint8_t flash_state, uint8_t ae_mode) {
+ // EXIF_TAG_FLASH bits layout per EXIF standard:
+ // Bit 0: 0 - did not fire
+ // 1 - fired
+ // Bit 1-2: status of return light
+ // Bit 3-4: 0 - unknown
+ // 1 - compulsory flash firing
+ // 2 - compulsory flash suppression
+ // 3 - auto mode
+ // Bit 5: 0 - flash function present
+ // 1 - no flash function
+ // Bit 6: 0 - no red-eye reduction mode or unknown
+ // 1 - red-eye reduction supported
+ uint16_t flash = 0x20;
-bool ExifUtilsImpl::setExposureProgram(uint16_t exposure_program) {
- SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_EXPOSURE_PROGRAM, exposure_program);
- return true;
-}
+ if (flash_available == ANDROID_FLASH_INFO_AVAILABLE_TRUE) {
+ flash = 0x00;
-bool ExifUtilsImpl::setExposureTime(uint32_t numerator, uint32_t denominator) {
- SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_EXPOSURE_TIME, numerator, denominator);
- return true;
-}
+ if (flash_state == ANDROID_FLASH_STATE_FIRED) {
+ flash |= 0x1;
+ }
+ if (ae_mode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
+ flash |= 0x40;
+ }
-bool ExifUtilsImpl::setFlash(uint16_t flash) {
+ uint16_t flashMode = 0;
+ switch (ae_mode) {
+ case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:
+ case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE:
+ flashMode = 3; // AUTO
+ break;
+ case ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH:
+ case ANDROID_CONTROL_AE_MODE_ON_EXTERNAL_FLASH:
+ flashMode = 1; // ON
+ break;
+ case ANDROID_CONTROL_AE_MODE_OFF:
+ case ANDROID_CONTROL_AE_MODE_ON:
+ flashMode = 2; // OFF
+ break;
+ default:
+ flashMode = 0; // UNKNOWN
+ break;
+ }
+ flash |= (flashMode << 3);
+ }
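+ // e.g. (hypothetical case) an available flash that fired while in
+ // ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE mode encodes as
+ // 0x01 | 0x40 | (3 << 3) = 0x59.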
SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_FLASH, flash);
return true;
}
-bool ExifUtilsImpl::setFNumber(uint32_t numerator, uint32_t denominator) {
- SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_FNUMBER, numerator, denominator);
+bool ExifUtilsImpl::setFNumber(float f_number) {
+ SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_FNUMBER,
+ static_cast<uint32_t>(std::round(f_number * kRationalPrecision)),
+ kRationalPrecision);
return true;
}
-bool ExifUtilsImpl::setFocalLength(uint32_t numerator, uint32_t denominator) {
- SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_FOCAL_LENGTH, numerator, denominator);
+bool ExifUtilsImpl::setFocalLength(float focal_length) {
+ uint32_t numerator = static_cast<uint32_t>(std::round(focal_length * kRationalPrecision));
+ SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_FOCAL_LENGTH, numerator, kRationalPrecision);
return true;
}
-bool ExifUtilsImpl::setGainControl(uint16_t gain_control) {
- SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_GAIN_CONTROL, gain_control);
+bool ExifUtilsImpl::setFocalLengthIn35mmFilm(
+ float focal_length, float sensor_size_x, float sensor_size_y) {
+ static const float filmDiagonal = 43.27; // diagonal of 35mm film
+ static const float minSensorDiagonal = 0.01;
+ float sensorDiagonal = std::sqrt(
+ sensor_size_x * sensor_size_x + sensor_size_y * sensor_size_y);
+ sensorDiagonal = std::max(sensorDiagonal, minSensorDiagonal);
+ float focalLength35mmFilm = std::round(focal_length * filmDiagonal / sensorDiagonal);
+ focalLength35mmFilm = std::min(1.0f * 65535, focalLength35mmFilm);
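+ // e.g. (hypothetical values) focal_length = 4.38 on a 6.17 x 4.55 sensor has a
+ // diagonal of ~7.67, giving a 35mm equivalent of round(4.38 * 43.27 / 7.67) = 25.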
+
+ SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_FOCAL_LENGTH_IN_35MM_FILM,
+ static_cast<uint16_t>(focalLength35mmFilm));
return true;
}
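A worked example of the conversion above (hypothetical numbers, not from this change): a 4.38 mm lens on a 6.17 mm x 4.55 mm sensor has a diagonal of about 7.67 mm, so the 35mm equivalent is round(4.38 * 43.27 / 7.67) = 25 mm.

    #include <cmath>
    #include <cstdio>

    int main() {
        // Hypothetical 1/2.3" sensor (6.17 mm x 4.55 mm) with a 4.38 mm lens.
        float focal = 4.38f, sx = 6.17f, sy = 4.55f;
        float diag = std::sqrt(sx * sx + sy * sy);        // ~7.67 mm
        float equiv = std::round(focal * 43.27f / diag);  // ~25 mm
        std::printf("35mm-equivalent focal length: %.0f mm\n", equiv);
        return 0;
    }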
@@ -614,77 +619,74 @@
return true;
}
-bool ExifUtilsImpl::setLightSource(uint16_t light_source) {
- SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_LIGHT_SOURCE, light_source);
+bool ExifUtilsImpl::setMaxAperture(float aperture) {
+ float maxAperture = convertToApex(aperture);
+ SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_MAX_APERTURE_VALUE,
+ static_cast<uint32_t>(std::round(maxAperture * kRationalPrecision)),
+ kRationalPrecision);
return true;
}
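Sketch of the expected value, assuming convertToApex() (defined elsewhere in this file) implements the standard APEX aperture value Av = 2 * log2(f-number) and kRationalPrecision is 10000: an f/2.0 lens gives Av = 2.0, stored as 20000/10000.

    #include <cmath>
    #include <cstdio>

    int main() {
        // APEX aperture value for an f/2.0 lens, assuming Av = 2 * log2(f-number).
        float fNumber = 2.0f;
        float av = 2.0f * std::log2(fNumber);
        std::printf("MaxApertureValue: %d/10000\n",
                static_cast<int>(std::round(av * 10000)));  // 20000/10000
        return 0;
    }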
-bool ExifUtilsImpl::setMaxAperture(uint32_t numerator, uint32_t denominator) {
- SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_MAX_APERTURE_VALUE, numerator, denominator);
+bool ExifUtilsImpl::setExposureBias(int32_t ev,
+ uint32_t ev_step_numerator, uint32_t ev_step_denominator) {
+ SET_SRATIONAL(EXIF_IFD_EXIF, EXIF_TAG_EXPOSURE_BIAS_VALUE,
+ ev * static_cast<int32_t>(ev_step_numerator), ev_step_denominator);
return true;
}
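Illustrative example (values are hypothetical): an AE compensation of -2 steps with a 1/3 EV compensation step is -2/3 EV, so the signed rational written to ExposureBiasValue is -2/3.

    #include <cstdint>
    #include <cstdio>

    int main() {
        // -2 compensation steps * (1/3 EV per step) = -2/3 EV.
        int32_t ev = -2;
        int32_t stepNumerator = 1;
        uint32_t stepDenominator = 3;
        std::printf("ExposureBiasValue: %d/%u\n", ev * stepNumerator, stepDenominator);
        return 0;
    }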
-bool ExifUtilsImpl::setMeteringMode(uint16_t metering_mode) {
- SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_METERING_MODE, metering_mode);
- return true;
-}
-
-bool ExifUtilsImpl::setOrientation(uint16_t orientation) {
- /*
- * Orientation value:
- * 1 2 3 4 5 6 7 8
- *
- * 888888 888888 88 88 8888888888 88 88 8888888888
- * 88 88 88 88 88 88 88 88 88 88 88 88
- * 8888 8888 8888 8888 88 8888888888 8888888888 88
- * 88 88 88 88
- * 88 88 888888 888888
- */
- int value = 1;
- switch (orientation) {
+bool ExifUtilsImpl::setOrientation(uint16_t degrees) {
+ ExifOrientation value = ExifOrientation::ORIENTATION_0_DEGREES;
+ switch (degrees) {
case 90:
- value = 6;
+ value = ExifOrientation::ORIENTATION_90_DEGREES;
break;
case 180:
- value = 3;
+ value = ExifOrientation::ORIENTATION_180_DEGREES;
break;
case 270:
- value = 8;
+ value = ExifOrientation::ORIENTATION_270_DEGREES;
break;
default:
break;
}
- SET_SHORT(EXIF_IFD_0, EXIF_TAG_ORIENTATION, value);
+ return setOrientationValue(value);
+}
+
+bool ExifUtilsImpl::setOrientationValue(ExifOrientation orientationValue) {
+ SET_SHORT(EXIF_IFD_0, EXIF_TAG_ORIENTATION, orientationValue);
return true;
}
-bool ExifUtilsImpl::setResolutionUnit(uint16_t resolution_unit) {
- SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_RESOLUTION_UNIT, resolution_unit);
+bool ExifUtilsImpl::setShutterSpeed(float exposure_time) {
+ float shutterSpeed = -log2f(exposure_time);
+ SET_SRATIONAL(EXIF_IFD_EXIF, EXIF_TAG_SHUTTER_SPEED_VALUE,
+ static_cast<int32_t>(std::round(shutterSpeed * kRationalPrecision)), kRationalPrecision);
return true;
}
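For reference, the APEX time value is Tv = -log2(exposure time); a hypothetical 1/250 s exposure gives Tv = log2(250) ~= 7.97, stored as roughly 79658/10000.

    #include <cmath>
    #include <cstdio>

    int main() {
        // APEX time value for a 1/250 s exposure.
        float exposureTime = 1.0f / 250.0f;
        float tv = -std::log2(exposureTime);                 // ~7.97
        std::printf("ShutterSpeedValue: %d/10000\n",
                static_cast<int>(std::round(tv * 10000)));   // ~79658/10000
        return 0;
    }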
-bool ExifUtilsImpl::setSaturation(uint16_t saturation) {
- SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_SATURATION, saturation);
- return true;
-}
+bool ExifUtilsImpl::setSubjectDistance(float diopters) {
+ static const float kInfinityDiopters = 1.0e-6f;
+ uint32_t numerator, denominator;
+ uint16_t distanceRange;
+ if (diopters > kInfinityDiopters) {
+ float focusDistance = 1.0f / diopters;
+ numerator = static_cast<uint32_t>(std::round(focusDistance * kRationalPrecision));
+ denominator = kRationalPrecision;
-bool ExifUtilsImpl::setSceneCaptureType(uint16_t type) {
- SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_SCENE_CAPTURE_TYPE, type);
- return true;
-}
-
-bool ExifUtilsImpl::setSharpness(uint16_t sharpness) {
- SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_SHARPNESS, sharpness);
- return true;
-}
-
-bool ExifUtilsImpl::setShutterSpeed(int32_t numerator, int32_t denominator) {
- SET_SRATIONAL(EXIF_IFD_EXIF, EXIF_TAG_SHUTTER_SPEED_VALUE, numerator, denominator);
- return true;
-}
-
-bool ExifUtilsImpl::setSubjectDistance(uint32_t numerator, uint32_t denominator) {
+ if (focusDistance < 1.0f) {
+ distanceRange = 1; // Macro
+ } else if (focusDistance < 3.0f) {
+ distanceRange = 2; // Close
+ } else {
+ distanceRange = 3; // Distant
+ }
+ } else {
+ numerator = 0xFFFFFFFF;
+ denominator = 1;
+ distanceRange = 3; // Distant
+ }
SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_SUBJECT_DISTANCE, numerator, denominator);
+ SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_SUBJECT_DISTANCE_RANGE, distanceRange);
return true;
}
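Illustrative example (hypothetical input): a focus distance of 2.0 diopters is 1 / 2.0 = 0.5 m, which is written as 5000/10000 and falls into the Macro (< 1 m) SubjectDistanceRange bucket.

    #include <cmath>
    #include <cstdio>

    int main() {
        // 2.0 diopters -> 0.5 m -> Macro range.
        float diopters = 2.0f;
        float meters = 1.0f / diopters;
        std::printf("SubjectDistance: %u/10000 m (Macro)\n",
                static_cast<unsigned>(std::round(meters * 10000)));
        return 0;
    }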
@@ -695,23 +697,9 @@
return true;
}
-bool ExifUtilsImpl::setWhiteBalance(uint16_t white_balance) {
- SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_WHITE_BALANCE, white_balance);
- return true;
-}
-
-bool ExifUtilsImpl::setXResolution(uint32_t numerator, uint32_t denominator) {
- SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_X_RESOLUTION, numerator, denominator);
- return true;
-}
-
-bool ExifUtilsImpl::setYCbCrPositioning(uint16_t ycbcr_positioning) {
- SET_SHORT(EXIF_IFD_0, EXIF_TAG_YCBCR_POSITIONING, ycbcr_positioning);
- return true;
-}
-
-bool ExifUtilsImpl::setYResolution(uint32_t numerator, uint32_t denominator) {
- SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_Y_RESOLUTION, numerator, denominator);
+bool ExifUtilsImpl::setWhiteBalance(uint8_t white_balance) {
+ uint16_t whiteBalance = (white_balance == ANDROID_CONTROL_AWB_MODE_AUTO) ? 0 : 1;
+ SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_WHITE_BALANCE, whiteBalance);
return true;
}
@@ -748,16 +736,6 @@
return true;
}
-bool ExifUtilsImpl::setMake(const std::string& make) {
- SET_STRING(EXIF_IFD_0, EXIF_TAG_MAKE, EXIF_FORMAT_ASCII, make);
- return true;
-}
-
-bool ExifUtilsImpl::setModel(const std::string& model) {
- SET_STRING(EXIF_IFD_0, EXIF_TAG_MODEL, EXIF_FORMAT_ASCII, model);
- return true;
-}
-
void ExifUtilsImpl::reset() {
destroyApp1();
if (exif_data_) {
@@ -898,9 +876,8 @@
}
bool ExifUtilsImpl::setFromMetadata(const CameraMetadata& metadata,
+ const CameraMetadata& staticInfo,
const size_t imageWidth, const size_t imageHeight) {
- // How precise the float-to-rational conversion for EXIF tags would be.
- constexpr int kRationalPrecision = 10000;
if (!setImageWidth(imageWidth) ||
!setImageHeight(imageHeight)) {
ALOGE("%s: setting image resolution failed.", __FUNCTION__);
@@ -921,15 +898,37 @@
if (entry.count) {
focal_length = entry.data.f[0];
- if (!setFocalLength(
- static_cast<uint32_t>(focal_length * kRationalPrecision), kRationalPrecision)) {
+ if (!setFocalLength(focal_length)) {
ALOGE("%s: setting focal length failed.", __FUNCTION__);
return false;
}
+
+ camera_metadata_ro_entry sensorSizeEntry =
+ staticInfo.find(ANDROID_SENSOR_INFO_PHYSICAL_SIZE);
+ if (sensorSizeEntry.count == 2) {
+ if (!setFocalLengthIn35mmFilm(
+ focal_length, sensorSizeEntry.data.f[0], sensorSizeEntry.data.f[1])) {
+ ALOGE("%s: setting focal length in 35mm failed.", __FUNCTION__);
+ return false;
+ }
+ }
} else {
ALOGV("%s: Cannot find focal length in metadata.", __FUNCTION__);
}
+ if (metadata.exists(ANDROID_SCALER_CROP_REGION) &&
+ staticInfo.exists(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE)) {
+ entry = metadata.find(ANDROID_SCALER_CROP_REGION);
+ camera_metadata_ro_entry activeArrayEntry =
+ staticInfo.find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+
+ if (!setDigitalZoomRatio(entry.data.i32[2], entry.data.i32[3],
+ activeArrayEntry.data.i32[2], activeArrayEntry.data.i32[3])) {
+ ALOGE("%s: setting digital zoom ratio failed.", __FUNCTION__);
+ return false;
+ }
+ }
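Illustrative example with hypothetical numbers, assuming setDigitalZoomRatio() (implemented outside this hunk) records active-array width over crop width: a 4056x3044 active array cropped to 2028x1522 yields a 2x digital zoom ratio.

    #include <cstdint>
    #include <cstdio>

    int main() {
        // Hypothetical crop: 4056-wide active array cropped to 2028 -> 2x zoom.
        uint32_t cropWidth = 2028, activeWidth = 4056;
        std::printf("DigitalZoomRatio: %u/%u (= %.1fx)\n",
                activeWidth, cropWidth, 1.0 * activeWidth / cropWidth);
        return 0;
    }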
+
if (metadata.exists(ANDROID_JPEG_GPS_COORDINATES)) {
entry = metadata.find(ANDROID_JPEG_GPS_COORDINATES);
if (entry.count < 3) {
@@ -973,6 +972,18 @@
}
}
+ if (staticInfo.exists(ANDROID_CONTROL_AE_COMPENSATION_STEP) &&
+ metadata.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
+ entry = metadata.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION);
+ camera_metadata_ro_entry stepEntry =
+ staticInfo.find(ANDROID_CONTROL_AE_COMPENSATION_STEP);
+ if (!setExposureBias(entry.data.i32[0], stepEntry.data.r[0].numerator,
+ stepEntry.data.r[0].denominator)) {
+ ALOGE("%s: setting exposure bias failed.", __FUNCTION__);
+ return false;
+ }
+ }
+
if (metadata.exists(ANDROID_JPEG_ORIENTATION)) {
entry = metadata.find(ANDROID_JPEG_ORIENTATION);
if (!setOrientation(entry.data.i32[0])) {
@@ -983,50 +994,97 @@
if (metadata.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
entry = metadata.find(ANDROID_SENSOR_EXPOSURE_TIME);
- // int64_t of nanoseconds
- if (!setExposureTime(entry.data.i64[0],1000000000u)) {
+ float exposure_time = 1.0f * entry.data.i64[0] / 1e9;
+ if (!setExposureTime(exposure_time)) {
ALOGE("%s: setting exposure time failed.", __FUNCTION__);
return false;
}
+
+ if (!setShutterSpeed(exposure_time)) {
+ ALOGE("%s: setting shutter speed failed.", __FUNCTION__);
+ return false;
+ }
+ }
+
+ if (metadata.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
+ entry = metadata.find(ANDROID_LENS_FOCUS_DISTANCE);
+ if (!setSubjectDistance(entry.data.f[0])) {
+ ALOGE("%s: setting subject distance failed.", __FUNCTION__);
+ return false;
+ }
+ }
+
+ if (metadata.exists(ANDROID_SENSOR_SENSITIVITY)) {
+ entry = metadata.find(ANDROID_SENSOR_SENSITIVITY);
+ int32_t iso = entry.data.i32[0];
+ camera_metadata_ro_entry postRawSensEntry =
+ metadata.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST);
+ if (postRawSensEntry.count > 0) {
+ iso = iso * postRawSensEntry.data.i32[0] / 100;
+ }
+
+ if (!setIsoSpeedRating(static_cast<uint16_t>(iso))) {
+ ALOGE("%s: setting iso rating failed.", __FUNCTION__);
+ return false;
+ }
}
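Worked example of the sensitivity math above (hypothetical values): analog sensitivity 100 with a post-RAW sensitivity boost of 200 (expressed in 1/100 units) is reported as ISO 100 * 200 / 100 = 200.

    #include <cstdint>
    #include <cstdio>

    int main() {
        // Post-RAW boost is in 1/100 units; 100 means no boost.
        int32_t iso = 100, boost = 200;
        std::printf("ISOSpeedRatings: %d\n", iso * boost / 100);  // 200
        return 0;
    }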
if (metadata.exists(ANDROID_LENS_APERTURE)) {
- const int kAperturePrecision = 10000;
entry = metadata.find(ANDROID_LENS_APERTURE);
- if (!setFNumber(entry.data.f[0] * kAperturePrecision, kAperturePrecision)) {
+ if (!setFNumber(entry.data.f[0])) {
ALOGE("%s: setting F number failed.", __FUNCTION__);
return false;
}
+ if (!setAperture(entry.data.f[0])) {
+ ALOGE("%s: setting aperture failed.", __FUNCTION__);
+ return false;
+ }
}
- if (metadata.exists(ANDROID_FLASH_INFO_AVAILABLE)) {
- entry = metadata.find(ANDROID_FLASH_INFO_AVAILABLE);
- if (entry.data.u8[0] == ANDROID_FLASH_INFO_AVAILABLE_FALSE) {
- const uint32_t kNoFlashFunction = 0x20;
- if (!setFlash(kNoFlashFunction)) {
- ALOGE("%s: setting flash failed.", __FUNCTION__);
- return false;
- }
- } else {
- ALOGE("%s: Unsupported flash info: %d",__FUNCTION__, entry.data.u8[0]);
+ static const uint16_t kSRGBColorSpace = 1;
+ if (!setColorSpace(kSRGBColorSpace)) {
+ ALOGE("%s: setting color space failed.", __FUNCTION__);
+ return false;
+ }
+
+ if (staticInfo.exists(ANDROID_LENS_INFO_AVAILABLE_APERTURES)) {
+ entry = staticInfo.find(ANDROID_LENS_INFO_AVAILABLE_APERTURES);
+ if (!setMaxAperture(entry.data.f[0])) {
+ ALOGE("%s: setting max aperture failed.", __FUNCTION__);
+ return false;
+ }
+ }
+
+ if (staticInfo.exists(ANDROID_FLASH_INFO_AVAILABLE)) {
+ entry = staticInfo.find(ANDROID_FLASH_INFO_AVAILABLE);
+ camera_metadata_ro_entry flashStateEntry = metadata.find(ANDROID_FLASH_STATE);
+ camera_metadata_ro_entry aeModeEntry = metadata.find(ANDROID_CONTROL_AE_MODE);
+ uint8_t flashState = flashStateEntry.count > 0 ?
+ flashStateEntry.data.u8[0] : ANDROID_FLASH_STATE_UNAVAILABLE;
+ uint8_t aeMode = aeModeEntry.count > 0 ?
+ aeModeEntry.data.u8[0] : ANDROID_CONTROL_AE_MODE_OFF;
+
+ if (!setFlash(entry.data.u8[0], flashState, aeMode)) {
+ ALOGE("%s: setting flash failed.", __FUNCTION__);
return false;
}
}
if (metadata.exists(ANDROID_CONTROL_AWB_MODE)) {
entry = metadata.find(ANDROID_CONTROL_AWB_MODE);
- if (entry.data.u8[0] == ANDROID_CONTROL_AWB_MODE_AUTO) {
- const uint16_t kAutoWhiteBalance = 0;
- if (!setWhiteBalance(kAutoWhiteBalance)) {
- ALOGE("%s: setting white balance failed.", __FUNCTION__);
- return false;
- }
- } else {
- ALOGE("%s: Unsupported awb mode: %d", __FUNCTION__, entry.data.u8[0]);
+ if (!setWhiteBalance(entry.data.u8[0])) {
+ ALOGE("%s: setting white balance failed.", __FUNCTION__);
return false;
}
}
+ if (metadata.exists(ANDROID_CONTROL_AE_MODE)) {
+ entry = metadata.find(ANDROID_CONTROL_AE_MODE);
+ if (!setExposureMode(entry.data.u8[0])) {
+ ALOGE("%s: setting exposure mode failed.", __FUNCTION__);
+ return false;
+ }
+ }
if (time_available) {
char str[4];
if (snprintf(str, sizeof(str), "%03ld", tp.tv_nsec / 1000000) < 0) {
diff --git a/services/camera/libcameraservice/utils/ExifUtils.h b/services/camera/libcameraservice/utils/ExifUtils.h
index 8ccdd8f..f1d0205 100644
--- a/services/camera/libcameraservice/utils/ExifUtils.h
+++ b/services/camera/libcameraservice/utils/ExifUtils.h
@@ -22,6 +22,24 @@
namespace android {
namespace camera3 {
+/*
+ * Orientation value:
+ * 1 2 3 4 5 6 7 8
+ *
+ * 888888 888888 88 88 8888888888 88 88 8888888888
+ * 88 88 88 88 88 88 88 88 88 88 88 88
+ * 8888 8888 8888 8888 88 8888888888 8888888888 88
+ * 88 88 88 88
+ * 88 88 888888 888888
+ */
+enum ExifOrientation : uint16_t {
+ ORIENTATION_UNDEFINED = 0x0,
+ ORIENTATION_0_DEGREES = 0x1,
+ ORIENTATION_90_DEGREES = 0x6,
+ ORIENTATION_180_DEGREES = 0x3,
+ ORIENTATION_270_DEGREES = 0x8,
+};
+
// This is based on the camera HIDL shim implementation, which was in turn
// based on the original ChromeOS ARC implementation of a V4L2 HAL
@@ -49,81 +67,61 @@
// Initialize() can be called multiple times. The setting of Exif tags will be
// cleared.
virtual bool initialize(const unsigned char *app1Segment, size_t app1SegmentSize) = 0;
+ virtual bool initializeEmpty() = 0;
// Set all known fields from a metadata structure
virtual bool setFromMetadata(const CameraMetadata& metadata,
+ const CameraMetadata& staticInfo,
const size_t imageWidth, const size_t imageHeight) = 0;
// Sets the lens aperture.
// Returns false if memory allocation fails.
- virtual bool setAperture(uint32_t numerator, uint32_t denominator) = 0;
+ virtual bool setAperture(float aperture) = 0;
- // Sets the value of brightness.
- // Returns false if memory allocation fails.
- virtual bool setBrightness(int32_t numerator, int32_t denominator) = 0;
-
- // Sets the color space.
+ // Sets the color space.
// Returns false if memory allocation fails.
virtual bool setColorSpace(uint16_t color_space) = 0;
- // Sets the information to compressed data.
- // Returns false if memory allocation fails.
- virtual bool setComponentsConfiguration(const std::string& components_configuration) = 0;
-
- // Sets the compression scheme used for the image data.
- // Returns false if memory allocation fails.
- virtual bool setCompression(uint16_t compression) = 0;
-
- // Sets image contrast.
- // Returns false if memory allocation fails.
- virtual bool setContrast(uint16_t contrast) = 0;
-
// Sets the date and time of image last modified. It takes local time. The
// name of the tag is DateTime in IFD0.
// Returns false if memory allocation fails.
virtual bool setDateTime(const struct tm& t) = 0;
- // Sets the image description.
- // Returns false if memory allocation fails.
- virtual bool setDescription(const std::string& description) = 0;
-
// Sets the digital zoom ratio, computed from the crop region dimensions and
// the sensor active array dimensions.
// Returns false if memory allocation fails.
- virtual bool setDigitalZoomRatio(uint32_t numerator, uint32_t denominator) = 0;
+ virtual bool setDigitalZoomRatio(uint32_t crop_width, uint32_t crop_height,
+ uint32_t sensor_width, uint32_t sensor_height) = 0;
// Sets the exposure bias.
// Returns false if memory allocation fails.
- virtual bool setExposureBias(int32_t numerator, int32_t denominator) = 0;
+ virtual bool setExposureBias(int32_t ev,
+ uint32_t ev_step_numerator, uint32_t ev_step_denominator) = 0;
// Sets the exposure mode set when the image was shot.
// Returns false if memory allocation fails.
- virtual bool setExposureMode(uint16_t exposure_mode) = 0;
-
- // Sets the program used by the camera to set exposure when the picture is
- // taken.
- // Returns false if memory allocation fails.
- virtual bool setExposureProgram(uint16_t exposure_program) = 0;
+ virtual bool setExposureMode(uint8_t exposure_mode) = 0;
// Sets the exposure time, given in seconds.
// Returns false if memory allocation fails.
- virtual bool setExposureTime(uint32_t numerator, uint32_t denominator) = 0;
+ virtual bool setExposureTime(float exposure_time) = 0;
// Sets the status of flash.
// Returns false if memory allocation fails.
- virtual bool setFlash(uint16_t flash) = 0;
+ virtual bool setFlash(uint8_t flash_available, uint8_t flash_state, uint8_t ae_mode) = 0;
// Sets the F number.
// Returns false if memory allocation fails.
- virtual bool setFNumber(uint32_t numerator, uint32_t denominator) = 0;
+ virtual bool setFNumber(float f_number) = 0;
// Sets the focal length of lens used to take the image in millimeters.
// Returns false if memory allocation fails.
- virtual bool setFocalLength(uint32_t numerator, uint32_t denominator) = 0;
+ virtual bool setFocalLength(float focal_length) = 0;
- // Sets the degree of overall image gain adjustment.
+ // Sets the 35mm film equivalent focal length of the lens, in millimeters.
// Returns false if memory allocation fails.
- virtual bool setGainControl(uint16_t gain_control) = 0;
+ virtual bool setFocalLengthIn35mmFilm(float focal_length,
+ float sensor_size_x, float sensor_size_y) = 0;
// Sets the altitude in meters.
// Returns false if memory allocation fails.
@@ -157,45 +155,25 @@
// Returns false if memory allocation fails.
virtual bool setIsoSpeedRating(uint16_t iso_speed_ratings) = 0;
- // Sets the kind of light source.
- // Returns false if memory allocation fails.
- virtual bool setLightSource(uint16_t light_source) = 0;
-
// Sets the smallest F number of the lens.
// Returns false if memory allocation fails.
- virtual bool setMaxAperture(uint32_t numerator, uint32_t denominator) = 0;
-
- // Sets the metering mode.
- // Returns false if memory allocation fails.
- virtual bool setMeteringMode(uint16_t metering_mode) = 0;
+ virtual bool setMaxAperture(float aperture) = 0;
// Sets image orientation.
// Returns false if memory allocation fails.
- virtual bool setOrientation(uint16_t orientation) = 0;
+ virtual bool setOrientation(uint16_t degrees) = 0;
- // Sets the unit for measuring XResolution and YResolution.
+ // Sets image orientation from an EXIF orientation value.
// Returns false if memory allocation fails.
- virtual bool setResolutionUnit(uint16_t resolution_unit) = 0;
-
- // Sets image saturation.
- // Returns false if memory allocation fails.
- virtual bool setSaturation(uint16_t saturation) = 0;
-
- // Sets the type of scene that was shot.
- // Returns false if memory allocation fails.
- virtual bool setSceneCaptureType(uint16_t type) = 0;
-
- // Sets image sharpness.
- // Returns false if memory allocation fails.
- virtual bool setSharpness(uint16_t sharpness) = 0;
+ virtual bool setOrientationValue(ExifOrientation orientationValue) = 0;
// Sets the shutter speed.
// Returns false if memory allocation fails.
- virtual bool setShutterSpeed(int32_t numerator, int32_t denominator) = 0;
+ virtual bool setShutterSpeed(float exposure_time) = 0;
// Sets the distance to the subject, given as the focus distance in diopters.
// Returns false if memory allocation fails.
- virtual bool setSubjectDistance(uint32_t numerator, uint32_t denominator) = 0;
+ virtual bool setSubjectDistance(float diopters) = 0;
// Sets the fractions of seconds for the <DateTime> tag.
// Returns false if memory allocation fails.
@@ -203,28 +181,7 @@
// Sets the white balance mode set when the image was shot.
// Returns false if memory allocation fails.
- virtual bool setWhiteBalance(uint16_t white_balance) = 0;
-
- // Sets the number of pixels per resolution unit in the image width.
- // Returns false if memory allocation fails.
- virtual bool setXResolution(uint32_t numerator, uint32_t denominator) = 0;
-
- // Sets the position of chrominance components in relation to the luminance
- // component.
- // Returns false if memory allocation fails.
- virtual bool setYCbCrPositioning(uint16_t ycbcr_positioning) = 0;
-
- // Sets the number of pixels per resolution unit in the image length.
- // Returns false if memory allocation fails.
- virtual bool setYResolution(uint32_t numerator, uint32_t denominator) = 0;
-
- // Sets the manufacturer of camera.
- // Returns false if memory allocation fails.
- virtual bool setMake(const std::string& make) = 0;
-
- // Sets the model number of camera.
- // Returns false if memory allocation fails.
- virtual bool setModel(const std::string& model) = 0;
+ virtual bool setWhiteBalance(uint8_t white_balance) = 0;
// Generates APP1 segment.
// Returns false if generating APP1 segment fails.