Merge "VolumeShaper: Fixes for updated Cts test" into oc-dev
diff --git a/camera/include/camera/ndk/NdkCameraMetadataTags.h b/camera/include/camera/ndk/NdkCameraMetadataTags.h
index 25d364e..8b76cdf 100644
--- a/camera/include/camera/ndk/NdkCameraMetadataTags.h
+++ b/camera/include/camera/ndk/NdkCameraMetadataTags.h
@@ -1547,6 +1547,7 @@
* <code>false</code> if present.</p>
* <p>For applications targeting SDK versions older than O, the value of enableZsl in all
* capture templates is always <code>false</code> if present.</p>
+ * <p>For application-operated ZSL, use the CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.</p>
*
* @see ACAMERA_CONTROL_CAPTURE_INTENT
* @see ACAMERA_SENSOR_TIMESTAMP
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index 80aad2f..dfd5df7 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -1048,6 +1048,10 @@
bool haveVideo = false;
for (size_t i = 0; i < numTracks; ++i) {
sp<IMediaSource> source = extractor->getTrack(i);
+ if (source == nullptr) {
+ fprintf(stderr, "skip NULL track %zu, track count %zu.\n", i, numTracks);
+ continue;
+ }
const char *mime;
CHECK(source->getFormat()->findCString(
@@ -1110,6 +1114,10 @@
}
mediaSource = extractor->getTrack(i);
+ if (mediaSource == nullptr) {
+ fprintf(stderr, "skip NULL track %zu, total tracks %zu.\n", i, numTracks);
+ return -1;
+ }
}
}
diff --git a/cmds/stagefright/stream.cpp b/cmds/stagefright/stream.cpp
index 8f9333a..2e1d240 100644
--- a/cmds/stagefright/stream.cpp
+++ b/cmds/stagefright/stream.cpp
@@ -171,7 +171,8 @@
mWriter = new MPEG2TSWriter(
this, &MyConvertingStreamSource::WriteDataWrapper);
- for (size_t i = 0; i < extractor->countTracks(); ++i) {
+ size_t numTracks = extractor->countTracks();
+ for (size_t i = 0; i < numTracks; ++i) {
const sp<MetaData> &meta = extractor->getTrackMetaData(i);
const char *mime;
@@ -181,7 +182,12 @@
continue;
}
- CHECK_EQ(mWriter->addSource(extractor->getTrack(i)), (status_t)OK);
+ sp<IMediaSource> track = extractor->getTrack(i);
+ if (track == nullptr) {
+ fprintf(stderr, "skip NULL track %zu, total tracks %zu\n", i, numTracks);
+ continue;
+ }
+ CHECK_EQ(mWriter->addSource(track), (status_t)OK);
}
CHECK_EQ(mWriter->start(), (status_t)OK);
diff --git a/include/ndk/NdkImageReader.h b/include/ndk/NdkImageReader.h
index a158da9..e3600c2 100644
--- a/include/ndk/NdkImageReader.h
+++ b/include/ndk/NdkImageReader.h
@@ -307,22 +307,38 @@
* for the consumer usage. All other parameters and the return values are identical to those passed
* to {@link AImageReader_new}.
*
- * @param usage0 specifies how the consumer will access the AImage, using combination of the
- * AHARDWAREBUFFER_USAGE0 flags described in {@link hardware_buffer.h}.
- * Passing {@link AHARDWAREBUFFER_USAGE0_CPU_READ_OFTEN} is equivalent to calling
- * {@link AImageReader_new} with the same parameters. Note that consumers that do not
- * require CPU access to the buffer should omit {@link
- * AHARDWAREBUFFER_USAGE0_CPU_READ_OFTEN} to improve performance.
- * @param usage1 specifies how the consumer will access the AImage, using combination of the
- * AHARDWAREBUFFER_USAGE1 flags described in {@link hardware_buffer.h}.
+ * @param usage specifies how the consumer will access the AImage, using combination of the
+ * AHARDWAREBUFFER_USAGE flags described in {@link hardware_buffer.h}.
+ * Passing {@link AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN} is equivalent to calling
+ * {@link AImageReader_new} with the same parameters.
+ *
+ * Note that not all format and usage flag combinations are supported by the {@link AImageReader}.
+ * Below are the combinations supported by the {@link AImageReader}.
+ * <table>
+ * <tr>
+ * <th>Format</th>
+ * <th>Compatible usage flags</th>
+ * </tr>
+ * <tr>
+ * <td>non-{@link AIMAGE_FORMAT_PRIVATE PRIVATE} formats defined in {@link AImage.h}
+ * </td>
+ * <td>{@link AHARDWAREBUFFER_USAGE_CPU_READ_RARELY} or
+ * {@link AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN}</td>
+ * </tr>
+ * <tr>
+ * <td>{@link AIMAGE_FORMAT_RGBA_8888}</td>
+ * <td>{@link AHARDWAREBUFFER_USAGE_VIDEO_ENCODE} or
+ * {@link AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE}, or a combination of the two</td>
+ * </tr>
+ * </table>
*
* @see AImage
* @see AImageReader_new
* @see AHardwareBuffer
*/
media_status_t AImageReader_newWithUsage(
- int32_t width, int32_t height, int32_t format, uint64_t usage0,
- uint64_t usage1, int32_t maxImages, /*out*/ AImageReader** reader);
+ int32_t width, int32_t height, int32_t format, uint64_t usage, int32_t maxImages,
+ /*out*/ AImageReader** reader);
/*
* Acquire the next {@link AImage} from the image reader's queue asynchronously.
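
For orientation, a minimal sketch (not part of this patch) of calling the consolidated AImageReader_newWithUsage entry point with a single usage mask; the size, format, usage, and maxImages values are illustrative only:

    #include <media/NdkImageReader.h>
    #include <android/hardware_buffer.h>

    // Sketch: create an RGBA reader whose buffers can be sampled by the GPU.
    static media_status_t createGpuSampledReader(AImageReader **readerOut) {
        return AImageReader_newWithUsage(
                1920, 1080, AIMAGE_FORMAT_RGBA_8888,
                AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE,
                4 /*maxImages*/, readerOut);
    }
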
diff --git a/media/libaaudio/examples/input_monitor/src/input_monitor_callback.cpp b/media/libaaudio/examples/input_monitor/src/input_monitor_callback.cpp
index b482e93..7c34252 100644
--- a/media/libaaudio/examples/input_monitor/src/input_monitor_callback.cpp
+++ b/media/libaaudio/examples/input_monitor/src/input_monitor_callback.cpp
@@ -24,7 +24,7 @@
#include <time.h>
#include <aaudio/AAudio.h>
-#define NUM_SECONDS 10
+#define NUM_SECONDS 5
#define NANOS_PER_MICROSECOND ((int64_t)1000)
#define NANOS_PER_MILLISECOND (NANOS_PER_MICROSECOND * 1000)
#define NANOS_PER_SECOND (NANOS_PER_MILLISECOND * 1000)
@@ -33,13 +33,13 @@
#define SHARING_MODE AAUDIO_SHARING_MODE_SHARED
/**
- * Simple wrapper for AAudio that opens a default stream and then calls
- * a callback function to fill the output buffers.
+ * Simple wrapper for AAudio that opens an input stream and then calls
+ * a callback function to process the input data.
*/
-class SimpleAAudioPlayer {
+class SimpleAAudioRecorder {
public:
- SimpleAAudioPlayer() {}
- ~SimpleAAudioPlayer() {
+ SimpleAAudioRecorder() {}
+ ~SimpleAAudioRecorder() {
close();
};
@@ -71,6 +71,15 @@
}
return AAudioStream_getSamplesPerFrame(mStream);
}
+ /**
+ * Only call this after open() has been called.
+ */
+ int64_t getFramesRead() {
+ if (mStream == nullptr) {
+ return AAUDIO_ERROR_INVALID_STATE;
+ }
+ return AAudioStream_getFramesRead(mStream);
+ }
/**
* Open a stream
@@ -85,7 +94,7 @@
AAudioStreamBuilder_setDirection(mBuilder, AAUDIO_DIRECTION_INPUT);
AAudioStreamBuilder_setSharingMode(mBuilder, mRequestedSharingMode);
AAudioStreamBuilder_setDataCallback(mBuilder, proc, userContext);
- AAudioStreamBuilder_setFormat(mBuilder, AAUDIO_FORMAT_PCM_I16);
+ AAudioStreamBuilder_setFormat(mBuilder, AAUDIO_FORMAT_PCM_FLOAT);
// Open an AAudioStream using the Builder.
result = AAudioStreamBuilder_openStream(mBuilder, &mStream);
@@ -121,11 +130,10 @@
}
// Write zero data to fill up the buffer and prevent underruns.
- // Assume format is PCM_I16. TODO use floats.
aaudio_result_t prime() {
int32_t samplesPerFrame = AAudioStream_getSamplesPerFrame(mStream);
const int numFrames = 32; // arbitrary
- int16_t zeros[numFrames * samplesPerFrame];
+ float zeros[numFrames * samplesPerFrame];
memset(zeros, 0, sizeof(zeros));
aaudio_result_t result = numFrames;
while (result == numFrames) {
@@ -151,8 +159,16 @@
fprintf(stderr, "ERROR - AAudioStream_requestStop() returned %d %s\n",
result, AAudio_convertResultToText(result));
}
- int32_t xRunCount = AAudioStream_getXRunCount(mStream);
- printf("AAudioStream_getXRunCount %d\n", xRunCount);
+ return result;
+ }
+
+ // Pause the stream. AAudio will stop calling your callback function.
+ aaudio_result_t pause() {
+ aaudio_result_t result = AAudioStream_requestPause(mStream);
+ if (result != AAUDIO_OK) {
+ fprintf(stderr, "ERROR - AAudioStream_requestPause() returned %d %s\n",
+ result, AAudio_convertResultToText(result));
+ }
return result;
}
@@ -227,7 +243,7 @@
int main(int argc, char **argv)
{
(void)argc; // unused
- SimpleAAudioPlayer player;
+ SimpleAAudioRecorder recorder;
PeakTrackerData_t myData = {0.0};
aaudio_result_t result;
const int displayRateHz = 20; // arbitrary
@@ -238,37 +254,60 @@
setvbuf(stdout, nullptr, _IONBF, (size_t) 0);
printf("%s - Display audio input using an AAudio callback\n", argv[0]);
- player.setSharingMode(SHARING_MODE);
+ recorder.setSharingMode(SHARING_MODE);
- result = player.open(MyDataCallbackProc, &myData);
+ result = recorder.open(MyDataCallbackProc, &myData);
if (result != AAUDIO_OK) {
- fprintf(stderr, "ERROR - player.open() returned %d\n", result);
+ fprintf(stderr, "ERROR - recorder.open() returned %d\n", result);
goto error;
}
- printf("player.getFramesPerSecond() = %d\n", player.getFramesPerSecond());
- printf("player.getSamplesPerFrame() = %d\n", player.getSamplesPerFrame());
+ printf("recorder.getFramesPerSecond() = %d\n", recorder.getFramesPerSecond());
+ printf("recorder.getSamplesPerFrame() = %d\n", recorder.getSamplesPerFrame());
- result = player.start();
+ result = recorder.start();
if (result != AAUDIO_OK) {
- fprintf(stderr, "ERROR - player.start() returned %d\n", result);
+ fprintf(stderr, "ERROR - recorder.start() returned %d\n", result);
goto error;
}
- printf("Sleep for %d seconds while audio plays in a callback thread.\n", NUM_SECONDS);
- for (int i = 0; i < loopsNeeded; i++)
+ printf("Sleep for %d seconds while audio record in a callback thread.\n", NUM_SECONDS);
+ for (int i = 0; i < loopsNeeded; i++)
{
const struct timespec request = { .tv_sec = 0,
.tv_nsec = NANOS_PER_SECOND / displayRateHz };
(void) clock_nanosleep(CLOCK_MONOTONIC, 0 /*flags*/, &request, NULL /*remain*/);
+ printf("%08d: ", (int)recorder.getFramesRead());
+ displayPeakLevel(myData.peakLevel);
+ }
+ printf("Woke up. Stop for a moment.\n");
+
+ result = recorder.stop();
+ if (result != AAUDIO_OK) {
+ goto error;
+ }
+ sleep(1);
+ result = recorder.start();
+ if (result != AAUDIO_OK) {
+ fprintf(stderr, "ERROR - recorder.start() returned %d\n", result);
+ goto error;
+ }
+
+ printf("Sleep for %d seconds while audio records in a callback thread.\n", NUM_SECONDS);
+ for (int i = 0; i < loopsNeeded; i++)
+ {
+ const struct timespec request = { .tv_sec = 0,
+ .tv_nsec = NANOS_PER_SECOND / displayRateHz };
+ (void) clock_nanosleep(CLOCK_MONOTONIC, 0 /*flags*/, &request, NULL /*remain*/);
+ printf("%08d: ", (int)recorder.getFramesRead());
displayPeakLevel(myData.peakLevel);
}
printf("Woke up now.\n");
- result = player.stop();
+ result = recorder.stop();
if (result != AAUDIO_OK) {
goto error;
}
- result = player.close();
+ result = recorder.close();
if (result != AAUDIO_OK) {
goto error;
}
@@ -276,7 +315,7 @@
printf("SUCCESS\n");
return EXIT_SUCCESS;
error:
- player.close();
+ recorder.close();
printf("exiting - AAudio result = %d = %s\n", result, AAudio_convertResultToText(result));
return EXIT_FAILURE;
}
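
For reference, a condensed, hypothetical sketch of the record-with-callback pattern this example now follows (float input, explicit stop and restart); only the AAudio calls come from the API, everything else is illustrative:

    #include <aaudio/AAudio.h>
    #include <unistd.h>

    // Sketch: minimal input stream driven by a data callback.
    static aaudio_data_callback_result_t onInput(AAudioStream *stream, void *userData,
                                                 void *audioData, int32_t numFrames) {
        (void) stream; (void) userData;
        (void) audioData; (void) numFrames;  // inspect the float samples here
        return AAUDIO_CALLBACK_RESULT_CONTINUE;
    }

    static aaudio_result_t recordBriefly() {  // illustrative helper, not part of the change
        AAudioStreamBuilder *builder = nullptr;
        AAudioStream *stream = nullptr;
        aaudio_result_t result = AAudio_createStreamBuilder(&builder);
        if (result != AAUDIO_OK) return result;
        AAudioStreamBuilder_setDirection(builder, AAUDIO_DIRECTION_INPUT);
        AAudioStreamBuilder_setFormat(builder, AAUDIO_FORMAT_PCM_FLOAT);
        AAudioStreamBuilder_setDataCallback(builder, onInput, nullptr);
        result = AAudioStreamBuilder_openStream(builder, &stream);
        AAudioStreamBuilder_delete(builder);
        if (result != AAUDIO_OK) return result;
        AAudioStream_requestStart(stream);
        sleep(2);                          // record for a moment
        AAudioStream_requestStop(stream);  // input streams stop; they do not pause
        AAudioStream_close(stream);
        return AAUDIO_OK;
    }
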
diff --git a/media/libaaudio/examples/write_sine/jni/Android.mk b/media/libaaudio/examples/write_sine/jni/Android.mk
index 5a884e1..0bda008 100644
--- a/media/libaaudio/examples/write_sine/jni/Android.mk
+++ b/media/libaaudio/examples/write_sine/jni/Android.mk
@@ -4,7 +4,8 @@
LOCAL_MODULE_TAGS := tests
LOCAL_C_INCLUDES := \
$(call include-path-for, audio-utils) \
- frameworks/av/media/libaaudio/include
+ frameworks/av/media/libaaudio/include \
+ frameworks/av/media/libaaudio/src
# NDK recommends using this kind of relative path instead of an absolute path.
LOCAL_SRC_FILES:= ../src/write_sine.cpp
diff --git a/media/libaaudio/examples/write_sine/src/SineGenerator.h b/media/libaaudio/examples/write_sine/src/SineGenerator.h
index f2eb984..64b772d 100644
--- a/media/libaaudio/examples/write_sine/src/SineGenerator.h
+++ b/media/libaaudio/examples/write_sine/src/SineGenerator.h
@@ -79,7 +79,7 @@
}
}
- double mAmplitude = 0.005; // unitless scaler
+ double mAmplitude = 0.05; // unitless scaler
double mPhase = 0.0;
double mPhaseIncrement = 440 * M_PI * 2 / 48000;
double mFrameRate = 48000;
diff --git a/media/libaaudio/examples/write_sine/src/write_sine.cpp b/media/libaaudio/examples/write_sine/src/write_sine.cpp
index df55c3f..57a5273 100644
--- a/media/libaaudio/examples/write_sine/src/write_sine.cpp
+++ b/media/libaaudio/examples/write_sine/src/write_sine.cpp
@@ -22,8 +22,8 @@
#include <aaudio/AAudio.h>
#include "SineGenerator.h"
-#define SAMPLE_RATE 48000
-#define NUM_SECONDS 5
+#define SAMPLE_RATE 48000
+#define NUM_SECONDS 5
#define NANOS_PER_MICROSECOND ((int64_t)1000)
#define NANOS_PER_MILLISECOND (NANOS_PER_MICROSECOND * 1000)
#define NANOS_PER_SECOND (NANOS_PER_MILLISECOND * 1000)
@@ -104,6 +104,10 @@
AAudioStreamBuilder_setFormat(aaudioBuilder, REQUESTED_FORMAT);
AAudioStreamBuilder_setSharingMode(aaudioBuilder, REQUESTED_SHARING_MODE);
+ AAudioStreamBuilder_setPerformanceMode(aaudioBuilder, AAUDIO_PERFORMANCE_MODE_NONE);
+ //AAudioStreamBuilder_setPerformanceMode(aaudioBuilder, AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+ //AAudioStreamBuilder_setPerformanceMode(aaudioBuilder, AAUDIO_PERFORMANCE_MODE_POWER_SAVING);
+
// Create an AAudioStream using the Builder.
result = AAudioStreamBuilder_openStream(aaudioBuilder, &aaudioStream);
if (result != AAUDIO_OK) {
@@ -132,7 +136,6 @@
// This is the number of frames that are read in one chunk by a DMA controller
// or a DSP or a mixer.
framesPerBurst = AAudioStream_getFramesPerBurst(aaudioStream);
- printf("Buffer: framesPerBurst = %d\n",framesPerBurst);
printf("Buffer: bufferSize = %d\n", AAudioStream_getBufferSizeInFrames(aaudioStream));
bufferCapacity = AAudioStream_getBufferCapacityInFrames(aaudioStream);
printf("Buffer: bufferCapacity = %d, remainder = %d\n",
@@ -144,12 +147,15 @@
while (framesPerWrite < 48) {
framesPerWrite *= 2;
}
- printf("DataFormat: framesPerWrite = %d\n",framesPerWrite);
+ printf("Buffer: framesPerBurst = %d\n",framesPerBurst);
+ printf("Buffer: framesPerWrite = %d\n",framesPerWrite);
actualDataFormat = AAudioStream_getFormat(aaudioStream);
printf("DataFormat: requested = %d, actual = %d\n", REQUESTED_FORMAT, actualDataFormat);
// TODO handle other data formats
+ printf("PerformanceMode: %d\n", AAudioStream_getPerformanceMode(aaudioStream));
+
// Allocate a buffer for the audio data.
if (actualDataFormat == AAUDIO_FORMAT_PCM_FLOAT) {
floatData = new float[framesPerWrite * actualChannelCount];
diff --git a/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp b/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
index a7e32bd..1a66f35 100644
--- a/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
+++ b/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
@@ -88,6 +88,10 @@
// AAudioStreamBuilder_setFramesPerDataCallback(mBuilder, CALLBACK_SIZE_FRAMES);
AAudioStreamBuilder_setBufferCapacityInFrames(mBuilder, 48 * 8);
+ //AAudioStreamBuilder_setPerformanceMode(mBuilder, AAUDIO_PERFORMANCE_MODE_NONE);
+ AAudioStreamBuilder_setPerformanceMode(mBuilder, AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+ //AAudioStreamBuilder_setPerformanceMode(mBuilder, AAUDIO_PERFORMANCE_MODE_POWER_SAVING);
+
// Open an AAudioStream using the Builder.
result = AAudioStreamBuilder_openStream(mBuilder, &mStream);
if (result != AAUDIO_OK) goto finish1;
@@ -98,7 +102,6 @@
AAudioStream_getBufferSizeInFrames(mStream));
printf("AAudioStream_getBufferCapacityInFrames() = %d\n",
AAudioStream_getBufferCapacityInFrames(mStream));
- return result;
finish1:
AAudioStreamBuilder_delete(mBuilder);
@@ -227,7 +230,7 @@
// Make printf print immediately so that debug info is not stuck
// in a buffer if we hang or crash.
setvbuf(stdout, nullptr, _IONBF, (size_t) 0);
- printf("%s - Play a sine sweep using an AAudio callback, Z1\n", argv[0]);
+ printf("%s - Play a sine sweep using an AAudio callback\n", argv[0]);
player.setSharingMode(SHARING_MODE);
@@ -278,6 +281,7 @@
printf("Stream state is %d %s!\n", state, AAudio_convertStreamStateToText(state));
break;
}
+ printf("framesWritten = %d\n", (int) AAudioStream_getFramesWritten(player.getStream()));
}
printf("Woke up now.\n");
diff --git a/media/libaaudio/examples/write_sine/static/Android.mk b/media/libaaudio/examples/write_sine/static/Android.mk
index e4da6a8..3fee08a 100644
--- a/media/libaaudio/examples/write_sine/static/Android.mk
+++ b/media/libaaudio/examples/write_sine/static/Android.mk
@@ -4,6 +4,7 @@
LOCAL_MODULE_TAGS := examples
LOCAL_C_INCLUDES := \
$(call include-path-for, audio-utils) \
+ frameworks/av/media/libaaudio/src \
frameworks/av/media/libaaudio/include
# NDK recommends using this kind of relative path instead of an absolute path.
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 532c372..c21caa4 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -125,6 +125,25 @@
};
typedef int32_t aaudio_sharing_mode_t;
+
+enum {
+ /**
+ * No particular performance needs. Default.
+ */
+ AAUDIO_PERFORMANCE_MODE_NONE = 10,
+
+ /**
+ * Extending battery life is most important.
+ */
+ AAUDIO_PERFORMANCE_MODE_POWER_SAVING,
+
+ /**
+ * Reducing latency is most important.
+ */
+ AAUDIO_PERFORMANCE_MODE_LOW_LATENCY
+};
+typedef int32_t aaudio_performance_mode_t;
+
typedef struct AAudioStreamStruct AAudioStream;
typedef struct AAudioStreamBuilderStruct AAudioStreamBuilder;
@@ -279,6 +298,18 @@
*/
AAUDIO_API void AAudioStreamBuilder_setBufferCapacityInFrames(AAudioStreamBuilder* builder,
int32_t numFrames);
+
+/**
+ * Set the requested performance mode.
+ *
+ * The default, if you do not call this function, is AAUDIO_PERFORMANCE_MODE_NONE.
+ *
+ * @param builder reference provided by AAudio_createStreamBuilder()
+ * @param mode the desired performance mode, e.g. AAUDIO_PERFORMANCE_MODE_LOW_LATENCY
+ */
+AAUDIO_API void AAudioStreamBuilder_setPerformanceMode(AAudioStreamBuilder* builder,
+ aaudio_performance_mode_t mode);
+
/**
* Return one of these values from the data callback function.
*/
@@ -476,6 +507,9 @@
* Use AAudioStream_requestStart() to resume playback after a pause.
* After this call the state will be in AAUDIO_STREAM_STATE_PAUSING or AAUDIO_STREAM_STATE_PAUSED.
*
+ * This will return AAUDIO_ERROR_UNIMPLEMENTED for input streams.
+ * For input streams use AAudioStream_requestStop().
+ *
* @param stream reference provided by AAudioStreamBuilder_openStream()
* @return AAUDIO_OK or a negative error.
*/
@@ -488,6 +522,8 @@
* Frame counters are not reset by a flush. They may be advanced.
* After this call the state will be in AAUDIO_STREAM_STATE_FLUSHING or AAUDIO_STREAM_STATE_FLUSHED.
*
+ * This will return AAUDIO_ERROR_UNIMPLEMENTED for input streams.
+ *
* @param stream reference provided by AAudioStreamBuilder_openStream()
* @return AAUDIO_OK or a negative error.
*/
@@ -673,6 +709,9 @@
*
* An underrun or overrun can cause an audible "pop" or "glitch".
*
+ * Note that some INPUT devices may not support this function.
+ * In that case a 0 will always be returned.
+ *
* @param stream reference provided by AAudioStreamBuilder_openStream()
* @return the underrun or overrun count
*/
@@ -722,6 +761,13 @@
AAUDIO_API aaudio_sharing_mode_t AAudioStream_getSharingMode(AAudioStream* stream);
/**
+ * Get the performance mode used by the stream.
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream()
+ * @return the performance mode currently in use by the stream
+ */
+AAUDIO_API aaudio_performance_mode_t AAudioStream_getPerformanceMode(AAudioStream* stream);
+
+/**
* @param stream reference provided by AAudioStreamBuilder_openStream()
* @return direction
*/
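
Putting the new builder call and query together, a minimal usage sketch (output direction assumed; error handling abbreviated):

    #include <aaudio/AAudio.h>

    // Sketch: request low latency and read back the mode actually granted.
    static aaudio_result_t openLowLatencyOutput(AAudioStream **streamOut) {
        AAudioStreamBuilder *builder = nullptr;
        aaudio_result_t result = AAudio_createStreamBuilder(&builder);
        if (result != AAUDIO_OK) return result;
        AAudioStreamBuilder_setPerformanceMode(builder, AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
        result = AAudioStreamBuilder_openStream(builder, streamOut);
        AAudioStreamBuilder_delete(builder);
        if (result == AAUDIO_OK) {
            // The stream reports what it actually got, which may differ from the request.
            aaudio_performance_mode_t actual = AAudioStream_getPerformanceMode(*streamOut);
            (void) actual;
        }
        return result;
    }
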
diff --git a/media/libaaudio/libaaudio.map.txt b/media/libaaudio/libaaudio.map.txt
index efd92ae..8f74800 100644
--- a/media/libaaudio/libaaudio.map.txt
+++ b/media/libaaudio/libaaudio.map.txt
@@ -3,6 +3,7 @@
AAudio_convertResultToText;
AAudio_convertStreamStateToText;
AAudio_createStreamBuilder;
+ AAudioStreamBuilder_setPerformanceMode;
AAudioStreamBuilder_setDeviceId;
AAudioStreamBuilder_setDataCallback;
AAudioStreamBuilder_setErrorCallback;
@@ -34,6 +35,7 @@
AAudioStream_getSampleRate;
AAudioStream_getSamplesPerFrame;
AAudioStream_getChannelCount;
+ AAudioStream_getPerformanceMode;
AAudioStream_getDeviceId;
AAudioStream_getFormat;
AAudioStream_getSharingMode;
diff --git a/media/libaaudio/src/Android.mk b/media/libaaudio/src/Android.mk
index b5bb75f..f43c0ad 100644
--- a/media/libaaudio/src/Android.mk
+++ b/media/libaaudio/src/Android.mk
@@ -39,6 +39,7 @@
utility/FixedBlockAdapter.cpp \
utility/FixedBlockReader.cpp \
utility/FixedBlockWriter.cpp \
+ utility/LinearRamp.cpp \
fifo/FifoBuffer.cpp \
fifo/FifoControllerBase.cpp \
client/AudioEndpoint.cpp \
@@ -93,6 +94,7 @@
utility/FixedBlockAdapter.cpp \
utility/FixedBlockReader.cpp \
utility/FixedBlockWriter.cpp \
+ utility/LinearRamp.cpp \
fifo/FifoBuffer.cpp \
fifo/FifoControllerBase.cpp \
client/AudioEndpoint.cpp \
diff --git a/media/libaaudio/src/binding/SharedMemoryParcelable.cpp b/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
index 0f501dd..1094d9e 100644
--- a/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
+++ b/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
@@ -50,7 +50,8 @@
if (status != NO_ERROR) return status;
if (mSizeInBytes > 0) {
status = parcel->writeDupFileDescriptor(mFd);
- ALOGE_IF(status != NO_ERROR, "SharedMemoryParcelable writeDupFileDescriptor failed : %d", status);
+ ALOGE_IF(status != NO_ERROR, "SharedMemoryParcelable writeDupFileDescriptor failed : %d",
+ status);
}
return status;
}
@@ -61,29 +62,37 @@
return status;
}
if (mSizeInBytes > 0) {
- int originalFD = parcel->readFileDescriptor();
- mFd = fcntl(originalFD, F_DUPFD_CLOEXEC, 0);
+ mOriginalFd = parcel->readFileDescriptor();
+ ALOGV("SharedMemoryParcelable::readFromParcel() LEAK? mOriginalFd = %d\n", mOriginalFd);
+ mFd = fcntl(mOriginalFd, F_DUPFD_CLOEXEC, 0);
+ ALOGV("SharedMemoryParcelable::readFromParcel() LEAK? mFd = %d\n", mFd);
if (mFd == -1) {
status = -errno;
- ALOGE("SharedMemoryParcelable readFileDescriptor fcntl() failed : %d", status);
+ ALOGE("SharedMemoryParcelable readFromParcel fcntl() failed : %d", status);
}
}
return status;
}
aaudio_result_t SharedMemoryParcelable::close() {
- if (mResolvedAddress != nullptr) {
+ if (mResolvedAddress != MMAP_UNRESOLVED_ADDRESS) {
int err = munmap(mResolvedAddress, mSizeInBytes);
if (err < 0) {
ALOGE("SharedMemoryParcelable::close() munmap() failed %d", err);
return AAudioConvert_androidToAAudioResult(err);
}
- mResolvedAddress = nullptr;
+ mResolvedAddress = MMAP_UNRESOLVED_ADDRESS;
}
if (mFd != -1) {
+ ALOGV("SharedMemoryParcelable::close() LEAK? mFd = %d\n", mFd);
::close(mFd);
mFd = -1;
}
+ if (mOriginalFd != -1) {
+ ALOGV("SharedMemoryParcelable::close() LEAK? mOriginalFd = %d\n", mOriginalFd);
+ ::close(mOriginalFd);
+ mOriginalFd = -1;
+ }
return AAUDIO_OK;
}
@@ -99,11 +108,12 @@
offsetInBytes, sizeInBytes, mSizeInBytes);
return AAUDIO_ERROR_OUT_OF_RANGE;
}
- if (mResolvedAddress == nullptr) {
+ if (mResolvedAddress == MMAP_UNRESOLVED_ADDRESS) {
mResolvedAddress = (uint8_t *) mmap(0, mSizeInBytes, PROT_READ|PROT_WRITE,
MAP_SHARED, mFd, 0);
- if (mResolvedAddress == nullptr) {
- ALOGE("SharedMemoryParcelable mmap failed for fd = %d", mFd);
+ if (mResolvedAddress == MMAP_UNRESOLVED_ADDRESS) {
+ ALOGE("SharedMemoryParcelable mmap failed for fd = %d, errno = %s",
+ mFd, strerror(errno));
return AAUDIO_ERROR_INTERNAL;
}
}
diff --git a/media/libaaudio/src/binding/SharedMemoryParcelable.h b/media/libaaudio/src/binding/SharedMemoryParcelable.h
index 22e16f0..64fb473 100644
--- a/media/libaaudio/src/binding/SharedMemoryParcelable.h
+++ b/media/libaaudio/src/binding/SharedMemoryParcelable.h
@@ -64,9 +64,13 @@
void dump();
protected:
- int mFd = -1;
- int32_t mSizeInBytes = 0;
- uint8_t *mResolvedAddress = nullptr;
+
+#define MMAP_UNRESOLVED_ADDRESS reinterpret_cast<uint8_t*>(MAP_FAILED)
+
+ int mFd = -1;
+ int mOriginalFd = -1;
+ int32_t mSizeInBytes = 0;
+ uint8_t *mResolvedAddress = MMAP_UNRESOLVED_ADDRESS;
};
} /* namespace aaudio */
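
The switch from nullptr to MMAP_UNRESOLVED_ADDRESS matters because mmap() reports failure with MAP_FAILED ((void *) -1), not nullptr; a small illustrative check, with the fd and size as assumed inputs:

    #include <cstdint>
    #include <sys/mman.h>

    // Sketch: a mapping helper that tests against MAP_FAILED, never nullptr.
    static uint8_t *mapSharedRegion(int fd, int32_t sizeInBytes) {
        void *addr = mmap(0, sizeInBytes, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
        if (addr == MAP_FAILED) {
            return nullptr;  // caller treats nullptr as "not mapped"; errno has the cause
        }
        return static_cast<uint8_t *>(addr);
    }
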
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index af4b93a..5214db8 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -32,9 +32,10 @@
#include "binding/AAudioStreamConfiguration.h"
#include "binding/IAAudioService.h"
#include "binding/AAudioServiceMessage.h"
-#include "fifo/FifoBuffer.h"
-
#include "core/AudioStreamBuilder.h"
+#include "fifo/FifoBuffer.h"
+#include "utility/LinearRamp.h"
+
#include "AudioStreamInternal.h"
#define LOG_TIMESTAMPS 0
@@ -478,8 +479,9 @@
ALOGW("WARNING - processCommands() AAUDIO_SERVICE_EVENT_DISCONNECTED");
break;
case AAUDIO_SERVICE_EVENT_VOLUME:
- mVolume = message->event.dataDouble;
- ALOGD_IF(MYLOG_CONDITION, "processCommands() AAUDIO_SERVICE_EVENT_VOLUME %f", mVolume);
+ mVolumeRamp.setTarget((float) message->event.dataDouble);
+ ALOGD_IF(MYLOG_CONDITION, "processCommands() AAUDIO_SERVICE_EVENT_VOLUME %f",
+ message->event.dataDouble);
break;
default:
ALOGW("WARNING - processCommands() Unrecognized event = %d",
@@ -639,10 +641,10 @@
}
-// TODO this function needs a major cleanup.
aaudio_result_t AudioStreamInternal::writeNowWithConversion(const void *buffer,
int32_t numFrames) {
- // ALOGD_IF(MYLOG_CONDITION, "AudioStreamInternal::writeNowWithConversion(%p, %d)", buffer, numFrames);
+ // ALOGD_IF(MYLOG_CONDITION, "AudioStreamInternal::writeNowWithConversion(%p, %d)",
+ // buffer, numFrames);
WrappingBuffer wrappingBuffer;
uint8_t *source = (uint8_t *) buffer;
int32_t framesLeft = numFrames;
@@ -659,31 +661,67 @@
framesToWrite = framesAvailable;
}
int32_t numBytes = getBytesPerFrame() * framesToWrite;
- // TODO handle volume scaling
- if (getFormat() == mDeviceFormat) {
- // Copy straight through.
- memcpy(wrappingBuffer.data[partIndex], source, numBytes);
- } else if (getFormat() == AAUDIO_FORMAT_PCM_FLOAT
- && mDeviceFormat == AAUDIO_FORMAT_PCM_I16) {
- // Data conversion.
- AAudioConvert_floatToPcm16(
- (const float *) source,
- framesToWrite * getSamplesPerFrame(),
- (int16_t *) wrappingBuffer.data[partIndex]);
- } else if (getFormat() == AAUDIO_FORMAT_PCM_I16
- && mDeviceFormat == AAUDIO_FORMAT_PCM_FLOAT) {
- // Data conversion.
- AAudioConvert_pcm16ToFloat(
- (const int16_t *) source,
- framesToWrite * getSamplesPerFrame(),
- (float *) wrappingBuffer.data[partIndex]);
- } else {
- // TODO handle more conversions
- ALOGE("AudioStreamInternal::writeNowWithConversion() unsupported formats: %d, %d",
- getFormat(), mDeviceFormat);
- return AAUDIO_ERROR_UNEXPECTED_VALUE;
+ int32_t numSamples = framesToWrite * getSamplesPerFrame();
+ // Data conversion.
+ float levelFrom;
+ float levelTo;
+ bool ramping = mVolumeRamp.nextSegment(framesToWrite * getSamplesPerFrame(),
+ &levelFrom, &levelTo);
+ // The formats are validated when the stream is opened so we do not have to
+ // check for illegal combinations here.
+ if (getFormat() == AAUDIO_FORMAT_PCM_FLOAT) {
+ if (mDeviceFormat == AAUDIO_FORMAT_PCM_FLOAT) {
+ AAudio_linearRamp(
+ (const float *) source,
+ (float *) wrappingBuffer.data[partIndex],
+ framesToWrite,
+ getSamplesPerFrame(),
+ levelFrom,
+ levelTo);
+ } else if (mDeviceFormat == AAUDIO_FORMAT_PCM_I16) {
+ if (ramping) {
+ AAudioConvert_floatToPcm16(
+ (const float *) source,
+ (int16_t *) wrappingBuffer.data[partIndex],
+ framesToWrite,
+ getSamplesPerFrame(),
+ levelFrom,
+ levelTo);
+ } else {
+ AAudioConvert_floatToPcm16(
+ (const float *) source,
+ (int16_t *) wrappingBuffer.data[partIndex],
+ numSamples,
+ levelTo);
+ }
+ }
+ } else if (getFormat() == AAUDIO_FORMAT_PCM_I16) {
+ if (mDeviceFormat == AAUDIO_FORMAT_PCM_FLOAT) {
+ if (ramping) {
+ AAudioConvert_pcm16ToFloat(
+ (const int16_t *) source,
+ (float *) wrappingBuffer.data[partIndex],
+ framesToWrite,
+ getSamplesPerFrame(),
+ levelFrom,
+ levelTo);
+ } else {
+ AAudioConvert_pcm16ToFloat(
+ (const int16_t *) source,
+ (float *) wrappingBuffer.data[partIndex],
+ numSamples,
+ levelTo);
+ }
+ } else if (mDeviceFormat == AAUDIO_FORMAT_PCM_I16) {
+ AAudio_linearRamp(
+ (const int16_t *) source,
+ (int16_t *) wrappingBuffer.data[partIndex],
+ framesToWrite,
+ getSamplesPerFrame(),
+ levelFrom,
+ levelTo);
+ }
}
-
source += numBytes;
framesLeft -= framesToWrite;
} else {
@@ -753,4 +791,10 @@
return framesRead;
}
-// TODO implement getTimestamp
+int64_t AudioStreamInternal::getFramesWritten()
+{
+ int64_t getFramesWritten = mAudioEndpoint.getDownDataWriteCounter()
+ + mFramesOffsetFromService;
+ ALOGD_IF(MYLOG_CONDITION, "AudioStreamInternal::getFramesWritten() returns %lld", (long long)getFramesWritten);
+ return getFramesWritten;
+}
diff --git a/media/libaaudio/src/client/AudioStreamInternal.h b/media/libaaudio/src/client/AudioStreamInternal.h
index 8244311..a43f6c5 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.h
+++ b/media/libaaudio/src/client/AudioStreamInternal.h
@@ -22,11 +22,11 @@
#include "binding/IAAudioService.h"
#include "binding/AudioEndpointParcelable.h"
+#include "binding/AAudioServiceInterface.h"
#include "client/IsochronousClockModel.h"
#include "client/AudioEndpoint.h"
#include "core/AudioStream.h"
-
-#include "binding/AAudioServiceInterface.h"
+#include "utility/LinearRamp.h"
using android::sp;
using android::IAAudioService;
@@ -77,6 +77,7 @@
int32_t getFramesPerBurst() const override;
int64_t getFramesRead() override;
+ int64_t getFramesWritten() override;
int32_t getXRunCount() const override {
return mXRunCount;
@@ -89,6 +90,11 @@
// Called internally from 'C'
void *callbackLoop();
+
+ bool isMMap() override {
+ return true;
+ }
+
protected:
aaudio_result_t processCommands();
@@ -154,7 +160,7 @@
int64_t mLastFramesRead = 0; // used to prevent retrograde motion
int32_t mFramesPerBurst; // frames per HAL transfer
int32_t mXRunCount = 0; // how many underrun events?
- float mVolume = 1.0; // volume that the server told us to use
+ LinearRamp mVolumeRamp;
AAudioServiceInterface &mServiceInterface; // abstract interface to the service
diff --git a/media/libaaudio/src/core/AAudioAudio.cpp b/media/libaaudio/src/core/AAudioAudio.cpp
index 462ecb3..59032d5 100644
--- a/media/libaaudio/src/core/AAudioAudio.cpp
+++ b/media/libaaudio/src/core/AAudioAudio.cpp
@@ -114,8 +114,15 @@
return AAUDIO_OK;
}
+AAUDIO_API void AAudioStreamBuilder_setPerformanceMode(AAudioStreamBuilder* builder,
+ aaudio_performance_mode_t mode)
+{
+ AudioStreamBuilder *streamBuilder = convertAAudioBuilderToStreamBuilder(builder);
+ streamBuilder->setPerformanceMode(mode);
+}
+
AAUDIO_API void AAudioStreamBuilder_setDeviceId(AAudioStreamBuilder* builder,
- int32_t deviceId)
+ int32_t deviceId)
{
AudioStreamBuilder *streamBuilder = convertAAudioBuilderToStreamBuilder(builder);
streamBuilder->setDeviceId(deviceId);
@@ -195,26 +202,21 @@
streamBuilder->setFramesPerDataCallback(frames);
}
-// TODO merge AAudioInternal_openStream into AAudioStreamBuilder_openStream
-static aaudio_result_t AAudioInternal_openStream(AudioStreamBuilder *streamBuilder,
- AAudioStream** streamPtr)
-{
- AudioStream *audioStream = nullptr;
- aaudio_result_t result = streamBuilder->build(&audioStream);
- if (result != AAUDIO_OK) {
- return result;
- } else {
- *streamPtr = (AAudioStream*) audioStream;
- return AAUDIO_OK;
- }
-}
-
AAUDIO_API aaudio_result_t AAudioStreamBuilder_openStream(AAudioStreamBuilder* builder,
AAudioStream** streamPtr)
{
+ AudioStream *audioStream = nullptr;
ALOGD("AAudioStreamBuilder_openStream() ----------------------------------------------");
AudioStreamBuilder *streamBuilder = COMMON_GET_FROM_BUILDER_OR_RETURN(streamPtr);
- return AAudioInternal_openStream(streamBuilder, streamPtr);
+ aaudio_result_t result = streamBuilder->build(&audioStream);
+ ALOGD("AAudioStreamBuilder_openStream() returns %d -----------------------------------",
+ result);
+ if (result == AAUDIO_OK) {
+ *streamPtr = (AAudioStream*) audioStream;
+ } else {
+ *streamPtr = nullptr;
+ }
+ return result;
}
AAUDIO_API aaudio_result_t AAudioStreamBuilder_delete(AAudioStreamBuilder* builder)
@@ -408,6 +410,12 @@
return audioStream->getXRunCount();
}
+AAUDIO_API aaudio_performance_mode_t AAudioStream_getPerformanceMode(AAudioStream* stream)
+{
+ AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
+ return audioStream->getPerformanceMode();
+}
+
AAUDIO_API int32_t AAudioStream_getDeviceId(AAudioStream* stream)
{
AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index 9690848..c3cf27f 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -47,11 +47,14 @@
mSharingMode = builder.getSharingMode();
mSharingModeMatchRequired = builder.isSharingModeMatchRequired();
+ mPerformanceMode = builder.getPerformanceMode();
+
// callbacks
mFramesPerDataCallback = builder.getFramesPerDataCallback();
mDataCallbackProc = builder.getDataCallbackProc();
mErrorCallbackProc = builder.getErrorCallbackProc();
mDataCallbackUserData = builder.getDataCallbackUserData();
+ mErrorCallbackUserData = builder.getErrorCallbackUserData();
// This is very helpful for debugging in the future.
ALOGI("AudioStream.open(): rate = %d, channels = %d, format = %d, sharing = %d",
@@ -72,6 +75,16 @@
return AAUDIO_ERROR_UNEXPECTED_VALUE;
}
+ switch(mPerformanceMode) {
+ case AAUDIO_PERFORMANCE_MODE_NONE:
+ case AAUDIO_PERFORMANCE_MODE_POWER_SAVING:
+ case AAUDIO_PERFORMANCE_MODE_LOW_LATENCY:
+ break;
+ default:
+ ALOGE("AudioStream::open(): illegal performanceMode %d", mPerformanceMode);
+ return AAUDIO_ERROR_UNEXPECTED_VALUE;
+ }
+
return AAUDIO_OK;
}
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index 916870b..c49b46b 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -134,6 +134,10 @@
return mState == AAUDIO_STREAM_STATE_STARTING || mState == AAUDIO_STREAM_STATE_STARTED;
}
+ virtual bool isMMap() {
+ return false;
+ }
+
aaudio_result_t getSampleRate() const {
return mSampleRate;
}
@@ -146,6 +150,14 @@
return mSamplesPerFrame;
}
+ virtual int32_t getPerformanceMode() const {
+ return mPerformanceMode;
+ }
+
+ void setPerformanceMode(aaudio_performance_mode_t performanceMode) {
+ mPerformanceMode = performanceMode;
+ }
+
int32_t getDeviceId() const {
return mDeviceId;
}
@@ -223,11 +235,11 @@
protected:
virtual int64_t incrementFramesWritten(int32_t frames) {
- return static_cast<int64_t>(mFramesWritten.increment(frames));
+ return mFramesWritten.increment(frames);
}
virtual int64_t incrementFramesRead(int32_t frames) {
- return static_cast<int64_t>(mFramesRead.increment(frames));
+ return mFramesRead.increment(frames);
}
/**
@@ -293,6 +305,8 @@
aaudio_direction_t mDirection = AAUDIO_DIRECTION_OUTPUT;
aaudio_stream_state_t mState = AAUDIO_STREAM_STATE_UNINITIALIZED;
+ aaudio_performance_mode_t mPerformanceMode = AAUDIO_PERFORMANCE_MODE_NONE;
+
// callback ----------------------------------
AAudioStream_dataCallback mDataCallbackProc = nullptr; // external callback functions
diff --git a/media/libaaudio/src/core/AudioStreamBuilder.cpp b/media/libaaudio/src/core/AudioStreamBuilder.cpp
index 4e0b8c6..9bc2ef2 100644
--- a/media/libaaudio/src/core/AudioStreamBuilder.cpp
+++ b/media/libaaudio/src/core/AudioStreamBuilder.cpp
@@ -47,66 +47,80 @@
AudioStreamBuilder::~AudioStreamBuilder() {
}
+static aaudio_result_t builder_createStream(aaudio_direction_t direction,
+ aaudio_sharing_mode_t sharingMode,
+ bool tryMMap,
+ AudioStream **audioStreamPtr) {
+ *audioStreamPtr = nullptr;
+ aaudio_result_t result = AAUDIO_OK;
+ switch (direction) {
+
+ case AAUDIO_DIRECTION_INPUT:
+ if (sharingMode == AAUDIO_SHARING_MODE_SHARED) {
+ *audioStreamPtr = new AudioStreamRecord();
+ } else {
+ ALOGE("AudioStreamBuilder(): bad sharing mode = %d for input", sharingMode);
+ result = AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+ }
+ break;
+
+ case AAUDIO_DIRECTION_OUTPUT:
+ if (tryMMap) {
+ // TODO use a singleton for the AAudioBinderClient
+ AAudioBinderClient *aaudioClient = new AAudioBinderClient();
+ *audioStreamPtr = new AudioStreamInternal(*aaudioClient, false);
+ } else {
+ *audioStreamPtr = new AudioStreamTrack();
+ }
+ break;
+
+ default:
+ ALOGE("AudioStreamBuilder(): bad direction = %d", direction);
+ result = AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+ }
+ return result;
+}
+
aaudio_result_t AudioStreamBuilder::build(AudioStream** streamPtr) {
- AudioStream* audioStream = nullptr;
- AAudioBinderClient *aaudioClient = nullptr;
- const aaudio_sharing_mode_t sharingMode = getSharingMode();
+ aaudio_sharing_mode_t sharingMode = getSharingMode();
+ if ((sharingMode == AAUDIO_SHARING_MODE_EXCLUSIVE) && (MMAP_EXCLUSIVE_ENABLED == 0)) {
+ ALOGE("AudioStreamBuilder(): EXCLUSIVE sharing mode not supported");
+ return AAUDIO_ERROR_UNAVAILABLE;
+ }
- switch (getDirection()) {
+ AudioStream *audioStream = nullptr;
+ *streamPtr = nullptr;
- case AAUDIO_DIRECTION_INPUT:
- switch (sharingMode) {
- case AAUDIO_SHARING_MODE_SHARED:
- audioStream = new(std::nothrow) AudioStreamRecord();
- break;
- default:
- ALOGE("AudioStreamBuilder(): bad sharing mode = %d", sharingMode);
- return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
- break;
+ bool tryMMap = (sharingMode == AAUDIO_SHARING_MODE_SHARED) && MMAP_SHARED_ENABLED;
+ aaudio_result_t result = builder_createStream(getDirection(), sharingMode,
+ tryMMap, &audioStream);
+ if (result == AAUDIO_OK) {
+ // Open the stream using the parameters from the builder.
+ result = audioStream->open(*this);
+ if (result == AAUDIO_OK) {
+ *streamPtr = audioStream;
+ } else {
+ bool isMMap = audioStream->isMMap();
+ delete audioStream;
+ audioStream = nullptr;
+
+ if (isMMap) {
+ ALOGD("AudioStreamBuilder.build() MMAP stream did not open so try Legacy path");
+ // If MMAP stream failed to open then TRY using a legacy stream.
+ result = builder_createStream(getDirection(), sharingMode,
+ false, &audioStream);
+ if (result == AAUDIO_OK) {
+ result = audioStream->open(*this);
+ if (result == AAUDIO_OK) {
+ *streamPtr = audioStream;
+ } else {
+ delete audioStream;
+ }
+ }
+ }
}
- break;
-
- case AAUDIO_DIRECTION_OUTPUT:
- switch (sharingMode) {
- case AAUDIO_SHARING_MODE_SHARED:
-#if MMAP_SHARED_ENABLED
- aaudioClient = new AAudioBinderClient();
- audioStream = new(std::nothrow) AudioStreamInternal(*aaudioClient, false);
-#else
- audioStream = new(std::nothrow) AudioStreamTrack();
-#endif
- break;
-#if MMAP_EXCLUSIVE_ENABLED
- case AAUDIO_SHARING_MODE_EXCLUSIVE:
- aaudioClient = new AAudioBinderClient();
- audioStream = new(std::nothrow) AudioStreamInternal(*aaudioClient, false);
- break;
-#endif
- default:
- ALOGE("AudioStreamBuilder(): bad sharing mode = %d", sharingMode);
- return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
- break;
- }
- break;
-
- default:
- ALOGE("AudioStreamBuilder(): bad direction = %d", getDirection());
- return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
- break;
}
- if (audioStream == nullptr) {
- delete aaudioClient;
- return AAUDIO_ERROR_NO_MEMORY;
- }
- ALOGD("AudioStreamBuilder(): created audioStream = %p", audioStream);
- // TODO maybe move this out of build and pass the builder to the constructors
- // Open the stream using the parameters from the builder.
- const aaudio_result_t result = audioStream->open(*this);
- if (result != AAUDIO_OK) {
- delete audioStream;
- } else {
- *streamPtr = audioStream;
- }
+ ALOGD("AudioStreamBuilder(): returned %d", result);
return result;
}
diff --git a/media/libaaudio/src/core/AudioStreamBuilder.h b/media/libaaudio/src/core/AudioStreamBuilder.h
index 25baf4c..569ca63 100644
--- a/media/libaaudio/src/core/AudioStreamBuilder.h
+++ b/media/libaaudio/src/core/AudioStreamBuilder.h
@@ -100,6 +100,15 @@
return this;
}
+ int32_t getPerformanceMode() const {
+ return mPerformanceMode;
+ }
+
+ AudioStreamBuilder* setPerformanceMode(aaudio_performance_mode_t performanceMode) {
+ mPerformanceMode = performanceMode;
+ return this;
+ }
+
int32_t getDeviceId() const {
return mDeviceId;
}
@@ -157,14 +166,15 @@
aaudio_result_t build(AudioStream **streamPtr);
private:
- int32_t mSamplesPerFrame = AAUDIO_UNSPECIFIED;
- int32_t mSampleRate = AAUDIO_UNSPECIFIED;
- int32_t mDeviceId = AAUDIO_DEVICE_UNSPECIFIED;
- aaudio_sharing_mode_t mSharingMode = AAUDIO_SHARING_MODE_SHARED;
- bool mSharingModeMatchRequired = false; // must match sharing mode requested
- aaudio_audio_format_t mFormat = AAUDIO_FORMAT_UNSPECIFIED;
- aaudio_direction_t mDirection = AAUDIO_DIRECTION_OUTPUT;
- int32_t mBufferCapacity = AAUDIO_UNSPECIFIED;
+ int32_t mSamplesPerFrame = AAUDIO_UNSPECIFIED;
+ int32_t mSampleRate = AAUDIO_UNSPECIFIED;
+ int32_t mDeviceId = AAUDIO_DEVICE_UNSPECIFIED;
+ aaudio_sharing_mode_t mSharingMode = AAUDIO_SHARING_MODE_SHARED;
+ bool mSharingModeMatchRequired = false; // must match sharing mode requested
+ aaudio_audio_format_t mFormat = AAUDIO_FORMAT_UNSPECIFIED;
+ aaudio_direction_t mDirection = AAUDIO_DIRECTION_OUTPUT;
+ int32_t mBufferCapacity = AAUDIO_UNSPECIFIED;
+ aaudio_performance_mode_t mPerformanceMode = AAUDIO_PERFORMANCE_MODE_NONE;
AAudioStream_dataCallback mDataCallbackProc = nullptr; // external callback functions
void *mDataCallbackUserData = nullptr;
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
index baa24c9..59d754a 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
@@ -83,6 +83,7 @@
}
if (callbackResult == AAUDIO_CALLBACK_RESULT_CONTINUE) {
audioBuffer->size = audioBuffer->frameCount * getBytesPerFrame();
+ incrementClientFrameCounter(audioBuffer->frameCount);
} else {
audioBuffer->size = 0;
}
@@ -107,4 +108,3 @@
break;
}
}
-
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.h b/media/libaaudio/src/legacy/AudioStreamLegacy.h
index c109ee7..d5a3ede 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.h
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.h
@@ -70,6 +70,8 @@
// Implement FixedBlockProcessor
int32_t onProcessFixedBlock(uint8_t *buffer, int32_t numBytes) override;
+ virtual int64_t incrementClientFrameCounter(int32_t frames) = 0;
+
protected:
FixedBlockAdapter *mBlockAdapter = nullptr;
aaudio_wrapping_frames_t mPositionWhenStarting = 0;
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index f0a6ceb..918f5dd 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -177,10 +177,13 @@
aaudio_result_t AudioStreamRecord::requestPause()
{
+ // This does not make sense for an input stream.
+ // There is no real difference between pause() and stop().
return AAUDIO_ERROR_UNIMPLEMENTED;
}
aaudio_result_t AudioStreamRecord::requestFlush() {
+ // This does not make sense for an input stream.
return AAUDIO_ERROR_UNIMPLEMENTED;
}
@@ -259,7 +262,7 @@
int32_t AudioStreamRecord::getXRunCount() const
{
- return AAUDIO_ERROR_UNIMPLEMENTED; // TODO implement when AudioRecord supports it
+ return 0; // TODO implement when AudioRecord supports it
}
int32_t AudioStreamRecord::getFramesPerBurst() const
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.h b/media/libaaudio/src/legacy/AudioStreamRecord.h
index 897a5b3..f4a78e1 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.h
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.h
@@ -68,6 +68,10 @@
// This is public so it can be called from the C callback function.
void processCallback(int event, void *info) override;
+ int64_t incrementClientFrameCounter(int32_t frames) override {
+ return incrementFramesRead(frames);
+ }
+
private:
android::sp<android::AudioRecord> mAudioRecord;
// adapts between variable sized blocks and fixed size blocks
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index 1bb9e53..b4b1c04 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -68,15 +68,29 @@
ALOGD("AudioStreamTrack::open(), samplesPerFrame = %d, channelMask = 0x%08x",
samplesPerFrame, channelMask);
- // TODO add more performance options
- audio_output_flags_t flags = (audio_output_flags_t) AUDIO_OUTPUT_FLAG_FAST;
+ audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE;
+ switch(getPerformanceMode()) {
+ case AAUDIO_PERFORMANCE_MODE_LOW_LATENCY:
+ // Bypass the normal mixer and go straight to the FAST mixer.
+ flags = (audio_output_flags_t)(AUDIO_OUTPUT_FLAG_FAST | AUDIO_OUTPUT_FLAG_RAW);
+ break;
+
+ case AAUDIO_PERFORMANCE_MODE_POWER_SAVING:
+ // This uses a mixer that wakes up less often than the FAST mixer.
+ flags = AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
+ break;
+
+ case AAUDIO_PERFORMANCE_MODE_NONE:
+ default:
+ // No flags. Use a normal mixer in front of the FAST mixer.
+ break;
+ }
int32_t frameCount = builder.getBufferCapacity();
ALOGD("AudioStreamTrack::open(), requested buffer capacity %d", frameCount);
int32_t notificationFrames = 0;
- // TODO implement an unspecified AudioTrack format then use that.
audio_format_t format = (getFormat() == AAUDIO_FORMAT_UNSPECIFIED)
? AUDIO_FORMAT_PCM_FLOAT
: AAudioConvert_aaudioToAndroidDataFormat(getFormat());
@@ -354,6 +368,8 @@
case AAUDIO_STREAM_STATE_STARTING:
case AAUDIO_STREAM_STATE_STARTED:
case AAUDIO_STREAM_STATE_STOPPING:
+ case AAUDIO_STREAM_STATE_PAUSING:
+ case AAUDIO_STREAM_STATE_PAUSED:
result = mAudioTrack->getPosition(&position);
if (result == OK) {
mFramesRead.update32(position);
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.h b/media/libaaudio/src/legacy/AudioStreamTrack.h
index 29f5d15..186a08e 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.h
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.h
@@ -68,6 +68,10 @@
// This is public so it can be called from the C callback function.
void processCallback(int event, void *info) override;
+ int64_t incrementClientFrameCounter(int32_t frames) override {
+ return incrementFramesWritten(frames);
+ }
+
private:
android::sp<android::AudioTrack> mAudioTrack;
diff --git a/media/libaaudio/src/utility/AAudioUtilities.cpp b/media/libaaudio/src/utility/AAudioUtilities.cpp
index efbbfc5..5fa228a 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.cpp
+++ b/media/libaaudio/src/utility/AAudioUtilities.cpp
@@ -27,6 +27,11 @@
using namespace android;
+// This is 3 dB, (10^(3/20)), to match the maximum headroom in AudioTrack for float data.
+// It is designed to allow occasional transient peaks.
+#define MAX_HEADROOM (1.41253754f)
+#define MIN_HEADROOM (0 - MAX_HEADROOM)
+
int32_t AAudioConvert_formatToSizeInBytes(aaudio_audio_format_t format) {
int32_t size = AAUDIO_ERROR_ILLEGAL_ARGUMENT;
switch (format) {
@@ -42,24 +47,153 @@
return size;
}
-// TODO This similar to a function in audio_utils. Consider using that instead.
-void AAudioConvert_floatToPcm16(const float *source, int32_t numSamples, int16_t *destination) {
+
+// TODO call clamp16_from_float function in primitives.h
+static inline int16_t clamp16_from_float(float f) {
+ /* Offset is used to expand the valid range of [-1.0, 1.0) into the 16 lsbs of the
+ * floating point significand. The normal shift is 3<<22, but the -15 offset
+ * is used to multiply by 32768.
+ */
+ static const float offset = (float)(3 << (22 - 15));
+ /* zero = (0x10f << 22) = 0x43c00000 (not directly used) */
+ static const int32_t limneg = (0x10f << 22) /*zero*/ - 32768; /* 0x43bf8000 */
+ static const int32_t limpos = (0x10f << 22) /*zero*/ + 32767; /* 0x43c07fff */
+
+ union {
+ float f;
+ int32_t i;
+ } u;
+
+ u.f = f + offset; /* recenter valid range */
+ /* Now the valid range is represented as integers between [limneg, limpos].
+ * Clamp using the fact that float representation (as an integer) is an ordered set.
+ */
+ if (u.i < limneg)
+ u.i = -32768;
+ else if (u.i > limpos)
+ u.i = 32767;
+ return u.i; /* Return lower 16 bits, the part of interest in the significand. */
+}
+
+// Same but without clipping.
+// Converts the range -1.0f..+1.0f to -32768..+32767.
+static inline int16_t floatToInt16(float f) {
+ static const float offset = (float)(3 << (22 - 15));
+ union {
+ float f;
+ int32_t i;
+ } u;
+ u.f = f + offset; /* recenter valid range */
+ return u.i; /* Return lower 16 bits, the part of interest in the significand. */
+}
+
+static int16_t clipAndClampFloatToPcm16(float sample, float scaler) {
+ // Clip to valid range of a float sample to prevent excessive volume.
+ if (sample > MAX_HEADROOM) sample = MAX_HEADROOM;
+ else if (sample < MIN_HEADROOM) sample = MIN_HEADROOM;
+
+ // Scale and convert to a short.
+ float fval = sample * scaler;
+ return clamp16_from_float(fval);
+}
+
+void AAudioConvert_floatToPcm16(const float *source,
+ int16_t *destination,
+ int32_t numSamples,
+ float amplitude) {
+ float scaler = amplitude;
for (int i = 0; i < numSamples; i++) {
- float fval = source[i];
- fval += 1.0; // to avoid discontinuity at 0.0 caused by truncation
- fval *= 32768.0f;
- int32_t sample = (int32_t) fval;
- // clip to 16-bit range
- if (sample < 0) sample = 0;
- else if (sample > 0x0FFFF) sample = 0x0FFFF;
- sample -= 32768; // center at zero
- destination[i] = (int16_t) sample;
+ float sample = *source++;
+ *destination++ = clipAndClampFloatToPcm16(sample, scaler);
}
}
-void AAudioConvert_pcm16ToFloat(const int16_t *source, int32_t numSamples, float *destination) {
+void AAudioConvert_floatToPcm16(const float *source,
+ int16_t *destination,
+ int32_t numFrames,
+ int32_t samplesPerFrame,
+ float amplitude1,
+ float amplitude2) {
+ float scaler = amplitude1;
+ // divide by numFrames so that we almost reach amplitude2
+ float delta = (amplitude2 - amplitude1) / numFrames;
+ for (int frameIndex = 0; frameIndex < numFrames; frameIndex++) {
+ for (int sampleIndex = 0; sampleIndex < samplesPerFrame; sampleIndex++) {
+ float sample = *source++;
+ *destination++ = clipAndClampFloatToPcm16(sample, scaler);
+ }
+ scaler += delta;
+ }
+}
+
+#define SHORT_SCALE 32768
+
+void AAudioConvert_pcm16ToFloat(const int16_t *source,
+ float *destination,
+ int32_t numSamples,
+ float amplitude) {
+ float scaler = amplitude / SHORT_SCALE;
for (int i = 0; i < numSamples; i++) {
- destination[i] = source[i] * (1.0f / 32768.0f);
+ destination[i] = source[i] * scaler;
+ }
+}
+
+// This code assumes amplitude1 and amplitude2 are between 0.0 and 1.0
+void AAudioConvert_pcm16ToFloat(const int16_t *source,
+ float *destination,
+ int32_t numFrames,
+ int32_t samplesPerFrame,
+ float amplitude1,
+ float amplitude2) {
+ float scaler = amplitude1 / SHORT_SCALE;
+ float delta = (amplitude2 - amplitude1) / (SHORT_SCALE * (float) numFrames);
+ for (int frameIndex = 0; frameIndex < numFrames; frameIndex++) {
+ for (int sampleIndex = 0; sampleIndex < samplesPerFrame; sampleIndex++) {
+ *destination++ = *source++ * scaler;
+ }
+ scaler += delta;
+ }
+}
+
+// This code assumes amplitude1 and amplitude2 are between 0.0 and 1.0
+void AAudio_linearRamp(const float *source,
+ float *destination,
+ int32_t numFrames,
+ int32_t samplesPerFrame,
+ float amplitude1,
+ float amplitude2) {
+ float scaler = amplitude1;
+ float delta = (amplitude2 - amplitude1) / numFrames;
+ for (int frameIndex = 0; frameIndex < numFrames; frameIndex++) {
+ for (int sampleIndex = 0; sampleIndex < samplesPerFrame; sampleIndex++) {
+ float sample = *source++;
+
+ // Clip to valid range of a float sample to prevent excessive volume.
+ if (sample > MAX_HEADROOM) sample = MAX_HEADROOM;
+ else if (sample < MIN_HEADROOM) sample = MIN_HEADROOM;
+
+ *destination++ = sample * scaler;
+ }
+ scaler += delta;
+ }
+}
+
+// This code assumes amplitude1 and amplitude2 are between 0.0 and 1.0
+void AAudio_linearRamp(const int16_t *source,
+ int16_t *destination,
+ int32_t numFrames,
+ int32_t samplesPerFrame,
+ float amplitude1,
+ float amplitude2) {
+ float scaler = amplitude1 / SHORT_SCALE;
+ float delta = (amplitude2 - amplitude1) / (SHORT_SCALE * (float) numFrames);
+ for (int frameIndex = 0; frameIndex < numFrames; frameIndex++) {
+ for (int sampleIndex = 0; sampleIndex < samplesPerFrame; sampleIndex++) {
+ // No need to clip because int16_t range is inherently limited.
+ float sample = *source++ * scaler;
+ *destination++ = floatToInt16(sample);
+ }
+ scaler += delta;
}
}
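
A brief usage sketch of the ramping float-to-PCM16 overload added above; the channel count and levels are arbitrary:

    #include "utility/AAudioUtilities.h"

    // Sketch: convert a stereo float buffer to int16 while fading from full to half level.
    // Both buffers are assumed to hold numFrames * 2 samples.
    static void convertWithFade(const float *in, int16_t *out, int32_t numFrames) {
        AAudioConvert_floatToPcm16(in, out, numFrames, 2 /*samplesPerFrame*/,
                                   1.0f /*amplitude1*/, 0.5f /*amplitude2*/);
    }
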
diff --git a/media/libaaudio/src/utility/AAudioUtilities.h b/media/libaaudio/src/utility/AAudioUtilities.h
index 3dc501e..0078cbb 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.h
+++ b/media/libaaudio/src/utility/AAudioUtilities.h
@@ -35,9 +35,120 @@
*/
aaudio_result_t AAudioConvert_androidToAAudioResult(android::status_t status);
-void AAudioConvert_floatToPcm16(const float *source, int32_t numSamples, int16_t *destination);
+/**
+ * Convert an array of floats to an array of int16_t.
+ *
+ * @param source
+ * @param destination
+ * @param numSamples number of values in the array
+ * @param amplitude level between 0.0 and 1.0
+ */
+void AAudioConvert_floatToPcm16(const float *source,
+ int16_t *destination,
+ int32_t numSamples,
+ float amplitude);
-void AAudioConvert_pcm16ToFloat(const int16_t *source, int32_t numSamples, float *destination);
+/**
+ * Convert floats to int16_t and scale by a linear ramp.
+ *
+ * The ramp stops just short of reaching amplitude2 so that the next
+ * ramp can start at amplitude2 without causing a discontinuity.
+ *
+ * @param source
+ * @param destination
+ * @param numFrames
+ * @param samplesPerFrame AKA number of channels
+ * @param amplitude1 level at start of ramp, between 0.0 and 1.0
+ * @param amplitude2 level past end of ramp, between 0.0 and 1.0
+ */
+void AAudioConvert_floatToPcm16(const float *source,
+ int16_t *destination,
+ int32_t numFrames,
+ int32_t samplesPerFrame,
+ float amplitude1,
+ float amplitude2);
+
+/**
+ * Convert int16_t array to float array ranging from -1.0 to +1.0.
+ * @param source
+ * @param destination
+ * @param numSamples
+ */
+//void AAudioConvert_pcm16ToFloat(const int16_t *source, int32_t numSamples,
+// float *destination);
+
+/**
+ *
+ * Convert int16_t array to float array ranging from +/- amplitude.
+ * @param source
+ * @param destination
+ * @param numSamples
+ * @param amplitude
+ */
+void AAudioConvert_pcm16ToFloat(const int16_t *source,
+ float *destination,
+ int32_t numSamples,
+ float amplitude);
+
+/**
+ * Convert floats to int16_t and scale by a linear ramp.
+ *
+ * The ramp stops just short of reaching amplitude2 so that the next
+ * ramp can start at amplitude2 without causing a discontinuity.
+ *
+ * @param source
+ * @param destination
+ * @param numFrames
+ * @param samplesPerFrame AKA number of channels
+ * @param amplitude1 level at start of ramp, between 0.0 and 1.0
+ * @param amplitude2 level at end of ramp, between 0.0 and 1.0
+ */
+void AAudioConvert_pcm16ToFloat(const int16_t *source,
+ float *destination,
+ int32_t numFrames,
+ int32_t samplesPerFrame,
+ float amplitude1,
+ float amplitude2);
+
+/**
+ * Scale floats by a linear ramp.
+ *
+ * The ramp stops just short of reaching amplitude2 so that the next
+ * ramp can start at amplitude2 without causing a discontinuity.
+ *
+ * @param source
+ * @param destination
+ * @param numFrames
+ * @param samplesPerFrame
+ * @param amplitude1
+ * @param amplitude2
+ */
+void AAudio_linearRamp(const float *source,
+ float *destination,
+ int32_t numFrames,
+ int32_t samplesPerFrame,
+ float amplitude1,
+ float amplitude2);
+
+/**
+ * Scale int16_t's by a linear ramp.
+ *
+ * The ramp stops just short of reaching amplitude2 so that the next
+ * ramp can start at amplitude2 without causing a discontinuity.
+ *
+ * @param source
+ * @param destination
+ * @param numFrames
+ * @param samplesPerFrame
+ * @param amplitude1
+ * @param amplitude2
+ */
+void AAudio_linearRamp(const int16_t *source,
+ int16_t *destination,
+ int32_t numFrames,
+ int32_t samplesPerFrame,
+ float amplitude1,
+ float amplitude2);
/**
* Calculate the number of bytes and prevent numeric overflow.
diff --git a/media/libaaudio/src/utility/LinearRamp.cpp b/media/libaaudio/src/utility/LinearRamp.cpp
new file mode 100644
index 0000000..1714bbf
--- /dev/null
+++ b/media/libaaudio/src/utility/LinearRamp.cpp
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "LinearRamp.h"
+
+bool LinearRamp::isRamping() {
+ float target = mTarget.load();
+ if (target != mLevelTo) {
+ // Update target. Continue from previous level.
+ mLevelTo = target;
+ mRemaining = mLengthInFrames;
+ return true;
+ } else {
+ return mRemaining > 0;
+ }
+}
+
+bool LinearRamp::nextSegment(int32_t frames, float *levelFrom, float *levelTo) {
+ bool ramping = isRamping();
+ *levelFrom = mLevelFrom;
+ if (ramping) {
+ float level;
+ if (frames >= mRemaining) {
+ level = mLevelTo;
+ mRemaining = 0;
+ } else {
+ // Interpolate to a point along the full ramp.
+ level = mLevelFrom + (frames * (mLevelTo - mLevelFrom) / mRemaining);
+ mRemaining -= frames;
+ }
+ mLevelFrom = level; // for next ramp
+ *levelTo = level;
+ } else {
+ *levelTo = mLevelTo;
+ }
+ return ramping;
+}
\ No newline at end of file
diff --git a/media/libaaudio/src/utility/LinearRamp.h b/media/libaaudio/src/utility/LinearRamp.h
new file mode 100644
index 0000000..ff09dce
--- /dev/null
+++ b/media/libaaudio/src/utility/LinearRamp.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef AAUDIO_LINEAR_RAMP_H
+#define AAUDIO_LINEAR_RAMP_H
+
+#include <atomic>
+#include <stdint.h>
+
+/**
+ * Generate segments along a linear ramp.
+ * The ramp target can be updated from another thread.
+ * When the target is updated, a new ramp is started from the current position.
+ *
+ * The first ramp starts at 0.0.
+ *
+ */
+class LinearRamp {
+public:
+ LinearRamp() {
+ mTarget.store(1.0f);
+ }
+
+ void setLengthInFrames(int32_t frames) {
+ mLengthInFrames = frames;
+ }
+
+ int32_t getLengthInFrames() {
+ return mLengthInFrames;
+ }
+
+ /**
+ * Set the target level of the ramp. This may be called by another thread.
+ * @param target the level the ramp should move towards
+ */
+ void setTarget(float target) {
+ mTarget.store(target);
+ }
+
+ float getTarget() {
+ return mTarget.load();
+ }
+
+ /**
+ * Force the nextSegment to start from this level.
+ *
+ * WARNING: this can cause a discontinuity if called while the ramp is being used.
+ * Only call this when setting the initial ramp.
+ *
+ * @param level the level that the next segment will start from
+ */
+ void forceCurrent(float level) {
+ mLevelFrom = level;
+ mLevelTo = level; // forces a ramp if it does not match target
+ }
+
+ float getCurrent() {
+ return mLevelFrom;
+ }
+
+ /**
+ * Get levels for next ramp segment.
+ *
+ * @param frames number of frames in the segment
+ * @param levelFrom pointer to starting amplitude
+ * @param levelTo pointer to ending amplitude
+ * @return true if ramp is still moving towards the target
+ */
+ bool nextSegment(int32_t frames, float *levelFrom, float *levelTo);
+
+private:
+
+ bool isRamping();
+
+ std::atomic<float> mTarget;
+
+ int32_t mLengthInFrames = 48000 / 50; // 20 msec at 48000 Hz
+ int32_t mRemaining = 0;
+ float mLevelFrom = 0.0f;
+ float mLevelTo = 0.0f;
+};
+
+
+#endif //AAUDIO_LINEAR_RAMP_H
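A hypothetical render-loop pairing of LinearRamp with AAudio_linearRamp(), assuming a mono float stream and a fixed burst size; apart from the two AAudio names, everything in the sketch is illustrative.

    // Audio thread, once per burst (sketch):
    void renderBurst(LinearRamp &volumeRamp, const float *in, float *out, int32_t framesPerBurst) {
        float levelFrom = 0.0f;
        float levelTo = 0.0f;
        volumeRamp.nextSegment(framesPerBurst, &levelFrom, &levelTo);
        AAudio_linearRamp(in, out, framesPerBurst, 1 /* samplesPerFrame */, levelFrom, levelTo);
    }

    // Control thread, at any time:
    //     volumeRamp.setTarget(newVolume);
    // The next nextSegment() call then restarts the ramp from the current level.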
diff --git a/media/libaaudio/tests/Android.mk b/media/libaaudio/tests/Android.mk
index 06c9364..01360b1 100644
--- a/media/libaaudio/tests/Android.mk
+++ b/media/libaaudio/tests/Android.mk
@@ -35,3 +35,15 @@
LOCAL_STATIC_LIBRARIES := libaaudio
LOCAL_MODULE := test_block_adapter
include $(BUILD_NATIVE_TEST)
+
+include $(CLEAR_VARS)
+LOCAL_C_INCLUDES := \
+ $(call include-path-for, audio-utils) \
+ frameworks/av/media/libaaudio/include \
+ frameworks/av/media/libaaudio/src
LOCAL_SRC_FILES := test_linear_ramp.cpp
+LOCAL_SHARED_LIBRARIES := libaudioclient libaudioutils libbinder \
+ libcutils liblog libmedia libutils
+LOCAL_STATIC_LIBRARIES := libaaudio
+LOCAL_MODULE := test_linear_ramp
+include $(BUILD_NATIVE_TEST)
diff --git a/media/libaaudio/tests/test_linear_ramp.cpp b/media/libaaudio/tests/test_linear_ramp.cpp
new file mode 100644
index 0000000..5c53982
--- /dev/null
+++ b/media/libaaudio/tests/test_linear_ramp.cpp
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <iostream>
+
+#include <gtest/gtest.h>
+
+#include "utility/AAudioUtilities.h"
+#include "utility/LinearRamp.h"
+
+
+TEST(test_linear_ramp, linear_ramp_segments) {
+ LinearRamp ramp;
+ const float source[4] = {1.0f, 1.0f, 1.0f, 1.0f };
+ float destination[4] = {1.0f, 1.0f, 1.0f, 1.0f };
+
+ float levelFrom = -1.0f;
+ float levelTo = -1.0f;
+ ramp.setLengthInFrames(8);
+ ramp.setTarget(8.0f);
+
+ ASSERT_EQ(8, ramp.getLengthInFrames());
+
+ bool ramping = ramp.nextSegment(4, &levelFrom, &levelTo);
+ ASSERT_EQ(1, ramping);
+ ASSERT_EQ(0.0f, levelFrom);
+ ASSERT_EQ(4.0f, levelTo);
+
+ AAudio_linearRamp(source, destination, 4, 1, levelFrom, levelTo);
+ ASSERT_EQ(0.0f, destination[0]);
+ ASSERT_EQ(1.0f, destination[1]);
+ ASSERT_EQ(2.0f, destination[2]);
+ ASSERT_EQ(3.0f, destination[3]);
+
+ ramping = ramp.nextSegment(4, &levelFrom, &levelTo);
+ ASSERT_EQ(1, ramping);
+ ASSERT_EQ(4.0f, levelFrom);
+ ASSERT_EQ(8.0f, levelTo);
+
+ AAudio_linearRamp(source, destination, 4, 1, levelFrom, levelTo);
+ ASSERT_EQ(4.0f, destination[0]);
+ ASSERT_EQ(5.0f, destination[1]);
+ ASSERT_EQ(6.0f, destination[2]);
+ ASSERT_EQ(7.0f, destination[3]);
+
+ ramping = ramp.nextSegment(4, &levelFrom, &levelTo);
+ ASSERT_EQ(0, ramping);
+ ASSERT_EQ(8.0f, levelFrom);
+ ASSERT_EQ(8.0f, levelTo);
+
+ AAudio_linearRamp(source, destination, 4, 1, levelFrom, levelTo);
+ ASSERT_EQ(8.0f, destination[0]);
+ ASSERT_EQ(8.0f, destination[1]);
+ ASSERT_EQ(8.0f, destination[2]);
+ ASSERT_EQ(8.0f, destination[3]);
+
+};
+
+
+TEST(test_linear_ramp, linear_ramp_forced) {
+ LinearRamp ramp;
+ const float source[4] = {1.0f, 1.0f, 1.0f, 1.0f };
+ float destination[4] = {1.0f, 1.0f, 1.0f, 1.0f };
+
+ float levelFrom = -1.0f;
+ float levelTo = -1.0f;
+ ramp.setLengthInFrames(4);
+ ramp.setTarget(8.0f);
+ ramp.forceCurrent(4.0f);
+ ASSERT_EQ(4.0f, ramp.getCurrent());
+
+ bool ramping = ramp.nextSegment(4, &levelFrom, &levelTo);
+ ASSERT_EQ(1, ramping);
+ ASSERT_EQ(4.0f, levelFrom);
+ ASSERT_EQ(8.0f, levelTo);
+
+ AAudio_linearRamp(source, destination, 4, 1, levelFrom, levelTo);
+ ASSERT_EQ(4.0f, destination[0]);
+ ASSERT_EQ(5.0f, destination[1]);
+ ASSERT_EQ(6.0f, destination[2]);
+ ASSERT_EQ(7.0f, destination[3]);
+
+ ramping = ramp.nextSegment(4, &levelFrom, &levelTo);
+ ASSERT_EQ(0, ramping);
+ ASSERT_EQ(8.0f, levelFrom);
+ ASSERT_EQ(8.0f, levelTo);
+
+ AAudio_linearRamp(source, destination, 4, 1, levelFrom, levelTo);
+ ASSERT_EQ(8.0f, destination[0]);
+ ASSERT_EQ(8.0f, destination[1]);
+ ASSERT_EQ(8.0f, destination[2]);
+ ASSERT_EQ(8.0f, destination[3]);
+
+};
+
diff --git a/media/libaudiohal/Android.mk b/media/libaudiohal/Android.mk
index 68a1f7b..e592169 100644
--- a/media/libaudiohal/Android.mk
+++ b/media/libaudiohal/Android.mk
@@ -5,30 +5,37 @@
LOCAL_SHARED_LIBRARIES := \
libcutils \
liblog \
- libutils
+ libutils \
+ libhardware
+
+LOCAL_SRC_FILES := \
+ DeviceHalLocal.cpp \
+ DevicesFactoryHalHybrid.cpp \
+ DevicesFactoryHalLocal.cpp \
+ StreamHalLocal.cpp
+
+LOCAL_CFLAGS := -Wall -Werror
ifeq ($(USE_LEGACY_LOCAL_AUDIO_HAL), true)
# Use audiohal directly w/o hwbinder middleware.
# This is for performance comparison and debugging only.
-LOCAL_SRC_FILES := \
- DeviceHalLocal.cpp \
- DevicesFactoryHalLocal.cpp \
+LOCAL_SRC_FILES += \
EffectBufferHalLocal.cpp \
- EffectHalLocal.cpp \
EffectsFactoryHalLocal.cpp \
- StreamHalLocal.cpp
+ EffectHalLocal.cpp
LOCAL_SHARED_LIBRARIES += \
- libeffects \
- libhardware
+ libeffects
+
+LOCAL_CFLAGS += -DUSE_LEGACY_LOCAL_AUDIO_HAL
else # if !USE_LEGACY_LOCAL_AUDIO_HAL
-LOCAL_SRC_FILES := \
+LOCAL_SRC_FILES += \
ConversionHelperHidl.cpp \
- HalDeathHandlerHidl.cpp \
+ HalDeathHandlerHidl.cpp \
DeviceHalHidl.cpp \
DevicesFactoryHalHidl.cpp \
EffectBufferHalHidl.cpp \
@@ -60,6 +67,4 @@
LOCAL_MODULE := libaudiohal
-LOCAL_CFLAGS := -Wall -Werror
-
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libaudiohal/DevicesFactoryHalHidl.cpp b/media/libaudiohal/DevicesFactoryHalHidl.cpp
index fc2645e..31da263 100644
--- a/media/libaudiohal/DevicesFactoryHalHidl.cpp
+++ b/media/libaudiohal/DevicesFactoryHalHidl.cpp
@@ -33,11 +33,6 @@
namespace android {
-// static
-sp<DevicesFactoryHalInterface> DevicesFactoryHalInterface::create() {
- return new DevicesFactoryHalHidl();
-}
-
DevicesFactoryHalHidl::DevicesFactoryHalHidl() {
mDevicesFactory = IDevicesFactory::getService();
if (mDevicesFactory != 0) {
diff --git a/media/libaudiohal/DevicesFactoryHalHidl.h b/media/libaudiohal/DevicesFactoryHalHidl.h
index a26dec1..e2f1ad1 100644
--- a/media/libaudiohal/DevicesFactoryHalHidl.h
+++ b/media/libaudiohal/DevicesFactoryHalHidl.h
@@ -36,7 +36,7 @@
virtual status_t openDevice(const char *name, sp<DeviceHalInterface> *device);
private:
- friend class DevicesFactoryHalInterface;
+ friend class DevicesFactoryHalHybrid;
sp<IDevicesFactory> mDevicesFactory;
diff --git a/media/libaudiohal/DevicesFactoryHalHybrid.cpp b/media/libaudiohal/DevicesFactoryHalHybrid.cpp
new file mode 100644
index 0000000..454b03b
--- /dev/null
+++ b/media/libaudiohal/DevicesFactoryHalHybrid.cpp
@@ -0,0 +1,54 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "DevicesFactoryHalHybrid"
+//#define LOG_NDEBUG 0
+
+#include "DevicesFactoryHalHybrid.h"
+#include "DevicesFactoryHalLocal.h"
+#ifndef USE_LEGACY_LOCAL_AUDIO_HAL
+#include "DevicesFactoryHalHidl.h"
+#endif
+
+namespace android {
+
+// static
+sp<DevicesFactoryHalInterface> DevicesFactoryHalInterface::create() {
+ return new DevicesFactoryHalHybrid();
+}
+
+DevicesFactoryHalHybrid::DevicesFactoryHalHybrid()
+ : mLocalFactory(new DevicesFactoryHalLocal()),
+ mHidlFactory(
+#ifdef USE_LEGACY_LOCAL_AUDIO_HAL
+ nullptr
+#else
+ new DevicesFactoryHalHidl()
+#endif
+ ) {
+}
+
+DevicesFactoryHalHybrid::~DevicesFactoryHalHybrid() {
+}
+
+status_t DevicesFactoryHalHybrid::openDevice(const char *name, sp<DeviceHalInterface> *device) {
+ if (mHidlFactory != 0 && strcmp(AUDIO_HARDWARE_MODULE_ID_A2DP, name) != 0) {
+ return mHidlFactory->openDevice(name, device);
+ }
+ return mLocalFactory->openDevice(name, device);
+}
+
+} // namespace android
diff --git a/media/libaudiohal/DevicesFactoryHalHybrid.h b/media/libaudiohal/DevicesFactoryHalHybrid.h
new file mode 100644
index 0000000..abd57d6
--- /dev/null
+++ b/media/libaudiohal/DevicesFactoryHalHybrid.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_DEVICES_FACTORY_HAL_HYBRID_H
+#define ANDROID_HARDWARE_DEVICES_FACTORY_HAL_HYBRID_H
+
+#include <media/audiohal/DevicesFactoryHalInterface.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+class DevicesFactoryHalHybrid : public DevicesFactoryHalInterface
+{
+ public:
+ // Opens a device with the specified name. To close the device, it is
+ // necessary to release references to the returned object.
+ virtual status_t openDevice(const char *name, sp<DeviceHalInterface> *device);
+
+ private:
+ friend class DevicesFactoryHalInterface;
+
+ // Can not be constructed directly by clients.
+ DevicesFactoryHalHybrid();
+
+ virtual ~DevicesFactoryHalHybrid();
+
+ sp<DevicesFactoryHalInterface> mLocalFactory;
+ sp<DevicesFactoryHalInterface> mHidlFactory;
+};
+
+} // namespace android
+
+#endif // ANDROID_HARDWARE_DEVICES_FACTORY_HAL_HYBRID_H
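A sketch of how a client sees the new hybrid factory, based on the dispatch rule in DevicesFactoryHalHybrid::openDevice() above; error handling is omitted and AUDIO_HARDWARE_MODULE_ID_PRIMARY is used only as an example of a non-A2DP module name.

    sp<DevicesFactoryHalInterface> factory = DevicesFactoryHalInterface::create(); // hybrid
    sp<DeviceHalInterface> device;

    factory->openDevice(AUDIO_HARDWARE_MODULE_ID_PRIMARY, &device);
    // routed to the HIDL factory (or local, if built with USE_LEGACY_LOCAL_AUDIO_HAL)

    factory->openDevice(AUDIO_HARDWARE_MODULE_ID_A2DP, &device);
    // always routed to the local libhardware factory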
diff --git a/media/libaudiohal/DevicesFactoryHalLocal.cpp b/media/libaudiohal/DevicesFactoryHalLocal.cpp
index cd9a9e7..13a9acd 100644
--- a/media/libaudiohal/DevicesFactoryHalLocal.cpp
+++ b/media/libaudiohal/DevicesFactoryHalLocal.cpp
@@ -27,11 +27,6 @@
namespace android {
-// static
-sp<DevicesFactoryHalInterface> DevicesFactoryHalInterface::create() {
- return new DevicesFactoryHalLocal();
-}
-
static status_t load_audio_interface(const char *if_name, audio_hw_device_t **dev)
{
const hw_module_t *mod;
diff --git a/media/libaudiohal/DevicesFactoryHalLocal.h b/media/libaudiohal/DevicesFactoryHalLocal.h
index 58ce4ff..b9d18ab 100644
--- a/media/libaudiohal/DevicesFactoryHalLocal.h
+++ b/media/libaudiohal/DevicesFactoryHalLocal.h
@@ -33,7 +33,7 @@
virtual status_t openDevice(const char *name, sp<DeviceHalInterface> *device);
private:
- friend class DevicesFactoryHalInterface;
+ friend class DevicesFactoryHalHybrid;
// Can not be constructed directly by clients.
DevicesFactoryHalLocal() {}
diff --git a/media/libaudiohal/EffectBufferHalHidl.cpp b/media/libaudiohal/EffectBufferHalHidl.cpp
index d6a41a2..8b5201b 100644
--- a/media/libaudiohal/EffectBufferHalHidl.cpp
+++ b/media/libaudiohal/EffectBufferHalHidl.cpp
@@ -47,7 +47,7 @@
status_t EffectBufferHalInterface::mirror(
void* external, size_t size, sp<EffectBufferHalInterface>* buffer) {
sp<EffectBufferHalInterface> tempBuffer = new EffectBufferHalHidl(size);
- status_t result = reinterpret_cast<EffectBufferHalHidl*>(tempBuffer.get())->init();
+ status_t result = static_cast<EffectBufferHalHidl*>(tempBuffer.get())->init();
if (result == OK) {
tempBuffer->setExternalData(external);
*buffer = tempBuffer;
@@ -56,7 +56,8 @@
}
EffectBufferHalHidl::EffectBufferHalHidl(size_t size)
- : mBufferSize(size), mExternalData(nullptr), mAudioBuffer{0, {nullptr}} {
+ : mBufferSize(size), mFrameCountChanged(false),
+ mExternalData(nullptr), mAudioBuffer{0, {nullptr}} {
mHidlBuffer.id = makeUniqueId();
mHidlBuffer.frameCount = 0;
}
@@ -107,6 +108,13 @@
void EffectBufferHalHidl::setFrameCount(size_t frameCount) {
mHidlBuffer.frameCount = frameCount;
mAudioBuffer.frameCount = frameCount;
+ mFrameCountChanged = true;
+}
+
+bool EffectBufferHalHidl::checkFrameCountChange() {
+ bool result = mFrameCountChanged;
+ mFrameCountChanged = false;
+ return result;
}
void EffectBufferHalHidl::setExternalData(void* external) {
diff --git a/media/libaudiohal/EffectBufferHalHidl.h b/media/libaudiohal/EffectBufferHalHidl.h
index 6e9fd0b..66a81c2 100644
--- a/media/libaudiohal/EffectBufferHalHidl.h
+++ b/media/libaudiohal/EffectBufferHalHidl.h
@@ -37,6 +37,7 @@
virtual void setExternalData(void* external);
virtual void setFrameCount(size_t frameCount);
+ virtual bool checkFrameCountChange();
virtual void update();
virtual void commit();
@@ -51,6 +52,7 @@
static uint64_t makeUniqueId();
const size_t mBufferSize;
+ bool mFrameCountChanged;
void* mExternalData;
AudioBuffer mHidlBuffer;
sp<IMemory> mMemory;
diff --git a/media/libaudiohal/EffectBufferHalLocal.cpp b/media/libaudiohal/EffectBufferHalLocal.cpp
index 9fe2c7b..7951c8e 100644
--- a/media/libaudiohal/EffectBufferHalLocal.cpp
+++ b/media/libaudiohal/EffectBufferHalLocal.cpp
@@ -39,13 +39,13 @@
EffectBufferHalLocal::EffectBufferHalLocal(size_t size)
: mOwnBuffer(new uint8_t[size]),
- mBufferSize(size),
+ mBufferSize(size), mFrameCountChanged(false),
mAudioBuffer{0, {mOwnBuffer.get()}} {
}
EffectBufferHalLocal::EffectBufferHalLocal(void* external, size_t size)
: mOwnBuffer(nullptr),
- mBufferSize(size),
+ mBufferSize(size), mFrameCountChanged(false),
mAudioBuffer{0, {external}} {
}
@@ -62,6 +62,7 @@
void EffectBufferHalLocal::setFrameCount(size_t frameCount) {
mAudioBuffer.frameCount = frameCount;
+ mFrameCountChanged = true;
}
void EffectBufferHalLocal::setExternalData(void* external) {
@@ -69,6 +70,12 @@
mAudioBuffer.raw = external;
}
+bool EffectBufferHalLocal::checkFrameCountChange() {
+ bool result = mFrameCountChanged;
+ mFrameCountChanged = false;
+ return result;
+}
+
void EffectBufferHalLocal::update() {
}
diff --git a/media/libaudiohal/EffectBufferHalLocal.h b/media/libaudiohal/EffectBufferHalLocal.h
index 202d878..d2b624b 100644
--- a/media/libaudiohal/EffectBufferHalLocal.h
+++ b/media/libaudiohal/EffectBufferHalLocal.h
@@ -32,6 +32,7 @@
virtual void setExternalData(void* external);
virtual void setFrameCount(size_t frameCount);
+ virtual bool checkFrameCountChange();
virtual void update();
virtual void commit();
@@ -43,6 +44,7 @@
std::unique_ptr<uint8_t[]> mOwnBuffer;
const size_t mBufferSize;
+ bool mFrameCountChanged;
audio_buffer_t mAudioBuffer;
// Can not be constructed directly by clients.
diff --git a/media/libaudiohal/EffectHalHidl.cpp b/media/libaudiohal/EffectHalHidl.cpp
index 0babfda..b49b975 100644
--- a/media/libaudiohal/EffectHalHidl.cpp
+++ b/media/libaudiohal/EffectHalHidl.cpp
@@ -168,13 +168,20 @@
return OK;
}
+bool EffectHalHidl::needToResetBuffers() {
+ if (mBuffersChanged) return true;
+ bool inBufferFrameCountUpdated = mInBuffer->checkFrameCountChange();
+ bool outBufferFrameCountUpdated = mOutBuffer->checkFrameCountChange();
+ return inBufferFrameCountUpdated || outBufferFrameCountUpdated;
+}
+
status_t EffectHalHidl::processImpl(uint32_t mqFlag) {
if (mEffect == 0 || mInBuffer == 0 || mOutBuffer == 0) return NO_INIT;
status_t status;
if (!mStatusMQ && (status = prepareForProcessing()) != OK) {
return status;
}
- if (mBuffersChanged && (status = setProcessBuffers()) != OK) {
+ if (needToResetBuffers() && (status = setProcessBuffers()) != OK) {
return status;
}
// The data is already in the buffers, just need to flush it and wake up the server side.
@@ -202,8 +209,8 @@
status_t EffectHalHidl::setProcessBuffers() {
Return<Result> ret = mEffect->setProcessBuffers(
- reinterpret_cast<EffectBufferHalHidl*>(mInBuffer.get())->hidlBuffer(),
- reinterpret_cast<EffectBufferHalHidl*>(mOutBuffer.get())->hidlBuffer());
+ static_cast<EffectBufferHalHidl*>(mInBuffer.get())->hidlBuffer(),
+ static_cast<EffectBufferHalHidl*>(mOutBuffer.get())->hidlBuffer());
if (ret.isOk() && ret == Result::OK) {
mBuffersChanged = false;
return OK;
diff --git a/media/libaudiohal/EffectHalHidl.h b/media/libaudiohal/EffectHalHidl.h
index c8db36f..6ffdaf1 100644
--- a/media/libaudiohal/EffectHalHidl.h
+++ b/media/libaudiohal/EffectHalHidl.h
@@ -58,6 +58,9 @@
// Free resources on the remote side.
virtual status_t close();
+ // Whether it's a local implementation.
+ virtual bool isLocal() const { return false; }
+
uint64_t effectId() const { return mEffectId; }
static void effectDescriptorToHal(
@@ -92,6 +95,7 @@
status_t getConfigImpl(uint32_t cmdCode, uint32_t *replySize, void *pReplyData);
status_t prepareForProcessing();
+ bool needToResetBuffers();
status_t processImpl(uint32_t mqFlag);
status_t setConfigImpl(
uint32_t cmdCode, uint32_t cmdSize, void *pCmdData,
diff --git a/media/libaudiohal/EffectHalLocal.h b/media/libaudiohal/EffectHalLocal.h
index b499462..693fb50 100644
--- a/media/libaudiohal/EffectHalLocal.h
+++ b/media/libaudiohal/EffectHalLocal.h
@@ -48,6 +48,9 @@
// Free resources on the remote side.
virtual status_t close();
+ // Whether it's a local implementation.
+ virtual bool isLocal() const { return true; }
+
effect_handle_t handle() const { return mHandle; }
private:
diff --git a/media/libaudiohal/StreamHalLocal.cpp b/media/libaudiohal/StreamHalLocal.cpp
index b25e518..05800a0 100644
--- a/media/libaudiohal/StreamHalLocal.cpp
+++ b/media/libaudiohal/StreamHalLocal.cpp
@@ -79,11 +79,13 @@
}
status_t StreamHalLocal::addEffect(sp<EffectHalInterface> effect) {
+ LOG_ALWAYS_FATAL_IF(!effect->isLocal(), "Only local effects can be added for a local stream");
return mStream->add_audio_effect(mStream,
static_cast<EffectHalLocal*>(effect.get())->handle());
}
status_t StreamHalLocal::removeEffect(sp<EffectHalInterface> effect) {
+ LOG_ALWAYS_FATAL_IF(!effect->isLocal(), "Only local effects can be removed for a local stream");
return mStream->remove_audio_effect(mStream,
static_cast<EffectHalLocal*>(effect.get())->handle());
}
@@ -162,7 +164,7 @@
// correctly the case when the callback is invoked while StreamOutHalLocal's destructor is
// already running, because the destructor is invoked after the refcount has been atomically
// decremented.
- wp<StreamOutHalLocal> weakSelf(reinterpret_cast<StreamOutHalLocal*>(cookie));
+ wp<StreamOutHalLocal> weakSelf(static_cast<StreamOutHalLocal*>(cookie));
sp<StreamOutHalLocal> self = weakSelf.promote();
if (self == 0) return 0;
sp<StreamOutHalInterfaceCallback> callback = self->mCallback.promote();
diff --git a/media/libaudiohal/include/EffectBufferHalInterface.h b/media/libaudiohal/include/EffectBufferHalInterface.h
index 6fa7940..e862f6e 100644
--- a/media/libaudiohal/include/EffectBufferHalInterface.h
+++ b/media/libaudiohal/include/EffectBufferHalInterface.h
@@ -39,6 +39,8 @@
virtual void setExternalData(void* external) = 0;
virtual void setFrameCount(size_t frameCount) = 0;
+ virtual bool checkFrameCountChange() = 0; // returns whether frame count has been updated
+ // since the last call to this method
virtual void update() = 0; // copies data from the external buffer, noop for allocated buffers
virtual void commit() = 0; // copies data to the external buffer, noop for allocated buffers
diff --git a/media/libaudiohal/include/EffectHalInterface.h b/media/libaudiohal/include/EffectHalInterface.h
index 7f9a6fd..92622aa 100644
--- a/media/libaudiohal/include/EffectHalInterface.h
+++ b/media/libaudiohal/include/EffectHalInterface.h
@@ -52,6 +52,9 @@
// Free resources on the remote side.
virtual status_t close() = 0;
+ // Whether it's a local implementation.
+ virtual bool isLocal() const = 0;
+
protected:
// Subclasses can not be constructed directly by clients.
EffectHalInterface() {}
diff --git a/media/libmedia/include/media/IMediaExtractor.h b/media/libmedia/include/media/IMediaExtractor.h
index cf1b9fb..ab40f53 100644
--- a/media/libmedia/include/media/IMediaExtractor.h
+++ b/media/libmedia/include/media/IMediaExtractor.h
@@ -34,6 +34,9 @@
DECLARE_META_INTERFACE(MediaExtractor);
virtual size_t countTracks() = 0;
+ // This function could return a NULL IMediaSource even when the index is within the
+ // track count returned by countTracks(), since it's possible the track is malformed
+ // and was not detected during the countTracks() call.
virtual sp<IMediaSource> getTrack(size_t index) = 0;
enum GetTrackMetaDataFlags {
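Given the relaxed contract above, callers are expected to null-check the returned source. A minimal sketch of that caller-side check (the NuMediaExtractor change below does the same):

    sp<IMediaSource> source = extractor->getTrack(index);
    if (source == nullptr) {
        // index was valid per countTracks(), but the track itself is malformed
        return ERROR_MALFORMED;
    }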
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index fb1f42b..22b09d4 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -379,6 +379,11 @@
source.get(), mFd, (long long)mOffset, (long long)mLength);
if (source.get() != nullptr) {
mDataSource = DataSource::CreateFromIDataSource(source);
+ if (mDataSource != nullptr) {
+ // Close the local file descriptor as it is not needed anymore.
+ close(mFd);
+ mFd = -1;
+ }
} else {
ALOGW("extractor service cannot make data source");
}
@@ -390,7 +395,9 @@
ALOGD("FileSource local");
mDataSource = new FileSource(mFd, mOffset, mLength);
}
-
+ // TODO: close should always be done on mFd; see the lines following
+ // DataSource::CreateFromIDataSource above. The FileSource constructor
+ // should dup the mFd argument as needed.
mFd = -1;
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index d048777..0d4c730 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -202,7 +202,8 @@
mPaused(false),
mPausedByClient(true),
mPausedForBuffering(false),
- mIsDrmProtected(false) {
+ mIsDrmProtected(false),
+ mDataSourceType(DATA_SOURCE_TYPE_NONE) {
clearFlushComplete();
}
@@ -225,6 +226,7 @@
msg->setObject("source", new StreamingSource(notify, source));
msg->post();
+ mDataSourceType = DATA_SOURCE_TYPE_STREAM;
}
static bool IsHTTPLiveURL(const char *url) {
@@ -258,10 +260,12 @@
if (IsHTTPLiveURL(url)) {
source = new HTTPLiveSource(notify, httpService, url, headers);
ALOGV("setDataSourceAsync HTTPLiveSource %s", url);
+ mDataSourceType = DATA_SOURCE_TYPE_HTTP_LIVE;
} else if (!strncasecmp(url, "rtsp://", 7)) {
source = new RTSPSource(
notify, httpService, url, headers, mUIDValid, mUID);
ALOGV("setDataSourceAsync RTSPSource %s", url);
+ mDataSourceType = DATA_SOURCE_TYPE_RTSP;
} else if ((!strncasecmp(url, "http://", 7)
|| !strncasecmp(url, "https://", 8))
&& ((len >= 4 && !strcasecmp(".sdp", &url[len - 4]))
@@ -269,6 +273,7 @@
source = new RTSPSource(
notify, httpService, url, headers, mUIDValid, mUID, true);
ALOGV("setDataSourceAsync RTSPSource http/https/.sdp %s", url);
+ mDataSourceType = DATA_SOURCE_TYPE_RTSP;
} else {
ALOGV("setDataSourceAsync GenericSource %s", url);
@@ -282,6 +287,9 @@
} else {
ALOGE("Failed to set data source!");
}
+
+ // regardless of success/failure
+ mDataSourceType = DATA_SOURCE_TYPE_GENERIC_URL;
}
msg->setObject("source", source);
msg->post();
@@ -307,6 +315,7 @@
msg->setObject("source", source);
msg->post();
+ mDataSourceType = DATA_SOURCE_TYPE_GENERIC_FD;
}
void NuPlayer::setDataSourceAsync(const sp<DataSource> &dataSource) {
@@ -323,6 +332,7 @@
msg->setObject("source", source);
msg->post();
+ mDataSourceType = DATA_SOURCE_TYPE_MEDIA;
}
status_t NuPlayer::getDefaultBufferingSettings(
@@ -2651,6 +2661,32 @@
}
}
+const char *NuPlayer::getDataSourceType() {
+ switch (mDataSourceType) {
+ case DATA_SOURCE_TYPE_HTTP_LIVE:
+ return "HTTPLive";
+
+ case DATA_SOURCE_TYPE_RTSP:
+ return "RTSP";
+
+ case DATA_SOURCE_TYPE_GENERIC_URL:
+ return "GenURL";
+
+ case DATA_SOURCE_TYPE_GENERIC_FD:
+ return "GenFD";
+
+ case DATA_SOURCE_TYPE_MEDIA:
+ return "Media";
+
+ case DATA_SOURCE_TYPE_STREAM:
+ return "Stream";
+
+ case DATA_SOURCE_TYPE_NONE:
+ default:
+ return "None";
+ }
+ }
+
// Modular DRM begin
status_t NuPlayer::prepareDrm(const uint8_t uuid[16], const Vector<uint8_t> &drmSessionId)
{
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index d542749..c69835f 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -93,6 +93,8 @@
status_t prepareDrm(const uint8_t uuid[16], const Vector<uint8_t> &drmSessionId);
status_t releaseDrm();
+ const char *getDataSourceType();
+
protected:
virtual ~NuPlayer();
@@ -236,6 +238,18 @@
sp<ICrypto> mCrypto;
bool mIsDrmProtected;
+ typedef enum {
+ DATA_SOURCE_TYPE_NONE,
+ DATA_SOURCE_TYPE_HTTP_LIVE,
+ DATA_SOURCE_TYPE_RTSP,
+ DATA_SOURCE_TYPE_GENERIC_URL,
+ DATA_SOURCE_TYPE_GENERIC_FD,
+ DATA_SOURCE_TYPE_MEDIA,
+ DATA_SOURCE_TYPE_STREAM,
+ } DATA_SOURCE_TYPE;
+
+ std::atomic<DATA_SOURCE_TYPE> mDataSourceType;
+
inline const sp<DecoderBase> &getDecoder(bool audio) {
return audio ? mAudioDecoder : mVideoDecoder;
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index 01008b4..0c06976 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -53,6 +53,7 @@
static const char *kPlayerPlaying = "android.media.mediaplayer.playingMs";
static const char *kPlayerError = "android.media.mediaplayer.err";
static const char *kPlayerErrorCode = "android.media.mediaplayer.errcode";
+static const char *kPlayerDataSourceType = "android.media.mediaplayer.dataSource";
NuPlayerDriver::NuPlayerDriver(pid_t pid)
@@ -570,6 +571,8 @@
mAnalyticsItem->setInt64(kPlayerDuration, duration_ms);
mAnalyticsItem->setInt64(kPlayerPlaying, (mPlayingTimeUs+500)/1000 );
+
+ mAnalyticsItem->setCString(kPlayerDataSourceType, mPlayer->getDataSourceType());
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index 9fe61703..5775b43 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -1010,6 +1010,13 @@
}
// EOS
+ if (mPaused) {
+ // Do not notify EOS when paused.
+ // This is needed to avoid switching to the next clip while paused.
+ ALOGV("onDrainAudioQueue(): Do not notify EOS when paused");
+ return false;
+ }
+
int64_t postEOSDelayUs = 0;
if (mAudioSink->needsTrailingPadding()) {
postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 2877ba2..9f1be22 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -1063,6 +1063,34 @@
return err;
}
+ OMX_INDEXTYPE index;
+ err = mOMXNode->getExtensionIndex(
+ "OMX.google.android.index.AndroidNativeBufferConsumerUsage",
+ &index);
+
+ if (err != OK) {
+ // allow failure
+ err = OK;
+ } else {
+ int usageBits = 0;
+ if (nativeWindow->query(
+ nativeWindow,
+ NATIVE_WINDOW_CONSUMER_USAGE_BITS,
+ &usageBits) == OK) {
+ OMX_PARAM_U32TYPE params;
+ InitOMXParams(¶ms);
+ params.nPortIndex = kPortIndexOutput;
+ params.nU32 = (OMX_U32)usageBits;
+
+ err = mOMXNode->setParameter(index, ¶ms, sizeof(params));
+
+ if (err != OK) {
+ ALOGE("Fail to set AndroidNativeBufferConsumerUsage: %d", err);
+ return err;
+ }
+ }
+ }
+
OMX_U32 usage = 0;
err = mOMXNode->getGraphicBufferUsage(kPortIndexOutput, &usage);
if (err != 0) {
@@ -1106,7 +1134,8 @@
if (err == OK) {
err = setupNativeWindowSizeFormatAndUsage(
- mNativeWindow.get(), &mNativeWindowUsageBits, preregister /* reconnect */);
+ mNativeWindow.get(), &mNativeWindowUsageBits,
+ preregister && !mTunneled /* reconnect */);
}
if (err != OK) {
mNativeWindowUsageBits = 0;
@@ -5401,6 +5430,8 @@
ALOGE_IF("[%s] failed to release codec instance: err=%d",
mCodec->mComponentName.c_str(), err);
mCodec->mCallback->onReleaseCompleted();
+
+ mCodec->changeState(mCodec->mUninitializedState);
break;
}
diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp
index e3ca516..51f1ba3 100644
--- a/media/libstagefright/NuMediaExtractor.cpp
+++ b/media/libstagefright/NuMediaExtractor.cpp
@@ -297,6 +297,10 @@
sp<IMediaSource> source = mImpl->getTrack(index);
+ if (source == nullptr) {
+ return ERROR_MALFORMED;
+ }
+
status_t ret = source->start();
if (ret != OK) {
return ret;
diff --git a/media/libstagefright/OMXClient.cpp b/media/libstagefright/OMXClient.cpp
index 02d275b..5f9aa01 100644
--- a/media/libstagefright/OMXClient.cpp
+++ b/media/libstagefright/OMXClient.cpp
@@ -37,6 +37,10 @@
OMXClient::OMXClient() {
}
+status_t OMXClient::connect() {
+ return connect(nullptr);
+}
+
status_t OMXClient::connect(bool* trebleFlag) {
if (property_get_bool("persist.media.treble_omx", true)) {
if (trebleFlag != nullptr) {
diff --git a/media/libstagefright/SurfaceUtils.cpp b/media/libstagefright/SurfaceUtils.cpp
index b6b315d..b7c1598 100644
--- a/media/libstagefright/SurfaceUtils.cpp
+++ b/media/libstagefright/SurfaceUtils.cpp
@@ -91,9 +91,19 @@
return err;
}
- // Check if the ANativeWindow uses hardware protected buffers.
- if (queuesToNativeWindow != 1 && !(consumerUsage & GRALLOC_USAGE_PROTECTED)) {
- ALOGE("native window could not be authenticated");
+ // Check if the consumer end of the ANativeWindow can handle protected content.
+ int isConsumerProtected = 0;
+ err = nativeWindow->query(
+ nativeWindow, NATIVE_WINDOW_CONSUMER_IS_PROTECTED, &isConsumerProtected);
+ if (err != NO_ERROR) {
+ ALOGE("error query native window: %s (%d)", strerror(-err), -err);
+ return err;
+ }
+
+ // Deny queuing into native window if neither condition is satisfied.
+ if (queuesToNativeWindow != 1 && isConsumerProtected != 1) {
+ ALOGE("native window cannot handle protected buffers: the consumer should either be "
+ "a hardware composer or support hardware protection");
return PERMISSION_DENIED;
}
}
diff --git a/media/libstagefright/include/OMXClient.h b/media/libstagefright/include/OMXClient.h
index 315f19b..203a181 100644
--- a/media/libstagefright/include/OMXClient.h
+++ b/media/libstagefright/include/OMXClient.h
@@ -26,7 +26,9 @@
public:
OMXClient();
- status_t connect(bool* trebleFlag = nullptr);
+ status_t connect();
+ status_t connect(bool* trebleFlag);
+
status_t connectLegacy();
status_t connectTreble();
void disconnect();
diff --git a/media/libstagefright/omx/1.0/WGraphicBufferProducer.cpp b/media/libstagefright/omx/1.0/WGraphicBufferProducer.cpp
index 36bd624..650db8e 100644
--- a/media/libstagefright/omx/1.0/WGraphicBufferProducer.cpp
+++ b/media/libstagefright/omx/1.0/WGraphicBufferProducer.cpp
@@ -248,7 +248,7 @@
Return<int32_t> TWGraphicBufferProducer::setSidebandStream(const hidl_handle& stream) {
return static_cast<int32_t>(mBase->setSidebandStream(NativeHandle::create(
- native_handle_clone(stream), true)));
+ stream ? native_handle_clone(stream) : NULL, true)));
}
Return<void> TWGraphicBufferProducer::allocateBuffers(
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index d0f64ca..db99ef2 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -1861,7 +1861,7 @@
}
void OMXNodeInstance::codecBufferFilled(omx_message &msg) {
- Mutex::Autolock autoLock(mBufferIDLock);
+ Mutex::Autolock autoLock(mLock);
if (mMaxTimestampGapUs <= 0ll || mRestorePtsFailed) {
return;
diff --git a/media/ndk/NdkImage.cpp b/media/ndk/NdkImage.cpp
index 95fdf36..6d28d1b 100644
--- a/media/ndk/NdkImage.cpp
+++ b/media/ndk/NdkImage.cpp
@@ -31,12 +31,10 @@
#define ALIGN(x, mask) ( ((x) + (mask) - 1) & ~((mask) - 1) )
-AImage::AImage(AImageReader* reader, int32_t format, uint64_t usage0, uint64_t usage1,
- BufferItem* buffer, int64_t timestamp,
- int32_t width, int32_t height, int32_t numPlanes) :
- mReader(reader), mFormat(format), mUsage0(usage0), mUsage1(usage1),
- mBuffer(buffer), mLockedBuffer(nullptr), mTimestamp(timestamp),
- mWidth(width), mHeight(height), mNumPlanes(numPlanes) {
+AImage::AImage(AImageReader* reader, int32_t format, uint64_t usage, BufferItem* buffer,
+ int64_t timestamp, int32_t width, int32_t height, int32_t numPlanes) :
+ mReader(reader), mFormat(format), mUsage(usage), mBuffer(buffer), mLockedBuffer(nullptr),
+ mTimestamp(timestamp), mWidth(width), mHeight(height), mNumPlanes(numPlanes) {
}
// Can only be called by free() with mLock hold
@@ -178,9 +176,9 @@
return AMEDIA_ERROR_INVALID_OBJECT;
}
- if ((mUsage0 & AHARDWAREBUFFER_USAGE0_CPU_READ_OFTEN) == 0) {
+ if ((mUsage & AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN) == 0) {
ALOGE("%s: AImage %p does not have any software read usage bits set, usage=%" PRIu64 "",
- __FUNCTION__, this, mUsage0);
+ __FUNCTION__, this, mUsage);
return AMEDIA_IMGREADER_CANNOT_LOCK_IMAGE;
}
@@ -191,13 +189,10 @@
auto lockedBuffer = std::make_unique<CpuConsumer::LockedBuffer>();
- uint64_t producerUsage;
- uint64_t consumerUsage;
- android_hardware_HardwareBuffer_convertToGrallocUsageBits(
- &producerUsage, &consumerUsage, mUsage0, mUsage1);
+ uint64_t grallocUsage = android_hardware_HardwareBuffer_convertToGrallocUsageBits(mUsage);
status_t ret =
- lockImageFromBuffer(mBuffer, consumerUsage, mBuffer->mFence->dup(), lockedBuffer.get());
+ lockImageFromBuffer(mBuffer, grallocUsage, mBuffer->mFence->dup(), lockedBuffer.get());
if (ret != OK) {
ALOGE("%s: AImage %p failed to lock, error=%d", __FUNCTION__, this, ret);
return AMEDIA_IMGREADER_CANNOT_LOCK_IMAGE;
diff --git a/media/ndk/NdkImagePriv.h b/media/ndk/NdkImagePriv.h
index 1fcb495..e9073d5 100644
--- a/media/ndk/NdkImagePriv.h
+++ b/media/ndk/NdkImagePriv.h
@@ -32,9 +32,8 @@
// TODO: this only supports ImageReader
struct AImage {
- AImage(AImageReader* reader, int32_t format, uint64_t usage0, uint64_t usage1,
- BufferItem* buffer, int64_t timestamp,
- int32_t width, int32_t height, int32_t numPlanes);
+ AImage(AImageReader* reader, int32_t format, uint64_t usage, BufferItem* buffer,
+ int64_t timestamp, int32_t width, int32_t height, int32_t numPlanes);
// free all resources while keeping object alive. Caller must obtain reader lock
void close() { close(-1); }
@@ -75,8 +74,7 @@
// When reader is close, AImage will only accept close API call
wp<AImageReader> mReader;
const int32_t mFormat;
- const uint64_t mUsage0; // AHARDWAREBUFFER_USAGE0* flags.
- const uint64_t mUsage1; // AHARDWAREBUFFER_USAGE1* flags.
+ const uint64_t mUsage; // AHARDWAREBUFFER_USAGE_* flags.
BufferItem* mBuffer;
std::unique_ptr<CpuConsumer::LockedBuffer> mLockedBuffer;
const int64_t mTimestamp;
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index c449611..5d1a20b 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -238,14 +238,12 @@
AImageReader::AImageReader(int32_t width,
int32_t height,
int32_t format,
- uint64_t usage0,
- uint64_t usage1,
+ uint64_t usage,
int32_t maxImages)
: mWidth(width),
mHeight(height),
mFormat(format),
- mUsage0(usage0),
- mUsage1(usage1),
+ mUsage(usage),
mMaxImages(maxImages),
mNumPlanes(getNumPlanesForFormat(format)),
mFrameListener(new FrameListener(this)),
@@ -256,20 +254,14 @@
PublicFormat publicFormat = static_cast<PublicFormat>(mFormat);
mHalFormat = android_view_Surface_mapPublicFormatToHalFormat(publicFormat);
mHalDataSpace = android_view_Surface_mapPublicFormatToHalDataspace(publicFormat);
-
- uint64_t producerUsage;
- uint64_t consumerUsage;
- android_hardware_HardwareBuffer_convertToGrallocUsageBits(
- &producerUsage, &consumerUsage, mUsage0, mUsage1);
- // Strip out producerUsage here.
- mHalUsage = android_convertGralloc1To0Usage(0, consumerUsage);
+ mHalUsage = android_hardware_HardwareBuffer_convertToGrallocUsageBits(mUsage);
sp<IGraphicBufferProducer> gbProducer;
sp<IGraphicBufferConsumer> gbConsumer;
BufferQueue::createBufferQueue(&gbProducer, &gbConsumer);
- String8 consumerName = String8::format("ImageReader-%dx%df%xu%" PRIu64 "u%" PRIu64 "m%d-%d-%d",
- mWidth, mHeight, mFormat, mUsage0, mUsage1, mMaxImages, getpid(),
+ String8 consumerName = String8::format("ImageReader-%dx%df%xu%" PRIu64 "m%d-%d-%d",
+ mWidth, mHeight, mFormat, mUsage, mMaxImages, getpid(),
createProcessUniqueId());
mBufferItemConsumer =
@@ -445,10 +437,10 @@
}
if (mHalFormat == HAL_PIXEL_FORMAT_BLOB) {
- *image = new AImage(this, mFormat, mUsage0, mUsage1, buffer, buffer->mTimestamp,
+ *image = new AImage(this, mFormat, mUsage, buffer, buffer->mTimestamp,
readerWidth, readerHeight, mNumPlanes);
} else {
- *image = new AImage(this, mFormat, mUsage0, mUsage1, buffer, buffer->mTimestamp,
+ *image = new AImage(this, mFormat, mUsage, buffer, buffer->mTimestamp,
bufferWidth, bufferHeight, mNumPlanes);
}
mAcquiredImages.push_back(*image);
@@ -587,12 +579,12 @@
/*out*/AImageReader** reader) {
ALOGV("%s", __FUNCTION__);
return AImageReader_newWithUsage(
- width, height, format, AHARDWAREBUFFER_USAGE0_CPU_READ_OFTEN, 0, maxImages, reader);
+ width, height, format, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, maxImages, reader);
}
EXPORT
media_status_t AImageReader_newWithUsage(
- int32_t width, int32_t height, int32_t format, uint64_t usage0, uint64_t usage1,
+ int32_t width, int32_t height, int32_t format, uint64_t usage,
int32_t maxImages, /*out*/ AImageReader** reader) {
ALOGV("%s", __FUNCTION__);
@@ -626,7 +618,7 @@
}
AImageReader* tmpReader = new AImageReader(
- width, height, format, usage0, usage1, maxImages);
+ width, height, format, usage, maxImages);
if (tmpReader == nullptr) {
ALOGE("%s: AImageReader allocation failed", __FUNCTION__);
return AMEDIA_ERROR_UNKNOWN;
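With the two usage words collapsed into a single AHARDWAREBUFFER_USAGE bitfield, creating a CPU-readable reader looks roughly like the sketch below; the size, format, and maxImages values are arbitrary examples.

    AImageReader *reader = nullptr;
    media_status_t status = AImageReader_newWithUsage(
            640, 480, AIMAGE_FORMAT_YUV_420_888,
            AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN,
            4 /* maxImages */, &reader);
    if (status != AMEDIA_OK) {
        // handle error
    }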
diff --git a/media/ndk/NdkImageReaderPriv.h b/media/ndk/NdkImageReaderPriv.h
index a233ec8..35af169 100644
--- a/media/ndk/NdkImageReaderPriv.h
+++ b/media/ndk/NdkImageReaderPriv.h
@@ -55,8 +55,7 @@
AImageReader(int32_t width,
int32_t height,
int32_t format,
- uint64_t usage0,
- uint64_t usage1,
+ uint64_t usage,
int32_t maxImages);
~AImageReader();
@@ -117,8 +116,7 @@
const int32_t mWidth;
const int32_t mHeight;
const int32_t mFormat;
- const uint64_t mUsage0; // AHARDWAREBUFFER_USAGE0* flags.
- const uint64_t mUsage1; // AHARDWAREBUFFER_USAGE1* flags.
+ const uint64_t mUsage; // AHARDWAREBUFFER_USAGE_* flags.
const int32_t mMaxImages;
// TODO(jwcai) Seems completely unused in AImageReader class.
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 3665875..a6857fe 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -2962,6 +2962,10 @@
// and then that string will be logged at the next convenient opportunity.
const char *logString = NULL;
+ // Estimated time for the next buffer to be written to the HAL. This is used only in
+ // suspended mode (for now) to help schedule the wait time until the next iteration.
+ nsecs_t timeLoopNextNs = 0;
+
checkSilentMode_l();
#if 0
int z = 0; // used in logFormat example
@@ -3327,6 +3331,29 @@
} else {
ATRACE_BEGIN("sleep");
Mutex::Autolock _l(mLock);
+ // Suspended mode requires accurate metering of sleep time.
+ if (isSuspended()) {
+ // advance by expected sleepTime
+ timeLoopNextNs += microseconds((nsecs_t)mSleepTimeUs);
+ const nsecs_t nowNs = systemTime();
+
+ // compute expected next time vs current time.
+ // (negative deltas are treated as delays).
+ nsecs_t deltaNs = timeLoopNextNs - nowNs;
+ if (deltaNs < -kMaxNextBufferDelayNs) {
+ // Delays longer than the max allowed trigger a reset.
+ ALOGV("DelayNs: %lld, resetting timeLoopNextNs", (long long) deltaNs);
+ deltaNs = microseconds((nsecs_t)mSleepTimeUs);
+ timeLoopNextNs = nowNs + deltaNs;
+ } else if (deltaNs < 0) {
+ // Delays within the max delay allowed: zero the delta/sleepTime
+ // to help the system catch up in the next iteration(s)
+ ALOGV("DelayNs: %lld, catching-up", (long long) deltaNs);
+ deltaNs = 0;
+ }
+ // update sleep time (which is >= 0)
+ mSleepTimeUs = deltaNs / 1000;
+ }
if (!mSignalPending && mConfigEvents.isEmpty() && !exitPending()) {
mWaitWorkCV.waitRelative(mLock, microseconds((nsecs_t)mSleepTimeUs));
}
@@ -8296,22 +8323,24 @@
mEffectChains[0]->setVolume_l(&vol, &vol);
volume = (float)vol / (1 << 24);
}
-
- mOutput->stream->setVolume(volume, volume);
-
- sp<MmapStreamCallback> callback = mCallback.promote();
- if (callback != 0) {
- int channelCount;
- if (isOutput()) {
- channelCount = audio_channel_count_from_out_mask(mChannelMask);
+ // Try to use HW volume control and fall back to SW control if not implemented
+ if (mOutput->stream->setVolume(volume, volume) != NO_ERROR) {
+ sp<MmapStreamCallback> callback = mCallback.promote();
+ if (callback != 0) {
+ int channelCount;
+ if (isOutput()) {
+ channelCount = audio_channel_count_from_out_mask(mChannelMask);
+ } else {
+ channelCount = audio_channel_count_from_in_mask(mChannelMask);
+ }
+ Vector<float> values;
+ for (int i = 0; i < channelCount; i++) {
+ values.add(volume);
+ }
+ callback->onVolumeChanged(mChannelMask, values);
} else {
- channelCount = audio_channel_count_from_in_mask(mChannelMask);
+ ALOGW("Could not set MMAP stream volume: no volume callback!");
}
- Vector<float> values;
- for (int i = 0; i < channelCount; i++) {
- values.add(volume);
- }
- callback->onVolumeChanged(mChannelMask, values);
}
}
}
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 7469710..80b368e 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -623,6 +623,12 @@
// 14 tracks max per client allows for 2 misbehaving application leaving 4 available tracks.
static const uint32_t kMaxTracksPerUid = 14;
+ // Maximum delay (in nanoseconds) allowed for the next buffer in suspended mode;
+ // if the delay is greater, the estimate kept in timeLoopNextNs is reset.
+ // This allows catch-up for small delays, while resetting the estimate
+ // for initial conditions or large delays.
+ static const nsecs_t kMaxNextBufferDelayNs = 100000000;
+
PlaybackThread(const sp<AudioFlinger>& audioFlinger, AudioStreamOut* output,
audio_io_handle_t id, audio_devices_t device, type_t type, bool systemReady);
virtual ~PlaybackThread();
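To make the suspended-mode sleep adjustment above concrete, here is the same policy pulled out into a stand-alone helper with a worked example. This is an illustration of the logic added to PlaybackThread::threadLoop(), not code from this patch.

    // deltaNs = expected write time minus now; negative means the loop woke up late.
    static int64_t adjustedSleepNs(int64_t deltaNs, int64_t nominalSleepNs) {
        const int64_t kMaxNextBufferDelayNs = 100000000;             // 100 ms, as in Threads.h
        if (deltaNs < -kMaxNextBufferDelayNs) return nominalSleepNs; // very late: reset estimate
        if (deltaNs < 0) return 0;                                   // slightly late: catch up
        return deltaNs;                                              // on time: wait the remainder
    }
    // With 20 ms bursts: waking 5 ms late sleeps 0 so the next burst catches up;
    // waking 150 ms late sleeps the full 20 ms and the estimate restarts from "now".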
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 5480513..8fefb36 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -597,7 +597,9 @@
status_t status = mServerProxy->obtainBuffer(&buf);
buffer->frameCount = buf.mFrameCount;
buffer->raw = buf.mRaw;
- if (buf.mFrameCount == 0) {
+ if (buf.mFrameCount == 0 && !isStopping() && !isStopped() && !isPaused()) {
+ ALOGV("underrun, framesReady(%zu) < framesDesired(%zd), state: %d",
+ buf.mFrameCount, desiredFrames, mState);
mAudioTrackServerProxy->tallyUnderrunFrames(desiredFrames);
} else {
mAudioTrackServerProxy->tallyUnderrunFrames(0);
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index e3a23f9..aaa6134 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -1007,11 +1007,14 @@
if (offloadInfo != NULL) {
config.offload_info = *offloadInfo;
}
+ DeviceVector outputDevices = mAvailableOutputDevices.getDevicesFromType(device);
+ String8 address = outputDevices.size() > 0 ? outputDevices.itemAt(0)->mAddress
+ : String8("");
status = mpClientInterface->openOutput(profile->getModuleHandle(),
&output,
&config,
&outputDesc->mDevice,
- String8(""),
+ address,
&outputDesc->mLatency,
outputDesc->mFlags);
@@ -1699,6 +1702,12 @@
config.channel_mask = profileChannelMask;
config.format = profileFormat;
+ if (address == "") {
+ DeviceVector inputDevices = mAvailableInputDevices.getDevicesFromType(device);
+ // the inputs vector must be of size 1, but we don't want to crash here
+ address = inputDevices.size() > 0 ? inputDevices.itemAt(0)->mAddress : String8("");
+ }
+
status_t status = mpClientInterface->openInput(profile->getModuleHandle(),
&input,
&config,
diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk
index 2d78f99..ef2f3b3 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/services/camera/libcameraservice/Android.mk
@@ -27,7 +27,6 @@
CameraFlashlight.cpp \
common/Camera2ClientBase.cpp \
common/CameraDeviceBase.cpp \
- common/CameraModule.cpp \
common/CameraProviderManager.cpp \
common/FrameProcessorBase.cpp \
api1/CameraClient.cpp \
diff --git a/services/camera/libcameraservice/CameraFlashlight.cpp b/services/camera/libcameraservice/CameraFlashlight.cpp
index 4537ae6..836972a 100644
--- a/services/camera/libcameraservice/CameraFlashlight.cpp
+++ b/services/camera/libcameraservice/CameraFlashlight.cpp
@@ -36,16 +36,9 @@
// CameraFlashlight implementation begins
// used by camera service to control flashflight.
/////////////////////////////////////////////////////////////////////
-CameraFlashlight::CameraFlashlight(CameraModule* cameraModule,
- camera_module_callbacks_t* callbacks) :
- mCameraModule(cameraModule),
- mCallbacks(callbacks),
- mFlashlightMapInitialized(false) {
-}
CameraFlashlight::CameraFlashlight(sp<CameraProviderManager> providerManager,
camera_module_callbacks_t* callbacks) :
- mCameraModule(nullptr),
mProviderManager(providerManager),
mCallbacks(callbacks),
mFlashlightMapInitialized(false) {
@@ -61,52 +54,12 @@
return INVALID_OPERATION;
}
- status_t res = OK;
-
- if (mCameraModule == nullptr) {
- if (mProviderManager->supportSetTorchMode(cameraId.string())) {
- mFlashControl = new ProviderFlashControl(mProviderManager);
- } else {
- // Only HAL1 devices do not support setTorchMode
- mFlashControl =
- new CameraHardwareInterfaceFlashControl(mProviderManager, *mCallbacks);
- }
- } else if (mCameraModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_4) {
- mFlashControl = new ModuleFlashControl(*mCameraModule);
- if (mFlashControl == NULL) {
- ALOGV("%s: cannot create flash control for module api v2.4+",
- __FUNCTION__);
- return NO_MEMORY;
- }
+ if (mProviderManager->supportSetTorchMode(cameraId.string())) {
+ mFlashControl = new ProviderFlashControl(mProviderManager);
} else {
- uint32_t deviceVersion = CAMERA_DEVICE_API_VERSION_1_0;
-
- if (mCameraModule->getModuleApiVersion() >=
- CAMERA_MODULE_API_VERSION_2_0) {
- camera_info info;
- res = mCameraModule->getCameraInfo(
- atoi(cameraId.string()), &info);
- if (res) {
- ALOGE("%s: failed to get camera info for camera %s",
- __FUNCTION__, cameraId.string());
- return res;
- }
- deviceVersion = info.device_version;
- }
-
- if (deviceVersion >= CAMERA_DEVICE_API_VERSION_3_0) {
- CameraDeviceClientFlashControl *flashControl =
- new CameraDeviceClientFlashControl(*mCameraModule,
- *mCallbacks);
- if (!flashControl) {
- return NO_MEMORY;
- }
-
- mFlashControl = flashControl;
- } else {
- mFlashControl =
- new CameraHardwareInterfaceFlashControl(mCameraModule, *mCallbacks);
- }
+ // Only HAL1 devices do not support setTorchMode
+ mFlashControl =
+ new CameraHardwareInterfaceFlashControl(mProviderManager, *mCallbacks);
}
return OK;
@@ -170,11 +123,7 @@
}
int CameraFlashlight::getNumberOfCameras() {
- if (mCameraModule) {
- return mCameraModule->getNumberOfCameras();
- } else {
- return mProviderManager->getStandardCameraCount();
- }
+ return mProviderManager->getAPI1CompatibleCameraCount();
}
status_t CameraFlashlight::findFlashUnits() {
@@ -184,16 +133,10 @@
std::vector<String8> cameraIds;
int numberOfCameras = getNumberOfCameras();
cameraIds.resize(numberOfCameras);
- if (mCameraModule) {
- for (size_t i = 0; i < cameraIds.size(); i++) {
- cameraIds[i] = String8::format("%zu", i);
- }
- } else {
- // No module, must be provider
- std::vector<std::string> ids = mProviderManager->getStandardCameraDeviceIds();
- for (size_t i = 0; i < cameraIds.size(); i++) {
- cameraIds[i] = String8(ids[i].c_str());
- }
+ // No module, must be provider
+ std::vector<std::string> ids = mProviderManager->getAPI1CompatibleCameraDeviceIds();
+ for (size_t i = 0; i < cameraIds.size(); i++) {
+ cameraIds[i] = String8(ids[i].c_str());
}
mHasFlashlightMap.clear();
@@ -251,9 +194,7 @@
bool CameraFlashlight::isBackwardCompatibleMode(const String8& cameraId) {
bool backwardCompatibleMode = false;
- if (mCameraModule && mCameraModule->getModuleApiVersion() < CAMERA_MODULE_API_VERSION_2_4) {
- backwardCompatibleMode = true;
- } else if (mProviderManager != nullptr &&
+ if (mProviderManager != nullptr &&
!mProviderManager->supportSetTorchMode(cameraId.string())) {
backwardCompatibleMode = true;
}
@@ -368,366 +309,13 @@
// ProviderFlashControl implementation ends
/////////////////////////////////////////////////////////////////////
-// ModuleFlashControl implementation begins
-// Flash control for camera module v2.4 and above.
-/////////////////////////////////////////////////////////////////////
-ModuleFlashControl::ModuleFlashControl(CameraModule& cameraModule) :
- mCameraModule(&cameraModule) {
-}
-
-ModuleFlashControl::~ModuleFlashControl() {
-}
-
-status_t ModuleFlashControl::hasFlashUnit(const String8& cameraId, bool *hasFlash) {
- if (!hasFlash) {
- return BAD_VALUE;
- }
-
- *hasFlash = false;
- Mutex::Autolock l(mLock);
-
- camera_info info;
- status_t res = mCameraModule->getCameraInfo(atoi(cameraId.string()),
- &info);
- if (res != 0) {
- return res;
- }
-
- CameraMetadata metadata;
- metadata = info.static_camera_characteristics;
- camera_metadata_entry flashAvailable =
- metadata.find(ANDROID_FLASH_INFO_AVAILABLE);
- if (flashAvailable.count == 1 && flashAvailable.data.u8[0] == 1) {
- *hasFlash = true;
- }
-
- return OK;
-}
-
-status_t ModuleFlashControl::setTorchMode(const String8& cameraId, bool enabled) {
- ALOGV("%s: set camera %s torch mode to %d", __FUNCTION__,
- cameraId.string(), enabled);
-
- Mutex::Autolock l(mLock);
- return mCameraModule->setTorchMode(cameraId.string(), enabled);
-}
-// ModuleFlashControl implementation ends
-
-/////////////////////////////////////////////////////////////////////
-// CameraDeviceClientFlashControl implementation begins
-// Flash control for camera module <= v2.3 and camera HAL v2-v3
-/////////////////////////////////////////////////////////////////////
-CameraDeviceClientFlashControl::CameraDeviceClientFlashControl(
- CameraModule& cameraModule,
- const camera_module_callbacks_t& callbacks) :
- mCameraModule(&cameraModule),
- mCallbacks(&callbacks),
- mTorchEnabled(false),
- mMetadata(NULL),
- mStreaming(false) {
-}
-
-CameraDeviceClientFlashControl::~CameraDeviceClientFlashControl() {
- disconnectCameraDevice();
- if (mMetadata) {
- delete mMetadata;
- }
-
- mSurface.clear();
- mSurfaceTexture.clear();
- mProducer.clear();
- mConsumer.clear();
-
- if (mTorchEnabled) {
- if (mCallbacks) {
- ALOGV("%s: notify the framework that torch was turned off",
- __FUNCTION__);
- mCallbacks->torch_mode_status_change(mCallbacks,
- mCameraId.string(), TORCH_MODE_STATUS_AVAILABLE_OFF);
- }
- }
-}
-
-status_t CameraDeviceClientFlashControl::initializeSurface(
- sp<CameraDeviceBase> &device, int32_t width, int32_t height) {
- status_t res;
- BufferQueue::createBufferQueue(&mProducer, &mConsumer);
-
- mSurfaceTexture = new GLConsumer(mConsumer, 0, GLConsumer::TEXTURE_EXTERNAL,
- true, true);
- if (mSurfaceTexture == NULL) {
- return NO_MEMORY;
- }
-
- int32_t format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
- res = mSurfaceTexture->setDefaultBufferSize(width, height);
- if (res) {
- return res;
- }
- res = mSurfaceTexture->setDefaultBufferFormat(format);
- if (res) {
- return res;
- }
-
- mSurface = new Surface(mProducer, /*useAsync*/ true);
- if (mSurface == NULL) {
- return NO_MEMORY;
- }
- res = device->createStream(mSurface, width, height, format,
- HAL_DATASPACE_UNKNOWN, CAMERA3_STREAM_ROTATION_0, &mStreamId);
- if (res) {
- return res;
- }
-
- res = device->configureStreams();
- if (res) {
- return res;
- }
-
- return res;
-}
-
-status_t CameraDeviceClientFlashControl::getSmallestSurfaceSize(
- const camera_info& info, int32_t *width, int32_t *height) {
- if (!width || !height) {
- return BAD_VALUE;
- }
-
- int32_t w = INT32_MAX;
- int32_t h = 1;
-
- CameraMetadata metadata;
- metadata = info.static_camera_characteristics;
- camera_metadata_entry streamConfigs =
- metadata.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
- for (size_t i = 0; i < streamConfigs.count; i += 4) {
- int32_t fmt = streamConfigs.data.i32[i];
- if (fmt == ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED) {
- int32_t ww = streamConfigs.data.i32[i + 1];
- int32_t hh = streamConfigs.data.i32[i + 2];
-
- if (w * h > ww * hh) {
- w = ww;
- h = hh;
- }
- }
- }
-
- // if stream configuration is not found, try available processed sizes.
- if (streamConfigs.count == 0) {
- camera_metadata_entry availableProcessedSizes =
- metadata.find(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES);
- for (size_t i = 0; i < availableProcessedSizes.count; i += 2) {
- int32_t ww = availableProcessedSizes.data.i32[i];
- int32_t hh = availableProcessedSizes.data.i32[i + 1];
- if (w * h > ww * hh) {
- w = ww;
- h = hh;
- }
- }
- }
-
- if (w == INT32_MAX) {
- return NAME_NOT_FOUND;
- }
-
- *width = w;
- *height = h;
-
- return OK;
-}
-
-status_t CameraDeviceClientFlashControl::connectCameraDevice(
- const String8& cameraId) {
- camera_info info;
- status_t res = mCameraModule->getCameraInfo(atoi(cameraId.string()), &info);
- if (res != 0) {
- ALOGE("%s: failed to get camera info for camera %s", __FUNCTION__,
- cameraId.string());
- return res;
- }
-
- sp<CameraDeviceBase> device =
- new Camera3Device(cameraId);
- if (device == NULL) {
- return NO_MEMORY;
- }
-
- res = device->initialize(mCameraModule);
- if (res) {
- return res;
- }
-
- int32_t width, height;
- res = getSmallestSurfaceSize(info, &width, &height);
- if (res) {
- return res;
- }
- res = initializeSurface(device, width, height);
- if (res) {
- return res;
- }
-
- mCameraId = cameraId;
- mStreaming = (info.device_version <= CAMERA_DEVICE_API_VERSION_3_1);
- mDevice = device;
-
- return OK;
-}
-
-status_t CameraDeviceClientFlashControl::disconnectCameraDevice() {
- if (mDevice != NULL) {
- mDevice->disconnect();
- mDevice.clear();
- }
-
- return OK;
-}
-
-
-
-status_t CameraDeviceClientFlashControl::hasFlashUnit(const String8& cameraId,
- bool *hasFlash) {
- ALOGV("%s: checking if camera %s has a flash unit", __FUNCTION__,
- cameraId.string());
-
- Mutex::Autolock l(mLock);
- return hasFlashUnitLocked(cameraId, hasFlash);
-
-}
-
-status_t CameraDeviceClientFlashControl::hasFlashUnitLocked(
- const String8& cameraId, bool *hasFlash) {
- if (!hasFlash) {
- return BAD_VALUE;
- }
-
- camera_info info;
- status_t res = mCameraModule->getCameraInfo(
- atoi(cameraId.string()), &info);
- if (res != 0) {
- ALOGE("%s: failed to get camera info for camera %s", __FUNCTION__,
- cameraId.string());
- return res;
- }
-
- CameraMetadata metadata;
- metadata = info.static_camera_characteristics;
- camera_metadata_entry flashAvailable =
- metadata.find(ANDROID_FLASH_INFO_AVAILABLE);
- if (flashAvailable.count == 1 && flashAvailable.data.u8[0] == 1) {
- *hasFlash = true;
- }
-
- return OK;
-}
-
-status_t CameraDeviceClientFlashControl::submitTorchEnabledRequest() {
- status_t res;
-
- if (mMetadata == NULL) {
- mMetadata = new CameraMetadata();
- if (mMetadata == NULL) {
- return NO_MEMORY;
- }
- res = mDevice->createDefaultRequest(
- CAMERA3_TEMPLATE_PREVIEW, mMetadata);
- if (res) {
- return res;
- }
- }
-
- uint8_t torchOn = ANDROID_FLASH_MODE_TORCH;
- mMetadata->update(ANDROID_FLASH_MODE, &torchOn, 1);
- mMetadata->update(ANDROID_REQUEST_OUTPUT_STREAMS, &mStreamId, 1);
-
- uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
- mMetadata->update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
-
- int32_t requestId = 0;
- mMetadata->update(ANDROID_REQUEST_ID, &requestId, 1);
-
- if (mStreaming) {
- res = mDevice->setStreamingRequest(*mMetadata);
- } else {
- res = mDevice->capture(*mMetadata);
- }
- return res;
-}
-
-
-
-
-status_t CameraDeviceClientFlashControl::setTorchMode(
- const String8& cameraId, bool enabled) {
- bool hasFlash = false;
-
- Mutex::Autolock l(mLock);
- status_t res = hasFlashUnitLocked(cameraId, &hasFlash);
-
- // pre-check
- if (enabled) {
- // invalid camera?
- if (res) {
- return -EINVAL;
- }
- // no flash unit?
- if (!hasFlash) {
- return -ENOSYS;
- }
- // already opened for a different device?
- if (mDevice != NULL && cameraId != mCameraId) {
- return BAD_INDEX;
- }
- } else if (mDevice == NULL || cameraId != mCameraId) {
- // disabling the torch mode of an un-opened or different device.
- return OK;
- } else {
- // disabling the torch mode of currently opened device
- disconnectCameraDevice();
- mTorchEnabled = false;
- mCallbacks->torch_mode_status_change(mCallbacks,
- cameraId.string(), TORCH_MODE_STATUS_AVAILABLE_OFF);
- return OK;
- }
-
- if (mDevice == NULL) {
- res = connectCameraDevice(cameraId);
- if (res) {
- return res;
- }
- }
-
- res = submitTorchEnabledRequest();
- if (res) {
- return res;
- }
-
- mTorchEnabled = true;
- mCallbacks->torch_mode_status_change(mCallbacks,
- cameraId.string(), TORCH_MODE_STATUS_AVAILABLE_ON);
- return OK;
-}
-// CameraDeviceClientFlashControl implementation ends
-
-
-/////////////////////////////////////////////////////////////////////
// CameraHardwareInterfaceFlashControl implementation begins
// Flash control for camera module <= v2.3 and camera HAL v1
/////////////////////////////////////////////////////////////////////
-CameraHardwareInterfaceFlashControl::CameraHardwareInterfaceFlashControl(
- CameraModule* cameraModule,
- const camera_module_callbacks_t& callbacks) :
- mCameraModule(cameraModule),
- mProviderManager(nullptr),
- mCallbacks(&callbacks),
- mTorchEnabled(false) {
-}
CameraHardwareInterfaceFlashControl::CameraHardwareInterfaceFlashControl(
sp<CameraProviderManager> manager,
const camera_module_callbacks_t& callbacks) :
- mCameraModule(nullptr),
mProviderManager(manager),
mCallbacks(&callbacks),
mTorchEnabled(false) {
@@ -931,12 +519,7 @@
sp<CameraHardwareInterface> device =
new CameraHardwareInterface(cameraId.string());
- status_t res;
- if (mCameraModule != nullptr) {
- res = device->initialize(mCameraModule);
- } else {
- res = device->initialize(mProviderManager);
- }
+ status_t res = device->initialize(mProviderManager);
if (res) {
ALOGE("%s: initializing camera %s failed", __FUNCTION__,
cameraId.string());
diff --git a/services/camera/libcameraservice/CameraFlashlight.h b/services/camera/libcameraservice/CameraFlashlight.h
index 98f269a..c86ee85 100644
--- a/services/camera/libcameraservice/CameraFlashlight.h
+++ b/services/camera/libcameraservice/CameraFlashlight.h
@@ -23,7 +23,6 @@
#include <utils/KeyedVector.h>
#include <utils/SortedVector.h>
#include "common/CameraProviderManager.h"
-#include "common/CameraModule.h"
#include "common/CameraDeviceBase.h"
#include "device1/CameraHardwareInterface.h"
@@ -55,8 +54,6 @@
*/
class CameraFlashlight : public virtual VirtualLightRefBase {
public:
- CameraFlashlight(CameraModule* cameraModule,
- camera_module_callbacks_t* callbacks);
CameraFlashlight(sp<CameraProviderManager> providerManager,
camera_module_callbacks_t* callbacks);
virtual ~CameraFlashlight();
@@ -100,7 +97,6 @@
sp<FlashControlBase> mFlashControl;
- CameraModule *mCameraModule;
sp<CameraProviderManager> mProviderManager;
const camera_module_callbacks_t *mCallbacks;
@@ -132,86 +128,11 @@
};
/**
- * Flash control for camera module v2.4 and above.
- */
-class ModuleFlashControl : public FlashControlBase {
- public:
- ModuleFlashControl(CameraModule& cameraModule);
- virtual ~ModuleFlashControl();
-
- // FlashControlBase
- status_t hasFlashUnit(const String8& cameraId, bool *hasFlash);
- status_t setTorchMode(const String8& cameraId, bool enabled);
-
- private:
- CameraModule *mCameraModule;
-
- Mutex mLock;
-};
-
-/**
- * Flash control for camera module <= v2.3 and camera HAL v2-v3
- */
-class CameraDeviceClientFlashControl : public FlashControlBase {
- public:
- CameraDeviceClientFlashControl(CameraModule& cameraModule,
- const camera_module_callbacks_t& callbacks);
- virtual ~CameraDeviceClientFlashControl();
-
- // FlashControlBase
- status_t setTorchMode(const String8& cameraId, bool enabled);
- status_t hasFlashUnit(const String8& cameraId, bool *hasFlash);
-
- private:
- // connect to a camera device
- status_t connectCameraDevice(const String8& cameraId);
- // disconnect and free mDevice
- status_t disconnectCameraDevice();
-
- // initialize a surface
- status_t initializeSurface(sp<CameraDeviceBase>& device, int32_t width,
- int32_t height);
-
- // submit a request to enable the torch mode
- status_t submitTorchEnabledRequest();
-
- // get the smallest surface size of IMPLEMENTATION_DEFINED
- status_t getSmallestSurfaceSize(const camera_info& info, int32_t *width,
- int32_t *height);
-
- // protected by mLock
- status_t hasFlashUnitLocked(const String8& cameraId, bool *hasFlash);
-
- CameraModule *mCameraModule;
- const camera_module_callbacks_t *mCallbacks;
- String8 mCameraId;
- bool mTorchEnabled;
- CameraMetadata *mMetadata;
- // WORKAROUND: will be set to true for HAL v2 devices where
- // setStreamingRequest() needs to be called for torch mode settings to
- // take effect.
- bool mStreaming;
-
- sp<CameraDeviceBase> mDevice;
-
- sp<IGraphicBufferProducer> mProducer;
- sp<IGraphicBufferConsumer> mConsumer;
- sp<GLConsumer> mSurfaceTexture;
- sp<Surface> mSurface;
- int32_t mStreamId;
-
- Mutex mLock;
-};
-
-/**
* Flash control for camera module <= v2.3 and camera HAL v1
*/
class CameraHardwareInterfaceFlashControl : public FlashControlBase {
public:
CameraHardwareInterfaceFlashControl(
- CameraModule* cameraModule,
- const camera_module_callbacks_t& callbacks);
- CameraHardwareInterfaceFlashControl(
sp<CameraProviderManager> manager,
const camera_module_callbacks_t& callbacks);
virtual ~CameraHardwareInterfaceFlashControl();
@@ -244,7 +165,6 @@
// function, keepDeviceOpen is ignored.
status_t hasFlashUnitLocked(const String8& cameraId, bool *hasFlash, bool keepDeviceOpen);
- CameraModule *mCameraModule;
sp<CameraProviderManager> mProviderManager;
const camera_module_callbacks_t *mCallbacks;
sp<CameraHardwareInterface> mDevice;
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 39351e7..0031441 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -167,7 +167,7 @@
CameraService::CameraService() :
mEventLog(DEFAULT_EVENT_LOG_LENGTH),
mNumberOfCameras(0), mNumberOfNormalCameras(0),
- mSoundRef(0), mInitialized(false), mModule(nullptr) {
+ mSoundRef(0), mInitialized(false) {
ALOGI("CameraService started (pid=%d)", getpid());
this->camera_device_status_change = android::camera_device_status_change;
@@ -189,13 +189,7 @@
status_t res = INVALID_OPERATION;
- bool disableTreble = property_get_bool("camera.disable_treble", false);
- if (disableTreble) {
- ALOGI("Treble disabled - using legacy path");
- res = loadLegacyHalModule();
- } else {
- res = enumerateProviders();
- }
+ res = enumerateProviders();
if (res == OK) {
mInitialized = true;
}
@@ -203,108 +197,6 @@
CameraService::pingCameraServiceProxy();
}
-status_t CameraService::loadLegacyHalModule() {
- camera_module_t *rawModule;
- int err = hw_get_module(CAMERA_HARDWARE_MODULE_ID,
- (const hw_module_t **)&rawModule);
- if (err < 0) {
- ALOGE("Could not load camera HAL module: %d (%s)", err, strerror(-err));
- logServiceError("Could not load camera HAL module", err);
- return INVALID_OPERATION;
- }
-
- mModule = new CameraModule(rawModule);
- err = mModule->init();
- if (err != OK) {
- ALOGE("Could not initialize camera HAL module: %d (%s)", err,
- strerror(-err));
- logServiceError("Could not initialize camera HAL module", err);
-
- delete mModule;
- mModule = nullptr;
- return INVALID_OPERATION;
- }
- ALOGI("Loaded \"%s\" camera module", mModule->getModuleName());
-
- mNumberOfCameras = mModule->getNumberOfCameras();
- mNumberOfNormalCameras = mNumberOfCameras;
-
- // Setup vendor tags before we call get_camera_info the first time
- // because HAL might need to setup static vendor keys in get_camera_info
- VendorTagDescriptor::clearGlobalVendorTagDescriptor();
- if (mModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_2) {
- setUpVendorTags();
- }
-
- mFlashlight = new CameraFlashlight(mModule, this);
- status_t res = mFlashlight->findFlashUnits();
- if (res) {
- // impossible because we haven't opened any camera devices.
- ALOGE("Failed to find flash units.");
- }
-
- int latestStrangeCameraId = INT_MAX;
- for (int i = 0; i < mNumberOfCameras; i++) {
- String8 cameraId = String8::format("%d", i);
-
- // Get camera info
-
- struct camera_info info;
- bool haveInfo = true;
- status_t rc = mModule->getCameraInfo(i, &info);
- if (rc != NO_ERROR) {
- ALOGE("%s: Received error loading camera info for device %d, cost and"
- " conflicting devices fields set to defaults for this device.",
- __FUNCTION__, i);
- haveInfo = false;
- }
-
- // Check for backwards-compatibility support
- if (haveInfo) {
- if (checkCameraCapabilities(i, info, &latestStrangeCameraId) != OK) {
- delete mModule;
- mModule = nullptr;
- return INVALID_OPERATION;
- }
- }
-
- // Defaults to use for cost and conflicting devices
- int cost = 100;
- char** conflicting_devices = nullptr;
- size_t conflicting_devices_length = 0;
-
- // If using post-2.4 module version, query the cost + conflicting devices from the HAL
- if (mModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_4 && haveInfo) {
- cost = info.resource_cost;
- conflicting_devices = info.conflicting_devices;
- conflicting_devices_length = info.conflicting_devices_length;
- }
-
- std::set<String8> conflicting;
- for (size_t i = 0; i < conflicting_devices_length; i++) {
- conflicting.emplace(String8(conflicting_devices[i]));
- }
-
- // Initialize state for each camera device
- {
- Mutex::Autolock lock(mCameraStatesLock);
- mCameraStates.emplace(cameraId, std::make_shared<CameraState>(cameraId, cost,
- conflicting));
- }
-
- if (mFlashlight->hasFlashUnit(cameraId)) {
- mTorchStatusMap.add(cameraId,
- TorchModeStatus::AVAILABLE_OFF);
- }
- }
-
- if (mModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_1) {
- mModule->setCallbacks(this);
- }
-
- return OK;
-}
-
status_t CameraService::enumerateProviders() {
mCameraProviderManager = new CameraProviderManager();
status_t res;
@@ -316,7 +208,8 @@
}
mNumberOfCameras = mCameraProviderManager->getCameraCount();
- mNumberOfNormalCameras = mCameraProviderManager->getStandardCameraCount();
+ mNumberOfNormalCameras =
+ mCameraProviderManager->getAPI1CompatibleCameraCount();
// Setup vendor tags before we call get_camera_info the first time
// because HAL might need to setup static vendor keys in get_camera_info
@@ -380,10 +273,6 @@
}
CameraService::~CameraService() {
- if (mModule) {
- delete mModule;
- mModule = nullptr;
- }
VendorTagDescriptor::clearGlobalVendorTagDescriptor();
}
@@ -548,28 +437,13 @@
}
Status ret = Status::ok();
- if (mModule != nullptr) {
- struct camera_info info;
- ret = filterGetInfoErrorCode(mModule->getCameraInfo(cameraId, &info));
-
- if (ret.isOk()) {
- cameraInfo->facing = info.facing;
- cameraInfo->orientation = info.orientation;
- // CameraInfo is for android.hardware.Camera which does not
- // support external camera facing. The closest approximation would be
- // front camera.
- if (cameraInfo->facing == CAMERA_FACING_EXTERNAL) {
- cameraInfo->facing = hardware::CAMERA_FACING_FRONT;
- }
- }
- } else {
- status_t err = mCameraProviderManager->getCameraInfo(std::to_string(cameraId), cameraInfo);
- if (err != OK) {
- ret = STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
- "Error retrieving camera info from device %d: %s (%d)", cameraId,
- strerror(-err), err);
- }
+ status_t err = mCameraProviderManager->getCameraInfo(std::to_string(cameraId), cameraInfo);
+ if (err != OK) {
+ ret = STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
+ "Error retrieving camera info from device %d: %s (%d)", cameraId,
+ strerror(-err), err);
}
+
return ret;
}
@@ -598,34 +472,12 @@
Status ret{};
- if (mModule != nullptr) {
- int id = cameraIdToInt(String8(cameraId));
-
- if (id < 0 || id >= mNumberOfCameras) {
- ALOGE("%s: Invalid camera id: %d", __FUNCTION__, id);
- return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
- "Invalid camera id: %d", id);
- }
-
- int version = getDeviceVersion(String8(cameraId));
- if (version < CAMERA_DEVICE_API_VERSION_3_0) {
- return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Can't get camera characteristics"
- " for devices with HAL version < 3.0, %d is version %x", id, version);
- }
-
- struct camera_info info;
- ret = filterGetInfoErrorCode(mModule->getCameraInfo(id, &info));
- if (ret.isOk()) {
- *cameraInfo = info.static_camera_characteristics;
- }
- } else {
- status_t res = mCameraProviderManager->getCameraCharacteristics(
- String8(cameraId).string(), cameraInfo);
- if (res != OK) {
- return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Unable to retrieve camera "
- "characteristics for device %s: %s (%d)", String8(cameraId).string(),
- strerror(-res), res);
- }
+ status_t res = mCameraProviderManager->getCameraCharacteristics(
+ String8(cameraId).string(), cameraInfo);
+ if (res != OK) {
+ return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Unable to retrieve camera "
+ "characteristics for device %s: %s (%d)", String8(cameraId).string(),
+ strerror(-res), res);
}
return ret;
@@ -682,39 +534,20 @@
int deviceVersion = 0;
- if (mModule != nullptr) {
- int id = cameraIdToInt(cameraId);
- if (id < 0) return -1;
+ status_t res;
+ hardware::hidl_version maxVersion{0,0};
+ res = mCameraProviderManager->getHighestSupportedVersion(cameraId.string(),
+ &maxVersion);
+ if (res != OK) return -1;
+ deviceVersion = HARDWARE_DEVICE_API_VERSION(maxVersion.get_major(), maxVersion.get_minor());
- struct camera_info info;
- if (mModule->getCameraInfo(id, &info) != OK) {
- return -1;
- }
-
- if (mModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_0) {
- deviceVersion = info.device_version;
- } else {
- deviceVersion = CAMERA_DEVICE_API_VERSION_1_0;
- }
-
- if (facing) {
- *facing = info.facing;
- }
- } else {
- status_t res;
- hardware::hidl_version maxVersion{0,0};
- res = mCameraProviderManager->getHighestSupportedVersion(cameraId.string(),
- &maxVersion);
+ hardware::CameraInfo info;
+ if (facing) {
+ res = mCameraProviderManager->getCameraInfo(cameraId.string(), &info);
if (res != OK) return -1;
- deviceVersion = HARDWARE_DEVICE_API_VERSION(maxVersion.get_major(), maxVersion.get_minor());
-
- hardware::CameraInfo info;
- if (facing) {
- res = mCameraProviderManager->getCameraInfo(cameraId.string(), &info);
- if (res != OK) return -1;
- *facing = info.facing;
- }
+ *facing = info.facing;
}
+
return deviceVersion;
}
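
With the legacy CameraModule branch gone, getDeviceVersion() derives the version only from the highest HIDL version reported by CameraProviderManager, packed via HARDWARE_DEVICE_API_VERSION. A minimal standalone sketch of that packing, assuming the usual ((major & 0xff) << 8) | (minor & 0xff) encoding from hardware/hardware.h:

#include <cstdint>
#include <cstdio>

// Sketch of HARDWARE_DEVICE_API_VERSION (assumed encoding: major in the
// high byte, minor in the low byte).
constexpr int deviceApiVersion(uint8_t major, uint8_t minor) {
    return (static_cast<int>(major) << 8) | minor;
}

int main() {
    // A provider reporting HIDL device version 3.2 packs to 0x302, which is
    // what the framework headers call CAMERA_DEVICE_API_VERSION_3_2.
    printf("3.2 -> %#x\n", deviceApiVersion(3, 2));
    printf("3.4 -> %#x\n", deviceApiVersion(3, 4));
    return 0;
}
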
@@ -735,45 +568,6 @@
}
}
-bool CameraService::setUpVendorTags() {
- ATRACE_CALL();
- if (mModule == nullptr) return false;
-
- vendor_tag_ops_t vOps = vendor_tag_ops_t();
-
- // Check if vendor operations have been implemented
- if (!mModule->isVendorTagDefined()) {
- ALOGI("%s: No vendor tags defined for this device.", __FUNCTION__);
- return false;
- }
-
- mModule->getVendorTagOps(&vOps);
-
- // Ensure all vendor operations are present
- if (vOps.get_tag_count == NULL || vOps.get_all_tags == NULL ||
- vOps.get_section_name == NULL || vOps.get_tag_name == NULL ||
- vOps.get_tag_type == NULL) {
- ALOGE("%s: Vendor tag operations not fully defined. Ignoring definitions."
- , __FUNCTION__);
- return false;
- }
-
- // Read all vendor tag definitions into a descriptor
- sp<VendorTagDescriptor> desc;
- status_t res;
- if ((res = VendorTagDescriptor::createDescriptorFromOps(&vOps, /*out*/desc))
- != OK) {
- ALOGE("%s: Could not generate descriptor from vendor tag operations,"
- "received error %s (%d). Camera clients will not be able to use"
- "vendor tags", __FUNCTION__, strerror(res), res);
- return false;
- }
-
- // Set the global descriptor to use with camera metadata
- VendorTagDescriptor::setAsGlobalVendorTagDescriptor(desc);
- return true;
-}
-
Status CameraService::makeClient(const sp<CameraService>& cameraService,
const sp<IInterface>& cameraCb, const String16& packageName, const String8& cameraId,
int facing, int clientPid, uid_t clientUid, int servicePid, bool legacyMode,
@@ -1403,25 +1197,6 @@
ATRACE_CALL();
String8 id = String8::format("%d", cameraId);
- if (mModule != nullptr) {
- int apiVersion = mModule->getModuleApiVersion();
- if (halVersion != CAMERA_HAL_API_VERSION_UNSPECIFIED &&
- apiVersion < CAMERA_MODULE_API_VERSION_2_3) {
- /*
- * Either the HAL version is unspecified in which case this just creates
- * a camera client selected by the latest device version, or
- * it's a particular version in which case the HAL must support
- * the open_legacy call
- */
- String8 msg = String8::format("Camera HAL module version %x too old for connectLegacy!",
- apiVersion);
- ALOGE("%s: %s",
- __FUNCTION__, msg.string());
- logRejected(id, getCallingPid(), String8(clientPackageName),
- msg);
- return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.string());
- }
- }
Status ret = Status::ok();
sp<Client> client = nullptr;
@@ -1564,12 +1339,7 @@
LOG_ALWAYS_FATAL_IF(client.get() == nullptr, "%s: CameraService in invalid state",
__FUNCTION__);
- if (mModule != nullptr) {
- err = client->initialize(mModule);
- } else {
- err = client->initialize(mCameraProviderManager);
- }
-
+ err = client->initialize(mCameraProviderManager);
if (err != OK) {
ALOGE("%s: Could not initialize client from HAL.", __FUNCTION__);
// Errors could be from the HAL module open call or from AppOpsManager
@@ -1988,83 +1758,6 @@
return ret;
}
-
-/**
- * Check camera capabilities, such as support for basic color operation
- * Also check that the device HAL version is still in support
- */
-int CameraService::checkCameraCapabilities(int id, camera_info info, int *latestStrangeCameraId) {
- if (mModule == nullptr) return NO_INIT;
-
- // device_version undefined in CAMERA_MODULE_API_VERSION_1_0,
- // All CAMERA_MODULE_API_VERSION_1_0 devices are backward-compatible
- if (mModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_0) {
- // Verify the device version is in the supported range
- switch (info.device_version) {
- case CAMERA_DEVICE_API_VERSION_1_0:
- case CAMERA_DEVICE_API_VERSION_3_0:
- case CAMERA_DEVICE_API_VERSION_3_1:
- case CAMERA_DEVICE_API_VERSION_3_2:
- case CAMERA_DEVICE_API_VERSION_3_3:
- case CAMERA_DEVICE_API_VERSION_3_4:
- // still supported
- break;
- case CAMERA_DEVICE_API_VERSION_2_0:
- case CAMERA_DEVICE_API_VERSION_2_1:
- // no longer supported
- default:
- ALOGE("%s: Device %d has HAL version %x, which is not supported",
- __FUNCTION__, id, info.device_version);
- String8 msg = String8::format(
- "Unsupported device HAL version %x for device %d",
- info.device_version, id);
- logServiceError(msg.string(), NO_INIT);
- return NO_INIT;
- }
- }
-
- // Assume all devices pre-v3.3 are backward-compatible
- bool isBackwardCompatible = true;
- if (mModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_0
- && info.device_version >= CAMERA_DEVICE_API_VERSION_3_3) {
- isBackwardCompatible = false;
- status_t res;
- camera_metadata_ro_entry_t caps;
- res = find_camera_metadata_ro_entry(
- info.static_camera_characteristics,
- ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
- &caps);
- if (res != 0) {
- ALOGW("%s: Unable to find camera capabilities for camera device %d",
- __FUNCTION__, id);
- caps.count = 0;
- }
- for (size_t i = 0; i < caps.count; i++) {
- if (caps.data.u8[i] ==
- ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE) {
- isBackwardCompatible = true;
- break;
- }
- }
- }
-
- if (!isBackwardCompatible) {
- mNumberOfNormalCameras--;
- *latestStrangeCameraId = id;
- } else {
- if (id > *latestStrangeCameraId) {
- ALOGE("%s: Normal camera ID %d higher than strange camera ID %d. "
- "This is not allowed due backward-compatibility requirements",
- __FUNCTION__, id, *latestStrangeCameraId);
- logServiceError("Invalid order of camera devices", NO_INIT);
- mNumberOfCameras = 0;
- mNumberOfNormalCameras = 0;
- return NO_INIT;
- }
- }
- return OK;
-}
-
std::shared_ptr<CameraService::CameraState> CameraService::getCameraState(
const String8& cameraId) const {
std::shared_ptr<CameraState> state;
@@ -2815,61 +2508,13 @@
cameraId.string());
}
- if (mModule != nullptr) {
- dprintf(fd, "== Camera HAL device %s static information: ==\n", cameraId.string());
-
- camera_info info;
- status_t rc = mModule->getCameraInfo(cameraIdToInt(cameraId), &info);
- int deviceVersion = -1;
- if (rc != OK) {
- dprintf(fd, " Error reading static information!\n");
- } else {
- dprintf(fd, " Facing: %s\n",
- info.facing == CAMERA_FACING_BACK ? "BACK" :
- info.facing == CAMERA_FACING_FRONT ? "FRONT" : "EXTERNAL");
- dprintf(fd, " Orientation: %d\n", info.orientation);
-
- if (mModule->getModuleApiVersion() < CAMERA_MODULE_API_VERSION_2_0) {
- deviceVersion = CAMERA_DEVICE_API_VERSION_1_0;
- } else {
- deviceVersion = info.device_version;
- }
- }
-
- auto conflicting = state.second->getConflicting();
- dprintf(fd, " Resource Cost: %d\n", state.second->getCost());
- dprintf(fd, " Conflicting Devices:");
- for (auto& id : conflicting) {
- dprintf(fd, " %s", id.string());
- }
- if (conflicting.size() == 0) {
- dprintf(fd, " NONE");
- }
- dprintf(fd, "\n");
-
- dprintf(fd, " Device version: %#x\n", deviceVersion);
- if (deviceVersion >= CAMERA_DEVICE_API_VERSION_3_0) {
- dprintf(fd, " Device static metadata:\n");
- dump_indented_camera_metadata(info.static_camera_characteristics,
- fd, /*verbosity*/2, /*indentation*/4);
- }
- }
-
}
if (stateLocked) mCameraStatesLock.unlock();
if (locked) mServiceLock.unlock();
- if (mModule == nullptr) {
- mCameraProviderManager->dump(fd, args);
- } else {
- dprintf(fd, "\n== Camera Module HAL static info: ==\n");
- dprintf(fd, "Camera module HAL API version: 0x%x\n", mModule->getHalApiVersion());
- dprintf(fd, "Camera module API version: 0x%x\n", mModule->getModuleApiVersion());
- dprintf(fd, "Camera module name: %s\n", mModule->getModuleName());
- dprintf(fd, "Camera module author: %s\n", mModule->getModuleAuthor());
- }
+ mCameraProviderManager->dump(fd, args);
dprintf(fd, "\n== Vendor tags: ==\n\n");
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index e49fe62..7d81993 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -37,7 +37,6 @@
#include "CameraFlashlight.h"
-#include "common/CameraModule.h"
#include "common/CameraProviderManager.h"
#include "media/RingBuffer.h"
#include "utils/AutoConditionLock.h"
@@ -198,7 +197,6 @@
class BasicClient : public virtual RefBase {
public:
- virtual status_t initialize(CameraModule *module) = 0;
virtual status_t initialize(sp<CameraProviderManager> manager) = 0;
virtual binder::Status disconnect();
@@ -508,9 +506,6 @@
// Delay-load the Camera HAL module
virtual void onFirstRef();
- // Load the legacy HAL module
- status_t loadLegacyHalModule();
-
// Enumerate all camera providers in the system
status_t enumerateProviders();
@@ -568,11 +563,6 @@
std::set<userid_t> mAllowedUsers;
/**
- * Check camera capabilities, such as support for basic color operation
- */
- int checkCameraCapabilities(int id, camera_info info, int *latestStrangeCameraId);
-
- /**
* Get the camera state for a given camera id.
*
* This acquires mCameraStatesLock.
@@ -687,7 +677,6 @@
// Basic flag on whether the camera subsystem is in a usable state
bool mInitialized;
- CameraModule* mModule;
sp<CameraProviderManager> mCameraProviderManager;
// Guarded by mStatusListenerMutex
@@ -745,10 +734,6 @@
// IBinder::DeathRecipient implementation
virtual void binderDied(const wp<IBinder> &who);
- // Helpers
-
- bool setUpVendorTags();
-
/**
* Initialize and cache the metadata used by the HAL1 shim for a given cameraId.
*
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 335e999..a28518e 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -68,10 +68,6 @@
mLegacyMode = legacyMode;
}
-status_t Camera2Client::initialize(CameraModule *module) {
- return initializeImpl(module);
-}
-
status_t Camera2Client::initialize(sp<CameraProviderManager> manager) {
return initializeImpl(manager);
}
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index 9738aca..72315d4 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -100,7 +100,6 @@
virtual ~Camera2Client();
- virtual status_t initialize(CameraModule *module) override;
virtual status_t initialize(sp<CameraProviderManager> manager) override;
virtual status_t dump(int fd, const Vector<String16>& args);
diff --git a/services/camera/libcameraservice/api1/CameraClient.cpp b/services/camera/libcameraservice/api1/CameraClient.cpp
index f12cc7b..075c2e3 100644
--- a/services/camera/libcameraservice/api1/CameraClient.cpp
+++ b/services/camera/libcameraservice/api1/CameraClient.cpp
@@ -62,16 +62,7 @@
LOG1("CameraClient::CameraClient X (pid %d, id %d)", callingPid, cameraId);
}
-status_t CameraClient::initialize(CameraModule *module) {
- return initializeImpl<CameraModule*>(module);
-}
-
status_t CameraClient::initialize(sp<CameraProviderManager> manager) {
- return initializeImpl<sp<CameraProviderManager>>(manager);
-}
-
-template<typename TProviderPtr>
-status_t CameraClient::initializeImpl(TProviderPtr providerPtr) {
int callingPid = getCallingPid();
status_t res;
@@ -87,7 +78,7 @@
snprintf(camera_device_name, sizeof(camera_device_name), "%d", mCameraId);
mHardware = new CameraHardwareInterface(camera_device_name);
- res = mHardware->initialize(providerPtr);
+ res = mHardware->initialize(manager);
if (res != OK) {
ALOGE("%s: Camera %d: unable to initialize device: %s (%d)",
__FUNCTION__, mCameraId, strerror(-res), res);
diff --git a/services/camera/libcameraservice/api1/CameraClient.h b/services/camera/libcameraservice/api1/CameraClient.h
index 1073384..7f93fef 100644
--- a/services/camera/libcameraservice/api1/CameraClient.h
+++ b/services/camera/libcameraservice/api1/CameraClient.h
@@ -72,7 +72,6 @@
bool legacyMode = false);
~CameraClient();
- virtual status_t initialize(CameraModule *module) override;
virtual status_t initialize(sp<CameraProviderManager> manager) override;
virtual status_t dump(int fd, const Vector<String16>& args);
@@ -81,9 +80,6 @@
private:
- template<typename TProviderPtr>
- status_t initializeImpl(TProviderPtr providerPtr);
-
// check whether the calling process matches mClientPid.
status_t checkPid() const;
status_t checkPidAndHardware() const; // also check mHardware != 0
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
index 733a78e..6e21126 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
@@ -41,11 +41,7 @@
{
SharedParameters::Lock l(client->getParameters());
- if (client->getCameraDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_2) {
- mUsePartialResult = (mNumPartialResults > 1);
- } else {
- mUsePartialResult = l.mParameters.quirks.partialResults;
- }
+ mUsePartialResult = (mNumPartialResults > 1);
// Initialize starting 3A state
m3aState.afTriggerId = l.mParameters.afTriggerCounter;
@@ -76,16 +72,7 @@
bool isPartialResult = false;
if (mUsePartialResult) {
- if (client->getCameraDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_2) {
- isPartialResult = frame.mResultExtras.partialResultCount < mNumPartialResults;
- } else {
- camera_metadata_entry_t entry;
- entry = frame.mMetadata.find(ANDROID_QUIRKS_PARTIAL_RESULT);
- if (entry.count > 0 &&
- entry.data.u8[0] == ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
- isPartialResult = true;
- }
- }
+ isPartialResult = frame.mResultExtras.partialResultCount < mNumPartialResults;
}
if (!isPartialResult && processFaceDetect(frame.mMetadata, client) != OK) {
@@ -291,16 +278,8 @@
gotAllStates &= updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AWB_STATE,
&pendingState.awbState, frameNumber, cameraId);
- if (client->getCameraDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_2) {
- pendingState.afTriggerId = frame.mResultExtras.afTriggerId;
- pendingState.aeTriggerId = frame.mResultExtras.precaptureTriggerId;
- } else {
- gotAllStates &= updatePendingState<int32_t>(metadata,
- ANDROID_CONTROL_AF_TRIGGER_ID, &pendingState.afTriggerId, frameNumber, cameraId);
-
- gotAllStates &= updatePendingState<int32_t>(metadata,
- ANDROID_CONTROL_AE_PRECAPTURE_ID, &pendingState.aeTriggerId, frameNumber, cameraId);
- }
+ pendingState.afTriggerId = frame.mResultExtras.afTriggerId;
+ pendingState.aeTriggerId = frame.mResultExtras.precaptureTriggerId;
if (!gotAllStates) {
// If not all states are received, put the pending state to mPending3AStates.
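
Both FrameProcessor hunks keep only the HAL >= 3.2 behavior: a result is treated as partial purely by comparing partialResultCount against the advertised partial-result count, and the 3A trigger IDs now always come from CaptureResultExtras instead of the quirks tags. A standalone sketch of the counting logic (struct and names are illustrative, not the framework types):

#include <cstdint>
#include <cstdio>

// Illustrative stand-in for the relevant field of CaptureResultExtras.
struct ResultExtras {
    uint32_t partialResultCount;  // 1-based index of this partial result
};

// Partial results only apply when the HAL advertises more than one.
bool usePartialResult(uint32_t numPartialResults) {
    return numPartialResults > 1;
}

// A frame stays "partial" until its count reaches the advertised total.
bool isPartialResult(const ResultExtras& extras, uint32_t numPartialResults) {
    return extras.partialResultCount < numPartialResults;
}

int main() {
    const uint32_t numPartial = 3;  // e.g. from the partial result count tag
    printf("use partial results: %d\n", usePartialResult(numPartial));
    for (uint32_t count = 1; count <= numPartial; ++count) {
        ResultExtras extras{count};
        printf("result %u of %u -> %s\n", count, numPartial,
               isPartialResult(extras, numPartial) ? "partial" : "final");
    }
    return 0;
}
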
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index 1c78a08..a305bc7 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -2854,32 +2854,14 @@
}
sizes->clear();
- if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2) {
- Vector<StreamConfiguration> scs = getStreamConfigurations();
- for (size_t i=0; i < scs.size(); i++) {
- const StreamConfiguration &sc = scs[i];
- if (sc.isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
- sc.format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
- sc.width <= limit.width && sc.height <= limit.height) {
- Size sz = {sc.width, sc.height};
- sizes->push(sz);
- }
- }
- } else {
- const size_t SIZE_COUNT = sizeof(Size) / sizeof(int);
- camera_metadata_ro_entry_t availableProcessedSizes =
- staticInfo(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, SIZE_COUNT);
- if (availableProcessedSizes.count < SIZE_COUNT) return BAD_VALUE;
-
- Size filteredSize;
- for (size_t i = 0; i < availableProcessedSizes.count; i += SIZE_COUNT) {
- filteredSize.width = availableProcessedSizes.data.i32[i];
- filteredSize.height = availableProcessedSizes.data.i32[i+1];
- // Need to skip the preview sizes that are too large.
- if (filteredSize.width <= limit.width &&
- filteredSize.height <= limit.height) {
- sizes->push(filteredSize);
- }
+ Vector<StreamConfiguration> scs = getStreamConfigurations();
+ for (size_t i=0; i < scs.size(); i++) {
+ const StreamConfiguration &sc = scs[i];
+ if (sc.isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
+ sc.format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
+ sc.width <= limit.width && sc.height <= limit.height) {
+ Size sz = {sc.width, sc.height};
+ sizes->push(sz);
}
}
@@ -2934,10 +2916,6 @@
const int STREAM_HEIGHT_OFFSET = 2;
const int STREAM_IS_INPUT_OFFSET = 3;
Vector<StreamConfiguration> scs;
- if (mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_2) {
- ALOGE("StreamConfiguration is only valid after device HAL 3.2!");
- return scs;
- }
camera_metadata_ro_entry_t availableStreamConfigs =
staticInfo(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
@@ -2953,37 +2931,10 @@
}
int64_t Parameters::getJpegStreamMinFrameDurationNs(Parameters::Size size) {
- if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2) {
- return getMinFrameDurationNs(size, HAL_PIXEL_FORMAT_BLOB);
- } else {
- Vector<Size> availableJpegSizes = getAvailableJpegSizes();
- size_t streamIdx = availableJpegSizes.size();
- for (size_t i = 0; i < availableJpegSizes.size(); i++) {
- if (availableJpegSizes[i].width == size.width &&
- availableJpegSizes[i].height == size.height) {
- streamIdx = i;
- break;
- }
- }
- if (streamIdx != availableJpegSizes.size()) {
- camera_metadata_ro_entry_t jpegMinDurations =
- staticInfo(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS);
- if (streamIdx < jpegMinDurations.count) {
- return jpegMinDurations.data.i64[streamIdx];
- }
- }
- }
- ALOGE("%s: cannot find min frame duration for jpeg size %dx%d",
- __FUNCTION__, size.width, size.height);
- return -1;
+ return getMinFrameDurationNs(size, HAL_PIXEL_FORMAT_BLOB);
}
int64_t Parameters::getMinFrameDurationNs(Parameters::Size size, int fmt) {
- if (mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_2) {
- ALOGE("Min frame duration for HAL 3.1 or lower is not supported");
- return -1;
- }
-
const int STREAM_DURATION_SIZE = 4;
const int STREAM_FORMAT_OFFSET = 0;
const int STREAM_WIDTH_OFFSET = 1;
@@ -3005,11 +2956,6 @@
}
bool Parameters::isFpsSupported(const Vector<Size> &sizes, int format, int32_t fps) {
- // Skip the check for older HAL version, as the min duration is not supported.
- if (mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_2) {
- return true;
- }
-
// Get min frame duration for each size and check if the given fps range can be supported.
for (size_t i = 0 ; i < sizes.size(); i++) {
int64_t minFrameDuration = getMinFrameDurationNs(sizes[i], format);
@@ -3030,48 +2976,29 @@
SortedVector<int32_t> Parameters::getAvailableOutputFormats() {
SortedVector<int32_t> outputFormats; // Non-duplicated output formats
- if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2) {
- Vector<StreamConfiguration> scs = getStreamConfigurations();
- for (size_t i = 0; i < scs.size(); i++) {
- const StreamConfiguration &sc = scs[i];
- if (sc.isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
- outputFormats.add(sc.format);
- }
- }
- } else {
- camera_metadata_ro_entry_t availableFormats = staticInfo(ANDROID_SCALER_AVAILABLE_FORMATS);
- for (size_t i = 0; i < availableFormats.count; i++) {
- outputFormats.add(availableFormats.data.i32[i]);
+ Vector<StreamConfiguration> scs = getStreamConfigurations();
+ for (size_t i = 0; i < scs.size(); i++) {
+ const StreamConfiguration &sc = scs[i];
+ if (sc.isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
+ outputFormats.add(sc.format);
}
}
+
return outputFormats;
}
Vector<Parameters::Size> Parameters::getAvailableJpegSizes() {
Vector<Parameters::Size> jpegSizes;
- if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2) {
- Vector<StreamConfiguration> scs = getStreamConfigurations();
- for (size_t i = 0; i < scs.size(); i++) {
- const StreamConfiguration &sc = scs[i];
- if (sc.isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
- sc.format == HAL_PIXEL_FORMAT_BLOB) {
- Size sz = {sc.width, sc.height};
- jpegSizes.add(sz);
- }
- }
- } else {
- const int JPEG_SIZE_ENTRY_COUNT = 2;
- const int WIDTH_OFFSET = 0;
- const int HEIGHT_OFFSET = 1;
- camera_metadata_ro_entry_t availableJpegSizes =
- staticInfo(ANDROID_SCALER_AVAILABLE_JPEG_SIZES);
- for (size_t i = 0; i < availableJpegSizes.count; i+= JPEG_SIZE_ENTRY_COUNT) {
- int width = availableJpegSizes.data.i32[i + WIDTH_OFFSET];
- int height = availableJpegSizes.data.i32[i + HEIGHT_OFFSET];
- Size sz = {width, height};
+ Vector<StreamConfiguration> scs = getStreamConfigurations();
+ for (size_t i = 0; i < scs.size(); i++) {
+ const StreamConfiguration &sc = scs[i];
+ if (sc.isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
+ sc.format == HAL_PIXEL_FORMAT_BLOB) {
+ Size sz = {sc.width, sc.height};
jpegSizes.add(sz);
}
}
+
return jpegSizes;
}
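
Every Parameters.cpp hunk above collapses to the single HAL >= 3.2 path: preview sizes, output formats, and JPEG sizes are all read from ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS rather than the legacy ANDROID_SCALER_AVAILABLE_* tags. A compact sketch of that filtering pattern over the flat (format, width, height, isInput) quadruples; the constants are illustrative and the real values come from the metadata headers:

#include <cstdint>
#include <cstdio>
#include <vector>

// Illustrative constants (real code uses the camera_metadata enums).
constexpr int32_t kOutput = 0;       // ..._STREAM_CONFIGURATIONS_OUTPUT
constexpr int32_t kFormatBlob = 33;  // HAL_PIXEL_FORMAT_BLOB (JPEG)

struct Size { int32_t width, height; };

// Stream configurations are published as a flat int32 array of
// (format, width, height, isInput) entries.
std::vector<Size> outputSizesForFormat(const std::vector<int32_t>& configs,
                                       int32_t wantedFormat) {
    std::vector<Size> sizes;
    for (size_t i = 0; i + 3 < configs.size(); i += 4) {
        const int32_t format = configs[i];
        const int32_t width = configs[i + 1];
        const int32_t height = configs[i + 2];
        const int32_t isInput = configs[i + 3];
        if (isInput == kOutput && format == wantedFormat) {
            sizes.push_back({width, height});
        }
    }
    return sizes;
}

int main() {
    const std::vector<int32_t> configs = {
        kFormatBlob, 4032, 3024, kOutput,
        kFormatBlob, 1920, 1080, kOutput,
    };
    for (const Size& s : outputSizesForFormat(configs, kFormatBlob)) {
        printf("JPEG output: %dx%d\n", s.width, s.height);
    }
    return 0;
}
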
diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
index bf92a2b..d79e430 100644
--- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
@@ -114,16 +114,11 @@
}
// Use CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG for ZSL streaming case.
- if (client->getCameraDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_0) {
- if (params.useZeroShutterLag() && !params.recordingHint) {
- res = device->createDefaultRequest(CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG,
- &mPreviewRequest);
- } else {
- res = device->createDefaultRequest(CAMERA3_TEMPLATE_PREVIEW,
- &mPreviewRequest);
- }
+ if (params.useZeroShutterLag() && !params.recordingHint) {
+ res = device->createDefaultRequest(
+ CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG, &mPreviewRequest);
} else {
- res = device->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
+ res = device->createDefaultRequest(CAMERA3_TEMPLATE_PREVIEW,
&mPreviewRequest);
}
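
With the pre-HAL3 branch removed, the preview request template is chosen purely from the ZSL and recording hints. A tiny sketch of that decision (the enum is illustrative; the framework uses the camera3 template constants):

#include <cstdio>

enum class RequestTemplate { Preview, ZeroShutterLag };

// ZSL template only when the app asked for ZSL and is not recording.
RequestTemplate previewTemplate(bool useZeroShutterLag, bool recordingHint) {
    return (useZeroShutterLag && !recordingHint) ? RequestTemplate::ZeroShutterLag
                                                 : RequestTemplate::Preview;
}

int main() {
    printf("%d\n", static_cast<int>(previewTemplate(true, false)));  // 1 (ZSL)
    printf("%d\n", static_cast<int>(previewTemplate(true, true)));   // 0 (preview)
    return 0;
}
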
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index f2e8df8..0429e7f 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -82,10 +82,6 @@
ALOGI("CameraDeviceClient %s: Opened", cameraId.string());
}
-status_t CameraDeviceClient::initialize(CameraModule *module) {
- return initializeImpl(module);
-}
-
status_t CameraDeviceClient::initialize(sp<CameraProviderManager> manager) {
return initializeImpl(manager);
}
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 2a95c88..2bf73a0 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -149,7 +149,6 @@
int servicePid);
virtual ~CameraDeviceClient();
- virtual status_t initialize(CameraModule *module) override;
virtual status_t initialize(sp<CameraProviderManager> manager) override;
virtual status_t dump(int fd, const Vector<String16>& args);
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 93a584b..32ee273 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -79,11 +79,6 @@
}
template <typename TClientBase>
-status_t Camera2ClientBase<TClientBase>::initialize(CameraModule *module) {
- return initializeImpl(module);
-}
-
-template <typename TClientBase>
status_t Camera2ClientBase<TClientBase>::initialize(sp<CameraProviderManager> manager) {
return initializeImpl(manager);
}
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index a4c08ef..e898d5d 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -18,7 +18,6 @@
#define ANDROID_SERVERS_CAMERA_CAMERA2CLIENT_BASE_H
#include "common/CameraDeviceBase.h"
-#include "common/CameraModule.h"
#include "camera/CaptureResult.h"
namespace android {
@@ -56,7 +55,6 @@
int servicePid);
virtual ~Camera2ClientBase();
- virtual status_t initialize(CameraModule *module);
virtual status_t initialize(sp<CameraProviderManager> manager);
virtual status_t dumpClient(int fd, const Vector<String16>& args);
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 4f788ae..d9059f3 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -30,7 +30,6 @@
#include "hardware/camera3.h"
#include "camera/CameraMetadata.h"
#include "camera/CaptureResult.h"
-#include "common/CameraModule.h"
#include "gui/IGraphicBufferProducer.h"
#include "device3/Camera3StreamInterface.h"
#include "binder/Status.h"
@@ -55,7 +54,6 @@
*/
virtual const String8& getId() const = 0;
- virtual status_t initialize(CameraModule *module) = 0;
virtual status_t initialize(sp<CameraProviderManager> manager) = 0;
virtual status_t disconnect() = 0;
@@ -307,11 +305,6 @@
virtual status_t prepare(int maxCount, int streamId) = 0;
/**
- * Get the HAL device version.
- */
- virtual uint32_t getDeviceVersion() = 0;
-
- /**
* Set the deferred consumer surface and finish the rest of the stream configuration.
*/
virtual status_t setConsumerSurfaces(int streamId,
diff --git a/services/camera/libcameraservice/common/CameraModule.cpp b/services/camera/libcameraservice/common/CameraModule.cpp
deleted file mode 100644
index 073144c..0000000
--- a/services/camera/libcameraservice/common/CameraModule.cpp
+++ /dev/null
@@ -1,403 +0,0 @@
-/*
- * Copyright (C) 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "CameraModule"
-#define ATRACE_TAG ATRACE_TAG_CAMERA
-//#define LOG_NDEBUG 0
-
-#include <utils/Trace.h>
-
-#include "CameraModule.h"
-
-namespace android {
-
-void CameraModule::deriveCameraCharacteristicsKeys(
- uint32_t deviceVersion, CameraMetadata &chars) {
- ATRACE_CALL();
-
- Vector<int32_t> derivedCharKeys;
- Vector<int32_t> derivedRequestKeys;
- Vector<int32_t> derivedResultKeys;
- // Keys added in HAL3.3
- if (deviceVersion < CAMERA_DEVICE_API_VERSION_3_3) {
- Vector<uint8_t> controlModes;
- uint8_t data = ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE;
- chars.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE, &data, /*count*/1);
- data = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE;
- chars.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE, &data, /*count*/1);
- controlModes.push(ANDROID_CONTROL_MODE_AUTO);
- camera_metadata_entry entry = chars.find(ANDROID_CONTROL_AVAILABLE_SCENE_MODES);
- if (entry.count > 1 || entry.data.u8[0] != ANDROID_CONTROL_SCENE_MODE_DISABLED) {
- controlModes.push(ANDROID_CONTROL_MODE_USE_SCENE_MODE);
- }
-
- // Only advertise CONTROL_OFF mode if 3A manual controls are supported.
- bool isManualAeSupported = false;
- bool isManualAfSupported = false;
- bool isManualAwbSupported = false;
- entry = chars.find(ANDROID_CONTROL_AE_AVAILABLE_MODES);
- if (entry.count > 0) {
- for (size_t i = 0; i < entry.count; i++) {
- if (entry.data.u8[i] == ANDROID_CONTROL_AE_MODE_OFF) {
- isManualAeSupported = true;
- break;
- }
- }
- }
- entry = chars.find(ANDROID_CONTROL_AF_AVAILABLE_MODES);
- if (entry.count > 0) {
- for (size_t i = 0; i < entry.count; i++) {
- if (entry.data.u8[i] == ANDROID_CONTROL_AF_MODE_OFF) {
- isManualAfSupported = true;
- break;
- }
- }
- }
- entry = chars.find(ANDROID_CONTROL_AWB_AVAILABLE_MODES);
- if (entry.count > 0) {
- for (size_t i = 0; i < entry.count; i++) {
- if (entry.data.u8[i] == ANDROID_CONTROL_AWB_MODE_OFF) {
- isManualAwbSupported = true;
- break;
- }
- }
- }
- if (isManualAeSupported && isManualAfSupported && isManualAwbSupported) {
- controlModes.push(ANDROID_CONTROL_MODE_OFF);
- }
-
- chars.update(ANDROID_CONTROL_AVAILABLE_MODES, controlModes);
-
- entry = chars.find(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS);
- // HAL3.2 devices passing existing CTS test should all support all LSC modes and LSC map
- bool lensShadingModeSupported = false;
- if (entry.count > 0) {
- for (size_t i = 0; i < entry.count; i++) {
- if (entry.data.i32[i] == ANDROID_SHADING_MODE) {
- lensShadingModeSupported = true;
- break;
- }
- }
- }
- Vector<uint8_t> lscModes;
- Vector<uint8_t> lscMapModes;
- lscModes.push(ANDROID_SHADING_MODE_FAST);
- lscModes.push(ANDROID_SHADING_MODE_HIGH_QUALITY);
- lscMapModes.push(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF);
- if (lensShadingModeSupported) {
- lscModes.push(ANDROID_SHADING_MODE_OFF);
- lscMapModes.push(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
- }
- chars.update(ANDROID_SHADING_AVAILABLE_MODES, lscModes);
- chars.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES, lscMapModes);
-
- derivedCharKeys.push(ANDROID_CONTROL_AE_LOCK_AVAILABLE);
- derivedCharKeys.push(ANDROID_CONTROL_AWB_LOCK_AVAILABLE);
- derivedCharKeys.push(ANDROID_CONTROL_AVAILABLE_MODES);
- derivedCharKeys.push(ANDROID_SHADING_AVAILABLE_MODES);
- derivedCharKeys.push(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES);
-
- // Need to update android.control.availableHighSpeedVideoConfigurations since HAL3.3
- // adds batch size to this array.
- entry = chars.find(ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS);
- if (entry.count > 0) {
- Vector<int32_t> highSpeedConfig;
- for (size_t i = 0; i < entry.count; i += 4) {
- highSpeedConfig.add(entry.data.i32[i]); // width
- highSpeedConfig.add(entry.data.i32[i + 1]); // height
- highSpeedConfig.add(entry.data.i32[i + 2]); // fps_min
- highSpeedConfig.add(entry.data.i32[i + 3]); // fps_max
- highSpeedConfig.add(1); // batchSize_max. default to 1 for HAL3.2
- }
- chars.update(ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
- highSpeedConfig);
- }
- }
-
- // Keys added in HAL3.4
- if (deviceVersion < CAMERA_DEVICE_API_VERSION_3_4) {
- // Check if HAL supports RAW_OPAQUE output
- camera_metadata_entry entry = chars.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
- bool supportRawOpaque = false;
- bool supportAnyRaw = false;
- const int STREAM_CONFIGURATION_SIZE = 4;
- const int STREAM_FORMAT_OFFSET = 0;
- const int STREAM_WIDTH_OFFSET = 1;
- const int STREAM_HEIGHT_OFFSET = 2;
- const int STREAM_IS_INPUT_OFFSET = 3;
- Vector<int32_t> rawOpaqueSizes;
-
- for (size_t i=0; i < entry.count; i += STREAM_CONFIGURATION_SIZE) {
- int32_t format = entry.data.i32[i + STREAM_FORMAT_OFFSET];
- int32_t width = entry.data.i32[i + STREAM_WIDTH_OFFSET];
- int32_t height = entry.data.i32[i + STREAM_HEIGHT_OFFSET];
- int32_t isInput = entry.data.i32[i + STREAM_IS_INPUT_OFFSET];
- if (isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
- format == HAL_PIXEL_FORMAT_RAW_OPAQUE) {
- supportRawOpaque = true;
- rawOpaqueSizes.push(width);
- rawOpaqueSizes.push(height);
- // 2 bytes per pixel. This rough estimation is only used when
- // HAL does not fill in the opaque raw size
- rawOpaqueSizes.push(width * height *2);
- }
- if (isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
- (format == HAL_PIXEL_FORMAT_RAW16 ||
- format == HAL_PIXEL_FORMAT_RAW10 ||
- format == HAL_PIXEL_FORMAT_RAW12 ||
- format == HAL_PIXEL_FORMAT_RAW_OPAQUE)) {
- supportAnyRaw = true;
- }
- }
-
- if (supportRawOpaque) {
- entry = chars.find(ANDROID_SENSOR_OPAQUE_RAW_SIZE);
- if (entry.count == 0) {
- // Fill in estimated value if HAL does not list it
- chars.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, rawOpaqueSizes);
- derivedCharKeys.push(ANDROID_SENSOR_OPAQUE_RAW_SIZE);
- }
- }
-
- // Check if HAL supports any RAW output, if so, fill in postRawSensitivityBoost range
- if (supportAnyRaw) {
- int32_t defaultRange[2] = {100, 100};
- entry = chars.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE);
- if (entry.count == 0) {
- // Fill in default value (100, 100)
- chars.update(
- ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
- defaultRange, 2);
- derivedCharKeys.push(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE);
- // Actual request/results will be derived by camera device.
- derivedRequestKeys.push(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST);
- derivedResultKeys.push(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST);
- }
- }
- }
-
- // Always add a default for the pre-correction active array if the vendor chooses to omit this
- camera_metadata_entry entry = chars.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
- if (entry.count == 0) {
- Vector<int32_t> preCorrectionArray;
- entry = chars.find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE);
- preCorrectionArray.appendArray(entry.data.i32, entry.count);
- chars.update(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, preCorrectionArray);
- derivedCharKeys.push(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
- }
-
- // Add those newly added keys to AVAILABLE_CHARACTERISTICS_KEYS
- // This has to be done at the end of this function.
- if (derivedCharKeys.size() > 0) {
- appendAvailableKeys(
- chars, ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, derivedCharKeys);
- }
- if (derivedRequestKeys.size() > 0) {
- appendAvailableKeys(
- chars, ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, derivedRequestKeys);
- }
- if (derivedResultKeys.size() > 0) {
- appendAvailableKeys(
- chars, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, derivedResultKeys);
- }
- return;
-}
-
-void CameraModule::appendAvailableKeys(CameraMetadata &chars,
- int32_t keyTag, const Vector<int32_t>& appendKeys) {
- camera_metadata_entry entry = chars.find(keyTag);
- Vector<int32_t> availableKeys;
- availableKeys.setCapacity(entry.count + appendKeys.size());
- for (size_t i = 0; i < entry.count; i++) {
- availableKeys.push(entry.data.i32[i]);
- }
- for (size_t i = 0; i < appendKeys.size(); i++) {
- availableKeys.push(appendKeys[i]);
- }
- chars.update(keyTag, availableKeys);
-}
-
-CameraModule::CameraModule(camera_module_t *module) {
- if (module == NULL) {
- ALOGE("%s: camera hardware module must not be null", __FUNCTION__);
- assert(0);
- }
- mModule = module;
-}
-
-CameraModule::~CameraModule()
-{
- while (mCameraInfoMap.size() > 0) {
- camera_info cameraInfo = mCameraInfoMap.editValueAt(0);
- if (cameraInfo.static_camera_characteristics != NULL) {
- free_camera_metadata(
- const_cast<camera_metadata_t*>(cameraInfo.static_camera_characteristics));
- }
- mCameraInfoMap.removeItemsAt(0);
- }
-}
-
-int CameraModule::init() {
- ATRACE_CALL();
- int res = OK;
- if (getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_4 &&
- mModule->init != NULL) {
- ATRACE_BEGIN("camera_module->init");
- res = mModule->init();
- ATRACE_END();
- }
- mCameraInfoMap.setCapacity(getNumberOfCameras());
- return res;
-}
-
-int CameraModule::getCameraInfo(int cameraId, struct camera_info *info) {
- ATRACE_CALL();
- Mutex::Autolock lock(mCameraInfoLock);
- if (cameraId < 0) {
- ALOGE("%s: Invalid camera ID %d", __FUNCTION__, cameraId);
- return -EINVAL;
- }
-
- // Only override static_camera_characteristics for API2 devices
- int apiVersion = mModule->common.module_api_version;
- if (apiVersion < CAMERA_MODULE_API_VERSION_2_0) {
- int ret;
- ATRACE_BEGIN("camera_module->get_camera_info");
- ret = mModule->get_camera_info(cameraId, info);
- // Fill in this so CameraService won't be confused by
- // possibly 0 device_version
- info->device_version = CAMERA_DEVICE_API_VERSION_1_0;
- ATRACE_END();
- return ret;
- }
-
- ssize_t index = mCameraInfoMap.indexOfKey(cameraId);
- if (index == NAME_NOT_FOUND) {
- // Get camera info from raw module and cache it
- camera_info rawInfo, cameraInfo;
- ATRACE_BEGIN("camera_module->get_camera_info");
- int ret = mModule->get_camera_info(cameraId, &rawInfo);
- ATRACE_END();
- if (ret != 0) {
- return ret;
- }
- int deviceVersion = rawInfo.device_version;
- if (deviceVersion < CAMERA_DEVICE_API_VERSION_3_0) {
- // static_camera_characteristics is invalid
- *info = rawInfo;
- return ret;
- }
- CameraMetadata m;
- m = rawInfo.static_camera_characteristics;
- deriveCameraCharacteristicsKeys(rawInfo.device_version, m);
- cameraInfo = rawInfo;
- cameraInfo.static_camera_characteristics = m.release();
- index = mCameraInfoMap.add(cameraId, cameraInfo);
- }
-
- assert(index != NAME_NOT_FOUND);
- // return the cached camera info
- *info = mCameraInfoMap[index];
- return OK;
-}
-
-int CameraModule::open(const char* id, struct hw_device_t** device) {
- int res;
- ATRACE_BEGIN("camera_module->open");
- res = filterOpenErrorCode(mModule->common.methods->open(&mModule->common, id, device));
- ATRACE_END();
- return res;
-}
-
-int CameraModule::openLegacy(
- const char* id, uint32_t halVersion, struct hw_device_t** device) {
- int res;
- ATRACE_BEGIN("camera_module->open_legacy");
- res = mModule->open_legacy(&mModule->common, id, halVersion, device);
- ATRACE_END();
- return res;
-}
-
-int CameraModule::getNumberOfCameras() {
- int numCameras;
- ATRACE_BEGIN("camera_module->get_number_of_cameras");
- numCameras = mModule->get_number_of_cameras();
- ATRACE_END();
- return numCameras;
-}
-
-int CameraModule::setCallbacks(const camera_module_callbacks_t *callbacks) {
- int res;
- ATRACE_BEGIN("camera_module->set_callbacks");
- res = mModule->set_callbacks(callbacks);
- ATRACE_END();
- return res;
-}
-
-bool CameraModule::isVendorTagDefined() {
- return mModule->get_vendor_tag_ops != NULL;
-}
-
-void CameraModule::getVendorTagOps(vendor_tag_ops_t* ops) {
- if (mModule->get_vendor_tag_ops) {
- ATRACE_BEGIN("camera_module->get_vendor_tag_ops");
- mModule->get_vendor_tag_ops(ops);
- ATRACE_END();
- }
-}
-
-int CameraModule::setTorchMode(const char* camera_id, bool enable) {
- int res;
- ATRACE_BEGIN("camera_module->set_torch_mode");
- res = mModule->set_torch_mode(camera_id, enable);
- ATRACE_END();
- return res;
-}
-
-status_t CameraModule::filterOpenErrorCode(status_t err) {
- switch(err) {
- case NO_ERROR:
- case -EBUSY:
- case -EINVAL:
- case -EUSERS:
- return err;
- default:
- break;
- }
- return -ENODEV;
-}
-
-uint16_t CameraModule::getModuleApiVersion() {
- return mModule->common.module_api_version;
-}
-
-const char* CameraModule::getModuleName() {
- return mModule->common.name;
-}
-
-uint16_t CameraModule::getHalApiVersion() {
- return mModule->common.hal_api_version;
-}
-
-const char* CameraModule::getModuleAuthor() {
- return mModule->common.author;
-}
-
-void* CameraModule::getDso() {
- return mModule->common.dso;
-}
-
-}; // namespace android
diff --git a/services/camera/libcameraservice/common/CameraModule.h b/services/camera/libcameraservice/common/CameraModule.h
deleted file mode 100644
index d131a26..0000000
--- a/services/camera/libcameraservice/common/CameraModule.h
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright (C) 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_SERVERS_CAMERA_CAMERAMODULE_H
-#define ANDROID_SERVERS_CAMERA_CAMERAMODULE_H
-
-#include <hardware/camera.h>
-#include <camera/CameraMetadata.h>
-#include <utils/Mutex.h>
-#include <utils/KeyedVector.h>
-
-namespace android {
-/**
- * A wrapper class for HAL camera module.
- *
- * This class wraps camera_module_t returned from HAL to provide a wrapped
- * get_camera_info implementation through which CameraService generates
- * camera characteristics keys defined in newer HAL versions on an older HAL.
- */
-class CameraModule {
-public:
- explicit CameraModule(camera_module_t *module);
- virtual ~CameraModule();
-
- // Must be called after construction
- // Returns OK on success, NO_INIT on failure
- int init();
-
- int getCameraInfo(int cameraId, struct camera_info *info);
- int getNumberOfCameras(void);
- int open(const char* id, struct hw_device_t** device);
- int openLegacy(const char* id, uint32_t halVersion, struct hw_device_t** device);
- int setCallbacks(const camera_module_callbacks_t *callbacks);
- bool isVendorTagDefined();
- void getVendorTagOps(vendor_tag_ops_t* ops);
- int setTorchMode(const char* camera_id, bool enable);
- uint16_t getModuleApiVersion();
- const char* getModuleName();
- uint16_t getHalApiVersion();
- const char* getModuleAuthor();
- // Only used by CameraModuleFixture native test. Do NOT use elsewhere.
- void *getDso();
-
-private:
- // Derive camera characteristics keys defined after HAL device version
- static void deriveCameraCharacteristicsKeys(uint32_t deviceVersion, CameraMetadata &chars);
- // Helper function to append available[request|result|chars]Keys
- static void appendAvailableKeys(CameraMetadata &chars,
- int32_t keyTag, const Vector<int32_t>& appendKeys);
- status_t filterOpenErrorCode(status_t err);
- camera_module_t *mModule;
- KeyedVector<int, camera_info> mCameraInfoMap;
- Mutex mCameraInfoLock;
-};
-
-} // namespace android
-
-#endif
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index f3a81cb..38fe1b6 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -82,12 +82,16 @@
return count;
}
-int CameraProviderManager::getStandardCameraCount() const {
+int CameraProviderManager::getAPI1CompatibleCameraCount() const {
std::lock_guard<std::mutex> lock(mInterfaceMutex);
int count = 0;
for (auto& provider : mProviders) {
if (kStandardProviderTypes.find(provider->getType()) != std::string::npos) {
- count += provider->mUniqueDeviceCount;
+ for (auto& device : provider->mDevices) {
+ if (device->isAPI1Compatible()) {
+ count++;
+ }
+ }
}
}
return count;
@@ -104,13 +108,15 @@
return deviceIds;
}
-std::vector<std::string> CameraProviderManager::getStandardCameraDeviceIds() const {
+std::vector<std::string> CameraProviderManager::getAPI1CompatibleCameraDeviceIds() const {
std::lock_guard<std::mutex> lock(mInterfaceMutex);
std::vector<std::string> deviceIds;
for (auto& provider : mProviders) {
if (kStandardProviderTypes.find(provider->getType()) != std::string::npos) {
- for (auto& id : provider->mUniqueCameraIds) {
- deviceIds.push_back(id);
+ for (auto& device : provider->mDevices) {
+ if (device->isAPI1Compatible()) {
+ deviceIds.push_back(device->mId);
+ }
}
}
}
@@ -983,6 +989,20 @@
return OK;
}
+bool CameraProviderManager::ProviderInfo::DeviceInfo3::isAPI1Compatible() const {
+ bool isBackwardCompatible = false;
+ camera_metadata_ro_entry_t caps = mCameraCharacteristics.find(
+ ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+ for (size_t i = 0; i < caps.count; i++) {
+ if (caps.data.u8[i] ==
+ ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE) {
+ isBackwardCompatible = true;
+ break;
+ }
+ }
+
+ return isBackwardCompatible;
+}
status_t CameraProviderManager::ProviderInfo::DeviceInfo3::getCameraCharacteristics(
CameraMetadata *characteristics) const {
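
The new DeviceInfo3::isAPI1Compatible() predicate above simply scans the ANDROID_REQUEST_AVAILABLE_CAPABILITIES entry for the BACKWARD_COMPATIBLE value; getAPI1CompatibleCameraCount() and getAPI1CompatibleCameraDeviceIds() then count or collect only the devices that pass it. A minimal standalone sketch of the same scan follows, using plain standard-library types instead of camera_metadata_ro_entry_t; the numeric value of the capability constant is an assumption used only to make the sketch self-contained.

#include <cstdint>
#include <vector>

// Illustrative stand-in for ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE;
// the value is assumed here, the real constant comes from the camera metadata headers.
static const uint8_t kBackwardCompatible = 0;

// Same scan as DeviceInfo3::isAPI1Compatible(), over a plain vector instead of a
// camera_metadata_ro_entry_t.
static bool isBackwardCompatible(const std::vector<uint8_t>& capabilities) {
    for (uint8_t cap : capabilities) {
        if (cap == kBackwardCompatible) {
            return true;
        }
    }
    return false;
}

int main() {
    std::vector<uint8_t> caps = {kBackwardCompatible, 6 /* some other capability */};
    return isBackwardCompatible(caps) ? 0 : 1;
}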
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index 2df4fd5..3afc1d9 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -125,16 +125,16 @@
int getCameraCount() const;
/**
- * Retrieve the number of 'standard' cameras; these are internal and
+ * Retrieve the number of API1 compatible cameras; these are internal and
* backwards-compatible. This is the set of cameras that will be
* accessible via the old camera API, with IDs in range of
- * [0, getStandardCameraCount()-1]. This value is not expected to change dynamically.
+ * [0, getAPI1CompatibleCameraCount()-1]. This value is not expected to change dynamically.
*/
- int getStandardCameraCount() const;
+ int getAPI1CompatibleCameraCount() const;
std::vector<std::string> getCameraDeviceIds() const;
- std::vector<std::string> getStandardCameraDeviceIds() const;
+ std::vector<std::string> getAPI1CompatibleCameraDeviceIds() const;
/**
* Return true if a device with a given ID and major version exists
@@ -291,6 +291,7 @@
bool hasFlashUnit() const { return mHasFlashUnit; }
virtual status_t setTorchMode(bool enabled) = 0;
virtual status_t getCameraInfo(hardware::CameraInfo *info) const = 0;
+ virtual bool isAPI1Compatible() const = 0;
virtual status_t getCameraCharacteristics(CameraMetadata *characteristics) const {
(void) characteristics;
return INVALID_OPERATION;
@@ -321,7 +322,8 @@
virtual status_t setTorchMode(bool enabled) override;
virtual status_t getCameraInfo(hardware::CameraInfo *info) const override;
-
+ // DeviceInfo1 devices are always assumed to be API1 compatible
+ virtual bool isAPI1Compatible() const override { return true; }
DeviceInfo1(const std::string& name, const metadata_vendor_id_t tagId,
const std::string &id, uint16_t minorVersion,
const hardware::camera::common::V1_0::CameraResourceCost& resourceCost,
@@ -338,6 +340,7 @@
virtual status_t setTorchMode(bool enabled) override;
virtual status_t getCameraInfo(hardware::CameraInfo *info) const override;
+ virtual bool isAPI1Compatible() const override;
virtual status_t getCameraCharacteristics(
CameraMetadata *characteristics) const override;
diff --git a/services/camera/libcameraservice/common/FrameProcessorBase.cpp b/services/camera/libcameraservice/common/FrameProcessorBase.cpp
index 9e78f88..41c953b 100644
--- a/services/camera/libcameraservice/common/FrameProcessorBase.cpp
+++ b/services/camera/libcameraservice/common/FrameProcessorBase.cpp
@@ -32,8 +32,7 @@
mDevice(device),
mNumPartialResults(1) {
sp<CameraDeviceBase> cameraDevice = device.promote();
- if (cameraDevice != 0 &&
- cameraDevice->getDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_2) {
+ if (cameraDevice != 0) {
CameraMetadata staticInfo = cameraDevice->info();
camera_metadata_entry_t entry = staticInfo.find(ANDROID_REQUEST_PARTIAL_RESULT_COUNT);
if (entry.count > 0) {
@@ -171,18 +170,8 @@
camera_metadata_ro_entry_t entry;
// Check if this result is partial.
- bool isPartialResult = false;
- if (device->getDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_2) {
- isPartialResult = result.mResultExtras.partialResultCount < mNumPartialResults;
- } else {
- entry = result.mMetadata.find(ANDROID_QUIRKS_PARTIAL_RESULT);
- if (entry.count != 0 &&
- entry.data.u8[0] == ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
- ALOGV("%s: Camera %s: This is a partial result",
- __FUNCTION__, device->getId().string());
- isPartialResult = true;
- }
- }
+ bool isPartialResult =
+ result.mResultExtras.partialResultCount < mNumPartialResults;
// TODO: instead of getting requestID from CameraMetadata, we should get it
// from CaptureResultExtras. This will require changing Camera2Device.
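
With the pre-HAL3.2 quirk path removed, the only partial-result rule left in FrameProcessorBase is the one defined by ANDROID_REQUEST_PARTIAL_RESULT_COUNT: a result is partial while its partialResultCount is below that advertised total. A minimal sketch of that check with made-up numbers:

#include <cstdint>
#include <cstdio>

int main() {
    // Assume the HAL advertised ANDROID_REQUEST_PARTIAL_RESULT_COUNT = 3 (illustrative).
    const int32_t numPartialResults = 3;
    for (int32_t partialResultCount = 1; partialResultCount <= numPartialResults;
            ++partialResultCount) {
        const bool isPartialResult = partialResultCount < numPartialResults;
        std::printf("partialResultCount %d of %d -> %s\n",
                partialResultCount, numPartialResults,
                isPartialResult ? "partial" : "final");
    }
    return 0;
}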
diff --git a/services/camera/libcameraservice/device1/CameraHardwareInterface.cpp b/services/camera/libcameraservice/device1/CameraHardwareInterface.cpp
index 9df7cd4..469c86c 100644
--- a/services/camera/libcameraservice/device1/CameraHardwareInterface.cpp
+++ b/services/camera/libcameraservice/device1/CameraHardwareInterface.cpp
@@ -41,37 +41,6 @@
}
}
-status_t CameraHardwareInterface::initialize(CameraModule *module)
-{
- if (mHidlDevice != nullptr) {
- ALOGE("%s: camera hardware interface has been initialized to HIDL path!", __FUNCTION__);
- return INVALID_OPERATION;
- }
- ALOGI("Opening camera %s", mName.string());
- camera_info info;
- status_t res = module->getCameraInfo(atoi(mName.string()), &info);
- if (res != OK) {
- return res;
- }
-
- int rc = OK;
- if (module->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_3 &&
- info.device_version > CAMERA_DEVICE_API_VERSION_1_0) {
- // Open higher version camera device as HAL1.0 device.
- rc = module->openLegacy(mName.string(),
- CAMERA_DEVICE_API_VERSION_1_0,
- (hw_device_t **)&mDevice);
- } else {
- rc = module->open(mName.string(), (hw_device_t **)&mDevice);
- }
- if (rc != OK) {
- ALOGE("Could not open camera %s: %d", mName.string(), rc);
- return rc;
- }
- initHalPreviewWindow();
- return rc;
-}
-
status_t CameraHardwareInterface::initialize(sp<CameraProviderManager> manager) {
if (mDevice) {
ALOGE("%s: camera hardware interface has been initialized to libhardware path!",
diff --git a/services/camera/libcameraservice/device1/CameraHardwareInterface.h b/services/camera/libcameraservice/device1/CameraHardwareInterface.h
index 907065f..1c38d00 100644
--- a/services/camera/libcameraservice/device1/CameraHardwareInterface.h
+++ b/services/camera/libcameraservice/device1/CameraHardwareInterface.h
@@ -28,7 +28,6 @@
#include <system/window.h>
#include <hardware/camera.h>
-#include <common/CameraModule.h>
#include <common/CameraProviderManager.h>
namespace android {
@@ -107,7 +106,6 @@
~CameraHardwareInterface();
- status_t initialize(CameraModule *module);
status_t initialize(sp<CameraProviderManager> manager);
/** Set the ANativeWindow to which preview frames are sent */
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index b64488c..4706319 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -94,82 +94,6 @@
return mId;
}
-/**
- * CameraDeviceBase interface
- */
-
-status_t Camera3Device::initialize(CameraModule *module)
-{
- ATRACE_CALL();
- Mutex::Autolock il(mInterfaceLock);
- Mutex::Autolock l(mLock);
-
- ALOGV("%s: Initializing device for camera %s", __FUNCTION__, mId.string());
- if (mStatus != STATUS_UNINITIALIZED) {
- CLOGE("Already initialized!");
- return INVALID_OPERATION;
- }
-
- /** Open HAL device */
-
- status_t res;
-
- camera3_device_t *device;
-
- ATRACE_BEGIN("CameraHal::open");
- res = module->open(mId.string(),
- reinterpret_cast<hw_device_t**>(&device));
- ATRACE_END();
-
- if (res != OK) {
- SET_ERR_L("Could not open camera: %s (%d)", strerror(-res), res);
- return res;
- }
-
- /** Cross-check device version */
- if (device->common.version < CAMERA_DEVICE_API_VERSION_3_2) {
- SET_ERR_L("Could not open camera: "
- "Camera device should be at least %x, reports %x instead",
- CAMERA_DEVICE_API_VERSION_3_2,
- device->common.version);
- device->common.close(&device->common);
- return BAD_VALUE;
- }
-
- camera_info info;
- res = module->getCameraInfo(atoi(mId), &info);
- if (res != OK) return res;
-
- if (info.device_version != device->common.version) {
- SET_ERR_L("HAL reporting mismatched camera_info version (%x)"
- " and device version (%x).",
- info.device_version, device->common.version);
- device->common.close(&device->common);
- return BAD_VALUE;
- }
-
- /** Initialize device with callback functions */
-
- ATRACE_BEGIN("CameraHal::initialize");
- res = device->ops->initialize(device, this);
- ATRACE_END();
-
- if (res != OK) {
- SET_ERR_L("Unable to initialize HAL device: %s (%d)",
- strerror(-res), res);
- device->common.close(&device->common);
- return res;
- }
-
- /** Everything is good to go */
-
- mDeviceVersion = device->common.version;
- mDeviceInfo = info.static_camera_characteristics;
- mInterface = std::make_unique<HalInterface>(device);
-
- return initializeCommonLocked();
-}
-
status_t Camera3Device::initialize(sp<CameraProviderManager> manager) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
@@ -212,8 +136,7 @@
if (!requestQueueRet.isOk()) {
ALOGE("Transaction error when getting request metadata fmq: %s, not use it",
requestQueueRet.description().c_str());
- queue = nullptr;
- // Don't use the queue onwards.
+ return DEAD_OBJECT;
}
auto resultQueueRet = session->getCaptureResultMetadataQueue(
[&queue = mResultMetadataQueue](const auto& descriptor) {
@@ -227,13 +150,9 @@
if (!resultQueueRet.isOk()) {
ALOGE("Transaction error when getting result metadata queue from camera session: %s",
resultQueueRet.description().c_str());
- mResultMetadataQueue = nullptr;
- // Don't use the queue onwards.
+ return DEAD_OBJECT;
}
- // TODO: camera service will absorb 3_2/3_3/3_4 differences in the future
- // for now use 3_4 to keep legacy devices working
- mDeviceVersion = CAMERA_DEVICE_API_VERSION_3_4;
mInterface = std::make_unique<HalInterface>(session, queue);
std::string providerType;
mVendorTagId = manager->getProviderTagIdLocked(mId.string());
@@ -262,17 +181,8 @@
mTagMonitor.initialize(mVendorTagId);
- bool aeLockAvailable = false;
- camera_metadata_entry aeLockAvailableEntry = mDeviceInfo.find(
- ANDROID_CONTROL_AE_LOCK_AVAILABLE);
- if (aeLockAvailableEntry.count > 0) {
- aeLockAvailable = (aeLockAvailableEntry.data.u8[0] ==
- ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE);
- }
-
/** Start up request queue thread */
- mRequestThread = new RequestThread(this, mStatusTracker, mInterface.get(), mDeviceVersion,
- aeLockAvailable);
+ mRequestThread = new RequestThread(this, mStatusTracker, mInterface.get());
res = mRequestThread->run(String8::format("C3Dev-%s-ReqQueue", mId.string()).string());
if (res != OK) {
SET_ERR_L("Unable to start request queue thread: %s (%d)",
@@ -284,13 +194,6 @@
mPreparerThread = new PreparerThread();
- // Determine whether we need to derive sensitivity boost values for older devices.
- // If post-RAW sensitivity boost range is listed, so should post-raw sensitivity control
- // be listed (as the default value 100)
- if (mDeviceInfo.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE)) {
- mDerivePostRawSensKey = true;
- }
-
internalUpdateStatusLocked(STATUS_UNCONFIGURED);
mNextStreamId = 0;
mDummyStreamId = NO_STREAM;
@@ -306,19 +209,11 @@
}
// Will the HAL be sending in early partial result metadata?
- if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2) {
- camera_metadata_entry partialResultsCount =
- mDeviceInfo.find(ANDROID_REQUEST_PARTIAL_RESULT_COUNT);
- if (partialResultsCount.count > 0) {
- mNumPartialResults = partialResultsCount.data.i32[0];
- mUsePartialResult = (mNumPartialResults > 1);
- }
- } else {
- camera_metadata_entry partialResultsQuirk =
- mDeviceInfo.find(ANDROID_QUIRKS_USE_PARTIAL_RESULT);
- if (partialResultsQuirk.count > 0 && partialResultsQuirk.data.u8[0] == 1) {
- mUsePartialResult = true;
- }
+ camera_metadata_entry partialResultsCount =
+ mDeviceInfo.find(ANDROID_REQUEST_PARTIAL_RESULT_COUNT);
+ if (partialResultsCount.count > 0) {
+ mNumPartialResults = partialResultsCount.data.i32[0];
+ mUsePartialResult = (mNumPartialResults > 1);
}
camera_metadata_entry configs =
@@ -434,48 +329,32 @@
Camera3Device::Size Camera3Device::getMaxJpegResolution() const {
int32_t maxJpegWidth = 0, maxJpegHeight = 0;
- if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2) {
- const int STREAM_CONFIGURATION_SIZE = 4;
- const int STREAM_FORMAT_OFFSET = 0;
- const int STREAM_WIDTH_OFFSET = 1;
- const int STREAM_HEIGHT_OFFSET = 2;
- const int STREAM_IS_INPUT_OFFSET = 3;
- camera_metadata_ro_entry_t availableStreamConfigs =
- mDeviceInfo.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
- if (availableStreamConfigs.count == 0 ||
- availableStreamConfigs.count % STREAM_CONFIGURATION_SIZE != 0) {
- return Size(0, 0);
- }
+ const int STREAM_CONFIGURATION_SIZE = 4;
+ const int STREAM_FORMAT_OFFSET = 0;
+ const int STREAM_WIDTH_OFFSET = 1;
+ const int STREAM_HEIGHT_OFFSET = 2;
+ const int STREAM_IS_INPUT_OFFSET = 3;
+ camera_metadata_ro_entry_t availableStreamConfigs =
+ mDeviceInfo.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
+ if (availableStreamConfigs.count == 0 ||
+ availableStreamConfigs.count % STREAM_CONFIGURATION_SIZE != 0) {
+ return Size(0, 0);
+ }
- // Get max jpeg size (area-wise).
- for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
- int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
- int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
- int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
- int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
- if (isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
- && format == HAL_PIXEL_FORMAT_BLOB &&
- (width * height > maxJpegWidth * maxJpegHeight)) {
- maxJpegWidth = width;
- maxJpegHeight = height;
- }
- }
- } else {
- camera_metadata_ro_entry availableJpegSizes =
- mDeviceInfo.find(ANDROID_SCALER_AVAILABLE_JPEG_SIZES);
- if (availableJpegSizes.count == 0 || availableJpegSizes.count % 2 != 0) {
- return Size(0, 0);
- }
-
- // Get max jpeg size (area-wise).
- for (size_t i = 0; i < availableJpegSizes.count; i += 2) {
- if ((availableJpegSizes.data.i32[i] * availableJpegSizes.data.i32[i + 1])
- > (maxJpegWidth * maxJpegHeight)) {
- maxJpegWidth = availableJpegSizes.data.i32[i];
- maxJpegHeight = availableJpegSizes.data.i32[i + 1];
- }
+ // Get max jpeg size (area-wise).
+ for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
+ int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
+ int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
+ int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
+ int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
+ if (isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
+ && format == HAL_PIXEL_FORMAT_BLOB &&
+ (width * height > maxJpegWidth * maxJpegHeight)) {
+ maxJpegWidth = width;
+ maxJpegHeight = height;
}
}
+
return Size(maxJpegWidth, maxJpegHeight);
}
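
The stream-configuration entry parsed above is a flat int32_t list of (format, width, height, isInput) quadruples, and the loop keeps the largest BLOB output stream by area. A standalone sketch with sample values follows; the numeric constants for HAL_PIXEL_FORMAT_BLOB and the OUTPUT direction are assumptions, used only to make the example self-contained.

#include <cstdint>
#include <cstdio>

int main() {
    const int32_t kBlob = 0x21;     // assumed value of HAL_PIXEL_FORMAT_BLOB
    const int32_t kOutput = 0;      // assumed value of ..._STREAM_CONFIGURATIONS_OUTPUT
    // Sample (format, width, height, isInput) quadruples, not from a real HAL.
    const int32_t configs[] = {
        kBlob, 1920, 1080, kOutput,
        kBlob, 4032, 3024, kOutput,
        0x23,  4032, 3024, kOutput,   // some non-BLOB format, ignored for JPEG sizing
    };
    int32_t maxJpegWidth = 0, maxJpegHeight = 0;
    const size_t kQuad = 4;
    for (size_t i = 0; i + kQuad <= sizeof(configs) / sizeof(configs[0]); i += kQuad) {
        if (configs[i] == kBlob && configs[i + 3] == kOutput &&
                configs[i + 1] * configs[i + 2] > maxJpegWidth * maxJpegHeight) {
            maxJpegWidth = configs[i + 1];
            maxJpegHeight = configs[i + 2];
        }
    }
    std::printf("max JPEG size: %dx%d\n", maxJpegWidth, maxJpegHeight);  // 4032x3024
    return 0;
}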
@@ -497,31 +376,6 @@
return measured;
}
-/**
- * Map Android N dataspace definitions back to Android M definitions, for
- * use with HALv3.3 or older.
- *
- * Only map where correspondences exist, and otherwise preserve the value.
- */
-android_dataspace Camera3Device::mapToLegacyDataspace(android_dataspace dataSpace) {
- switch (dataSpace) {
- case HAL_DATASPACE_V0_SRGB_LINEAR:
- return HAL_DATASPACE_SRGB_LINEAR;
- case HAL_DATASPACE_V0_SRGB:
- return HAL_DATASPACE_SRGB;
- case HAL_DATASPACE_V0_JFIF:
- return HAL_DATASPACE_JFIF;
- case HAL_DATASPACE_V0_BT601_625:
- return HAL_DATASPACE_BT601_625;
- case HAL_DATASPACE_V0_BT601_525:
- return HAL_DATASPACE_BT601_525;
- case HAL_DATASPACE_V0_BT709:
- return HAL_DATASPACE_BT709;
- default:
- return dataSpace;
- }
-}
-
hardware::graphics::common::V1_0::PixelFormat Camera3Device::mapToPixelFormat(
int frameworkFormat) {
return (hardware::graphics::common::V1_0::PixelFormat) frameworkFormat;
@@ -1365,32 +1219,17 @@
assert(mStatus != STATUS_ACTIVE);
sp<Camera3OutputStream> newStream;
- // Overwrite stream set id to invalid for HAL3.2 or lower, as buffer manager does support
- // such devices.
- if (mDeviceVersion <= CAMERA_DEVICE_API_VERSION_3_2) {
- streamSetId = CAMERA3_STREAM_SET_ID_INVALID;
- }
if (consumers.size() == 0 && !hasDeferredConsumer) {
ALOGE("%s: Number of consumers cannot be smaller than 1", __FUNCTION__);
return BAD_VALUE;
}
- // HAL3.1 doesn't support deferred consumer stream creation as it requires buffer registration
- // which requires a consumer surface to be available.
- if (hasDeferredConsumer && mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_2) {
- ALOGE("HAL3.1 doesn't support deferred consumer stream creation");
- return BAD_VALUE;
- }
if (hasDeferredConsumer && format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
ALOGE("Deferred consumer stream creation only support IMPLEMENTATION_DEFINED format");
return BAD_VALUE;
}
- // Use legacy dataspace values for older HALs
- if (mDeviceVersion <= CAMERA_DEVICE_API_VERSION_3_3) {
- dataSpace = mapToLegacyDataspace(dataSpace);
- }
if (format == HAL_PIXEL_FORMAT_BLOB) {
ssize_t blobBufferSize;
if (dataSpace != HAL_DATASPACE_DEPTH) {
@@ -1433,15 +1272,7 @@
}
newStream->setStatusTracker(mStatusTracker);
- /**
- * Camera3 Buffer manager is only supported by HAL3.3 onwards, as the older HALs ( < HAL3.2)
- * requires buffers to be statically allocated for internal static buffer registration, while
- * the buffers provided by buffer manager are really dynamically allocated. For HAL3.2, because
- * not all HAL implementation supports dynamic buffer registeration, exlude it as well.
- */
- if (mDeviceVersion > CAMERA_DEVICE_API_VERSION_3_2) {
- newStream->setBufferManager(mBufferManager);
- }
+ newStream->setBufferManager(mBufferManager);
res = mOutputStreams.add(mNextStreamId, newStream);
if (res < 0) {
@@ -1659,15 +1490,6 @@
set_camera_metadata_vendor_id(rawRequest, mVendorTagId);
mRequestTemplateCache[templateId].acquire(rawRequest);
- // Derive some new keys for backward compatibility
- if (mDerivePostRawSensKey && !mRequestTemplateCache[templateId].exists(
- ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
- int32_t defaultBoost[1] = {100};
- mRequestTemplateCache[templateId].update(
- ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
- defaultBoost, 1);
- }
-
*request = mRequestTemplateCache[templateId];
return OK;
}
@@ -1919,15 +1741,7 @@
mRequestThread->clear(/*out*/frameNumber);
}
- status_t res;
- if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_1) {
- res = mRequestThread->flush();
- } else {
- Mutex::Autolock l(mLock);
- res = waitUntilDrainedLocked();
- }
-
- return res;
+ return mRequestThread->flush();
}
status_t Camera3Device::prepare(int streamId) {
@@ -1968,14 +1782,6 @@
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
- // Teardown can only be accomplished on devices that don't require register_stream_buffers,
- // since we cannot call register_stream_buffers except right after configure_streams.
- if (mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_2) {
- ALOGE("%s: Unable to tear down streams on device HAL v%x",
- __FUNCTION__, mDeviceVersion);
- return NO_INIT;
- }
-
sp<Camera3StreamInterface> stream;
ssize_t outputStreamIdx = mOutputStreams.indexOfKey(streamId);
if (outputStreamIdx == NAME_NOT_FOUND) {
@@ -2013,12 +1819,6 @@
return OK;
}
-uint32_t Camera3Device::getDeviceVersion() {
- ATRACE_CALL();
- Mutex::Autolock il(mInterfaceLock);
- return mDeviceVersion;
-}
-
/**
* Methods called by subclasses
*/
@@ -2511,14 +2311,13 @@
status_t Camera3Device::registerInFlight(uint32_t frameNumber,
int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
- const AeTriggerCancelOverride_t &aeTriggerCancelOverride,
bool hasAppCallback) {
ATRACE_CALL();
Mutex::Autolock l(mInFlightLock);
ssize_t res;
res = mInFlightMap.add(frameNumber, InFlightRequest(numBuffers, resultExtras, hasInput,
- aeTriggerCancelOverride, hasAppCallback));
+ hasAppCallback));
if (res < 0) return res;
if (mInFlightMap.size() == 1) {
@@ -2603,8 +2402,8 @@
}
}
-void Camera3Device::insertResultLocked(CaptureResult *result, uint32_t frameNumber,
- const AeTriggerCancelOverride_t &aeTriggerCancelOverride) {
+void Camera3Device::insertResultLocked(CaptureResult *result,
+ uint32_t frameNumber) {
if (result == nullptr) return;
camera_metadata_t *meta = const_cast<camera_metadata_t *>(
@@ -2623,8 +2422,6 @@
return;
}
- overrideResultForPrecaptureCancel(&result->mMetadata, aeTriggerCancelOverride);
-
// Valid result, insert into queue
List<CaptureResult>::iterator queuedResult =
mResultQueue.insert(mResultQueue.end(), CaptureResult(*result));
@@ -2639,15 +2436,14 @@
void Camera3Device::sendPartialCaptureResult(const camera_metadata_t * partialResult,
- const CaptureResultExtras &resultExtras, uint32_t frameNumber,
- const AeTriggerCancelOverride_t &aeTriggerCancelOverride) {
+ const CaptureResultExtras &resultExtras, uint32_t frameNumber) {
Mutex::Autolock l(mOutputLock);
CaptureResult captureResult;
captureResult.mResultExtras = resultExtras;
captureResult.mMetadata = partialResult;
- insertResultLocked(&captureResult, frameNumber, aeTriggerCancelOverride);
+ insertResultLocked(&captureResult, frameNumber);
}
@@ -2655,8 +2451,7 @@
CaptureResultExtras &resultExtras,
CameraMetadata &collectedPartialResult,
uint32_t frameNumber,
- bool reprocess,
- const AeTriggerCancelOverride_t &aeTriggerCancelOverride) {
+ bool reprocess) {
if (pendingMetadata.isEmpty())
return;
@@ -2690,15 +2485,6 @@
captureResult.mMetadata.append(collectedPartialResult);
}
- // Derive some new keys for backward compaibility
- if (mDerivePostRawSensKey && !captureResult.mMetadata.exists(
- ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
- int32_t defaultBoost[1] = {100};
- captureResult.mMetadata.update(
- ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
- defaultBoost, 1);
- }
-
captureResult.mMetadata.sort();
// Check that there's a timestamp in the result metadata
@@ -2712,7 +2498,7 @@
mTagMonitor.monitorMetadata(TagMonitor::RESULT,
frameNumber, timestamp.data.i64[0], captureResult.mMetadata);
- insertResultLocked(&captureResult, frameNumber, aeTriggerCancelOverride);
+ insertResultLocked(&captureResult, frameNumber);
}
/**
@@ -2732,10 +2518,7 @@
return;
}
- // For HAL3.2 or above, If HAL doesn't support partial, it must always set
- // partial_result to 1 when metadata is included in this result.
if (!mUsePartialResult &&
- mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2 &&
result->result != NULL &&
result->partial_result != 1) {
SET_ERR("Result is malformed for frame %d: partial_result %u must be 1"
@@ -2780,39 +2563,21 @@
// Check if this result carries only partial metadata
if (mUsePartialResult && result->result != NULL) {
- if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2) {
- if (result->partial_result > mNumPartialResults || result->partial_result < 1) {
- SET_ERR("Result is malformed for frame %d: partial_result %u must be in"
- " the range of [1, %d] when metadata is included in the result",
- frameNumber, result->partial_result, mNumPartialResults);
- return;
- }
- isPartialResult = (result->partial_result < mNumPartialResults);
- if (isPartialResult) {
- request.collectedPartialResult.append(result->result);
- }
- } else {
- camera_metadata_ro_entry_t partialResultEntry;
- res = find_camera_metadata_ro_entry(result->result,
- ANDROID_QUIRKS_PARTIAL_RESULT, &partialResultEntry);
- if (res != NAME_NOT_FOUND &&
- partialResultEntry.count > 0 &&
- partialResultEntry.data.u8[0] ==
- ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
- // A partial result. Flag this as such, and collect this
- // set of metadata into the in-flight entry.
- isPartialResult = true;
- request.collectedPartialResult.append(
- result->result);
- request.collectedPartialResult.erase(
- ANDROID_QUIRKS_PARTIAL_RESULT);
- }
+ if (result->partial_result > mNumPartialResults || result->partial_result < 1) {
+ SET_ERR("Result is malformed for frame %d: partial_result %u must be in"
+ " the range of [1, %d] when metadata is included in the result",
+ frameNumber, result->partial_result, mNumPartialResults);
+ return;
+ }
+ isPartialResult = (result->partial_result < mNumPartialResults);
+ if (isPartialResult) {
+ request.collectedPartialResult.append(result->result);
}
if (isPartialResult && request.hasCallback) {
// Send partial capture result
- sendPartialCaptureResult(result->result, request.resultExtras, frameNumber,
- request.aeTriggerCancelOverride);
+ sendPartialCaptureResult(result->result, request.resultExtras,
+ frameNumber);
}
}
@@ -2877,8 +2642,8 @@
CameraMetadata metadata;
metadata = result->result;
sendCaptureResult(metadata, request.resultExtras,
- collectedPartialResult, frameNumber, hasInputBufferInRequest,
- request.aeTriggerCancelOverride);
+ collectedPartialResult, frameNumber,
+ hasInputBufferInRequest);
}
}
@@ -3058,7 +2823,7 @@
// send pending result and buffers
sendCaptureResult(r.pendingMetadata, r.resultExtras,
r.collectedPartialResult, msg.frame_number,
- r.hasInputBuffer, r.aeTriggerCancelOverride);
+ r.hasInputBuffer);
}
returnOutputBuffers(r.pendingOutputBuffers.array(),
r.pendingOutputBuffers.size(), r.shutterTimestamp);
@@ -3096,9 +2861,6 @@
* HalInterface inner class methods
*/
-Camera3Device::HalInterface::HalInterface(camera3_device_t *device) :
- mHal3Device(device) {}
-
Camera3Device::HalInterface::HalInterface(
sp<ICameraDeviceSession> &session,
std::shared_ptr<RequestMetadataQueue> queue) :
@@ -3171,7 +2933,7 @@
// Unknown template ID
return BAD_VALUE;
}
- mHidlSession->constructDefaultRequestSettings(id,
+ auto err = mHidlSession->constructDefaultRequestSettings(id,
[&status, &requestTemplate]
(common::V1_0::Status s, const device::V3_2::CameraMetadata& request) {
status = s;
@@ -3193,7 +2955,12 @@
}
}
});
- res = CameraProviderManager::mapToStatusT(status);
+ if (!err.isOk()) {
+ ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
+ res = DEAD_OBJECT;
+ } else {
+ res = CameraProviderManager::mapToStatusT(status);
+ }
}
return res;
}
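
The same error-handling convention is applied to every HIDL call in HalInterface (constructDefaultRequestSettings, configureStreams, processCaptureRequest, flush, close): the transport-level return object must pass isOk() before the payload status is mapped, and a failed transaction is reported as DEAD_OBJECT. A rough standalone sketch of that convention, using stand-in types rather than the real android::hardware::Return<>:

#include <cstdio>
#include <string>

// Stand-ins for the real HIDL types, just to show the shape of the check.
struct FakeReturn {
    bool transportOk;
    std::string what;
    bool isOk() const { return transportOk; }
    const std::string& description() const { return what; }
};

static const int kOk = 0;
static const int kDeadObject = -32;   // stand-in for DEAD_OBJECT

static int mapCallResult(const FakeReturn& err, int payloadStatus) {
    if (!err.isOk()) {
        // Transport failed: the payload status is meaningless, treat the binding as dead.
        std::fprintf(stderr, "Transaction error: %s\n", err.description().c_str());
        return kDeadObject;
    }
    return payloadStatus;   // in the real code: CameraProviderManager::mapToStatusT(status)
}

int main() {
    FakeReturn good{true, ""};
    FakeReturn dead{false, "remote process died"};
    std::printf("%d %d\n", mapCallResult(good, kOk), mapCallResult(dead, kOk));
    return 0;
}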
@@ -3267,12 +3034,17 @@
HalStreamConfiguration finalConfiguration;
common::V1_0::Status status;
- mHidlSession->configureStreams(requestedConfiguration,
+ auto err = mHidlSession->configureStreams(requestedConfiguration,
[&status, &finalConfiguration]
(common::V1_0::Status s, const HalStreamConfiguration& halConfiguration) {
finalConfiguration = halConfiguration;
status = s;
});
+ if (!err.isOk()) {
+ ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
+ return DEAD_OBJECT;
+ }
+
if (status != common::V1_0::Status::OK ) {
return CameraProviderManager::mapToStatusT(status);
}
@@ -3458,12 +3230,15 @@
captureRequest->fmqSettingsSize = 0u;
}
}
- mHidlSession->processCaptureRequest(captureRequests, cachesToRemove,
+ auto err = mHidlSession->processCaptureRequest(captureRequests, cachesToRemove,
[&status, &numRequestProcessed] (auto s, uint32_t n) {
status = s;
*numRequestProcessed = n;
});
-
+ if (!err.isOk()) {
+ ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
+ return DEAD_OBJECT;
+ }
if (status == common::V1_0::Status::OK && *numRequestProcessed != batchSize) {
ALOGE("%s: processCaptureRequest returns OK but processed %d/%zu requests",
__FUNCTION__, *numRequestProcessed, batchSize);
@@ -3501,7 +3276,13 @@
if (mHal3Device != nullptr) {
res = mHal3Device->ops->flush(mHal3Device);
} else {
- res = CameraProviderManager::mapToStatusT(mHidlSession->flush());
+ auto err = mHidlSession->flush();
+ if (!err.isOk()) {
+ ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
+ res = DEAD_OBJECT;
+ } else {
+ res = CameraProviderManager::mapToStatusT(err);
+ }
}
return res;
}
@@ -3527,7 +3308,11 @@
if (mHal3Device != nullptr) {
mHal3Device->common.close(&mHal3Device->common);
} else {
- mHidlSession->close();
+ auto err = mHidlSession->close();
+ // Interface will be dead shortly anyway, so don't log errors
+ if (!err.isOk()) {
+ res = DEAD_OBJECT;
+ }
}
return res;
}
@@ -3606,14 +3391,11 @@
Camera3Device::RequestThread::RequestThread(wp<Camera3Device> parent,
sp<StatusTracker> statusTracker,
- HalInterface* interface,
- uint32_t deviceVersion,
- bool aeLockAvailable) :
+ HalInterface* interface) :
Thread(/*canCallJava*/false),
mParent(parent),
mStatusTracker(statusTracker),
mInterface(interface),
- mDeviceVersion(deviceVersion),
mListener(nullptr),
mId(getId(parent)),
mReconfigured(false),
@@ -3625,7 +3407,6 @@
mCurrentPreCaptureTriggerId(0),
mRepeatingLastFrameNumber(
hardware::camera2::ICameraDeviceUser::NO_IN_FLIGHT_REPEATING_FRAMES),
- mAeLockAvailable(aeLockAvailable),
mPrepareVideoStream(false) {
mStatusId = statusTracker->addComponent();
}
@@ -3820,11 +3601,7 @@
ATRACE_CALL();
Mutex::Autolock l(mFlushLock);
- if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_1) {
- return mInterface->flush();
- }
-
- return -ENOTSUP;
+ return mInterface->flush();
}
void Camera3Device::RequestThread::setPaused(bool paused) {
@@ -3857,65 +3634,6 @@
mRequestSignal.signal();
}
-
-/**
- * For devices <= CAMERA_DEVICE_API_VERSION_3_2, AE_PRECAPTURE_TRIGGER_CANCEL is not supported so
- * we need to override AE_PRECAPTURE_TRIGGER_CANCEL to AE_PRECAPTURE_TRIGGER_IDLE and AE_LOCK_OFF
- * to AE_LOCK_ON to start cancelling AE precapture. If AE lock is not available, it still overrides
- * AE_PRECAPTURE_TRIGGER_CANCEL to AE_PRECAPTURE_TRIGGER_IDLE but doesn't add AE_LOCK_ON to the
- * request.
- */
-void Camera3Device::RequestThread::handleAePrecaptureCancelRequest(const sp<CaptureRequest>& request) {
- request->mAeTriggerCancelOverride.applyAeLock = false;
- request->mAeTriggerCancelOverride.applyAePrecaptureTrigger = false;
-
- if (mDeviceVersion > CAMERA_DEVICE_API_VERSION_3_2) {
- return;
- }
-
- camera_metadata_entry_t aePrecaptureTrigger =
- request->mSettings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
- if (aePrecaptureTrigger.count > 0 &&
- aePrecaptureTrigger.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL) {
- // Always override CANCEL to IDLE
- uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
- request->mSettings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1);
- request->mAeTriggerCancelOverride.applyAePrecaptureTrigger = true;
- request->mAeTriggerCancelOverride.aePrecaptureTrigger =
- ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL;
-
- if (mAeLockAvailable == true) {
- camera_metadata_entry_t aeLock = request->mSettings.find(ANDROID_CONTROL_AE_LOCK);
- if (aeLock.count == 0 || aeLock.data.u8[0] == ANDROID_CONTROL_AE_LOCK_OFF) {
- uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_ON;
- request->mSettings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
- request->mAeTriggerCancelOverride.applyAeLock = true;
- request->mAeTriggerCancelOverride.aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
- }
- }
- }
-}
-
-/**
- * Override result metadata for cancelling AE precapture trigger applied in
- * handleAePrecaptureCancelRequest().
- */
-void Camera3Device::overrideResultForPrecaptureCancel(
- CameraMetadata *result, const AeTriggerCancelOverride_t &aeTriggerCancelOverride) {
- if (aeTriggerCancelOverride.applyAeLock) {
- // Only devices <= v3.2 should have this override
- assert(mDeviceVersion <= CAMERA_DEVICE_API_VERSION_3_2);
- result->update(ANDROID_CONTROL_AE_LOCK, &aeTriggerCancelOverride.aeLock, 1);
- }
-
- if (aeTriggerCancelOverride.applyAePrecaptureTrigger) {
- // Only devices <= v3.2 should have this override
- assert(mDeviceVersion <= CAMERA_DEVICE_API_VERSION_3_2);
- result->update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
- &aeTriggerCancelOverride.aePrecaptureTrigger, 1);
- }
-}
-
void Camera3Device::RequestThread::checkAndStopRepeatingRequest() {
bool surfaceAbandoned = false;
int64_t lastFrameNumber = 0;
@@ -4283,7 +4001,6 @@
res = parent->registerInFlight(halRequest->frame_number,
totalNumBuffers, captureRequest->mResultExtras,
/*hasInput*/halRequest->input_buffer != NULL,
- captureRequest->mAeTriggerCancelOverride,
hasCallback);
ALOGVV("%s: registered in flight requestId = %" PRId32 ", frameNumber = %" PRId64
", burstId = %" PRId32 ".",
@@ -4542,8 +4259,6 @@
}
}
- handleAePrecaptureCancelRequest(nextRequest);
-
return nextRequest;
}
@@ -4629,9 +4344,7 @@
request->mResultExtras.afTriggerId = triggerId;
mCurrentAfTriggerId = triggerId;
}
- if (parent->mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2) {
- continue; // Trigger ID tag is deprecated since device HAL 3.2
- }
+ continue;
}
camera_metadata_entry entry = metadata.find(tag);
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 8b76a97..1ca6811 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -87,7 +87,6 @@
const String8& getId() const override;
// Transitions to idle state on success.
- status_t initialize(CameraModule *module) override;
status_t initialize(sp<CameraProviderManager> manager) override;
status_t disconnect() override;
status_t dump(int fd, const Vector<String16> &args) override;
@@ -166,8 +165,6 @@
status_t prepare(int maxCount, int streamId) override;
- uint32_t getDeviceVersion() override;
-
ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const override;
ssize_t getPointCloudBufferSize() const;
ssize_t getRawOpaqueBufferSize(int32_t width, int32_t height) const;
@@ -235,7 +232,6 @@
*/
class HalInterface : public camera3::Camera3StreamBufferFreedListener {
public:
- HalInterface(camera3_device_t *device);
HalInterface(sp<hardware::camera::device::V3_2::ICameraDeviceSession> &session,
std::shared_ptr<RequestMetadataQueue> queue);
HalInterface(const HalInterface &other);
@@ -342,12 +338,6 @@
CameraMetadata mRequestTemplateCache[CAMERA3_TEMPLATE_COUNT];
- uint32_t mDeviceVersion;
-
- // whether Camera3Device should derive ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST for
- // backward compatibility. Should not be changed after initialization.
- bool mDerivePostRawSensKey = false;
-
struct Size {
uint32_t width;
uint32_t height;
@@ -405,13 +395,6 @@
// words, camera device shouldn't be open during CPU suspend.
nsecs_t mTimestampOffset;
- typedef struct AeTriggerCancelOverride {
- bool applyAeLock;
- uint8_t aeLock;
- bool applyAePrecaptureTrigger;
- uint8_t aePrecaptureTrigger;
- } AeTriggerCancelOverride_t;
-
class CaptureRequest : public LightRefBase<CaptureRequest> {
public:
CameraMetadata mSettings;
@@ -421,9 +404,6 @@
mOutputStreams;
SurfaceMap mOutputSurfaces;
CaptureResultExtras mResultExtras;
- // Used to cancel AE precapture trigger for devices doesn't support
- // CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
- AeTriggerCancelOverride_t mAeTriggerCancelOverride;
// The number of requests that should be submitted to HAL at a time.
// For example, if batch size is 8, this request and the following 7
// requests will be submitted to HAL at a time. The batch size for
@@ -599,11 +579,6 @@
static nsecs_t getMonoToBoottimeOffset();
/**
- * Helper function to map between legacy and new dataspace enums
- */
- static android_dataspace mapToLegacyDataspace(android_dataspace dataSpace);
-
- /**
* Helper functions to map between framework and HIDL values
*/
static hardware::graphics::common::V1_0::PixelFormat mapToPixelFormat(int frameworkFormat);
@@ -648,9 +623,7 @@
RequestThread(wp<Camera3Device> parent,
sp<camera3::StatusTracker> statusTracker,
- HalInterface* interface,
- uint32_t deviceVersion,
- bool aeLockAvailable);
+ HalInterface* interface);
~RequestThread();
void setNotificationListener(wp<NotificationListener> listener);
@@ -784,9 +757,6 @@
// If the input request is in mRepeatingRequests. Must be called with mRequestLock hold
bool isRepeatingRequestLocked(const sp<CaptureRequest>&);
- // Handle AE precapture trigger cancel for devices <= CAMERA_DEVICE_API_VERSION_3_2.
- void handleAePrecaptureCancelRequest(const sp<CaptureRequest>& request);
-
// Clear repeating requests. Must be called with mRequestLock held.
status_t clearRepeatingRequestsLocked(/*out*/ int64_t *lastFrameNumber = NULL);
@@ -799,7 +769,6 @@
wp<Camera3Device> mParent;
wp<camera3::StatusTracker> mStatusTracker;
HalInterface* mInterface;
- uint32_t mDeviceVersion;
wp<NotificationListener> mListener;
@@ -849,9 +818,6 @@
int64_t mRepeatingLastFrameNumber;
- // Whether the device supports AE lock
- bool mAeLockAvailable;
-
// Flag indicating if we should prepare video stream for video requests.
bool mPrepareVideoStream;
};
@@ -892,10 +858,6 @@
// the shutter event.
Vector<camera3_stream_buffer_t> pendingOutputBuffers;
- // Used to cancel AE precapture trigger for devices doesn't support
- // CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
- AeTriggerCancelOverride_t aeTriggerCancelOverride;
-
// Whether this inflight request's shutter and result callback are to be
// called. The policy is that if the request is the last one in the constrained
// high speed recording request list, this flag will be true. If the request list
@@ -910,12 +872,11 @@
haveResultMetadata(false),
numBuffersLeft(0),
hasInputBuffer(false),
- aeTriggerCancelOverride({false, 0, false, 0}),
hasCallback(true) {
}
InFlightRequest(int numBuffers, CaptureResultExtras extras, bool hasInput,
- AeTriggerCancelOverride aeTriggerCancelOverride, bool hasAppCallback) :
+ bool hasAppCallback) :
shutterTimestamp(0),
sensorTimestamp(0),
requestStatus(OK),
@@ -923,7 +884,6 @@
numBuffersLeft(numBuffers),
resultExtras(extras),
hasInputBuffer(hasInput),
- aeTriggerCancelOverride(aeTriggerCancelOverride),
hasCallback(hasAppCallback) {
}
};
@@ -937,14 +897,7 @@
status_t registerInFlight(uint32_t frameNumber,
int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
- const AeTriggerCancelOverride_t &aeTriggerCancelOverride, bool callback);
-
- /**
- * Override result metadata for cancelling AE precapture trigger applied in
- * handleAePrecaptureCancelRequest().
- */
- void overrideResultForPrecaptureCancel(CameraMetadata* result,
- const AeTriggerCancelOverride_t &aeTriggerCancelOverride);
+ bool callback);
/**
* Tracking for idle detection
@@ -1039,21 +992,19 @@
// Send a partial capture result.
void sendPartialCaptureResult(const camera_metadata_t * partialResult,
- const CaptureResultExtras &resultExtras, uint32_t frameNumber,
- const AeTriggerCancelOverride_t &aeTriggerCancelOverride);
+ const CaptureResultExtras &resultExtras, uint32_t frameNumber);
// Send a total capture result given the pending metadata and result extras,
// partial results, and the frame number to the result queue.
void sendCaptureResult(CameraMetadata &pendingMetadata,
CaptureResultExtras &resultExtras,
CameraMetadata &collectedPartialResult, uint32_t frameNumber,
- bool reprocess, const AeTriggerCancelOverride_t &aeTriggerCancelOverride);
+ bool reprocess);
// Insert the result to the result queue after updating frame number and overriding AE
// trigger cancel.
// mOutputLock must be held when calling this function.
- void insertResultLocked(CaptureResult *result, uint32_t frameNumber,
- const AeTriggerCancelOverride_t &aeTriggerCancelOverride);
+ void insertResultLocked(CaptureResult *result, uint32_t frameNumber);
/**** Scope for mInFlightLock ****/
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 2b1a899..b45ef77 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -663,89 +663,6 @@
}
}
-status_t Camera3Stream::registerBuffersLocked(camera3_device *hal3Device) {
- ATRACE_CALL();
-
- /**
- * >= CAMERA_DEVICE_API_VERSION_3_2:
- *
- * camera3_device_t->ops->register_stream_buffers() is not called and must
- * be NULL.
- */
- if (hal3Device->common.version >= CAMERA_DEVICE_API_VERSION_3_2) {
- ALOGV("%s: register_stream_buffers unused as of HAL3.2", __FUNCTION__);
-
- if (hal3Device->ops->register_stream_buffers != NULL) {
- ALOGE("%s: register_stream_buffers is deprecated in HAL3.2; "
- "must be set to NULL in camera3_device::ops", __FUNCTION__);
- return INVALID_OPERATION;
- }
-
- return OK;
- }
-
- ALOGV("%s: register_stream_buffers using deprecated code path", __FUNCTION__);
-
- status_t res;
-
- size_t bufferCount = getBufferCountLocked();
-
- Vector<buffer_handle_t*> buffers;
- buffers.insertAt(/*prototype_item*/NULL, /*index*/0, bufferCount);
-
- camera3_stream_buffer_set bufferSet = camera3_stream_buffer_set();
- bufferSet.stream = this;
- bufferSet.num_buffers = bufferCount;
- bufferSet.buffers = buffers.editArray();
-
- Vector<camera3_stream_buffer_t> streamBuffers;
- streamBuffers.insertAt(camera3_stream_buffer_t(), /*index*/0, bufferCount);
-
- // Register all buffers with the HAL. This means getting all the buffers
- // from the stream, providing them to the HAL with the
- // register_stream_buffers() method, and then returning them back to the
- // stream in the error state, since they won't have valid data.
- //
- // Only registered buffers can be sent to the HAL.
-
- uint32_t bufferIdx = 0;
- for (; bufferIdx < bufferCount; bufferIdx++) {
- res = getBufferLocked( &streamBuffers.editItemAt(bufferIdx) );
- if (res != OK) {
- ALOGE("%s: Unable to get buffer %d for registration with HAL",
- __FUNCTION__, bufferIdx);
- // Skip registering, go straight to cleanup
- break;
- }
-
- sp<Fence> fence = new Fence(streamBuffers[bufferIdx].acquire_fence);
- fence->waitForever("Camera3Stream::registerBuffers");
-
- buffers.editItemAt(bufferIdx) = streamBuffers[bufferIdx].buffer;
- }
- if (bufferIdx == bufferCount) {
- // Got all buffers, register with HAL
- ALOGV("%s: Registering %zu buffers with camera HAL",
- __FUNCTION__, bufferCount);
- ATRACE_BEGIN("camera3->register_stream_buffers");
- res = hal3Device->ops->register_stream_buffers(hal3Device,
- &bufferSet);
- ATRACE_END();
- }
-
- // Return all valid buffers to stream, in ERROR state to indicate
- // they weren't filled.
- for (size_t i = 0; i < bufferIdx; i++) {
- streamBuffers.editItemAt(i).release_fence = -1;
- streamBuffers.editItemAt(i).status = CAMERA3_BUFFER_STATUS_ERROR;
- returnBufferLocked(streamBuffers[i], 0);
- }
-
- mPrepared = true;
-
- return res;
-}
-
status_t Camera3Stream::getBufferLocked(camera3_stream_buffer *,
const std::vector<size_t>&) {
ALOGE("%s: This type of stream does not support output", __FUNCTION__);
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index 27ef86d..9cdc1b3 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -474,9 +474,6 @@
Condition mInputBufferReturnedSignal;
static const nsecs_t kWaitForBufferDuration = 3000000000LL; // 3000 ms
- // Gets all buffers from endpoint and registers them with the HAL.
- status_t registerBuffersLocked(camera3_device *hal3Device);
-
void fireBufferListenersLocked(const camera3_stream_buffer& buffer,
bool acquired, bool output);
List<wp<Camera3StreamBufferListener> > mBufferListenerList;
diff --git a/services/mediadrm/Android.mk b/services/mediadrm/Android.mk
index 1d5fa07..fa3a02b 100644
--- a/services/mediadrm/Android.mk
+++ b/services/mediadrm/Android.mk
@@ -43,8 +43,9 @@
# TODO: Some legacy DRM plugins only support 32-bit. They need to be migrated to
# 64-bit. (b/18948909) Once all of a device's legacy DRM plugins support 64-bit,
-# that device can turn on ENABLE_MEDIADRM_64 to build this service as 64-bit.
-ifneq ($(ENABLE_MEDIADRM_64), true)
+# that device can turn on TARGET_ENABLE_MEDIADRM_64 to build this service as
+# 64-bit.
+ifneq ($(TARGET_ENABLE_MEDIADRM_64), true)
LOCAL_32_BIT_ONLY := true
endif
diff --git a/services/oboeservice/AAudioServiceEndpoint.cpp b/services/oboeservice/AAudioServiceEndpoint.cpp
index b197798..d3e182a 100644
--- a/services/oboeservice/AAudioServiceEndpoint.cpp
+++ b/services/oboeservice/AAudioServiceEndpoint.cpp
@@ -46,6 +46,7 @@
// Use 2 for "double buffered"
#define BUFFER_SIZE_IN_BURSTS 2
+#define BURSTS_PER_MIX_LOOP 1
// The mStreamInternal will use a service interface that does not go through Binder.
AAudioServiceEndpoint::AAudioServiceEndpoint(AAudioService &audioService)
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index d8882c9..8248f8b 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -59,6 +59,7 @@
std::lock_guard<std::mutex> lock(mLockUpMessageQueue);
delete mUpMessageQueue;
mUpMessageQueue = nullptr;
+
return AAUDIO_OK;
}
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index d6b6ee3..9318c2e 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -133,9 +133,6 @@
// This is used by one thread to tell another thread to exit. So it must be atomic.
std::atomic<bool> mThreadEnabled;
-
- int mAudioDataFileDescriptor = -1;
-
aaudio_audio_format_t mAudioFormat = AAUDIO_FORMAT_UNSPECIFIED;
int32_t mFramesPerBurst = 0;
int32_t mSamplesPerFrame = AAUDIO_UNSPECIFIED;
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.cpp b/services/oboeservice/AAudioServiceStreamMMAP.cpp
index b2e7fc9..cadc2a4 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.cpp
+++ b/services/oboeservice/AAudioServiceStreamMMAP.cpp
@@ -60,6 +60,13 @@
// FIXME Make closing synchronous.
AudioClock::sleepForNanos(100 * AAUDIO_NANOS_PER_MILLISECOND);
+ if (mAudioDataFileDescriptor != -1) {
+ ALOGV("AAudioServiceStreamMMAP: LEAK? close(mAudioDataFileDescriptor = %d)\n",
+ mAudioDataFileDescriptor);
+ ::close(mAudioDataFileDescriptor);
+ mAudioDataFileDescriptor = -1;
+ }
+
return AAudioServiceStreamBase::close();
}
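
The close-and-reset-to-minus-one pattern added here (and again in SharedRingBuffer below) has to be repeated at every owner of a descriptor; a small RAII holder is one hypothetical alternative, sketched only as an illustration and not part of this change:

#include <unistd.h>

// Hypothetical RAII holder for a POSIX file descriptor: closes exactly once,
// resets to -1, and cannot be copied.
// Usage sketch: UniqueFd fd(ashmem_create_region(...)); use fd.get(); closed on scope exit.
class UniqueFd {
public:
    explicit UniqueFd(int fd = -1) : mFd(fd) {}
    ~UniqueFd() { reset(); }
    UniqueFd(const UniqueFd&) = delete;
    UniqueFd& operator=(const UniqueFd&) = delete;

    int get() const { return mFd; }

    void reset(int fd = -1) {
        if (mFd != -1) {
            ::close(mFd);
        }
        mFd = fd;
    }

private:
    int mFd;
};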
@@ -125,6 +132,9 @@
MmapStreamInterface::stream_direction_t streamDirection = (direction == AAUDIO_DIRECTION_OUTPUT)
? MmapStreamInterface::DIRECTION_OUTPUT : MmapStreamInterface::DIRECTION_INPUT;
+ ALOGD("AAudioServiceStreamMMAP::open() request devId = %d, sRate = %d",
+ deviceId, config.sample_rate);
+
// Open HAL stream.
status_t status = MmapStreamInterface::openMmapStream(streamDirection,
&attributes,
@@ -161,11 +171,16 @@
: audio_channel_count_from_in_mask(config.channel_mask);
mAudioDataFileDescriptor = mMmapBufferinfo.shared_memory_fd;
+ ALOGV("AAudioServiceStreamMMAP::open LEAK? mAudioDataFileDescriptor = %d\n",
+ mAudioDataFileDescriptor);
mFramesPerBurst = mMmapBufferinfo.burst_size_frames;
mCapacityInFrames = mMmapBufferinfo.buffer_size_frames;
mAudioFormat = AAudioConvert_androidToAAudioDataFormat(config.format);
mSampleRate = config.sample_rate;
+ ALOGD("AAudioServiceStreamMMAP::open() got devId = %d, sRate = %d",
+ deviceId, config.sample_rate);
+
// Fill in AAudioStreamConfiguration
configurationOutput.setSampleRate(mSampleRate);
configurationOutput.setSamplesPerFrame(mSamplesPerFrame);
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.h b/services/oboeservice/AAudioServiceStreamMMAP.h
index a8e63a6..fe75a10 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.h
+++ b/services/oboeservice/AAudioServiceStreamMMAP.h
@@ -127,6 +127,7 @@
MonotonicCounter mFramesWritten;
MonotonicCounter mFramesRead;
int32_t mPreviousFrameCounter = 0; // from HAL
+ int mAudioDataFileDescriptor = -1;
// Interface to the AudioFlinger MMAP support.
android::sp<android::MmapStreamInterface> mMmapStream;
diff --git a/services/oboeservice/AAudioServiceStreamShared.cpp b/services/oboeservice/AAudioServiceStreamShared.cpp
index b5d9927..713d1f8 100644
--- a/services/oboeservice/AAudioServiceStreamShared.cpp
+++ b/services/oboeservice/AAudioServiceStreamShared.cpp
@@ -200,6 +200,10 @@
mEndpointManager.closeEndpoint(endpoint);
mServiceEndpoint = nullptr;
}
+ if (mAudioDataQueue != nullptr) {
+ delete mAudioDataQueue;
+ mAudioDataQueue = nullptr;
+ }
return AAudioServiceStreamBase::close();
}
diff --git a/services/oboeservice/Android.mk b/services/oboeservice/Android.mk
index a9c80ae..afb477e 100644
--- a/services/oboeservice/Android.mk
+++ b/services/oboeservice/Android.mk
@@ -48,8 +48,7 @@
libcutils \
libmediautils \
libutils \
- liblog \
- libtinyalsa
+ liblog
include $(BUILD_SHARED_LIBRARY)
diff --git a/services/oboeservice/SharedRingBuffer.cpp b/services/oboeservice/SharedRingBuffer.cpp
index efcc9d6..6b3fb4c 100644
--- a/services/oboeservice/SharedRingBuffer.cpp
+++ b/services/oboeservice/SharedRingBuffer.cpp
@@ -18,6 +18,8 @@
//#define LOG_NDEBUG 0
#include <utils/Log.h>
+#include <sys/mman.h>
+
#include "binding/RingBufferParcelable.h"
#include "binding/AudioEndpointParcelable.h"
@@ -31,9 +33,13 @@
if (mSharedMemory != nullptr) {
delete mFifoBuffer;
munmap(mSharedMemory, mSharedMemorySizeInBytes);
- close(mFileDescriptor);
mSharedMemory = nullptr;
}
+ if (mFileDescriptor != -1) {
+ ALOGV("SharedRingBuffer: LEAK? close(mFileDescriptor = %d)\n", mFileDescriptor);
+ close(mFileDescriptor);
+ mFileDescriptor = -1;
+ }
}
aaudio_result_t SharedRingBuffer::allocate(fifo_frames_t bytesPerFrame,
@@ -44,10 +50,12 @@
mDataMemorySizeInBytes = bytesPerFrame * capacityInFrames;
mSharedMemorySizeInBytes = mDataMemorySizeInBytes + (2 * (sizeof(fifo_counter_t)));
mFileDescriptor = ashmem_create_region("AAudioSharedRingBuffer", mSharedMemorySizeInBytes);
+ ALOGV("SharedRingBuffer::allocate() LEAK? mFileDescriptor = %d\n", mFileDescriptor);
if (mFileDescriptor < 0) {
ALOGE("SharedRingBuffer::allocate() ashmem_create_region() failed %d", errno);
return AAUDIO_ERROR_INTERNAL;
}
+
int err = ashmem_set_prot_region(mFileDescriptor, PROT_READ|PROT_WRITE); // TODO error handling?
if (err < 0) {
ALOGE("SharedRingBuffer::allocate() ashmem_set_prot_region() failed %d", errno);
@@ -73,9 +81,9 @@
(fifo_counter_t *) &mSharedMemory[SHARED_RINGBUFFER_WRITE_OFFSET];
uint8_t *dataAddress = &mSharedMemory[SHARED_RINGBUFFER_DATA_OFFSET];
- mFifoBuffer = new(std::nothrow) FifoBuffer(bytesPerFrame, capacityInFrames,
+ mFifoBuffer = new FifoBuffer(bytesPerFrame, capacityInFrames,
readCounterAddress, writeCounterAddress, dataAddress);
- return (mFifoBuffer == nullptr) ? AAUDIO_ERROR_NO_MEMORY : AAUDIO_OK;
+ return AAUDIO_OK;
}
void SharedRingBuffer::fillParcelable(AudioEndpointParcelable &endpointParcelable,