libeffects: Pre-processing migration to new webrtc
The legacy implementation is retained under the WEBRTC_LEGACY macro; the new
WebRTC APM code paths are built when the macro is not defined.
Bug: 132097139
Test: Verified with the standalone test application by listening to the processed outputs.
Change-Id: I17b4d144dbc490ca84753cc10bd001c281531e94
diff --git a/media/libeffects/preprocessing/Android.bp b/media/libeffects/preprocessing/Android.bp
index 16cd0ad..5217cf9 100644
--- a/media/libeffects/preprocessing/Android.bp
+++ b/media/libeffects/preprocessing/Android.bp
@@ -1,6 +1,6 @@
// audio preprocessing wrapper
cc_library_shared {
- name: "libaudiopreprocessing",
+ name: "libaudiopreprocessing_legacy",
vendor: true,
@@ -17,6 +17,7 @@
cflags: [
"-DWEBRTC_POSIX",
+ "-DWEBRTC_LEGACY",
"-fvisibility=hidden",
"-Wall",
"-Werror",
@@ -27,3 +28,34 @@
"libhardware_headers",
],
}
+
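+// Non-legacy build of the pre-processing wrapper, linked against the updated
+// webrtc_audio_processing static library.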
+cc_library_shared {
+ name: "libaudiopreprocessing",
+ vendor: true,
+ relative_install_path: "soundfx",
+ srcs: ["PreProcessing.cpp"],
+ local_include_dirs: [
+ ".",
+ ],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ "-Wextra",
+ "-Wno-unused-parameter",
+ ],
+
+ shared_libs: [
+ "liblog",
+ "libutils",
+ ],
+
+ static_libs: [
+ "webrtc_audio_processing",
+ ],
+
+ header_libs: [
+ "libaudioeffects",
+ "libhardware_headers",
+ "libwebrtc_absl_headers",
+ ],
+}
diff --git a/media/libeffects/preprocessing/PreProcessing.cpp b/media/libeffects/preprocessing/PreProcessing.cpp
index c7afe2f..f2f74a5 100644
--- a/media/libeffects/preprocessing/PreProcessing.cpp
+++ b/media/libeffects/preprocessing/PreProcessing.cpp
@@ -23,10 +23,15 @@
#include <hardware/audio_effect.h>
#include <audio_effects/effect_aec.h>
#include <audio_effects/effect_agc.h>
+#ifndef WEBRTC_LEGACY
+#include <audio_effects/effect_agc2.h>
+#endif
#include <audio_effects/effect_ns.h>
#include <module_common_types.h>
#include <audio_processing.h>
+#ifdef WEBRTC_LEGACY
#include "speex/speex_resampler.h"
+#endif
// undefine to perform multi channels API functional tests
//#define DUAL_MIC_TEST
@@ -42,6 +47,9 @@
enum preproc_id
{
PREPROC_AGC, // Automatic Gain Control
+#ifndef WEBRTC_LEGACY
+ PREPROC_AGC2, // Automatic Gain Control 2
+#endif
PREPROC_AEC, // Acoustic Echo Canceler
PREPROC_NS, // Noise Suppressor
PREPROC_NUM_EFFECTS
@@ -103,6 +111,10 @@
int id; // audio session ID
int io; // handle of input stream this session is on
webrtc::AudioProcessing* apm; // handle on webRTC audio processing module (APM)
+#ifndef WEBRTC_LEGACY
+ // Audio Processing module builder
+ webrtc::AudioProcessingBuilder ap_builder;
+#endif
size_t apmFrameCount; // buffer size for webRTC process (10 ms)
uint32_t apmSamplingRate; // webRTC APM sampling rate (8/16 or 32 kHz)
size_t frameCount; // buffer size before input resampler ( <=> apmFrameCount)
@@ -113,25 +125,42 @@
uint32_t enabledMsk; // bit field containing IDs of enabled pre processors
uint32_t processedMsk; // bit field containing IDs of pre processors already
// processed in current round
+#ifdef WEBRTC_LEGACY
webrtc::AudioFrame *procFrame; // audio frame passed to webRTC AMP ProcessStream()
+#else
+ // audio processing config structure
+ webrtc::AudioProcessing::Config config;
+ webrtc::StreamConfig inputConfig; // input stream configuration
+ webrtc::StreamConfig outputConfig; // output stream configuration
+#endif
int16_t *inBuf; // input buffer used when resampling
size_t inBufSize; // input buffer size in frames
size_t framesIn; // number of frames in input buffer
+#ifdef WEBRTC_LEGACY
SpeexResamplerState *inResampler; // handle on input speex resampler
+#endif
int16_t *outBuf; // output buffer used when resampling
size_t outBufSize; // output buffer size in frames
size_t framesOut; // number of frames in output buffer
+#ifdef WEBRTC_LEGACY
SpeexResamplerState *outResampler; // handle on output speex resampler
+#endif
uint32_t revChannelCount; // number of channels on reverse stream
uint32_t revEnabledMsk; // bit field containing IDs of enabled pre processors
// with reverse channel
uint32_t revProcessedMsk; // bit field containing IDs of pre processors with reverse
// channel already processed in current round
+#ifdef WEBRTC_LEGACY
webrtc::AudioFrame *revFrame; // audio frame passed to webRTC AMP AnalyzeReverseStream()
+#else
+ webrtc::StreamConfig revConfig; // reverse stream configuration.
+#endif
int16_t *revBuf; // reverse channel input buffer
size_t revBufSize; // reverse channel input buffer size
size_t framesRev; // number of frames in reverse channel input buffer
+#ifdef WEBRTC_LEGACY
SpeexResamplerState *revResampler; // handle on reverse channel input speex resampler
+#endif
};
#ifdef DUAL_MIC_TEST
@@ -188,6 +217,20 @@
"The Android Open Source Project"
};
+#ifndef WEBRTC_LEGACY
+// Automatic Gain Control 2
+static const effect_descriptor_t sAgc2Descriptor = {
+ { 0xae3c653b, 0xbe18, 0x4ab8, 0x8938, { 0x41, 0x8f, 0x0a, 0x7f, 0x06, 0xac } }, // type
+ { 0x89f38e65, 0xd4d2, 0x4d64, 0xad0e, { 0x2b, 0x3e, 0x79, 0x9e, 0xa8, 0x86 } }, // uuid
+ EFFECT_CONTROL_API_VERSION,
+ (EFFECT_FLAG_TYPE_PRE_PROC|EFFECT_FLAG_DEVICE_IND),
+ 0, //FIXME indicate CPU load
+ 0, //FIXME indicate memory usage
+ "Automatic Gain Control 2",
+ "The Android Open Source Project"
+};
+#endif
+
// Acoustic Echo Cancellation
static const effect_descriptor_t sAecDescriptor = {
{ 0x7b491460, 0x8d4d, 0x11e0, 0xbd61, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } }, // type
@@ -215,6 +258,9 @@
static const effect_descriptor_t *sDescriptors[PREPROC_NUM_EFFECTS] = {
&sAgcDescriptor,
+#ifndef WEBRTC_LEGACY
+ &sAgc2Descriptor,
+#endif
&sAecDescriptor,
&sNsDescriptor
};
@@ -225,6 +271,9 @@
const effect_uuid_t * const sUuidToPreProcTable[PREPROC_NUM_EFFECTS] = {
FX_IID_AGC,
+#ifndef WEBRTC_LEGACY
+ FX_IID_AGC2,
+#endif
FX_IID_AEC,
FX_IID_NS
};
@@ -266,19 +315,50 @@
static const int kAgcDefaultCompGain = 9;
static const bool kAgcDefaultLimiter = true;
+#ifndef WEBRTC_LEGACY
+int Agc2Init (preproc_effect_t *effect)
+{
+ ALOGV("Agc2Init");
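+ // The updated APM is configured through a single AudioProcessing::Config:
+ // fetch it, adjust the gain_controller2 fields, then re-apply it.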
+ effect->session->config = effect->session->apm->GetConfig();
+ effect->session->config.gain_controller2.fixed_digital.gain_db = 0.f;
+ effect->session->config.gain_controller2.adaptive_digital.level_estimator =
+ effect->session->config.gain_controller2.kRms;
+ effect->session->config.gain_controller2.adaptive_digital.extra_saturation_margin_db = 2.f;
+ effect->session->apm->ApplyConfig(effect->session->config);
+ return 0;
+}
+#endif
+
int AgcInit (preproc_effect_t *effect)
{
ALOGV("AgcInit");
+#ifdef WEBRTC_LEGACY
webrtc::GainControl *agc = static_cast<webrtc::GainControl *>(effect->engine);
agc->set_mode(webrtc::GainControl::kFixedDigital);
agc->set_target_level_dbfs(kAgcDefaultTargetLevel);
agc->set_compression_gain_db(kAgcDefaultCompGain);
agc->enable_limiter(kAgcDefaultLimiter);
+#else
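+ // Non-legacy path: the AGC1 defaults map directly onto the gain_controller1
+ // fields of the APM config.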
+ effect->session->config = effect->session->apm->GetConfig();
+ effect->session->config.gain_controller1.target_level_dbfs = kAgcDefaultTargetLevel;
+ effect->session->config.gain_controller1.compression_gain_db = kAgcDefaultCompGain;
+ effect->session->config.gain_controller1.enable_limiter = kAgcDefaultLimiter;
+ effect->session->apm->ApplyConfig(effect->session->config);
+#endif
return 0;
}
+#ifndef WEBRTC_LEGACY
+int Agc2Create(preproc_effect_t *effect)
+{
+ Agc2Init(effect);
+ return 0;
+}
+#endif
+
int AgcCreate(preproc_effect_t *effect)
{
+#ifdef WEBRTC_LEGACY
webrtc::GainControl *agc = effect->session->apm->gain_control();
ALOGV("AgcCreate got agc %p", agc);
if (agc == NULL) {
@@ -286,10 +366,93 @@
return -ENOMEM;
}
effect->engine = static_cast<preproc_fx_handle_t>(agc);
+#endif
AgcInit(effect);
return 0;
}
+#ifndef WEBRTC_LEGACY
+int Agc2GetParameter(preproc_effect_t *effect,
+ void *pParam,
+ uint32_t *pValueSize,
+ void *pValue)
+{
+ int status = 0;
+ uint32_t param = *(uint32_t *)pParam;
+ agc2_settings_t *pProperties = (agc2_settings_t *)pValue;
+
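+ // The first switch only validates the requested value size; the second
+ // switch below fills in the value from the current APM config.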
+ switch (param) {
+ case AGC2_PARAM_FIXED_DIGITAL_GAIN:
+ if (*pValueSize < sizeof(float)) {
+ *pValueSize = 0;
+ return -EINVAL;
+ }
+ break;
+ case AGC2_PARAM_ADAPT_DIGI_LEVEL_ESTIMATOR:
+ if (*pValueSize < sizeof(int32_t)) {
+ *pValueSize = 0;
+ return -EINVAL;
+ }
+ break;
+ case AGC2_PARAM_ADAPT_DIGI_EXTRA_SATURATION_MARGIN:
+ if (*pValueSize < sizeof(float)) {
+ *pValueSize = 0;
+ return -EINVAL;
+ }
+ break;
+ case AGC2_PARAM_PROPERTIES:
+ if (*pValueSize < sizeof(agc2_settings_t)) {
+ *pValueSize = 0;
+ return -EINVAL;
+ }
+ break;
+
+ default:
+ ALOGW("Agc2GetParameter() unknown param %08x", param);
+ status = -EINVAL;
+ break;
+ }
+
+ effect->session->config = effect->session->apm->GetConfig();
+ switch (param) {
+ case AGC2_PARAM_FIXED_DIGITAL_GAIN:
+ *(float *) pValue =
+ (float)(effect->session->config.gain_controller2.fixed_digital.gain_db);
+ ALOGV("Agc2GetParameter() fixed digital gain %f dB", *(float *) pValue);
+ break;
+ case AGC2_PARAM_ADAPT_DIGI_LEVEL_ESTIMATOR:
+ *(uint32_t *) pValue =
+ (uint32_t)(effect->session->config.gain_controller2.adaptive_digital.
+ level_estimator);
+ ALOGV("Agc2GetParameter() level estimator %d",
+ *(webrtc::AudioProcessing::Config::GainController2::LevelEstimator *) pValue);
+ break;
+ case AGC2_PARAM_ADAPT_DIGI_EXTRA_SATURATION_MARGIN:
+ *(float *) pValue =
+ (float)(effect->session->config.gain_controller2.adaptive_digital.
+ extra_saturation_margin_db);
+ ALOGV("Agc2GetParameter() extra saturation margin %f dB", *(float *) pValue);
+ break;
+ case AGC2_PARAM_PROPERTIES:
+ pProperties->fixedDigitalGain =
+ (float)(effect->session->config.gain_controller2.fixed_digital.gain_db);
+ pProperties->level_estimator =
+ (uint32_t)(effect->session->config.gain_controller2.adaptive_digital.
+ level_estimator);
+ pProperties->extraSaturationMargin =
+ (float)(effect->session->config.gain_controller2.adaptive_digital.
+ extra_saturation_margin_db);
+ break;
+ default:
+ ALOGW("Agc2GetParameter() unknown param %d", param);
+ status = -EINVAL;
+ break;
+ }
+
+ return status;
+}
+#endif
+
int AgcGetParameter(preproc_effect_t *effect,
void *pParam,
uint32_t *pValueSize,
@@ -298,7 +461,9 @@
int status = 0;
uint32_t param = *(uint32_t *)pParam;
t_agc_settings *pProperties = (t_agc_settings *)pValue;
+#ifdef WEBRTC_LEGACY
webrtc::GainControl *agc = static_cast<webrtc::GainControl *>(effect->engine);
+#endif
switch (param) {
case AGC_PARAM_TARGET_LEVEL:
@@ -327,6 +492,7 @@
break;
}
+#ifdef WEBRTC_LEGACY
switch (param) {
case AGC_PARAM_TARGET_LEVEL:
*(int16_t *) pValue = (int16_t)(agc->target_level_dbfs() * -100);
@@ -351,12 +517,98 @@
status = -EINVAL;
break;
}
+#else
+ effect->session->config = effect->session->apm->GetConfig();
+ switch (param) {
+ case AGC_PARAM_TARGET_LEVEL:
+ *(int16_t *) pValue =
+ (int16_t)(effect->session->config.gain_controller1.target_level_dbfs * -100);
+ ALOGV("AgcGetParameter() target level %d milliBels", *(int16_t *) pValue);
+ break;
+ case AGC_PARAM_COMP_GAIN:
+ *(int16_t *) pValue =
+ (int16_t)(effect->session->config.gain_controller1.compression_gain_db * 100);
+ ALOGV("AgcGetParameter() comp gain %d milliBels", *(int16_t *) pValue);
+ break;
+ case AGC_PARAM_LIMITER_ENA:
+ *(bool *) pValue =
+ (bool)(effect->session->config.gain_controller1.enable_limiter);
+ ALOGV("AgcGetParameter() limiter enabled %s",
+ (*(int16_t *) pValue != 0) ? "true" : "false");
+ break;
+ case AGC_PARAM_PROPERTIES:
+ pProperties->targetLevel =
+ (int16_t)(effect->session->config.gain_controller1.target_level_dbfs * -100);
+ pProperties->compGain =
+ (int16_t)(effect->session->config.gain_controller1.compression_gain_db * 100);
+ pProperties->limiterEnabled =
+ (bool)(effect->session->config.gain_controller1.enable_limiter);
+ break;
+ default:
+ ALOGW("AgcGetParameter() unknown param %d", param);
+ status = -EINVAL;
+ break;
+ }
+#endif
return status;
}
+#ifndef WEBRTC_LEGACY
+int Agc2SetParameter (preproc_effect_t *effect, void *pParam, void *pValue)
+{
+ int status = 0;
+ uint32_t param = *(uint32_t *)pParam;
+ float valueFloat = 0.f;
+ agc2_settings_t *pProperties = (agc2_settings_t *)pValue;
+ effect->session->config = effect->session->apm->GetConfig();
+ switch (param) {
+ case AGC2_PARAM_FIXED_DIGITAL_GAIN:
+ valueFloat = (float)(*(int32_t *) pValue);
+ ALOGV("Agc2SetParameter() fixed digital gain %f dB", valueFloat);
+ effect->session->config.gain_controller2.fixed_digital.gain_db = valueFloat;
+ break;
+ case AGC2_PARAM_ADAPT_DIGI_LEVEL_ESTIMATOR:
+ ALOGV("Agc2SetParameter() level estimator %d", *(webrtc::AudioProcessing::Config::
+ GainController2::LevelEstimator *) pValue);
+ effect->session->config.gain_controller2.adaptive_digital.level_estimator =
+ (*(webrtc::AudioProcessing::Config::GainController2::LevelEstimator *) pValue);
+ break;
+ case AGC2_PARAM_ADAPT_DIGI_EXTRA_SATURATION_MARGIN:
+ valueFloat = (float)(*(int32_t *) pValue);
+ ALOGV("Agc2SetParameter() extra saturation margin %f dB", valueFloat);
+ effect->session->config.gain_controller2.adaptive_digital.extra_saturation_margin_db =
+ valueFloat;
+ break;
+ case AGC2_PARAM_PROPERTIES:
+ ALOGV("Agc2SetParameter() properties gain %f, level %d margin %f",
+ pProperties->fixedDigitalGain,
+ pProperties->level_estimator,
+ pProperties->extraSaturationMargin);
+ effect->session->config.gain_controller2.fixed_digital.gain_db =
+ pProperties->fixedDigitalGain;
+ effect->session->config.gain_controller2.adaptive_digital.level_estimator =
+ (webrtc::AudioProcessing::Config::GainController2::LevelEstimator)pProperties->
+ level_estimator;
+ effect->session->config.gain_controller2.adaptive_digital.extra_saturation_margin_db =
+ pProperties->extraSaturationMargin;
+ break;
+ default:
+ ALOGW("Agc2SetParameter() unknown param %08x value %08x", param, *(uint32_t *)pValue);
+ status = -EINVAL;
+ break;
+ }
+ effect->session->apm->ApplyConfig(effect->session->config);
+
+ ALOGV("Agc2SetParameter() done status %d", status);
+
+ return status;
+}
+#endif
+
int AgcSetParameter (preproc_effect_t *effect, void *pParam, void *pValue)
{
int status = 0;
+#ifdef WEBRTC_LEGACY
uint32_t param = *(uint32_t *)pParam;
t_agc_settings *pProperties = (t_agc_settings *)pValue;
webrtc::GainControl *agc = static_cast<webrtc::GainControl *>(effect->engine);
@@ -390,27 +642,95 @@
status = -EINVAL;
break;
}
+#else
+ uint32_t param = *(uint32_t *)pParam;
+ t_agc_settings *pProperties = (t_agc_settings *)pValue;
+ effect->session->config = effect->session->apm->GetConfig();
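+ // Parameters arrive in millibels; the APM config stores dB, hence the /100.
+ // The target level is negated because target_level_dbfs is expressed as dB
+ // below full scale.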
+ switch (param) {
+ case AGC_PARAM_TARGET_LEVEL:
+ ALOGV("AgcSetParameter() target level %d milliBels", *(int16_t *)pValue);
+ effect->session->config.gain_controller1.target_level_dbfs =
+ (-(*(int16_t *)pValue / 100));
+ break;
+ case AGC_PARAM_COMP_GAIN:
+ ALOGV("AgcSetParameter() comp gain %d milliBels", *(int16_t *)pValue);
+ effect->session->config.gain_controller1.compression_gain_db =
+ (*(int16_t *)pValue / 100);
+ break;
+ case AGC_PARAM_LIMITER_ENA:
+ ALOGV("AgcSetParameter() limiter enabled %s", *(bool *)pValue ? "true" : "false");
+ effect->session->config.gain_controller1.enable_limiter =
+ (*(bool *)pValue);
+ break;
+ case AGC_PARAM_PROPERTIES:
+ ALOGV("AgcSetParameter() properties level %d, gain %d limiter %d",
+ pProperties->targetLevel,
+ pProperties->compGain,
+ pProperties->limiterEnabled);
+ effect->session->config.gain_controller1.target_level_dbfs =
+ -(pProperties->targetLevel / 100);
+ effect->session->config.gain_controller1.compression_gain_db =
+ pProperties->compGain / 100;
+ effect->session->config.gain_controller1.enable_limiter =
+ pProperties->limiterEnabled;
+ break;
+ default:
+ ALOGW("AgcSetParameter() unknown param %08x value %08x", param, *(uint32_t *)pValue);
+ status = -EINVAL;
+ break;
+ }
+ effect->session->apm->ApplyConfig(effect->session->config);
+#endif
ALOGV("AgcSetParameter() done status %d", status);
return status;
}
+#ifndef WEBRTC_LEGACY
+void Agc2Enable(preproc_effect_t *effect)
+{
+ effect->session->config = effect->session->apm->GetConfig();
+ effect->session->config.gain_controller2.enabled = true;
+ effect->session->apm->ApplyConfig(effect->session->config);
+}
+#endif
+
void AgcEnable(preproc_effect_t *effect)
{
+#ifdef WEBRTC_LEGACY
webrtc::GainControl *agc = static_cast<webrtc::GainControl *>(effect->engine);
ALOGV("AgcEnable agc %p", agc);
agc->Enable(true);
+#else
+ effect->session->config = effect->session->apm->GetConfig();
+ effect->session->config.gain_controller1.enabled = true;
+ effect->session->apm->ApplyConfig(effect->session->config);
+#endif
}
+#ifndef WEBRTC_LEGACY
+void Agc2Disable(preproc_effect_t *effect)
+{
+ effect->session->config = effect->session->apm->GetConfig();
+ effect->session->config.gain_controller2.enabled = false;
+ effect->session->apm->ApplyConfig(effect->session->config);
+}
+#endif
+
void AgcDisable(preproc_effect_t *effect)
{
+#ifdef WEBRTC_LEGACY
ALOGV("AgcDisable");
webrtc::GainControl *agc = static_cast<webrtc::GainControl *>(effect->engine);
agc->Enable(false);
+#else
+ effect->session->config = effect->session->apm->GetConfig();
+ effect->session->config.gain_controller1.enabled = false;
+ effect->session->apm->ApplyConfig(effect->session->config);
+#endif
}
-
static const preproc_ops_t sAgcOps = {
AgcCreate,
AgcInit,
@@ -422,26 +742,48 @@
NULL
};
+#ifndef WEBRTC_LEGACY
+static const preproc_ops_t sAgc2Ops = {
+ Agc2Create,
+ Agc2Init,
+ NULL,
+ Agc2Enable,
+ Agc2Disable,
+ Agc2SetParameter,
+ Agc2GetParameter,
+ NULL
+};
+#endif
//------------------------------------------------------------------------------
// Acoustic Echo Canceler (AEC)
//------------------------------------------------------------------------------
+#ifdef WEBRTC_LEGACY
static const webrtc::EchoControlMobile::RoutingMode kAecDefaultMode =
webrtc::EchoControlMobile::kEarpiece;
static const bool kAecDefaultComfortNoise = true;
+#endif
int AecInit (preproc_effect_t *effect)
{
ALOGV("AecInit");
+#ifdef WEBRTC_LEGACY
webrtc::EchoControlMobile *aec = static_cast<webrtc::EchoControlMobile *>(effect->engine);
aec->set_routing_mode(kAecDefaultMode);
aec->enable_comfort_noise(kAecDefaultComfortNoise);
+#else
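+ // Non-legacy path: the echo canceller is driven through the APM config;
+ // mobile_mode selects the mobile (AECM-style) canceller and is off by default.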
+ effect->session->config = effect->session->apm->GetConfig();
+ effect->session->config.echo_canceller.mobile_mode = false;
+ effect->session->apm->ApplyConfig(effect->session->config);
+#endif
return 0;
}
int AecCreate(preproc_effect_t *effect)
{
+#ifdef WEBRTC_LEGACY
webrtc::EchoControlMobile *aec = effect->session->apm->echo_control_mobile();
ALOGV("AecCreate got aec %p", aec);
if (aec == NULL) {
@@ -449,6 +791,7 @@
return -ENOMEM;
}
effect->engine = static_cast<preproc_fx_handle_t>(aec);
+#endif
AecInit (effect);
return 0;
}
@@ -470,6 +813,14 @@
*(uint32_t *)pValue = 1000 * effect->session->apm->stream_delay_ms();
ALOGV("AecGetParameter() echo delay %d us", *(uint32_t *)pValue);
break;
+#ifndef WEBRTC_LEGACY
+ case AEC_PARAM_MOBILE_MODE:
+ effect->session->config = effect->session->apm->GetConfig();
+ *(uint32_t *)pValue = effect->session->config.echo_canceller.mobile_mode;
+ ALOGV("AecGetParameter() mobile mode %d", *(uint32_t *)pValue);
+ break;
+#endif
default:
ALOGW("AecGetParameter() unknown param %08x value %08x", param, *(uint32_t *)pValue);
status = -EINVAL;
@@ -490,6 +841,15 @@
status = effect->session->apm->set_stream_delay_ms(value/1000);
ALOGV("AecSetParameter() echo delay %d us, status %d", value, status);
break;
+#ifndef WEBRTC_LEGACY
+ case AEC_PARAM_MOBILE_MODE:
+ effect->session->config = effect->session->apm->GetConfig();
+ effect->session->config.echo_canceller.mobile_mode = value;
+ ALOGV("AecSetParameter() mobile mode %d", value);
+ effect->session->apm->ApplyConfig(effect->session->config);
+ break;
+#endif
default:
ALOGW("AecSetParameter() unknown param %08x value %08x", param, *(uint32_t *)pValue);
status = -EINVAL;
@@ -500,28 +860,43 @@
void AecEnable(preproc_effect_t *effect)
{
+#ifdef WEBRTC_LEGACY
webrtc::EchoControlMobile *aec = static_cast<webrtc::EchoControlMobile *>(effect->engine);
ALOGV("AecEnable aec %p", aec);
aec->Enable(true);
+#else
+ effect->session->config = effect->session->apm->GetConfig();
+ effect->session->config.echo_canceller.enabled = true;
+ effect->session->apm->ApplyConfig(effect->session->config);
+#endif
}
void AecDisable(preproc_effect_t *effect)
{
+#ifdef WEBRTC_LEGACY
ALOGV("AecDisable");
webrtc::EchoControlMobile *aec = static_cast<webrtc::EchoControlMobile *>(effect->engine);
aec->Enable(false);
+#else
+ effect->session->config = effect->session->apm->GetConfig();
+ effect->session->config.echo_canceller.enabled = false;
+ effect->session->apm->ApplyConfig(effect->session->config);
+#endif
}
int AecSetDevice(preproc_effect_t *effect, uint32_t device)
{
ALOGV("AecSetDevice %08x", device);
+#ifdef WEBRTC_LEGACY
webrtc::EchoControlMobile *aec = static_cast<webrtc::EchoControlMobile *>(effect->engine);
webrtc::EchoControlMobile::RoutingMode mode = webrtc::EchoControlMobile::kQuietEarpieceOrHeadset;
+#endif
if (audio_is_input_device(device)) {
return 0;
}
+#ifdef WEBRTC_LEGACY
switch(device) {
case AUDIO_DEVICE_OUT_EARPIECE:
mode = webrtc::EchoControlMobile::kEarpiece;
@@ -536,6 +911,7 @@
break;
}
aec->set_routing_mode(mode);
+#endif
return 0;
}
@@ -554,11 +930,17 @@
// Noise Suppression (NS)
//------------------------------------------------------------------------------
+#ifdef WEBRTC_LEGACY
static const webrtc::NoiseSuppression::Level kNsDefaultLevel = webrtc::NoiseSuppression::kModerate;
+#else
+static const webrtc::AudioProcessing::Config::NoiseSuppression::Level kNsDefaultLevel =
+ webrtc::AudioProcessing::Config::NoiseSuppression::kModerate;
+#endif
int NsInit (preproc_effect_t *effect)
{
ALOGV("NsInit");
+#ifdef WEBRTC_LEGACY
webrtc::NoiseSuppression *ns = static_cast<webrtc::NoiseSuppression *>(effect->engine);
ns->set_level(kNsDefaultLevel);
webrtc::Config config;
@@ -575,12 +957,20 @@
config.Set<webrtc::Beamforming>(
new webrtc::Beamforming(false, geometry));
effect->session->apm->SetExtraOptions(config);
+#else
+ effect->session->config = effect->session->apm->GetConfig();
+ effect->session->config.noise_suppression.level = kNsDefaultLevel;
+ effect->session->apm->ApplyConfig(effect->session->config);
+#endif
effect->type = NS_TYPE_SINGLE_CHANNEL;
return 0;
}
int NsCreate(preproc_effect_t *effect)
{
+#ifdef WEBRTC_LEGACY
webrtc::NoiseSuppression *ns = effect->session->apm->noise_suppression();
ALOGV("NsCreate got ns %p", ns);
if (ns == NULL) {
@@ -588,6 +978,7 @@
return -ENOMEM;
}
effect->engine = static_cast<preproc_fx_handle_t>(ns);
+#endif
NsInit (effect);
return 0;
}
@@ -604,6 +995,7 @@
int NsSetParameter (preproc_effect_t *effect, void *pParam, void *pValue)
{
int status = 0;
+#ifdef WEBRTC_LEGACY
webrtc::NoiseSuppression *ns = static_cast<webrtc::NoiseSuppression *>(effect->engine);
uint32_t param = *(uint32_t *)pParam;
uint32_t value = *(uint32_t *)pValue;
@@ -629,12 +1021,30 @@
ALOGW("NsSetParameter() unknown param %08x value %08x", param, value);
status = -EINVAL;
}
+#else
+ uint32_t param = *(uint32_t *)pParam;
+ uint32_t value = *(uint32_t *)pValue;
+ effect->session->config =
+ effect->session->apm->GetConfig();
+ switch (param) {
+ case NS_PARAM_LEVEL:
+ effect->session->config.noise_suppression.level =
+ (webrtc::AudioProcessing::Config::NoiseSuppression::Level)value;
+ ALOGV("NsSetParameter() level %d", value);
+ break;
+ default:
+ ALOGW("NsSetParameter() unknown param %08x value %08x", param, value);
+ status = -EINVAL;
+ }
+ effect->session->apm->ApplyConfig(effect->session->config);
+#endif
return status;
}
void NsEnable(preproc_effect_t *effect)
{
+#ifdef WEBRTC_LEGACY
webrtc::NoiseSuppression *ns = static_cast<webrtc::NoiseSuppression *>(effect->engine);
ALOGV("NsEnable ns %p", ns);
ns->Enable(true);
@@ -644,17 +1054,30 @@
config.Set<webrtc::Beamforming>(new webrtc::Beamforming(true, geometry));
effect->session->apm->SetExtraOptions(config);
}
+#else
+ effect->session->config =
+ effect->session->apm->GetConfig();
+ effect->session->config.noise_suppression.enabled = true;
+ effect->session->apm->ApplyConfig(effect->session->config);
+#endif
}
void NsDisable(preproc_effect_t *effect)
{
ALOGV("NsDisable");
+#ifdef WEBRTC_LEGACY
webrtc::NoiseSuppression *ns = static_cast<webrtc::NoiseSuppression *>(effect->engine);
ns->Enable(false);
webrtc::Config config;
std::vector<webrtc::Point> geometry;
config.Set<webrtc::Beamforming>(new webrtc::Beamforming(false, geometry));
effect->session->apm->SetExtraOptions(config);
+#else
+ effect->session->config =
+ effect->session->apm->GetConfig();
+ effect->session->config.noise_suppression.enabled = false;
+ effect->session->apm->ApplyConfig(effect->session->config);
+#endif
}
static const preproc_ops_t sNsOps = {
@@ -669,8 +1092,12 @@
};
+
static const preproc_ops_t *sPreProcOps[PREPROC_NUM_EFFECTS] = {
&sAgcOps,
+#ifndef WEBRTC_LEGACY
+ &sAgc2Ops,
+#endif
&sAecOps,
&sNsOps
};
@@ -812,7 +1239,9 @@
session->id = 0;
session->io = 0;
session->createdMsk = 0;
+#ifdef WEBRTC_LEGACY
session->apm = NULL;
+#endif
for (i = 0; i < PREPROC_NUM_EFFECTS && status == 0; i++) {
status = Effect_Init(&session->effects[i], i);
}
@@ -829,6 +1258,7 @@
ALOGV("Session_CreateEffect procId %d, createdMsk %08x", procId, session->createdMsk);
if (session->createdMsk == 0) {
+#ifdef WEBRTC_LEGACY
session->apm = webrtc::AudioProcessing::Create();
if (session->apm == NULL) {
ALOGW("Session_CreateEffect could not get apm engine");
@@ -850,28 +1280,53 @@
ALOGW("Session_CreateEffect could not allocate reverse audio frame");
goto error;
}
+#else
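+ // The APM instance is now obtained from an AudioProcessingBuilder instead of
+ // the static AudioProcessing::Create() factory.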
+ session->apm = session->ap_builder.Create();
+ if (session->apm == NULL) {
+ ALOGW("Session_CreateEffect could not get apm engine");
+ goto error;
+ }
+#endif
session->apmSamplingRate = kPreprocDefaultSr;
session->apmFrameCount = (kPreprocDefaultSr) / 100;
session->frameCount = session->apmFrameCount;
session->samplingRate = kPreprocDefaultSr;
session->inChannelCount = kPreProcDefaultCnl;
session->outChannelCount = kPreProcDefaultCnl;
+#ifdef WEBRTC_LEGACY
session->procFrame->sample_rate_hz_ = kPreprocDefaultSr;
session->procFrame->num_channels_ = kPreProcDefaultCnl;
+#else
+ session->inputConfig.set_sample_rate_hz(kPreprocDefaultSr);
+ session->inputConfig.set_num_channels(kPreProcDefaultCnl);
+ session->outputConfig.set_sample_rate_hz(kPreprocDefaultSr);
+ session->outputConfig.set_num_channels(kPreProcDefaultCnl);
+#endif
session->revChannelCount = kPreProcDefaultCnl;
+#ifdef WEBRTC_LEGACY
session->revFrame->sample_rate_hz_ = kPreprocDefaultSr;
session->revFrame->num_channels_ = kPreProcDefaultCnl;
+#else
+ session->revConfig.set_sample_rate_hz(kPreprocDefaultSr);
+ session->revConfig.set_num_channels(kPreProcDefaultCnl);
+#endif
session->enabledMsk = 0;
session->processedMsk = 0;
session->revEnabledMsk = 0;
session->revProcessedMsk = 0;
+#ifdef WEBRTC_LEGACY
session->inResampler = NULL;
+#endif
session->inBuf = NULL;
session->inBufSize = 0;
+#ifdef WEBRTC_LEGACY
session->outResampler = NULL;
+#endif
session->outBuf = NULL;
session->outBufSize = 0;
+#ifdef WEBRTC_LEGACY
session->revResampler = NULL;
+#endif
session->revBuf = NULL;
session->revBufSize = 0;
}
@@ -885,12 +1340,17 @@
error:
if (session->createdMsk == 0) {
+#ifdef WEBRTC_LEGACY
delete session->revFrame;
session->revFrame = NULL;
delete session->procFrame;
session->procFrame = NULL;
delete session->apm;
session->apm = NULL; // NOLINT(clang-analyzer-cplusplus.NewDelete)
+#else
+ delete session->apm;
+ session->apm = NULL;
+#endif
}
return status;
}
@@ -901,6 +1361,7 @@
ALOGW_IF(Effect_Release(fx) != 0, " Effect_Release() failed for proc ID %d", fx->procId);
session->createdMsk &= ~(1<<fx->procId);
if (session->createdMsk == 0) {
+#ifdef WEBRTC_LEGACY
delete session->apm;
session->apm = NULL;
delete session->procFrame;
@@ -919,6 +1380,10 @@
speex_resampler_destroy(session->revResampler);
session->revResampler = NULL;
}
+#else
+ delete session->apm;
+ session->apm = NULL;
+#endif
delete session->inBuf;
session->inBuf = NULL;
delete session->outBuf;
@@ -946,7 +1411,9 @@
ALOGV("Session_SetConfig sr %d cnl %08x",
config->inputCfg.samplingRate, config->inputCfg.channels);
+#ifdef WEBRTC_LEGACY
int status;
+#endif
// AEC implementation is limited to 16kHz
if (config->inputCfg.samplingRate >= 32000 && !(session->createdMsk & (1 << PREPROC_AEC))) {
@@ -958,6 +1425,7 @@
session->apmSamplingRate = 8000;
}
+#ifdef WEBRTC_LEGACY
const webrtc::ProcessingConfig processing_config = {
{{static_cast<int>(session->apmSamplingRate), inCnl},
{static_cast<int>(session->apmSamplingRate), outCnl},
@@ -967,23 +1435,41 @@
if (status < 0) {
return -EINVAL;
}
+#endif
session->samplingRate = config->inputCfg.samplingRate;
session->apmFrameCount = session->apmSamplingRate / 100;
if (session->samplingRate == session->apmSamplingRate) {
session->frameCount = session->apmFrameCount;
} else {
+#ifdef WEBRTC_LEGACY
session->frameCount = (session->apmFrameCount * session->samplingRate) /
session->apmSamplingRate + 1;
+#else
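+ // The updated APM takes the stream rate directly (see inputConfig/outputConfig
+ // below) and resamples internally, so the extra frame of headroom required by
+ // the speex resampler is no longer added.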
+ session->frameCount = (session->apmFrameCount * session->samplingRate) /
+ session->apmSamplingRate;
+#endif
}
session->inChannelCount = inCnl;
session->outChannelCount = outCnl;
+#ifdef WEBRTC_LEGACY
session->procFrame->num_channels_ = inCnl;
session->procFrame->sample_rate_hz_ = session->apmSamplingRate;
+#else
+ session->inputConfig.set_sample_rate_hz(session->samplingRate);
+ session->inputConfig.set_num_channels(inCnl);
+ session->outputConfig.set_sample_rate_hz(session->samplingRate);
+ session->outputConfig.set_num_channels(inCnl);
+#endif
session->revChannelCount = inCnl;
+#ifdef WEBRTC_LEGACY
session->revFrame->num_channels_ = inCnl;
session->revFrame->sample_rate_hz_ = session->apmSamplingRate;
+#else
+ session->revConfig.set_sample_rate_hz(session->samplingRate);
+ session->revConfig.set_num_channels(inCnl);
+#endif
// force process buffer reallocation
session->inBufSize = 0;
@@ -992,6 +1478,7 @@
session->framesOut = 0;
+#ifdef WEBRTC_LEGACY
if (session->inResampler != NULL) {
speex_resampler_destroy(session->inResampler);
session->inResampler = NULL;
@@ -1043,6 +1530,7 @@
return -EINVAL;
}
}
+#endif
session->state = PREPROC_SESSION_STATE_CONFIG;
return 0;
@@ -1079,6 +1567,7 @@
return -EINVAL;
}
uint32_t inCnl = audio_channel_count_from_out_mask(config->inputCfg.channels);
+#ifdef WEBRTC_LEGACY
const webrtc::ProcessingConfig processing_config = {
{{static_cast<int>(session->apmSamplingRate), session->inChannelCount},
{static_cast<int>(session->apmSamplingRate), session->outChannelCount},
@@ -1088,9 +1577,12 @@
if (status < 0) {
return -EINVAL;
}
+#endif
session->revChannelCount = inCnl;
+#ifdef WEBRTC_LEGACY
session->revFrame->num_channels_ = inCnl;
session->revFrame->sample_rate_hz_ = session->apmSamplingRate;
+#endif
// force process buffer reallocation
session->revBufSize = 0;
session->framesRev = 0;
@@ -1114,6 +1606,7 @@
if (enabled) {
if(session->enabledMsk == 0) {
session->framesIn = 0;
+#ifdef WEBRTC_LEGACY
if (session->inResampler != NULL) {
speex_resampler_reset_mem(session->inResampler);
}
@@ -1121,13 +1614,16 @@
if (session->outResampler != NULL) {
speex_resampler_reset_mem(session->outResampler);
}
+#endif
}
session->enabledMsk |= (1 << procId);
if (HasReverseStream(procId)) {
session->framesRev = 0;
+#ifdef WEBRTC_LEGACY
if (session->revResampler != NULL) {
speex_resampler_reset_mem(session->revResampler);
}
+#endif
session->revEnabledMsk |= (1 << procId);
}
} else {
@@ -1252,6 +1748,7 @@
return 0;
}
+#ifdef WEBRTC_LEGACY
if (session->inResampler != NULL) {
size_t fr = session->frameCount - session->framesIn;
if (inBuffer->frameCount < fr) {
@@ -1335,6 +1832,28 @@
session->procFrame->samples_per_channel_ = session->apmFrameCount;
effect->session->apm->ProcessStream(session->procFrame);
+#else
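+ // New APM API: ProcessStream() consumes interleaved int16_t samples with
+ // explicit input/output StreamConfigs and resamples internally, so the
+ // speex resampler stages of the legacy path are not needed here.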
+ size_t fr = session->frameCount - session->framesIn;
+ if (inBuffer->frameCount < fr) {
+ fr = inBuffer->frameCount;
+ }
+ session->framesIn += fr;
+ inBuffer->frameCount = fr;
+ if (session->framesIn < session->frameCount) {
+ return 0;
+ }
+ session->framesIn = 0;
+ if (int status = effect->session->apm->ProcessStream(
+ (const int16_t* const)inBuffer->s16,
+ (const webrtc::StreamConfig)effect->session->inputConfig,
+ (const webrtc::StreamConfig)effect->session->outputConfig,
+ (int16_t* const)outBuffer->s16);
+ status != 0) {
+ ALOGE("Process Stream failed with error %d\n", status);
+ return status;
+ }
+ outBuffer->frameCount = inBuffer->frameCount;
+#endif
if (session->outBufSize < session->framesOut + session->frameCount) {
int16_t *buf;
@@ -1350,6 +1869,7 @@
session->outBuf = buf;
}
+#ifdef WEBRTC_LEGACY
if (session->outResampler != NULL) {
spx_uint32_t frIn = session->apmFrameCount;
spx_uint32_t frOut = session->frameCount;
@@ -1375,6 +1895,9 @@
session->framesOut += session->frameCount;
}
size_t fr = session->framesOut;
+#else
+ fr = session->framesOut;
+#endif
if (framesRq - framesWr < fr) {
fr = framesRq - framesWr;
}
@@ -1794,6 +2317,7 @@
if ((session->revProcessedMsk & session->revEnabledMsk) == session->revEnabledMsk) {
effect->session->revProcessedMsk = 0;
+#ifdef WEBRTC_LEGACY
if (session->revResampler != NULL) {
size_t fr = session->frameCount - session->framesRev;
if (inBuffer->frameCount < fr) {
@@ -1858,6 +2382,27 @@
}
session->revFrame->samples_per_channel_ = session->apmFrameCount;
effect->session->apm->AnalyzeReverseStream(session->revFrame);
+#else
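+ // Reverse (far-end) path: ProcessReverseStream() replaces the legacy
+ // AnalyzeReverseStream() and takes the same interleaved int16_t format.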
+ size_t fr = session->frameCount - session->framesRev;
+ if (inBuffer->frameCount < fr) {
+ fr = inBuffer->frameCount;
+ }
+ session->framesRev += fr;
+ inBuffer->frameCount = fr;
+ if (session->framesRev < session->frameCount) {
+ return 0;
+ }
+ session->framesRev = 0;
+ if (int status = effect->session->apm->ProcessReverseStream(
+ (const int16_t* const)inBuffer->s16,
+ (const webrtc::StreamConfig)effect->session->revConfig,
+ (const webrtc::StreamConfig)effect->session->revConfig,
+ (int16_t* const)outBuffer->s16);
+ status != 0) {
+ ALOGE("Process Reverse Stream failed with error %d\n", status);
+ return status;
+ }
+#endif
return 0;
} else {
return -ENODATA;
diff --git a/media/libeffects/preprocessing/tests/Android.bp b/media/libeffects/preprocessing/tests/Android.bp
index 71f6e8f..045b0d3 100644
--- a/media/libeffects/preprocessing/tests/Android.bp
+++ b/media/libeffects/preprocessing/tests/Android.bp
@@ -1,5 +1,37 @@
// audio preprocessing unit test
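+// Legacy variant of the test: links against libaudiopreprocessing_legacy and
+// exercises the WEBRTC_LEGACY code paths.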
cc_test {
+ name: "AudioPreProcessingLegacyTest",
+
+ vendor: true,
+
+ relative_install_path: "soundfx",
+
+ srcs: ["PreProcessingTest.cpp"],
+
+ shared_libs: [
+ "libaudiopreprocessing_legacy",
+ "libaudioutils",
+ "liblog",
+ "libutils",
+ "libwebrtc_audio_preprocessing",
+ ],
+
+ cflags: [
+ "-DWEBRTC_POSIX",
+ "-DWEBRTC_LEGACY",
+ "-fvisibility=default",
+ "-Wall",
+ "-Werror",
+ "-Wextra",
+ ],
+
+ header_libs: [
+ "libaudioeffects",
+ "libhardware_headers",
+ ],
+}
+
+cc_test {
name: "AudioPreProcessingTest",
vendor: true,
@@ -13,16 +45,7 @@
"libaudioutils",
"liblog",
"libutils",
- "libwebrtc_audio_preprocessing",
],
-
- cflags: [
- "-DWEBRTC_POSIX",
- "-fvisibility=default",
- "-Wall",
- "-Werror",
- ],
-
header_libs: [
"libaudioeffects",
"libhardware_headers",
diff --git a/media/libeffects/preprocessing/tests/PreProcessingTest.cpp b/media/libeffects/preprocessing/tests/PreProcessingTest.cpp
index 5c81d78..3244c1f 100644
--- a/media/libeffects/preprocessing/tests/PreProcessingTest.cpp
+++ b/media/libeffects/preprocessing/tests/PreProcessingTest.cpp
@@ -14,23 +14,19 @@
* limitations under the License.
*/
+#include <getopt.h>
+#include <stddef.h>
+#include <stdint.h>
+#include <sys/stat.h>
+#include <vector>
+
#include <audio_effects/effect_aec.h>
#include <audio_effects/effect_agc.h>
+#ifndef WEBRTC_LEGACY
+#include <audio_effects/effect_agc2.h>
+#endif
#include <audio_effects/effect_ns.h>
-#include <audio_processing.h>
-#include <getopt.h>
-#include <hardware/audio_effect.h>
-#include <module_common_types.h>
-#include <stdlib.h>
-#include <string.h>
-#include <sys/stat.h>
-#include <utils/Log.h>
-#include <utils/Timers.h>
-
-#include <audio_utils/channels.h>
-#include <audio_utils/primitives.h>
#include <log/log.h>
-#include <system/audio.h>
// This is the only symbol that needs to be imported
extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;
@@ -42,6 +38,9 @@
// types of pre processing modules
enum PreProcId {
PREPROC_AGC, // Automatic Gain Control
+#ifndef WEBRTC_LEGACY
+ PREPROC_AGC2, // Automatic Gain Control 2
+#endif
PREPROC_AEC, // Acoustic Echo Canceler
PREPROC_NS, // Noise Suppressor
PREPROC_NUM_EFFECTS
@@ -58,6 +57,12 @@
ARG_AGC_COMP_LVL,
ARG_AEC_DELAY,
ARG_NS_LVL,
+#ifndef WEBRTC_LEGACY
+ ARG_AEC_MOBILE,
+ ARG_AGC2_GAIN,
+ ARG_AGC2_LVL,
+ ARG_AGC2_SAT_MGN
+#endif
};
struct preProcConfigParams_t {
@@ -66,11 +71,19 @@
int nsLevel = 0; // a value between 0-3
int agcTargetLevel = 3; // in dB
int agcCompLevel = 9; // in dB
+#ifndef WEBRTC_LEGACY
+ float agc2Gain = 0.f; // in dB
+ float agc2SaturationMargin = 2.f; // in dB
+ int agc2Level = 0; // either kRms(0) or kPeak(1)
+#endif
int aecDelay = 0; // in ms
};
const effect_uuid_t kPreProcUuids[PREPROC_NUM_EFFECTS] = {
{0xaa8130e0, 0x66fc, 0x11e0, 0xbad0, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // agc uuid
+#ifndef WEBRTC_LEGACY
+ {0x89f38e65, 0xd4d2, 0x4d64, 0xad0e, {0x2b, 0x3e, 0x79, 0x9e, 0xa8, 0x86}}, // agc2 uuid
+#endif
{0xbb392ec0, 0x8d4d, 0x11e0, 0xa896, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // aec uuid
{0xc06c8400, 0x8e06, 0x11e0, 0x9cb6, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // ns uuid
};
@@ -126,14 +139,30 @@
printf("\n Enable Noise Suppression, default disabled");
printf("\n --agc");
printf("\n Enable Gain Control, default disabled");
+#ifndef WEBRTC_LEGACY
+ printf("\n --agc2");
+ printf("\n Enable Gain Controller 2, default disabled");
+#endif
printf("\n --ns_lvl <ns_level>");
printf("\n Noise Suppression level in dB, default value 0dB");
printf("\n --agc_tgt_lvl <target_level>");
printf("\n AGC Target Level in dB, default value 3dB");
printf("\n --agc_comp_lvl <comp_level>");
printf("\n AGC Comp Level in dB, default value 9dB");
+#ifndef WEBRTC_LEGACY
+ printf("\n --agc2_gain <fixed_digital_gain>");
+ printf("\n AGC Fixed Digital Gain in dB, default value 0dB");
+ printf("\n --agc2_lvl <level_estimator>");
+ printf("\n AGC Adaptive Digital Level Estimator, default value kRms");
+ printf("\n --agc2_sat_mgn <saturation_margin>");
+ printf("\n AGC Adaptive Digital Saturation Margin in dB, default value 2dB");
+#endif
printf("\n --aec_delay <delay>");
printf("\n AEC delay value in ms, default value 0ms");
+#ifndef WEBRTC_LEGACY
+ printf("\n --aec_mobile");
+ printf("\n Enable mobile mode of echo canceller, default disabled");
+#endif
printf("\n");
}
@@ -184,6 +213,9 @@
const char *outputFile = nullptr;
const char *farFile = nullptr;
int effectEn[PREPROC_NUM_EFFECTS] = {0};
+#ifndef WEBRTC_LEGACY
+ int aecMobileMode = 0;
+#endif
const option long_opts[] = {
{"help", no_argument, nullptr, ARG_HELP},
@@ -194,11 +226,22 @@
{"ch_mask", required_argument, nullptr, ARG_CH_MASK},
{"agc_tgt_lvl", required_argument, nullptr, ARG_AGC_TGT_LVL},
{"agc_comp_lvl", required_argument, nullptr, ARG_AGC_COMP_LVL},
+#ifndef WEBRTC_LEGACY
+ {"agc2_gain", required_argument, nullptr, ARG_AGC2_GAIN},
+ {"agc2_lvl", required_argument, nullptr, ARG_AGC2_LVL},
+ {"agc2_sat_mgn", required_argument, nullptr, ARG_AGC2_SAT_MGN},
+#endif
{"aec_delay", required_argument, nullptr, ARG_AEC_DELAY},
{"ns_lvl", required_argument, nullptr, ARG_NS_LVL},
{"aec", no_argument, &effectEn[PREPROC_AEC], 1},
{"agc", no_argument, &effectEn[PREPROC_AGC], 1},
+#ifndef WEBRTC_LEGACY
+ {"agc2", no_argument, &effectEn[PREPROC_AGC2], 1},
+#endif
{"ns", no_argument, &effectEn[PREPROC_NS], 1},
+#ifndef WEBRTC_LEGACY
+ {"aec_mobile", no_argument, &aecMobileMode, 1},
+#endif
{nullptr, 0, nullptr, 0},
};
struct preProcConfigParams_t preProcCfgParams {};
@@ -246,6 +289,20 @@
preProcCfgParams.agcCompLevel = atoi(optarg);
break;
}
+#ifndef WEBRTC_LEGACY
+ case ARG_AGC2_GAIN: {
+ preProcCfgParams.agc2Gain = atof(optarg);
+ break;
+ }
+ case ARG_AGC2_LVL: {
+ preProcCfgParams.agc2Level = atoi(optarg);
+ break;
+ }
+ case ARG_AGC2_SAT_MGN: {
+ preProcCfgParams.agc2SaturationMargin = atof(optarg);
+ break;
+ }
+#endif
case ARG_AEC_DELAY: {
preProcCfgParams.aecDelay = atoi(optarg);
break;
@@ -342,6 +399,31 @@
return EXIT_FAILURE;
}
}
+#ifndef WEBRTC_LEGACY
+ if (effectEn[PREPROC_AGC2]) {
+ if (int status = preProcSetConfigParam(AGC2_PARAM_FIXED_DIGITAL_GAIN,
+ (float)preProcCfgParams.agc2Gain,
+ effectHandle[PREPROC_AGC2]);
+ status != 0) {
+ ALOGE("Invalid AGC2 Fixed Digital Gain. Error %d\n", status);
+ return EXIT_FAILURE;
+ }
+ if (int status = preProcSetConfigParam(AGC2_PARAM_ADAPT_DIGI_LEVEL_ESTIMATOR,
+ (uint32_t)preProcCfgParams.agc2Level,
+ effectHandle[PREPROC_AGC2]);
+ status != 0) {
+ ALOGE("Invalid AGC2 Level Estimator. Error %d\n", status);
+ return EXIT_FAILURE;
+ }
+ if (int status = preProcSetConfigParam(AGC2_PARAM_ADAPT_DIGI_EXTRA_SATURATION_MARGIN,
+ (float)preProcCfgParams.agc2SaturationMargin,
+ effectHandle[PREPROC_AGC2]);
+ status != 0) {
+ ALOGE("Invalid AGC2 Saturation Margin. Error %d\n", status);
+ return EXIT_FAILURE;
+ }
+ }
+#endif
if (effectEn[PREPROC_NS]) {
if (int status = preProcSetConfigParam(NS_PARAM_LEVEL, (uint32_t)preProcCfgParams.nsLevel,
effectHandle[PREPROC_NS]);
@@ -350,6 +432,16 @@
return EXIT_FAILURE;
}
}
+#ifndef WEBRTC_LEGACY
+ if (effectEn[PREPROC_AEC]) {
+ if (int status = preProcSetConfigParam(AEC_PARAM_MOBILE_MODE, (uint32_t)aecMobileMode,
+ effectHandle[PREPROC_AEC]);
+ status != 0) {
+ ALOGE("Invalid AEC mobile mode. Error %d\n", status);
+ return EXIT_FAILURE;
+ }
+ }
+#endif
// Process Call
const int frameLength = (int)(preProcCfgParams.samplingFreq * kTenMilliSecVal);