/* Data loading / initialization of the WebRTC echo canceller. */
/**
 * Filter preprocess callback: initializes the WebRTC echo canceller.
 *
 * Creates and configures either the full AEC (WebRTCAECTypeNormal) or the
 * mobile AECM (WebRTCAECTypeMobile) instance depending on s->aec_type and the
 * compile-time BUILD_AEC / BUILD_AECM options. On any initialization failure
 * the filter falls back to bypass mode (s->bypass_mode = TRUE) so audio keeps
 * flowing untouched. Finally, the delayed-reference bufferizer is pre-filled
 * with delay_ms worth of silence so that the reference signal is time-aligned
 * with the echo picked up by the microphone.
 *
 * @param f the MSFilter whose data points to the WebRTCAECState.
 */
static void webrtc_aec_preprocess(MSFilter *f) {
	WebRTCAECState *s = (WebRTCAECState *) f->data;
#ifdef BUILD_AEC
	AecConfig aec_config;
#endif
#ifdef BUILD_AECM
	AecmConfig aecm_config;
	int error_code;
#endif
	int delay_samples = 0;
	mblk_t *m;

	s->echostarted = FALSE;
	/* Convert the configured delay from milliseconds to samples. */
	delay_samples = s->delay_ms * s->samplerate / 1000;
	/* Scale the nominal 8 kHz framesize to the actual sampling rate. */
	s->framesize = (framesize * s->samplerate) / 8000;
	ms_message("Initializing WebRTC echo canceler with framesize=%i, delay_ms=%i, delay_samples=%i", s->framesize, s->delay_ms, delay_samples);
	configure_flow_controlled_bufferizer(s);

#ifdef BUILD_AEC
	if (s->aec_type == WebRTCAECTypeNormal) {
		if ((s->aecInst = WebRtcAec_Create()) == NULL) {
			s->bypass_mode = TRUE;
			ms_error("WebRtcAec_Create(): error, entering bypass mode");
			return;
		}
		/* The AEC core caps its internal rate at 48 kHz. */
		if ((WebRtcAec_Init(s->aecInst, MIN(48000, s->samplerate), s->samplerate)) < 0) {
			ms_error("WebRtcAec_Init(): WebRTC echo canceller does not support %d samplerate", s->samplerate);
			s->bypass_mode = TRUE;
			ms_error("Entering bypass mode");
			return;
		}
		aec_config.nlpMode = kAecNlpAggressive;
		aec_config.skewMode = kAecFalse;
		aec_config.metricsMode = kAecFalse;
		aec_config.delay_logging = kAecFalse;
		if (WebRtcAec_set_config(s->aecInst, aec_config) != 0) {
			ms_error("WebRtcAec_set_config(): failed.");
		}
	}
#endif
#ifdef BUILD_AECM
	if (s->aec_type == WebRTCAECTypeMobile) {
		if ((s->aecInst = WebRtcAecm_Create()) == NULL) {
			s->bypass_mode = TRUE;
			ms_error("WebRtcAecm_Create(): error, entering bypass mode");
			return;
		}
		if ((error_code = WebRtcAecm_Init(s->aecInst, s->samplerate)) < 0) {
			if (error_code == AECM_BAD_PARAMETER_ERROR) {
				ms_error("WebRtcAecm_Init(): WebRTC echo canceller does not support %d samplerate", s->samplerate);
			}
			s->bypass_mode = TRUE;
			ms_error("Entering bypass mode");
			return;
		}
		aecm_config.cngMode = TRUE;
		aecm_config.echoMode = 3;
		if (WebRtcAecm_set_config(s->aecInst, aecm_config) != 0) {
			ms_error("WebRtcAecm_set_config(): failed.");
		}
	}
#endif
	/* Pre-fill the delayed-reference buffer with silence covering the
	 * configured delay (2 bytes per 16-bit sample). allocb() does not zero
	 * its payload, so clear it explicitly — otherwise uninitialized memory
	 * would be fed to the canceller as "silence". */
	m = allocb(delay_samples * 2, 0);
	memset(m->b_wptr, 0, delay_samples * 2);
	m->b_wptr += delay_samples * 2;
	ms_bufferizer_put(&s->delayed_ref, m);
	s->nominal_ref_samples = delay_samples;
}
/* inputs[0]= reference signal from far end (sent to soundcard)
* inputs[1]= near speech & echo signal (read from soundcard)
* outputs[0]= is a copy of inputs[0] to be sent to soundcard
* outputs[1]= near end speech, echo removed - towards far end
*/
/**
 * Filter process callback: runs one round of echo cancellation.
 *
 * inputs[0]  = far-end reference signal (what is being sent to the soundcard)
 * inputs[1]  = near-end signal (mic capture: speech + echo)
 * outputs[0] = copy of inputs[0], forwarded to the soundcard
 * outputs[1] = near-end speech with the echo removed, sent to the far end
 *
 * In bypass mode both inputs are forwarded untouched. Otherwise the reference
 * is duplicated into two bufferizers (a delayed copy fed to the canceller and
 * a flow-controlled copy forwarded to the soundcard), and each full frame of
 * echo samples is processed through the AEC/AECM engine.
 */
static void webrtc_aec_process(MSFilter *f) {
	WebRTCAECState *s = (WebRTCAECState *) f->data;
	/* Size in bytes of one processing frame of 16-bit samples. */
	int nbytes = s->framesize * sizeof(int16_t);
	mblk_t *refm;
	int16_t *ref, *echo;
	int nbands = 1;
	int bandsize = s->framesize;

	/* Bypass: forward both streams unchanged and do no processing. */
	if (s->bypass_mode) {
		while ((refm = ms_queue_get(f->inputs[0])) != NULL) {
			ms_queue_put(f->outputs[0], refm);
		}
		while ((refm = ms_queue_get(f->inputs[1])) != NULL) {
			ms_queue_put(f->outputs[1], refm);
		}
		return;
	}

	if (f->inputs[0] != NULL) {
		if (s->echostarted) {
			/* Duplicate each reference block: one copy goes to the delayed
			 * bufferizer (aligned with the echo for the canceller), the
			 * original goes to the flow-controlled bufferizer (played out). */
			while ((refm = ms_queue_get(f->inputs[0])) != NULL) {
				mblk_t *cp=dupmsg(refm);
				ms_bufferizer_put(&s->delayed_ref,cp);
				ms_flow_controlled_bufferizer_put(&s->ref,refm);
			}
		} else {
			/* No echo frames seen yet: buffering the reference now would only
			 * desynchronize it, so drop it. */
			ms_warning("Getting reference signal but no echo to synchronize on.");
			ms_queue_flush(f->inputs[0]);
		}
	}

	/* Accumulate mic (near-end) samples until full frames are available. */
	ms_bufferizer_put_from_queue(&s->echo, f->inputs[1]);

	/* Scratch frames on the stack; nbytes is bounded by the framesize. */
	ref = (int16_t *) alloca(nbytes);
	echo = (int16_t *) alloca(nbytes);

#ifdef BUILD_AEC
	if (s->aec_type == WebRTCAECTypeNormal) {
		/* Above 16 kHz the AEC processes the signal in 16 kHz sub-bands of
		 * 160 samples each; create the splitting filter lazily. */
		if (s->samplerate > 16000) {
			nbands = s->samplerate / 16000;
			bandsize = 160;
		}
		if (!s->splitting_filter) {
			s->splitting_filter = mswebrtc_aec_splitting_filter_create(nbands, bandsize);
		}
	}
#endif

	/* Process one full frame at a time while enough mic samples are buffered. */
	while (ms_bufferizer_read(&s->echo, (uint8_t *)echo, (size_t)nbytes) >= (size_t)nbytes) {
		mblk_t *oecho = allocb(nbytes, 0);
		int avail;
		int avail_samples;
		if (!s->echostarted) s->echostarted = TRUE;
		/* Keep at least nominal_ref_samples of delayed reference in store
		 * (2 bytes per sample); below that threshold, pad with silence. */
		if ((avail = ms_bufferizer_get_avail(&s->delayed_ref)) < ((s->nominal_ref_samples * 2) + nbytes)) {
			/*we don't have enough to read in a reference signal buffer, inject silence instead*/
			refm = allocb(nbytes, 0);
			memset(refm->b_wptr, 0, nbytes);
			refm->b_wptr += nbytes;
			ms_bufferizer_put(&s->delayed_ref, refm);
			/*
			 * However, we don't inject this silence buffer to the sound card, in order to break the following bad loop:
			 * - the sound playback filter detects it has too many pending samples, then triggers an event to request samples to be dropped upstream.
			 * - the upstream MSFlowControl filter is requested to drop samples, which it starts to do.
			 * - necessarily shortly after the AEC goes into a situation where it has not enough reference samples while processing an audio buffer from mic.
			 * - if the AEC injects a silence buffer as output, then it will RECREATE a situation where the sound playback filter has too many pending samples.
			 * That's why we should not do this.
			 * By not doing this, we will create a discrepancy between what we really injected to the soundcard, and what we told to the
			 * echo canceller about the samples we injected. This shifts the echo. The echo canceller will re-converge quickly to take into
			 * account the situation.
			 *
			 */
			//ms_queue_put(f->outputs[0], dupmsg(refm));
			if (!s->using_zeroes) {
				ms_warning("Not enough ref samples, using zeroes");
				s->using_zeroes = TRUE;
			}
		} else {
			if (s->using_zeroes) {
				ms_message("Samples are back.");
				s->using_zeroes = FALSE;
			}
			/* read from our no-delay buffer and output */
			refm = allocb(nbytes, 0);
			if (ms_flow_controlled_bufferizer_read(&s->ref, refm->b_wptr, nbytes) == 0) {
				/* The availability check above guarantees data is present. */
				ms_fatal("Should never happen");
			}
			refm->b_wptr += nbytes;
			ms_queue_put(f->outputs[0], refm);
		}
		/*now read a valid buffer of delayed ref samples*/
		if (ms_bufferizer_read(&s->delayed_ref, (uint8_t *)ref, nbytes) == 0) {
			ms_fatal("Should never happen");
		}
		avail -= nbytes;
		/* NOTE(review): avail_samples is computed but never used below —
		 * likely a leftover from an older WebRtcAec_Process() signature that
		 * took a sound-card buffer delay argument. */
		avail_samples = avail / 2;
#ifdef EC_DUMP
		/* Optional raw dumps of reference/echo/clean signals for debugging. */
		if (s->reffile)
			fwrite(ref, nbytes, 1, s->reffile);
		if (s->echofile)
			fwrite(echo, nbytes, 1, s->echofile);
#endif
#ifdef BUILD_AEC
		if (s->aec_type == WebRTCAECTypeNormal) {
			/* Split into sub-bands, feed the far-end reference, cancel the
			 * echo per band, then recombine into the output block. */
			mswebrtc_aec_splitting_filter_analysis(s->splitting_filter, ref, echo);
			if (WebRtcAec_BufferFarend(s->aecInst,
					mswebrtc_aec_splitting_filter_get_ref(s->splitting_filter),
					(size_t)mswebrtc_aec_splitting_filter_get_bandsize(s->splitting_filter)) != 0)
				ms_error("WebRtcAec_BufferFarend() failed.");
			if (WebRtcAec_Process(s->aecInst,
					mswebrtc_aec_splitting_filter_get_echo_bands(s->splitting_filter),
					mswebrtc_aec_splitting_filter_get_number_of_bands(s->splitting_filter),
					mswebrtc_aec_splitting_filter_get_output_bands(s->splitting_filter),
					(size_t)mswebrtc_aec_splitting_filter_get_bandsize(s->splitting_filter), 0, 0) != 0)
				ms_error("WebRtcAec_Process() failed.");
			mswebrtc_aec_splitting_filter_synthesis(s->splitting_filter, (int16_t *)oecho->b_wptr);
		}
#endif
#ifdef BUILD_AECM
		if (s->aec_type == WebRTCAECTypeMobile) {
			/* AECM works full-band: buffer the far end, then process. */
			if (WebRtcAecm_BufferFarend(s->aecInst, ref, (size_t)s->framesize) != 0)
				ms_error("WebRtcAecm_BufferFarend() failed.");
			if (WebRtcAecm_Process(s->aecInst, echo, NULL, (int16_t *)oecho->b_wptr, (size_t)s->framesize, 0) != 0)
				ms_error("WebRtcAecm_Process() failed.");
		}
#endif
#ifdef EC_DUMP
		if (s->cleanfile)
			fwrite(oecho->b_wptr, nbytes, 1, s->cleanfile);
#endif
		oecho->b_wptr += nbytes;
		ms_queue_put(f->outputs[1], oecho);
	}
}
/* Public API of the i.MX AI AEC/NR core library, declared below. */
/* Include guard renamed: identifiers beginning with a double underscore are
 * reserved for the implementation (CERT DCL37-C). */
#ifndef GST_IMX_AI_AECNR_CORE_H
#define GST_IMX_AI_AECNR_CORE_H

/* Number of samples consumed and produced per processing call. */
#define AI_AECNR_IO_SAMPLES (256)
/* Fixed sampling rate expected by the library, in Hz. */
#define AI_AECNR_SAMPLE_RATE (16000)

/* API of the internal AEC/NR (acoustic echo cancellation / noise reduction)
 * library. The core instance itself is an opaque struct. */
typedef struct {
	int use_small_model;  /* NOTE(review): presumably non-zero selects a smaller/faster model — confirm with the library documentation */
	char *version;        /* Pointer allocated by the library, must not be freed by the user */
} imx_ai_aecnr_config;

/**
 * Opens an AEC/NR core instance configured per @config.
 * Returns a pointer on success or NULL on error.
 */
struct imx_ai_aecnr_core *imx_ai_aecnr_core_open(imx_ai_aecnr_config *config);

/**
 * Closes an instance previously returned by imx_ai_aecnr_core_open().
 */
void imx_ai_aecnr_core_close(struct imx_ai_aecnr_core *ai_aecnr_core);

/**
 * Processes one block of audio.
 *
 * @param ai_aecnr_core instance returned by imx_ai_aecnr_core_open()
 * @param mic_in        microphone (near-end) samples
 * @param rx_in         far-end reference samples
 * @param out           destination for the cleaned output samples
 *
 * NOTE(review): buffers are presumably AI_AECNR_IO_SAMPLES floats at
 * AI_AECNR_SAMPLE_RATE — confirm with the library documentation.
 *
 * Returns error code or 0 for success.
 */
int imx_ai_aecnr_core_process(struct imx_ai_aecnr_core *ai_aecnr_core, float *mic_in, float *rx_in, float *out);

#endif /* GST_IMX_AI_AECNR_CORE_H */