Android WebRTC AEC test example

Calling the AEC interface

#include <stdio.h>
#include <string.h>   /* memset */
#include <errno.h>
#include "webrtc/modules/audio_processing/aec/echo_cancellation.h"

#define NN 160        /* one 10 ms frame: 160 samples at 16 kHz */

/* Convert one 16-bit PCM far-end (reference/echo) buffer to float and queue
 * it for the AEC; nDataLen is the buffer length in bytes. */
int AudioBufferFarendSet(void* m_AecmInst, int nDataLen, char *szData)
{
	short *sPointer = (short *)szData;
	float m_sFar_frame[NN];
	memset(m_sFar_frame, 0, sizeof(float) * NN);
	int i = 0;
	for (i = 0; (i * 2) < nDataLen; i++)
	{
		m_sFar_frame[i] = *(sPointer + i);
	}

	const float* ptr = m_sFar_frame;

	int nRet = WebRtcAec_BufferFarend(m_AecmInst, ptr, NN);
	printf("WebRtcAec_BufferFarend = %d\n", nRet);
	return 0;
}

/* Run one 10 ms frame of 16-bit near-end audio through the AEC and write the
 * echo-cancelled result back as 16-bit PCM; nDataLen is in bytes. */
int AudioProcess(void* m_AecmInst, int nDataLen, char *szInData, char *szOutData)
{
	short *sInPointer  = (short *)szInData;
	short *sOutPointer = (short *)szOutData;

	float m_sOutNear_frame[NN];
	float m_sInNear_frame[NN];

	memset(m_sInNear_frame, 0, sizeof(float) * NN);
	memset(m_sOutNear_frame, 0, sizeof(float) * NN);

	int i = 0;

	for (i = 0; (i * 2) < nDataLen; i++)
	{
		m_sInNear_frame[i] = *(sInPointer + i);
	}

	/* WebRtcAec_Process() takes arrays of band pointers (a single band here). */
	float* const p = m_sInNear_frame;
	const float* const* ptr = &p;

	float* const q = m_sOutNear_frame;
	float* const* ptr2 = &q;

	/* num_bands = 1, NN samples, reported device buffer delay 109 ms, skew 0. */
	WebRtcAec_Process(m_AecmInst, ptr, 1, ptr2, NN, 109, 0);

	for (i = 0; (i * 2) < nDataLen; i++)
	{
		*(sOutPointer + i) = (short)m_sOutNear_frame[i];
	}

	return 0;
}
#if 0
char *f_far = "/data/AudioHfpDLTaskOutput_Dump.1.16000_2ch.pcm";
char *f_near = "/data/AudioHfpULTaskInput_Dump.1.16000_2ch.pcm";
char *f_out = "/data/out.pcm";
#else 
char *f_far = "/sdcard/mtklog/audio_dump/AudioHfpDLTaskOutput_Dump.0.16000_2ch.pcm";
char *f_near = "/sdcard/mtklog/audio_dump/AudioHfpULTaskInput_Dump.0.16000_2ch.pcm";
char *f_out = "/sdcard/mtklog/audio_dump/out.pcm";
#endif
int main(int argc, char const *argv[])
{
	void *aecmInst = NULL;
	int nn = 160;
	char far_frame[1280];
	char near_frame[1280];
	char out_frame[1280];
	aecmInst = WebRtcAec_Create(/*&aecmInst*/);
	int ret = WebRtcAec_Init(aecmInst, 16000, 16000); /* sample rate 16 kHz, sound card rate 16 kHz */
	printf("ret WebRtcAec_Init: %d\n", ret);
	AecConfig config;
	config.skewMode = kAecFalse;
	config.metricsMode = kAecFalse;
	config.delay_logging = kAecFalse;
	config.nlpMode = kAecNlpConservative;
	ret = WebRtcAec_set_config(aecmInst, config);
	printf("ret WebRtcAec_set_config: %d\n", ret);
	// FILE *fp_far  = fopen("/data/AudioHfpDLTaskOutput_Dump.1.16000_2ch.pcm", "rb");
	FILE *fp_far  = fopen(f_far, "rb");
	printf("fp_far %p errno %d\n", (void *)fp_far, errno);
	FILE *fp_near = fopen(f_near, "rb");
	// FILE *fp_near = fopen("/data/AudioHfpULTaskInput_Dump.1.16000_2ch.pcm", "rb");
	printf("fp_near %p errno %d\n", (void *)fp_near, errno);

	FILE *fp_out  = fopen(f_out, "wb");
	// FILE *fp_out  = fopen("/data/out.pcm", "wb");
	printf("fp_out %p errno %d\n", (void *)fp_out, errno);


	do {
		if (!fp_far || !fp_near || !fp_out)
		{
			printf("WebRtcAecTest open file err\n");
			break;
		}

		while (1)
		{
			/* Read one 10 ms frame (160 int16 samples) of far-end audio. */
			if (NN != fread(far_frame, sizeof(short), nn, fp_far))
				break;

			fread(near_frame, sizeof(short), nn, fp_near);
			printf("fread near_frame errno %d nn %d\n", errno, nn);

			// ret = WebRtcAec_BufferFarend(aecmInst, far_frame, nn); // buffer the reference (far-end/echo) signal
			ret = AudioBufferFarendSet(aecmInst, nn * 2, far_frame); /* nDataLen is in bytes */
			printf("AudioBufferFarendSet errno %d ret:%d\n", errno, ret);

			// WebRtcAec_Process(aecmInst, near_frame, 1, out_frame, nn, 109, 0); // echo cancellation
			printf("WebRtcAec_Process %d errno %d\n",
			       AudioProcess(aecmInst, nn * 2, near_frame, out_frame), errno);
			printf("-WebRtcAec_Process\n");

			fwrite(out_frame, sizeof(short), nn, fp_out);
			printf("out_frame\n");
		}
	} while (0);

	return 0;
}
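
The example never closes the dump files or releases the AEC instance; for a one-shot test that exits right away this is harmless, but a minimal cleanup sketch, assuming the WebRtcAec_Free() that pairs with the argument-less WebRtcAec_Create() used above, would be:

	/* Sketch only: release resources at the end of main(). */
	if (fp_far)  fclose(fp_far);
	if (fp_near) fclose(fp_near);
	if (fp_out)  fclose(fp_out);
	WebRtcAec_Free(aecmInst); /* frees the handle from WebRtcAec_Create() */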

Calling the AECM interface

#include <stdio.h>
#include <string.h>   /* memset */
#include <stdint.h>   /* int16_t */
#include <errno.h>
#include "webrtc/modules/audio_processing/aecm/echo_control_mobile.h"

#define NN 160        /* one 10 ms frame: 160 samples at 16 kHz */

/* Convert one 16-bit PCM far-end (reference/echo) buffer and queue it for the
 * AECM; nDataLen is the buffer length in bytes. */
int AudioBufferFarendSet(void* m_AecmInst, int nDataLen, char *szData)
{
	int16_t* sDataIn = (int16_t *)szData;
	int16_t  m_Frame[NN];
	memset(m_Frame, 0, sizeof(int16_t) * NN);
	for (int i = 0; (i * 2) < nDataLen; ++i)
	{
		m_Frame[i] = *(sDataIn + i);
	}
	const int16_t* farend = m_Frame;
	int nRet = WebRtcAecm_BufferFarend(m_AecmInst, farend, NN);

	printf("WebRtcAecm_BufferFarend = %d\n", nRet);
	return 0;
}

/* Run one 10 ms frame of near-end audio through the AECM and write the
 * echo-suppressed result back as 16-bit PCM; nDataLen is in bytes. */
int AudioProcess(void* m_AecmInst, int nDataLen, char *szInData, char *szOutData)
{
	int16_t* sDataIn  = (int16_t *)szInData;
	int16_t* sDataOut = (int16_t *)szOutData;
	int16_t  m_FrameIn[NN];
	int16_t  m_FrameOut[NN];
	memset(m_FrameIn, 0, sizeof(int16_t) * NN);
	memset(m_FrameOut, 0, sizeof(int16_t) * NN);
	for (int i = 0; (i * 2) < nDataLen; ++i)
	{
		m_FrameIn[i] = *(sDataIn + i);
	}
	const int16_t* fnear = m_FrameIn;
	int16_t* out = m_FrameOut;

	/* nearendClean = NULL (no pre-cleaned near-end signal), NN samples,
	 * reported device buffer delay 116 ms. */
	WebRtcAecm_Process(m_AecmInst, fnear, NULL, out, NN, 116);

	for (int i = 0; (i * 2) < nDataLen; ++i)
	{
		*(sDataOut + i) = (int16_t)m_FrameOut[i];
	}
	return 0;
}
#if 0
char *f_far = "/data/AudioHfpDLTaskOutput_Dump.1.16000_2ch.pcm";
char *f_near = "/data/AudioHfpULTaskInput_Dump.1.16000_2ch.pcm";
char *f_out = "/data/out.pcm";
#else 
char *f_far = "/sdcard/mtklog/audio_dump/AudioHfpDLTaskOutput_Dump.0.16000_2ch.pcm";
char *f_near = "/sdcard/mtklog/audio_dump/AudioHfpULTaskInput_Dump.0.16000_2ch.pcm";
char *f_out = "/sdcard/mtklog/audio_dump/out_70.pcm";
#endif
int main(int argc, char const *argv[])
{
	void *aecmInst = NULL;
	int nn = 160;
	char far_frame[1280];
	char near_frame[1280];
	char out_frame[1280];
	aecmInst = WebRtcAecm_Create(/*&aecmInst*/);
	int ret = WebRtcAecm_Init(aecmInst, 16000); /* sample rate 16 kHz */
	printf("ret WebRtcAecm_Init: %d\n", ret);
	AecmConfig config;
	config.cngMode = AecmFalse;
	config.echoMode = 3; /* default suppression level; documented range 0-4 */
	ret = WebRtcAecm_set_config(aecmInst, config);
	printf("ret WebRtcAecm_set_config: %d\n", ret);
	// FILE *fp_far  = fopen("/data/AudioHfpDLTaskOutput_Dump.1.16000_2ch.pcm", "rb");
	FILE *fp_far  = fopen(f_far, "rb");
	printf("fp_far %p errno %d\n", (void *)fp_far, errno);
	FILE *fp_near = fopen(f_near, "rb");
	// FILE *fp_near = fopen("/data/AudioHfpULTaskInput_Dump.1.16000_2ch.pcm", "rb");
	printf("fp_near %p errno %d\n", (void *)fp_near, errno);

	FILE *fp_out  = fopen(f_out, "wb");
	// FILE *fp_out  = fopen("/data/out.pcm", "wb");
	printf("fp_out %p errno %d\n", (void *)fp_out, errno);


	do {
		if (!fp_far || !fp_near || !fp_out)
		{
			printf("WebRtcAecTest open file err\n");
			break;
		}

		while (1)
		{
			/* Read one 10 ms frame (160 int16 samples) of far-end audio. */
			if (NN != fread(far_frame, sizeof(short), nn, fp_far))
				break;

			fread(near_frame, sizeof(short), nn, fp_near);
			printf("fread near_frame errno %d nn %d\n", errno, nn);

			// ret = WebRtcAec_BufferFarend(aecmInst, far_frame, nn); // buffer the reference (far-end/echo) signal
			ret = AudioBufferFarendSet(aecmInst, nn * 2, far_frame); /* nDataLen is in bytes */
			printf("AudioBufferFarendSet errno %d ret:%d\n", errno, ret);

			// WebRtcAec_Process(aecmInst, near_frame, 1, out_frame, nn, 109, 0); // echo cancellation
			printf("WebRtcAecm_Process %d errno %d\n",
			       AudioProcess(aecmInst, nn * 2, near_frame, out_frame), errno);
			printf("-WebRtcAecm_Process\n");

			fwrite(out_frame, sizeof(short), nn, fp_out);
			printf("out_frame\n");
		}
	} while (0);

	return 0;
}
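
If the AECM suppression sounds too weak, echoMode is the main knob to experiment with. In echo_control_mobile.h the field is documented with values 0-4 (3 being the default); a short sketch of switching to the most aggressive setting, on the assumption that higher values suppress more, looks like this:

	/* Sketch: re-apply the config with the highest documented echoMode. */
	AecmConfig aggressive;
	aggressive.cngMode = AecmTrue;  /* comfort-noise generation on (header default) */
	aggressive.echoMode = 4;        /* top of the documented 0-4 range */
	ret = WebRtcAecm_set_config(aecmInst, aggressive);
	printf("WebRtcAecm_set_config(echoMode=4): %d\n", ret);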

The cancellation on Android O does not seem to do much, which is a bit disappointing. Or maybe one of my parameters is wrong? If anyone knows, please let me know. Thanks.
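
One thing worth double-checking before blaming the canceller: the dump file names end in 16000_2ch, which suggests interleaved stereo PCM, yet the loop reads 160 samples at a time and treats them as a mono 10 ms frame. If the dumps really are two-channel, every frame mixes left/right samples and the far-end/near-end alignment drifts, which by itself can ruin cancellation. A hedged sketch of downmixing an interleaved stereo frame to mono before calling the helpers (StereoToMono is just an illustrative name, not part of the WebRTC API):

/* Sketch: downmix one interleaved stereo frame (L0 R0 L1 R1 ...) to mono by
 * averaging the channels; 'frames' is the number of sample pairs (e.g. NN). */
static void StereoToMono(const int16_t *stereo, int16_t *mono, int frames)
{
	for (int i = 0; i < frames; i++) {
		mono[i] = (int16_t)(((int32_t)stereo[2 * i] + (int32_t)stereo[2 * i + 1]) / 2);
	}
}

With this in place the read loop would need to fread 2 * nn shorts per 10 ms and downmix both the far-end and the near-end buffers before AudioBufferFarendSet() / AudioProcess(); if the dumps turn out to be mono after all, this step can be skipped.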
