使用AudioCapturer采集音频,
使用AudioRenderer渲染音频,
需要权限:
ohos.permission.MICROPHONE
代码实现:api9
import audio from '@ohos.multimedia.audio';
import hilog from '@ohos.hilog';
const TAG = 'DJJManage';
/**
 * Walkie-talkie (对讲机) manager: captures microphone audio with
 * AudioCapturer and immediately renders it back through AudioRenderer.
 *
 * Required permission: ohos.permission.MICROPHONE
 *
 * NOTE(review): simultaneous capture + playback on the same device produces
 * echo on API 9; API 10 adds a setter for the audio scene mode on
 * AudioManager which may enable echo cancellation — confirm on upgrade.
 *
 * @author: hwk
 * @date: 2024/3/15 14:11
 */
export class DJJManage {
  private static mInstance: DJJManage;

  private constructor() {
  }

  /** Returns the lazily-created process-wide singleton. */
  public static get(): DJJManage {
    if (!DJJManage.mInstance) {
      DJJManage.mInstance = new DJJManage();
    }
    return DJJManage.mInstance;
  }

  private audioManager: audio.AudioManager;
  private isStart: boolean = false; // true while the capture->render loop runs
  // BUGFIX: `audio.AudioCapturer = undefined` is invalid under
  // strictNullChecks; the fields are genuinely optional until init() finishes.
  private audioCapturer: audio.AudioCapturer | undefined = undefined;
  private audioRenderer: audio.AudioRenderer | undefined = undefined;

  // PCM stream parameters shared by capturer and renderer:
  // 48 kHz, stereo, 16-bit little-endian, raw (unencoded).
  private audioStreamInfo: audio.AudioStreamInfo = {
    samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_48000, // sampling rate
    channels: audio.AudioChannel.CHANNEL_2, // channel count
    sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE, // sample format
    encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW // encoding type
  }
  private audioCapturerInfo: audio.AudioCapturerInfo = { // capturer info
    source: audio.SourceType.SOURCE_TYPE_VOICE_COMMUNICATION, // source type
    capturerFlags: 0 // capturer flags
  }
  private audioRendererInfo: audio.AudioRendererInfo = { // renderer info
    content: audio.ContentType.CONTENT_TYPE_SPEECH, // content type
    usage: audio.StreamUsage.STREAM_USAGE_VOICE_COMMUNICATION, // stream usage
    rendererFlags: 0 // renderer flags
  }
  private audioCapturerOptions: audio.AudioCapturerOptions = { // capturer config
    streamInfo: this.audioStreamInfo,
    capturerInfo: this.audioCapturerInfo
  }
  private audioRendererOptions = { // renderer config
    streamInfo: this.audioStreamInfo,
    rendererInfo: this.audioRendererInfo
  }

  /**
   * Creates the renderer and capturer asynchronously and routes communication
   * audio to the loudspeaker. Call once before start(); both instances are
   * delivered in callbacks, so start() guards against them being absent.
   */
  public init() {
    this.audioManager = audio.getAudioManager();
    let audioRoutingManager = this.audioManager.getRoutingManager();
    audioRoutingManager.setCommunicationDevice(audio.CommunicationDeviceType.SPEAKER, true, (err) => {
      if (err) {
        console.error(`Failed to set the active status of the device. ${err}`);
        return;
      }
      console.info('Callback invoked to indicate that the device is set to the active status.');
    });
    // Create the AudioRenderer instance.
    audio.createAudioRenderer(this.audioRendererOptions, (err, audioRenderer: audio.AudioRenderer) => {
      if (err) {
        console.error(`${TAG}: 渲染 creating AudioRenderer failed, error: ${err.message}`);
        return;
      }
      // BUGFIX: success was previously logged at error level.
      console.info(`${TAG}: 渲染 creating AudioRenderer success`);
      this.audioRenderer = audioRenderer;
      this.audioRenderer.on('stateChange', (state) => { // fires on state transitions
        if (state == 2) { // 2 == STATE_RUNNING
          console.info(' 渲染 audio renderer state is: STATE_RUNNING');
        }
      });
      this.audioRenderer.on('markReach', 1000, (position) => { // after 1000 rendered frames
        if (position == 1000) {
          console.info(' 渲染 ON Triggered successfully');
        }
      });
    });
    // Create the AudioCapturer instance.
    audio.createAudioCapturer(this.audioCapturerOptions, (err, capturer: audio.AudioCapturer) => {
      if (err) {
        console.error(` 采集 Invoke createAudioCapturer failed, code is ${err.code}, message is ${err.message}`);
        return;
      }
      console.info(`${TAG}: 采集 create AudioCapturer success`);
      this.audioCapturer = capturer;
      this.audioCapturer.on('markReach', 1000, (position) => { // after 1000 captured frames
        if (position === 1000) {
          console.info(' 采集 ON Triggered successfully');
        }
      });
      this.audioCapturer.on('periodReach', 2000, (position) => { // every 2000 captured frames
        if (position === 2000) {
          console.info(' 采集 ON Triggered successfully');
        }
      });
    });
  }

  /**
   * Starts the renderer and the capturer, then loops: read one capture
   * buffer and write it to the renderer, until stop() clears isStart.
   */
  async start() {
    // BUGFIX: guard against start() running before init()'s async creation
    // callbacks have delivered the renderer/capturer (previously crashed on
    // undefined). Locals also keep strict null-narrowing across awaits.
    const renderer = this.audioRenderer;
    const capturer = this.audioCapturer;
    if (!renderer || !capturer) {
      console.error(`${TAG} start failed: call init() and wait for stream creation to finish`);
      return;
    }
    this.audioManager.getAudioScene((err, value) => {
      if (err) {
        console.error(`audioManager Failed to obtain the audio scene mode. ${err}`);
        return;
      }
      console.info(`audioManager Callback invoked to indicate that the audio scene mode is obtained ${value}.`);
    });
    this.isStart = true;
    // Starting is legal only from prepared / paused / stopped.
    let stateGroup = [audio.AudioState.STATE_PREPARED, audio.AudioState.STATE_PAUSED, audio.AudioState.STATE_STOPPED];
    if (stateGroup.indexOf(renderer.state) === -1) {
      console.error(`${TAG} 渲染 audioRenderer start failed`);
      return;
    }
    await renderer.start(); // start rendering
    if (stateGroup.indexOf(capturer.state) === -1) {
      console.error(`${TAG} 采集 audioCapturer start failed`);
      return;
    }
    await capturer.start(); // start capturing
    // Capture -> render loop: play each captured buffer straight back.
    while (this.isStart) {
      let bufferSizeCapturer = await capturer.getBufferSize();
      let bufferCapturer = await capturer.read(bufferSizeCapturer, true);
      if (bufferCapturer === undefined) {
        console.error(`${TAG}: 采集 read buffer failed`);
      } else {
        console.info(`${TAG}: 采集 write date: audioCapturer ${bufferCapturer.byteLength}`);
        // Promisify the callback-style write so the loop stays sequential.
        await new Promise((resolve, reject) => {
          renderer.write(bufferCapturer, (err, writeSize) => {
            if (err) {
              reject(err);
            } else {
              resolve(writeSize);
            }
          });
        });
      }
    }
  }

  /**
   * Stops the capture/render loop, then stops and releases both streams.
   * Each stream is handled independently so one failing precondition cannot
   * strand the other.
   */
  async stop() {
    this.isStart = false; // makes the start() loop exit after the current frame
    // BUGFIX: the original returned early when the renderer was not
    // running/paused, which skipped stopping the capturer AND releasing both.
    if (this.audioRenderer &&
      (this.audioRenderer.state === audio.AudioState.STATE_RUNNING || this.audioRenderer.state === audio.AudioState.STATE_PAUSED)) {
      await this.audioRenderer.stop(); // stop rendering
    } else {
      // Only a renderer in running or paused state can be stopped.
      console.error('Renderer is not running or paused.');
    }
    if (this.audioCapturer &&
      (this.audioCapturer.state === audio.AudioState.STATE_RUNNING || this.audioCapturer.state === audio.AudioState.STATE_PAUSED)) {
      await this.audioCapturer.stop(); // stop capturing
    } else {
      console.error('Capturer is not running or paused');
    }
    // BUGFIX: release() was fire-and-forget; await so callers observe completion.
    await this.release();
  }

  /**
   * Releases the native resources of both streams. Safe to call repeatedly;
   * already-released streams are skipped, not re-released.
   */
  async release() {
    // BUGFIX: the original returned as soon as the renderer was already
    // released, leaking the capturer; release each stream independently.
    if (this.audioRenderer && this.audioRenderer.state !== audio.AudioState.STATE_RELEASED) {
      await this.audioRenderer.release();
    } else {
      console.error('Renderer already released');
    }
    // A capturer can be released only outside STATE_RELEASED / STATE_NEW.
    if (this.audioCapturer &&
      this.audioCapturer.state !== audio.AudioState.STATE_RELEASED && this.audioCapturer.state !== audio.AudioState.STATE_NEW) {
      await this.audioCapturer.release();
    } else {
      console.error('Capturer already released');
    }
    // Drop the references so start() cannot reuse dead streams.
    this.audioRenderer = undefined;
    this.audioCapturer = undefined;
  }
}
使用方式:
初始化:DJJManage.get().init();
开始:DJJManage.get().start();
停止:DJJManage.get().stop();
已知问题:
1. 当前基于 API 9,同一设备边录边播会产生回音(无法启用回声消除)。
2. API 9 的 AudioManager 只提供获取音频场景模式的接口,没有设置接口;API 10 才提供设置音频场景模式的接口。