Saving AudioRecord-Captured Audio to PCM and WAV Files on Android

The previous article, "Capturing and Playing PCM Audio Data with AudioRecord and AudioTrack on Android", covered recording and playing back raw PCM. This article shows how to save the captured audio data to PCM and WAV files. Saving PCM is simple: write the raw bytes straight to a file. Saving to WAV only requires writing the WAV format header in front of the same PCM data.
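
For reference, the header written by the writeWaveFileHeader() method below is the standard 44-byte RIFF/WAVE header. All multi-byte fields are little-endian; the values in parentheses are the ones this demo uses:

- bytes 0-3: "RIFF"
- bytes 4-7: chunk size = audio data length + 36
- bytes 8-11: "WAVE"
- bytes 12-15: "fmt "
- bytes 16-19: fmt sub-chunk size (16 for PCM)
- bytes 20-21: audio format (1 = PCM)
- bytes 22-23: channel count (2)
- bytes 24-27: sample rate (11025)
- bytes 28-31: byte rate = sampleRate * channels * bitsPerSample / 8
- bytes 32-33: block align = channels * bitsPerSample / 8
- bytes 34-35: bits per sample (16)
- bytes 36-39: "data"
- bytes 40-43: audio data length in bytes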

1. MainActivity.java

package com.example.tongjiangsong.audiorecordtrackdemo;

import java.io.IOException;
import java.io.PipedInputStream;

import android.app.Activity;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;

public class MainActivity extends Activity  implements View.OnClickListener {
    PipedInputStream in;          // receives captured PCM data from the recorder over a pipe
    boolean isRecord;             // whether recording is currently in progress
    MAudioRecord m_audio_record;  // recorder wrapper, see MAudioRecord.java
    Button record_btn;            // toggles recording on and off
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        isRecord = false;
        record_btn = (Button)findViewById(R.id.button);
        record_btn.setOnClickListener(this);
    }

    private void startRecord(){
        in = new PipedInputStream();
        new Thread(new Runnable() {

            @Override
            public void run() {
                try {
                    m_audio_record = new MAudioRecord(MainActivity.this, in);
                    m_audio_record.StartAudioData();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }).start();
    }

    @Override
    public void onClick(View view) {
        if (isRecord){
            isRecord = false;
            m_audio_record.stopRecord();
        }else{
            isRecord = true;
            startRecord();
        }
    }
}

2. MAudioRecord.java

package com.example.tongjiangsong.audiorecordtrackdemo;


import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;

import android.content.Context;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Environment;
import android.util.Log;

/*
 * Captures microphone audio with AudioRecord, saves it as a PCM file,
 * and converts the PCM file to WAV by prepending a 44-byte header.
 */
public class MAudioRecord {
    private AudioRecord audioRecord;
    private Context context;
    private boolean isRecording = false ;
    private int buffersize = 0;
    private PipedOutputStream outstream; // pipe used to stream data out (carried over from the playback demo)
    public MAudioRecord(Context context , PipedInputStream instream) throws IOException {
        this.context  = context;
        // connect the pipe so captured data can be streamed out
        outstream = new PipedOutputStream();
        outstream.connect(instream);
    }
    public void StartAudioData(){ // capture audio, save it as PCM, then convert to WAV
        int frequency = 11025;
        int channelConfiguration = AudioFormat.CHANNEL_IN_STEREO;
        int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
        buffersize = AudioRecord.getMinBufferSize(frequency, channelConfiguration, audioEncoding);
        audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, frequency, channelConfiguration, audioEncoding, buffersize);
        byte[] buffer = new byte[1024];
        audioRecord.startRecording(); // start recording
        isRecording = true;
        String tmpName = System.currentTimeMillis() + "_" + frequency;
        final File tmpFile = createFile(tmpName + ".pcm");
        final File tmpOutFile = createFile(tmpName + ".wav");
        try {
            FileOutputStream outputStream = new FileOutputStream(tmpFile.getAbsoluteFile());

            while (isRecording){
                int n_size = audioRecord.read(buffer, 0, buffer.length);
                Log.w("audiotest", "StartAudioData: read " + n_size + " bytes");
                if (n_size > 0) {
                    try {
                        // write only the bytes actually read, not the whole buffer
                        outputStream.write(buffer, 0, n_size);
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }
            outputStream.close();
            pcmToWave(tmpFile.getAbsolutePath(), tmpOutFile.getAbsolutePath());
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public File createFile(String name) {
        String dirPath = Environment.getExternalStorageDirectory().getPath()+"/AudioRecord/";
        File file = new File(dirPath);
        if(!file.exists()) {
            file.mkdirs();
        }
        String filePath = dirPath +name;
        File objFile = new File(filePath);
        if (!objFile.exists()) {
            try {
                objFile.createNewFile();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        return objFile; // return the file whether it was just created or already existed
    }

    public void stopRecord(){ // stop recording and release resources
        isRecording = false;
        audioRecord.stop();
        audioRecord.release();
        try {
            outstream.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public void pcmToWave(String inFileName, String outFileName){
        FileInputStream in = null;
        FileOutputStream out = null;
        long totalAudioLen = 0;
        long longSampleRate = 11025;            // must match the recording sample rate
        long totalDataLen = totalAudioLen + 36; // RIFF chunk size: file length minus the 8-byte "RIFF" descriptor
        int channels = 2;                       // CHANNEL_IN_STEREO
        long byteRate = 16 * longSampleRate * channels / 8;
        byte[] data = new byte[1024];
        try {
            in = new FileInputStream(inFileName);
            out = new FileOutputStream(outFileName);
            totalAudioLen = in.getChannel().size();
            totalDataLen = totalAudioLen + 36;
            writeWaveFileHeader(out, totalAudioLen, totalDataLen, longSampleRate, channels, byteRate);
            int count;
            while ((count = in.read(data)) != -1) {
                out.write(data, 0, count); // copy only the bytes actually read
            }
            in.close();
            out.close();
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public void writeWaveFileHeader(FileOutputStream out, long totalAudioLen, long totalDataLen, long longSampleRate,
                                    int channels, long byteRate) {
        byte[] header = new byte[44];
        header[0] = 'R'; // RIFF
        header[1] = 'I';
        header[2] = 'F';
        header[3] = 'F';
        header[4] = (byte) (totalDataLen & 0xff); // RIFF chunk size: audio data length + 36
        header[5] = (byte) ((totalDataLen >> 8) & 0xff);
        header[6] = (byte) ((totalDataLen >> 16) & 0xff);
        header[7] = (byte) ((totalDataLen >> 24) & 0xff);
        header[8] = 'W';//WAVE
        header[9] = 'A';
        header[10] = 'V';
        header[11] = 'E';
        //FMT Chunk
        header[12] = 'f'; // 'fmt '
        header[13] = 'm';
        header[14] = 't';
        header[15] = ' '; // trailing space completes the 'fmt ' chunk id
        // size of the 'fmt ' sub-chunk: 16 for PCM
        header[16] = 16; // 4 bytes: size of 'fmt ' chunk
        header[17] = 0;
        header[18] = 0;
        header[19] = 0;
        // audio format: 1 = PCM (uncompressed)
        header[20] = 1; // format = 1
        header[21] = 0;
        // number of channels
        header[22] = (byte) channels;
        header[23] = 0;
        // sample rate (samples per second, per channel)
        header[24] = (byte) (longSampleRate & 0xff);
        header[25] = (byte) ((longSampleRate >> 8) & 0xff);
        header[26] = (byte) ((longSampleRate >> 16) & 0xff);
        header[27] = (byte) ((longSampleRate >> 24) & 0xff);
        // byte rate = sampleRate * channels * bitsPerSample / 8
        header[28] = (byte) (byteRate & 0xff);
        header[29] = (byte) ((byteRate >> 8) & 0xff);
        header[30] = (byte) ((byteRate >> 16) & 0xff);
        header[31] = (byte) ((byteRate >> 24) & 0xff);
        // block align: bytes per sample frame = channels * bitsPerSample / 8
        header[32] = (byte) (channels * 16 / 8);
        header[33] = 0;
        // bits per sample
        header[34] = 16;
        header[35] = 0;
        //Data chunk
        header[36] = 'd';//data
        header[37] = 'a';
        header[38] = 't';
        header[39] = 'a';
        header[40] = (byte) (totalAudioLen & 0xff); // size of the raw audio data in bytes
        header[41] = (byte) ((totalAudioLen >> 8) & 0xff);
        header[42] = (byte) ((totalAudioLen >> 16) & 0xff);
        header[43] = (byte) ((totalAudioLen >> 24) & 0xff);
        try {
            out.write(header, 0, 44);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}

3. Permissions

 <uses-permission android:name="android.permission.RECORD_AUDIO"/>
 <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
 <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
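
On Android 6.0 (API 23) and above, RECORD_AUDIO and WRITE_EXTERNAL_STORAGE are "dangerous" permissions, so the manifest entries alone are not enough: they must also be requested at runtime before recording starts. Below is a minimal sketch of such a check; the helper name checkRecordPermission and the request code are my own choices, not part of the original demo.

import android.Manifest;
import android.content.pm.PackageManager;
import android.os.Build;

// Belongs inside MainActivity; call it from onCreate() before allowing recording to start.
private static final int REQUEST_RECORD = 1; // arbitrary request code

private void checkRecordPermission() {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M
            && (checkSelfPermission(Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED
                || checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED)) {
        // ask for both permissions at once; the result arrives in onRequestPermissionsResult()
        requestPermissions(new String[]{
                Manifest.permission.RECORD_AUDIO,
                Manifest.permission.WRITE_EXTERNAL_STORAGE
        }, REQUEST_RECORD);
    }
}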

 
