android 中提供了 AudioRecord 和 MediaRecorder 录音的API。
audioRecord
主要是实现边录边播(AudioRecord+AudioTrack)以及对音频的实时处理(如会说话的汤姆猫、语音)
优点:语音的实时处理,可以用代码实现各种音频的封装
缺点:输出是PCM语音数据,如果保存成音频文件,是不能够被播放器播放的,所以必须先写代码实现数据编码以及压缩
MediaRecorder
已经集成了录音、编码、压缩等,支持少量的录音音频格式,大概有.aac(API = 16) .amr .3gp
优点:大部分已经集成,直接调用相关接口即可,代码量小
缺点:无法实时处理音频;输出的音频格式不是很多,例如没有输出mp3格式文件
已经集成了录音、编码、压缩等,支持少量的录音音频格式,大概有.aac(API = 16) .amr .3gp
优点:大部分已经集成,直接调用相关接口即可,代码量小
缺点:无法实时处理音频;输出的音频格式不是很多,例如没有输出mp3格式文件
用 AudioRecord 来实现录音频率图,主活动代码如下:
/**
 * Demo activity: captures microphone audio with {@code AudioRecord} and hands
 * it to {@code AudioProcess}, which runs an FFT and draws a frequency
 * spectrum on a SurfaceView. A single button toggles recording on and off.
 */
public class MainActivity extends Activity {
/** Called when the activity is first created. */
static int frequency = 8000;// sample rate in Hz
static final int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
static final int audioEncodeing = AudioFormat.ENCODING_PCM_16BIT;
static final int yMax = 50;// maximum Y-axis shrink ratio
static final int yMin = 1;// minimum Y-axis shrink ratio
int minBufferSize;// buffer size AudioRecord requires for capture
AudioRecord audioRecord;// microphone recorder
AudioProcess audioProcess = new AudioProcess();// FFT + drawing helper
Button btnStart, btnExit; // start/stop buttons
SurfaceView sfv; // drawing surface
// true = idle (next click starts recording); false = currently recording.
// NOTE(review): the name suggests the opposite meaning — consider renaming.
protected boolean isPlay = true;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
initView();
}
@Override
protected void onDestroy() {
super.onDestroy();
// NOTE(review): killing the whole process in onDestroy is an anti-pattern;
// stopping the threads and releasing the AudioRecord would be preferable.
android.os.Process.killProcess(android.os.Process.myPid());
}
// Wire up the button, the surface, and the initial drawing parameters.
private void initView() {
Context mContext = getApplicationContext();
// button
btnStart = (Button) this.findViewById(R.id.btnStart);
// drawing surface
sfv = (SurfaceView) this.findViewById(R.id.SurfaceView01);
// initial display parameters
// NOTE(review): sfv.getHeight() is 0 here — the view has not been laid
// out yet in onCreate; baseLine is recomputed in start() below anyway.
audioProcess.initDraw(yMax / 2, sfv.getHeight(), mContext, frequency);
btnStart.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
if (isPlay ) {
isPlay = false;
try {
// start recording: size the buffer, create the recorder,
// then kick off the capture/draw threads in AudioProcess
minBufferSize = AudioRecord.getMinBufferSize(
frequency, channelConfiguration,
audioEncodeing);
// minBufferSize = 2 * minBufferSize;
audioRecord = new AudioRecord(
MediaRecorder.AudioSource.MIC, frequency,
channelConfiguration, audioEncodeing,
minBufferSize);
audioProcess.baseLine = sfv.getHeight() - 100;
audioProcess.frequence = frequency;
audioProcess.start(audioRecord, minBufferSize, sfv);
Toast.makeText(
MainActivity.this,
"当前设备支持您所选择的采样率:"
+ String.valueOf(frequency),
Toast.LENGTH_SHORT).show();
btnStart.setText(R.string.btn_exit);
} catch (Exception e) {
// the device rejected the requested sample rate / format
Toast.makeText(
MainActivity.this,
"当前设备不支持你所选择的采样率"
+ String.valueOf(frequency)
+ ",请重新选择", Toast.LENGTH_SHORT)
.show();
}
} else {
// stop recording and restore the button label
isPlay = true;
btnStart.setText(R.string.btn_start);
audioProcess.stop(sfv);
}
}
});
}
}
解析频率的类代码如下:
import java.util.ArrayList;
import java.lang.Short;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.DashPathEffect;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.PathEffect;
import android.graphics.Rect;
import android.media.AudioRecord;
import android.util.Log;
import android.view.SurfaceView;
/**
 * Reads PCM samples from an {@code AudioRecord} on a capture thread, runs a
 * radix-2 decimation-in-time FFT over each buffer, and renders the resulting
 * magnitude spectrum onto a {@code SurfaceView} from a separate draw thread.
 */
public class AudioProcess {
public static final float pi= (float) 3.1415926;
//Ideally both the raw waveform and the processed spectrum would be displayed.
private ArrayList<short[]> inBuf = new ArrayList<short[]>();//raw captured samples
private ArrayList<int[]> outBuf = new ArrayList<int[]>();//FFT magnitudes, one int[] per captured buffer
// Cross-thread stop flag, polled by both worker threads.
// NOTE(review): read/written from several threads without volatile or
// synchronization — confirm the delayed-visibility risk is acceptable here.
private boolean isRecording = false;
Context mContext;
private int shift = 30;// left margin (px) where plotting starts
public int frequence = 0;
private int length = 256;// FFT size; recomputed per read as a power of two
//Y-axis shrink ratio
public int rateY = 21;
//Y-axis baseline
public int baseLine = 0;
//Initialise the drawing parameters.
public void initDraw(int rateY, int baseLine,Context mContext, int frequence){
this.mContext = mContext;
this.rateY = rateY;
this.baseLine = baseLine;
this.frequence = frequence;
}
//Start the capture thread and the draw thread.
public void start(AudioRecord audioRecord, int minBufferSize, SurfaceView sfvSurfaceView) {
isRecording = true;
new RecordThread(audioRecord, minBufferSize).start();
//new ProcessThread().start();
new DrawThread(sfvSurfaceView).start();
}
//Signal both threads to stop (they poll isRecording) and drop buffered input.
public void stop(SurfaceView sfvSurfaceView){
isRecording = false;
inBuf.clear();
}
//Capture thread: reads PCM buffers, FFTs each one, queues magnitudes in outBuf.
class RecordThread extends Thread{
private AudioRecord audioRecord;
private int minBufferSize;
public RecordThread(AudioRecord audioRecord,int minBufferSize){
this.audioRecord = audioRecord;
this.minBufferSize = minBufferSize;
}
public void run(){
try{
short[] buffer = new short[minBufferSize];
audioRecord.startRecording();
while(isRecording){
int res = audioRecord.read(buffer, 0, minBufferSize);
synchronized (inBuf){
//NOTE(review): the same array object is re-added on every
//iteration and overwritten by the next read; a copy would be
//needed if inBuf were ever consumed elsewhere.
inBuf.add(buffer);
}
//Round the sample count down to a power of two for the FFT.
length=up2int(res);
//length = 256;
short[]tmpBuf = new short[length];
System.arraycopy(buffer, 0, tmpBuf, 0, length);
Complex[]complexs = new Complex[length];
int[]outInt = new int[length];
for(int i=0;i < length; i++){
Short short1 = tmpBuf[i];
complexs[i] = new Complex(short1.doubleValue());
}
fft(complexs,length);
for (int i = 0; i < length; i++) {
outInt[i] = complexs[i].getIntValue();
}
synchronized (outBuf) {
outBuf.add(outInt);
}
}
audioRecord.stop();
}catch (Exception e) {
Log.i("Rec E",e.toString());
}
}
}
//Draw thread: drains outBuf and plots each spectrum frame.
class DrawThread extends Thread{
//drawing surface
private SurfaceView sfvSurfaceView;
//current x coordinate on screen (unused)
//paints
private Paint mPaint;
private Paint tPaint;
private Paint dashPaint;
public DrawThread(SurfaceView sfvSurfaceView) {
this.sfvSurfaceView = sfvSurfaceView;
//configure paints
mPaint = new Paint();
mPaint.setColor(Color.BLUE);
mPaint.setStrokeWidth(2);
mPaint.setAntiAlias(true);
tPaint = new Paint();
tPaint.setColor(Color.YELLOW);
tPaint.setStrokeWidth(1);
tPaint.setAntiAlias(true);
//dashed-line paint (the Path built here is never drawn)
dashPaint = new Paint();
dashPaint.setStyle(Paint.Style.STROKE);
dashPaint.setColor(Color.GRAY);
Path path = new Path();
path.moveTo(0, 10);
path.lineTo(480,10);
PathEffect effects = new DashPathEffect(new float[]{5,5,5,5},1);
dashPaint.setPathEffect(effects);
}
@SuppressWarnings("unchecked")
public void run() {
// Busy-waits when outBuf is empty.
// NOTE(review): this spin loop burns CPU; a wait/notify or a short
// sleep would be kinder to the device.
while (isRecording) {
ArrayList<int[]>buf = new ArrayList<int[]>();
synchronized (outBuf) {
if (outBuf.size() == 0) {
continue;
}
//snapshot and clear the queue under the lock
buf = (ArrayList<int[]>)outBuf.clone();
outBuf.clear();
}
//plot each queued frame
for(int i = 0; i < buf.size(); i++){
int[]tmpBuf = buf.get(i);
SimpleDraw(tmpBuf, rateY, baseLine);
}
}
}
/**
 * Draw one spectrum frame: baseline axis plus a vertical line per bin,
 * mirrored above and below the baseline.
 *
 * @param buffer   FFT magnitudes to plot
 * @param rate     Y-axis shrink ratio (currently unused: a fixed /100
 *                 scale is applied below instead)
 * @param baseLine Y coordinate of the axis (immediately overwritten
 *                 with height/2 below)
 */
private void SimpleDraw(int[] buffer, int rate, int baseLine){
// NOTE(review): lockCanvas may return null (e.g. surface destroyed);
// the result is used unchecked here.
Canvas canvas = sfvSurfaceView.getHolder().lockCanvas(
new Rect(0, 0, buffer.length,sfvSurfaceView.getHeight()));
canvas.drawColor(Color.BLACK);
baseLine = sfvSurfaceView.getHeight()/2;
// canvas.drawText("幅度值", 0, 3, 2, 15, tPaint);
// canvas.drawText("原点(0,0)", 0, 7, 5, baseLine + 15, tPaint);
// canvas.drawText("频率(HZ)", 0, 6, sfvSurfaceView.getWidth() - 50, baseLine + 30, tPaint);
//canvas.drawLine(shift, 20, shift, baseLine, tPaint);
canvas.drawLine(shift, baseLine, sfvSurfaceView.getWidth(), baseLine, tPaint);
canvas.save();
//draw the axis arrowhead via paired +/-30 degree rotations
canvas.rotate(30, shift, 20);
//canvas.drawLine(shift, 20, shift, 30, tPaint);
canvas.rotate(-60, shift, 20);
//canvas.drawLine(shift, 20, shift, 30, tPaint);
canvas.rotate(30, shift, 20);
canvas.rotate(30, sfvSurfaceView.getWidth()-1, baseLine);
canvas.drawLine(sfvSurfaceView.getWidth() - 1, baseLine, sfvSurfaceView.getWidth() - 11, baseLine, tPaint);
canvas.rotate(-60, sfvSurfaceView.getWidth()-1, baseLine);
canvas.drawLine(sfvSurfaceView.getWidth() - 1, baseLine, sfvSurfaceView.getWidth() - 11, baseLine, tPaint);
canvas.restore();
int y,y1;
for(int i = 0; i < buffer.length; i = i + 1){
//y = baseLine - buffer[i] / rateY ;
y = baseLine - buffer[i]/100;
y1 = baseLine + buffer[i]/100;
canvas.drawLine(2*i + shift, baseLine, 2*i +shift, y, mPaint);
canvas.drawLine(2*i + shift, baseLine, 2*i +shift, y1, mPaint);
}
sfvSurfaceView.getHolder().unlockCanvasAndPost(canvas);
}
}
/**
 * Largest power of two not exceeding iint; e.g. iint=320 returns 256.
 * @param iint upper bound (must be >= 1 for a meaningful result)
 * @return the greatest power of two <= iint
 */
private int up2int(int iint) {
int ret = 1;
while (ret<=iint) {
ret = ret << 1;
}
return ret>>1;
}
//In-place radix-2 decimation-in-time FFT over xin[0..N-1], N a power of two.
public void fft(Complex[] xin,int N)
{
int f,m,N2,nm,i,k,j,L;//L: butterfly stage index
float p;
int e2,le,B,ip;
Complex w = new Complex();
Complex t = new Complex();
N2 = N / 2;//number of butterflies per stage; also the decimal weight of the top bit of an m-bit index
f = N;//working copy of N used to count the number of stages
for(m = 1; (f = f / 2) != 1; m++); //m = log2(N), the number of stages
nm = N - 2;
j = N2;
/****** bit-reversal reordering (Rader's algorithm) ******/
for(i = 1; i <= nm; i++)
{
if(i < j)//avoid swapping a pair twice
{
t = xin[j];
xin[j] = xin[i];
xin[i] = t;
}
k = N2;
while(j >= k)
{
j = j - k;
k = k / 2;
}
j = j + k;
}
/****** butterfly computation ******/
for(L=1; L<=m; L++) //stages 1..m
{
e2 = (int) Math.pow(2, L);
//e2=(int)2.pow(L);
le=e2+1;
B=e2/2;
for(j=0;j<B;j++) //j from 0 to 2^(L-1)-1
{
p=2*pi/e2;
w.real = Math.cos(p * j);
//w.real=Math.cos((double)p*j); //twiddle factor W
w.image = Math.sin(p*j) * -1;
//w.imag = -sin(p*j);
for(i=j;i<N;i=i+e2) //all butterflies sharing this twiddle factor
{
ip=i+B; //paired elements are 2^(L-1) apart
t=xin[ip].cc(w);
xin[ip] = xin[i].cut(t);
xin[i] = xin[i].sum(t);
}
}
}
}
}
工具类代码如下:
public class Complex {
public double real;
public double image;
public Complex() {
// TODO Auto-generated constructor stub
this.real = 0;
this.image = 0;
}
public Complex(double real, double image){
this.real = real;
this.image = image;
}
public Complex(int real, int image) {
Integer integer = real;
this.real = integer.floatValue();
integer = image;
this.image = integer.floatValue();
}
public Complex(double real) {
this.real = real;
this.image = 0;
}
public Complex cc(Complex complex) {
Complex tmpComplex = new Complex();
tmpComplex.real = this.real * complex.real - this.image * complex.image;
tmpComplex.image = this.real * complex.image + this.image * complex.real;
return tmpComplex;
}
public Complex sum(Complex complex) {
Complex tmpComplex = new Complex();
tmpComplex.real = this.real + complex.real;
tmpComplex.image = this.image + complex.image;
return tmpComplex;
}
public Complex cut(Complex complex) {
Complex tmpComplex = new Complex();
tmpComplex.real = this.real - complex.real;
tmpComplex.image = this.image - complex.image;
return tmpComplex;
}
public int getIntValue(){
int ret = 0;
ret = (int) Math.round(Math.sqrt(this.real*this.real - this.image*this.image));
return ret;
}
}
如果用 MediaRecorder 来实现录音的话,由于它不能解析声波的频率,只能获得声音强弱值:调用 MediaRecorder.getMaxAmplitude()方法来获得声音的最大值。如果把 MediaRecorder 和 AudioRecord 结合一起用的话系统会出错。所以如果一定要做频率图,我们只能利用声音的最大值来做一个假的效果图。下面是我利用sin函数做的假的效果图,也可以利用别的函数来做。代码如下:
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.DashPathEffect;
import android.graphics.LinearGradient;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.PathEffect;
import android.graphics.Rect;
import android.graphics.Shader;
import android.util.Log;
import android.view.SurfaceView;
/**
 * Fake-spectrum renderer for use with MediaRecorder: since MediaRecorder
 * only exposes the current maximum amplitude (not frequency data), this
 * class synthesises a sine-shaped bar pattern scaled by that amplitude and
 * draws it on a SurfaceView from a background thread.
 */
public class AudioProcess {
private long[] outBuf;// synthesised bar heights for one frame
private boolean isRecording = false;
Context mContext;
private int shift = 30;// left margin (px); only used as a rotation pivot here
// Y-axis baseline
public int baseLine = 0;
// latest max amplitude reported by MediaRecorder.getMaxAmplitude()
public long mMaxLine;
// Initialise the drawing parameters.
public void initDraw(int baseLine, Context mContext) {
this.mContext = mContext;
this.baseLine = baseLine;
}
// Start the draw thread.
public void start(SurfaceView sfvSurfaceView) {
isRecording = true;
new DrawThread(sfvSurfaceView).start();
}
// Signal the draw thread to stop (it polls isRecording).
public void stop(SurfaceView sfvSurfaceView) {
isRecording = false;
}
// Feed in the latest amplitude.
// NOTE(review): despite the "get" name this is a setter — consider
// renaming to setMaximum for clarity.
public void getMaximum(long i){
mMaxLine = i;
}
//Synthesise one frame of fake bar data from the current max amplitude:
//a sine wave centred at mMaxLine/2, with a full-height spike in the middle.
private void getData(SurfaceView surfaceView){
//width of the SurfaceView
int mWidth;
//number of bars to draw
int number;
//horizontal spacing between bars (px)
int distance = 9;
mWidth = surfaceView.getWidth();
number = mWidth / distance;
outBuf = new long [number];
for (int i = 0; i < number; i++) {
outBuf[i] = (long) (mMaxLine/2 + mMaxLine/2 * Math.sin(i*mMaxLine)) ;
if (i == number/2) {
outBuf[i] = mMaxLine;
}
}
}
// Draw thread: regenerates fake data and redraws every ~100 ms.
class DrawThread extends Thread {
// drawing surface
private SurfaceView sfvSurfaceView;
// current x coordinate on screen (unused)
// paints
private Paint mPaint;
private Paint tPaint;
private Paint dashPaint;
public DrawThread(SurfaceView sfvSurfaceView) {
this.sfvSurfaceView = sfvSurfaceView;
// configure paints; the bars use a white-to-blue gradient
mPaint = new Paint();
LinearGradient lg=new LinearGradient(0,0,6,100,Color.WHITE,Color.BLUE,Shader.TileMode.MIRROR);
mPaint.setShader(lg);
mPaint.setStrokeWidth(6);
mPaint.setAntiAlias(true);
tPaint = new Paint();
tPaint.setColor(Color.YELLOW);
tPaint.setStrokeWidth(1);
tPaint.setAntiAlias(true);
// dashed-line paint (the Path built here is never drawn)
dashPaint = new Paint();
dashPaint.setStyle(Paint.Style.STROKE);
dashPaint.setColor(Color.GRAY);
Path path = new Path();
path.moveTo(0, 10);
path.lineTo(480, 10);
PathEffect effects = new DashPathEffect(new float[] { 5, 5, 5, 5 },
1);
dashPaint.setPathEffect(effects);
}
@SuppressWarnings("unchecked")
public void run() {
while (isRecording) {
getData(sfvSurfaceView);
if (outBuf != null) {
SimpleDraw(outBuf, baseLine);
}
try {
Thread.sleep(100);
} catch (InterruptedException e) {
// NOTE(review): swallowing the interrupt; re-interrupting the
// thread would be the conventional handling.
e.printStackTrace();
}
}
}
/**
 * Draw one frame of fake bars, mirrored above and below the baseline.
 *
 * @param buffer   synthesised bar heights
 * @param baseLine Y coordinate of the axis (immediately overwritten
 *                 with height/2 below)
 */
private void SimpleDraw(long[] buffer, int baseLine) {
// NOTE(review): lockCanvas may return null (e.g. surface destroyed);
// the result is used unchecked here.
Canvas canvas = sfvSurfaceView.getHolder().lockCanvas();
canvas.drawColor(Color.BLACK);
baseLine = sfvSurfaceView.getHeight()/2;
// canvas.drawLine(10, baseLine, sfvSurfaceView.getWidth(),
// baseLine, tPaint);
canvas.save();
// rotations left over from the axis-arrowhead drawing; the draw
// calls between them are commented out, so these are net no-ops
canvas.rotate(30, shift, 20);
canvas.rotate(-60, shift, 20);
canvas.rotate(30, shift, 20);
canvas.rotate(30, sfvSurfaceView.getWidth() - 1, baseLine);
// canvas.drawLine(sfvSurfaceView.getWidth() - 1, baseLine,
// sfvSurfaceView.getWidth() - 11, baseLine, tPaint);
canvas.rotate(-60, sfvSurfaceView.getWidth() - 1, baseLine);
// canvas.drawLine(sfvSurfaceView.getWidth() - 1, baseLine,
// sfvSurfaceView.getWidth() - 11, baseLine, tPaint);
canvas.restore();
int y,y1,a;
for (int i = 0; i < buffer.length; i = i + 1) {
// scale amplitude down, with a small floor so quiet input
// still shows short bars
a = (int) (buffer[i] / 200);
if (a < 20) {
a = a + 5;
}
y = (int) (baseLine - a);
y1 = (int) (baseLine + a);
canvas.drawLine(9 * i + 10, baseLine, 9 * i + 10, y,
mPaint);
canvas.drawLine(9 * i + 10, baseLine, 9 * i + 10, y1, mPaint);
}
sfvSurfaceView.getHolder().unlockCanvasAndPost(canvas);
}
}
}
在MainActivity中调用代码如下:
/**
 * Demo activity for the MediaRecorder-based fake spectrum: start/stop
 * buttons drive the AudioProcess animation and periodically feed it the
 * recorder's max amplitude via a Handler callback.
 * (The "..." lines below are article placeholders for the reader's own
 * MediaRecorder start/stop code.)
 */
public class MainActivity extends ActionBarActivity {
private Button mStop;
private Button mStart;
private SurfaceView surfaceView;
private AudioProcess audioProcess;
protected boolean isRecorder = true;
private final Handler mHandler = new Handler();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
audioProcess = new AudioProcess();
surfaceView = (SurfaceView) findViewById(R.id.surfaceView);
// NOTE(review): surfaceView.getHeight() is 0 in onCreate (not laid out
// yet); AudioProcess recomputes the baseline while drawing anyway.
audioProcess.initDraw(surfaceView.getHeight(), this);
mStop = (Button) findViewById(R.id.stop);
mStart = (Button) findViewById(R.id.start);
mStart.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
//1. start the MediaRecorder first (reader's own code)
...........
//start the spectrum animation
audioProcess.start(surfaceView);
mHandler.postDelayed(mUpdateVUMetur, 100);
}
});
mStop.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
//stop the amplitude-polling callback
isRecorder = false;
//stop the MediaRecorder (reader's own code)
..........
//stop the spectrum animation
audioProcess.stop(surfaceView);
}
});
}
// NOTE(review): this runnable never re-posts itself, so the amplitude is
// sampled only once; re-posting via mHandler.postDelayed(this, 100) at the
// end of run() is presumably intended — confirm against the original app.
private Runnable mUpdateVUMetur = new Runnable() {
@Override
public void run() {
if (isRecorder ) {
updateVUMeterView();
}
}
};
// Pull the latest max amplitude from the recorder and hand it to the
// drawing helper. mRecorder is the MediaRecorder (declared elsewhere).
private void updateVUMeterView() {
int max = mRecorder.getMaxAmplitude();
audioProcess.getMaximum(max);
}
}