Android利用AudioTrack录制和播放PCM格式音频:
/**
 * Records and plays raw 16-bit PCM audio through AudioRecord / AudioTrack.
 *
 * Fixes over the original:
 *  - the channel-count constructor argument is now honoured (it was stored
 *    but the streams were hard-coded to mono)
 *  - startRecord()/playFrame() are null-guarded so they are safe after stop()
 *  - playFrame() accounts for arrayOffset() of sliced/duplicated buffers
 */
public class PCMTalk {
    private AudioRecord audioRecord;
    private int sampleRate;          // sampling rate in Hz
    private int channel;             // channel count: 1 = mono, 2 = stereo
    private ByteBuffer recordBuf;    // shared buffer reused by recordFrame()
    private AudioTrack track;

    /**
     * @param sampleRate sampling rate in Hz (e.g. 8000, 44100)
     * @param chn        channel count: 2 selects stereo, anything else mono
     */
    public PCMTalk(int sampleRate, int chn) {
        this.sampleRate = sampleRate;
        this.channel = chn;
        // Map the channel count to matching in/out channel masks (the
        // original ignored `chn` and always recorded/played mono).
        int inChannel = (chn == 2) ? AudioFormat.CHANNEL_IN_STEREO : AudioFormat.CHANNEL_IN_MONO;
        int outChannel = (chn == 2) ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO;
        // Double the minimum size for headroom against scheduling jitter.
        int bufsize = AudioRecord.getMinBufferSize(this.sampleRate, inChannel,
                AudioFormat.ENCODING_PCM_16BIT) * 2;
        audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, this.sampleRate,
                inChannel, AudioFormat.ENCODING_PCM_16BIT, bufsize);
        track = new AudioTrack(AudioManager.STREAM_MUSIC, this.sampleRate, outChannel,
                AudioFormat.ENCODING_PCM_16BIT, bufsize, AudioTrack.MODE_STREAM);
        recordBuf = ByteBuffer.allocate(bufsize);
    }

    /** Starts capturing; no-op if already recording or after stop(). */
    public void startRecord() {
        // Null check added: stop() releases and nulls audioRecord, and the
        // original would then throw a NullPointerException here.
        if (audioRecord != null && audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_STOPPED)
            audioRecord.startRecording();
    }

    /**
     * Blocks until one buffer of PCM data has been captured.
     *
     * @return the shared internal buffer positioned at the captured bytes, or
     *         null when not recording or the read failed. The buffer is
     *         overwritten by the next call — consume it before calling again.
     */
    public ByteBuffer recordFrame() {
        if (audioRecord == null || audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_STOPPED)
            return null;
        recordBuf.clear();
        int rt = audioRecord.read(recordBuf.array(), 0, recordBuf.remaining());
        if (rt < 0 || rt > recordBuf.capacity())
            return null;
        recordBuf.limit(rt);
        return recordBuf;
    }

    /**
     * Queues one PCM frame for playback, starting the track on first use.
     *
     * @param frame array-backed buffer whose remaining bytes are written
     */
    public void playFrame(ByteBuffer frame) {
        // Guards added: the original NPE'd after stop() or on a null frame.
        if (track == null || frame == null || !frame.hasRemaining())
            return;
        if (track.getPlayState() == AudioTrack.PLAYSTATE_STOPPED)
            track.play();
        // arrayOffset() matters for sliced/duplicated heap buffers; it is 0
        // for buffers from ByteBuffer.allocate(), so behavior is unchanged
        // for the common case.
        track.write(frame.array(), frame.arrayOffset() + frame.position(), frame.remaining());
    }

    /** Stops and releases both endpoints; the instance is unusable afterwards. */
    public void stop() {
        if (audioRecord != null) {
            if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_STOPPED) {
                audioRecord.stop();
            }
            audioRecord.release();
            audioRecord = null;
        }
        if (track != null) {
            track.stop();
            track.release();
            track = null;
        }
    }
}
通过MediaRecorder录制AAC格式的音频
/**
 * Records AAC audio with MediaRecorder in ADTS container format, streaming
 * the encoder output back through a pipe so that individual ADTS frames can
 * be pulled with frame() / readNextFrame().
 *
 * Fixes over the original:
 *  - parseADTS() now honours the `offset` argument (it indexed src[2..5]
 *    directly, which was wrong whenever resynchronisation scanned forward)
 *  - operator-precedence bug in the profile field (src[2] >> 6 + 1 shifted
 *    by 7) and signed-byte sign-extension bugs in the channel and
 *    frame-length fields
 *  - off-by-one sampling-index bound that allowed an out-of-bounds read
 *  - the ADTS ID bit was mapped inverted (0 = MPEG-4, 1 = MPEG-2)
 *  - readNextFrame() payload loop tested the wrong variable for EOF and
 *    re-read at the same buffer offset, corrupting partially-read frames
 */
public class AudioRecorder {
    enum MPEGVersionType {
        MPEG_4, MPEG_2;
    }

    /** Parsed fields of a 7-byte ADTS frame header. */
    static class ADTS {
        MPEGVersionType MPEGVersion;
        int MPEG4AudioObjectType;   // audio profile: 1 = Main, 2 = LC, ...
        int samplingValue;          // sampling rate in Hz
        int channelConfiguration;   // 3-bit channel configuration
        int aacFrameLength;         // whole frame length, header included
        @Override
        public String toString() {
            return "ADTS{" + "MPEGVersion=" + MPEGVersion + ", MPEG4AudioObjectType=" + MPEG4AudioObjectType
                    + ", samplingValue=" + samplingValue + ", channelConfiguration=" + channelConfiguration
                    + ", aacFrameLength=" + aacFrameLength + '}';
        }
    }

    /** ADTS sampling-frequency index -> rate in Hz (last two entries reserved). */
    public static final int SamplingValueTable[] = { 96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000,
            12000, 11025, 8000, 7350, 0, 0 };

    private MediaRecorder mMediaRecorder;
    private InputStream localInputStream;   // read side of the pipe
    private ParcelFileDescriptor parcelRead;
    private ParcelFileDescriptor parcelWrite;

    /** Creates the pipe the recorder will write its ADTS stream into. */
    public AudioRecorder() {
        ParcelFileDescriptor[] parcelFileDescriptors = null;
        try {
            parcelFileDescriptors = ParcelFileDescriptor.createPipe();
        } catch (IOException e) {
            e.printStackTrace();
        }
        if (parcelFileDescriptors == null) {
            // The original fell through to an unconditional NullPointerException;
            // fail with an explicit message instead.
            throw new IllegalStateException("ParcelFileDescriptor.createPipe failed");
        }
        parcelRead = new ParcelFileDescriptor(parcelFileDescriptors[0]);
        parcelWrite = new ParcelFileDescriptor(parcelFileDescriptors[1]);
    }

    /** Configures (first call only) and starts the MediaRecorder. */
    public void startRecord() {
        if (mMediaRecorder == null) {
            mMediaRecorder = new MediaRecorder();
            mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
            mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.AAC_ADTS);
            mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
            // Encoder output goes to the pipe's write end, not a file.
            mMediaRecorder.setOutputFile(parcelWrite.getFileDescriptor());
        }
        if (localInputStream == null) {
            // AutoCloseInputStream closes parcelRead when the stream is closed.
            localInputStream = new ParcelFileDescriptor.AutoCloseInputStream(parcelRead);
        }
        try {
            mMediaRecorder.prepare();
            mMediaRecorder.start();
        } catch (IOException e) {
            Log.e(MainActivity.TAG, e.toString());
            e.printStackTrace();
        }
    }

    /**
     * Stops recording and releases the recorder and the pipe's read side.
     * Note: the pipe is not recreated, so a subsequent startRecord() on the
     * same instance cannot deliver data — create a new AudioRecorder instead.
     */
    public void stopRecord() {
        if (localInputStream != null) {
            try {
                localInputStream.close();   // also closes parcelRead
                localInputStream = null;
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        if (mMediaRecorder != null) {
            // stop() can throw when nothing was recorded; reset() is enough
            // before release (kept from the original).
            // mMediaRecorder.stop();
            mMediaRecorder.reset();
            mMediaRecorder.release();
            mMediaRecorder = null;
        }
    }

    /**
     * Raw read of the ADTS byte stream.
     * Typical encoder frames here are 1024 ~ 2048 bytes.
     *
     * @return bytes read, 0 on I/O error, -1 when not recording or at EOF
     */
    public int read(byte[] readBytes) {
        if (localInputStream == null)
            return -1;   // guard added: stream is null before start / after stop
        int nLength = 0;
        try {
            nLength = localInputStream.read(readBytes);
        } catch (IOException e) {
            e.printStackTrace();
        }
        return nLength;
    }

    /**
     * Pulls the next complete ADTS frame from the pipe.
     *
     * @return one frame (header included), or null on error / no data
     */
    public ByteBuffer frame() {
        try {
            // Small pacing delay kept from the original; lets the encoder
            // accumulate data between polls.
            Thread.sleep(40);
            return readNextFrame(localInputStream);
        } catch (IOException | InterruptedException e) {
            Log.e(MainActivity.TAG, e.toString());
            e.printStackTrace();
            return null;
        }
    }

    /**
     * Reads one complete ADTS frame from the stream, resynchronising on the
     * 0xFFF syncword if the stream position is mid-frame.
     *
     * @return the frame (flipped, ready to read), or null on EOF / no data
     * @throws RuntimeException if the parsed frame length is inconsistent
     */
    public static ByteBuffer readNextFrame(InputStream is) throws IOException {
        if (is == null || is.available() < 1)
            return null;
        ByteBuffer adtsBuf = ByteBuffer.allocate(14);
        ADTS adts = null;
        int len;
        for (;;) {
            // Read up to 7 more bytes; with resync leftovers (at most 6 bytes)
            // this stays within the 14-byte capacity.
            len = is.read(adtsBuf.array(), adtsBuf.position(), 7);
            if (len < 0)
                return null;
            adtsBuf.position(adtsBuf.position() + len);
            if (adtsBuf.position() < 7) {
                continue;   // not enough bytes for a full header yet
            }
            adtsBuf.flip();
            // Scan byte by byte for a valid header (resynchronisation).
            for (; adtsBuf.remaining() >= 7; adtsBuf.get()) {
                adts = parseADTS(adtsBuf.array(), adtsBuf.position(), adtsBuf.remaining());
                if (adts != null)
                    break;
            }
            if (adts == null) {
                // Keep the unscanned tail (< 7 bytes) and read more.
                ByteBuffer adtsBuf2 = ByteBuffer.allocate(14);
                adtsBuf2.put(adtsBuf.array(), adtsBuf.position(), adtsBuf.remaining());
                adtsBuf = adtsBuf2;
                continue;
            }
            ByteBuffer frame = ByteBuffer.allocate(2048);
            frame.put(adtsBuf);   // header plus any bytes already read past it
            int leftSize = adts.aacFrameLength - frame.position();
            if (leftSize < 0 || leftSize > frame.remaining())
                throw new RuntimeException(
                        "readNextFrame - leftSize: " + leftSize + ", frame.remaining: " + frame.remaining());
            // Read the remaining payload. The original tested `len < 0`
            // instead of `tmpLen < 0` and re-read at frame.position(),
            // overwriting earlier partial reads.
            for (len = 0; len != leftSize;) {
                int tmpLen = is.read(frame.array(), frame.position() + len, leftSize - len);
                if (tmpLen < 0)
                    return null;
                len += tmpLen;
            }
            frame.position(frame.position() + len);
            frame.flip();
            return frame;
        }
    }

    /**
     * Parses a 7-byte ADTS header starting at src[offset].
     *
     * @param src    byte array containing the candidate header
     * @param offset index of the first header byte
     * @param length number of valid bytes available from offset
     * @return parsed header, or null if the bytes are not a plausible header
     */
    public static ADTS parseADTS(byte src[], int offset, int length) {
        if (length < 7) {
            return null;
        }
        // Widen to unsigned ints once. The original indexed src[2..5]
        // (ignoring `offset`) and sign-extended bytes >= 0x80.
        int b0 = src[offset] & 0xff;
        int b1 = src[offset + 1] & 0xff;
        int b2 = src[offset + 2] & 0xff;
        int b3 = src[offset + 3] & 0xff;
        int b4 = src[offset + 4] & 0xff;
        int b5 = src[offset + 5] & 0xff;
        if (b0 != 0xff || (b1 & 0xf0) != 0xf0) {
            return null;   // 12-bit syncword must be 0xFFF
        }
        ADTS adts = new ADTS();
        // ID bit: 0 = MPEG-4, 1 = MPEG-2 (the original mapping was inverted).
        adts.MPEGVersion = (b1 & 0x08) != 0 ? MPEGVersionType.MPEG_2 : MPEGVersionType.MPEG_4;
        int layer = b1 & 0x06;
        if (layer != 0) {
            return null;   // layer must be 00 for AAC
        }
        // Profile is the top two bits of byte 2, stored as value + 1. The
        // original `src[2] >> 6 + 1` shifted by 7 due to operator precedence.
        adts.MPEG4AudioObjectType = ((b2 >> 6) & 0x3) + 1;
        int sample_index = (b2 >> 2) & 0xf;
        if (sample_index >= SamplingValueTable.length) {
            return null;   // was `>`, allowing an out-of-bounds read at 15
        }
        adts.samplingValue = SamplingValueTable[sample_index];
        adts.channelConfiguration = ((b2 & 0x1) << 2) | ((b3 >> 6) & 0x3);
        // 13-bit frame length spanning bytes 3..5 (header included).
        adts.aacFrameLength = ((b3 & 0x3) << 11) | (b4 << 3) | ((b5 >> 5) & 0x7);
        // Cap kept at 0x7ff so frames always fit readNextFrame's 2048-byte
        // buffer (the field itself can encode up to 0x1fff).
        if (adts.aacFrameLength < 7 || adts.aacFrameLength > 0x7ff) {
            Log.e(MainActivity.TAG, "readAFrame - parse frame size fail: " + adts.aacFrameLength);
            return null;
        }
        return adts;
    }
}
cpp层PCM与G711A格式互相转换
#define MAX 32635
// Compress one 16-bit linear PCM sample into one G.711 A-law byte.
static uint8_t encode(uint16_t pcm) {
    // Sign goes to bit 7 of the output; work on the magnitude afterwards.
    const int sign = (pcm & 0x8000) >> 8;
    if (sign != 0)
        pcm = -pcm;
    if (pcm > MAX)
        pcm = MAX;                               // clip to the A-law ceiling
    // Segment number (exponent): position of the highest set bit in 8..14.
    int exponent = 7;
    for (int mask = 0x4000; exponent > 0 && (pcm & mask) == 0; --exponent, mask >>= 1)
        ;
    // 4-bit mantissa taken just below the leading bit (segment 0 keeps bits 4..7).
    const int shift = (exponent == 0) ? 4 : (exponent + 3);
    const int mantissa = (pcm >> shift) & 0x0f;
    const uint8_t alaw = (uint8_t) (sign | (exponent << 4) | mantissa);
    return (uint8_t) (alaw ^ 0xD5);              // standard A-law bit inversion
}
// Expand one G.711 A-law byte back into a 16-bit linear PCM sample.
static uint16_t decode(uint8_t alaw) {
    alaw ^= 0xD5;                                // undo the A-law bit inversion
    const int sign = alaw & 0x80;
    const int exponent = (alaw >> 4) & 0x07;     // segment number
    // Rebuild the magnitude: mantissa plus rounding offset of half a step.
    int magnitude = ((alaw & 0x0f) << 4) + 8;
    if (exponent != 0)
        magnitude += 0x100;                      // implicit leading bit, segments 1..7
    if (exponent > 1)
        magnitude <<= exponent - 1;
    return (uint16_t) (sign ? -magnitude : magnitude);
}
/**
 * Compresses 16-bit PCM to G.711 A-law: every 2 input bytes become 1 byte.
 * A trailing odd byte is dropped (samples are 2 bytes wide).
 * @param src buffer of PCM samples, consumed from position to limit
 * @return new flipped buffer of A-law bytes, or nullptr if < 2 bytes remain
 */
bytebuf::sharedptr_t util::pcm_to_g711a(bytebuf::sharedptr_t src) {
    if (src->remaining() < 2)
        return nullptr;
    if (src->remaining() % 2 != 0)
        src->limit(src->limit() - 1);    // discard incomplete trailing sample
    auto out = make_shared<bytebuf::ByteBuffer>(src->remaining() / 2);
    while (src->hasRemaining() && out->hasRemaining())
        out->put(encode(src->getShort()));
    out->flip();
    return out;
}
/**
 * Compresses a raw 16-bit PCM byte region to G.711 A-law.
 * @param src    base pointer of the PCM bytes
 * @param offset byte offset into src where the samples start
 * @param length number of bytes to convert (an odd tail byte is ignored)
 * @return new flipped buffer of A-law bytes, or nullptr if length < 2
 */
bytebuf::sharedptr_t util::pcm_to_g711a(const void* src, const size_t& offset, const size_t& length) {
    if (length < 2)
        return nullptr;
    const size_t sample_count = length / 2;
    // NOTE(review): reinterprets the bytes as native-endian uint16_t and
    // assumes src + offset is 2-byte aligned — confirm this holds for callers.
    const uint16_t* samples = (uint16_t*) ((uint8_t*) src + offset);
    auto out = make_shared<bytebuf::ByteBuffer>(sample_count);
    for (size_t i = 0; i < sample_count && out->hasRemaining(); ++i)
        out->put(encode(samples[i]));
    out->flip();
    return out;
}
/**
 * Expands G.711 A-law to 16-bit PCM: every 1 input byte becomes 2 bytes.
 * @param src buffer of A-law bytes, consumed from position to limit
 * @return new flipped buffer of PCM samples, or nullptr if src is empty
 */
bytebuf::sharedptr_t util::g711a_to_pcm(bytebuf::sharedptr_t src) {
    if (!src->hasRemaining())
        return nullptr;
    auto out = make_shared<bytebuf::ByteBuffer>(src->remaining() * 2);
    while (src->hasRemaining() && out->hasRemaining())
        out->put(decode(src->get()));
    out->flip();
    return out;
}
/**
 * Expands a raw G.711 A-law byte region to 16-bit PCM.
 * @param src    base pointer of the A-law bytes
 * @param offset byte offset into src where the data starts
 * @param length number of A-law bytes to convert
 * @return new flipped buffer of PCM samples, or nullptr if length is 0
 */
bytebuf::sharedptr_t util::g711a_to_pcm(const void* src, const size_t& offset, const size_t& length) {
    if (length == 0)
        return nullptr;
    const uint8_t* bytes = (uint8_t*) src + offset;
    auto out = make_shared<bytebuf::ByteBuffer>(length * 2);
    for (size_t i = 0; i < length && out->hasRemaining(); ++i)
        out->put(decode(bytes[i]));
    out->flip();
    return out;
}
bytebuf::sharedptr_t是我自己封装的缓冲区类,可以用其他容器代替。