<dependency>
<groupId>org.bytedeco</groupId>
<artifactId>javacv</artifactId>
<version>1.5.6</version>
</dependency>
<dependency>
<groupId>org.bytedeco</groupId>
<artifactId>ffmpeg-platform</artifactId>
<version>4.4-1.5.6</version>
</dependency>
// Build a minimal SDP describing the incoming RTP stream:
// payload type 8 = PCMA (G.711 A-law), 8000 Hz, delivered to UDP port 10010 on localhost.
StringBuilder sdpStr = new StringBuilder();
sdpStr.append("m=audio 10010 RTP/AVP 8\n");
sdpStr.append("a=rtpmap:8 PCMA/8000\n");
sdpStr.append("c=IN IP4 127.0.0.1\n");
// SDP is ASCII; never rely on the platform-default charset here.
FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(
        new ByteArrayInputStream(sdpStr.toString().getBytes(java.nio.charset.StandardCharsets.US_ASCII)));
grabber.setFormat("sdp");
// FFmpeg's sdp demuxer refuses rtp/udp sub-protocols unless they are whitelisted explicitly.
grabber.setOption("protocol_whitelist", "file,rtp,udp");
grabber.setFrameRate(25);
grabber.setSampleRate(8000);
// mono channel
grabber.setAudioChannels(1);
grabber.setAudioCodec(avcodec.AV_CODEC_ID_PCM_ALAW);
try {
    grabber.start();
    int sampleFormat = grabber.getSampleFormat();
    AudioFormat af = AudioUtils.getAudioFormat(grabber);
    if (af != null) {
        DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, af, AudioSystem.NOT_SPECIFIED);
        // try-with-resources guarantees the speaker line is released even if playback throws
        try (SourceDataLine sourceDataLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo)) {
            sourceDataLine.open(af);
            sourceDataLine.start();
            while (soundBtnStart) {
                Frame frame = grabber.grabSamples();
                if (frame != null) {
                    AudioUtils.processAudio(frame.samples, sampleFormat, sourceDataLine);
                }
            }
            sourceDataLine.drain(); // flush buffered samples before the line closes
        }
    } else {
        log.severe("audio start error: audioFormat is null");
    }
} catch (FrameGrabber.Exception e) {
    log.severe("grabber start error");
    log.severe(e.getMessage());
} catch (LineUnavailableException e) {
    log.severe(e.getMessage());
} finally {
    // Release demuxer/decoder resources on every exit path, not only on success.
    try {
        grabber.stop();
    } catch (FrameGrabber.Exception e) {
        log.severe(e.getMessage());
    }
}
AudioUtils.java — helper class referenced by the snippet above:
import org.bytedeco.ffmpeg.global.avutil;
import org.bytedeco.javacv.FFmpegFrameGrabber;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.SourceDataLine;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
/**
 * Helpers for converting FFmpeg-decoded audio samples into big-endian 16-bit PCM
 * and feeding them to a {@link SourceDataLine}.
 *
 * <p>Thread-safety: all methods are stateless and safe for concurrent use, except that
 * a single {@code SourceDataLine} must not be written from two threads at once.
 */
public class AudioUtils {

    private AudioUtils() {
        // utility class — no instances
    }

    /**
     * Writes one decoded frame of samples to the given speaker line, converting the
     * FFmpeg sample format to the big-endian 16-bit PCM layout the line was opened with.
     *
     * @param samples        per-channel buffers from {@code Frame.samples}; planar formats
     *                       carry one buffer per channel, packed formats a single buffer
     * @param sampleFormat   an {@code avutil.AV_SAMPLE_FMT_*} constant
     * @param sourceDataLine the open, started output line; unsupported formats are ignored
     */
    public static void processAudio(Buffer[] samples, int sampleFormat, SourceDataLine sourceDataLine) {
        final float vol = 1; // playback volume multiplier (1 = unity gain)
        switch (sampleFormat) {
            case avutil.AV_SAMPLE_FMT_FLTP: {
                // Planar float: one FloatBuffer per channel; convert each, then interleave L/R.
                byte[] left = floatToByteValue((FloatBuffer) samples[0], vol).array();
                if (samples.length < 2 || samples[1] == null) {
                    // Mono planar stream — nothing to interleave (original code crashed here).
                    sourceDataLine.write(left, 0, left.length);
                    break;
                }
                byte[] right = floatToByteValue((FloatBuffer) samples[1], vol).array();
                byte[] stereo = interleave(left, right);
                sourceDataLine.write(stereo, 0, stereo.length);
                break;
            }
            case avutil.AV_SAMPLE_FMT_S16: {
                // Packed signed 16-bit: channels already interleaved in one buffer.
                byte[] pcm = shortToByteValue((ShortBuffer) samples[0], vol).array();
                sourceDataLine.write(pcm, 0, pcm.length);
                break;
            }
            case avutil.AV_SAMPLE_FMT_FLT: {
                // Packed float: convert in place to 16-bit PCM.
                byte[] pcm = floatToByteValue((FloatBuffer) samples[0], vol).array();
                sourceDataLine.write(pcm, 0, pcm.length);
                break;
            }
            case avutil.AV_SAMPLE_FMT_S16P: {
                // Planar signed 16-bit: convert each channel, then interleave L/R.
                byte[] left = shortToByteValue((ShortBuffer) samples[0], vol).array();
                if (samples.length < 2 || samples[1] == null) {
                    sourceDataLine.write(left, 0, left.length);
                    break;
                }
                byte[] right = shortToByteValue((ShortBuffer) samples[1], vol).array();
                byte[] stereo = interleave(left, right);
                sourceDataLine.write(stereo, 0, stereo.length);
                break;
            }
            default:
                // Unsupported sample format: drop the frame silently (matches original behavior).
                break;
        }
    }

    /**
     * Interleaves two big-endian 16-bit mono streams into one stereo stream
     * (L0 L0 R0 R0 L1 L1 R1 R1 ...). Both arrays must have the same even length.
     */
    private static byte[] interleave(byte[] left, byte[] right) {
        byte[] out = new byte[left.length + right.length];
        int frame = 0;
        for (int i = 0; i < left.length; i += 2) {
            out[4 * frame]     = left[i];
            out[4 * frame + 1] = left[i + 1];
            out[4 * frame + 2] = right[i];
            out[4 * frame + 3] = right[i + 1];
            frame++;
        }
        return out;
    }

    /**
     * Scales 16-bit samples by {@code vol} and packs them into a big-endian ByteBuffer.
     * Values are clamped to the short range so a volume above 1 cannot wrap around.
     *
     * @param arr source samples (read by absolute index; position is not consumed)
     * @param vol volume multiplier
     * @return a ByteBuffer of {@code capacity * 2} bytes, big-endian (ByteBuffer default)
     */
    public static ByteBuffer shortToByteValue(ShortBuffer arr, float vol) {
        int len = arr.capacity();
        ByteBuffer bb = ByteBuffer.allocate(len * 2); // big-endian by default
        for (int i = 0; i < len; i++) {
            float f = arr.get(i) * vol;
            if (f > Short.MAX_VALUE) f = Short.MAX_VALUE; // clamp instead of wrapping
            if (f < Short.MIN_VALUE) f = Short.MIN_VALUE;
            bb.putShort(i * 2, (short) f); // index * 2: two bytes per sample
        }
        return bb;
    }

    /**
     * Converts normalized float samples (nominally in [-1.0, 1.0]) to big-endian
     * signed 16-bit PCM, scaled by {@code vol}.
     *
     * <p>BUG FIX vs. original: the old code clamped negative overflow to +full-scale
     * (sign flip) and clamped positive overflow to 32768.0, which wraps to -32768 on
     * the short cast. Both clamps now target the true short range [-32768, 32767].
     * Ref: https://stackoverflow.com/questions/15087668
     *
     * @param arr source samples (read by absolute index; position is not consumed)
     * @param vol volume multiplier
     * @return a ByteBuffer of {@code capacity * 2} bytes, big-endian (ByteBuffer default)
     */
    public static ByteBuffer floatToByteValue(FloatBuffer arr, float vol) {
        int len = arr.capacity();
        ByteBuffer res = ByteBuffer.allocate(len * 2); // big-endian by default
        float scale = 32768.0f * vol;
        for (int i = 0; i < len; i++) {
            float f = arr.get(i) * scale;
            if (f > Short.MAX_VALUE) f = Short.MAX_VALUE;
            if (f < Short.MIN_VALUE) f = Short.MIN_VALUE;
            res.putShort(i * 2, (short) f);
        }
        return res;
    }

    /**
     * Maps the grabber's FFmpeg sample format to a javax.sound {@link AudioFormat}.
     * Float formats are reported as 16-bit because {@link #processAudio} converts
     * them to 16-bit PCM before writing.
     *
     * @param fg a started grabber (sample format/rate/channels must be known)
     * @return a big-endian PCM format, or {@code null} for unsupported sample formats
     */
    public static AudioFormat getAudioFormat(FFmpegFrameGrabber fg) {
        int rate = fg.getSampleRate();
        int channels = fg.getAudioChannels();
        switch (fg.getSampleFormat()) {
            case avutil.AV_SAMPLE_FMT_S16:   // packed signed 16-bit
            case avutil.AV_SAMPLE_FMT_S16P:  // planar signed 16-bit (interleaved by processAudio)
            case avutil.AV_SAMPLE_FMT_FLT:   // packed float, converted to 16-bit
            case avutil.AV_SAMPLE_FMT_FLTP:  // planar float, converted to 16-bit
                return new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, rate, 16, channels,
                        channels * 2, rate, true);
            case avutil.AV_SAMPLE_FMT_S32P:  // planar signed 32-bit
                // BUG FIX vs. original: frame size for 32-bit samples is channels * 4 bytes,
                // not channels * 2. NOTE(review): processAudio has no S32P branch yet, so
                // this format still produces no audible output — confirm before relying on it.
                return new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, rate, 32, channels,
                        channels * 4, rate, true);
            case avutil.AV_SAMPLE_FMT_U8:    // unsupported formats fall through to null
            case avutil.AV_SAMPLE_FMT_S32:
            case avutil.AV_SAMPLE_FMT_DBL:
            case avutil.AV_SAMPLE_FMT_U8P:
            case avutil.AV_SAMPLE_FMT_DBLP:
            case avutil.AV_SAMPLE_FMT_S64:
            case avutil.AV_SAMPLE_FMT_S64P:
            default:
                return null;
        }
    }
}