// Personal notes — I no longer remember the details; not meant for readers.
package com.example.xiaobaiyu;
import android.os.Bundle;
import androidx.appcompat.app.AppCompatActivity;
import org.webrtc.AudioSource;
import org.webrtc.AudioTrack;
import org.webrtc.Camera1Enumerator;
import org.webrtc.EglBase;
import org.webrtc.MediaConstraints;
import org.webrtc.MediaStream;
import org.webrtc.PeerConnection;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.SurfaceViewRenderer;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoSource;
import org.webrtc.VideoTrack;
/**
 * Demo activity: sets up a WebRTC PeerConnectionFactory, captures the local
 * camera and microphone, and renders the camera preview into a
 * SurfaceViewRenderer. The remote-side fields are declared but not yet wired up.
 */
public class MainActivity extends AppCompatActivity {
    PeerConnectionFactory peerConnectionFactory;
    PeerConnection peerConnectionLocal;
    PeerConnection peerConnectionRemote;
    SurfaceViewRenderer localView;
    SurfaceViewRenderer remoteView;
    MediaStream mediaStreamLocal;
    MediaStream mediaStreamRemote;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        // Initialize the WebRTC stack and create the PeerConnectionFactory —
        // the entry point for all audio/video sources, tracks, and connections.
        PeerConnectionFactory.InitializationOptions initializationOptions =
                PeerConnectionFactory.InitializationOptions.builder(this)
                        .createInitializationOptions();
        PeerConnectionFactory.initialize(initializationOptions);
        // BUG FIX: assign to the field instead of declaring a shadowing local —
        // previously the field stayed null for any later use.
        peerConnectionFactory = PeerConnectionFactory.builder().createPeerConnectionFactory();

        // Audio: create an AudioSource (empty MediaConstraints = library defaults)
        // and wrap it in an AudioTrack.
        AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
        AudioTrack audioTrack = peerConnectionFactory.createAudioTrack("101", audioSource);

        // EglBase provides the shared EGL context used for capture and rendering.
        EglBase.Context eglBaseContext = EglBase.create().getEglBaseContext();

        // A SurfaceTextureHelper is required to initialize the VideoCapturer;
        // without it the camera preview fails.
        SurfaceTextureHelper surfaceTextureHelper =
                SurfaceTextureHelper.create("CaptureThread", eglBaseContext);

        // Camera capture: feeds frames into the VideoSource.
        VideoCapturer videoCapturer = createCameraCapturer();
        // BUG FIX: createCameraCapturer() may return null (no usable camera);
        // bail out instead of crashing with an NPE on the next line.
        if (videoCapturer == null) {
            return;
        }
        VideoSource videoSource =
                peerConnectionFactory.createVideoSource(videoCapturer.isScreencast());
        videoCapturer.initialize(
                surfaceTextureHelper, getApplicationContext(), videoSource.getCapturerObserver());
        videoCapturer.startCapture(480, 640, 30); // width, height, fps

        // Local preview: mirror (front camera) and bind to the shared EGL context.
        // BUG FIX: assign to the field instead of declaring a shadowing local.
        localView = findViewById(R.id.localView);
        localView.setMirror(true);
        localView.init(eglBaseContext, null);

        // VideoTrack routes frames from the VideoSource into the renderer.
        VideoTrack videoTrack = peerConnectionFactory.createVideoTrack("101", videoSource);
        // Display the captured frames in the local preview view.
        videoTrack.addSink(localView);
    }

    /**
     * Selects a camera via {@link Camera1Enumerator} (compatible with both the
     * Camera1 and Camera2 device lists), preferring a front-facing device and
     * falling back to any other camera.
     *
     * @return a capturer for the first usable camera, or {@code null} if none exists
     */
    private VideoCapturer createCameraCapturer() {
        Camera1Enumerator enumerator = new Camera1Enumerator(false);
        final String[] deviceNames = enumerator.getDeviceNames();
        // First pass: try to find a front-facing camera.
        for (String deviceName : deviceNames) {
            if (enumerator.isFrontFacing(deviceName)) {
                VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
                if (videoCapturer != null) {
                    return videoCapturer;
                }
            }
        }
        // Second pass: front-facing camera not found, try any other camera.
        for (String deviceName : deviceNames) {
            if (!enumerator.isFrontFacing(deviceName)) {
                VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
                if (videoCapturer != null) {
                    return videoCapturer;
                }
            }
        }
        return null;
    }
}