Android/iOS WebRTC Audio and Video Development Summary (Part 8)


Reposted from: http://www.cnblogs.com/lingyunhu/p/4066383.html

This article mainly describes how to implement a WebRTCDemo on iOS and the points to watch out for. Please credit the source when reposting (cnblogs RTC.Blacker).

 

Many readers have asked: WebRTC ships a WebRTCDemo for Android, so why can't one be found for iOS? Where is it?

Answer: WebRTCDemo was never implemented for iOS. Implementing it yourself is not hard: since the Android version already exists, you can port it to iOS by following the same pattern, although that does require some familiarity with the Android code. Reference code for iOS is given below:

-(BOOL)initWebrtcObjects
{
    // Please credit the source when reposting: RTC_Blacker http://www.cnblogs.com/lingyunhu
    // Create the voice engine and grab the sub-APIs we need.
    if ((voE = webrtc::VoiceEngine::Create()) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((voeBase = webrtc::VoEBase::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((voeCodec = webrtc::VoECodec::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((voeFile = webrtc::VoEFile::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
    }
    if ((voeHardware = webrtc::VoEHardware::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((voeNetwork = webrtc::VoENetwork::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((voeAudioProccessing = webrtc::VoEAudioProcessing::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((voeRtpRtcp = webrtc::VoERTP_RTCP::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    if (voeBase->Init() != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
    }

    // Create the video engine and grab the sub-APIs we need.
    if ((viE = webrtc::VideoEngine::Create()) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieBase = webrtc::ViEBase::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieCapture = webrtc::ViECapture::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieRender = webrtc::ViERender::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieCodec = webrtc::ViECodec::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieNetwork = webrtc::ViENetwork::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieRtpRtcp = webrtc::ViERTP_RTCP::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    if (vieBase->Init() != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    [self initAudioCodec];
    [self initVideoCodec];

    captureID = 0;
    videoChannel = -1;

    return TRUE;
}

-(void)initAudioCodec
{
    memset(&voeCodecInst, 0, sizeof(webrtc::CodecInst));

    if (voeCodec != NULL) {
        // Walk the codec list and pick iSAC as the default audio send codec.
        for (int index = 0; index < voeCodec->NumOfCodecs(); index++) {
            webrtc::CodecInst ci;
            voeCodec->GetCodec(index, ci);
            if (strncmp(ci.plname, "ISAC", 4) == 0) {
                memcpy(&voeCodecInst, &ci, sizeof(webrtc::CodecInst));
                break;
            }
        }
        //voeCodecInst.channels = 1;
        //voeCodecInst.rate = -1;
    }
}

-(BOOL)start
{
    // VoEBase::CreateChannel() returns the new channel id, or -1 on failure.
    if ((audioChannel = voeBase->CreateChannel()) < 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieBase->CreateChannel(videoChannel) != 0) {
        DebugLog(@"AVErr: %d %s at line %d", vieBase->LastError(), __FUNCTION__, __LINE__);
        return FALSE;
    }
    DebugLog(@"AVInfo: CreateChannel success! %d, %d", videoChannel, audioChannel);

    //vieCodec->SetReceiveCodec(videoChannel, videoCodec);

    // Audio processing: mobile echo control, automatic gain control, noise suppression.
    if (voeAudioProccessing->SetAecmMode() != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    voeAudioProccessing->SetAgcStatus(TRUE, webrtc::kAgcDefault);
    voeAudioProccessing->SetNsStatus(TRUE, webrtc::kNsHighSuppression);
    _voice_capture_device_index = -1;
    voeHardware->SetRecordingDevice(_voice_capture_device_index);
    voeHardware->SetPlayoutDevice(_voice_playback_device_index);
    if (voeHardware->SetLoudspeakerStatus(true) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
    }
    voeCodec->SetSendCodec(audioChannel, voeCodecInst);

    // Set up the audio RTP transport (local receive port and remote destination).
    RtpRtcpStreamStruct streamStruct = [self createRtpStreamStruct];
    voeChannelTransport = new webrtc::test::VoiceChannelTransport(voeNetwork, audioChannel);
    voeChannelTransport->SetLocalReceiver2(localARtpPort.rtp, streamStruct);
    voeChannelTransport->SetSendDestination2([remoteIPAddress UTF8String], remoteARtpPort.rtp, remoteARtpPort.rtcp);

    // Video codec, NACK and RTCP configuration.
    if (vieCodec->SetSendCodec(videoChannel, videoCodec) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    vieRtpRtcp->SetNACKStatus(videoChannel, TRUE);
    vieRtpRtcp->SetRTCPStatus(videoChannel, webrtc::kRtcpNonCompound_RFC5506);
    vieRtpRtcp->SetKeyFrameRequestMethod(videoChannel, webrtc::kViEKeyFrameRequestPliRtcp);

    // Tie the audio channel to the video channel (lip sync).
    vieBase->SetVoiceEngine(voE);
    if (vieBase->ConnectAudioChannel(videoChannel, audioChannel)) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    // Pick a capture device (prefer the second one when more than one is available).
    if (deviceUniqueID == nil) {
        DebugLog(@"AVInfo NumberOfCaptureDevices is %d", vieCapture->NumberOfCaptureDevices());
        int list_count = vieCapture->NumberOfCaptureDevices();
        if (list_count > 0) {
            int list_number = 0;
            if (list_count > 1) {
                list_number = 1; //[[AVShareData instance] isUseFrontCamera]?0:1;
            }
            char device_name[KMaxDeviceNameLength];
            char unique_id[KMaxUniqueIdLength];
            memset(unique_id, 0, KMaxUniqueIdLength);
            vieCapture->GetCaptureDevice(list_number, device_name, KMaxDeviceNameLength, unique_id, KMaxUniqueIdLength);
            deviceUniqueID = [NSString stringWithFormat:@"%s", unique_id];
        }
    }
    DebugLog(@"AVInfo deviceUniqueID is %@", deviceUniqueID);

    if ((vieCapture->AllocateCaptureDevice([deviceUniqueID UTF8String], deviceUniqueID.length, captureID)) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    DebugLog(@"AVInfo captureID is %d", captureID);

    if (vieCapture->ConnectCaptureDevice(captureID, videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    // Start capturing CIF (352x288) frames for VP8.
    webrtc::CaptureCapability captureCapability;
    captureCapability.width = 352;
    captureCapability.height = 288;
    captureCapability.codecType = webrtc::kVideoCodecVP8;
    captureCapability.maxFPS = DEFAULT_VIDEO_CODEC_MAX_FRAMERATE;
    //vieCapture->SetRotateCapturedFrames(captureID, rotation);
    if (vieCapture->StartCapture(captureID, captureCapability) != 0) {
        //if (vieCapture->StartCapture(captureID) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    // Local preview renderer.
    if ((vieRender->AddRenderer(captureID, [self localRenderView], 0, 0.0, 0.0, 1.0, 1.0)) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    /*
    if ((vieRender->AddRenderer(captureID, [self localRenderView2], 0, 0.0, 0.0, 1.0, 1.0)) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    */

    if (vieRender->StartRender(captureID) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    // Remote video renderer.
    if (vieRender->AddRenderer(videoChannel, [self remoteRenderView], 1, 0.0f, 0.0f, 1.0f, 1.0f) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieRender->StartRender(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    // Start the media flows.
    if (vieBase->StartReceive(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieBase->StartSend(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (voeBase->StartReceive(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (voeBase->StartPlayout(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (voeBase->StartSend(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    //webrtc::CodecInst ci;
    //voeFile->StartRecordingMicrophone(@"a.avi", ci, 1000);

    DebugLog(@"AVInfo: %s at line %d success!", __FUNCTION__, __LINE__);
    return TRUE;
}

-(BOOL)stop
{
    // Stop the audio flows.
    if (voeBase->StopSend(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (voeBase->StopReceive(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (voeBase->StopPlayout(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    // Stop the video flows and release the capture device.
    if (vieBase->StopSend(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieBase->StopReceive(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieCapture->StopCapture(captureID) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieCapture->ReleaseCaptureDevice(captureID) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieRender->StopRender(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieRender->RemoveRenderer(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    // Tear down the channels.
    if (voeBase->DeleteChannel(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieBase->DeleteChannel(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    DebugLog(@"AVInfo: %s at line %d success", __FUNCTION__, __LINE__);

    return TRUE;
}

 

Notes:

1. Audio processing:

1.1. WebRTC supports many audio codecs: iLBC, iSAC, G.711, G.722, Opus and so on. Different codecs suit different scenarios, so choose and tune according to your own needs.
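For reference, here is a minimal sketch of switching the audio send codec, reusing only the voeCodec and audioChannel objects and the DebugLog macro from the listing above. The method name useOpusSendCodec and the assumption that the build registers a codec whose payload name is "opus" are mine, not part of the original code.

// Sketch: pick Opus (if the build includes it) as the send codec for an existing audio channel.
-(BOOL)useOpusSendCodec
{
    webrtc::CodecInst ci;
    for (int index = 0; index < voeCodec->NumOfCodecs(); index++) {
        if (voeCodec->GetCodec(index, ci) != 0) {
            continue;
        }
        // Payload name as registered by the build; case-insensitive compare to be safe.
        if (strncasecmp(ci.plname, "opus", 4) == 0) {
            if (voeCodec->SetSendCodec(audioChannel, ci) != 0) {
                DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
                return FALSE;
            }
            return TRUE;
        }
    }
    return FALSE;  // Opus not compiled in; keep the current codec
}

Fields of the CodecInst (channels, rate, and so on) can be adjusted before calling SetSendCodec, as the commented-out lines in -initAudioCodec hint.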

1.2. The hardest problems in audio processing are noise, echo, jitter and automatic gain control, and they are also where most of the value lies. Both WebRTC and the operating system provide processing for these, but because there are so many Android models, each further customized by its vendor, different devices show different problems, and some of them you will have to handle yourself. For example, the WebRTC team basically never tests on Xiaomi, Coolpad and similar devices.
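For reference, the relevant VoEAudioProcessing switches are isolated below. This is only a sketch using the same voeAudioProccessing pointer as the listing; the SetEcStatus call with kEcAecm is my assumption about the old VoE interface, the other calls mirror what -start already does.

// Sketch: enable mobile echo control (AECM), automatic gain control and noise suppression.
-(void)enableAudioProcessing
{
    // Turn echo control on and select the mobile (AECM) algorithm (assumed API).
    if (voeAudioProccessing->SetEcStatus(true, webrtc::kEcAecm) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
    }
    voeAudioProccessing->SetAecmMode();                                  // default AECM mode, as in -start
    voeAudioProccessing->SetAgcStatus(true, webrtc::kAgcDefault);        // automatic gain control
    voeAudioProccessing->SetNsStatus(true, webrtc::kNsHighSuppression);  // noise suppression
}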

1.3. On Android, AECM is currently handled entirely in software. According to some reports, certain vendors will later integrate it directly into hardware; how well that works remains to be seen.

 

2. Video processing:

2.1. WebRTC uses VP8 encoding by default, a format Google is pushing hard; VP9 will follow later.
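The -initVideoCodec method is called in the listing above but was not shown in the original post. A minimal sketch of what it might look like is given here; the videoCodec member of type webrtc::VideoCodec and the ViECodec enumeration calls (NumberOfCodecs/GetCodec) are assumptions on my part, and 352x288 simply mirrors the capture capability configured in -start.

// Sketch: pick VP8 from ViECodec's list and set basic parameters on the videoCodec member.
-(void)initVideoCodec
{
    memset(&videoCodec, 0, sizeof(webrtc::VideoCodec));
    for (int index = 0; index < vieCodec->NumberOfCodecs(); index++) {
        webrtc::VideoCodec vc;
        if (vieCodec->GetCodec((unsigned char)index, vc) != 0) {
            continue;
        }
        if (vc.codecType == webrtc::kVideoCodecVP8) {
            memcpy(&videoCodec, &vc, sizeof(webrtc::VideoCodec));
            break;
        }
    }
    // Match the 352x288 capture capability used in -start.
    videoCodec.width = 352;
    videoCodec.height = 288;
    videoCodec.maxFramerate = DEFAULT_VIDEO_CODEC_MAX_FRAMERATE;
}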

2.2. If you need H.264 compatibility, you have to integrate it yourself. People have in fact already done so, and WebRTC itself will also support H.264 later on.

2.3. As for whether VP8 or H.264 is better, it is best to compare and test them yourself rather than rely on hearsay; I believe anything Google is pushing this hard cannot be too bad.

2.4. NACK (negative acknowledgement) in practice provides retransmission of lost packets. On a poor network, packet loss causes corrupted, blocky frames; NACK can fix this, but it introduces some extra latency.

2.5. FEC (forward error correction) works differently from NACK: the packets already carry redundant correction data, so even if a previous packet is not received correctly, its content can be reconstructed from that redundancy.
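The listing above already enables NACK via SetNACKStatus. As a hedged sketch of adding FEC on top of it, the old ViERTP_RTCP interface is assumed here to expose SetHybridNACKFECStatus; the RED/ULPFEC payload types 96 and 97 are example values that must match what the remote side negotiates, not mandated ones.

// Sketch: enable NACK only, or FEC combined with NACK, on the video channel from the listing.
-(void)enableVideoProtection:(BOOL)useFec
{
    if (useFec) {
        // Hybrid mode: losses are first recovered from FEC, NACK retransmits the rest (assumed API).
        if (vieRtpRtcp->SetHybridNACKFECStatus(videoChannel, true, 96, 97) != 0) {
            DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        }
    } else {
        // Retransmission only, exactly as -start configures it.
        if (vieRtpRtcp->SetNACKStatus(videoChannel, true) != 0) {
            DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        }
    }
}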

