這篇文章主要介紹ios webrtcdemo的實現及相關注意事項,轉載請說明出處(博客園RTC.Blacker)
前面很多人問webrtc android下有webrtcdemo,ios上怎么找不到,放在哪里呢?
答案:webrtcdemo在ios上沒有實現,如果要實現也很簡單,既然安卓都有了,依葫蘆畫瓢即可移植到ios上,不過可能要求您熟悉android語法,這里給出ios上的參考代碼:
//轉載請說明出處: RTC_Blacker http://www.cnblogs.com/lingyunhu
// Creates the WebRTC voice (VoE) and video (ViE) engines and obtains every
// sub-interface this class uses. Returns TRUE on success; on any fatal
// failure it logs and returns FALSE. The VoEFile interface is treated as
// optional (only needed for the commented-out recording helpers).
// Must succeed before -start is called.
- (BOOL)initWebrtcObjects
{
    // ---- Voice engine and its sub-interfaces ----
    if ((voE = webrtc::VoiceEngine::Create()) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((voeBase = webrtc::VoEBase::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((voeCodec = webrtc::VoECodec::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((voeFile = webrtc::VoEFile::GetInterface(voE)) == NULL) {
        // Non-fatal: file interface is only used for optional recording.
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
    }
    if ((voeHardware = webrtc::VoEHardware::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((voeNetwork = webrtc::VoENetwork::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((voeAudioProccessing = webrtc::VoEAudioProcessing::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((voeRtpRtcp = webrtc::VoERTP_RTCP::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    if (voeBase->Init() != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        // BUG FIX: previously fell through and continued with an
        // uninitialized voice engine; fail fast, matching vieBase->Init below.
        return FALSE;
    }

    // ---- Video engine and its sub-interfaces ----
    if ((viE = webrtc::VideoEngine::Create()) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieBase = webrtc::ViEBase::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieCapture = webrtc::ViECapture::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieRender = webrtc::ViERender::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieCodec = webrtc::ViECodec::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieNetwork = webrtc::ViENetwork::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieRtpRtcp = webrtc::ViERTP_RTCP::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    if (vieBase->Init() != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    // Select default audio (ISAC) and video codecs.
    [self initAudioCodec];
    [self initVideoCodec];

    // No capture device allocated yet; -1 marks "no video channel created".
    captureID = 0;
    videoChannel = -1;

    return TRUE;
}
81
// Selects the audio send codec: scans the voice engine's codec list for
// ISAC and copies it into voeCodecInst (zeroed first). If ISAC is not
// available, voeCodecInst stays zeroed — the later SetSendCodec in -start
// would then fail — so that case is now logged instead of passing silently.
- (void)initAudioCodec
{
    memset(&voeCodecInst, 0, sizeof(webrtc::CodecInst));

    if (voeCodec != NULL) {
        BOOL found = FALSE;
        int numCodecs = voeCodec->NumOfCodecs();
        for (int index = 0; index < numCodecs; index++) {
            webrtc::CodecInst ci;
            // BUG FIX: GetCodec's return value was previously ignored; a
            // failed lookup would have matched against garbage stack data.
            if (voeCodec->GetCodec(index, ci) != 0) {
                continue;
            }
            if (strncmp(ci.plname, "ISAC", 4) == 0) {
                memcpy(&voeCodecInst, &ci, sizeof(webrtc::CodecInst));
                found = TRUE;
                break;
            }
        }
        if (!found) {
            DebugLog(@"AVErr: %s ISAC codec not found, voeCodecInst left zeroed", __FUNCTION__);
        }
        //voeCodecInst.channels = 1;
        //voeCodecInst.rate = -1;
    }
}
99
// Starts a full audio+video call: creates VoE/ViE channels, configures
// audio processing (AECM/AGC/NS), devices and codecs, wires the audio RTP
// transport, starts local capture/preview and remote rendering, then begins
// send/receive on both channels. Returns TRUE on success, FALSE (after
// logging) on the first fatal failure. Requires -initWebrtcObjects to have
// succeeded.
- (BOOL)start
{
    // BUG FIX: was "f (...)" (typo for "if", would not compile) and tested
    // "!= 0". VoEBase::CreateChannel() returns the new channel id (>= 0) or
    // -1 on error, so only negative values are failures — channel ids other
    // than 0 are perfectly valid.
    if ((audioChannel = voeBase->CreateChannel()) < 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieBase->CreateChannel(videoChannel) != 0) {
        DebugLog(@"AVErr: %d %s at line %d", vieBase->LastError(), __FUNCTION__, __LINE__);
        return FALSE;
    }
    DebugLog(@"AVInfo: CreateChannel success! %d, %d", videoChannel, audioChannel);

    //vieCodec->SetReceiveCodec(videoChannel,videoCodec);

    // Mobile echo control is required; AGC and noise suppression are
    // configured best-effort (their return values were never checked).
    if (voeAudioProccessing->SetAecmMode() != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    voeAudioProccessing->SetAgcStatus(TRUE, webrtc::kAgcDefault);
    voeAudioProccessing->SetNsStatus(TRUE, webrtc::kNsHighSuppression);

    // -1 selects the default recording device.
    _voice_capture_device_index = -1;
    voeHardware->SetRecordingDevice(_voice_capture_device_index);
    voeHardware->SetPlayoutDevice(_voice_playback_device_index);
    if (voeHardware->SetLoudspeakerStatus(true) != 0) {
        // Non-fatal: call continues over the default output route.
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
    }
    voeCodec->SetSendCodec(audioChannel, voeCodecInst);

    // Wire the audio RTP/RTCP transport via the test transport helper.
    // NOTE(review): this object is allocated with new on every -start and is
    // never deleted in -stop — confirm ownership / add cleanup.
    RtpRtcpStreamStruct streamStruct = [self createRtpStreamStruct];
    voeChannelTransport = new webrtc::test::VoiceChannelTransport(voeNetwork, audioChannel);
    voeChannelTransport->SetLocalReceiver2(localARtpPort.rtp, streamStruct);
    voeChannelTransport->SetSendDestination2([remoteIPAddress UTF8String], remoteARtpPort.rtp, remoteARtpPort.rtcp);

    if (vieCodec->SetSendCodec(videoChannel, videoCodec) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    // NACK retransmission, reduced-size RTCP, and PLI key-frame requests.
    vieRtpRtcp->SetNACKStatus(videoChannel, TRUE);
    vieRtpRtcp->SetRTCPStatus(videoChannel, webrtc::kRtcpNonCompound_RFC5506);
    vieRtpRtcp->SetKeyFrameRequestMethod(videoChannel, webrtc::kViEKeyFrameRequestPliRtcp);

    // Pair the audio channel with the video channel (A/V sync).
    vieBase->SetVoiceEngine(voE);
    if (vieBase->ConnectAudioChannel(videoChannel, audioChannel)) {
        DebugLog(@"AVErr:%s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    // Pick a capture device if none was chosen yet; prefers index 1 (the
    // second camera) when more than one is present.
    if (deviceUniqueID == nil) {
        DebugLog(@"AVInfo NumberOfCaptureDevices is %d", vieCapture->NumberOfCaptureDevices());
        int list_count = vieCapture->NumberOfCaptureDevices();
        if (list_count > 0) {
            int list_number = 0;
            if (list_count > 1) {
                list_number = 1;//[[AVShareData instance] isUseFrontCamera]?0:1;
            }
            char device_name[KMaxDeviceNameLength];
            char unique_id[KMaxUniqueIdLength];
            memset(unique_id, 0, KMaxUniqueIdLength);
            vieCapture->GetCaptureDevice(list_number, device_name, KMaxDeviceNameLength, unique_id, KMaxUniqueIdLength);
            deviceUniqueID = [NSString stringWithFormat:@"%s", unique_id];
        }
    }
    DebugLog(@"AVInfo deviceUniqueID is %@", deviceUniqueID);

    if ((vieCapture->AllocateCaptureDevice([deviceUniqueID UTF8String], deviceUniqueID.length, captureID)) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    DebugLog(@"AVInfo captureID is %d", captureID);

    if (vieCapture->ConnectCaptureDevice(captureID, videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    // CIF (352x288) VP8 capture.
    webrtc::CaptureCapability captureCapability;
    captureCapability.width = 352;
    captureCapability.height = 288;
    captureCapability.codecType = webrtc::kVideoCodecVP8;
    captureCapability.maxFPS = DEFAULT_VIDEO_CODEC_MAX_FRAMERATE;
    //vieCapture->SetRotateCapturedFrames(captureID, <#const webrtc::RotateCapturedFrame rotation#>)
    if (vieCapture->StartCapture(captureID, captureCapability) != 0) {
        //if (vieCapture->StartCapture(captureID) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    // Local preview renderer: full view rectangle, z-order 0.
    if ((vieRender->AddRenderer(captureID, [self localRenderView], 0, 0.0, 0.0, 1.0, 1.0)) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    /*
    if((vieRender->AddRenderer(captureID, [self localRenderView2], 0, 0.0, 0.0, 1.0, 1.0)) != 0){
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    */

    if (vieRender->StartRender(captureID) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    // Remote video renderer: full view rectangle, z-order 1.
    if (vieRender->AddRenderer(videoChannel, [self remoteRenderView], 1, 0.0f, 0.0f, 1.0f, 1.0f) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieRender->StartRender(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    // Start the media flow: video receive/send, then audio
    // receive -> playout -> send.
    if (vieBase->StartReceive(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieBase->StartSend(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (voeBase->StartReceive(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (voeBase->StartPlayout(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (voeBase->StartSend(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    //webrtc::CodecInst ci;
    //voeFile->StartRecordingMicrophone(@"a.avi",ci,1000);

    DebugLog(@"AVInfo: %s at line %d success!", __FUNCTION__, __LINE__);
    return TRUE;
}
242
// Stops the call and tears down what -start created, roughly in reverse
// order: audio streams, video streams, local preview renderer, capture
// device, remote renderer, then the channels. Returns TRUE on success; logs
// and returns FALSE on the first failure of an original teardown step.
// Newly added cleanup steps (local preview, capture disconnect) only log on
// failure so teardown keeps going.
- (BOOL)stop
{
    if (voeBase->StopSend(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (voeBase->StopReceive(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (voeBase->StopPlayout(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    if (vieBase->StopSend(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieBase->StopReceive(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    // BUG FIX: -start also added and started a renderer for the local
    // preview (captureID); it was never stopped/removed here, leaking the
    // renderer across start/stop cycles.
    if (vieRender->StopRender(captureID) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
    }
    if (vieRender->RemoveRenderer(captureID) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
    }

    if (vieCapture->StopCapture(captureID) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    // BUG FIX: ConnectCaptureDevice in -start was never balanced; disconnect
    // before releasing the device.
    if (vieCapture->DisconnectCaptureDevice(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
    }
    if (vieCapture->ReleaseCaptureDevice(captureID) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieRender->StopRender(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieRender->RemoveRenderer(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    // NOTE(review): voeChannelTransport (new'd in -start) is never deleted
    // here — confirm ownership and add "delete voeChannelTransport" if this
    // class owns it.
    if (voeBase->DeleteChannel(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieBase->DeleteChannel(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    DebugLog(@"AVInfo: %s at line %d success", __FUNCTION__, __LINE__);

    return TRUE;
}
相關說明:
1,聲音處理:
1.1. webrtc支持很多種音頻編碼,ilbc. isac. G711. G722. opus等等,不同編碼適用不同場景,可根據自己需求調整.
1.2. 聲音處理最大的難題就是噪聲,回聲,抖動,自動增益的處理,這也是最有價值的部分,webrtc和系統里面都有相應的處理,不過因為安卓機型眾多,加上廠商DIY所以不同機器問題不一樣,有些問題還得自己去處理,如webrtc團隊基本上就不會用小米,酷派啥的測試.
1.3. AECM目前在安卓上都是通過軟件在處理,看資料說后面一些廠商會直接集成到硬件上,具體效果拭目以待.
2,視頻處理:
2.1. webrtc默認使用vp8編碼,這也是Google力推的一種編碼格式,后面會推VP9.
2.2. 如果需兼容H264,則需要自己去集成,實際上有人已經這么做了,不過WebRTC后面也會支持H264.
2.3. vp8與h264孰優孰劣,最好自己去比較測試,不要道聽途說,我相信Google力推的東西不會差到哪去.
2.4. NACK,字面解釋就是協商確認包,實際就是起到丟包重傳的作用,網絡不好時因為丟包造成花屏,通過這個可解決,但會帶來一定的延遲.
2.5. FEC,字面解釋就是前向糾錯編碼,與NACK不同,包里面已經攜帶了糾錯碼,即使前一包未正確接收,也可根據他的信息正確計算出來.

