Android iOS WebRTC Audio/Video Development Notes (8) -- Porting WebRTCDemo to iOS
This article covers implementing WebRTCDemo on iOS and the related pitfalls. Please credit the source when reposting (cnblogs RTC.Blacker).
Many people have asked: WebRTC ships a WebRTCDemo for Android, so why is there none for iOS, and where would it live?
Answer: WebRTCDemo has no iOS implementation. Building one is not hard, though: since the Android version exists, you can port it to iOS by following the same pattern, provided you are reasonably familiar with the Android code. Reference code for iOS follows:
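The methods below are assumed to live in an Objective-C++ wrapper class (a .mm file) that keeps the engine pointers and call parameters as instance variables. The original post does not show that declaration, so the following is only a sketch under those assumptions: the class name is made up, the header paths follow the old VoE/ViE layout and may differ in your WebRTC checkout, and the port/stream helper types (localARtpPort, remoteARtpPort, RtpRtcpStreamStruct) are project-specific and therefore not defined here.

// Sketch only: assumed headers and instance variables for the reference code below.
// Header paths depend on your WebRTC revision.
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_codec.h"
#include "webrtc/voice_engine/include/voe_file.h"
#include "webrtc/voice_engine/include/voe_hardware.h"
#include "webrtc/voice_engine/include/voe_network.h"
#include "webrtc/voice_engine/include/voe_audio_processing.h"
#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
#include "webrtc/video_engine/include/vie_base.h"
#include "webrtc/video_engine/include/vie_capture.h"
#include "webrtc/video_engine/include/vie_codec.h"
#include "webrtc/video_engine/include/vie_network.h"
#include "webrtc/video_engine/include/vie_render.h"
#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
#include "webrtc/test/channel_transport/include/channel_transport.h"

@interface AVEngineWrapper : NSObject {   // hypothetical class name
    webrtc::VoiceEngine         *voE;
    webrtc::VoEBase             *voeBase;
    webrtc::VoECodec            *voeCodec;
    webrtc::VoEFile             *voeFile;
    webrtc::VoEHardware         *voeHardware;
    webrtc::VoENetwork          *voeNetwork;
    webrtc::VoEAudioProcessing  *voeAudioProccessing;
    webrtc::VoERTP_RTCP         *voeRtpRtcp;
    webrtc::VideoEngine         *viE;
    webrtc::ViEBase             *vieBase;
    webrtc::ViECapture          *vieCapture;
    webrtc::ViERender           *vieRender;
    webrtc::ViECodec            *vieCodec;
    webrtc::ViENetwork          *vieNetwork;
    webrtc::ViERTP_RTCP         *vieRtpRtcp;
    webrtc::test::VoiceChannelTransport *voeChannelTransport;
    webrtc::CodecInst           voeCodecInst;   // selected audio send codec
    webrtc::VideoCodec          videoCodec;     // selected video send codec
    int       audioChannel;
    int       videoChannel;
    int       captureID;
    int       _voice_capture_device_index;
    int       _voice_playback_device_index;
    NSString *deviceUniqueID;
    NSString *remoteIPAddress;
    // localARtpPort / remoteARtpPort and RtpRtcpStreamStruct are project-specific
    // port/stream holders used by the custom SetLocalReceiver2/SetSendDestination2 calls.
}
-(BOOL)initWebrtcObjects;
-(BOOL)start;
-(BOOL)stop;
@end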
-(BOOL)initWebrtcObjects
{
    // Please credit the source when reposting: RTC_Blacker http://www.cnblogs.com/lingyunhu
    if ((voE = webrtc::VoiceEngine::Create()) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((voeBase = webrtc::VoEBase::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((voeCodec = webrtc::VoECodec::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((voeFile = webrtc::VoEFile::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
    }
    if ((voeHardware = webrtc::VoEHardware::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((voeNetwork = webrtc::VoENetwork::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((voeAudioProccessing = webrtc::VoEAudioProcessing::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((voeRtpRtcp = webrtc::VoERTP_RTCP::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (voeBase->Init() != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((viE = webrtc::VideoEngine::Create()) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieBase = webrtc::ViEBase::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieCapture = webrtc::ViECapture::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieRender = webrtc::ViERender::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieCodec = webrtc::ViECodec::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieNetwork = webrtc::ViENetwork::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieRtpRtcp = webrtc::ViERTP_RTCP::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieBase->Init() != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    [self initAudioCodec];
    [self initVideoCodec];

    captureID = 0;
    videoChannel = -1;
    return TRUE;
}

-(void)initAudioCodec
{
    memset(&voeCodecInst, 0, sizeof(webrtc::CodecInst));
    if (voeCodec != NULL) {
        // Walk the codec list and pick iSAC as the audio send codec.
        for (int index = 0; index < voeCodec->NumOfCodecs(); index++) {
            webrtc::CodecInst ci;
            voeCodec->GetCodec(index, ci);
            if (strncmp(ci.plname, "ISAC", 4) == 0) {
                memcpy(&voeCodecInst, &ci, sizeof(webrtc::CodecInst));
                break;
            }
        }
        //voeCodecInst.channels = 1;
        //voeCodecInst.rate = -1;
    }
}

-(BOOL)start
{
    if ((audioChannel = voeBase->CreateChannel()) < 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieBase->CreateChannel(videoChannel) != 0) {
        DebugLog(@"AVErr: %d %s at line %d", vieBase->LastError(), __FUNCTION__, __LINE__);
        return FALSE;
    }
    DebugLog(@"AVInfo: CreateChannel success! %d, %d", videoChannel, audioChannel);

    //vieCodec->SetReceiveCodec(videoChannel, videoCodec);
    if (voeAudioProccessing->SetAecmMode() != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    voeAudioProccessing->SetAgcStatus(TRUE, webrtc::kAgcDefault);
    voeAudioProccessing->SetNsStatus(TRUE, webrtc::kNsHighSuppression);

    _voice_capture_device_index = -1;
    voeHardware->SetRecordingDevice(_voice_capture_device_index);
    voeHardware->SetPlayoutDevice(_voice_playback_device_index);
    if (voeHardware->SetLoudspeakerStatus(true) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
    }
    voeCodec->SetSendCodec(audioChannel, voeCodecInst);

    RtpRtcpStreamStruct streamStruct = [self createRtpStreamStruct];
    voeChannelTransport = new webrtc::test::VoiceChannelTransport(voeNetwork, audioChannel);
    voeChannelTransport->SetLocalReceiver2(localARtpPort.rtp, streamStruct);
    voeChannelTransport->SetSendDestination2([remoteIPAddress UTF8String], remoteARtpPort.rtp, remoteARtpPort.rtcp);

    if (vieCodec->SetSendCodec(videoChannel, videoCodec) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    vieRtpRtcp->SetNACKStatus(videoChannel, TRUE);
    vieRtpRtcp->SetRTCPStatus(videoChannel, webrtc::kRtcpNonCompound_RFC5506);
    vieRtpRtcp->SetKeyFrameRequestMethod(videoChannel, webrtc::kViEKeyFrameRequestPliRtcp);

    vieBase->SetVoiceEngine(voE);
    if (vieBase->ConnectAudioChannel(videoChannel, audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    if (deviceUniqueID == nil) {
        DebugLog(@"AVInfo NumberOfCaptureDevices is %d", vieCapture->NumberOfCaptureDevices());
        int list_count = vieCapture->NumberOfCaptureDevices();
        if (list_count > 0) {
            int list_number = 0;
            if (list_count > 1) {
                list_number = 1;//[[AVShareData instance] isUseFrontCamera]?0:1;
            }
            char device_name[KMaxDeviceNameLength];
            char unique_id[KMaxUniqueIdLength];
            memset(unique_id, 0, KMaxUniqueIdLength);
            vieCapture->GetCaptureDevice(list_number, device_name, KMaxDeviceNameLength, unique_id, KMaxUniqueIdLength);
            deviceUniqueID = [NSString stringWithFormat:@"%s", unique_id];
        }
    }
    DebugLog(@"AVInfo deviceUniqueID is %@", deviceUniqueID);
    if ((vieCapture->AllocateCaptureDevice([deviceUniqueID UTF8String], deviceUniqueID.length, captureID)) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    DebugLog(@"AVInfo captureID is %d", captureID);
    if (vieCapture->ConnectCaptureDevice(captureID, videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    webrtc::CaptureCapability captureCapability;
    // The original width/height values were lost in the post; the DEFAULT_VIDEO_CODEC_*
    // defines (352x288 in the WebRTC test headers) are assumed here.
    captureCapability.width = DEFAULT_VIDEO_CODEC_WIDTH;
    captureCapability.height = DEFAULT_VIDEO_CODEC_HEIGHT;
    captureCapability.codecType = webrtc::kVideoCodecVP8;
    captureCapability.maxFPS = DEFAULT_VIDEO_CODEC_MAX_FRAMERATE;
    //vieCapture->SetRotateCapturedFrames(captureID, <#const webrtc::RotateCapturedFrame rotation#>)
    if (vieCapture->StartCapture(captureID, captureCapability) != 0) {
    //if (vieCapture->StartCapture(captureID) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieRender->AddRenderer(captureID, [self localRenderView], 0, 0.0, 0.0, 1.0, 1.0)) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    /*
    if((vieRender->AddRenderer(captureID, [self localRenderView2], 0, 0.0, 0.0, 1.0, 1.0)) != 0){
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    */
    if (vieRender->StartRender(captureID) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieRender->AddRenderer(videoChannel, [self remoteRenderView], 1, 0.0f, 0.0f, 1.0f, 1.0f) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieRender->StartRender(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieBase->StartReceive(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieBase->StartSend(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (voeBase->StartReceive(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (voeBase->StartPlayout(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (voeBase->StartSend(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    //webrtc::CodecInst ci;
    //voeFile->StartRecordingMicrophone(@"a.avi",ci,1000);

    DebugLog(@"AVInfo: %s at line %d success!", __FUNCTION__, __LINE__);
    return TRUE;
}

-(BOOL)stop
{
    if (voeBase->StopSend(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (voeBase->StopReceive(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (voeBase->StopPlayout(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieBase->StopSend(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieBase->StopReceive(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieCapture->StopCapture(captureID) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieCapture->ReleaseCaptureDevice(captureID) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieRender->StopRender(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieRender->RemoveRenderer(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (voeBase->DeleteChannel(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieBase->DeleteChannel(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    DebugLog(@"AVInfo: %s at line %d success", __FUNCTION__, __LINE__);
    return TRUE;
}
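Putting it together, the wrapper is driven roughly in the order below. This is only a hypothetical call sequence: the class name matches the sketch at the top, and how the peer address and ports reach the wrapper is an assumption, since the original post does not show it (normally they come from your signalling channel).

// Hypothetical call sequence for the wrapper sketched above.
AVEngineWrapper *engine = [[AVEngineWrapper alloc] init];
if (![engine initWebrtcObjects]) {
    NSLog(@"AVErr: engine init failed");
    return;
}
// Assumed: the remote IP and RTP/RTCP ports have been delivered by your
// signalling channel and stored on the wrapper before -start is called.
if ([engine start]) {
    // Call in progress: localRenderView shows the camera preview,
    // remoteRenderView shows the decoded remote stream.
}
// Later, on hang-up:
[engine stop];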
Notes:
1. Audio processing:
1.1. WebRTC supports many audio codecs -- iLBC, iSAC, G.711, G.722, Opus, and so on. Different codecs suit different scenarios, so choose according to your own needs (see the sketch after these audio notes).
1.2. The hardest problems in audio are noise, echo, jitter, and automatic gain control, and they are also where the real value lies. Both WebRTC and the OS provide processing for them, but Android has so many models, each tweaked by its vendor, that different devices show different problems and some of them you have to solve yourself; the WebRTC team basically never tests on phones like Xiaomi or Coolpad.
1.3. AECM is currently done in software on Android. Reportedly some vendors will integrate it into hardware later; we will see how well that works.
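Regarding 1.1: switching the audio send codec just means picking a different entry from the VoECodec list, the same way initAudioCodec above picks iSAC. A minimal sketch under that assumption follows -- useAudioCodecNamed: is a made-up helper on the same wrapper class, and the payload names ("ILBC", "PCMU", "opus", etc.) must actually be compiled into your build.

// Sketch: select a different audio send codec by payload name.
// Needs <strings.h> for strcasecmp; relies on the voeCodec/audioChannel ivars above.
-(BOOL)useAudioCodecNamed:(const char *)plname
{
    webrtc::CodecInst ci;
    for (int i = 0; i < voeCodec->NumOfCodecs(); i++) {
        voeCodec->GetCodec(i, ci);
        if (strcasecmp(ci.plname, plname) == 0) {
            return voeCodec->SetSendCodec(audioChannel, ci) == 0;
        }
    }
    return FALSE;  // codec not available in this build
}
// e.g. [self useAudioCodecNamed:"opus"]; or "ILBC" for low-bitrate scenarios.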
2. Video processing:
2.1. WebRTC uses VP8 by default, a codec Google is pushing hard; VP9 will come later.
2.2. If you need H.264 compatibility you have to integrate it yourself. People have already done this, and WebRTC itself will support H.264 eventually.
2.3. As for whether VP8 or H.264 is better, compare and test them yourself instead of relying on hearsay; I believe anything Google pushes this hard will not be bad.
2.4. NACK (negative acknowledgement) in practice means retransmission of lost packets. On a poor network, packet loss produces corrupted, blocky frames; NACK fixes that, at the cost of some extra delay.
2.5. FEC (forward error correction) works differently from NACK: packets already carry redundant correction data, so even if a previous packet was not received correctly it can be reconstructed from that information (a sketch of enabling FEC on the video channel follows these notes).
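Regarding 2.4/2.5: the demo above already turns on NACK through SetNACKStatus. The old ViERTP_RTCP interface also exposes FEC; the sketch below assumes that interface, and the RED/ULPFEC payload types 96/97 are arbitrary examples that must match whatever the remote side negotiates.

// Sketch: protect the video channel with FEC, using the same vieRtpRtcp/videoChannel as above.
if (vieRtpRtcp->SetFECStatus(videoChannel, true, 96, 97) != 0) {
    DebugLog(@"AVErr: enabling FEC failed");
}
// Or combine both recovery mechanisms (retransmission + redundancy):
// vieRtpRtcp->SetHybridNACKFECStatus(videoChannel, true, 96, 97);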
# pacman -S gnome-system-log gnome-logsextra/gnome-logs 3.12.1-1 (gnome-extra) A log viewer for the ...