[Video Development][Live555] H264 RTSP live streaming with live555
A previous article covered "H264 Video Live Streaming over RTMP"; this article describes how to stream real-time H264 video over RTSP.
The idea is to hand the video stream to live555 and let live555 do the actual H264 streaming.
The capture module pushes H264 frames to live555 through a FIFO (a named pipe). When live555 receives an RTSP play request from a client, it starts reading H264 data from the FIFO and streams it out over RTSP. The overall flow is: capture/encode module → FIFO → live555 → RTSP clients. A minimal sketch of the FIFO handshake is shown below.
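For orientation, here is a minimal, self-contained sketch of the named-pipe handshake both sides rely on. The path /tmp/H264_fifo matches the code later in this article; everything else is illustrative and not the article's actual classes:

```cpp
// Sketch only: writer = capture/encode side, reader = live555 source side.
#include <fcntl.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>

static const char* kFifoPath = "/tmp/H264_fifo";

// Capture side: create the FIFO once, then push each encoded frame into it.
int push_frame(const unsigned char* data, size_t size)
{
    if (access(kFifoPath, F_OK) == -1 && mkfifo(kFifoPath, 0777) != 0)
        return -1;
    int fd = open(kFifoPath, O_WRONLY | O_NONBLOCK); // non-blocking: don't stall the encoder
    if (fd == -1) return -1;                         // no reader attached yet
    ssize_t n = write(fd, data, size);
    close(fd);
    return (int)n;
}

// live555 side: read whatever has arrived and hand it to the streaming code.
int pull_frame(unsigned char* buf, size_t cap)
{
    int fd = open(kFifoPath, O_RDONLY);
    if (fd == -1) return -1;
    ssize_t n = read(fd, buf, cap);
    close(fd);
    return (int)n;
}
```

The real implementation below keeps the read end open for the lifetime of the RTSP session; the sketch only shows the pipe semantics.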
Adjusting the Live555 Media Server
Download the live555 source code, add four new files under the media directory, and modify live555MediaServer.cpp. The four new files are:
WW_H264VideoServerMediaSubsession.h
WW_H264VideoServerMediaSubsession.cpp
WW_H264VideoSource.h
WW_H264VideoSource.cpp
The source of the four files is listed below.
WW_H264VideoServerMediaSubsession.h
```cpp
#pragma once

#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"
#include "GroupsockHelper.hh"
#include "OnDemandServerMediaSubsession.hh"
#include "WW_H264VideoSource.h"

class WW_H264VideoServerMediaSubsession : public OnDemandServerMediaSubsession
{
public:
    WW_H264VideoServerMediaSubsession(UsageEnvironment & env, FramedSource * source);
    ~WW_H264VideoServerMediaSubsession(void);

public:
    virtual char const * getAuxSDPLine(RTPSink * rtpSink, FramedSource * inputSource);
    virtual FramedSource * createNewStreamSource(unsigned clientSessionId, unsigned & estBitrate); // "estBitrate" is the stream's estimated bitrate, in kbps
    virtual RTPSink * createNewRTPSink(Groupsock * rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource * inputSource);

    static WW_H264VideoServerMediaSubsession * createNew(UsageEnvironment & env, FramedSource * source);
    static void afterPlayingDummy(void * ptr);
    static void chkForAuxSDPLine(void * ptr);
    void chkForAuxSDPLine1();

private:
    FramedSource * m_pSource;
    char * m_pSDPLine;
    RTPSink * m_pDummyRTPSink;
    char m_done;
};
```
WW_H264VideoServerMediaSubsession.cpp
```cpp
#include "WW_H264VideoServerMediaSubsession.h"

WW_H264VideoServerMediaSubsession::WW_H264VideoServerMediaSubsession(UsageEnvironment & env, FramedSource * source)
    : OnDemandServerMediaSubsession(env, True)
{
    m_pSource = source;
    m_pSDPLine = 0;
}

WW_H264VideoServerMediaSubsession::~WW_H264VideoServerMediaSubsession(void)
{
    if (m_pSDPLine)
    {
        free(m_pSDPLine); // allocated with strdup()
    }
}

WW_H264VideoServerMediaSubsession * WW_H264VideoServerMediaSubsession::createNew(UsageEnvironment & env, FramedSource * source)
{
    return new WW_H264VideoServerMediaSubsession(env, source);
}

FramedSource * WW_H264VideoServerMediaSubsession::createNewStreamSource(unsigned clientSessionId, unsigned & estBitrate)
{
    // A fresh source is created for each client session and wrapped in the H264 framer,
    // which splits the byte stream into NAL units for the RTP sink.
    return H264VideoStreamFramer::createNew(envir(), new WW_H264VideoSource(envir()));
}

RTPSink * WW_H264VideoServerMediaSubsession::createNewRTPSink(Groupsock * rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource * inputSource)
{
    return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
}

char const * WW_H264VideoServerMediaSubsession::getAuxSDPLine(RTPSink * rtpSink, FramedSource * inputSource)
{
    if (m_pSDPLine)
    {
        return m_pSDPLine;
    }

    // Play into a dummy sink until the framer has seen SPS/PPS and the sink can
    // produce the aux SDP line (sprop-parameter-sets).
    m_pDummyRTPSink = rtpSink;
    m_pDummyRTPSink->startPlaying(*inputSource, 0, 0);
    m_done = 0; // reset the flag before polling, otherwise the event loop below may never exit
    chkForAuxSDPLine(this);
    envir().taskScheduler().doEventLoop(&m_done);

    m_pSDPLine = strdup(m_pDummyRTPSink->auxSDPLine());
    m_pDummyRTPSink->stopPlaying();
    return m_pSDPLine;
}

void WW_H264VideoServerMediaSubsession::afterPlayingDummy(void * ptr)
{
    WW_H264VideoServerMediaSubsession * This = (WW_H264VideoServerMediaSubsession *)ptr;
    This->m_done = 0xff;
}

void WW_H264VideoServerMediaSubsession::chkForAuxSDPLine(void * ptr)
{
    WW_H264VideoServerMediaSubsession * This = (WW_H264VideoServerMediaSubsession *)ptr;
    This->chkForAuxSDPLine1();
}

void WW_H264VideoServerMediaSubsession::chkForAuxSDPLine1()
{
    if (m_pDummyRTPSink->auxSDPLine())
    {
        m_done = 0xff;
    }
    else
    {
        // Not ready yet; check again after roughly one frame interval.
        double delay = 1000.0 / (FRAME_PER_SEC); // ms
        int to_delay = delay * 1000;              // us
        nextTask() = envir().taskScheduler().scheduleDelayedTask(to_delay, chkForAuxSDPLine, this);
    }
}
```
WW_H264VideoSource.h
```cpp
#ifndef _WW_H264VideoSource_H
#define _WW_H264VideoSource_H

#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"
#include "GroupsockHelper.hh"
#include "FramedSource.hh"

#define FRAME_PER_SEC 25

class WW_H264VideoSource : public FramedSource
{
public:
    WW_H264VideoSource(UsageEnvironment & env);
    ~WW_H264VideoSource(void);

public:
    virtual void doGetNextFrame();
    virtual unsigned int maxFrameSize() const;

    static void getNextFrame(void * ptr);
    void GetFrameData();

private:
    void *m_pToken;
    char *m_pFrameBuffer;
    int m_hFifo;
};

#endif
```
WW_H264VideoSource.cpp
```cpp
#include "WW_H264VideoSource.h"
#include <stdio.h>

#ifdef WIN32
#include <windows.h>
#else
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/time.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <limits.h>
#endif

#define FIFO_NAME     "/tmp/H264_fifo"
#define BUFFER_SIZE   PIPE_BUF
#define REV_BUF_SIZE  (1024*1024)

#ifdef WIN32
#define mSleep(ms) Sleep(ms)
#else
#define mSleep(ms) usleep(ms*1000)
#endif

WW_H264VideoSource::WW_H264VideoSource(UsageEnvironment & env) :
    FramedSource(env),
    m_pToken(0),
    m_pFrameBuffer(0),
    m_hFifo(-1)
{
    // Open the FIFO written by the capture side; this blocks until a writer shows up.
    m_hFifo = open(FIFO_NAME, O_RDONLY);
    printf("[MEDIA SERVER] open fifo result = [%d]\n", m_hFifo);
    if (m_hFifo == -1)
    {
        return;
    }

    m_pFrameBuffer = new char[REV_BUF_SIZE];
    if (m_pFrameBuffer == NULL)
    {
        printf("[MEDIA SERVER] error malloc data buffer failed\n");
        return;
    }
    memset(m_pFrameBuffer, 0, REV_BUF_SIZE);
}

WW_H264VideoSource::~WW_H264VideoSource(void)
{
    if (m_hFifo != -1)
    {
        ::close(m_hFifo);
    }

    envir().taskScheduler().unscheduleDelayedTask(m_pToken);

    if (m_pFrameBuffer)
    {
        delete[] m_pFrameBuffer;
        m_pFrameBuffer = NULL;
    }
    printf("[MEDIA SERVER] rtsp connection closed\n");
}

void WW_H264VideoSource::doGetNextFrame()
{
    // Compute the polling interval from the frame rate (poll twice per frame period).
    double delay = 1000.0 / (FRAME_PER_SEC * 2); // ms
    int to_delay = delay * 1000;                 // us

    m_pToken = envir().taskScheduler().scheduleDelayedTask(to_delay, getNextFrame, this);
}

unsigned int WW_H264VideoSource::maxFrameSize() const
{
    return 1024*200;
}

void WW_H264VideoSource::getNextFrame(void * ptr)
{
    ((WW_H264VideoSource *)ptr)->GetFrameData();
}

void WW_H264VideoSource::GetFrameData()
{
    gettimeofday(&fPresentationTime, 0);

    fFrameSize = 0;
    int len = 0;
    unsigned char buffer[BUFFER_SIZE] = {0};

    // Drain whatever is currently queued in the FIFO into the local frame buffer,
    // taking care not to overrun the 1 MB buffer.
    while (fFrameSize + BUFFER_SIZE <= REV_BUF_SIZE &&
           (len = read(m_hFifo, buffer, BUFFER_SIZE)) > 0)
    {
        memcpy(m_pFrameBuffer + fFrameSize, buffer, len);
        fFrameSize += len;
    }
    //printf("[MEDIA SERVER] GetFrameData len = [%d], fMaxSize = [%d]\n", fFrameSize, fMaxSize);

    // Truncate before copying: fTo only has room for fMaxSize bytes.
    if (fFrameSize > fMaxSize)
    {
        fNumTruncatedBytes = fFrameSize - fMaxSize;
        fFrameSize = fMaxSize;
    }
    else
    {
        fNumTruncatedBytes = 0;
    }
    memcpy(fTo, m_pFrameBuffer, fFrameSize);

    // Hand the data back to live555.
    afterGetting(this);
}
```
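A possible refinement that is not in the original code: live555's scheduler can pace delivery more smoothly when the source also reports a frame duration. Assuming a fixed 25 fps input, GetFrameData could fill in the inherited fDurationInMicroseconds member just before handing the frame back:

```cpp
// Sketch only: optional pacing hint, assuming a constant FRAME_PER_SEC input rate.
// fDurationInMicroseconds is an inherited FramedSource member, like fPresentationTime.
fDurationInMicroseconds = 1000000 / FRAME_PER_SEC;
afterGetting(this);
```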
Modify live555MediaServer.cpp as follows:
```cpp
/**********
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation; either version 2.1 of the License, or (at your
option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)

This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
more details.

You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
**********/
// Copyright (c) 1996-2013, Live Networks, Inc. All rights reserved
// LIVE555 Media Server
// main program

#include <BasicUsageEnvironment.hh>
#include "DynamicRTSPServer.hh"
#include "version.hh"
#include "WW_H264VideoSource.h"
#include "WW_H264VideoServerMediaSubsession.h"

int main(int argc, char** argv) {
  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  UserAuthenticationDatabase* authDB = NULL;
#ifdef ACCESS_CONTROL
  // To implement client access control to the RTSP server, do the following:
  authDB = new UserAuthenticationDatabase;
  authDB->addUserRecord("username1", "password1"); // replace these with real strings
  // Repeat the above with each <username>, <password> that you wish to allow
  // access to the server.
#endif

  // Create the RTSP server (port 554 is the standard RTSP port and usually needs root):
  RTSPServer* rtspServer = RTSPServer::createNew(*env, 554, authDB);
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }

  // Add the live stream. The subsession creates its own WW_H264VideoSource per client,
  // so a null source pointer is passed here.
  WW_H264VideoSource * videoSource = 0;
  ServerMediaSession * sms = ServerMediaSession::createNew(*env, "live", 0, "ww live test");
  sms->addSubsession(WW_H264VideoServerMediaSubsession::createNew(*env, videoSource));
  rtspServer->addServerMediaSession(sms);

  char * url = rtspServer->rtspURL(sms);
  *env << "using url \"" << url << "\"\n";
  delete[] url;

  // Run the event loop (does not return).
  env->taskScheduler().doEventLoop();

  rtspServer->removeServerMediaSession(sms);
  Medium::close(rtspServer);
  env->reclaim();
  delete scheduler;
  return 0;
}
```
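Note that binding to port 554 normally requires root privileges. The stock live555MediaServer falls back to port 8554 when 554 is unavailable, and the same idea could be applied here; a sketch, not part of the original code:

```cpp
// Sketch only: try the standard RTSP port first, then fall back to 8554.
RTSPServer* rtspServer = RTSPServer::createNew(*env, 554, authDB);
if (rtspServer == NULL) {
  rtspServer = RTSPServer::createNew(*env, 8554, authDB);
}
if (rtspServer == NULL) {
  *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
  exit(1);
}
```

Once the server is running, the stream can be opened with any RTSP-capable player (VLC, ffplay, etc.) at the URL printed on startup, e.g. rtsp://<server-ip>/live.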
The RTSPStream class that feeds the H264 video stream into the FIFO
```cpp
/********************************************************************
filename:   RTSPStream.h
created:    2013-08-01
author:     firehood
purpose:    H264 RTSP live streaming via live555
*********************************************************************/
#pragma once

#include <stdio.h>
#ifdef WIN32
#include <windows.h>
#else
#include <pthread.h>
#endif

#ifdef WIN32
typedef HANDLE ThreadHandle;
#define mSleep(ms) Sleep(ms)
#else
typedef unsigned int SOCKET;
typedef pthread_t ThreadHandle;
#define mSleep(ms) usleep(ms*1000)
#endif

#define FILEBUFSIZE (1024 * 1024)

class CRTSPStream
{
public:
    CRTSPStream(void);
    ~CRTSPStream(void);

public:
    // Initialize (creates the FIFO if it does not exist yet)
    bool Init();
    // Uninitialize
    void Uninit();
    // Send an H264 file
    bool SendH264File(const char *pFileName);
    // Send one block of H264 data
    int SendH264Data(const unsigned char *data, unsigned int size);
};
```
```cpp
/********************************************************************
filename:   RTSPStream.cpp
created:    2013-08-01
author:     firehood
purpose:    H264 RTSP live streaming via live555
*********************************************************************/
#include "RTSPStream.h"

#ifdef WIN32
#else
#include <sys/types.h>
#include <sys/stat.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <limits.h>
#include <errno.h>
#endif

#define FIFO_NAME   "/tmp/H264_fifo"
#define BUFFERSIZE  PIPE_BUF

CRTSPStream::CRTSPStream(void)
{
}

CRTSPStream::~CRTSPStream(void)
{
}

bool CRTSPStream::Init()
{
    // Create the named pipe if it does not exist yet.
    if (access(FIFO_NAME, F_OK) == -1)
    {
        int res = mkfifo(FIFO_NAME, 0777);
        if (res != 0)
        {
            printf("[RTSPStream] Create fifo failed.\n");
            return false;
        }
    }
    return true;
}

void CRTSPStream::Uninit()
{
}

bool CRTSPStream::SendH264File(const char *pFileName)
{
    if (pFileName == NULL)
    {
        return false;
    }
    FILE *fp = fopen(pFileName, "rb");
    if (!fp)
    {
        printf("[RTSPStream] error: open file %s failed!\n", pFileName);
        return false;
    }
    fseek(fp, 0, SEEK_SET);

    unsigned char *buffer = new unsigned char[FILEBUFSIZE];
    int pos = 0;
    while (1)
    {
        // Top up the buffer from the file, send what fits into the pipe, and keep the
        // leftover bytes at the front of the buffer for the next round.
        int readlen = fread(buffer + pos, sizeof(unsigned char), FILEBUFSIZE - pos, fp);
        if (readlen <= 0)
        {
            break;
        }
        readlen += pos;
        int writelen = SendH264Data(buffer, readlen);
        if (writelen <= 0)
        {
            break;
        }
        memcpy(buffer, buffer + writelen, readlen - writelen);
        pos = readlen - writelen;
        mSleep(25);
    }
    fclose(fp);
    delete[] buffer;
    return true;
}

// Send one block of H264 data through the FIFO; returns the number of bytes written.
int CRTSPStream::SendH264Data(const unsigned char *data, unsigned int size)
{
    if (data == NULL)
    {
        return 0;
    }
    // Open the pipe in non-blocking mode so the sender never stalls indefinitely.
    int pipe_fd = open(FIFO_NAME, O_WRONLY | O_NONBLOCK);
    //printf("[RTSPStream] open fifo result = [%d]\n", pipe_fd);
    if (pipe_fd == -1)
    {
        return 0;
    }

    unsigned int send_size = 0;
    unsigned int remain_size = size;
    while (send_size < size)
    {
        unsigned int data_len = (remain_size < BUFFERSIZE) ? remain_size : BUFFERSIZE;
        int len = write(pipe_fd, data + send_size, data_len);
        if (len == -1)
        {
            static int resend_count = 0;
            if (errno == EAGAIN && ++resend_count <= 3)
            {
                printf("[RTSPStream] write fifo error, resend..\n");
                continue;
            }
            resend_count = 0;
            printf("[RTSPStream] write fifo error, errorcode[%d], send_size[%d]\n", errno, send_size);
            break;
        }
        else
        {
            send_size += len;
            remain_size -= len;
        }
    }
    close(pipe_fd);
    //printf("[RTSPStream] SendH264Data datalen[%d], sendsize = [%d]\n", size, send_size);
    return send_size;
}
```
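In a real live setup the capture/encoder thread would call SendH264Data once per encoded frame instead of pushing a whole file. A hedged sketch of such a loop follows; get_encoded_frame is a hypothetical placeholder for whatever the capture/encode module actually provides and is not part of the article's code:

```cpp
#include "RTSPStream.h"

// Placeholder for the capture/encoder module; returns one Annex-B frame per call,
// or <= 0 when the capture stops. Purely illustrative.
extern int get_encoded_frame(unsigned char *buf, int bufsize);

int main()
{
    CRTSPStream sender;
    if (!sender.Init())
        return -1;

    unsigned char frame[200 * 1024];
    for (;;)
    {
        int size = get_encoded_frame(frame, sizeof(frame)); // hypothetical encoder callback
        if (size <= 0)
            break;
        sender.SendH264Data(frame, (unsigned int)size); // one frame per write into the FIFO
        mSleep(1000 / 25);                              // pace at roughly 25 fps
    }
    sender.Uninit();
    return 0;
}
```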
Test program
```cpp
#include <stdio.h>
#include <stdlib.h>
#include "RTSPStream.h"

int main(int argc, char* argv[])
{
    CRTSPStream rtspSender;
    bool bRet = rtspSender.Init();
    if (!bRet)
    {
        return -1;
    }
    rtspSender.SendH264File("E:\\测试视频\\test.264");
    system("pause");
    return 0;
}
```