See also: related notes on live555.

1. First, modify live555 by defining a source class that fetches its data directly from memory instead of reading it from a file.

The custom class is named H264FramedLiveSource.

 
/*
 * Filename: H264FramedLiveSource.hh
 * Author: chenbin
 * Create date: 2013/1/22
 */
#ifndef _H264FRAMEDLIVESOURCE_HH
#define _H264FRAMEDLIVESOURCE_HH

#include <stdio.h>          // for FILE
#include <FramedSource.hh>

class H264FramedLiveSource : public FramedSource
{
public:
    static H264FramedLiveSource* createNew(UsageEnvironment& env,
                                           char const* fileName,
                                           unsigned preferredFrameSize = 0,
                                           unsigned playTimePerFrame = 0);

protected:
    H264FramedLiveSource(UsageEnvironment& env,
                         char const* fileName,
                         unsigned preferredFrameSize,
                         unsigned playTimePerFrame);
    // called only by createNew()
    ~H264FramedLiveSource();

private:
    // redefined virtual functions:
    virtual void doGetNextFrame();
    int TransportData(unsigned char* to, unsigned maxSize);

protected:
    FILE* fp;
};

#endif
/*
 * Filename: H264FramedLiveSource.cpp
 * Author: mlj
 * Create date: 2013/1/22
 */
#include "H264FramedLiveSource.hh"

H264FramedLiveSource::H264FramedLiveSource(UsageEnvironment& env,
                                           char const* fileName,
                                           unsigned preferredFrameSize,
                                           unsigned playTimePerFrame)
    : FramedSource(env)
{
    fp = fopen(fileName, "rb");
}

H264FramedLiveSource* H264FramedLiveSource::createNew(UsageEnvironment& env,
                                                      char const* fileName,
                                                      unsigned preferredFrameSize /*= 0*/,
                                                      unsigned playTimePerFrame /*= 0*/)
{
    H264FramedLiveSource* newSource =
        new H264FramedLiveSource(env, fileName, preferredFrameSize, playTimePerFrame);
    return newSource;
}

H264FramedLiveSource::~H264FramedLiveSource()
{
    fclose(fp);
}

// Return the total size of the file without disturbing the current read position.
long filesize(FILE* stream)
{
    long curpos, length;
    curpos = ftell(stream);
    fseek(stream, 0L, SEEK_END);
    length = ftell(stream);
    fseek(stream, curpos, SEEK_SET);
    return length;
}

void H264FramedLiveSource::doGetNextFrame()
{
    // If the file is larger than the sink's buffer, deliver fMaxSize bytes;
    // otherwise deliver the whole file and rewind so the stream loops.
    if (filesize(fp) > fMaxSize)
        fFrameSize = fread(fTo, 1, fMaxSize, fp);
    else
    {
        fFrameSize = fread(fTo, 1, filesize(fp), fp);
        fseek(fp, 0, SEEK_SET);
    }
    //fFrameSize = fMaxSize;

    // Schedule a delayed task with a delay of 0, i.e. run FramedSource::afterGetting immediately.
    nextTask() = envir().taskScheduler().scheduleDelayedTask(0,
        (TaskFunc*)FramedSource::afterGetting, this);
    return;
}

In H264FramedLiveSource::doGetNextFrame(), the data to be sent is copied into fTo (at most fMaxSize bytes), and fFrameSize reports how many bytes are actually delivered. For now the data is still read from a file, purely as a test.
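Since the eventual goal is to feed encoded frames from memory, a minimal sketch of a memory-backed doGetNextFrame() might look like the following. GetFrameFromEncoder() is a hypothetical hook into your capture/encode pipeline (it is not part of live555 or of the original post); fTo, fMaxSize, fFrameSize and fNumTruncatedBytes are the real FramedSource members used above.

// Sketch only: replace the file read with one encoded frame pulled from memory.
void H264FramedLiveSource::doGetNextFrame()
{
    unsigned char* frame = NULL;
    unsigned frameLen = 0;
    GetFrameFromEncoder(&frame, &frameLen); // hypothetical: returns one encoded frame / NAL unit

    if (frameLen > fMaxSize) {
        // The sink buffer is too small: deliver what fits and report the rest as truncated.
        fFrameSize = fMaxSize;
        fNumTruncatedBytes = frameLen - fMaxSize;
    } else {
        fFrameSize = frameLen;
        fNumTruncatedBytes = 0;
    }
    memcpy(fTo, frame, fFrameSize);

    // Hand the frame to the downstream H264VideoStreamFramer.
    nextTask() = envir().taskScheduler().scheduleDelayedTask(0,
        (TaskFunc*)FramedSource::afterGetting, this);
}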

2. Define your own ServerMediaSubsession.

/*
 * Filename: H264LiveVideoServerMediaSubssion.hh
 * Author: mlj
 * Create date: 2013/1/22
 */
#ifndef _H264_LIVE_VIDEO_SERVER_MEDIA_SUBSESSION_HH
#define _H264_LIVE_VIDEO_SERVER_MEDIA_SUBSESSION_HH

#include "H264VideoFileServerMediaSubsession.hh"

class H264LiveVideoServerMediaSubssion : public H264VideoFileServerMediaSubsession
{
public:
    static H264LiveVideoServerMediaSubssion*
    createNew(UsageEnvironment& env,
              char const* fileName,
              Boolean reuseFirstSource);

protected: // we're a virtual base class
    H264LiveVideoServerMediaSubssion(UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource);
    ~H264LiveVideoServerMediaSubssion();

protected: // redefined virtual functions
    FramedSource* createNewStreamSource(unsigned clientSessionId,
                                        unsigned& estBitrate);

public:
    char fFileName[100];
};

#endif
/*
 * Filename: H264LiveVideoServerMediaSubssion.cpp
 * Author: chenbin
 * Create date: 2012/11/29
 */
#include <string.h>         // for strcpy
#include "H264LiveVideoServerMediaSubssion.hh"
#include "H264FramedLiveSource.hh"
#include "H264VideoStreamFramer.hh"

H264LiveVideoServerMediaSubssion*
H264LiveVideoServerMediaSubssion::createNew(UsageEnvironment& env,
                                            char const* fileName,
                                            Boolean reuseFirstSource)
{
    return new H264LiveVideoServerMediaSubssion(env, fileName, reuseFirstSource);
}

H264LiveVideoServerMediaSubssion::H264LiveVideoServerMediaSubssion(UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource)
    : H264VideoFileServerMediaSubsession(env, fileName, reuseFirstSource)
{
    strcpy(fFileName, fileName);
}

H264LiveVideoServerMediaSubssion::~H264LiveVideoServerMediaSubssion()
{
}

FramedSource* H264LiveVideoServerMediaSubssion::createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate)
{
    /* Remains to do: assign a real estBitrate */
    estBitrate = 1000; // kbps, estimate

    // Create the video source:
    H264FramedLiveSource* liveSource = H264FramedLiveSource::createNew(envir(), fFileName);
    if (liveSource == NULL)
    {
        return NULL;
    }

    // Create a framer for the Video Elementary Stream:
    return H264VideoStreamFramer::createNew(envir(), liveSource);
}

3. The main program.

/**********
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation; either version 2.1 of the License, or (at your
option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)

This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
more details.

You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
**********/
// Copyright (c) 1996-2012, Live Networks, Inc. All rights reserved
// A test program that demonstrates how to stream - via unicast RTP
// - various kinds of file on demand, using a built-in RTSP server.
// main program

#include "H264LiveVideoServerMediaSubssion.hh"
#include "H264FramedLiveSource.hh"
#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"

#pragma comment (lib, "Ws2_32.lib")
#pragma comment (lib, "BasicUsageEnvironment.lib")
#pragma comment (lib, "groupsock.lib")
#pragma comment (lib, "liveMedia.lib")
#pragma comment (lib, "UsageEnvironment.lib")

UsageEnvironment* env;

// To make the second and subsequent client for each stream reuse the same
// input stream as the first client (rather than playing the file from the
// start for each client), change the following "False" to "True":
Boolean reuseFirstSource = False;

// To stream *only* MPEG-1 or 2 video "I" frames
// (e.g., to reduce network bandwidth),
// change the following "False" to "True":
Boolean iFramesOnly = False;

static void announceStream(RTSPServer* rtspServer, ServerMediaSession* sms,
                           char const* streamName, char const* inputFileName); // fwd

static char newMatroskaDemuxWatchVariable;
static MatroskaFileServerDemux* demux;
static void onMatroskaDemuxCreation(MatroskaFileServerDemux* newDemux, void* /*clientData*/) {
    demux = newDemux;
    newMatroskaDemuxWatchVariable = 1;
}

int main(int argc, char** argv) {
    // Begin by setting up our usage environment:
    TaskScheduler* scheduler = BasicTaskScheduler::createNew();
    env = BasicUsageEnvironment::createNew(*scheduler);

    UserAuthenticationDatabase* authDB = NULL;
#ifdef ACCESS_CONTROL
    // To implement client access control to the RTSP server, do the following:
    authDB = new UserAuthenticationDatabase;
    authDB->addUserRecord("username1", "password1"); // replace these with real strings
    // Repeat the above with each <username>, <password> that you wish to allow
    // access to the server.
#endif

    // Create the RTSP server:
    RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554, authDB);
    if (rtspServer == NULL) {
        *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
        exit(1);
    }

    char const* descriptionString
        = "Session streamed by \"testOnDemandRTSPServer\"";

    // Set up each of the possible streams that can be served by the
    // RTSP server. Each such stream is implemented using a
    // "ServerMediaSession" object, plus one or more
    // "ServerMediaSubsession" objects for each audio/video substream.

    // A H.264 video elementary stream:
    {
        char const* streamName = "h264ESVideoTest";
        char const* inputFileName = "test.264";
        ServerMediaSession* sms
            = ServerMediaSession::createNew(*env, streamName, streamName,
                                            descriptionString);
        sms->addSubsession(H264LiveVideoServerMediaSubssion
            ::createNew(*env, inputFileName, reuseFirstSource)); // changed to use our own H264LiveVideoServerMediaSubssion
        rtspServer->addServerMediaSession(sms);

        announceStream(rtspServer, sms, streamName, inputFileName);
    }

    // Also, attempt to create a HTTP server for RTSP-over-HTTP tunneling.
    // Try first with the default HTTP port (80), and then with the alternative HTTP
    // port numbers (8000 and 8080).
    //if (rtspServer->setUpTunnelingOverHTTP(80) || rtspServer->setUpTunnelingOverHTTP(8000) || rtspServer->setUpTunnelingOverHTTP(8080)) {
    //    *env << "\n(We use port " << rtspServer->httpServerPortNum() << " for optional RTSP-over-HTTP tunneling.)\n";
    //} else {
    //    *env << "\n(RTSP-over-HTTP tunneling is not available.)\n";
    //}

    env->taskScheduler().doEventLoop(); // does not return

    return 0; // only to prevent compiler warning
}

static void announceStream(RTSPServer* rtspServer, ServerMediaSession* sms,
                           char const* streamName, char const* inputFileName) {
    char* url = rtspServer->rtspURL(sms);
    UsageEnvironment& env = rtspServer->envir();
    env << "\n\"" << streamName << "\" stream, from the file \""
        << inputFileName << "\"\n";
    env << "Play this stream using the URL \"" << url << "\"\n";
    delete[] url;
}

Running ffplay.exe rtsp://115.156.164.19:8554/h264ESVideoTest then plays the test.264 video.

Project configuration: the four live555 libraries are placed under the lib folder (a sketch of an equivalent command-line build follows below).

Library directory: G:\workspace\avs\live555test\live555test\lib

Include directories: G:\workspace\avs\live555test\live555test\BasicUsageEnvironment\include;G:\workspace\avs\live555test\live555test\UsageEnvironment\include;G:\workspace\avs\live555test\live555test\liveMedia\include;G:\workspace\avs\live555test\live555test\groupsock\include
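For readers not using the Visual Studio project, a rough equivalent on a GNU toolchain might look like the command below. The source-tree paths, the ./lib directory, and the output name are assumptions; the library names and link order follow the live555 Makefiles, and the Ws2_32 pragma from the Windows build is simply not needed:

g++ -o live555test main.cpp H264FramedLiveSource.cpp H264LiveVideoServerMediaSubssion.cpp \
    -I./live/BasicUsageEnvironment/include -I./live/UsageEnvironment/include \
    -I./live/liveMedia/include -I./live/groupsock/include \
    -L./lib -lliveMedia -lgroupsock -lBasicUsageEnvironment -lUsageEnvironment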

Source code:

svn checkout http://live555-send-test.googlecode.com/svn/trunk/ live555-send-test-read-only

FROM:  http://www.cnblogs.com/mlj318/archive/2013/01/23/2872932.html
