Windows下利用live555实现H264实时流RTSP发送
2020-12-13 05:19
标签:windows live555 h264实时流 rtsp 发送服务器 文如其名,最近在做的项目要求利用RTSP协议转发处理完的H264视频数据给上一层客户端,环境是Windows的VS2013,于是就各种百度谷歌找代码。结果在得到利用live555去做比较简单的结论的同时也悲情地发现,网上别人贴出来的代码基本都是Linux上面的。在修改了两份来适用于Windows无效后,又一次陷入了百度谷歌的无尽搜索中。Anyway,最后终于解决了,所以贴出代码跟大家分享下,希望能给和我需求相似的童鞋一点启发,也希望有高手指正其中的问题。 用live555进行RTSP的播放基本上是通过修改其给出来的播放本地文件的DEMO来实现的。但由于其DEMO封装的比较深,所以要直接修改他的fread处的代码变成内存拷贝来实现实时传输会显得比较别扭。本文参考了网上的一些代码,自定义了一个继承自H264VideoFileServerMediaSubsession的类来进行处理,同时定义了一个继承自FramedSource的类来做内存的拷贝操作,该类亦是区别于读本地文件和实时流之紧要处。 一口气杂七杂八说了好多,下面贴出代码吧。如果觉得需要或者懒得自己搭建live555的环境亦可以在文中最后的链接中下载该工程(环境为VS2013),如果你的VS版本合适即可直接运行。 主文件(程序入口) H264VideoFileServerMediaSubsession.hh
H264FramedLiveSource.hh
Windows下利用live555实现H264实时流RTSP发送 标签:windows live555 h264实时流 rtsp 发送服务器 原文地址:http://blog.csdn.net/weixinhum/article/details/38067743
#include "H264LiveVideoServerMediaSubssion.hh"
#include "H264FramedLiveSource.hh"
#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"
#include <cstdlib>
#define BUFSIZE 1024*200
static void announceStream(RTSPServer* rtspServer, ServerMediaSession* sms,char const* streamName)//显示RTSP连接信息
{
char* url = rtspServer->rtspURL(sms);
UsageEnvironment& env = rtspServer->envir();
env getResultMsg() addSubsession(H264LiveVideoServerMediaSubssion::createNew(*env, reuseFirstSource, &datasize, databuf,&dosent));//修改为自己实现的H264LiveVideoServerMediaSubssion
rtspServer->addServerMediaSession(sms);
announceStream(rtspServer, sms, streamName);//提示用户输入连接信息
env->taskScheduler().doEventLoop(); //循环等待连接
free(databuf);//释放掉内存
return 0;
}
自定义H264VideoFileServerMediaSubsession类
#ifndef _H264_LIVE_VIDEO_SERVER_MEDIA_SUBSESSION_HH
#define _H264_LIVE_VIDEO_SERVER_MEDIA_SUBSESSION_HH
#include "H264VideoFileServerMediaSubsession.hh"
// On-demand RTSP subsession that serves H.264 from a caller-supplied memory
// buffer instead of a file. It reuses H264VideoFileServerMediaSubsession for
// all the SDP/RTP plumbing and only replaces the data source: the overridden
// createNewStreamSource() returns an H264FramedLiveSource that reads from the
// shared buffer below.
class H264LiveVideoServerMediaSubssion : public H264VideoFileServerMediaSubsession {
public:
// datasize/databuf/dosent point at producer-owned state shared with the
// encoder side; this class stores the pointers but does not own the memory.
static H264LiveVideoServerMediaSubssion* createNew(UsageEnvironment& env, Boolean reuseFirstSource, int *datasize, unsigned char* databuf, bool *dosent);
protected: // we're a virtual base class
H264LiveVideoServerMediaSubssion(UsageEnvironment& env, Boolean reuseFirstSource, int *datasize, unsigned char* databuf, bool *dosent);
~H264LiveVideoServerMediaSubssion();
protected: // redefined virtual functions
FramedSource* createNewStreamSource(unsigned clientSessionId,unsigned& estBitrate);
public:
char fFileName[100]; // scratch name handed to the file-based parent ctor; never opened as a real file (see the constructor in the .cpp)
int *Server_datasize; // pointer to the size of the current encoded frame
unsigned char* Server_databuf; // pointer to the shared frame buffer
bool *Server_dosent; // send flag: true when a new frame is ready
};
#endif
H264VideoFileServerMediaSubsession.cpp
#include "H264LiveVideoServerMediaSubssion.hh"
#include "H264FramedLiveSource.hh"
#include "H264VideoStreamFramer.hh"
// Factory: allocate a subsession wired to the producer's shared buffer state.
H264LiveVideoServerMediaSubssion* H264LiveVideoServerMediaSubssion::createNew(UsageEnvironment& env, Boolean reuseFirstSource, int *datasize, unsigned char* databuf, bool *dosent)
{
    H264LiveVideoServerMediaSubssion* subsession =
        new H264LiveVideoServerMediaSubssion(env, reuseFirstSource, datasize, databuf, dosent);
    return subsession;
}
// The parent class requires a file name, but we never open a file: our
// createNewStreamSource() override replaces the file reader entirely.
// Passing the empty literal "" (instead of the original code's fFileName
// member) matters: base subobjects are constructed before data members, so
// fFileName held indeterminate bytes at this point and the base's string
// copy could read past the array — undefined behavior.
H264LiveVideoServerMediaSubssion::H264LiveVideoServerMediaSubssion(UsageEnvironment& env, Boolean reuseFirstSource, int *datasize, unsigned char* databuf, bool *dosent)
: H264VideoFileServerMediaSubsession(env, "", reuseFirstSource)
{
    Server_datasize = datasize; // size of the current encoded frame
    Server_databuf = databuf;   // shared frame buffer (producer-owned)
    Server_dosent = dosent;     // flag set by the producer when a frame is ready
}
// Nothing to release: all pointed-to state is owned by the producer side.
H264LiveVideoServerMediaSubssion::~H264LiveVideoServerMediaSubssion()
{
}
// Build the per-client stream source: a memory-backed H264FramedLiveSource
// (replacing the parent's file reader) wrapped in an H264VideoStreamFramer
// that parses the byte stream into NAL units for the RTP sink.
FramedSource* H264LiveVideoServerMediaSubssion::createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate)
{
    estBitrate = 1000; // kbps — rough placeholder estimate (remains to be tuned)

    FramedSource* memorySource =
        H264FramedLiveSource::createNew(envir(), Server_datasize, Server_databuf, Server_dosent);
    if (memorySource == NULL) return NULL;

    return H264VideoStreamFramer::createNew(envir(), memorySource);
}
自定义H264FramedLiveSource类
#ifndef _H264FRAMEDLIVESOURCE_HH
#define _H264FRAMEDLIVESOURCE_HH
#include
H264FramedLiveSource.cpp
#include "H264FramedLiveSource.hh"
// Captures the shared-state pointers wired in by the subsession; the source
// does not own this memory. preferredFrameSize and playTimePerFrame mirror
// the ByteStream sources' API but are currently unused here.
H264FramedLiveSource::H264FramedLiveSource(UsageEnvironment& env, int *datasize, unsigned char* databuf, bool *dosent, unsigned preferredFrameSize, unsigned playTimePerFrame)
: FramedSource(env)
{
Framed_datasize = datasize;// pointer to the size of the current encoded frame
Framed_databuf = databuf;// pointer to the shared frame buffer
Framed_dosent = dosent;// send flag: true when a new frame is ready
}
// Factory: allocate a framed source that reads from the shared memory buffer.
H264FramedLiveSource* H264FramedLiveSource::createNew(UsageEnvironment& env, int *datasize, unsigned char* databuf, bool *dosent, unsigned preferredFrameSize, unsigned playTimePerFrame)
{
    return new H264FramedLiveSource(env, datasize, databuf, dosent, preferredFrameSize, playTimePerFrame);
}
// Nothing to release: the buffer and flags belong to the producer side.
H264FramedLiveSource::~H264FramedLiveSource()
{
}
/*
 * Deliver the next chunk of the current H.264 frame into live555's buffer.
 *
 * Protocol with the producer: when *Framed_dosent is true, a complete frame
 * of *Framed_datasize bytes sits in Framed_databuf. We latch that size, hand
 * the frame to live555 in chunks of at most fMaxSize bytes, and once the
 * whole frame has been copied we set *Framed_dosent back to true so the
 * producer can publish the next frame.
 *
 * Fixes over the original code:
 *  - the first chunk copied fMaxSize bytes unconditionally, overreading
 *    Framed_databuf whenever the frame was smaller than fMaxSize;
 *  - the final chunk never updated fFrameSize, so afterGetting() reported a
 *    stale byte count for the tail of every frame.
 */
void H264FramedLiveSource::doGetNextFrame()
{
    if (*Framed_dosent == true)
    {
        // New frame published: latch its size and restart the read cursor.
        *Framed_dosent = false;
        bufsizel = *Framed_datasize;
        readbufsize = 0;
    }
    // Clamp the chunk to both the remaining frame bytes and live555's buffer.
    if (bufsizel - readbufsize > fMaxSize)
    {
        fFrameSize = fMaxSize;
    }
    else
    {
        fFrameSize = bufsizel - readbufsize;
    }
    memcpy(fTo, Framed_databuf + readbufsize, fFrameSize);
    readbufsize += fFrameSize;
    if (readbufsize >= bufsizel)
    {
        // Whole frame delivered; signal the producer we are ready for more.
        *Framed_dosent = true;
    }
    // Schedule afterGetting() with zero delay so live555 consumes fTo.
    nextTask() = envir().taskScheduler().scheduleDelayedTask(0, (TaskFunc*)FramedSource::afterGetting, this);
    return;
}
工程下载地址:点击打开链接
文章标题:Windows下利用live555实现H264实时流RTSP发送
文章链接:http://soscw.com/essay/30742.html