Reposted from: http://blog.csdn.net/firehood_/article/details/16844397
A previous article, 《H264视频通过RTMP流直播》, covered streaming H264 video over RTMP; this one describes how to stream live H264 video over RTSP.
The idea is to hand the video stream to live555 and let live555 serve the H264 data over RTSP.
The video capture module pushes H264 frames into a FIFO (named pipe). When live555 receives an RTSP play request from a client, it starts reading H264 data from the FIFO and streams it out over RTSP. The overall flow is: capture module → FIFO → live555 RTSP server → RTSP client.
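Before looking at the live555 changes, here is a minimal sketch of the FIFO handoff itself. It assumes the named pipe `/tmp/H264_fifo` that the code later in this article uses; the helper function names are only illustrative.

```cpp
// Minimal sketch of the FIFO handoff (POSIX). The path matches the
// FIFO_NAME used by the code later in this article.
#include <fcntl.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>
#include <cstdio>

static const char *kFifoPath = "/tmp/H264_fifo";

// Capture side: create the FIFO once, then write an encoded frame into it.
void writer_push_frame(const unsigned char *frame, size_t size) {
    mkfifo(kFifoPath, 0777);              // fails harmlessly if it already exists
    int fd = open(kFifoPath, O_WRONLY);   // blocks until a reader has opened it
    if (fd < 0) { perror("open fifo for write"); return; }
    write(fd, frame, size);
    close(fd);
}

// live555 side: read whatever the writer has pushed (blocking read).
ssize_t reader_pull(unsigned char *buf, size_t cap) {
    int fd = open(kFifoPath, O_RDONLY);
    if (fd < 0) { perror("open fifo for read"); return -1; }
    ssize_t n = read(fd, buf, cap);       // blocks until data is available
    close(fd);
    return n;
}
```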
Adjusting and modifying the Live555 MediaServer
Download the live555 source code, add four files under the media directory, and modify live555MediaServer.cpp. The four new files are:
WW_H264VideoServerMediaSubsession.h
WW_H264VideoServerMediaSubsession.cpp
WW_H264VideoSource.h
WW_H264VideoSource.cpp
The source code of these four files is given below.
WW_H264VideoServerMediaSubsession.h
```cpp
#pragma once

#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"
#include "GroupsockHelper.hh"
#include "OnDemandServerMediaSubsession.hh"
#include "WW_H264VideoSource.h"

class WW_H264VideoServerMediaSubsession : public OnDemandServerMediaSubsession
{
public:
    WW_H264VideoServerMediaSubsession(UsageEnvironment & env, FramedSource * source);
    ~WW_H264VideoServerMediaSubsession(void);

public:
    virtual char const * getAuxSDPLine(RTPSink * rtpSink, FramedSource * inputSource);
    virtual FramedSource * createNewStreamSource(unsigned clientSessionId, unsigned & estBitrate); // "estBitrate" is the stream's estimated bitrate, in kbps
    virtual RTPSink * createNewRTPSink(Groupsock * rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource * inputSource);

    static WW_H264VideoServerMediaSubsession * createNew(UsageEnvironment & env, FramedSource * source);
    static void afterPlayingDummy(void * ptr);
    static void chkForAuxSDPLine(void * ptr);
    void chkForAuxSDPLine1();

private:
    FramedSource * m_pSource;
    char * m_pSDPLine;
    RTPSink * m_pDummyRTPSink;
    char m_done;
};
```
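The two overrides that matter here are createNewStreamSource() and createNewRTPSink(): OnDemandServerMediaSubsession calls them when a client sets up the stream, so this subclass only has to hand back the FIFO-backed source (wrapped in an H264VideoStreamFramer) and a standard H264VideoRTPSink. getAuxSDPLine() is overridden as well so the SDP can advertise the H264 parameter sets.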
WW_H264VideoServerMediaSubsession.cpp
- #include "WW_H264VideoServerMediaSubsession.h"
- WW_H264VideoServerMediaSubsession::WW_H264VideoServerMediaSubsession(UsageEnvironment & env, FramedSource * source) : OnDemandServerMediaSubsession(env, True)
- {
- m_pSource = source;
- m_pSDPLine = 0;
- }
- WW_H264VideoServerMediaSubsession::~WW_H264VideoServerMediaSubsession(void)
- {
- if (m_pSDPLine)
- {
- free(m_pSDPLine);
- }
- }
- WW_H264VideoServerMediaSubsession * WW_H264VideoServerMediaSubsession::createNew(UsageEnvironment & env, FramedSource * source)
- {
- return new WW_H264VideoServerMediaSubsession(env, source);
- }
- FramedSource * WW_H264VideoServerMediaSubsession::createNewStreamSource(unsigned clientSessionId, unsigned & estBitrate)
- {
- return H264VideoStreamFramer::createNew(envir(), new WW_H264VideoSource(envir()));
- }
- RTPSink * WW_H264VideoServerMediaSubsession::createNewRTPSink(Groupsock * rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource * inputSource)
- {
- return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
- }
- char const * WW_H264VideoServerMediaSubsession::getAuxSDPLine(RTPSink * rtpSink, FramedSource * inputSource)
- {
- if (m_pSDPLine)
- {
- return m_pSDPLine;
- }
- m_pDummyRTPSink = rtpSink;
- //mp_dummy_rtpsink->startPlaying(*source, afterPlayingDummy, this);
- m_pDummyRTPSink->startPlaying(*inputSource, 0, 0);
- chkForAuxSDPLine(this);
- m_done = 0;
- envir().taskScheduler().doEventLoop(&m_done);
- m_pSDPLine = strdup(m_pDummyRTPSink->auxSDPLine());
- m_pDummyRTPSink->stopPlaying();
- return m_pSDPLine;
- }
- void WW_H264VideoServerMediaSubsession::afterPlayingDummy(void * ptr)
- {
- WW_H264VideoServerMediaSubsession * This = (WW_H264VideoServerMediaSubsession *)ptr;
- This->m_done = 0xff;
- }
- void WW_H264VideoServerMediaSubsession::chkForAuxSDPLine(void * ptr)
- {
- WW_H264VideoServerMediaSubsession * This = (WW_H264VideoServerMediaSubsession *)ptr;
- This->chkForAuxSDPLine1();
- }
- void WW_H264VideoServerMediaSubsession::chkForAuxSDPLine1()
- {
- if (m_pDummyRTPSink->auxSDPLine())
- {
- m_done = 0xff;
- }
- else
- {
- double delay = 1000.0 / (FRAME_PER_SEC); // ms
- int to_delay = delay * 1000; // us
- nextTask() = envir().taskScheduler().scheduleDelayedTask(to_delay, chkForAuxSDPLine, this);
- }
- }
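The dummy-sink dance in getAuxSDPLine() is needed because the SDP for an H264 stream has to carry the SPS/PPS parameter sets, and those are only known after the framer has parsed some actual stream data; the watch variable m_done lets the temporary event loop exit as soon as the sink has seen them.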
WW_H264VideoSource.h

```cpp
#ifndef _WW_H264VideoSource_H
#define _WW_H264VideoSource_H

#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"
#include "GroupsockHelper.hh"
#include "FramedSource.hh"

#define FRAME_PER_SEC 25

class WW_H264VideoSource : public FramedSource
{
public:
    WW_H264VideoSource(UsageEnvironment & env);
    ~WW_H264VideoSource(void);

public:
    virtual void doGetNextFrame();
    virtual unsigned int maxFrameSize() const;

    static void getNextFrame(void * ptr);
    void GetFrameData();

private:
    void *m_pToken;
    char *m_pFrameBuffer;
    int m_hFifo;
};

#endif
```
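As a FramedSource subclass, the contract with live555 is: when doGetNextFrame() is called, the source must eventually copy one chunk of data into fTo (at most fMaxSize bytes), set fFrameSize, fNumTruncatedBytes and fPresentationTime, and then call FramedSource::afterGetting(); that is exactly what GetFrameData() in the implementation below does.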
WW_H264VideoSource.cpp
- #include "WW_H264VideoSource.h"
- #include
- #ifdef WIN32
- #include
- #else
- #include
- #include
- #include
- #include
- #include
- #include
- #endif
- #define FIFO_NAME "/tmp/H264_fifo"
- #define BUFFER_SIZE PIPE_BUF
- #define REV_BUF_SIZE (1024*1024)
- #ifdef WIN32
- #define mSleep(ms) Sleep(ms)
- #else
- #define mSleep(ms) usleep(ms*1000)
- #endif
- WW_H264VideoSource::WW_H264VideoSource(UsageEnvironment & env) :
- FramedSource(env),
- m_pToken(0),
- m_pFrameBuffer(0),
- m_hFifo(0)
- {
- m_hFifo = open(FIFO_NAME,O_RDONLY);
- printf("[MEDIA SERVER] open fifo result = [%d]\n",m_hFifo);
- if(m_hFifo == -1)
- {
- return;
- }
- m_pFrameBuffer = new char[REV_BUF_SIZE];
- if(m_pFrameBuffer == NULL)
- {
- printf("[MEDIA SERVER] error malloc data buffer failed\n");
- return;
- }
- memset(m_pFrameBuffer,0,REV_BUF_SIZE);
- }
- WW_H264VideoSource::~WW_H264VideoSource(void)
- {
- if(m_hFifo)
- {
- ::close(m_hFifo);
- }
- envir().taskScheduler().unscheduleDelayedTask(m_pToken);
- if(m_pFrameBuffer)
- {
- delete[] m_pFrameBuffer;
- m_pFrameBuffer = NULL;
- }
- printf("[MEDIA SERVER] rtsp connection closed\n");
- }
- void WW_H264VideoSource::doGetNextFrame()
- {
- // 根据 fps,计算等待时间
- double delay = 1000.0 / (FRAME_PER_SEC * 2); // ms
- int to_delay = delay * 1000; // us
- m_pToken = envir().taskScheduler().scheduleDelayedTask(to_delay, getNextFrame, this);
- }
- unsigned int WW_H264VideoSource::maxFrameSize() const
- {
- return 1024*200;
- }
- void WW_H264VideoSource::getNextFrame(void * ptr)
- {
- ((WW_H264VideoSource *)ptr)->GetFrameData();
- }
- void WW_H264VideoSource::GetFrameData()
- {
- gettimeofday(&fPresentationTime, 0);
- fFrameSize = 0;
- int len = 0;
- unsigned char buffer[BUFFER_SIZE] = {0};
- while((len = read(m_hFifo,buffer,BUFFER_SIZE))>0)
- {
- memcpy(m_pFrameBuffer+fFrameSize,buffer,len);
- fFrameSize+=len;
- }
- //printf("[MEDIA SERVER] GetFrameData len = [%d],fMaxSize = [%d]\n",fFrameSize,fMaxSize);
- // fill frame data
- memcpy(fTo,m_pFrameBuffer,fFrameSize);
- if (fFrameSize > fMaxSize)
- {
- fNumTruncatedBytes = fFrameSize - fMaxSize;
- fFrameSize = fMaxSize;
- }
- else
- {
- fNumTruncatedBytes = 0;
- }
- afterGetting(this);
- }
Modify live555MediaServer.cpp as follows:
```cpp
/**********
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation; either version 2.1 of the License, or (at your
option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)

This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
more details.

You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
**********/
// Copyright (c) 1996-2013, Live Networks, Inc. All rights reserved
// LIVE555 Media Server
// main program

#include <BasicUsageEnvironment.hh>
#include "DynamicRTSPServer.hh"
#include "version.hh"
#include "WW_H264VideoSource.h"
#include "WW_H264VideoServerMediaSubsession.h"

int main(int argc, char** argv) {
  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  UserAuthenticationDatabase* authDB = NULL;
#ifdef ACCESS_CONTROL
  // To implement client access control to the RTSP server, do the following:
  authDB = new UserAuthenticationDatabase;
  authDB->addUserRecord("username1", "password1"); // replace these with real strings
  // Repeat the above with each <username>, <password> that you wish to allow
  // access to the server.
#endif

  // Create the RTSP server:
  RTSPServer* rtspServer = RTSPServer::createNew(*env, 554, authDB);
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }

  // Create a media session for the live H264 stream and attach the on-demand
  // subsession defined above. (The stream name "live" is just an example; the
  // subsession creates its own WW_H264VideoSource per client, so no source
  // needs to be passed in here.)
  ServerMediaSession* sms = ServerMediaSession::createNew(*env, "live", "live", "H264 live stream via live555");
  sms->addSubsession(WW_H264VideoServerMediaSubsession::createNew(*env, NULL));
  rtspServer->addServerMediaSession(sms);

  char* url = rtspServer->rtspURL(sms);
  *env << "Play this stream using the URL \"" << url << "\"\n";
  delete[] url;

  env->taskScheduler().doEventLoop(); // does not return

  rtspServer->removeServerMediaSession(sms);
  Medium::close(rtspServer);
  env->reclaim();
  delete scheduler;
  return 1;
}
```
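Once this is built and running, and the capture side is feeding the FIFO, the stream can be opened with any RTSP client such as VLC or ffplay; the exact URL of the form rtsp://&lt;server-ip&gt;:554/&lt;stream-name&gt; is printed to the console at startup by the rtspURL() call above.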
On the capture side, the CRTSPStream class creates the FIFO and writes H264 data into it for live555 to read. Its implementation follows.

RTSPStream.h

```cpp
/********************************************************************
filename:   RTSPStream.h
created:    2013-08-01
author:     firehood
purpose:    H264 RTSP live streaming via live555
*********************************************************************/
#pragma once
#include <stdio.h>

#ifdef WIN32
#include <windows.h>
#else
#include <unistd.h>
#include <pthread.h>
#endif

#ifdef WIN32
typedef HANDLE ThreadHandle;
#define mSleep(ms) Sleep(ms)
#else
typedef unsigned int SOCKET;
typedef pthread_t ThreadHandle;
#define mSleep(ms) usleep(ms*1000)
#endif

#define FILEBUFSIZE (1024 * 1024)

class CRTSPStream
{
public:
    CRTSPStream(void);
    ~CRTSPStream(void);
public:
    // Initialize (creates the FIFO if it does not exist)
    bool Init();
    // Uninitialize
    void Uninit();
    // Send an H264 file
    bool SendH264File(const char *pFileName);
    // Send one H264 data frame
    int SendH264Data(const unsigned char *data, unsigned int size);
};
```
RTSPStream.cpp

```cpp
/********************************************************************
filename:   RTSPStream.cpp
created:    2013-08-01
author:     firehood
purpose:    H264 RTSP live streaming via live555
*********************************************************************/
#include "RTSPStream.h"

#ifdef WIN32
#else
#include <unistd.h>
#include <fcntl.h>
#include <limits.h>
#include <string.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <stdio.h>
#endif

#define FIFO_NAME   "/tmp/H264_fifo"
#define BUFFERSIZE  PIPE_BUF

CRTSPStream::CRTSPStream(void)
{
}

CRTSPStream::~CRTSPStream(void)
{
}

bool CRTSPStream::Init()
{
    // Create the named pipe if it does not exist yet.
    if (access(FIFO_NAME, F_OK) == -1)
    {
        int res = mkfifo(FIFO_NAME, 0777);
        if (res != 0)
        {
            printf("[RTSPStream] Create fifo failed.\n");
            return false;
        }
    }
    return true;
}

void CRTSPStream::Uninit()
{
}

bool CRTSPStream::SendH264File(const char *pFileName)
{
    if (pFileName == NULL)
    {
        return false;
    }
    FILE *fp = fopen(pFileName, "rb");
    if (!fp)
    {
        printf("[RTSPStream] error: open file %s failed!\n", pFileName);
        return false;
    }
    fseek(fp, 0, SEEK_SET);
    unsigned char *buffer = new unsigned char[FILEBUFSIZE];
    int pos = 0;
    while (1)
    {
        int readlen = fread(buffer + pos, sizeof(unsigned char), FILEBUFSIZE - pos, fp);
        if (readlen <= 0)
        {
            break;
        }
        // ... (the remainder of this listing is cut off in the repost; see the
        // sketch below for the essential write-to-FIFO logic)
    }
    fclose(fp);
    delete[] buffer;
    return true;
}
```
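The repost cuts the listing off before SendH264Data(), the function the capture module calls for each encoded frame. Its essential job follows from the rest of the code: open the FIFO write-only and push the frame bytes into it in PIPE_BUF-sized chunks. The version below is a minimal sketch of that logic under those assumptions, not the author's original code; the member function name and signature come from RTSPStream.h above, and it relies on the FIFO_NAME/BUFFERSIZE macros and includes from RTSPStream.cpp.

```cpp
// Minimal sketch of the truncated sender logic, assuming each frame is simply
// written into the FIFO in BUFFERSIZE (PIPE_BUF) chunks. Not the original code.
int CRTSPStream::SendH264Data(const unsigned char *data, unsigned int size)
{
    if (data == NULL || size == 0)
    {
        return 0;
    }

    // Open the FIFO for writing; this blocks until the live555 side has
    // opened it for reading.
    int fd = open(FIFO_NAME, O_WRONLY);
    if (fd == -1)
    {
        printf("[RTSPStream] open fifo for writing failed\n");
        return -1;
    }

    unsigned int sent = 0;
    while (sent < size)
    {
        unsigned int chunk = size - sent;
        if (chunk > BUFFERSIZE)
        {
            chunk = BUFFERSIZE;     // writes up to PIPE_BUF bytes are atomic
        }
        int ret = write(fd, data + sent, chunk);
        if (ret <= 0)
        {
            printf("[RTSPStream] write fifo failed\n");
            break;
        }
        sent += ret;
    }

    close(fd);
    return (int)sent;
}
```

Opening and closing the FIFO for every frame is simple but adds overhead; keeping a single write descriptor open for the whole session is an obvious alternative.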