Overview
Live555 is an open-source RTSP library written in C++. Out of the box it streams with files as the data source, and it implements the streaming (framing and packetization) of many audio and video formats.
A brief look at RTSP
RTSP is a media streaming control protocol that runs over TCP. Its main jobs are the handshake when a connection is established and the control of media playback (PLAY, PAUSE, TEARDOWN, and so on). The media data itself travels over RTP, while transmission statistics and control feedback travel over RTCP; whether RTP and RTCP actually run over UDP or TCP is decided during the RTSP SETUP phase, according to the client's request.
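For illustration, the client expresses this choice in the Transport header of its SETUP request (the address, ports and track name below are made-up examples):

SETUP rtsp://192.168.1.10:8554/live/track1 RTSP/1.0
CSeq: 3
Transport: RTP/AVP;unicast;client_port=50000-50001

SETUP rtsp://192.168.1.10:8554/live/track1 RTSP/1.0
CSeq: 3
Transport: RTP/AVP/TCP;unicast;interleaved=0-1

The first form asks for RTP and RTCP over UDP on the given client ports; the second asks for the packets to be interleaved over the existing RTSP TCP connection.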
Implementing custom data-stream transmission
First, implement a fairly generic MediaSubsession that derives from OnDemandServerMediaSubsession:
#include "OnDemandServerMediaSubsession.hh"

class ComMediaSubsession : public OnDemandServerMediaSubsession
{
public:
    static ComMediaSubsession* createNew(UsageEnvironment& env, int streamType, int videoType, int channelNO,
                                         bool reuseFirstSource, portNumBits initalNumPort = 6970);
protected:
    ComMediaSubsession(UsageEnvironment& env, int streamType, int videoType, int channelNO,
                       bool reuseFirstSource, portNumBits initalNumPort);
    virtual ~ComMediaSubsession();
protected:
    // Overrides of the base class's pure virtual factory methods: create the per-client
    // frame source and the matching RTP sink.
    virtual FramedSource* createNewStreamSource(unsigned clientsessionId, unsigned& estBitrate);
    virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource* inputSource);
public:
    int fStreamType;
    int fVideoType;
    int fChannelNO;
};
#include "ComMediaSubsession.h"
#include "ComFrameSource.h"
#include "H264VideoStreamFramer.hh"
#include "H264VideoRTPSink.hh"
#include "MP3ADURTPSink.hh"
#include "MPEG1or2AudioRTPSink.hh"
ComMediaSubsession::ComMediaSubsession(UsageEnvironment& env, int streamType, int videoType, int channelNO, bool reuseFirstSource, portNumBits initalNumPort)
    : OnDemandServerMediaSubsession(env, reuseFirstSource, initalNumPort),
      fStreamType(streamType), fVideoType(videoType), fChannelNO(channelNO)
{
}
ComMediaSubsession::~ComMediaSubsession()
{
}
ComMediaSubsession* ComMediaSubsession::createNew(UsageEnvironment& env, int streamType, int videoType, int channelNO,
                                                  bool reuseFirstSource, portNumBits initalNumPort)
{
    ComMediaSubsession* sms = new ComMediaSubsession(env, streamType, videoType, channelNO, reuseFirstSource, initalNumPort);
    return sms;
}
FramedSource* ComMediaSubsession::createNewStreamSource(unsigned clientsessionId, unsigned& estBitrate)
{
    if (fVideoType == 0x01)
    {
        // H.264 video: wrap the raw source in a framer so the sink receives discrete NAL units.
        estBitrate = 2000; // kbps
        ComFrameSource* source = ComFrameSource::createNew(envir(), fStreamType, fChannelNO, 0);
        if (source == NULL)
        {
            DBG_LIVE555_PRINT("create source failed videoType:%d!\n", fVideoType);
            return NULL;
        }
        return H264VideoStreamFramer::createNew(envir(), source);
    }
    else if (fVideoType == 0x02)
    {
        // Audio stream (paired with the MP3 ADU sink created in createNewRTPSink below).
        estBitrate = 128; // kbps
        ComFrameSource* source = ComFrameSource::createNew(envir(), fStreamType, fChannelNO, 1);
        if (source == NULL)
        {
            DBG_LIVE555_PRINT("create source failed videoType:%d!\n", fVideoType);
            return NULL;
        }
        return source;
    }
    return NULL;
}
RTPSink* ComMediaSubsession::createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource* inputSource)
{
    if (fVideoType == 0x01)
    {
        // H.264 video
        return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
    }
    else if (fVideoType == 0x02)
    {
        // MP3 ADU audio
        return MP3ADURTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
    }
    return NULL;
}
Next, implement the data-acquisition (frame source) class:
#include "FramedSource.hh"
class ComFrameSource:public FramedSource
{
public:
static ComFrameSource* createNew(UsageEnvironment& env, int streamtype, int channelno, int sourceType);
protected:
ComFrameSource(UsageEnvironment& env, long sourceHandle, int sourceType);
~ComFrameSource();
private:
virtual void doGetNextFrame();
public:
void doStopGetFrame();
public:
unsigned fLastBufSize;
unsigned fLeftDataSize;
int fSourceType;
int fFirstFrame;
};
#include "ComFrameSource.h"
ComFrameSource::ComFrameSource(UsageEnvironment& env, int sourceType):
FramedSource(env), fLastBufSize(0), fLeftDataSize(0), fSourceType(sourceType), fFirstFrame(1)
{
}
ComFrameSource::~ComFrameSource()
{
}
ComFrameSource* ComFrameSource::createNew(UsageEnvironment& env, int streamType, int channelNO, int sourceType)
{
return new ComFrameSource(env, sourceType);
}
void ComFrameSource::doGetNextFrame()
{
    int ret = 0;
    // Fill fTo with one frame of data, at most fMaxSize bytes.
    // getStreamData() stands for the application's own data-access function.
    ret = getStreamData(fSourceHandle, (char*)fTo, fMaxSize);
    if (ret <= 0)
    {
        // No data available yet: report an empty frame and let the scheduler call us again.
        fFrameSize = 0;
        nextTask() = envir().taskScheduler().scheduleDelayedTask(10,
            (TaskFunc*)FramedSource::afterGetting, this);
        return;
    }
    fFrameSize = (unsigned)ret; // afterGetting() delivers exactly fFrameSize bytes downstream
    gettimeofday(&fPresentationTime, NULL);
    // Switch to another task, and inform the reader that it has data:
    nextTask() = envir().taskScheduler().scheduleDelayedTask(10,
        (TaskFunc*)FramedSource::afterGetting, this);
}
void ComFrameSource::doStopGetFrame()
{
}
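With ComMediaSubsession and ComFrameSource in place, the subsession just needs to be registered with an RTSP server. The sketch below shows one way to wire it up, modelled on live555's testOnDemandRTSPServer; the stream name "live", port 8554 and the streamType/videoType/channelNO values are illustrative assumptions rather than part of the code above.

#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"
#include "ComMediaSubsession.h"

int main()
{
    // Standard live555 plumbing: task scheduler plus usage environment.
    TaskScheduler* scheduler = BasicTaskScheduler::createNew();
    UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

    RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554);
    if (rtspServer == NULL)
    {
        *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
        return 1;
    }

    // One ServerMediaSession per stream name; videoType 0x01 selects the H.264 path above.
    ServerMediaSession* sms = ServerMediaSession::createNew(*env, "live", "live", "ComMediaSubsession demo");
    sms->addSubsession(ComMediaSubsession::createNew(*env, /*streamType*/0, /*videoType*/0x01,
                                                     /*channelNO*/0, /*reuseFirstSource*/true));
    rtspServer->addServerMediaSession(sms);

    char* url = rtspServer->rtspURL(sms);
    *env << "Play this stream using the URL \"" << url << "\"\n";
    delete[] url;

    env->taskScheduler().doEventLoop(); // does not return
    return 0;
}

Setting reuseFirstSource to true makes every client share the same ComFrameSource instance, which is usually what you want for a live device that cannot be opened more than once.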
That concludes this walkthrough of pushing a custom data stream with Live555.