1,linux 環(huán)境:
官網上下載,下載地址:http://www.live555.com/liveMedia/public/
live555 版本:“2018.12.14”
參考:http://www.live555.com/liveMedia/faq.html 這個FAQ要仔細閱讀。
2,編譯
根據不同的平臺來配置,并生成對應的Makefile
2.1 ARM平臺:
修改交叉編譯工具
cp config.armlinux config.arm
vi config.arm
CROSS_COMPILE?= arm-buildroot-linux-uclibcgnueabi-
生成Makefile: ./genMakefiles arm
2.2 Linux 64位平臺(x86-64 ):
./genMakefiles linux-64bit
2.3 Linux 32位平臺(x86):
./genMakefiles linux
make
生成mediaServer/live555MediaServer
3,測試
3.1,mediaServer下 會生成 live555MediaServer。
live555MediaServer test.264
如果出現Correct this by increasing “OutPacketBuffer::maxSize” to at least 186818, before creating this ‘RTPSink’. (Current value is 100000.)
在DynamicRTSPServer.cpp文件ServerMediaSession* createNewSMS()
里修改OutPacketBuffer::maxSize
if (strcmp(extension, ".264") == 0) {
? ? // Assumed to be a H.264 Video Elementary Stream file:
? ? NEW_SMS("H.264 Video");
? ? OutPacketBuffer::maxSize = 300000; //100000;// allow for some possibly large H.264 frames
? ? sms->addSubsession(H264VideoFileServerMediaSubsession::createNew(env, fileName, reuseSource));
? }
createNewSMS是在RTSP setup時調用的。
3.2,testProgs
testProgs 目錄下各種測試文件,每個文件的作用和用法,官網上有詳細的介紹。這些測試用例目前基本上都是以文件的形式作為輸入源,下面重點介紹以實時流的形式作為輸入源的2種方法。
主要是參考testH264VideoStreamer 和testOnDemandRTSPServer來修改。
4.不用讀文件,使用實時視頻流作為輸入源
最簡單的方法:將實時視頻流推送到一個FIFO管道(或stdin),將文件名改為這個管道的文件名,這里不做詳細介紹了。注意:管道單次原子寫入的上限為4096字節(jié)(PIPE_BUF)。
4.1 ?方法1,rtsp_server_main.cpp
#include <liveMedia.hh>
#include <BasicUsageEnvironment.hh>
#include <GroupsockHelper.hh>
#include <rtsp_stream.h>
#include "LiveServerMediaSubsession.h"
#include <pthread.h>
#include "rtsp_server.h"
using namespace KHJ;
// Shared live555 state used by the server thread and the play/afterPlaying pair.
UsageEnvironment* env;
#ifdef RTSP_H265_SUPPORT
H265VideoStreamFramer* videoSource = NULL; // current framer feeding videoSink (H.265 build)
#else
H264VideoStreamFramer* videoSource = NULL; // current framer feeding videoSink (H.264 build)
#endif
// NOTE(review): videoSink is never assigned in this file, yet play()
// dereferences it - confirm it is created elsewhere before play() runs.
RTPSink* videoSink;
pthread_t rtsp_server_thread_id;
void play(); // forward
// RTSP server thread: builds the live555 usage environment, registers one
// media session named "stream", optionally enables RTSP-over-HTTP tunneling,
// then blocks forever in the event loop.
//
// Fix: the routine previously fell off the end without a return statement,
// which is undefined behaviour for a function returning void*.
void * rtsp_server_thread(void *)
{
    // Begin by setting up our usage environment:
    TaskScheduler* scheduler = BasicTaskScheduler::createNew();
    env = BasicUsageEnvironment::createNew(*scheduler);

    // Allow for large encoded frames (see the OutPacketBuffer::maxSize notes).
    OutPacketBuffer::maxSize = 512 * 1024;

    RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554);
    if (rtspServer == NULL) {
        *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
        // NOTE(review): exit() from a worker thread terminates the whole process.
        exit(1);
    }

    ServerMediaSession* sms = ServerMediaSession::createNew(*env, "stream", "",
        "Session streamed by \"testVideoStreamer\"", True);
    sms->addSubsession(KHJ::LiveServerMediaSubsession::createNew(*env));
    rtspServer->addServerMediaSession(sms);

    char* url = rtspServer->rtspURL(sms);
    *env << "Play this stream using the URL \"" << url << "\"\n";
    delete[] url; // rtspURL() returns a heap string the caller must free

    *env << "Beginning streaming...\n";
    //play();

    // Try the usual HTTP ports in order of preference.
    if (rtspServer->setUpTunnelingOverHTTP(80) || rtspServer->setUpTunnelingOverHTTP(8000) || rtspServer->setUpTunnelingOverHTTP(8080)) {
        *env << "(We use port " << rtspServer->httpServerPortNum() << " for optional RTSP-over-HTTP tunneling, or for HTTP live streaming (for indexed Transport Stream files only).)\n";
    } else {
        *env << "(RTSP-over-HTTP tunneling is not available.)\n";
    }

    env->taskScheduler().doEventLoop(); // never returns (no watch variable given)
    return NULL;                        // unreachable, but required by the signature
}
void afterPlaying(void* /*clientData*/) {
*env << "...done reading from file\n";
videoSink->stopPlaying();
if (videoSource)
Medium::close(videoSource);
play();
}
// (Re)starts streaming: builds a fresh live source, wraps it in the proper
// stream framer, and attaches it to the global videoSink.
void play()
{
// NOTE(review): H265Source is created even in the H.264 build below - it
// appears to be a generic live-frame source despite its name; confirm.
H265Source *fileSource = H265Source::createNew(*env);
FramedSource* videoES = fileSource;
#ifdef RTSP_H265_SUPPORT
videoSource = H265VideoStreamFramer::createNew(*env, videoES);
#else
videoSource = H264VideoStreamFramer::createNew(*env, videoES);
#endif
// Finally, start playing:
*env << "Beginning to read from file...\n";
// NOTE(review): videoSink is never assigned in this file - verify it is
// initialised elsewhere before this runs, otherwise this dereferences NULL.
videoSink->startPlaying(*videoSource, afterPlaying, videoSink);
}
// Spawns the RTSP server thread.
// Returns 0 on success, otherwise the pthread_create() error number.
int rtsp_server_start()
{
    int ret = 0;
    // Fix: pthread_create() returns 0 on success or a POSITIVE errno value on
    // failure - the previous "< 0" test could never detect an error.
    if ((ret = pthread_create(&rtsp_server_thread_id, NULL, rtsp_server_thread, NULL)) != 0)
    {
        printf("create rtsp_server_thread fail\n");
    }
    return ret;
}
// Stop hook - currently a no-op placeholder; the live555 objects are kept for
// the whole process lifetime (cleanup intentionally left disabled below).
int rtsp_server_stop()
{
    // delete env;
    // delete videoSource;
    return 0;
}
#if 0
// Standalone smoke test (compiled out): start the server thread, then block
// on stdin so the process stays alive while clients connect.
int main()
{
rtsp_server_start();
getchar();
}
#endif
4.2 方法2,參考testOnDemandRTSPServer
1)set the variable “reuseFirstSource” to “True”
2)根據類H264VideoFileServerMediaSubsession,新建一個新類H264LiveVideoServerMediaSubsession, implementation of the two pure virtual functions “createNewStreamSource()” and “createNewRTPSink()”
在createNewStreamSource()里用上面的H264LiveVideoSource代替ByteStreamFileSource。
H264VideoRTPSink繼承關系:
H264VideoRTPSink->H264or5VideoRTPSink->VideoRTPSink->MultiFramedRTPSink->RTPSink->MediaSink->Medium。
H264VideoRTPSource繼承關系:
H264VideoRTPSource->MultiFramedRTPSource->RTPSource->FramedSource->MediaSource->Medium.
H264VideoStreamFramer繼承關系:
H264VideoStreamFramer->H264or5VideoStreamFramer->MPEGVideoStreamFramer->FramedFilter->FramedSource ->MediaSource->Medium.
下面列出具體實現
h265source.h
#ifndef MESAI_H265_SOURCE_HH
#define MESAI_H265_SOURCE_HH
#include <FramedSource.hh>
#include <UsageEnvironment.hh>
#include <Groupsock.hh>
namespace RTSP_DEMO
{
// Live frame source for live555: occupies one client slot (fp) in the global
// slot table and delivers packets through the FramedSource interface.
class H265Source : public FramedSource {
public:
// live555-style factory; construction goes through here (ctor is private).
static H265Source* createNew(UsageEnvironment& env);
~H265Source();
private:
H265Source(UsageEnvironment& env);
virtual void doGetNextFrame();       // deliver the next packet into fTo
virtual void doStopGettingFrames();  // called when the sink stops reading
private:
int fp; // client slot index into the global sfp[] table; -1 when unallocated
};
}
#endif
h265source.cpp文件如下:
// Client slot table: sfp[i] == -1 marks slot i free; otherwise it holds the
// slot's own index, marking it in use by a live H265Source instance.
static int sfp[MAX_CLIENT_NUM] = {-1, -1, -1, -1};
// Fix: the brace initializer hard-codes exactly four free slots; if
// MAX_CLIENT_NUM grew, the extra entries would be zero-initialised and look
// permanently "in use by slot 0". Fail the build instead of silently breaking.
static_assert(MAX_CLIENT_NUM == 4, "sfp initializer assumes 4 client slots");
namespace RTSP_DEMO
{
// Factory entry point, mirroring the live555 createNew() convention.
H265Source* H265Source::createNew(UsageEnvironment& env)
{
	H265Source* source = new H265Source(env);
	return source;
}
// Claims the first free slot in the global sfp[] table: fp becomes this
// source's client index, or stays -1 when every slot is busy.
H265Source::H265Source(UsageEnvironment& env) : FramedSource(env)
{
printf("%s--->%d\n", __FUNCTION__, __LINE__);
int i=0;
fp = -1;
// Linear scan for a free slot; a slot is marked taken by storing its own index.
for(i=0; i<MAX_CLIENT_NUM; i++)
{
if(sfp[i] == -1)
{
fp = i;
sfp[i] = fp;
break;
}
}
// NOTE(review): when all slots are busy we return with fp == -1 but the
// object is still fully constructed - callers get an unusable source with
// no error signal. Consider rejecting the client earlier.
if(fp<0)
{
return;
}
// Reset the per-client frame bookkeeping for this slot.
// NOTE(review): client_frame_info is defined elsewhere - confirm its size
// matches MAX_CLIENT_NUM.
memset(&client_frame_info[fp], 0, sizeof(client_frame_info[fp]));
printf("%s--->%d, fp is %d\n", __FUNCTION__, __LINE__, fp);
}
// Releases this source's slot back to the pool so a new client can reuse it.
H265Source::~H265Source()
{
	printf("%s--->%d, fp is %d\n", __FUNCTION__, __LINE__, fp);
	for (int slot = 0; slot < MAX_CLIENT_NUM; slot++)
	{
		if (sfp[slot] != fp)
			continue;
		// Mark both the member and the table entry as free.
		fp = -1;
		sfp[slot] = fp;
		break;
	}
	printf("%s--->%d, fp is %d\n", __FUNCTION__, __LINE__, fp);
}
// Called by live555 when the sink stops reading; nothing to tear down here
// beyond logging - the slot itself is released in the destructor.
void H265Source::doStopGettingFrames()
{
	printf("%s--->%d\n", __FUNCTION__, __LINE__);
}
// live555 pull callback: copy up to one packet (max 10 KiB, capped by the
// sink's remaining buffer fMaxSize) into fTo and signal delivery.
void H265Source::doGetNextFrame()
{
	// Only deliver when the sink is actually waiting for data (live555 contract).
	if (!isCurrentlyAwaitingData()) return;
	int ret = 0;
	int len = 10 * 1024;
	// Fix: fMaxSize is unsigned - cast to avoid a signed/unsigned comparison.
	if ((unsigned)len > fMaxSize)
		len = fMaxSize;
	gettimeofday(&fPresentationTime, NULL);
	fNumTruncatedBytes = 0;
	// Fix: removed the dead "agin:" label (a typo of "again" with no matching
	// goto) that the original left behind.
	if ((ret = read_packet(fp, fTo, len)) > 0) {
		fFrameSize = ret;
	} else {
		fFrameSize = 0;
	}
	if (fFrameSize > 0)
		FramedSource::afterGetting(this);
	// NOTE(review): when read_packet() yields no data we neither deliver nor
	// reschedule, so the stream can stall - consider polling again via
	// envir().taskScheduler().scheduleDelayedTask().
}
}
live555常用修改點:
1, 輸入的一幀數據最大值
StreamParser.cpp
#define BANK_SIZE 1500000 //幀越大,這個值就要越大
2, rtp buffer最大值
(1)Source端使用 MultiFramedRTPSource.cpp
BufferedPacket::BufferedPacket()
定義輸入Buffer的上限值,即BufferedPacket的最大值
#define MAX_PACKET_SIZE 65536
(2)Sink端使用 MultiFramedRTPSink.cpp
#define RTP_PAYLOAD_MAX_SIZE 1456 //(1500-14-20-8)/4 *4 //ethernet=14,IP=20, UDP=8, a multiple of 4 bytes
MediaSink.cpp
靜態(tài)變量OutPacketBuffer::maxSize = 600000; // allow for some possibly large H.265 frames,默認值為100000
最好是RTP_PAYLOAD_MAX_SIZE的整數倍
值小了,會不斷打印信息: Correct this by increasing “OutPacketBuffer::maxSize” to at least
3,獲取IP地址失敗
RTSPServer::rtspURLPrefix(){
ourIPAddress(envir())
}
GroupsockHelper.cpp
ourIPAddress()
{
    if (badAddressForUs(from)) {
      #if 0
      char tmp[100];
      sprintf(tmp, "This computer has an invalid IP address: %s", AddressString(from).val());
      env.setResultMsg(tmp);
      from = 0;
      #endif
      struct ifreq req;
      int ret = 0;
      char szIpBuf[32];
      sock = socket(AF_INET, SOCK_DGRAM, 0);
      if (-1 != sock)
      {
          memset(&req, 0, sizeof(req));
          strncpy(req.ifr_name, "eth0", sizeof(req.ifr_name));
          ret = ioctl(sock, SIOCGIFADDR, &req);
          if (-1 == ret)
          {
              close(sock);
          }
          else
          {
              memset(&szIpBuf, 0, sizeof(szIpBuf));
              strcpy(szIpBuf, inet_ntoa(((struct sockaddr_in *)&req.ifr_addr)->sin_addr));
              close(sock);
              fromAddr.sin_addr.s_addr = our_inet_addr(szIpBuf);
              from = fromAddr.sin_addr.s_addr;
          }
      }
      else
      {
          char tmp[100];
          sprintf(tmp, "This computer has an invalid IP address: %s", AddressString(from).val());
          env.setResultMsg(tmp);
          from = 0;
      }
}
3,內存泄漏點
RTCPInstance::processIncomingReport
if(NULL != reason)
{
delete[] reason;
reason = NULL;
}
在申請內存時加上上面釋放語句
reason = new char[reasonLength + 1];
4,fill sei data DeltaTfiDivisor
H264or5VideoStreamParser::H264or5VideoStreamParser()
{
	// Fix: the original comment wrapped onto an uncommented line, which does
	// not compile as shown. Per the H.264/H.265 specs, when SEI data is not
	// filled in, frame_field_info_present_flag is zero, so DeltaTfiDivisor
	// must be set manually: 2.0 for H.264 and 1.0 for H.265.
	if(fHNumber == 264) {
		DeltaTfiDivisor = 2.0;
	} else {
		DeltaTfiDivisor = 1.0;
	}
}
5,長時間拉取RTSP流
報錯誤"Hit limit when reading incoming packet over TCP"
可考慮提高maxRTCPPacketSize的值
RTCP.CPP
static unsigned const maxRTCPPacketSize = 1456;
6,如播放越久延時越大
MultiFramedRTPSink.cpp->MultiFramedRTPSink::sendPacketIfNecessary() 最后延時列隊uSecondsToGo 每幀都有延時時間。將uSecondsToGo 值賦為0。
7, 裁剪文章來源:http://www.zghlxwxcb.cn/news/detail-401971.html
只需留下這些目錄(BasicUsageEnvironment、groupsock、liveMedia、mediaServer、UsageEnvironment),其它可刪除掉。
其中l(wèi)iveMedia目錄下有很多類型的文件,不需要的也可刪除,同時修改
MediaSubsession::createSourceObjects()把相關類型的createNew也刪除掉,否則編譯失敗。文章來源地址http://www.zghlxwxcb.cn/news/detail-401971.html
到了這里,關于live555推送實時視頻流的文章就介紹完了。如果您還想了解更多內容,請在右上角搜索TOY模板網以前的文章或繼續(xù)瀏覽下面的相關文章,希望大家以后多多支持TOY模板網!