I. Obtaining the Hikvision RTSP video stream
Hikvision's latest official RTSP stream URL format is:
rtsp://username:password@IP:554/Streaming/Channels/101
The username and password are the credentials you use to log in to the camera.
The IP is the camera's IP address (mine is 192.168.1.210).
The trailing 101 selects channel 1's main stream (102 would select channel 1's sub-stream).
So my RTSP stream URL is rtsp://username:password@192.168.1.210:554/Streaming/Channels/101
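If you assemble the URL in code, here is a minimal sketch of a helper (the function name and the example values are my own placeholders, not something from Hikvision's documentation):
// Hypothetical helper: assembles a Hikvision RTSP URL from its parts.
// The credentials and IP used in the example below are placeholders.
function buildHikvisionRtspUrl(username, password, ip, channel = 1, mainStream = true) {
  // 101 = channel 1 main stream, 102 = channel 1 sub-stream, 201 = channel 2 main stream, ...
  const streamCode = channel * 100 + (mainStream ? 1 : 2);
  return `rtsp://${encodeURIComponent(username)}:${encodeURIComponent(password)}@${ip}:554/Streaming/Channels/${streamCode}`;
}
// Example: buildHikvisionRtspUrl('admin', 'your-password', '192.168.1.210')
// -> "rtsp://admin:your-password@192.168.1.210:554/Streaming/Channels/101"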
II. Testing whether the RTSP stream can be played
1. Camera configuration required for RTSP streaming
1.1 Disable EZVIZ (螢石云) cloud access
1.2 Set the video encoding to H.264
2. Install the VLC player
Download VLC media player from the official VideoLAN website.
After installation, open VLC.
In VLC, open a network stream and enter the RTSP URL.
If the RTSP URL is correct and the stream is pulled successfully, VLC displays the live camera picture. Otherwise it reports an error; the details are written to the log, which you can view under Tools > Messages.
III. Using the Hikvision camera's RTSP stream in Vue 2
1. Create the webrtcstreamer.js file
Create webrtcstreamer.js under the public folder. The code is listed below; copy and paste it as-is.
var WebRtcStreamer = (function() {
/**
* Interface with WebRTC-streamer API
* @constructor
* @param {string} videoElement - id of the video element tag
* @param {string} srvurl - url of webrtc-streamer (default is current location)
*/
var WebRtcStreamer = function WebRtcStreamer (videoElement, srvurl) {
if (typeof videoElement === "string") {
this.videoElement = document.getElementById(videoElement);
} else {
this.videoElement = videoElement;
}
this.srvurl = srvurl || location.protocol+"//"+window.location.hostname+":"+window.location.port;
this.pc = null;
this.mediaConstraints = { offerToReceiveAudio: true, offerToReceiveVideo: true };
this.iceServers = null;
this.earlyCandidates = [];
}
WebRtcStreamer.prototype._handleHttpErrors = function (response) {
if (!response.ok) {
throw Error(response.statusText);
}
return response;
}
/**
* Connect a WebRTC Stream to videoElement
* @param {string} videourl - id of WebRTC video stream
* @param {string} audiourl - id of WebRTC audio stream
* @param {string} options - options of WebRTC call
* @param {string} localstream - local stream to send
*/
WebRtcStreamer.prototype.connect = function(videourl, audiourl, options, localstream) {
this.disconnect();
// ice servers not yet received, fetch them first
if (!this.iceServers) {
console.log("Get IceServers");
fetch(this.srvurl + "/api/getIceServers")
.then(this._handleHttpErrors)
.then( (response) => (response.json()) )
.then( (response) => this.onReceiveGetIceServers(response, videourl, audiourl, options, localstream))
.catch( (error) => this.onError("getIceServers " + error ))
} else {
this.onReceiveGetIceServers(this.iceServers, videourl, audiourl, options, localstream);
}
}
/**
* Disconnect a WebRTC Stream and clear videoElement source
*/
WebRtcStreamer.prototype.disconnect = function() {
if (this.videoElement?.srcObject) {
this.videoElement.srcObject.getTracks().forEach(track => {
track.stop()
this.videoElement.srcObject.removeTrack(track);
});
}
if (this.pc) {
fetch(this.srvurl + "/api/hangup?peerid=" + this.pc.peerid)
.then(this._handleHttpErrors)
.catch( (error) => this.onError("hangup " + error ))
try {
this.pc.close();
}
catch (e) {
console.log ("Failure close peer connection:" + e);
}
this.pc = null;
}
}
/*
* GetIceServers callback
*/
WebRtcStreamer.prototype.onReceiveGetIceServers = function(iceServers, videourl, audiourl, options, stream) {
this.iceServers = iceServers;
this.pcConfig = iceServers || {"iceServers": [] };
try {
this.createPeerConnection();
var callurl = this.srvurl + "/api/call?peerid=" + this.pc.peerid + "&url=" + encodeURIComponent(videourl);
if (audiourl) {
callurl += "&audiourl="+encodeURIComponent(audiourl);
}
if (options) {
callurl += "&options="+encodeURIComponent(options);
}
if (stream) {
this.pc.addStream(stream);
}
// clear early candidates
this.earlyCandidates.length = 0;
// create Offer
this.pc.createOffer(this.mediaConstraints).then((sessionDescription) => {
console.log("Create offer:" + JSON.stringify(sessionDescription));
this.pc.setLocalDescription(sessionDescription)
.then(() => {
fetch(callurl, { method: "POST", body: JSON.stringify(sessionDescription) })
.then(this._handleHttpErrors)
.then( (response) => (response.json()) )
.catch( (error) => this.onError("call " + error ))
.then( (response) => this.onReceiveCall(response) )
.catch( (error) => this.onError("call " + error ))
}, (error) => {
console.log ("setLocalDescription error:" + JSON.stringify(error));
});
}, (error) => {
alert("Create offer error:" + JSON.stringify(error));
});
} catch (e) {
this.disconnect();
alert("connect error: " + e);
}
}
WebRtcStreamer.prototype.getIceCandidate = function() {
fetch(this.srvurl + "/api/getIceCandidate?peerid=" + this.pc.peerid)
.then(this._handleHttpErrors)
.then( (response) => (response.json()) )
.then( (response) => this.onReceiveCandidate(response))
.catch( (error) => this.onError("getIceCandidate " + error ))
}
/*
* create RTCPeerConnection
*/
WebRtcStreamer.prototype.createPeerConnection = function() {
console.log("createPeerConnection config: " + JSON.stringify(this.pcConfig));
this.pc = new RTCPeerConnection(this.pcConfig);
var pc = this.pc;
pc.peerid = Math.random();
pc.onicecandidate = (evt) => this.onIceCandidate(evt);
pc.onaddstream = (evt) => this.onAddStream(evt);
pc.oniceconnectionstatechange = (evt) => {
console.log("oniceconnectionstatechange state: " + pc.iceConnectionState);
if (this.videoElement) {
if (pc.iceConnectionState === "connected") {
this.videoElement.style.opacity = "1.0";
}
else if (pc.iceConnectionState === "disconnected") {
this.videoElement.style.opacity = "0.25";
}
else if ( (pc.iceConnectionState === "failed") || (pc.iceConnectionState === "closed") ) {
this.videoElement.style.opacity = "0.5";
} else if (pc.iceConnectionState === "new") {
this.getIceCandidate();
}
}
}
pc.ondatachannel = function(evt) {
console.log("remote datachannel created:"+JSON.stringify(evt));
evt.channel.onopen = function () {
console.log("remote datachannel open");
this.send("remote channel openned");
}
evt.channel.onmessage = function (event) {
console.log("remote datachannel recv:"+JSON.stringify(event.data));
}
}
pc.onicegatheringstatechange = function() {
if (pc.iceGatheringState === "complete") {
const recvs = pc.getReceivers();
recvs.forEach((recv) => {
if (recv.track && recv.track.kind === "video") {
console.log("codecs:" + JSON.stringify(recv.getParameters().codecs))
}
});
}
}
try {
var dataChannel = pc.createDataChannel("ClientDataChannel");
dataChannel.onopen = function() {
console.log("local datachannel open");
this.send("local channel openned");
}
dataChannel.onmessage = function(evt) {
console.log("local datachannel recv:"+JSON.stringify(evt.data));
}
} catch (e) {
console.log("Cannor create datachannel error: " + e);
}
console.log("Created RTCPeerConnnection with config: " + JSON.stringify(this.pcConfig) );
return pc;
}
/*
* RTCPeerConnection IceCandidate callback
*/
WebRtcStreamer.prototype.onIceCandidate = function (event) {
if (event.candidate) {
if (this.pc.currentRemoteDescription) {
this.addIceCandidate(this.pc.peerid, event.candidate);
} else {
this.earlyCandidates.push(event.candidate);
}
}
else {
console.log("End of candidates.");
}
}
WebRtcStreamer.prototype.addIceCandidate = function(peerid, candidate) {
fetch(this.srvurl + "/api/addIceCandidate?peerid="+peerid, { method: "POST", body: JSON.stringify(candidate) })
.then(this._handleHttpErrors)
.then( (response) => (response.json()) )
.then( (response) => {console.log("addIceCandidate ok:" + response)})
.catch( (error) => this.onError("addIceCandidate " + error ))
}
/*
* RTCPeerConnection AddTrack callback
*/
WebRtcStreamer.prototype.onAddStream = function(event) {
console.log("Remote track added:" + JSON.stringify(event));
this.videoElement.srcObject = event.stream;
var promise = this.videoElement.play();
if (promise !== undefined) {
promise.catch((error) => {
console.warn("error:"+error);
this.videoElement.setAttribute("controls", true);
});
}
}
/*
* AJAX /call callback
*/
WebRtcStreamer.prototype.onReceiveCall = function(dataJson) {
console.log("offer: " + JSON.stringify(dataJson));
var descr = new RTCSessionDescription(dataJson);
this.pc.setRemoteDescription(descr).then(() => {
console.log ("setRemoteDescription ok");
while (this.earlyCandidates.length) {
var candidate = this.earlyCandidates.shift();
this.addIceCandidate(this.pc.peerid, candidate);
}
this.getIceCandidate()
}
, (error) => {
console.log ("setRemoteDescription error:" + JSON.stringify(error));
});
}
/*
* AJAX /getIceCandidate callback
*/
WebRtcStreamer.prototype.onReceiveCandidate = function(dataJson) {
console.log("candidate: " + JSON.stringify(dataJson));
if (dataJson) {
for (var i=0; i<dataJson.length; i++) {
var candidate = new RTCIceCandidate(dataJson[i]);
console.log("Adding ICE candidate :" + JSON.stringify(candidate) );
this.pc.addIceCandidate(candidate).then( () => { console.log ("addIceCandidate OK"); }
, (error) => { console.log ("addIceCandidate error:" + JSON.stringify(error)); } );
}
this.pc.addIceCandidate();
}
}
/*
* AJAX callback for Error
*/
WebRtcStreamer.prototype.onError = function(status) {
console.log("onError:" + status);
}
return WebRtcStreamer;
})();
if (typeof window !== 'undefined' && typeof window.document !== 'undefined') {
window.WebRtcStreamer = WebRtcStreamer;
}
if (typeof module !== 'undefined' && typeof module.exports !== 'undefined') {
module.exports = WebRtcStreamer;
}
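Because the file exposes WebRtcStreamer both on window and through module.exports, it can be used from any page, not only from the Vue component shown later. A minimal usage sketch (the element id, server address, and RTSP URL below are placeholders of my own):
// Assumes a <video id="video" autoplay muted></video> element exists on the page
// and webrtc-streamer is listening on http://127.0.0.1:8000 (placeholder address).
var streamer = new WebRtcStreamer("video", "http://127.0.0.1:8000");
// Ask webrtc-streamer to pull the RTSP stream and forward it over WebRTC.
streamer.connect("rtsp://username:password@192.168.1.210:554/Streaming/Channels/101");
// Later, to stop the stream and release the peer connection:
// streamer.disconnect();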
2. Download webrtc-streamer
The resource is attached at the top of this article.
You can also download it from GitHub: webrtc-streamer
After downloading, unzip it, open the folder, and run webrtc-streamer.exe.
Once it starts, a black console window (like a cmd prompt) appears.
If it does not start successfully, open http://127.0.0.1:8000/ in your browser to check whether local port 8000 is occupied by another application. If the port is free, webrtc-streamer's own page should load in the browser.
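If you prefer to check from code, a quick sanity test is to call the same /api/getIceServers endpoint that webrtcstreamer.js uses. This is only a sketch; replace the address with whatever host and port your webrtc-streamer actually listens on, and run it from the browser devtools console or from your Vue app:
// If webrtc-streamer is up, this logs its ICE-server configuration;
// a network error means the server is not reachable on that address.
fetch("http://127.0.0.1:8000/api/getIceServers")
  .then((response) => response.json())
  .then((config) => console.log("webrtc-streamer is reachable:", config))
  .catch((error) => console.error("webrtc-streamer is NOT reachable:", error));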
3. Create a wrapper component video.vue (any name works)
The code is below (there are a few things to pay attention to; see the notes after the code).
<template>
<div id="video-container">
<video
class="video"
ref="video"
preload="auto"
autoplay="autoplay"
muted
width="600"
height="400"
/>
<div
class="mask"
@click="handleClickVideo"
:class="{ 'active-video-border': selectStatus }"
></div>
</div>
</template>
<script>
import WebRtcStreamer from '../../public/hk/webrtcstreamer'
export default {
name: 'videoCom',
props: {
rtsp: {
type: String,
required: true,
},
isOn: {
type: Boolean,
default: false,
},
spareId: {
type: Number,
},
selectStatus: {
type: Boolean,
default: false,
},
},
data() {
return {
socket: null,
result: null, // response data
pic: null,
webRtcServer: null,
clickCount: 0, // counts clicks to tell single from double click
}
},
watch: {
rtsp() {
// the rtsp prop changed: disconnect the old stream and reconnect with the new URL
console.log(this.rtsp)
this.webRtcServer.disconnect()
this.initVideo()
},
},
destroyed() {
this.webRtcServer.disconnect()
},
beforeCreate() {
window.onbeforeunload = () => {
this.webRtcServer.disconnect()
}
},
created() {},
mounted() {
this.initVideo()
},
methods: {
initVideo() {
try {
// IP address and port of the webrtc-streamer backend
this.webRtcServer = new WebRtcStreamer(
this.$refs.video,
`http://192.168.1.102:8000`
)
// send the RTSP URL to the backend
this.webRtcServer.connect(this.rtsp)
} catch (error) {
console.log(error)
}
},
/* distinguish double click from single click */
dbClick() {
this.clickCount++
if (this.clickCount === 2) {
this.btnFull() // double click: go fullscreen
this.clickCount = 0
}
setTimeout(() => {
if (this.clickCount === 1) {
this.clickCount = 0
}
}, 250)
},
/* make the video fullscreen */
btnFull() {
const elVideo = this.$refs.video
if (elVideo.webkitRequestFullScreen) {
elVideo.webkitRequestFullScreen()
} else if (elVideo.mozRequestFullScreen) {
elVideo.mozRequestFullScreen()
} else if (elVideo.requestFullscreen) {
elVideo.requestFullscreen()
}
},
/*
isOn decides whether clicking should also emit selectVideo so the parent can switch streams;
dbClick implements double click to enter fullscreen
*/
handleClickVideo() {
if (this.isOn) {
this.$emit('selectVideo', this.spareId)
this.dbClick()
} else {
this.btnFull()
}
},
},
}
</script>
<style scoped lang="scss">
.active-video-border {
border: 2px salmon solid;
}
#video-container {
position: relative;
// width: 100%;
// height: 100%;
.video {
// width: 100%;
// height: 100%;
// object-fit: cover;
}
.mask {
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
cursor: pointer;
}
}
</style>
There are two things to pay attention to here.
The first is the import path of webrtcstreamer.js at the top of the component: it must match where you actually placed the file (in this project it sits under public/hk/, hence '../../public/hk/webrtcstreamer'; adjust the path to your own location).
The second is the address passed to new WebRtcStreamer() in initVideo(): it must be the IP and port of the machine where webrtc-streamer.exe is running (here http://192.168.1.102:8000).
If you do not know how to check a local port, do it like this: press Win + R to open Run, type cmd, and in the console run netstat -ano | findstr 8000 to see which process, if any, is listening on port 8000.
4. Play the RTSP stream with the video wrapper component
First, import and register the video component in the page where you want to use it.
Then use the video component in the template and define two variables that pass the RTSP stream URLs to the wrapper, as in the sketch below.
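A minimal sketch of such a parent page (the component path, RTSP URLs, and prop values are placeholders of my own, not the article's real values):
<template>
  <div>
    <!-- two cameras, each fed by its own RTSP URL -->
    <video-com :rtsp="rtsp1" :spare-id="1" :is-on="true" @selectVideo="onSelectVideo" />
    <video-com :rtsp="rtsp2" :spare-id="2" :is-on="true" @selectVideo="onSelectVideo" />
  </div>
</template>
<script>
import VideoCom from './components/video.vue' // adjust to where video.vue actually lives

export default {
  components: { VideoCom },
  data() {
    return {
      // placeholder RTSP URLs; replace with your own camera credentials and addresses
      rtsp1: 'rtsp://username:password@192.168.1.210:554/Streaming/Channels/101',
      rtsp2: 'rtsp://username:password@192.168.1.211:554/Streaming/Channels/101',
    }
  },
  methods: {
    onSelectVideo(spareId) {
      // the wrapper emits selectVideo with its spareId when isOn is true
      console.log('selected video', spareId)
    },
  },
}
</script>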
If everything is wired up correctly, the page displays the live camera feeds.
5. With this approach the video plays with sound by default; here is how to mute it
Declare a method that mutes the video element,
then call it during component initialization (the article calls it in created); the video then plays silently.
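The article only shows that method as a screenshot, so here is a minimal sketch of what such a mute helper could look like (my own reconstruction, not the author's exact code). It simply forces the muted property on the video element; since $refs.video only exists after mounting, extending the existing mounted() hook to call it right after initVideo() is the safe variant:
// Hypothetical mute helper for video.vue (not the article's original code).
muteVideo() {
  const elVideo = this.$refs.video
  if (elVideo) {
    // setting the property (not just the attribute) is what actually silences playback
    elVideo.muted = true
    elVideo.volume = 0
  }
},

// ...and in the component's lifecycle hooks:
mounted() {
  this.initVideo()
  this.muteVideo()
},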
With that, the walkthrough for bringing a Hikvision camera into Vue is complete.
If you have suggestions or questions, feel free to message me.
Finally, I wish you all success in your work and that everything goes your way!