Hi all,
I’ve been using the ipcamera
bindings to convert the RTSP streams from my cameras to jpgs which are then displayed in HabPanel, but I’d like to stream the videos directly instead.
The addon uses ffmpeg which is quite clunky, crashes frequently, and uses a lot of CPU on my raspi (for 10+ cams at 4K resolution).
Instead, I came across this little project which offers a lightweight solution for streaming RTSP to browsers:
GitHub - deepch/RTSPtoWeb: RTSP Stream to WebBrowser
It works in my browser, and I can more or less make it work in HabPanel by putting it in an iframe, but I’d rather have it as a proper custom widget.
Unfortunately, HabPanel doesn’t allow executing JavaScript, so I’m stuck.
Here’s the code I came up with (modified from the example on github), which I’d like to run on HabPanel:
/**
 * Wraps one RTSP-over-WebSocket (MSE) camera stream, as served by RTSPtoWeb,
 * and feeds it into a <video> element via the Media Source Extensions API.
 */
class CamInfo {
  /**
   * @param {string} name - Human-readable camera name (used for logging only).
   * @param {string} url - WebSocket URL of the RTSPtoWeb MSE stream endpoint.
   * @param {HTMLVideoElement} videoEl - Target <video> element.
   */
  constructor(name, url, videoEl) {
    this.name = name;
    this.url = url;
    this.videoEl = videoEl;
    this.mseQueue = [];               // packets waiting for the SourceBuffer to go idle
    this.mseStreamingStarted = false; // true once the first packet was appended
    this.mseSourceBuffer = null;      // created when the codec packet (type 9) arrives
  }

  /** Attach video-element workarounds and start streaming. */
  startVideo() {
    const videoEl = this.videoEl;
    // Fix stalled video in Safari: if playback paused past the buffered end,
    // seek back just inside the buffer and resume.
    // BUG FIX: the original referenced the bare identifier `videoEl` here,
    // which is not in scope inside the listener (only `this.videoEl` is),
    // so the handler threw a ReferenceError instead of un-stalling playback.
    videoEl.addEventListener('pause', () => {
      if (videoEl.currentTime > videoEl.buffered.end(videoEl.buffered.length - 1)) {
        videoEl.currentTime = videoEl.buffered.end(videoEl.buffered.length - 1) - 0.1;
        videoEl.play();
      }
    });
    // Prevent a click on the element from pausing playback.
    videoEl.addEventListener('click', (e) => e.preventDefault());
    this.startPlay(this);
  }

  /**
   * Open the MediaSource and WebSocket and wire up packet handling.
   * @param {CamInfo} camObj - The camera instance (parameter kept for interface compatibility).
   */
  startPlay(camObj) {
    const mse = new MediaSource();
    this.videoEl.src = window.URL.createObjectURL(mse);
    mse.addEventListener('sourceopen', () => {
      const ws = new WebSocket(camObj.url);
      ws.binaryType = 'arraybuffer';
      ws.onopen = () => {
        console.log(camObj.name + ' connected');
      };
      ws.onmessage = (event) => {
        const data = new Uint8Array(event.data);
        if (data[0] === 9) {
          // First packet (type 9) carries the UTF-8 encoded codec string.
          const decodedArr = data.slice(1);
          const mimeCodec = window.TextDecoder
            ? new TextDecoder('utf-8').decode(decodedArr)
            : Utf8ArrayToStr(decodedArr); // legacy fallback helper from the RTSPtoWeb example
          camObj.mseSourceBuffer = mse.addSourceBuffer('video/mp4; codecs="' + mimeCodec + '"');
          camObj.mseSourceBuffer.mode = 'segments';
          // Arrow function instead of the original `bind(this, ...)` — `this`
          // inside the old non-arrow handlers was the WebSocket, not the class.
          camObj.mseSourceBuffer.addEventListener('updateend', () =>
            camObj.pushPacket(camObj, camObj.videoEl)
          );
        } else {
          camObj.readPacket(event.data, camObj, camObj.videoEl);
        }
      };
    }, false);
  }

  /**
   * Append the next queued packet to the SourceBuffer if it is idle, and keep
   * a hidden tab close to the live edge.
   * @param {CamInfo} camObj
   * @param {HTMLVideoElement} videoEl
   */
  pushPacket(camObj, videoEl) {
    if (!camObj.mseSourceBuffer.updating) {
      if (camObj.mseQueue.length > 0) {
        camObj.mseSourceBuffer.appendBuffer(camObj.mseQueue.shift());
      } else {
        // Queue drained: the next incoming packet may be appended directly.
        camObj.mseStreamingStarted = false;
      }
    }
    if (videoEl.buffered.length > 0) {
      if (typeof document.hidden !== 'undefined' && document.hidden) {
        // No sound: browsers pause muted background video, so jump close to
        // the buffered end to stay near live when the tab is hidden.
        videoEl.currentTime = videoEl.buffered.end(videoEl.buffered.length - 1) - 0.5;
      }
    }
  }

  /**
   * Handle an incoming media packet: append directly when streaming has not
   * yet started, otherwise queue it and drain if the SourceBuffer is idle.
   * @param {ArrayBuffer} packet
   * @param {CamInfo} camObj
   * @param {HTMLVideoElement} videoEl
   */
  readPacket(packet, camObj, videoEl) {
    if (!camObj.mseStreamingStarted) {
      camObj.mseSourceBuffer.appendBuffer(packet);
      camObj.mseStreamingStarted = true;
      return;
    }
    camObj.mseQueue.push(packet);
    if (!camObj.mseSourceBuffer.updating) {
      this.pushPacket(camObj, videoEl);
    }
  }
}
<!-- Minimal test page: loads the CamInfo class from a separate file and
     binds one camera stream to a <video> element. -->
<html>
<body>
<!-- NOTE: placeholder src — point this at the actual file containing the
     CamInfo class shown above. -->
<script src="main.js (above script)"></script>
<!-- muted + autoplay + playsinline lets browsers start playback without a
     user gesture; controls kept for manual testing. -->
<video id="cam1-video" autoplay muted playsinline controls style="width: 640px; height: 480px;"></video>
<script>
var cam1 = new CamInfo('Camera 1 LoRes', 'ws://x.x.x.x:8080/stream/cam1/channel/LoRes/mse', document.querySelector('#cam1-video'))
cam1.startVideo();
</script>
</body>
</html>
Is there any way to implement this as a custom widget in Angular, and if so, how would I go about it?
Thanks,
schalli