Reputation: 47
I'm working on a live video streaming service. I get data from the camera, turn the binary array into base64, and send the base64 strings to other clients. On the receiver side, I'm using the MediaSource API to provide the src for the HTML video element. Everything works fine when the receiver gets video data from the beginning of the stream, but the problem is that the receiver can't turn the received data into video if it starts receiving the data from the middle of the stream. I used this doc for my work.
This is the appending method:
// Wire a MediaSource to the <video> element and continuously append
// chunks pulled from `channel` into a single SourceBuffer.
const mediaSource = new MediaSource();
mediaSource.addEventListener('sourceopen', async () => {
  const sourceBuffer = mediaSource.addSourceBuffer(webm9MimeCodec);
  // 'sequence' mode: the browser derives timestamps from append order,
  // so chunks are played back-to-back in the order they are appended.
  sourceBuffer.mode = 'sequence';
  sourceBuffer.addEventListener('updateend', async () => {
    if (videoTarget.paused) {
      // play() returns a promise; the browser's autoplay policy may
      // reject it, and leaving it floating raises an unhandled rejection.
      videoTarget.play().catch(() => {
        /* autoplay blocked — playback will need a user gesture */
      });
    }
    // Pull the next chunk only after the previous append finished:
    // appendBuffer() throws if called while sourceBuffer.updating.
    const ab = await channel.pull();
    sourceBuffer.appendBuffer(ab);
  });
  // Prime the pump: the first append kicks off the updateend loop above.
  const ab = await channel.pull();
  sourceBuffer.appendBuffer(ab);
});
videoTarget.src = URL.createObjectURL(mediaSource);
This is the method I use to create the array-buffer channel:
/**
 * Creates an async FIFO channel connecting a producer (the network
 * handler calling `push`) to a consumer (the SourceBuffer append loop
 * awaiting `pull`).
 *
 * Fixes over the original:
 *  - `pull` no longer queues its resolver after already resolving from
 *    the buffer (the missing `else` meant a later `push` handed its
 *    chunk to an already-settled promise, silently dropping data).
 *  - Both queues now use `shift()` (FIFO) instead of `pop()` (LIFO),
 *    which was reordering the stream chunks.
 *
 * @returns {{push: (ab: ArrayBuffer) => void, pull: () => Promise<ArrayBuffer>}}
 */
const channelFactory = function () {
  const bufferedChunks = []; // chunks pushed before any consumer pulled
  const waitingPulls = [];   // resolvers for pulls that arrived first
  return {
    push: (ab) => {
      const resolveOldest = waitingPulls.shift(); // FIFO, not pop()
      if (resolveOldest) {
        resolveOldest(ab);
      } else {
        bufferedChunks.push(ab);
      }
    },
    pull: () =>
      new Promise((resolve) => {
        if (bufferedChunks.length > 0) {
          resolve(bufferedChunks.shift()); // FIFO, not pop()
        } else {
          // Buffer empty: park the resolver until the next push.
          waitingPulls.push(resolve);
        }
      }),
  };
};
And this is how I get the data from the server:
// Reassemble the base64 parts arriving from the server into complete
// segments, decode each segment, and hand it to the channel as an
// ArrayBuffer.
let lastIndex = -1;   // index of the previously received part
let partBuffer = [];  // base64 parts of the segment currently being assembled
let playing = false;  // true once we have seen the start of a segment

connection.on('video-data', async (r) => {
  console.log(r.part.length);
  if (r.part.length === 0) {
    return;
  }
  // Don't start mid-segment: wait for a part with index 0.
  // NOTE(review): this is likely why a receiver joining mid-stream never
  // renders — the first segment it buffers still lacks the WebM
  // initialization data (and a keyframe) that MediaSource needs before
  // it can decode anything. Verify the server re-sends the init segment
  // to late joiners.
  if (!playing && r.index !== 0) {
    return;
  }
  playing = true;
  // Index wrapped around -> the previous segment is complete: decode it
  // and ship it to the consumer.
  if (lastIndex >= r.index) {
    // join('') is the idiomatic O(n) form of the original
    // reduce((a, b) => a + b) string concatenation.
    const ba = base64js.toByteArray(partBuffer.join(''));
    channel.push(ba.buffer);
    partBuffer = [];
  }
  partBuffer.push(r.part);
  lastIndex = r.index;
});
thanks for reading
Upvotes: 1
Views: 50