Reputation: 9206
I have this simple code to get chunks of video stream and play them in MediaSource. I see video, but sometimes it stops. It may work for few seconds or for few minutes. But finally it stops at some moment. chrome://media-internals/ shows no errors.
What is wrong here?
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
var mediaSource = new MediaSource();
var constraints = {
"audio": true,
"video": {
"mandatory": {
"minWidth": 320, "maxWidth": 320,
"minHeight": 240, "maxHeight": 240
}, "optional": []
}
};
window.mediaSource = mediaSource;
var sourceBuffer;
var video = document.querySelector('#video');
window.video = video;
video.src = window.URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', function (e) {
console.log("sourceopen");
sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vorbis,vp8"');
window.sourceBuffer = sourceBuffer;
}, false);
mediaSource.addEventListener('error', function (e) {
console.log("error", e)
}, false);
var stack = [];
video.play();
navigator.getUserMedia(constraints, function (stream) {
console.log("stream", stream);
mediaRecorder = new MediaRecorder(stream);
mediaRecorder.ondataavailable = function (e) {
var reader = new FileReader();
reader.addEventListener("loadend", function () {
var arr = new Uint8Array(reader.result);
sourceBuffer.appendBuffer(arr);
});
reader.readAsArrayBuffer(e.data);
};
mediaRecorder.start(100);
}, function (e) {
console.log(e)
});
Here is JSFIDDLE which is going to try to do it: https://jsfiddle.net/stivyakovenko/fkt89cLu/6/ I am using Chrome as my main target.
Upvotes: 7
Views: 6405
Reputation: 277
This solution works great in Firefox, no freezing. It requires jquery, cgi Python3 for the browser client. It also has two server-side Python3 programs for writing and reading the webcam data as the data is created.
Browser Client:
<html>
<head>
<script type="text/javascript" src="js/jquery.min.js"></script>
</head>
<body>
<video id="video" width="300" height="300" controls></video>
<video id="video2" width="300" height="300" controls></video>
<script>
var offsetA = 0;
var res;
var pos;
var b = "base64," ;
var fr = new FileReader();
function b64toBlob(dataURI) {
var byteString = atob(dataURI.split(',')[1]);
var ab = new ArrayBuffer(byteString.length);
var ia = new Uint8Array(ab);
for (var i = 0; i < byteString.length; i++) {
ia[i] = byteString.charCodeAt(i);
}
return new Blob([ab], { type: 'video/webm; codecs="vp8, opus"' });
}
// 1. Create a `MediaSource`
var mediaSource2 = new MediaSource();
// 2. Create an object URL from the `MediaSource`
var url = URL.createObjectURL(mediaSource2);
// 3. Set the video's `src` to the object URL
var video = document.getElementById("video2");
video.src = url;
// 4. On the `sourceopen` event, create a `SourceBuffer`
var sourceBuffer2 = null;
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
var mediaSource = new MediaSource();
var constraints = {
"audio": true,
"video": {
"mandatory": {
"minWidth": 320, "maxWidth": 320,
"minHeight": 240, "maxHeight": 240
}, "optional": []
}
};
window.mediaSource = mediaSource;
var sourceBuffer;
var video = document.querySelector('#video');
window.video = video;
video.src = window.URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', function (e) {
console.log("sourceopen");
sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp8, opus"');
window.sourceBuffer = sourceBuffer;
}, false);
mediaSource.addEventListener('error', function (e) {
console.log("error", e)
}, false);
var stack = [];
video.play();
mediaSource2.addEventListener("sourceopen", function()
{
// NOTE: Browsers are VERY picky about the codec being EXACTLY
// right here. Make sure you know which codecs you're using!
sourceBuffer2 = mediaSource2.addSourceBuffer("video/webm; codecs=\"vp8, opus\"");
sourceBuffer2.mode = 'sequence';
// Make sure to only append one chunk at a time to the SourceBuffer
navigator.getUserMedia(constraints, function (stream)
{
console.log("stream", stream);
mediaRecorder = new MediaRecorder(stream);
mediaRecorder.ondataavailable = function (e)
{
fr.onload = function(){
res = this.result;
pos = res.search(b);
pos = pos + b.length;
res = res.substring(pos);
$.ajax({
type: 'POST',
url: 'post_data_webcam.py',
dataType: "html",
data: { chunk: res },
success: function(data){
//alert(data + ' yes');
},
error: function(XMLHttpRequest, textStatus, errorThrown) {
alert('Status: ' + textStatus + ' ' + ' Error: ' + errorThrown);
}
});
}
fr.readAsDataURL(e.data);
var reader = new FileReader();
reader.addEventListener("loadend", function () {
var arr = new Uint8Array(reader.result);
sourceBuffer.appendBuffer(arr);
});
reader.readAsArrayBuffer(e.data);
};
mediaRecorder.start(1000);
}, function (e) {
console.log(e)
});
var i = setInterval(function()
{
if (mediaSource2.readyState === "open" && sourceBuffer2 && sourceBuffer2.updating === false )
{
if (sourceBuffer.duration > 2){
sourceBuffer.remove(0, sourceBuffer.duration - 2);
}
if (sourceBuffer2.duration > 2){
sourceBuffer2.remove(0, sourceBuffer2.duration - 2);
}
$.ajax({
type: 'POST',
url: 'get_data_webcam.py',
dataType: "html",
async: false,
data: { offset: offsetA },
success: function(data){
data = data.trim();
if (data != 'base64,') {
var reader = new FileReader();
reader.addEventListener("loadend", function () {
var arr = new Uint8Array(reader.result);
sourceBuffer2.appendBuffer(arr);
});
reader.readAsArrayBuffer( b64toBlob(data) );
offsetA = offsetA + 1;
}
},
error: function(XMLHttpRequest, textStatus, errorThrown) {
alert('Status: ' + textStatus + ' ' + ' Error: ' + errorThrown);
}
});
}
}, 1000);
});
</script>
</body>
</html>
Server-side Python3 webcam video writer: post_data_webcam.py
import os
import sys
import cgi
import cgitb
import base64
include_path = '/var/project_path/www'
cgitb.enable(display=0, logdir=f"""{include_path}/tmp_errors""") # include_path is OUTDIR
sys.path.insert(0, include_path)
def enc_print(string='', encoding='utf8'):
sys.stdout.buffer.write(string.encode(encoding) + b'\n')
from html import escape
args = cgi.FieldStorage()
chunk = '' if not args.getvalue( "chunk" ) else escape( args.getvalue( "chunk" ) )
mp4 = 'webcam.mp4'
mp4_text = 'webcam_text.txt'
with open (mp4, 'ab') as f:
f.write( base64.b64decode(chunk) )
with open (mp4_text, 'a') as f:
f.write( str(len(chunk)) + ',' + chunk + '\n' )
html = 'success'
enc_print("Content-Type:text/html;charset=utf-8;")
enc_print()
enc_print(html)
Server-side Python3 webcam video reader: get_data_webcam.py
import os
import sys
import cgi
import cgitb
import base64
include_path = '/var/project_path/www'
cgitb.enable(display=0, logdir=f"""{include_path}/tmp_errors""") # include_path is OUTDIR
sys.path.insert(0, include_path)
def enc_print(string='', encoding='utf8'):
sys.stdout.buffer.write(string.encode(encoding) + b'\n')
from html import escape
args = cgi.FieldStorage()
offset = '' if not args.getvalue( "offset" ) else escape( args.getvalue( "offset" ) )
mp4_text = 'webcam_text.txt'
data = ''
try:
with open(mp4_text, 'r') as f:
line = f.readlines()[int(offset)]
data = line.split(',')[1].strip()
except:
pass
enc_print("Content-Type:text/html;charset=utf-8;")
enc_print()
enc_print('base64,' + data)
Upvotes: 0
Reputation: 277
UPDATE! This is version 2 that I also created, it will work in Firefox and Chrome, and no freeze. Please note, I am using the same two server-side Python3 programs for writing and reading the webcam data as the data is created from my previous answer.
Browser Client version 2:
<html>
<head>
<script type="text/javascript" src="js/jquery.min.js"></script>
</head>
<body>
<video id="video1" width="300" height="300" autoplay controls ></video>
<video id="video2" width="300" height="300" controls></video>
<script>
var offsetA = 0;
function b64toBlob(dataURI) {
var byteString = atob(dataURI.split(',')[1]);
var ab = new ArrayBuffer(byteString.length);
var ia = new Uint8Array(ab);
for (var i = 0; i < byteString.length; i++) {
ia[i] = byteString.charCodeAt(i);
}
return new Blob([ab], { type: 'video/webm; codecs=vp8;' });
}
// 1. Create a `MediaSource`
var mediaSource2 = new MediaSource();
// 2. Create an object URL from the `MediaSource`
var url = URL.createObjectURL(mediaSource2);
// 3. Set the video's `src` to the object URL
var video = document.getElementById("video2");
video.src = url;
// 4. On the `sourceopen` event, create a `SourceBuffer`
var sourceBuffer2 = null;
const constraints = {video: true};
const video1 = document.querySelector('#video1');
const video2 = document.querySelector('#video2');
//var blobList = [];
function handleSuccess(stream) {
video1.srcObject = stream;
var mediaRecorder = new MediaRecorder(stream,{type:"video/webm; codecs=vp8;"});
mediaRecorder.ondataavailable = function(e){
//blobList.push(e.data);
var res;
var pos;
var b = "base64," ;
var fr = new FileReader();
fr.onload = function(){
res = this.result;
pos = res.search(b);
pos = pos + b.length;
res = res.substring(pos);
$.ajax({
type: 'POST',
url: 'post_data_webcam.py',
dataType: "html",
async:false,
data: { chunk: res },
success: function(data){
//alert(data + ' yes');
},
error: function(XMLHttpRequest, textStatus, errorThrown) {
alert('Status: ' + textStatus + ' ' + ' Error: ' + errorThrown);
}
});
}
fr.readAsDataURL(e.data);
}
mediaRecorder.start(1000);
var i = setInterval(function()
{
if (mediaSource2.readyState === "open" && sourceBuffer2 && sourceBuffer2.updating === false )
{
if (sourceBuffer2.duration > 2) {
sourceBuffer2.remove(0, sourceBuffer2.duration - 2);
}
$.ajax({
type: 'POST',
url: 'get_data_webcam.py',
dataType: "html",
async: false,
data: { offset: offsetA },
success: function(data){
data = data.trim();
if (data != 'base64,') {
var reader = new FileReader();
reader.addEventListener("loadend", function () {
sourceBuffer2.appendBuffer( reader.result );
});
reader.readAsArrayBuffer( b64toBlob(data) );
offsetA = offsetA + 1;
}
},
error: function(XMLHttpRequest, textStatus, errorThrown) {
alert('Status: ' + textStatus + ' ' + ' Error: ' + errorThrown);
}
});
}
}, 1000);
video.play();
}
function handleError(error) {
console.error('error', error);
}
mediaSource2.addEventListener("sourceopen", function()
{
// NOTE: Browsers are VERY picky about the codec being EXACTLY
// right here. Make sure you know which codecs you're using!
sourceBuffer2 = mediaSource2.addSourceBuffer("video/webm; codecs=vp8;");
sourceBuffer2.mode = 'sequence';
// Make sure to only append one chunk at a time to the SourceBuffer
navigator.mediaDevices.getUserMedia(constraints).then(handleSuccess).catch(handleError);
});
</script>
</body>
</html>
Upvotes: 0
Reputation: 11
Based on my experience of working with MediaRecorder and MediaSource, most of the errors related to the video freezing or returning errors may be due to the chunks being received out-of-sync. I believe that webm (and maybe other media types also) need the chunks to be received in increasing order of their timecodes. Recording, sending and then receiving the chunks Async may not preserve this increasing order of timecodes.
So, after the above analysis of my own experience of video freezing with MediaRecorder/MediaSource, I changed my code to send the recorded chunks in Sync, not Async.
Upvotes: 1
Reputation: 1
a working example in chrome but it freez in firefox
const main = async(function* main(){
const logging = true;
let tasks = Promise.resolve(void 0);
const devices = yield navigator.mediaDevices.enumerateDevices();
console.table(devices);
const stream = yield navigator.mediaDevices.getUserMedia({video: true, audio: true});
if(logging){
stream.addEventListener("active", (ev)=>{ console.log(ev.type); });
stream.addEventListener("inactive", (ev)=>{ console.log(ev.type); });
stream.addEventListener("addtrack", (ev)=>{ console.log(ev.type); });
stream.addEventListener("removetrack", (ev)=>{ console.log(ev.type); });
}
const rec = new MediaRecorder(stream, {mimeType: 'video/webm; codecs="opus,vp8"'});
if(logging){
rec.addEventListener("dataavailable", (ev)=>{ console.log(ev.type); });
rec.addEventListener("pause", (ev)=>{ console.log(ev.type); });
rec.addEventListener("resume", (ev)=>{ console.log(ev.type); });
rec.addEventListener("start", (ev)=>{ console.log(ev.type); });
rec.addEventListener("stop", (ev)=>{ console.log(ev.type); });
rec.addEventListener("error", (ev)=>{ console.error(ev.type, ev); });
}
const ms = new MediaSource();
if(logging){
ms.addEventListener('sourceopen', (ev)=>{ console.log(ev.type); });
ms.addEventListener('sourceended', (ev)=>{ console.log(ev.type); });
ms.addEventListener('sourceclose', (ev)=>{ console.log(ev.type); });
ms.sourceBuffers.addEventListener('addsourcebuffer', (ev)=>{ console.log(ev.type); });
ms.sourceBuffers.addEventListener('removesourcebuffer', (ev)=>{ console.log(ev.type); });
}
const video = document.createElement("video");
if(logging){
video.addEventListener('loadstart', (ev)=>{ console.log(ev.type); });
video.addEventListener('progress', (ev)=>{ console.log(ev.type); });
video.addEventListener('loadedmetadata', (ev)=>{ console.log(ev.type); });
video.addEventListener('loadeddata', (ev)=>{ console.log(ev.type); });
video.addEventListener('canplay', (ev)=>{ console.log(ev.type); });
video.addEventListener('canplaythrough', (ev)=>{ console.log(ev.type); });
video.addEventListener('playing', (ev)=>{ console.log(ev.type); });
video.addEventListener('waiting', (ev)=>{ console.log(ev.type); });
video.addEventListener('seeking', (ev)=>{ console.log(ev.type); });
video.addEventListener('seeked', (ev)=>{ console.log(ev.type); });
video.addEventListener('ended', (ev)=>{ console.log(ev.type); });
video.addEventListener('emptied', (ev)=>{ console.log(ev.type); });
video.addEventListener('stalled', (ev)=>{ console.log(ev.type); });
video.addEventListener('timeupdate', (ev)=>{ console.log(ev.type); }); // annoying
video.addEventListener('durationchange', (ev)=>{ console.log(ev.type); });
video.addEventListener('ratechange', (ev)=>{ console.log(ev.type); });
video.addEventListener('play', (ev)=>{ console.log(ev.type); });
video.addEventListener('pause', (ev)=>{ console.log(ev.type); });
video.addEventListener('error', (ev)=>{ console.warn(ev.type, ev); });
}
//video.srcObject = ms;
video.src = URL.createObjectURL(ms);
video.volume = 0;
video.controls = true;
video.autoplay = true;
document.body.appendChild(video);
yield new Promise((resolve, reject)=>{
ms.addEventListener('sourceopen', ()=> resolve(), {once: true});
});
const sb = ms.addSourceBuffer(rec.mimeType);
if(logging){
sb.addEventListener('updatestart', (ev)=>{ console.log(ev.type); }); // annoying
sb.addEventListener('update', (ev)=>{ console.log(ev.type); }); // annoying
sb.addEventListener('updateend', (ev)=>{ console.log(ev.type); }); // annoying
sb.addEventListener('error', (ev)=>{ console.error(ev.type, ev); });
sb.addEventListener('abort', (ev)=>{ console.log(ev.type); });
}
const stop = async(function* stop(){
console.info("stopping");
if(sb.updating){ sb.abort(); }
if(ms.readyState === "open"){ ms.endOfStream(); }
rec.stop();
stream.getTracks().map((track)=>{ track.stop(); });
yield video.pause();
console.info("end");
});
const button = document.createElement("button");
button.innerHTML = "stop";
button.addEventListener("click", ()=>{
document.body.removeChild(button);
tasks = tasks.then(stop);
}, {once: true});
document.body.appendChild(button);
let i = 0;
rec.ondataavailable = ({data})=>{
tasks = tasks.then(async(function*(){
console.group(""+i);
try{
if(logging){ console.log("dataavailable", "size:", data.size); }
if(data.size === 0){
console.warn("empty recorder data");
throw new Error("empty recorder data");
}
const buf = yield readAsArrayBuffer(data);
sb.appendBuffer(buf);
yield new Promise((resolve, reject)=>{
sb.addEventListener('updateend', ()=> resolve(), {once: true});
sb.addEventListener("error", (err)=> reject(ev), {once: true});
});
if(logging){
console.log("timestampOffset", sb.timestampOffset);
console.log("appendWindowStart", sb.appendWindowStart);
console.log("appendWindowEnd", sb.appendWindowEnd);
for(let i=0; i<sb.buffered.length; i++){
console.log("buffered", i, sb.buffered.start(i), sb.buffered.end(i));
}
for(let i=0; i<video.seekable.length; i++){
console.log("seekable", i, video.seekable.start(i), video.seekable.end(i));
}
console.log("webkitAudioDecodedByteCount", video.webkitAudioDecodedByteCount);
console.log("webkitVideoDecodedByteCount", video.webkitVideoDecodedByteCount);
console.log("webkitDecodedFrameCount", video.webkitDecodedFrameCount);
console.log("webkitDroppedFrameCount", video.webkitDroppedFrameCount);
}
if (video.buffered.length > 1) {
console.warn("MSE buffered has a gap!");
throw new Error("MSE buffered has a gap!");
}
}catch(err){
console.error(err);
yield stop();
console.groupEnd(""+i); i++;
return Promise.reject(err);
}
console.groupEnd(""+i);
i++;
}));
};
rec.start(1000);
console.info("start");
});
function sleep(ms){
return new Promise(resolve =>
setTimeout((()=>resolve(ms)), ms));
}
function readAsArrayBuffer(blob) {
return new Promise((resolve, reject)=>{
const reader = new FileReader();
reader.addEventListener("loadend", ()=> resolve(reader.result), {once: true});
reader.addEventListener("error", (err)=> reject(err.error), {once: true});
reader.readAsArrayBuffer(blob);
});
}
function async(generatorFunc){
return function (arg) {
const generator = generatorFunc(arg);
return next(null);
function next(arg) {
const result = generator.next(arg);
if(result.done){ return result.value; }
else if(result.value instanceof Promise){ return result.value.then(next); }
else{ return Promise.resolve(result.value); }
}
}
}
console.clear();
main().catch(console.error);
https://jsfiddle.net/nthyfgvs/
Upvotes: 0
Reputation: 1607
The mediarecorder will give you part of a whole webm file in the ondataavailable callback. Look like this kind of stuff is not work with mediaSource. It can not work at all in my chrome 66.
Here is a way that works like "video chat" or "live stream" with MediaRecorder without ffmpeg:
And this kind of workaroud works only with html too:
Here is a jsfiddle that works:
const constraints = {video: true};
const video1 = document.querySelector('.real1');
const video2 = document.querySelector('.real2');
var blobList = [];
var gCurrentTime = 0;
function playNew(){
gCurrentTime = video2.currentTime;
var thisBlob = new Blob(blobList,{type:"video/webm"});
var url = URL.createObjectURL(thisBlob);
video2.src = url;
video2.currentTime = gCurrentTime;
video2.play();
}
video2.onended = playNew;
var isFirst = true;
function handleSuccess(stream) {
video1.srcObject = stream;
var mediaRecorder = new MediaRecorder(stream,{mimeType:"video/webm"});
mediaRecorder.ondataavailable = function(e){
blobList.push(e.data);
if (isFirst){
playNew();
isFirst = false;
}
}
mediaRecorder.start(1000);
}
function handleError(error) {
console.error('Reeeejected!', error);
}
navigator.mediaDevices.getUserMedia(constraints).
then(handleSuccess).catch(handleError);
<video class="real1" autoplay controls></video>
<video class="real2" controls></video>
https://jsfiddle.net/4akkadht/1/
The html only solution (second one) will blink again and again and have a huge delay. The server long push solution (first one) will not blink and have five seconds delay.
Upvotes: 1
Reputation: 91
I am trying to do this as well, however I do not get any video at all. Your jsfiddle does not work for me on chrome or firefox (tested on ubuntu 14.04 and windows 7).
After a bit of research (mainly streaming back the file after it has been recorded), I've found out that the file is not properly fragmented to be played by MSE. @Steve: I'd be interested to find out how you've done the fragmenting with ffmpeg.
As a sidenote, I also have a similar question here: Display getUserMediaStream live video with media stream extensions (MSE) , with an error description from chrome://media-internals.
Upvotes: 0
Reputation: 9206
Looks like this is a bug in Chrome...
https://bugs.chromium.org/p/chromium/issues/detail?id=606000
Upvotes: 5