Reputation: 113
I have an app that allows me to draw on a canvas while a video plays underneath the canvas element, so on screen I see the video playing with my drawings on top of it.
I can use the following code to create an image of the canvas drawing on top of the video:
var tempcanvasDrawing = document.getElementById("drawcanvas");
var img = new Image();
img.onload = function() {
    var tempcanvasFrame = document.createElement("canvas");
    tempcanvasFrame.width = 1660;
    tempcanvasFrame.height = 925;
    var ctx = tempcanvasFrame.getContext('2d');
    var video = $("video").get(1);
    ctx.drawImage(video, 0, 0, 1660, 925); // current video frame first
    ctx.drawImage(img, 0, 0, 1660, 925);   // then the canvas drawing on top
    var dataURL = tempcanvasFrame.toDataURL('image/jpeg');
    $.post('img_upload.php', {
        imgBase64: dataURL,
        userid: window.userid
    }, function(o) {
    });
};
img.src = tempcanvasDrawing.toDataURL(); // set src after onload so the handler always fires
I am now trying to find a way to record my drawing and the video together in real time and export the result as a video.
I've played a bit with MediaRecorder and can record the canvas, but I am really struggling to find a way to combine the video and canvas (like I do above) into a stream that I can record with MediaRecorder.
Has anyone been able to do this kind of thing?
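To put it another way, I imagine repeating the drawImage composition above on every animation frame onto an off-screen canvas, and then capturing that canvas as one stream. Roughly like this (only a sketch of the idea, not working code; compositeCanvas is a hypothetical off-screen canvas):
// hypothetical off-screen canvas that receives both layers every frame
var compositeCanvas = document.createElement("canvas");
compositeCanvas.width = 1660;
compositeCanvas.height = 925;
var compositeCtx = compositeCanvas.getContext('2d');

function drawCompositeFrame() {
    var video = $("video").get(1);
    var drawing = document.getElementById("drawcanvas");
    compositeCtx.drawImage(video, 0, 0, 1660, 925);   // current video frame
    compositeCtx.drawImage(drawing, 0, 0, 1660, 925); // canvas drawing on top
    requestAnimationFrame(drawCompositeFrame);
}
requestAnimationFrame(drawCompositeFrame);

// the composited canvas could then be captured as a single video stream
var compositedStream = compositeCanvas.captureStream(30);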
With help from @dandavid and TyTy396 I managed to get a little further:
I can capture all the streams I need, but I am having trouble linking them together.
var supportedType = null; // shared with handleStop below
var recordedBlobs = [];   // shared with handleDataAvailable / handleStop below

async function startRecording() {
    let types = [
        "video/webm",
        "audio/webm",
        "video/webm;codecs=vp8",
        "video/webm;codecs=daala",
        "video/webm;codecs=h264",
        "audio/webm;codecs=opus",
        "video/mpeg"
    ];
    for (let i in types) {
        if (MediaRecorder.isTypeSupported(types[i])) {
            supportedType = types[i];
            break;
        }
    }
    if (supportedType == null) {
        console.log("No supported type found for MediaRecorder");
        return;
    }
    let options = {
        mimeType: supportedType,
        audio: true,
        audioBitsPerSecond: 64000,
        videoBitsPerSecond: 1750000, // 1.75Mbps
    };
    // assign inputs to be captured
    stream['canvas'] = document.getElementById('whiteboard'); // this is the drawing canvas
    stream['input1'] = $("video").get(1);                     // this is the video behind the canvas
    stream['input2'] = stream['canvas'].captureStream(30);    // capture canvas stream
    // compensate for Firefox, which still prefixes captureStream on media elements
    if (browserName == "firefox") {
        stream['input1'] = stream['input1'].mozCaptureStream(); // capture video stream using mozCaptureStream
    } else {
        stream['input1'] = stream['input1'].captureStream();    // capture video stream using captureStream
    }
    stream['obj'] = stream['input1'];
    //stream['obj'].addTrack(stream['audio'].getAudioTracks()[0]);
    //stream['output'].srcObject = stream['obj']; // preview video
    recordedBlobs = [];
    try {
        stream['mediaRecorder'] = new MediaRecorder(stream['obj'], options); // can I add all streams?
    } catch (e) {
        alert('MediaRecorder is not supported by this browser.');
        console.error('Exception while creating MediaRecorder:', e);
        return;
    }
    stream['mediaRecorder'].onstop = handleStop;
    stream['mediaRecorder'].ondataavailable = handleDataAvailable;
    stream['mediaRecorder'].start(100); // collect 100ms of data blobs
    timer.reset();
    timer.start({ target: { minutes: 30 } });
    timer.addEventListener('secondsUpdated', function (e) {
        $('.record-menu-time').text(timer.getTimeValues().toString());
    });
    timer.addEventListener('targetAchieved', function (e) {
        $(".record-menu-stop").trigger("click");
    });
}
function handleDataAvailable(event) {
    if (event.data && event.data.size > 0) {
        recordedBlobs.push(event.data);
    }
}

function handleStop(event) {
    stream['superBuffer'] = new Blob(recordedBlobs, { type: supportedType });
    stream['blobUrl'] = window.URL.createObjectURL(stream['superBuffer']);
    var preview = document.createElement("video");
    preview.src = stream['blobUrl'];
    preview.controls = true;
    preview.load();
    $("#screen-recorder-video").replaceWith(preview);
    preview.id = "screen-recorder-video";
    $(preview).addClass("screen-recorder-video");
    $(".record-menu-preview-show").hide();
    $(".record-menu-preview-hide").show();
    $(".video-preview").show();
    $(".hide-preview").show();
    $(".show-preview").hide();
}

function stopRecording() {
    stream['mediaRecorder'].stop();
    stream['output'].controls = true;
    timer.stop();
}

function pauseRecording() {
    console.log("pause recording");
    timer.pause();
    stream['mediaRecorder'].pause();
}

function resumeRecording() {
    console.log("resume recording");
    stream['mediaRecorder'].resume();
    timer.start();
}
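What I think I'm still missing is a way to merge the tracks. Inside startRecording() I imagine it would look roughly like this (only a sketch, which I haven't verified; compositeCanvas would be the hypothetical off-screen compositing canvas from the sketch further up):
// inside startRecording(), instead of recording stream['input1'] directly:
var canvasStream = compositeCanvas.captureStream(30);   // composited video (drawing + video frames)
var videoStream = (browserName == "firefox")
    ? $("video").get(1).mozCaptureStream()              // Firefox still prefixes this
    : $("video").get(1).captureStream();

// one MediaStream holding the composited video track plus the audio from the source video
var mixedTracks = canvasStream.getVideoTracks().concat(videoStream.getAudioTracks());
stream['obj'] = new MediaStream(mixedTracks);

stream['mediaRecorder'] = new MediaRecorder(stream['obj'], options);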
Upvotes: 1
Views: 1422
Reputation:
Maybe consider using the <video> element?
var video = document.createElement('video');
video.setAttribute('playsinline', '');
video.setAttribute('autoplay', '');
video.setAttribute('muted', '');
video.style.width = screenX + 'px';
video.style.height = screenY + 'px';
document.body.appendChild(video);

var facingMode = "user";
var constraints = {
    audio: true,
    video: {
        facingMode: facingMode
    }
};

navigator.mediaDevices.getUserMedia(constraints).then(function success(stream) {
    video.srcObject = stream;
});
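If you go this route, the stream from the success callback can also be handed straight to a MediaRecorder. A minimal sketch (the mime type is just an example, and recorder/chunks are placeholder names):
// inside the success callback above, after video.srcObject = stream;
var recorder = new MediaRecorder(stream, { mimeType: 'video/webm' }); // example mime type
var chunks = [];
recorder.ondataavailable = function(e) {
    if (e.data && e.data.size > 0) {
        chunks.push(e.data);
    }
};
recorder.onstop = function() {
    var blob = new Blob(chunks, { type: 'video/webm' });
    video.srcObject = null;                       // srcObject takes priority over src
    video.src = window.URL.createObjectURL(blob); // play back the recording
};
recorder.start();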
Hope this helps!
Upvotes: 1