Reputation: 83
The problem: I am not able to go back in a video recording by loading old blobs in a sourceBuffer
I am creating a JS video player that records the video from the webcam live and allows going back. The video could be very long (more than one hour). The idea is to make a Camera Assisted Video application (like the ones used in soccer/judo to replay earlier actions).
Since the MediaSource sourceBuffer has a limited duration, I am using IndexedDB to store and retrieve the various Blobs, which are pieces of recorded video.
Currently this is what I am doing to show the live video:
start the recording with a MediaRecorder. When a new Blob is available, store it in the IndexDB
I create from Javascript the video tag (more on why later), create a MediaSource, attach it on the video element and create a source buffer, then start retrieving blobs
I have a global variable i that tells me the id of the next blob to retrieve. Every time a blob is attached to the sourceBuffer, I fetch the next one and append it to the same sourceBuffer.
This works nicely, as I am able to see the livestream of the recording and retrieve all the blobs to save the total video (while using just the sourceBuffer, I cannot have a video longer than 150MB before it gets cut).
I have various methods to let the user decide to go back at what timestamp (keyboard shortcuts, custom timeline) and a function that, given a timestamp, returns me the index of the nearest previous blob.
My problem is, once I know what old blob to load next, I am not able to show it, since the stream just freezes and/or returns error.
Here is what I have tried so far, and none of them works:
Every time, the sourceBuffer gets an error as soon as the first blob is appended (the error is not printed anywhere, since it is only delivered as an event) and the video freezes. How can I do what I want to do?
Here are the relevant pieces of code, you can find the full code here if it is helpful. Thank you very much
/** Blob length produced by the MediaRecorder, in milliseconds. It also paces how often a blob is stored / retrieved. */
const REFRESHRATE = 1000;
/** How many REFRESHRATE periods to wait between recording and showing the first live blob. Total live delay = DELAY_MULTIPLIER * REFRESHRATE. */
const DELAY_MULTIPLIER = 2;
/** Container format/codecs for both the recorder and the SourceBuffer. */
const mimeType = 'video/webm; codecs="vp8, opus"';
/** DOM node that hosts the dynamically created <video> element. */
const videoContainer = document.querySelector(".video-container");
getWebcamStream();
/**
 * Acquire the webcam (and optionally microphone) stream, start a
 * MediaRecorder on it, and persist every recorded chunk via storeBlob()
 * (IndexedDB). After REFRESHRATE * DELAY_MULTIPLIER ms, playback is kicked
 * off through appendToSourceBuffer so the "live" view runs with a small delay.
 * Relies on globals: useAudio, videoBitsPerSecond, REFRESHRATE, DELAY_MULTIPLIER.
 */
function getWebcamStream() {
  navigator.mediaDevices
    .getUserMedia({
      audio: useAudio,
      // FIX: facingMode must live INSIDE the `video` constraint object; as a
      // top-level key of the constraints dictionary it was silently ignored.
      // Also corrected the spelling ("environment", was "enviroment") and
      // dropped { exact: ... } so devices without a rear-facing camera
      // (e.g. laptops) are not rejected outright.
      video: { width: 1920, height: 1080, facingMode: "environment" },
    })
    .then((stream) => {
      // todo we can add multiple videotracks in the future
      const videoTrack = stream.getVideoTracks()[0];
      /** holder of the webcam audio and video stream */
      const mediaStream = new MediaStream();
      mediaStream.addTrack(videoTrack);
      if (useAudio) {
        const audioTrack = stream.getAudioTracks()[0];
        mediaStream.addTrack(audioTrack);
      }
      /** saves the webcam stream to various Blobs */
      const mediaRecorder = new MediaRecorder(
        mediaStream,
        useAudio
          ? {
              audioBitsPerSecond: 128000,
              videoBitsPerSecond: videoBitsPerSecond,
            }
          : { videoBitsPerSecond: videoBitsPerSecond }
      );
      // Emit a "dataavailable" event (one blob) every REFRESHRATE ms.
      mediaRecorder.start(REFRESHRATE);
      // * when data is available to the recorder, persist it to IndexedDB
      mediaRecorder.addEventListener("dataavailable", (e) => {
        const blob = e.data;
        storeBlob(blob);
      });
      // Start feeding the player once the configured live delay has elapsed.
      setTimeout(appendToSourceBuffer, REFRESHRATE * DELAY_MULTIPLIER);
    })
    .catch((err) => {
      // console.error (was console.log) so the failure shows up as an error.
      console.error(err);
      alert(
        "Assicurati che la webcam non sia usata da qualche altro programma, poi ricarica il CARE system"
      );
    });
}
/** source for the video tag @type {MediaSource} */
let mediaSource;
/** buffer to hold various Blobs @type {SourceBuffer} */
let sourceBuffer;
/** id of the NEXT blob to fetch from the db (IndexedDB auto-increment keys start at 1); incremented after each successful fetch in appendToSourceBuffer */
let i = 1;
/** the player element, (re)created by createVideoElement @type {HTMLVideoElement} */
let video;
createVideoElement();
/**
 * (Re)build the player: tear down any previous <video>/MediaSource pair,
 * then create a fresh element, wire its events, attach a new MediaSource
 * and schedule playback to start after one refresh period.
 */
function createVideoElement() {
  if (video) {
    try {
      console.log("Removing previous video element");
      mediaSource.removeSourceBuffer(sourceBuffer);
      sourceBuffer = null;
      // endOfStream() throws InvalidStateError unless the MediaSource is
      // still "open", so guard it instead of calling unconditionally.
      if (mediaSource.readyState === "open") {
        mediaSource.endOfStream();
      }
      mediaSource = null;
      // FIX: release the blob: URL backing the old element — without this,
      // every rebuild (i.e. every seek) leaked one object URL.
      URL.revokeObjectURL(video.src);
      videoContainer.removeChild(video);
    } catch (e) {
      console.error("Error removing previous video element:", e);
    }
  }
  console.log("Creating video element");
  video = videoContainer.appendChild(document.createElement("video"));
  addVideoEvents();
  createMediaSource();
  // Give the source buffer one refresh period to receive data before playing.
  setTimeout(() => video.play().catch(console.error), REFRESHRATE);
}
/**
 * Create a fresh MediaSource, attach it to the current video element via an
 * object URL, and defer SourceBuffer creation until the source is open.
 */
function createMediaSource() {
  console.log("Creating mediaSource");
  mediaSource = new MediaSource();
  const url = URL.createObjectURL(mediaSource);
  video.src = url;
  // FIX: once "sourceopen" fires the media element holds its own reference,
  // so the object URL can (and should) be revoked to avoid leaking one URL
  // per rebuilt player — this is the MDN-recommended lifecycle.
  mediaSource.addEventListener("sourceopen", () => URL.revokeObjectURL(url), {
    once: true,
  });
  // * when mediaSource is ready, create the sourceBuffer
  mediaSource.addEventListener("sourceopen", createSourceBuffer);
}
/**
 * "sourceopen" handler: add the single SourceBuffer to the global
 * mediaSource and chain blob appends — each completed append schedules
 * the next one after REFRESHRATE ms.
 */
function createSourceBuffer() {
  console.log("Creating sourceBuffer with mimeType:", mimeType);
  sourceBuffer = mediaSource.addSourceBuffer(mimeType);
  sourceBuffer.mode = "segments";
  // Once an append finishes, queue the next blob after one refresh period.
  const scheduleNextAppend = () => setTimeout(appendToSourceBuffer, REFRESHRATE);
  sourceBuffer.addEventListener("updateend", scheduleNextAppend);
  // Append failures only surface as events, so log them explicitly.
  sourceBuffer.addEventListener("error", (event) => {
    console.error("Error with sourceBuffer:", event);
  });
}
/**
 * Fetch blob `i` from IndexedDB and append it to the SourceBuffer.
 * Bails out silently while the pipeline is not ready; if the blob does not
 * exist yet (we caught up with the recorder), retries after REFRESHRATE ms.
 */
function appendToSourceBuffer() {
  const ready =
    mediaSource &&
    mediaSource.readyState === "open" &&
    sourceBuffer &&
    !sourceBuffer.updating;
  if (!ready) return;
  const onBlobFound = (blob, timestamp) => {
    i += 1; // advance to the next blob id for the following round
    blob
      .arrayBuffer()
      .then((buffer) => {
        sourceBuffer.appendBuffer(buffer);
        currentTimestamp = timestamp;
        updateTotalTime();
      })
      .catch((err) =>
        console.error("Error appending blob to sourceBuffer:", err)
      );
  };
  // Blob not stored yet: poll again after one refresh period.
  const onBlobMissing = () => setTimeout(appendToSourceBuffer, REFRESHRATE);
  getBlobById(i, onBlobFound, onBlobMissing);
}
/**
 * Rebuild the player and resume playback from the stored blob nearest to
 * `timestamp` (compared against the global startTimestamp/lastTimestamp range).
 */
function moveToTimestamp(timestamp) {
  // A request past the newest blob just means "go back to live".
  if (timestamp > lastTimestamp) return returnLive();
  // Clamp requests that predate the recording to its first moment.
  if (timestamp < startTimestamp) timestamp = startTimestamp;
  getNearestBlobByTimestamp(timestamp, (blob, timestamp, id) => {
    // NOTE(review): restarting the feed from blob `id` likely fails because
    // only the recorder's FIRST blob carries the WebM initialization segment
    // (EBML header + track metadata); later blobs are bare clusters, so the
    // SourceBuffer errors when one is appended first — confirm, and consider
    // appending blob 1 before blob `id` after the rebuild.
    i = id;
    createVideoElement();
    setTimeout(appendToSourceBuffer, REFRESHRATE * DELAY_MULTIPLIER);
  });
}
/** Jump back to the live edge of the recording, minus the configured display delay. */
function returnLive() {
  const liveEdge = lastTimestamp - REFRESHRATE * DELAY_MULTIPLIER;
  moveToTimestamp(liveEdge);
}
EDIT: Following the advice of VC.One, I tried to just load the single requested blob in a different video tag and see if I could play it. Surprise: I cannot. So now the question is: how can I store the blobs from a MediaRecorder so that they are independent from one another and I can load them independently?
New code that shows what happens:
/**
 * Debug variant: instead of rebuilding the shared player, try to play the
 * single retrieved blob in a throwaway <video> element to see whether a
 * lone blob is playable at all.
 */
function moveToTimestamp(timestamp) {
  if (timestamp > lastTimestamp) return returnLive();
  if (timestamp < startTimestamp) timestamp = startTimestamp;
  getNearestBlobByTimestamp(timestamp, (blob, timestamp, id) => {
    // Previous approach (rebuild the shared player) kept for reference:
    // i = id;
    // createVideoElement();
    // setTimeout(appendToSourceBuffer, REFRESHRATE);
    console.log("creating new video element");
    const video = videoContainer.appendChild(document.createElement("video"));
    const mediaSource = new MediaSource();
    const url = URL.createObjectURL(mediaSource);
    video.src = url;
    console.log(blob);
    // * when mediaSource is ready, create the sourceBuffer
    mediaSource.addEventListener("sourceopen", () => {
      const sourceBuffer = mediaSource.addSourceBuffer(mimeType);
      sourceBuffer.mode = "segments";
      // NOTE(review): a lone mid-recording blob carries no WebM
      // initialization segment (only the recorder's first blob does), so
      // appending it by itself is expected to error — TODO confirm by
      // prepending blob 1 before this one.
      blob.arrayBuffer().then((arrayBuffer) => {
        console.log(arrayBuffer);
        sourceBuffer.appendBuffer(arrayBuffer);
      });
      // * only works if it is the first blob
      setTimeout(() => video.play().catch(console.error), 1000);
      // Clean up the throwaway element after 10 s.
      setTimeout(() => videoContainer.removeChild(video), 10000);
    });
  });
}
Upvotes: 1
Views: 78