Reputation: 28847
Run the code snippet first, then read the description — it will show you the structure of what I'm building.
I want to record, play, and save video in the 2nd video element. The problem I am facing is that the stream runs in the 1st video element, but I am unable to record and save the video.
/* Shared frame for both the live-preview and playback <video> elements. */
.video {
border: 1px solid gray;
box-shadow: 3px 4px lightgray;
}
<link href="https://maxcdn.bootstrapcdn.com/bootstrap/4.0.0/css/bootstrap.min.css" rel="stylesheet"/>
<div style="text-align:center">
<h1>Welcome to WebRTC</h1>
<!-- #video: live camera preview; its srcObject is assigned in ngOnInit -->
<video class="video" #video autoplay controls></video>
<!-- #recordedVideo: playback element; its src is set to the recording's blob URL on stop -->
<video class="video" style="width:360;" autoplay controls #recordedVideo></video>
<br>
<!-- Each button calls the matching component method -->
<button class="btn btn-warning" (click)="startRecording()">Start</button>
<button class="btn btn-warning" (click)="stopRecording()">Stop</button>
<button class="btn btn-warning" (click)="playRecording()">Play</button>
</div>
What I did here, building on Luis Estevez's code, was declare the event handlers inside the startRecording method, because when I tried to push a stream chunk into the blob array it raised an error saying the push method does not exist — even though I had initialized the array after declaring it.
/**
 * Creates a MediaRecorder for the given stream, wires its event handlers
 * inline (so `this` is the component, not the recorder), and starts it.
 * Chunks arrive every 100 ms via ondataavailable and are assembled into
 * one Blob when the recorder stops.
 */
startRecording(stream) {
let options = { mimeType: 'video/webm' }
this.recordedBlobs = []
console.log(this.recordedBlobs)
try {
this.mediaRecorder = new MediaRecorder(stream, options)
} catch (e0) {
// Construction failed (e.g. unsupported mimeType). Bail out here:
// continuing would dereference an undefined (or stale) recorder below.
console.log('Try different mimeType')
return
}
console.log('Created MediaRecorder', this.mediaRecorder, 'with options', options)
// Arrow functions keep `this` bound to the component instance.
this.mediaRecorder.onstop = (event) => {
console.log('Recorder stopped: ', event)
// Join all buffered chunks into a single playable Blob.
const videoBuffer = new Blob(this.recordedBlobs, { type: 'video/webm' })
this.downloadUrl = window.URL.createObjectURL(videoBuffer) // you can download with <a> tag
this.recordVideoElement = this.recordVideoElementRef.nativeElement
this.recordVideoElement.src = this.downloadUrl
}
this.mediaRecorder.ondataavailable = (event) => {
// Skip empty chunks (fired when no data accumulated in the timeslice).
if (event.data && event.data.size > 0) {
this.recordedBlobs.push(event.data)
}
}
this.mediaRecorder.start(100) // emit a dataavailable chunk every 100 ms
console.log('MediaRecorder started', this.mediaRecorder)
}
Thanks Luis Estevez :)
Upvotes: 5
Views: 4732
Reputation: 28847
// Template references to the two <video> tags (#recordedVideo / #video).
@ViewChild('recordedVideo') recordVideoElementRef: ElementRef
@ViewChild('video') videoElementRef: ElementRef
// Native video elements, cached from the refs in ngOnInit.
videoElement: HTMLVideoElement
recordVideoElement: HTMLVideoElement
// Recorder plus the chunk buffer it fills while recording.
mediaRecorder: MediaRecorder
recordedBlobs: Blob[]
// Drives which button the template shows via *ngIf.
isRecording: boolean = false
// Object URL of the finished recording (assigned in the onstop handler).
downloadUrl: string
// Live camera stream obtained from getUserMedia in ngOnInit.
stream: MediaStream
constructor() {
}
/**
 * Caches both <video> elements and starts the live camera preview.
 * The stream is kept on the component so startRecording() can reuse it.
 */
async ngOnInit() {
this.videoElement = this.videoElementRef.nativeElement
this.recordVideoElement = this.recordVideoElementRef.nativeElement
navigator.mediaDevices.getUserMedia({
video: {
width: 360
}
}).then(stream => {
this.stream = stream
this.videoElement.srcObject = this.stream
}).catch(err => {
// Permission denied or no camera: log it instead of leaving an
// unhandled promise rejection.
console.log('getUserMedia failed: ', err)
})
}
/**
 * Starts a new recording of the live stream. Resets the chunk buffer,
 * creates the recorder, wires its handlers, then starts it.
 */
startRecording() {
this.recordedBlobs = []
let options: MediaRecorderOptions = { mimeType: 'video/webm' }
try {
this.mediaRecorder = new MediaRecorder(this.stream, options)
} catch (err) {
// Unsupported mimeType or missing stream — do not continue with an
// unusable (possibly undefined or stale) recorder.
console.log(err)
return
}
// Attach handlers BEFORE start() so no event can be missed.
this.onDataAvailableEvent()
this.onStopRecordingEvent()
// start() without a timeslice: one dataavailable event fires on stop().
this.mediaRecorder.start()
// Set explicitly rather than toggling, so repeated calls cannot desync
// the flag from the recorder's actual state.
this.isRecording = true
}
/**
 * Stops the recorder; this triggers the final dataavailable event and
 * then the onstop handler, which assembles the playback Blob.
 */
stopRecording() {
this.mediaRecorder.stop()
// Set explicitly (not toggled) so a stray second call cannot flip the
// flag back to "recording".
this.isRecording = false
console.log('Recorded Blobs: ', this.recordedBlobs)
}
/**
 * Replays the last recording in the second <video> element.
 * Logs and returns when no chunks have been captured yet.
 */
playRecording() {
const hasRecording = this.recordedBlobs && this.recordedBlobs.length
if (!hasRecording) {
console.log('cannot play.')
return
}
this.recordVideoElement.play()
}
/**
 * Installs the dataavailable handler: every non-empty chunk is buffered
 * in recordedBlobs, to be joined into one Blob when recording stops.
 * (The original try/catch was a no-op — assigning a property cannot throw.)
 */
onDataAvailableEvent() {
this.mediaRecorder.ondataavailable = (event: BlobEvent) => {
if (event.data && event.data.size > 0) {
this.recordedBlobs.push(event.data)
}
}
}
/**
 * Installs the onstop handler: joins the buffered chunks into a single
 * Blob and points the playback <video> at its object URL.
 */
onStopRecordingEvent() {
this.mediaRecorder.onstop = (event: Event) => {
// Release the previous recording's object URL before creating a new
// one — createObjectURL allocations live until explicitly revoked.
if (this.downloadUrl) {
window.URL.revokeObjectURL(this.downloadUrl)
}
const videoBuffer = new Blob(this.recordedBlobs, { type: 'video/webm' })
this.downloadUrl = window.URL.createObjectURL(videoBuffer) // you can download with <a> tag
this.recordVideoElement.src = this.downloadUrl
}
}
}
<div style="text-align:center">
<h1>Welcome to WebRTC</h1>
<!-- #video: live camera preview; srcObject is assigned in ngOnInit -->
<video class="video" #video autoplay controls></video>
<span class="m-1"></span>
<!-- #recordedVideo: playback element; src is set to the recording's blob URL on stop -->
<video class="video" style="width:360 !important;" controls #recordedVideo></video>
<br>
<!-- isRecording swaps the Start/Stop buttons -->
<button class="btn btn-primary btn-lg" *ngIf="!isRecording" (click)="startRecording()">Start Recording</button>
<button class="btn btn-warning btn-lg" *ngIf="isRecording" (click)="stopRecording()">Stop Recording</button>
</div>
Note: if you get an error saying MediaRecorder is not found, install the type definitions with
npm i @types/dom-mediacapture-record
Be sure to update your Chrome browser too.
Have a Good day
Upvotes: 1
Reputation: 1407
You didn't 'really' record the stream, you just copied the stream object, not the event data coming from the stream.
Use MediaRecorder
and pass the stream as a constructor argument. Grab the video blob from the event handler ondataavailable. Join the recorded array of blobs into a new Blob. From there you can get a URL using createObjectURL(blob).
The following snippet is pseudo code:
** TypeScript doesn't recognize 'MediaRecorder' out of the box, so you'll have to give it the type `any` (or install the MediaRecorder type definitions).
mediaRecorder: any; // typed 'any' because the TS lib used here lacks MediaRecorder typings
recordedBlobs: Blob[]; // chunks pushed by handleDataAvailable while recording
downloadUrl: string; // object URL of the assembled recording (set in handleStop)
/** Buffers every non-empty chunk emitted by the MediaRecorder. */
handleDataAvailable(event) {
const chunk = event.data;
if (chunk && chunk.size > 0) {
this.recordedBlobs.push(chunk);
}
}
/**
 * Joins the buffered chunks into one Blob and points the playback
 * <video> element at its object URL.
 */
handleStop(event) {
console.log('Recorder stopped: ', event);
const recording = new Blob(this.recordedBlobs, {type: 'video/webm'});
const url = window.URL.createObjectURL(recording); // you can download with <a> tag
this.downloadUrl = url;
this.recordVideoElement.src = url;
}
/**
 * Creates a MediaRecorder for the stream, wires the data/stop handlers,
 * and starts it with a 100 ms timeslice.
 */
startRecording(stream) {
let options = {mimeType: 'video/webm'};
this.recordedBlobs = [];
try {
this.mediaRecorder = new MediaRecorder(stream, options);
} catch (e0) {
console.log('Try different mimeType');
return; // no usable recorder — do not dereference it below
}
console.log('Created MediaRecorder', this.mediaRecorder, 'with options', options);
// bind(this) is essential: assigned unbound, `this` inside the handlers
// would be the MediaRecorder itself, so this.recordedBlobs would be
// undefined and .push would fail — the exact error described above.
this.mediaRecorder.onstop = this.handleStop.bind(this);
this.mediaRecorder.ondataavailable = this.handleDataAvailable.bind(this);
this.mediaRecorder.start(100); // collect 100ms of data
console.log('MediaRecorder started', this.mediaRecorder);
}
/**
 * Stops the recorder (triggering the final chunk and the stop handler)
 * and enables the playback element's controls.
 */
stopRecording() {
this.mediaRecorder.stop();
console.log('Recorded Blobs: ', this.recordedBlobs);
const player = this.recordVideoElement;
player.controls = true;
}
/**
 * Plays back the recording; refuses (with a log) when nothing has been
 * recorded yet.
 */
playRecording() {
const chunkCount = this.recordedBlobs.length;
if (chunkCount === 0) {
console.log('cannot play.');
return;
}
this.recordVideoElement.play();
}
/**
 * Requests the camera, previews the stream in the first <video> element,
 * and immediately starts recording it.
 */
async ngOnInit() {
navigator.mediaDevices.getUserMedia({ video: { width: 360 } }).then(stream => {
this.videoElement.srcObject = stream
this.startRecording(stream);
}).catch(err => {
// Permission denied or no camera: log instead of leaving an
// unhandled promise rejection.
console.log('getUserMedia failed: ', err);
});
}
Upvotes: 5