Michel

Reputation: 11755

Capturing audio data (using JavaScript) and uploading it to a server as MP3

Following a number of resources on the internet, I am trying to build a simple web page where I can record something (my voice), then make an MP3 file out of the recording, and finally upload that file to a server.

At this point I can record and play back, but I haven't gotten as far as uploading; it seems I cannot even make an MP3 file locally. Can someone tell me what I am doing wrong, or doing in the wrong order?

Below is all the code I have at this point.

<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <meta http-equiv="X-UA-Compatible" content="ie=edge">
    <title>Document</title>
</head>
<body>
<div>
    <h2>Audio record and playback</h2>
    <p>
        <button id=startRecord><h3>Start</h3></button>
        <button id=stopRecord disabled><h3>Stop</h3></button>
        <audio id="player" controls></audio>
        <a id=audioDownload></a>
    </p>
</div>

<script>
  var player = document.getElementById('player');

  var handleSuccess = function(stream) {
    rec = new MediaRecorder(stream);

    rec.ondataavailable = e => {
        audioChunks.push(e.data);
        if (rec.state == "inactive") {
            let blob = new Blob(audioChunks,{type:'audio/x-mpeg-3'});
            player.src = URL.createObjectURL(blob);
            player.controls=true;
            player.autoplay=true;
            // audioDownload.href = player.src;
            // audioDownload.download = 'sound.data';
            // audioDownload.innerHTML = 'Download';
            mp3Build();
        }
    }

    player.src = stream;
  };

  navigator.mediaDevices.getUserMedia({audio:true/*, video: false */})
      .then(handleSuccess);

startRecord.onclick = e => {
  startRecord.disabled = true;
  stopRecord.disabled=false;
  audioChunks = [];
  rec.start();
}

stopRecord.onclick = e => {
  startRecord.disabled = false;
  stopRecord.disabled=true;
  rec.stop();
}


var ffmpeg = require('ffmpeg');

function mp3Build() {
  try {
    var process = new ffmpeg('sound.data');
    process.then(function (audio) {
      // Callback mode.
      audio.fnExtractSoundToMP3('sound.mp3', function (error, file) {
        if (!error) {
          console.log('Audio file: ' + file);
          audioDownload.href = player.src;
          audioDownload.download = 'sound.mp3';
          audioDownload.innerHTML = 'Download';
        } else {
          console.log('Error-fnExtractSoundToMP3: ' + error);
        }
      });
    }, function (err) {
      console.log('Error: ' + err);
    });
  } catch (e) {
    console.log(e.code);
    console.log(e.msg);
  }
}

</script>

</body>
</html>

When I try to investigate what is happening using the debugger in the web console, on the line:

var process = new ffmpeg('sound.data');

I get this message:

Paused on exception
TypeError: ffmpeg is not a constructor.

And on the line:

var ffmpeg = require('ffmpeg');

I get this message:

Paused on exception
ReferenceError: require is not defined.

Besides, when I watch the expression ffmpeg, I can see:

ffmpeg: undefined

After some further investigation, and using browserify, I now use the following code:

<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <meta http-equiv="X-UA-Compatible" content="ie=edge">
    <title>Document</title>
</head>
<body>
<div>
    <h2>Audio record and playback</h2>
    <p>
        <button id=startRecord><h3>Start</h3></button>
        <button id=stopRecord disabled><h3>Stop</h3></button>
        <audio id="player" controls></audio>
        <a id=audioDownload></a>
    </p>
</div>

<script src="bundle.js"></script>
<script>
  var player = document.getElementById('player');

  var handleSuccess = function(stream) {
    rec = new MediaRecorder(stream);

    rec.ondataavailable = e => {
        if (rec.state == "inactive") {
            let blob = new Blob(audioChunks,{type:'audio/x-mpeg-3'});
            //player.src = URL.createObjectURL(blob);
            //player.srcObject = URL.createObjectURL(blob);
            //player.srcObject = blob;
            player.srcObject = stream;
            player.controls=true;
            player.autoplay=true;
            // audioDownload.href = player.src;
            // audioDownload.download = 'sound.data';
            // audioDownload.innerHTML = 'Download';
            mp3Build();
        }
    }

    //player.src = stream;
    player.srcObject = stream;
  };

  navigator.mediaDevices.getUserMedia({audio:true/*, video: false */})
      .then(handleSuccess);

startRecord.onclick = e => {
  startRecord.disabled = true;
  stopRecord.disabled=false;
  audioChunks = [];
  rec.start();
}

stopRecord.onclick = e => {
  startRecord.disabled = false;
  stopRecord.disabled=true;
  rec.stop();
}


var ffmpeg = require('ffmpeg');

function mp3Build() {
  try {
    var process = new ffmpeg('sound.data');
    process.then(function (audio) {
      // Callback mode.
      audio.fnExtractSoundToMP3('sound.mp3', function (error, file) {
        if (!error) {
          console.log('Audio file: ' + file);
          //audioDownload.href = player.src;
          audioDownload.href = player.srcObject;
          audioDownload.download = 'sound.mp3';
          audioDownload.innerHTML = 'Download';
        } else {
          console.log('Error-fnExtractSoundToMP3: ' + error);
        }
      });
    }, function (err) {
      console.log('Error: ' + err);
    });
  } catch (e) {
    console.log(e.code);
    console.log(e.msg);
  }
}

</script>

</body>
</html>

That solved the problem of the expression ffmpeg being undefined.

But the playback is no longer working. I may not be doing the right thing with player.srcObject, and maybe some other things too.

When I use this line:

player.srcObject = URL.createObjectURL(blob);

I get this message:

Paused on exception
TypeError: Value being assigned to HTMLMediaElement.srcObject is not an object.

And when I use this line:

player.srcObject = blob;

I get this message:

Paused on exception
TypeError: Value being assigned to HTMLMediaElement.srcObject does not implement interface MediaStream.

Finally, if I use this:

player.srcObject = stream;

I do not get any error message, but playback of the voice recording still does not work.

Upvotes: 0

Views: 2238

Answers (1)

evgeni fotia

Reputation: 4810

require doesn't work in the browser.

You should use browserify; this is its GitHub page: https://github.com/browserify/browserify
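
A minimal sketch of the usual browserify workflow, assuming an entry file named main.js (the file name and the window.mp3Build exposure are just examples): put the require call in that file, then bundle it so the browser only ever loads bundle.js.

    // main.js -- entry file for browserify (file name is an assumption)
    // The require() call is resolved at bundle time, not in the browser.
    var ffmpeg = require('ffmpeg');

    // Expose what the page needs on window so the inline script can call it.
    window.mp3Build = function () {
      var process = new ffmpeg('sound.data');
      process.then(function (audio) {
        audio.fnExtractSoundToMP3('sound.mp3', function (error, file) {
          if (!error) console.log('Audio file: ' + file);
          else console.log('Error-fnExtractSoundToMP3: ' + error);
        });
      }, function (err) {
        console.log('Error: ' + err);
      });
    };

Bundling is then done on the command line with browserify main.js -o bundle.js, and the page loads the resulting bundle.js with a script tag, as in the second snippet above.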

Also, unless you really need a specific format, it's better to use an audio type that the browser's MediaRecorder supports when creating the new Blob; check this link: https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder/isTypeSupported
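
For example, a sketch of picking a MIME type the recorder actually supports, slotted into the existing handleSuccess (the candidate list is just an example; what is available varies per browser):

    // Pick a container/codec the browser's MediaRecorder can actually produce.
    var mimeType = ['audio/webm;codecs=opus', 'audio/ogg;codecs=opus', 'audio/mp4']
        .find(function (t) { return MediaRecorder.isTypeSupported(t); }) || '';

    rec = new MediaRecorder(stream, mimeType ? { mimeType: mimeType } : {});

    rec.ondataavailable = function (e) { audioChunks.push(e.data); };

    rec.onstop = function () {
      // Build the Blob with the type the recorder really used,
      // instead of hard-coding 'audio/x-mpeg-3'.
      var blob = new Blob(audioChunks, { type: rec.mimeType });
      player.src = URL.createObjectURL(blob);
    };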

Last of all, there is no need for player.src = stream; in your code (I am speaking of the second one), and it will also give you an error; check this: https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/srcObject#Basic_example
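
In short (a sketch of that distinction, following the MDN example): a live MediaStream is assigned to srcObject, while a finished recording is wrapped in a Blob, turned into an object URL, and assigned to src.

    // While recording: monitor the live stream.
    player.srcObject = stream;

    // After recording: play back the captured Blob.
    player.srcObject = null;
    player.src = URL.createObjectURL(blob);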

Here is a small edit:

  var handleSuccess = function(stream) {
    rec = new MediaRecorder(stream);

    rec.ondataavailable = e => {
      audioChunks.push(e.data); // collect the recorded chunks so the Blob is not empty
      if (rec.state == "inactive") {
        let blob = new Blob(audioChunks, {type:'audio/x-mpeg-3'});
        player.src = URL.createObjectURL(blob);
        player.controls = true;
        player.autoplay = true;
        audioDownload.href = player.src;
        audioDownload.download = 'sound.data';
        audioDownload.innerHTML = 'Download';
        mp3Build();
      }
    }

    // No need to put anything here
  };
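
Since the original goal was to upload the recording to a server, a minimal upload sketch could be called once the Blob exists; the /upload URL and the 'audio' field name below are placeholders for whatever your server actually expects:

    function uploadRecording(blob) {
      var form = new FormData();
      // 'audio' and 'recording.webm' are placeholder names; match them to your backend.
      form.append('audio', blob, 'recording.webm');

      return fetch('/upload', { method: 'POST', body: form })
        .then(function (response) {
          if (!response.ok) throw new Error('Upload failed: ' + response.status);
          console.log('Upload done');
        });
    }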

Upvotes: 1
