sqwk

Reputation: 2699

Audio Level Meter for WebRTC Stream

I would like to create a decibel meter for the audio that is playing in a video element. The video element is playing a WebRTC stream.

At the moment, WebRTC streams cannot be passed into a Web Audio AnalyserNode. (Although this might change soon …; see Web Audio API analyser node getByteFrequencyData returning blank array.)

Is there currently another way to get decibel information from a remote MediaStream?

Upvotes: 3

Views: 9798

Answers (2)

sqwk

Reputation: 2699

Chrome 50 has been released: as of the 13th of April 2016, using an AnalyserNode with a MediaStreamAudioSourceNode works fine for getting audio levels. The resulting audioLevels values can be animated or simply passed into an HTML meter element (a usage sketch follows the code below).

var _mediaStream    = SOME_LOCAL_OR_RTP_MEDIASTREAM;
var _audioContext   = new AudioContext();
var _audioAnalyser  = [];
var _freqs          = [];
var audioLevels     = [0];

// Route the stream through a gain node and split it so that each channel
// gets its own analyser.
var _audioSource          = _audioContext.createMediaStreamSource(_mediaStream);
var _audioGain1           = _audioContext.createGain();
var _audioChannelSplitter = _audioContext.createChannelSplitter(_audioSource.channelCount);

_audioSource.connect(_audioGain1);
_audioGain1.connect(_audioChannelSplitter);
_audioGain1.connect(_audioContext.destination);

// One analyser and one frequency-data buffer per channel.
for (let i = 0; i < _audioSource.channelCount; i++) {
    _audioAnalyser[i]                       = _audioContext.createAnalyser();
    _audioAnalyser[i].minDecibels           = -100;
    _audioAnalyser[i].maxDecibels           = 0;
    _audioAnalyser[i].smoothingTimeConstant = 0.8;
    _audioAnalyser[i].fftSize               = 32;
    _freqs[i]                               = new Uint8Array(_audioAnalyser[i].frequencyBinCount);

    _audioChannelSplitter.connect(_audioAnalyser[i], i, 0);
}

function calculateAudioLevels() {
    setTimeout(() => {
        for (let channelI = 0; channelI < _audioAnalyser.length; channelI++) {
            _audioAnalyser[channelI].getByteFrequencyData(_freqs[channelI]);
            // Take the loudest frequency bin and normalise it to 0..1.
            let value = 0;
            for (let freqBinI = 0; freqBinI < _audioAnalyser[channelI].frequencyBinCount; freqBinI++) {
                value = Math.max(value, _freqs[channelI][freqBinI]);
            }
            audioLevels[channelI] = value / 256;
        }
        requestAnimationFrame(calculateAudioLevels);
    }, 1000 / 15); // throttle to roughly 15 fps; more is not needed for a meter
}

// Start the measurement loop.
calculateAudioLevels();
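
To actually display the result, the normalised audioLevels value can be fed straight into a meter element. The sketch below is only an illustration of mine, not part of the original answer: the level-meter element id is assumed, and the decibel conversion simply maps the 0..1 level back onto the minDecibels/maxDecibels range configured above.

// Assumes the page contains: <meter id="level-meter" min="0" max="1" value="0"></meter>
var _meterElement = document.querySelector('#level-meter');

function renderAudioLevel() {
    var level = audioLevels[0]; // normalised 0..1 value produced by calculateAudioLevels()

    // getByteFrequencyData maps [minDecibels, maxDecibels] linearly onto 0..255,
    // so the normalised level can be mapped back onto that decibel range.
    var decibels = -100 + level * 100; // with minDecibels = -100 and maxDecibels = 0

    _meterElement.value = level;
    _meterElement.title = decibels.toFixed(1) + ' dB';

    requestAnimationFrame(renderAudioLevel);
}
renderAudioLevel();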

Upvotes: 6

Behiry

Reputation: 573

This is a good example:

https://webrtc.github.io/samples/src/content/getusermedia/volume/

And this is its source code:

https://github.com/webrtc/samples/tree/gh-pages/src/content/getusermedia/volume

And here is a sample:

async function recordAudio() {
    try {
        window.AudioContext = window.AudioContext || window.webkitAudioContext;
        window.audioContext = new AudioContext();

        const instantMeter = document.querySelector('#sound-meter');
        const constraints = {'video': false, 'audio': true};
        const stream = await navigator.mediaDevices.getUserMedia(constraints);
        window.stream = stream;

        // SoundMeter is defined in the sample's soundmeter.js (linked above).
        const soundMeter = window.soundMeter = new SoundMeter(window.audioContext);
        soundMeter.connectToSource(stream, function(e) {
            if (e) {
                alert(e);
                return;
            }
            // Update the meter element a few times per second.
            setInterval(() => {
                instantMeter.value = soundMeter.instant.toFixed(2);
            }, 200);
        });

        $('#sound-meter').show();
        $('#audio-icon').hide();
    } catch (error) {
        console.error('Error recording audio.', error);
    }
}
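
SoundMeter itself is not a browser API; it is defined in soundmeter.js in the repository linked above. As a rough, simplified sketch of the idea (not the sample's exact code, and using a hypothetical SimpleSoundMeter name), a helper like this computes an RMS level from the raw samples with a ScriptProcessorNode; the same connectToSource call also works with a remote WebRTC stream, not just one obtained from getUserMedia:

function SimpleSoundMeter(context) {
    this.context = context;
    this.instant = 0.0;
    this.script = context.createScriptProcessor(2048, 1, 1);

    this.script.onaudioprocess = (event) => {
        const input = event.inputBuffer.getChannelData(0);
        let sum = 0;
        for (let i = 0; i < input.length; i++) {
            sum += input[i] * input[i];
        }
        this.instant = Math.sqrt(sum / input.length); // RMS of the current buffer
    };
}

SimpleSoundMeter.prototype.connectToSource = function(stream, callback) {
    try {
        this.mic = this.context.createMediaStreamSource(stream);
        this.mic.connect(this.script);
        // The processor must be connected for onaudioprocess to keep firing.
        this.script.connect(this.context.destination);
        if (callback) callback(null);
    } catch (e) {
        if (callback) callback(e);
    }
};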

Upvotes: 1
