Luca Reghellin
Luca Reghellin

Reputation: 8125

How to connect one or two musical instruments to the Web Audio API and split the stereo signal?

EDIT 3:

It was only a Firefox issue, works in Chrome, so problem solved, see the answer below. Thank you Chris for your help!

EDIT 2:

Following Chris's advice I changed one line in the getUserMedia call, but it doesn't work for now; maybe I'm using the wrong syntax, but this feature is undocumented:

// Request the raw (unprocessed) input signal. The standard
// MediaTrackConstraints syntax puts `echoCancellation: false` directly on
// the audio constraint set; the `optional: [{...}]` array is a nonstandard
// legacy Chrome form that other browsers ignore — likely why this snippet
// "doesn't work".
if(navigator.getUserMedia){
  navigator.getUserMedia(
    { audio: { echoCancellation: false } }
    ,function(stream){ init_stream(stream); }
    ,function(err){ console.log('The following gUM error occured: ' + err); }
  );
}

Also, you can now follow my progress here:

http://jsfiddle.net/stratboy/aapafrbu/1/

EDIT 1:

I'm currently playing a keyboard > mixer > Behringer UCA222 > Mac (USB). My current code to see some data is the following. I see data changing for the left channel but not for the right one, regardless of what I do on the mixer. What could be the reason?

// Normalize vendor prefixes once, then rely on the normalized globals below.
window.AudioContext = window.AudioContext || window.webkitAudioContext;

navigator.getUserMedia = (navigator.getUserMedia ||
                          navigator.webkitGetUserMedia ||
                          navigator.mozGetUserMedia ||
                          navigator.msGetUserMedia);


// window.AudioContext was already normalized above, so repeating the
// `|| window.webkitAudioContext` fallback here was redundant.
var audiocontext = new window.AudioContext();
// One analyser per channel; the splitter fans the stereo stream out to them.
var analyser_left = audiocontext.createAnalyser();
var analyser_right = audiocontext.createAnalyser();
var splitter = audiocontext.createChannelSplitter(2);
var index = 0; // NOTE(review): appears unused in this file — confirm before removing.

// Wire the incoming stereo stream into the analysis graph:
// source -> splitter -> (analyser_left, analyser_right), then start polling.
function init_stream(stream){
  // Kept on `window` (as before) so the source node stays reachable and is
  // easy to inspect from the console.
  window.audiosource = audiocontext.createMediaStreamSource(stream);

  window.audiosource.connect(splitter);

  // Splitter output 0 carries the left channel, output 1 the right.
  splitter.connect(analyser_left, 0);
  splitter.connect(analyser_right, 1);

  listen();
}

// Dump one frame of time-domain samples from an analyser into the element
// matching `selector`. Extracted because the left/right bodies were exact
// copy-paste duplicates. Setting fftSize every call is idempotent but only
// strictly needed once; kept here so the helper is self-contained.
function dump_channel(analyser, selector){
  analyser.fftSize = 256;
  var dataArray = new Uint8Array(analyser.frequencyBinCount);
  analyser.getByteTimeDomainData(dataArray);
  $(selector).html(JSON.stringify(dataArray));
}

// Poll both analysers once per animation frame. The Web Audio API exposes
// no "note played" events, so polling is the way to observe the signal.
function listen(){
  requestAnimationFrame(listen);

  dump_channel(analyser_left, '.monitor_left');
  dump_channel(analyser_right, '.monitor_right');
}

// Prefer the standard promise-based API when available; fall back to the
// legacy callback-based navigator.getUserMedia (normalized earlier with
// vendor prefixes). Behavior is the same either way: hand the stream to
// init_stream, or log the failure.
if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
  navigator.mediaDevices.getUserMedia({ audio: true })
    .then(function(stream){ init_stream(stream); })
    .catch(function(err){ console.log('The following gUM error occured: ' + err); });
} else if (navigator.getUserMedia) {
  navigator.getUserMedia(
    { audio: true }
    ,function(stream){ init_stream(stream); }
    ,function(err){ console.log('The following gUM error occured: ' + err); }
  );
}

I'd like to play my guitar into the computer and analyze the sounds via the web audio API. I know it's possible to use the microphone, but what about a real instrument plugged in?

Upvotes: 0

Views: 353

Answers (2)

Luca Reghellin
Luca Reghellin

Reputation: 8125

So, the original question was about connecting more instruments/real time sources (for example an instrument and a microphone) to the web audio api and analyze the stream.

The answer is almost yes :P As Chris wrote, it's not possible for now to get several instruments, but it's possible to split a stereo signal! So what I did is go through a mixer with 2 sources (say, a keyboard and a microphone), put one on the left channel and one on the right, then connect to a USB audio card of some kind (I'm currently using a cheap Behringer UCA222).

It turns out that Firefox still seems unable to split the signal, but Chrome can do it, and that's enough for me. Some working code follows, and it's quite self-explanatory:

// Normalize vendor prefixes once, then rely on the normalized globals below.
window.AudioContext = window.AudioContext || window.webkitAudioContext;

navigator.getUserMedia = (navigator.getUserMedia ||
                          navigator.webkitGetUserMedia ||
                          navigator.mozGetUserMedia ||
                          navigator.msGetUserMedia);


// window.AudioContext was already normalized above, so repeating the
// `|| window.webkitAudioContext` fallback here was redundant.
var audiocontext = new window.AudioContext();
// One analyser per channel; the splitter fans the stereo stream out to them.
var analyser_left = audiocontext.createAnalyser();
var analyser_right = audiocontext.createAnalyser();
var splitter = audiocontext.createChannelSplitter(2);
var index = 0; // NOTE(review): appears unused in this file — confirm before removing.

// Wire the incoming stereo stream into the analysis graph:
// source -> splitter -> (analyser_left, analyser_right), then start polling.
function init_stream(stream){
  // Kept on `window` (as before) so the source node stays reachable and is
  // easy to inspect from the console.
  window.audiosource = audiocontext.createMediaStreamSource(stream);

  window.audiosource.connect(splitter);

  // Splitter output 0 carries the left channel, output 1 the right.
  splitter.connect(analyser_left, 0);
  splitter.connect(analyser_right, 1);

  listen();
}

// Dump one frame of time-domain samples from an analyser into the element
// matching `selector`. Extracted because the left/right bodies were exact
// copy-paste duplicates. Setting fftSize every call is idempotent but only
// strictly needed once; kept here so the helper is self-contained.
function dump_channel(analyser, selector){
  analyser.fftSize = 256;
  var dataArray = new Uint8Array(analyser.frequencyBinCount);
  analyser.getByteTimeDomainData(dataArray);
  $(selector).html(JSON.stringify(dataArray));
}

// Poll both analysers once per animation frame. The Web Audio API exposes
// no "note played" events, so polling is the way to observe the signal.
function listen(){
  requestAnimationFrame(listen);

  dump_channel(analyser_left, '.monitor_left');
  dump_channel(analyser_right, '.monitor_right');
}


// Prefer the standard promise-based API when available; fall back to the
// legacy callback-based navigator.getUserMedia (normalized earlier with
// vendor prefixes). Behavior is the same either way: hand the stream to
// init_stream, or log the failure.
if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
  navigator.mediaDevices.getUserMedia({ audio: true })
    .then(function(stream){ init_stream(stream); })
    .catch(function(err){ console.log('The following gUM error occured: ' + err); });
} else if (navigator.getUserMedia) {
  navigator.getUserMedia(
    { audio: true }
    ,function(stream){ init_stream(stream); }
    ,function(err){ console.log('The following gUM error occured: ' + err); }
  );
}

The final fiddle to test is here: jsfiddle.net/stratboy/aapafrbu

You can see bits changing while playing.

As a web programmer, one thing I didn't understand at first is that there are no 'onAudioSomething' events to catch, say, when a single note is played on a keyboard. But maybe it's quite logical, since I guess such events would often be useless on a piece of music, as there are usually no 'zero points' in the audio gain. So the way one can analyse the source is by polling via requestAnimationFrame().

Hope it helps some other explorer out there :)

Upvotes: 0

cwilso
cwilso

Reputation: 13928

Yes. I do this all the time (use non-mic sources). Just get a USB audio interface that supports guitar/instrument input.

Upvotes: 2

Related Questions