Reputation: 51245
I'm receiving raw Float32 audio through WebSockets and would like to play it back in the browser. From my understanding I would need to use the MediaStream API for this. However, I cannot find a way to create a MediaStream which I can append data buffers to.
What is the proper way to achieve this?
I'm trying something like this:
// NOTE(review): this snippet is the asker's non-working attempt — the
// "????" placeholders mark the missing pieces the question is about.
var context = new AudioContext();
context.sampleRate = 48000; // NOTE(review): sampleRate is read-only on an AudioContext — this assignment has no effect
var stream = null; // ???? — there is no API to create an appendable MediaStream directly
var source = context.createMediaStreamSource(stream);
source.connect(context.destination);
source.start(0); // NOTE(review): MediaStreamAudioSourceNode has no start() method
socket.onmessage = function (event) {
stream.appendBuffer(new Float32Array(event.data)); // ???? — MediaStream has no appendBuffer (that is SourceBuffer, from the Media Source API)
};
Upvotes: 24
Views: 5861
Reputation: 784
You should use the AudioBuffers to read sound from the buffers from the websocket and play it.
// Play raw Float32 PCM chunks arriving over a WebSocket by scheduling one
// AudioBufferSourceNode per message, each starting exactly when the
// previous chunk ends, so playback is gapless.
var context = new AudioContext();
var sampleRate = 48000; // must match the rate the sender captured at
var startAt = 0; // AudioContext time at which the next chunk should begin

// Without this, binary WebSocket frames arrive as Blob objects and
// `new Float32Array(event.data)` would throw.
socket.binaryType = 'arraybuffer';

socket.onmessage = function (event) {
  var floats = new Float32Array(event.data);
  var source = context.createBufferSource();
  // One mono AudioBuffer per message, holding this chunk's samples.
  var buffer = context.createBuffer(1, floats.length, sampleRate);
  buffer.getChannelData(0).set(floats);
  source.buffer = buffer;
  source.connect(context.destination);
  // Schedule right after the previous chunk, but never in the past.
  startAt = Math.max(context.currentTime, startAt);
  source.start(startAt);
  startAt += buffer.duration;
};
This plays the music from a websocket.
To convert an AudioBuffer into a MediaStream, use AudioContext.createMediaStreamDestination(). Connect the BufferSource to it to make a custom MediaStream based on the buffer's data.
// Build a MediaStream whose audio comes from an in-memory Float32Array.
var data = getSound(); // Float32Array;
var sampleRate = 48000;
var context = new AudioContext();
var streamDestination = context.createMediaStreamDestination();

// Copy the raw samples into a single-channel AudioBuffer.
var buffer = context.createBuffer(1, data.length, sampleRate);
buffer.getChannelData(0).set(data);

// Feed that buffer into the stream destination on an endless loop.
var source = context.createBufferSource();
source.buffer = buffer;
source.loop = true;
source.connect(streamDestination);
source.start();

// The resulting MediaStream carries the looping audio.
var stream = streamDestination.stream;
This reads audio from the data array and converts it into a MediaStream.
Upvotes: 7
Reputation: 877
Regarding decoding, the AudioContext from the window object should do the job.
var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
and then
audioCtx.decodeAudioData(audioData, function(buffer) {
directly on the binary array.
Regarding communication, I'd rather use XMLHttpRequest (a low-level, older API) and use the response directly.
This is a pretty good function made by the MDN folks (I updated the URL of the ogg file so you can test it directly):
// Fetch an OGG file, decode it with decodeAudioData, and wire the decoded
// buffer into a looping AudioBufferSourceNode. Assigns to outer `source`,
// `request`, `myBuffer` and `songLength` (globals in the full MDN example),
// and reads the `playbackControl`/`loopstartControl`/`loopendControl`
// slider elements from the page.
function getData() {
  source = audioCtx.createBufferSource();
  request = new XMLHttpRequest();
  request.open('GET', 'https://raw.githubusercontent.com/mdn/webaudio-examples/master/decode-audio-data/viper.ogg', true);
  request.responseType = 'arraybuffer'; // decodeAudioData needs an ArrayBuffer
  request.onload = function() {
    var audioData = request.response;
    audioCtx.decodeAudioData(audioData, function(buffer) {
      myBuffer = buffer;
      songLength = buffer.duration;
      source.buffer = myBuffer;
      source.playbackRate.value = playbackControl.value;
      source.connect(audioCtx.destination);
      source.loop = true;
      // Clamp the loop-point sliders to the track's length.
      loopstartControl.setAttribute('max', Math.floor(songLength));
      loopendControl.setAttribute('max', Math.floor(songLength));
    },
    // BUG FIX: the original error callback built a string and discarded it
    // (a no-op expression), silently swallowing decode failures. Report them.
    function(e) { console.error('Error with decoding audio data', e); });
  };
  request.send();
}
the full source code is here :
https://raw.githubusercontent.com/mdn/webaudio-examples/master/decode-audio-data/index.html
Upvotes: -1