Reputation: 945
I'm trying to create a reactive music visualization, as seen in this sample: http://webaudioapi.com/samples/visualizer/
What I would like to do is use buffered audio HTMLMediaElements to avoid slow loading.
How can I connect the visualization functions to the audio elements?
Edit: I have tried it again using blip.js, but I am getting an error at the line analyser.getByteTimeDomainData(dataArray);.
Here is the code:
$(document).ready(function() {
  // Waveform visualizer for an HTMLMediaElement-backed audio stream.
  // NOTE: blip.js's node() wrappers do not expose the native AnalyserNode
  // methods (hence the getByteTimeDomainData error), so we use the Web
  // Audio API directly and tap the Audio element via a media-element source.
  var WIDTH = 512;
  var HEIGHT = 256; // was misspelled "HEIGTH", leaving HEIGHT an undefined global

  var audio = new Audio('5minutes.mp3');
  var audioCtx = new (window.AudioContext || window.webkitAudioContext)();

  // Routing: media element -> analyser -> speakers. The original code
  // never connected the audio element to the graph at all.
  var source = audioCtx.createMediaElementSource(audio);
  var analyser = audioCtx.createAnalyser();
  source.connect(analyser);
  analyser.connect(audioCtx.destination);

  var canvas = document.querySelector('canvas');
  var canvasCtx = canvas.getContext('2d');
  var bufferLength = analyser.frequencyBinCount;
  var dataArray = new Uint8Array(bufferLength);

  // Draws one frame of the time-domain waveform and schedules the next.
  function draw() {
    requestAnimationFrame(draw);
    analyser.getByteTimeDomainData(dataArray);

    canvasCtx.fillStyle = 'rgb(200, 200, 200)';
    canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);
    canvasCtx.lineWidth = 2;
    canvasCtx.strokeStyle = 'rgb(0, 0, 0)';
    canvasCtx.beginPath();

    var sliceWidth = WIDTH * 1.0 / bufferLength;
    var x = 0;
    for (var i = 0; i < bufferLength; i++) {
      // Byte samples are centred on 128; normalise to roughly [0, 2].
      var v = dataArray[i] / 128.0;
      var y = v * HEIGHT / 2;
      if (i === 0) {
        canvasCtx.moveTo(x, y);
      } else {
        canvasCtx.lineTo(x, y);
      }
      x += sliceWidth;
    }
    canvasCtx.lineTo(canvas.width, canvas.height / 2);
    canvasCtx.stroke();
  }

  draw();

  $('#play').on('click', function () {
    audio.play();
  });
  $('#stop').on('click', function () {
    // HTMLMediaElement has no stop() method (the original audio.stop()
    // throws a TypeError); emulate "stop" with pause + rewind.
    audio.pause();
    audio.currentTime = 0;
  });
});
Upvotes: 0
Views: 207
Reputation: 945
I have found a way:
http://jsfiddle.net/tomasantunes/hb5huzew/
$(document).ready(function() {
  // Waveform visualizer: taps an <audio> element through an AnalyserNode
  // and renders the time-domain signal onto a <canvas> each frame.
  // Canvas drawing area in pixels (were implicit globals WIDTH/HEIGHT,
  // assigned without var inside draw()).
  var WIDTH = 512;
  var HEIGHT = 256;

  var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
  var myAudio = document.querySelector('audio');

  // Show this script's own text on the page for reference.
  var pre = document.querySelector('pre');
  var myScript = document.querySelector('script');
  pre.innerHTML = myScript.innerHTML;

  // Routing: media element -> analyser -> speakers. The analyser must be
  // reconnected to destination or the audio would be silenced.
  var source = audioCtx.createMediaElementSource(myAudio);
  var analyser = audioCtx.createAnalyser();
  source.connect(analyser);
  analyser.connect(audioCtx.destination);

  var canvas = document.querySelector('canvas');
  var canvasCtx = canvas.getContext('2d');
  var bufferLength = analyser.frequencyBinCount;
  var dataArray = new Uint8Array(bufferLength);

  // Draws one frame of the waveform and schedules the next.
  function draw() {
    requestAnimationFrame(draw);
    analyser.getByteTimeDomainData(dataArray);

    canvasCtx.fillStyle = 'rgb(200, 200, 200)';
    canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);
    canvasCtx.lineWidth = 2;
    canvasCtx.strokeStyle = 'rgb(0, 0, 0)';
    canvasCtx.beginPath();

    var sliceWidth = WIDTH * 1.0 / bufferLength;
    var x = 0;
    for (var i = 0; i < bufferLength; i++) {
      // Byte samples are centred on 128; normalise to roughly [0, 2].
      var v = dataArray[i] / 128.0;
      var y = v * HEIGHT / 2;
      if (i === 0) {
        canvasCtx.moveTo(x, y);
      } else {
        canvasCtx.lineTo(x, y);
      }
      x += sliceWidth;
    }
    canvasCtx.lineTo(canvas.width, canvas.height / 2);
    canvasCtx.stroke();
  }

  draw();
});
Upvotes: 1