Joel Hager
Joel Hager

Reputation: 3440

Can you attach an AudioContext/Analyser to an HTML audio node with a src already loaded?

I have a basic HTML audio src, and I'm just trying to hook an analyzer node to the output of it to show a visualizer when you play the file. Any ideas?

When I try to map the node, I also get stuttering. When I make an AudioContext and attach it to the source, I get no output. I'm sure I'm just routing/doing it wrong. Any help? Thanks in advance!

The js:

$(document).ready(function()
{

  const audio = document.getElementById("audio");
  const audioCtx = new AudioContext();
  const canvas = document.getElementById("canvas");
  const canvasCtx = canvas.getContext("2d");
  const co = "https://cors-anywhere.herokuapp.com/";
  const audioSrc = "https://pineprojectmusic.s3-us-west-2.amazonaws.com/StreamingSongs/Radio.mp3";
  var track;

  // Toggle playback bookkeeping when the user interacts with the element.
  audio.addEventListener("play", function()
  {
    // Circumvent Chrome autoplay policy: the AudioContext starts
    // suspended until a user gesture, so resume it here.
    if (audioCtx.state === "suspended")
      {
        audioCtx.resume();
      }

    // Routing (Source -> Analyser -> Destination) would be wired here.
    // track = audioCtx.createMediaElementSource(audio);
    // track.connect(audioCtx.destination);

    if (this.dataset.playing === "false")
    {
      alert("Playing");
      audio.play();
      this.dataset.playing = "true";
    }
    // BUG FIX: the original tested === "false" in both branches, so the
    // pause branch was unreachable, and it set playing back to true
    // after pausing, so the state never reset.
    else if (this.dataset.playing === "true")
    {
      alert("Stopped");
      audio.pause();
      this.dataset.playing = "false";
    }

  }, false);

  // Resume the shared context if it has been created (kept for parity
  // with the original; currently unused).
  function setUpContext()
  {
    if (typeof audioCtx != 'undefined')
      {
        audioCtx.resume();
      }
  }
  // var source = ctx.createMediaElementSource(audio);

  // Where we fetch the mp3 file from S3 Bucket

  /*
  fetch(co + audioSrc)
    .then(response => response.arrayBuffer())
    .then(data => loadSong(data)); */

  // Decode a fetched ArrayBuffer and route the decoded audio into the
  // context for playback.
  function loadSong(data)
  {
    console.log(data);
    // BUG FIX: decodeAudioData() lives on the AudioContext, not on the
    // ArrayBuffer; MediaSource has no createMediaElementSource(); and
    // audio.src is a string, which cannot be connect()ed to a node.
    audioCtx.decodeAudioData(data).then(function(decodedAudio)
    {
      var bufferSrc = audioCtx.createBufferSource();
      bufferSrc.buffer = decodedAudio;
      bufferSrc.connect(audioCtx.destination);
    });
  }

}); // end of DOM event
/* Suppress the focus ring on the audio element. */
#audio:focus {
  outline: none;
}

/* Pin the file picker to the top-left corner, above everything else. */
#thefile {
  position: fixed;
  z-index: 100;
  top: 10px;
  left: 10px;
}

/* Full-viewport canvas anchored at the origin. */
#canvas {
  position: fixed;
  top: 0;
  left: 0;
  width: 100%;
  height: 100%;
}

/* Dock the player along the bottom edge with a 10px gutter each side. */
audio {
  position: fixed;
  bottom: 10px;
  left: 10px;
  width: calc(100% - 20px);
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
<div id="content">
  <!-- Visualiser surface the script draws into -->
  <canvas id="canvas"></canvas>
  <!-- NOTE(review): no crossorigin attribute here — a MediaElementSource
       fed from this cross-origin S3 URL will produce silence/zeros in the
       analyser unless the element is CORS-enabled; confirm against the
       bucket's CORS policy. -->
  <audio id="audio" controls controlsList="nodownload" src="https://pineprojectmusic.s3-us-west-2.amazonaws.com/StreamingSongs/Radio.mp3">
  </audio>
</div>

Upvotes: 1

Views: 2729

Answers (2)

Kosh
Kosh

Reputation: 18393

Looks like a wrong routing.

The spec has described the Modular Routing topic well.
So the key point is:

In an AudioContext, the signal goes from a source to a destination, optionally passing through other AudioNodes along the way.
Hence we have to connect them in the right order:

Source --> Analyser --> Destination

See the example below:

// One shared context; the webkit prefix covers older Safari builds.
const audioCtx = new (window.AudioContext || window.webkitAudioContext)();

// Source: the page's <audio> element. Resume the context on play so the
// graph runs after the required user gesture.
const audio = document.querySelector('audio');
audio.onplay = () => audioCtx.resume();
const source = audioCtx.createMediaElementSource(audio);

// Analyser sized for 2^8-point FFTs; dataArray receives each snapshot.
const analyser = audioCtx.createAnalyser();
analyser.fftSize = 2 ** 8;
const bufferLength = analyser.frequencyBinCount;
const dataArray = new Uint8Array(bufferLength);

// Wire the graph: Source -> Analyser -> Destination.
source.connect(analyser);
analyser.connect(audioCtx.destination);

// Visualisation: one <i> bar in the section per analyser bin.
const section = document.querySelector('section');
const v = [];
for (let bin = 0; bin < bufferLength; bin++) {
  const bar = document.createElement('i');
  section.appendChild(bar);
  v.push(bar);
}

// Poll the analyser and feed each sample into its bar's --c property
// (0..255 centred on 128, scaled into a 0..360 hue/height value).
setInterval(() => {
  analyser.getByteTimeDomainData(dataArray);
  for (let i = 0; i < dataArray.length; i++) {
    v[i].style.setProperty('--c', Math.abs(128 - dataArray[i]) * 2.8125 | 0);
  }
}, 15);
/* Black full-viewport page with the default gutters removed. */
html,
body {
  margin: 0;
  padding: 0;
  width: 100vw;
  height: 100vh;
  background: #000;
}

/* Flex row of bars, vertically centred, filling the viewport. */
section {
  display: flex;
  align-items: center;
  height: 100vh;
}

/* One bar per analyser bin; the --c custom property drives both the
   bar height and its hue. */
section i {
  flex: 1 1 100%;
  height: calc(100vh * var(--c) / 360);
  border-radius: 55%;
  background: hsl(var(--c), 95%, 45%);
}

/* Semi-transparent player pinned along the bottom edge. */
audio {
  position: fixed;
  bottom: 1em;
  left: 1em;
  width: calc(100% - 2em);
  opacity: .3;
}
<section></section>
<!-- crossorigin="anonymous" lets the analyser read samples from the
     CORS-enabled stream; without it a MediaElementSource outputs zeros.
     FIX: the original left the <audio> element unclosed. -->
<audio controls src="https://ice3.somafm.com/defcon-128-mp3" crossorigin="anonymous"></audio>

Hope it helps.

Upvotes: 3

Joel Hager
Joel Hager

Reputation: 3440

There were two issues I found that solved it for me:

  1. I forgot to add a <AllowedHeader> entry in my S3 CORS policy (derp).
  2. Because of web browsers' new autoplay policies, you have to instantiate the AudioContext after a user interaction (I put mine in the "play" event listener for the audio tag).

The audio now passes, and we're off to the races it seems! Hope this helps somebody in the future! :)

The code (to hopefully help some in the same boat in the future):

// Declare variables
const url = "https://pineprojectmusic.s3-us-west-2.amazonaws.com/StreamingSongs/Radio.mp3";
// Feature-detect the AudioContext constructor (webkit prefix for older Safari).
var AudioContext = (window.AudioContext || window.webkitAudioContext);
// if (audioCtx) { audioCtx = new (audioCtx) };
var source, analyser, ctx;

// Housekeeping when DOM loads
document.addEventListener("DOMContentLoaded", function () 
{
    // Make Audio File
    const audio = new Audio();
    audio.id = "audio";
    audio.src = url;
    audio.controls = true;
    audio.controlsList = "nodownload";
    // crossOrigin makes the S3 response CORS-enabled so the analyser can
    // read the samples (item 1 of the answer's fix).
    audio.crossOrigin = "anonymous";
    // NOTE(review): "autoload" is not a standard media-element property;
    // this was presumably meant to be audio.preload = "auto" — confirm.
    audio.autoload = "auto";

    // Create Canvas
    const canvas = document.createElement("CANVAS");
    canvas.id = "canvas";
    // NOTE(review): canvasCtx is assigned without const/let, creating an
    // implicit global.
    canvasCtx = canvas.getContext("2d");

    // Insert it into HTML
    document.getElementById("audio_container").appendChild(audio);
    document.getElementById("audio_container").appendChild(canvas);

    audio.addEventListener("play", playAudio);

    // Build the routing graph lazily on "play" — i.e. after a user
    // gesture, which is what browser autoplay policies require.
    function playAudio()
    {
        // Set up routes
        // NOTE(review): a NEW AudioContext is created on every play event,
        // while the guards below reuse nodes from the first one — later
        // contexts are orphaned. Hoist or guard this creation too.
        const audioCtx = new(AudioContext);
        if (!analyser) 
        { 
            analyser = audioCtx.createAnalyser();
            analyser.fftSize = 256;
        };
        if (!source) 
        { 
            source = audioCtx.createMediaElementSource(audio);
            // NOTE(review): the analyser is never inserted into the chain
            // here — source connects straight to destination, so the
            // analyser receives no signal in the code shown.
            source.connect(audioCtx.destination);
        };
        // audioAnalyser() is defined elsewhere; not shown in this snippet.
        audioAnalyser();
    };
    // NOTE(review): the snippet ends here without closing the
    // DOMContentLoaded listener — the trailing `});` was cut off as posted.

Upvotes: 1

Related Questions