Nick
Nick

Reputation: 93

Streaming audio from a PyAudio stream to be played on a webpage in Javascript

I'm trying to take an audio stream from my computer using PyAudio (specifically the pyaudiowpatch fork), stream that audio data over a websocket (using the websockets library, and play it on a web page using javascript. To do this, I have the following Python code on the server side:

import pyaudiowpatch as pyaudio

import asyncio
from websockets.server import serve


CHUNK_SIZE = 512


async def main():
    with pyaudio.PyAudio() as p:
        # 23 is the index of the device i'm trying to get the data from
        audio_device = p.get_device_info_by_index(23)
        with p.open(
            format=pyaudio.paInt16,
            channels=audio_device["maxInputChannels"],
            rate=int(audio_device["defaultSampleRate"]),
            frames_per_buffer=CHUNK_SIZE,
            input=True,
            input_device_index=audio_device["index"]
        ) as stream:
            async def handler(ws):
                print(f"Connection from {ws}")
                while stream.is_active():
                    chunk = stream.read(CHUNK_SIZE)
                    await ws.send(chunk)
                    print(f"Sent chunk to {ws}")
                print(f"Closing connection to {ws}")

            async with serve(handler, host="localhost", port=8081):
                print("Listening...")
                await asyncio.Future()

asyncio.run(main())

This code works fine, and when I make a connection from the web page, the data is sent over just fine. My client-side js is as follows:

const host = "ws://localhost:8081"
const ws = new WebSocket(host, "testProtocol")
const audioContext = new AudioContext()
const source = audioContext.createBufferSource()

source.connect(audioContext.destination)

document.getElementById("playButton").onclick = () => {
    source.start()
}

ws.onmessage = (event) => {
    event.data.arrayBuffer().then(data => {
        console.log(new Uint8Array(data)) // Shows that the data being received matches that being sent
        audioContext.decodeAudioData(new Uint8Array(data).buffer, (buffer) => {
            source.buffer = buffer
        })
    })
}

The idea is that the ws client receives the data as a blob, and then turns it into an ArrayBuffer to be played by the Web Audio API. However, I get the following error from the js: Uncaught (in promise) DOMException: The buffer passed to decodeAudioData contains an unknown content type. Clearly this means that the audio data I am passing is not in a format that the Web Audio API can understand.

What is the correct way to play the audio being streamed over to the web page?

Upvotes: 0

Views: 682

Answers (1)

Bryan Vaughn
Bryan Vaughn

Reputation: 1

You need to convert your audio data from int16 to float32 which javascript expects. You can use numpy to convert the audio signal.

import pyaudio
import asyncio
from websockets.server import serve
import numpy as np

# Set up audio parameters
CHUNK_SIZE = 24000
FORMAT = pyaudio.paInt16
CHANNELS = 1
RATE = 44100


async def main():
    # Initialize PyAudio
    p = pyaudio.PyAudio()

    # Open the microphone stream
    stream = p.open(format=FORMAT,
                    channels=CHANNELS,
                    rate=RATE,
                    input=True,
                    frames_per_buffer=CHUNK_SIZE)

    def pcm16_to_float32(audio_data):
        #Convert 16-bit PCM audio data to float32 format
        audio_data = np.frombuffer(audio_data, dtype=np.int16)
        # Convert to float32 and normalize to [-1, 1]
        audio_data = audio_data.astype(np.float32) / 32767.0
        return audio_data.tobytes()

    async def handler(ws):
        print(f"Connection from {ws}")
        while stream.is_active():
            chunk = pcm16_to_float32(stream.read(CHUNK_SIZE))
            await ws.send(chunk)
            print(f"Sent chunk to {ws}")


        print(f"Closing connection to {ws}")

    async with serve(handler, host="0.0.0.0", port=8081):
        print("Listening...")
        await asyncio.Future()


    # Stop and close the stream
    stream.stop_stream()
    stream.close()
    p.terminate()

asyncio.run(main())

and convert blob to float32 array

<button type="button" onClick="startAudio()" id="startaudio" class="btn btn-sm btn-primary" value="submit">Start Audio</button>
<script>
    async function blobToFloat32Array(blob) {
      const arrayBuffer = await blob.arrayBuffer();
      const dataView = new DataView(arrayBuffer);
      const float32Array = new Float32Array(dataView.byteLength / Float32Array.BYTES_PER_ELEMENT);

      for (let i = 0; i < float32Array.length; i++) {
        float32Array[i] = dataView.getFloat32(i * Float32Array.BYTES_PER_ELEMENT, true);
      }

      return float32Array;
    }
    

    function startAudio() {

        const websocket = new WebSocket('ws://localhost:8081');
        
        // Create an AudioContext
        const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
                
        websocket.onmessage = (event) => {
                
            const data = blobToFloat32Array(event.data);

            data.then((audioData) => {
                // Create an AudioBuffer
                const audioBuffer = audioCtx.createBuffer(1, audioData.length, 44100); // audioCtx.sampleRate
                audioBuffer.copyToChannel(audioData, 0);

                // Play the audio
                const source = audioCtx.createBufferSource();
                source.buffer = audioBuffer;
                source.connect(audioCtx.destination);
                source.start();
            });
        };
    }
</script>

Also I found you need to increase the chunk size a lot to get it working. The code above might not be the best example but it works.. though the audio breaks every chunk or so.

Upvotes: 0

Related Questions