
Audio visualizer

Demonstration

I wanted to make a small audio visualizer, inspired by the ones found, for instance, in Winamp. When you load the page, the browser should ask which audio device to use as input. You can select a microphone or (at least on Linux) the stream that is sent to the speakers. To start, click on the black canvas below:

Time per frame: - ms | Frames per second: -

Comments

The "FFT size" slider ranges from 25 to 215 in powers of two and controls the number of samples that are used in one run of the FFT. The number of frequency bins is half the size of the FFT.

The "Decay" slider ranges from 0 to 1 in steps of 0.01 and controls the opacity of the background color with which the canvas is filled in each rendering step. If \(x \in [0, 1]\) is the value of the slider, we compute the opacity \(\alpha\) according to the (more or less arbitrarily chosen) function

\begin{align} \alpha(x) = \arctan(50) - \arctan(50 (1 - x)). \end{align}
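
In code this is a one-liner. The following illustration (not part of the tangled file) shows a few values; note that the default slider value of 0.6 gives roughly 0.03, the initial decayConstant used in the class below:

// Illustration: fill opacity as a function of the slider value x in [0, 1].
const decayAlpha = (x) => Math.atan(50) - Math.atan(50 * (1 - x));

console.log(decayAlpha(0).toFixed(2));    // 0.00 -> old frames never fade
console.log(decayAlpha(0.6).toFixed(2));  // 0.03 -> the default decayConstant below
console.log(decayAlpha(1).toFixed(2));    // 1.55 -> effectively a hard clear every frame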

It turns out that creating this visualization is very easy in a modern browser using the Web Audio API, which provides access to an audio stream, for example from a microphone. The waveform and frequency data for the visualizations are provided by an AnalyserNode, which does all the heavy lifting (e.g. computing the FFT).
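
A minimal sketch of that audio graph (assuming a secure context where getUserMedia is available; the real setup on this page happens in main() further down):

// Standalone sketch: microphone -> MediaStreamAudioSourceNode -> AnalyserNode.
async function analyserSketch() {
    const audioCtx = new AudioContext();
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
    const source = audioCtx.createMediaStreamSource(stream);
    const analyser = audioCtx.createAnalyser();
    source.connect(analyser);

    // Both buffers are filled with unsigned bytes; 128 is the zero line of the waveform.
    const waveform = new Uint8Array(analyser.fftSize);
    const spectrum = new Uint8Array(analyser.frequencyBinCount);
    analyser.getByteTimeDomainData(waveform);
    analyser.getByteFrequencyData(spectrum);
    return { waveform, spectrum };
}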

The data is rendered to an HTML canvas element using the Canvas API. For this experiment the performance of the CanvasRenderingContext2D was good enough, but one could try to use a WebGL context for more complicated visualizations.
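
Switching the backend would mainly change how the rendering context is requested; a canvas element hands out only one kind of context, so the choice has to be made up front. A sketch (not used on this page):

// Sketch: a canvas gives out either a 2D or a WebGL context, not both.
const sketchCanvas = document.createElement("canvas");
const ctx2d = sketchCanvas.getContext("2d");       // what this page uses
// const gl = sketchCanvas.getContext("webgl2");   // alternative for heavier visualizations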

The code

The code for the visualization follows below. It lives inside org-mode code blocks and is written to the JavaScript file audiovisualizer.js using org-babel-tangle.
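
For example, a tangled block in the Org file looks roughly like this; the :tangle header argument is what routes it into audiovisualizer.js:

#+begin_src js :tangle audiovisualizer.js
  class Visualization {
      // ...
  }
#+end_src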

Visualization class

class Visualization {
    constructor(audioContext, audioSource, canvas, canvasCtx, timerCallback) {
        this.canvas = canvas;
        this.canvasCtx = canvasCtx;
        this.timerCallback = timerCallback;

        this.audioContext = audioContext;
        this.audioSource = audioSource;

        this.analyser = audioContext.createAnalyser();
        this.audioSource.connect(this.analyser);

        this.paused = true;
        this.frame = undefined;

        this.offscreen = new OffscreenCanvas(this.canvas.width, this.canvas.height);
        this.offscreenCtx = this.offscreen.getContext("2d");

        this.decayConstant = 0.03;
        this.backgroundColor = "#000000";
    }

    set fftSize(n) {
        // The slider works with the exponent n; the AnalyserNode wants the actual number of samples.
        this.analyser.fftSize = 2**n;
        this.waveformBuffer = new Uint8Array(this.analyser.fftSize);
        this.frequencyBuffer = new Uint8Array(this.analyser.frequencyBinCount);
    }

    get fftSize() {
        return Math.log(this.analyser.fftSize) / Math.log(2);
    }

    commonParameters() {
        return {
            "fftSize": {
                type: "range",
                value: this.fftSize,
                attributes: { min: 5, max: 15, step: 1 },
                name: "FFT size",
                onChange: (n) => this.fftSize = parseInt(n),
            },
            "decayConstant": {
                type: "range",
                value: 0.60,
                attributes: { min: 0, max: 1, step: 0.01 },
                name: "Decay",
                onChange: (n) => this.decayConstant = Math.atan(50) - Math.atan((1 - parseFloat(n)) * 50),
            },
            "backgroundColor": {
                type: "color",
                value: this.backgroundColor,
                name: "Background color",
                onChange: (c) => this.backgroundColor = c,
            },
        };
    }

    teardown() {
        this.paused = true;
        if (this.frame !== undefined) {
            cancelAnimationFrame(this.frame);
        }

        this.audioSource.disconnect(this.analyser);

        this.canvasCtx.fillStyle = this.backgroundColor;
        this.canvasCtx.fillRect(0, 0, this.canvas.width, this.canvas.height);
    }

    fadeAway() {
        // Draw two scaled, slightly jittered copies of the current canvas back onto
        // itself, which smears old frames outwards and inwards over time.
        this.canvasCtx.globalAlpha = 1.0;
        this.canvasCtx.setTransform(0.8, 0.0, 0.0, 0.8, Math.random() * 10.0 - 100.0, Math.random() * 10.0 + 100.0);
        this.canvasCtx.drawImage(this.canvas, (1.0 - 0.8) * this.canvas.width / 2.0, (1.0 - 0.8) * this.canvas.height / 2.0); // TODO
        this.canvasCtx.setTransform(1.2, 0.0, 0.0, 1.2, Math.random() * 10.0 - 100.0, Math.random() * 10.0 - 100.0);
        this.canvasCtx.drawImage(this.canvas, (1.0 - 1.2) * this.canvas.width / 2.0, (1.0 - 1.2) * this.canvas.height / 2.0); // TODO
    }


    clearCanvas() {
        this.canvasCtx.save();

        this.canvasCtx.globalAlpha = this.decayConstant;
        this.canvasCtx.fillStyle = this.backgroundColor;
        this.canvasCtx.fillRect(0, 0, this.canvas.width, this.canvas.height);

        this.fadeAway();

        this.canvasCtx.restore();
    }

    animate() {
        this.clearCanvas();

        this.analyser.getByteTimeDomainData(this.waveformBuffer);
        this.analyser.getByteFrequencyData(this.frequencyBuffer);

        this.renderLoop();
        this.timerCallback();

        if (!this.paused) {
            this.frame = requestAnimationFrame(() => this.animate());
        }
    }
}

Oscilloscope

class Oscilloscope extends Visualization {
    static name = "Oscilloscope";

    constructor(audioContext, audioSource, canvas, canvasCtx, timerCallback) {
        super(audioContext, audioSource, canvas, canvasCtx, timerCallback);
        this.fftSize = 8;
        this.canvasCtx.strokeStyle = "#f9a900";
    }

    getParameters() {
        return Object.assign({}, this.commonParameters(), {
            "color": {
                type: "color",
                value: this.canvasCtx.strokeStyle,
                name: "Color",
                onChange: (c) => this.canvasCtx.strokeStyle = c
            },
        });
    }

    renderLoop() {
        let x = 0;
        const dx = this.canvas.width / this.waveformBuffer.length;

        this.canvasCtx.beginPath();

        for (let i = 0; i < this.waveformBuffer.length; i++) {
            // Byte samples are centered at 128, so this maps the range [0, 255] onto the canvas height.
            const y = this.waveformBuffer[i] / 128.0 * this.canvas.height / 2.0;

            i === 0 ? this.canvasCtx.moveTo(x, y) : this.canvasCtx.lineTo(x, y);

            x += dx;
        }

        this.canvasCtx.lineTo(this.canvas.width, this.canvas.height / 2);
        this.canvasCtx.stroke();
    }
}

FFT

class FFT extends Visualization {
    static name = "FFT";

    constructor(audioContext, audioSource, canvas, canvasCtx, timerCallback) {
        super(audioContext, audioSource, canvas, canvasCtx, timerCallback);
        this.fftSize = 11;
        this.canvasCtx.fillStyle = "#f9a900";
    }

    getParameters() {
        return Object.assign({}, this.commonParameters(), {
            "color": {
                type: "color",
                value: this.canvasCtx.fillStyle,
                name: "Color",
                onChange: (c) => this.canvasCtx.fillStyle = c
            },
        });
    }

    renderLoop() {
        let x = 0;
        const dx = this.canvas.width / this.frequencyBuffer.length;

        for (let i = 0; i < this.frequencyBuffer.length; i++) {
            const y = this.frequencyBuffer[i] / 255.0 * this.canvas.height;

            this.canvasCtx.fillRect(x, this.canvas.height - y / 2, dx, dx / 2);

            x += dx;
        }
    }
}

Circular

class Circular extends Visualization {
    static name = "Circular";

    constructor(audioContext, audioSource, canvas, canvasCtx, timerCallback) {
        super(audioContext, audioSource, canvas, canvasCtx, timerCallback);
        this.fftSize = 10;
        this.canvasCtx.fillStyle = "#f9a900";

        this.twopi = 2.0 * Math.PI;
    }

    getParameters() {
        return Object.assign({}, this.commonParameters(), {
            "color": {
                type: "color",
                value: this.canvasCtx.fillStyle,
                name: "Color",
                onChange: (c) => this.canvasCtx.fillStyle = c
            },
        });
    }

    renderLoop() {
        let omega = 0;
        const domega = this.canvas.width / this.waveformBuffer.length;
        // Mean square of the normalized samples; it drives the slow circular drift of the pattern's center.
        const avg = Array.from(this.waveformBuffer, r => r / 128.0).reduce((a, b) => a + b**2, 0) / this.waveformBuffer.length;

        for (let i = 0; i < this.waveformBuffer.length; i++) {
            const r = this.waveformBuffer[i] / 128.0 * this.canvas.width / 4.0 / 2.0 + this.canvas.width / 8.0;

            this.canvasCtx.beginPath();
            this.canvasCtx.ellipse(
                this.canvas.width / 2.0 + r * Math.sin(this.twopi * omega) + 100 * avg * Math.sin((this.frame % 360) / 360 * this.twopi),
                this.canvas.height / 2.0 + r * Math.cos(this.twopi * omega) + 100 * avg * Math.cos((this.frame % 360) / 360 * this.twopi),
                2.0,
                2.0,
                0.0,
                0.0,
                this.twopi
            );
            this.canvasCtx.fill();

            omega += domega;
        }
    }
}

Auxiliary and setup functions

const canvas = document.getElementById("viscanvas");
const ctx = canvas.getContext("2d");

canvas.width = 600;
canvas.height = 600;
ctx.fillStyle = "rgb(0 0 0)";
ctx.strokeStyle = "rgb(249 169 0)";
ctx.globalAlpha = 1.0;
ctx.lineWidth = 2;

function setupTimer() {
    return {
        diffs: Array.from({ length: 60 }, () => 0),
        start: performance.now(),
        end: performance.now(),
        frame: 0,
        displays: {
            ms: document.getElementById("ms-per-frame"),
            fps: document.getElementById("fps"),
        },
    };
}

function updateTimer(timer) {
    timer.end = performance.now();
    timer.diffs[timer.frame] = timer.end - timer.start;
    timer.frame = (timer.frame + 1) % 60;
    timer.start = timer.end;

    if (timer.frame % 60 === 0) {
        let avg = timer.diffs.reduce((a, b) => a + b, 0) / 60;
        timer.displays.ms.textContent = avg.toFixed(2);
        timer.displays.fps.textContent = (1 / avg * 1000).toFixed(2);
    }
}

function createParameterControls(params) {
    const parent = document.getElementById("param-container");

    while (parent.firstChild) {
        parent.removeChild(parent.firstChild);
    }

    for (const k of Object.keys(params)) {
        const param = params[k];

        const label = document.createElement("label");
        label.setAttribute("for", k);
        label.textContent = param.name + ":";

        const input = document.createElement("input");
        input.type = param.type;
        if (param.attributes) {
            Object.keys(param.attributes).forEach(k => input.setAttribute(k, param.attributes[k]));
        }
        input.value = param.value;
        input.addEventListener("input", (ev) => param.onChange(ev.target.value));

        const container = document.createElement("div");
        container.setAttribute("class", "param-row");
        container.appendChild(label);
        container.appendChild(input);

        parent.appendChild(container);
    }
}

async function getMediaStreamSource(audioCtx) {
    const constraints = { audio: true, video: false };

    try {
        const stream = await navigator.mediaDevices.getUserMedia(constraints);
        return audioCtx.createMediaStreamSource(stream);
    } catch (err) {
        console.log(err);
    }
}

async function main() {
    const AudioContext = window.AudioContext || window.webkitAudioContext;
    const audioContext = new AudioContext();
    const audioSource = await getMediaStreamSource(audioContext);

    const visualizations = {
        "oscilloscope": Oscilloscope,
        "fft": FFT,
        "circular": Circular,
    };

    const dropdown = document.getElementById("visselect");
    for (const vis of Object.keys(visualizations)) {
        const option = document.createElement("option");
        option.value = vis;
        option.innerText = visualizations[vis].name;
        dropdown.appendChild(option);
    }

    const timer = setupTimer();
    let vis = undefined;

    const startVis = function(visId, wasPaused) {
        const visClass = visualizations[visId];
        vis = new visClass(audioContext, audioSource, canvas, ctx, () => updateTimer(timer));
        if (vis.getParameters !== undefined) {
            createParameterControls(vis.getParameters());
        }

        if (!wasPaused) {
            vis.paused = false;
            requestAnimationFrame(() => vis.animate());
        }
        console.log(vis);
    };

    canvas.addEventListener("click", () => {
        vis.paused = !vis.paused;
        requestAnimationFrame(() => vis.animate());
    });

    dropdown.addEventListener("input", () => {
        const selectedVisId = dropdown.selectedOptions[0].value;
        const wasPaused = vis !== undefined ? vis.paused : true;

        if (vis !== undefined) {
            vis.teardown();
        }

        startVis(selectedVisId, wasPaused);
    });

    startVis("oscilloscope", true);
}

main();