HTML5/JavaScript Audio Visualiser

I want to kick off this series with a bit of audio processing theory and its application within the digital realm. Some of the theory is best explained using audible examples, although a visual representation can help a lot with the others. So, I wrote an audio visualiser.


Time-Domain Visualisation




The image above is not the visualiser… It’s an oscilloscope. The screen displays a continuous green line representing a connected electronic signal.
This type of representation is known as the time-domain. The x-axis relates to time (hence time-domain) and the y-axis relates to the signal amplitude, ranging from a negative voltage at the bottom of the display to a positive voltage at the top. The time and amplitude ranges are configurable.

Oscilloscopes allow inspection of digital or analogue signals, although when viewing a digital signal on an oscilloscope the signal just switches between two voltages at different intervals. These voltages represent the binary digits (0 and 1) which make up the stream of data. Digital signals have no inherent format. Codecs must be used to encode audio data into an audio format.

An analogue signal is an electronic representation of the fluctuation in air pressure of sound. A loudspeaker does not need to decode an analogue signal. The information in an analogue signal is inherent.

Both of these concepts are amazing in their own right.


The Visualiser


The above image is a screenshot of the visualiser in action. It acts very much the same as an oscilloscope, right down to the authentic background drawn in MS Paint. The graphic represents the audio signal.

I am quite happy with how this visualiser turned out. It reminded me of the visualisation in Windows Media Player and WinAmp. I will include a couple of posts on this topic at a later date.

The source code for the visualiser is below. To see it in action, click here.




<!DOCTYPE html>
<!-- NOTE(review): this markup appears truncated by the blog-post extraction — the
     <html>, <head> and <body> wrappers and many closing tags (</select>, </option>,
     </tr>, </div>) are missing. Compare with the original index.html before reuse. -->

    <meta charset="UTF-8">
    <title>JavaScript Audio Visualiser</title>
    <!-- Module scripts. The namespacer must load first; every other module hangs
         off the com.littleDebugger namespace it creates. -->
    <script src="com.littleDebugger.namespacer.js"></script>
    <script src="com.littleDebugger.daw.dsp.passThrough.js"></script>
    <script src="com.littleDebugger.daw.audioContext.js"></script>
    <script src="com.littleDebugger.daw.audioLoader.js"></script>
    <script src="com.littleDebugger.daw.dsp.visualiser.js"></script>
    <script src="com.littleDebugger.daw.player.js"></script>
    <script src="com.littleDebugger.daw.controlHelpers.js"></script>
    <link rel="stylesheet" type="text/css" href="styles/index.css" />

    <h1 align="center">JavaScript Audio Visualiser</h1>
    <p>workshop text</p>
    <!-- The Controls that you see on the index page. -->
    <div id="container">
        <canvas id="visualiserCanvas"></canvas>
        <div id="controls">
            <div id="audioFileSelect">
                <br /> Audio File:
                <!-- Server-hosted audio file picker. -->
                <select id="fileToPlay">
                <option value="1" data-audioFile='bensound-funnysong.mp3'>
                    Funny Song (
                <!-- Local-file input; hidden until the source is switched to Filesystem. -->
                <input id="file" type="file" accept="audio/*" class="hidden">
                    <td>Switch Audio Source to: </td>
                    <td><input id="audioSourceSwitch" type="button" value="Filesystem" /></td>
                        Buffer Size:
                        <select id="bufferSizeSelect">
                        <option selected>4096</option>
                        <!-- Canvas resolution choices; options carry data-width/data-height
                             attributes read by the visualiser module. -->
                        <select id="resolutionSelect">
                        <option data-width="1920" data-height="1080">1920 x 1080</option>
                        <option data-width="1024" data-height="864">1024 x 864</option>
                        <option data-width="800" data-height="640">800 x 640</option>
                        <option data-width="640" data-height="480" selected>640 x 480</option>
                        <option data-width="320" data-height="200">320 x 200</option>
                        Refresh Rate:
                        <input id="refreshRate" type="range" min="1" max="20" value="1" />
                        Wave Line Width:
                        <input id="lineWidth" type="range" min="1" max="15" value="1" />
                        Fit to Visualiser
                        <input id="fitToCanvasCheckbox" type="checkbox" />
                        <input id="playButton" type="button" value="Play" />
                        <input id="stopButton" type="button" value="Stop" />
                        <input id="fullScreen" type="button" value="Full Screen" />
                        <div id="loadingMessage" class="hidden">
                            <span class="message">Loading...</span>
            <a href="Documentation.html">Documentation</a>

<script src="index.js"></script>



// Main page script: wires the DOM controls to the audio player and visualiser modules.
// NOTE(review): several lines below appear truncated by extraction (unbalanced
// braces/parens are flagged where spotted); compare with the original index.js.
var audioSourceIsFileSystem = false;
// Audio volume warning is shown the first time audio is played only.
var showAudioVolumeWarning = true;

// Reference to audioLoader module.
var audioLoader = com.littleDebugger.daw.audioLoader;
// Reference to controlHelpers module.
var controlHelpers = com.littleDebugger.daw.controlHelpers;

// Reference to the audio processor used for this workshop.
var audioProcessor = com.littleDebugger.daw.dsp.passThrough;

// Maps the audioSourceIsFileSystem flag (coerced to 0/1) to the label shown on
// the source-switch button.
// NOTE(review): the object-literal braces appear to be missing here.
var audioSourceText = 
    1: "Filesystem",
    0: "Server"

// Array of objects with colour and alpha (opacity) properties.
// The first object represents the configuration for the input buffer and the second for the output.
// This is configurable so that the visualiser can show many different waves at the same time.
// NOTE(review): the array/object delimiters appear to be missing here.
var waveDisplayConfigs =
        colour: "rgb(0,0,255)",
        alpha: 1
        colour: "rgb(0, 255,0)",
        alpha: 0.8

// Initialise visualiser.
// NOTE(review): the argument list appears truncated.
var visualiser = com.littleDebugger.daw.dsp.visualiser(

// DOM control references.
var playControl = document.getElementById('playButton');
var stopControl = document.getElementById('stopButton');
var filesystemFileControl = document.getElementById('file');
var audioFileControl = document.getElementById('fileToPlay');
var fileSourceControl = document.getElementById('audioSourceSwitch');

// The callback for the audioProcessingEvent from the audio player.
// The code is not in the audio player because it's currently doing more than it should be, due to calling the visualiser.
var processAudio = function(audioProcessingEvent) 
    var inputBuffer = audioProcessingEvent.inputBuffer;
    var outputBuffer = audioProcessingEvent.outputBuffer;

    var updateVisualiser = true;
    for (var channel = 0; channel < outputBuffer.numberOfChannels; channel++)
        var inputData = inputBuffer.getChannelData(channel);
        var outputData = outputBuffer.getChannelData(channel);

        // Run the (pass-through) DSP over this channel.
        audioProcessor(inputData, outputData);

        // Visualiser should only be updated for 1 channel.
        if (updateVisualiser) {
            visualiser.drawWave([inputData, outputData]);
            updateVisualiser = false;

// Wire up control events.
// Some of the following event handling could be contained in a module.
// I am not exactly sure how it will all be grouped and split yet so it is just in the main page JS file. 

// Toggle between the server-hosted audio file and a local filesystem file.
// NOTE(review): an else branch appears to be missing between the two assignments.
fileSourceControl.onclick = function () 
    this.value = audioSourceText[audioSourceIsFileSystem * 1];
    if (audioSourceIsFileSystem) {
        audioSourceIsFileSystem = false;
        audioSourceIsFileSystem = true;
        filesystemFileControl.value = null;;

// Disable the play control while a file is loading.
window.addEventListener(audioLoader.audioLoadingStartedEventName, function () 
    playControl.disabled = true;

// Re-enable the play control once loading completes.
window.addEventListener(audioLoader.audioLoadingCompletedEventName, function () 
    playControl.disabled = false;

// Warn about volume on the first play only; presumably then starts playback
// (the player.startAudio() call appears to be missing).
playControl.onclick = function () 
    if (showAudioVolumeWarning) {
        alert('Please make sure the audio volume is set to an appropriate level!');
        showAudioVolumeWarning = false;

// NOTE(review): handler body appears to be missing (presumably player.stopAudio()).
stopControl.onclick = function () 

// Re-cue when a different server-hosted file is selected.
audioFileControl.onchange = function () 
    playControl.disabled = true;

// Load the file chosen from the local filesystem via an object URL.
filesystemFileControl.onchange = function () 
    var localFile = window.URL.createObjectURL(this.files[0]);

// Initialise the player.
// NOTE(review): the argument list appears truncated.
var player = com.littleDebugger.daw.player(

// Cue the audio.



// Visualiser module.
// NOTE(review): many closing braces/parens in this module appear to have been lost
// in extraction (unbalanced blocks are flagged below); compare with the original
// visualiser.js before reuse.
com.littleDebugger.daw.dsp.visualiser = function () {
    // Function to create a visualiser. 
    // The parameters represent the visualiser controls but since they are passed in then there is no 
    // dependency on the DOM.
    // Some of the controls do not have a dependency on a specific type of element and just need the 
    // appropriate properties. (child properties indented with '-')

    // <waveDisplayConfigs> Array of objects. Each object has properties related to the configuration of each waveform to 
    // draw on the visualiser.
    // -<colour> The colour of the wave.
    // -<alpha> The alpha (transparency) of the wave.
    // <canvas> Canvas element where the visualiser will be drawn.
    // <fullScreenControl> Control to trigger full screen. Needs an onclick event.
    // <waveWidthControl> Control for the width of the waveform lines. 
    // -<value> Line width in pixels.
    // <resolutionControl> Select control for resolution of the canvas.
    // - Options need 'data-width' and 'data-height' attributes.
    // <fitToVisualiserWidthControl> Control for stretching/contracting the buffer to fit neatly into the width of the 
    // visualiser. 
    // -<checked> Boolean property.
    // <refreshRateControl> Set how many buffers the visualiser should receive before updating.
    // -<value> Integer property.
    // NOTE(review): the parameter list appears truncated — only the final
    // parameter survived extraction; the doc above lists the full set.
    var initialise = function (
        refreshRateControl) {
        // Setup refresh rate.
        var visualFrame = 1;

        var visualiser = {};

        // Get canvas context.
        var ctx = canvas.getContext('2d');

        // Will hold the vertical midpoint of the canvas (set on resolution change).
        var virticalMidpoint;

        // Add event to change the canvas resolution when the resolution select is changed.
        resolutionControl.onchange = function () {
            canvas.width = this.options[this.selectedIndex].getAttribute('data-width');
            canvas.height = this.options[this.selectedIndex].getAttribute('data-height');
            virticalMidpoint = canvas.height / 2;

        // Call the onchange event.
        // NOTE(review): the call itself (presumably resolutionControl.onchange();) is missing.

        // Get the vertical point on the canvas for amplitude.
        // Maps an amplitude around [-1, 1] to a y pixel position about the midpoint.
        var getVerticalPoint = function (virticalMidpoint, amplitude) {
            return virticalMidpoint + (amplitude * virticalMidpoint);

        // Draw wave on canvas.
        // <inputData> Audio buffer.
        // <ctx> Canvas context.
        // <strokeStyle> Colour of wave line.
        // <alpha> Alpha of wave line.
        var drawLine = function (inputData, ctx, strokeStyle, alpha) {
            var inputLength = inputData.length;

            ctx.globalAlpha = alpha;
            ctx.strokeStyle = strokeStyle;
            ctx.lineWidth = waveWidthControl.value;
            ctx.moveTo(0, getVerticalPoint(virticalMidpoint, inputData[0]));

            var fit = fitToVisualiserWidthControl.checked;
            // NOTE(review): inputLength is re-declared; the declaration above is redundant.
            var inputLength = inputData.length;
            var canvasWidth = canvas.width;

            // Either stretch the buffer across the canvas width or plot one sample per pixel.
            for (var sample = 1; sample < inputLength; sample++) {
                var x = fit ? (sample / inputLength) * canvasWidth : sample;
                ctx.lineTo(x, getVerticalPoint(virticalMidpoint, inputData[sample]))
            // NOTE(review): the ctx.stroke() call and closing braces appear to be missing.


        // Refresh the canvas with new buffers
        // <buffers> Array of buffers to display.
        visualiser.drawWave = function (buffers) {
            // Check if the canvas should be updated.
            if (visualFrame % refreshRateControl.value == 0) {
                visualFrame = 1;
            } else {
            // NOTE(review): the else body (presumably incrementing visualFrame and
            // returning early) is missing.

            // Clear the canvas (could be optimised).
            ctx.clearRect(0, 0, canvas.width, canvas.height);

            // Iterate over each buffer and draw the wave.
            var i = 0;
            buffers.forEach(function (buffer) {
                var colour = waveDisplayConfigs[i].colour;
                var alpha = waveDisplayConfigs[i].alpha;
                drawLine(buffer, ctx, colour, alpha);
            // NOTE(review): the i increment and forEach-callback close appear to be missing.

        // Full screen toggle.
        // NOTE(review): the request*Fullscreen() call inside each branch is missing.
        fullScreenControl.onclick = function () {
            if (canvas.requestFullscreen) {
            } else if (canvas.webkitRequestFullscreen) {
            } else if (canvas.mozRequestFullScreen) {
            } else if (canvas.msRequestFullscreen) {

        return visualiser;

    return initialise;



// This is the audio player.
// It handles the audio context for loading, playing and stopping the audio.
// <audioLoader>Reference to the audioLoader.js module.
// <getAudioContext>Reference to the audioContext.js module.
// NOTE(review): several closing braces and statements in this module appear to
// have been lost in extraction (flagged below); compare with the original player.js.
// `bufferSizeControl` is referenced but never declared here — presumably a
// truncated initialise parameter; confirm against the source.
com.littleDebugger.daw.player = (function (audioLoader, getAudioContext) {
    // NOTE(review): the parameter list appears truncated — only the final
    // parameter survived extraction.
    var initialise = function (
        processAudioCallback) {
        var that = {};
        that.audioPlayingEventName = "audio-playing";
        that.audioStoppedEventName = "audio-stopped";

        var playingAudio = false;
        var audioCtx = getAudioContext();
        var source = audioCtx.createBufferSource();
        var scriptNode;

        // Reloads the audio file.
        that.cueAudioFile = function (fileName) {
            audioLoader.loadAudioFile(fileName, audioCtx, setSource);

        // Starts the audio playing.
        that.startAudio = function () {
            if (!playingAudio) {
                // Lock the buffer size while playing; the ScriptProcessor is
                // created with 1 input and 1 output channel.
                bufferSizeControl.disabled = true;
                scriptNode = audioCtx.createScriptProcessor(bufferSizeControl.value, 1, 1);
                scriptNode.onaudioprocess = function (audioProcessingEvent) {
                // NOTE(review): the handler body (presumably invoking
                // processAudioCallback) and the node/source connection code are missing.

                playingAudio = true;

        // Stops the audio.
        that.stopAudio = function () {
            if (playingAudio) {
                playingAudio = false;
                bufferSizeControl.disabled = false;
                // Re-create the buffer source so the audio can be played again.
                setSource(audioLoader.createBuffer(source.buffer, audioCtx));

        // Fires event.
        // NOTE(review): this builds the event from audioStoppedEventName rather
        // than the eventName parameter — looks like a bug; the dispatch call also
        // appears to be missing.
        var fireEvent = function (eventName) {
            var event = new Event(that.audioStoppedEventName);

        // Used as a callback to set the local source variable. 
        var setSource = function (src) {
            source = src;

        // When the buffer source stops playing, disconnect everything.
        var setOnended = function () {
            source.onended = that.stopAudio;

        return that;

    return initialise;



// Processor for an audio buffer.
// For this demonstration the processed audio is a copy of the input buffer so there
// is no change to the audio.
// NOTE(review): restored the closing braces and the immediate invocation that were
// lost in extraction — the module must evaluate to the inner processing function,
// because index.js calls com.littleDebugger.daw.dsp.passThrough(inputData, outputData)
// directly.
com.littleDebugger.daw.dsp.passThrough = (function () {

    // Process audio buffer: copy every sample across unchanged.
    // <inputData> The buffer to be processed.
    // <outputData> The processed buffer (written in place).
    return function (inputData, outputData) {
        for (var sample = 0; sample < inputData.length; sample++) {
            outputData[sample] = inputData[sample];
        }
    };
})();



// Audio file loader module.
// NOTE(review): this module is heavily truncated by extraction — request.open(...)
// has been fused onto the XMLHttpRequest constructor line, request.send() and the
// event-dispatch calls are missing, and several identifiers lack their `this.`
// qualifier. Compare with the original audioLoader.js before reuse.
com.littleDebugger.daw.audioLoader = function () {
    this.audioLoadingStartedEventName = 'audio-loading-started';
    this.audioLoadingCompletedEventName = 'audio-loading-completed';

    // Load audio file.
    // <fileName> Name of the file to be loaded. This can be on the local machine if it has been loaded correctly.
    // <audioCtx> Audio context on which the audio file should be played.
    // <sourceReturnCallback> Callback to attach the audio to the context when loaded.
    // NOTE(review): inside the callback `this` will not be the module instance —
    // presumably the original captured it first; confirm against the source.
    this.loadAudioFile = function (fileName, audioCtx, sourceReturnCallback) {
        this.loadAudioBufferFromFile(fileName, audioCtx, function (buffer) {
            sourceReturnCallback(this.createBuffer(buffer, audioCtx));

    // Load audio file and return the buffer.
    // This function is public but is not yet called from outside of this module. 
    // It will be though, which might give you an idea about how I plan to play audio later on
    // in the series.

    // Function was based on the example here: 
    // NOTE(review): the referenced URL was lost in extraction.
    this.loadAudioBufferFromFile = function (fileName, audioCtx, bufferReturnCallback) {
        // NOTE(review): audioLoadingStartedEventName is referenced without `this.` here.
        fireEvent(audioLoadingStartedEventName, fileName);

        // NOTE(review): `request.open(` was lost — this line presumably read
        //   var request = new XMLHttpRequest();
        //   request.open('GET', fileName, true);
        var request = new XMLHttpRequest();'GET', fileName, true);
        request.responseType = 'arraybuffer';
        request.onload = function () {
            var audioData = request.response;
            audioCtx.decodeAudioData(audioData, function (buffer) {
                    // NOTE(review): the bufferReturnCallback(buffer) call appears to be missing.
                    fireEvent(audioLoadingCompletedEventName, fileName);
                function (e) {
                    "Error decoding audio file." + e.err


    // Creates an audio buffer.
    // <buffer> Decoded audio data to wrap.
    // <audioCtx> Audio context used to create the buffer source.
    // NOTE(review): `source` is assigned without var/let — an implicit global.
    this.createBuffer = function (buffer, audioCtx) {
        source = audioCtx.createBufferSource();
        source.buffer = buffer;
        return source;

    // Fires event.
    // <eventName> Name of the CustomEvent to raise.
    // <detail> Payload attached to the event's detail property.
    // NOTE(review): the dispatch call (presumably window.dispatchEvent(event)) is missing.
    var fireEvent = function (eventName, detail) {
        var event = new CustomEvent(eventName, {
            'detail': detail

    return this;



// Create namespace. (NOTE(review): the createNamespace call itself appears to have been lost in extraction.)

// This is the audio context module (not the visualiser).
// Audio context factory.
// Supports the Web Audio API across browsers by probing the vendor-prefixed
// constructors.
// NOTE(review): restored the truncated constructor chain, closing braces and the
// immediate invocation lost in extraction. Also replaced `throw new Exception()` —
// `Exception` is not a JavaScript built-in, so the original line would itself fail
// with a ReferenceError — with a standard Error carrying a message.
com.littleDebugger.daw.getAudioContext = (function () {
    // Returns a new AudioContext instance.
    // Alerts and throws if the Web Audio API is unavailable in this browser.
    var getAudioContext = function () {
        var ContextClass = (
            window.AudioContext ||
            window.webkitAudioContext ||
            window.mozAudioContext ||
            window.oAudioContext ||
            window.msAudioContext);
        if (ContextClass) {
            return new ContextClass();
        } else {
            alert("Web Audio API is not available. Please use a supported browser.");
            throw new Error("Web Audio API is not available.");
        }
    };

    return getAudioContext;
})();



// Simple pattern used for namespacing in JavaScript.
// The module pattern will be used to group related functionality.
// Modules are not yet supported in the main browsers natively.

// I do not plan to use any 3rd party libraries.
// This may mean reinventing the wheel in some cases but I do not want anything
// going on under the hood which I am not aware of.
// I will 'borrow' functions and snippets where required. This will be referenced.

// NOTE(review): the closing braces below were restored after being lost in extraction.

// Bootstrap the com.littleDebugger.namespacer namespace without clobbering any
// level that already exists.
if (typeof (com) === 'undefined') {
    com = {};
}

if (typeof (com.littleDebugger) === 'undefined') {
    com.littleDebugger = {};
}

if (typeof (com.littleDebugger.namespacer) === 'undefined') {
    com.littleDebugger.namespacer = {};
}

// Creates a namespace in the global space.
// <namespaceText> . separated namespace to be created (e.g. 'com.littleDebugger.daw').
com.littleDebugger.namespacer.createNamespace = function (namespaceText) {
    var namespaces = namespaceText.split('.');
    if (typeof (window[namespaces[0]]) === 'undefined') {
        window[namespaces[0]] = {};
    }

    var currentSpace = window[namespaces[0]];

    // Walk each nested level, creating it if absent.
    // `var i` fixes the implicit global the original created by assigning an
    // undeclared `i`.
    for (var i = 1; i < namespaces.length; i++) {
        var namespace = namespaces[i];
        if (typeof (currentSpace[namespace]) === 'undefined') {
            currentSpace[namespace] = {};
        }

        currentSpace = currentSpace[namespace];
    }
};



// Hides the logic to show/hide elements on the DOM.
// NOTE(review): the bodies of hideControl/showControl were lost in extraction;
// they are restored here by toggling the shared 'hidden' CSS class (defined in
// styles/index.css as `display: none`) — confirm against the original source.
com.littleDebugger.daw.controlHelpers = (function () {
    var hiddenClass = 'hidden';
    var that = {};

    // Hides an element by adding the 'hidden' class.
    // <control> DOM element to hide.
    that.hideControl = function (control) {
        control.classList.add(hiddenClass);
    };

    // Shows an element by removing the 'hidden' class.
    // <control> DOM element to show.
    that.showControl = function (control) {
        control.classList.remove(hiddenClass);
    };

    return that;
})();



/* Set the oscilloscope background on the canvas and give it a border.
   NOTE(review): every rule below was missing its closing brace (lost in
   extraction); the braces are restored here. */

#visualiserCanvas {
    padding: 0 0;
    margin: auto auto;
    border: 20px solid black;
    background: url('../images/osc.jpg');
    background-size: 100% 100%;
    border-radius: 20px;
}

/* Set the container in the center of the page. */

#container {
    padding: 0 0;
    margin: auto auto;
    display: table;
}

/* Maximize canvas on web-kit full screen. */

#visualiserCanvas:-webkit-full-screen {
    height: 100%;
}

/* Loading message. */

.message {
    color: red;
}

/* Show/hide audio source controls. */

.hidden {
    display: none;
}

/* To stop the controls jumping around when changing audio source. */

#audioFileSelect {
    height: 3em;
}


Leave a Reply