HTML Audio Spectrum Visualizer

HTML Audio Spectrum Visualizer
Project: Audio Waveform Visualizer
Author: bataimx
Edit Online: View on CodePen
License: MIT

This code snippet helps you to create an audio spectrum visualizer using the HTML canvas element. It utilizes the Web Audio API to process and draw the audio waveform. When you upload an audio file, it will generate a visual representation of the audio waveform on an HTML canvas element.

The visualizer works by first decoding the audio data using the Web Audio API, then normalizing and segmenting the audio data to represent it on the canvas. The waveform is drawn using line segments that reflect the amplitude of the audio at different time intervals. As the audio plays, the visualizer updates in real-time, creating a timeline that indicates the progress of the audio playback.

This Spectrum Visualizer can be a valuable tool for understanding the characteristics of audio files, analyzing audio data, or simply creating visually appealing audio representations for web applications.

How to Create Audio Spectrum Visualizer in HTML

1. Create the HTML structure for the audio spectrum visualizer as follows:

<div class="wrapper">
  <!-- accept lists audio/mpeg (the canonical MIME type for .mp3) alongside the
       legacy audio/mp3 alias; controls is a boolean attribute and takes no value -->
  <input id="fileinput" type="file" accept="audio/mpeg,audio/mp3,video/mp4"/>
  <canvas id="canvas"></canvas>
  <audio id="audio" src="" controls></audio>
</div>

2. Now, style the audio spectrum visualizer using the following CSS styles:

/* Let the page fill the viewport. */
html {
  height: 100%;
}

body {
  height: 100%;
  margin: 0;
  background-color: #edf2f7; /* light blue-grey page background */
}

/* Floating pill-shaped badge pinned to the bottom-right corner.
   Collapsed to icon width by default; expands on hover to reveal its label.
   (Unrelated to the visualizer itself — this styles an embedded promo badge.) */
.sh-card {
  position: fixed;
  bottom: 10px;
  right: 10px;
  background: #edf2f7;
  padding: 7px 10px;
  border-radius: 50px; /* large radius -> pill shape */
  border: none;
  color: #000;
  box-shadow: 2px 2px 20px -10px #000;
  transition: all 0.3s ease; /* animates the width change on hover */
  z-index: 99999; /* keep the badge above all page content */
  font-family: system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, "Noto Sans", sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji";
  line-height: 1.5;
  overflow: hidden; /* clip the label while collapsed */
  width: 18px; /* collapsed: icon only */
  white-space: nowrap;
}
.sh-card:hover {
  width: 92px; /* expanded: icon + label */
}

/* Icon and label sit on one line inside the pill. */
.sh-card-icon,
.sh-card-info {
  display: inline-block;
  vertical-align: baseline;
  line-height: 1;
}

.sh-card-info {
  font-size: 16px;
  margin-left: 7px;
}

.sh-card-icon {
  width: 18px;
  height: 15px;
}

/* Keep the badge link unstyled in every state. */
.sh-card-link,
.sh-card-link:hover,
.sh-card-link:active,
.sh-card-link:visited {
  color: #000;
  text-decoration: none;
  line-height: 1;
}

.sh-card-box {
  display: block;
}

/* Classic "bounceIn" entrance: scale up past 1, overshoot back and forth,
   then settle. Duplicated with -webkit- prefix for older WebKit browsers. */
@-webkit-keyframes bounceIn {
  0%, 20%, 40%, 60%, 80%, to {
    -webkit-animation-timing-function: cubic-bezier(0.215, 0.61, 0.355, 1);
    animation-timing-function: cubic-bezier(0.215, 0.61, 0.355, 1);
  }
  0% {
    opacity: 0;
    -webkit-transform: scale3d(0.3, 0.3, 0.3);
    transform: scale3d(0.3, 0.3, 0.3);
  }
  20% {
    -webkit-transform: scale3d(1.1, 1.1, 1.1);
    transform: scale3d(1.1, 1.1, 1.1);
  }
  40% {
    -webkit-transform: scale3d(0.9, 0.9, 0.9);
    transform: scale3d(0.9, 0.9, 0.9);
  }
  60% {
    opacity: 1;
    -webkit-transform: scale3d(1.03, 1.03, 1.03);
    transform: scale3d(1.03, 1.03, 1.03);
  }
  80% {
    -webkit-transform: scale3d(0.97, 0.97, 0.97);
    transform: scale3d(0.97, 0.97, 0.97);
  }
  to {
    opacity: 1;
    -webkit-transform: scaleX(1);
    transform: scaleX(1);
  }
}
/* Unprefixed copy of the same animation for standards-compliant browsers. */
@keyframes bounceIn {
  0%, 20%, 40%, 60%, 80%, to {
    -webkit-animation-timing-function: cubic-bezier(0.215, 0.61, 0.355, 1);
    animation-timing-function: cubic-bezier(0.215, 0.61, 0.355, 1);
  }
  0% {
    opacity: 0;
    -webkit-transform: scale3d(0.3, 0.3, 0.3);
    transform: scale3d(0.3, 0.3, 0.3);
  }
  20% {
    -webkit-transform: scale3d(1.1, 1.1, 1.1);
    transform: scale3d(1.1, 1.1, 1.1);
  }
  40% {
    -webkit-transform: scale3d(0.9, 0.9, 0.9);
    transform: scale3d(0.9, 0.9, 0.9);
  }
  60% {
    opacity: 1;
    -webkit-transform: scale3d(1.03, 1.03, 1.03);
    transform: scale3d(1.03, 1.03, 1.03);
  }
  80% {
    -webkit-transform: scale3d(0.97, 0.97, 0.97);
    transform: scale3d(0.97, 0.97, 0.97);
  }
  to {
    opacity: 1;
    -webkit-transform: scaleX(1);
    transform: scaleX(1);
  }
}
/* Applies the bounce after a 1s delay; opacity starts at 0 and
   fill-mode: forwards keeps the element visible once it finishes. */
.sh-bounceIn {
  -webkit-animation-duration: 0.75s;
  animation-duration: 0.75s;
  -webkit-animation-name: bounceIn;
  animation-name: bounceIn;
  animation-fill-mode: forwards;
  -webkit-animation-fill-mode: forwards;
  animation-delay: 1s;
  -webkit-animation-delay: 1s;
  opacity: 0;
}

/* Waveform drawing surface: full width, fixed height, soft inset shadow.
   Note: CSS sizes the element; the script separately sizes the canvas
   backing store (canvas.width/height) for the device pixel ratio. */
canvas {
  width: 100%;
  height: 130px;
  background: #f9f9f9;
  margin: 2rem auto;
  box-shadow: inset 0px 0px 25px -15px #000;
  border-radius: 5px;
  overflow: hidden;
}

.wrapper {
  padding: 2rem;
}

3. Load the following scripts before closing the body tag:

<script src='https://codepen.io/bataimx/pen/JjdyXyG.js'></script>

4. Finally, add the following JavaScript code to generate the audio spectrum.

"use strict";
// Older Safari exposes the Web Audio API only under the webkit prefix;
// normalize it so the rest of the code can use AudioContext directly.
window.AudioContext = window.AudioContext || window.webkitAudioContext;
/**
 * Decodes an audio file and renders its waveform on the page's <canvas>.
 * Segment geometry is cached in `this.data` so drawTimeline() can repaint
 * the already-played portion in a darker color as playback progresses.
 */
class renderWave {
    /**
     * @param {Promise<ArrayBuffer>} message - promise resolving to the raw
     *   bytes of the selected audio file (e.g. from `file.arrayBuffer()`).
     */
    constructor(message) {
        this._samples = 10000; // number of line segments drawn across the canvas
        this._strokeStyle = "#3098ff"; // default waveform color
        this.audioContext = new AudioContext();
        this.canvas = document.querySelector("canvas");
        this.ctx = this.canvas.getContext("2d");
        this.data = []; // cached segment geometry, reused by drawTimeline()
        message
            .then(arrayBuffer => this.audioContext.decodeAudioData(arrayBuffer))
            .then(audioBuffer => {
                this.draw(this.normalizedData(audioBuffer));
                this.drawData(this.data);
            })
            .catch(err => {
                // Surface decode failures (unsupported codec, truncated file)
                // instead of silently leaving the canvas blank.
                console.error("Unable to decode audio data:", err);
            });
    }
    /**
     * Downsamples the first channel of the decoded audio to `this._samples`
     * evenly spaced amplitude values.
     * @param {AudioBuffer} audioBuffer - decoded audio data.
     * @returns {number[]} one amplitude per output sample slot.
     */
    normalizedData(audioBuffer) {
        const rawData = audioBuffer.getChannelData(0); // one channel is enough for a waveform
        const samples = this._samples; // number of samples in the final data set
        const blockSize = Math.floor(rawData.length / samples); // input frames per output sample
        const filteredData = [];
        for (let i = 0; i < samples; i++) {
            filteredData.push(rawData[i * blockSize]);
        }
        return filteredData;
    }
    /**
     * Sets up the canvas for the device pixel ratio and converts the
     * normalized amplitudes into cached line-segment geometry.
     * @param {number[]} normalizedData - output of normalizedData().
     * @returns {Array<{x: number, h: number, w: number, isEven: number}>}
     */
    draw(normalizedData) {
        const canvas = this.canvas;
        const dpr = window.devicePixelRatio || 1;
        const padding = 10;
        // Size the backing store for the device pixel ratio so lines stay crisp.
        canvas.width = canvas.offsetWidth * dpr;
        canvas.height = (canvas.offsetHeight + padding * 2) * dpr;
        this.ctx.scale(dpr, dpr);
        this.ctx.translate(0, canvas.offsetHeight / 2 + padding); // set Y = 0 to the vertical center
        const width = canvas.offsetWidth / normalizedData.length;
        for (let i = 0; i < normalizedData.length; i++) {
            const x = width * i;
            let height = normalizedData[i] * canvas.offsetHeight - padding;
            if (height < 0) {
                height = 0;
            }
            else if (height > canvas.offsetHeight / 2) {
                // Clamp tall peaks to half the canvas height. (The original code
                // assigned the boolean comparison result here, turning peaks into `true`.)
                height = canvas.offsetHeight / 2;
            }
            this.data.push({
                x: x,
                h: height,
                w: width,
                isEven: (i + 1) % 2 // alternate segments above/below the center line
            });
        }
        return this.data;
    }
    /**
     * Strokes one waveform segment; even-indexed segments point up, odd ones down.
     * @param {CanvasRenderingContext2D} ctx
     * @param {number} x - segment start x.
     * @param {number} height - segment amplitude in CSS pixels.
     * @param {number} width - segment width in CSS pixels.
     * @param {number} isEven - 1 draws upward, 0 mirrors downward.
     * @param {string} [colors] - stroke color; defaults to the waveform color.
     */
    drawLineSegment(ctx, x, height, width, isEven, colors = this._strokeStyle) {
        ctx.lineWidth = 1; // how thick the line is
        ctx.strokeStyle = colors; // what color our line is
        ctx.beginPath();
        height = isEven ? height : -height;
        ctx.moveTo(x, 0);
        ctx.lineTo(x + width, height);
        ctx.stroke();
    }
    /**
     * Strokes every cached segment in the given color.
     * @param {Array<{x, h, w, isEven}>} data - segment geometry from draw().
     * @param {string} [colors] - stroke color; defaults to the waveform color.
     */
    drawData(data, colors = this._strokeStyle) {
        // forEach, not map: we only want the side effect of drawing.
        data.forEach(item => {
            this.drawLineSegment(this.ctx, item.x, item.h, item.w, item.isEven, colors);
        });
    }
    /**
     * Repaints the already-played portion of the waveform in a darker color.
     * @param {number} percent - playback progress in [0, 1].
     */
    drawTimeline(percent) {
        const end = Math.ceil(this._samples * percent);
        this.drawData(this.data.slice(0, end), "#1d1e22");
    }
}
// Rebuild the waveform and wire up playback progress whenever a file is chosen.
document.getElementById("fileinput").addEventListener("change", function () {
    const file = this.files[0];
    if (!file) {
        return; // selection was cleared — nothing to visualize
    }
    const wave = new renderWave(file.arrayBuffer());
    const audioPlayer = document.getElementById("audio");
    audioPlayer.src = URL.createObjectURL(file);
    // Repaint the progress overlay as playback advances.
    audioPlayer.ontimeupdate = function () {
        wave.drawTimeline(this.currentTime / this.duration);
    };
});

That’s all! Hopefully, you have successfully created an audio spectrum visualizer. If you have any questions or suggestions, feel free to comment below.

Leave a Comment

Comments

No comments yet. Why don’t you start the discussion?

Leave a Reply

Your email address will not be published. Required fields are marked *