Skip to content

Examples : FFT

Description

Demonstrates using the HTML5 Audio API to generate an FFT from a microphone and then use it to generate a displacement map that is then used to distort a plane geometry.

This example demonstrates:

  • Using an HTML5 audio analyser to get byte frequency data from a connected microphone.
  • Generating an offscreen canvas from the FFT data and using it as a displacement map.

Code

./src/client/client.ts

  1
  2
  3
  4
  5
  6
  7
  8
  9
 10
 11
 12
 13
 14
 15
 16
 17
 18
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
import * as THREE from 'three'
import { MeshBasicMaterial } from 'three'
import { OrbitControls } from 'three/examples/jsm/controls/OrbitControls'
import Stats from 'three/examples/jsm/libs/stats.module'

// Scene lit by a single ambient light so the wireframe material is visible.
const scene = new THREE.Scene()
scene.add(new THREE.AmbientLight())

// Perspective camera placed above and in front of the plane.
const camera = new THREE.PerspectiveCamera(
    75,
    window.innerWidth / window.innerHeight,
    0.1,
    1000
)
camera.position.set(7, 0.75, 10)

// Full-window WebGL renderer attached to the document body.
const renderer = new THREE.WebGLRenderer()
renderer.setSize(window.innerWidth, window.innerHeight)
document.body.appendChild(renderer.domElement)

// Orbit controls register their own event listeners; the instance
// itself is not needed afterwards.
new OrbitControls(camera, renderer.domElement)

// Offscreen canvas that receives the scrolling FFT "waterfall"; it is
// wrapped in a THREE.Texture and sampled as a displacement map.
// (createElement('canvas') is already typed as HTMLCanvasElement, so no
// assertion is needed.)
const canvas = document.createElement('canvas')
canvas.width = 256
canvas.height = 512

// getContext('2d') can return null; fail fast with a clear error instead
// of asserting the null away and crashing later inside updateFFT().
const maybeCtx = canvas.getContext('2d')
if (maybeCtx === null) {
    throw new Error('Could not acquire a 2D rendering context')
}
const ctx: CanvasRenderingContext2D = maybeCtx

const texture = new THREE.Texture(canvas)
// Linear min/mag filtering: the texture is redrawn every frame, so skip
// mipmapping and keep the displacement sampling smooth.
texture.minFilter = THREE.LinearFilter
texture.magFilter = THREE.LinearFilter

// Wireframe plane (256x256 segments) whose vertices are displaced by the
// FFT texture; rotated to lie flat in the XZ plane.
const planeGeometry = new THREE.PlaneGeometry(20, 20, 256, 256)
const planeMaterial = new THREE.MeshPhongMaterial({
    wireframe: true,
    color: new THREE.Color(0x00ff00),
    displacementMap: texture,
    displacementScale: 10,
})
const plane = new THREE.Mesh(planeGeometry, planeMaterial)
plane.rotateX(-Math.PI / 2)
scene.add(plane)

// Keep the camera projection and the renderer's canvas in sync with the
// browser window, and repaint immediately after a resize.
function onWindowResize() {
    const { innerWidth, innerHeight } = window
    camera.aspect = innerWidth / innerHeight
    camera.updateProjectionMatrix()
    renderer.setSize(innerWidth, innerHeight)
    render()
}
window.addEventListener('resize', onWindowResize, false)

// Audio objects created once the user grants microphone access in
// connectAudioAPI(); imageData is reused each frame by updateFFT().
// mediaSource and imageData were implicitly `any`; give them explicit
// DOM types so strict mode checks their use.
let context: AudioContext
let analyser: AnalyserNode
let mediaSource: MediaStreamAudioSourceNode
let imageData: ImageData

/**
 * Request a media stream and hand it to `callback` on success.
 *
 * NOTE(review): nothing in this file calls this helper — connectAudioAPI()
 * uses navigator.mediaDevices directly — so it appears to be dead code.
 *
 * The original implementation used the long-deprecated
 * `navigator.getUserMedia` (and its `webkit`/`moz` prefixed variants),
 * whose callback type `NavigatorUserMediaSuccessCallback` has been removed
 * from the TypeScript DOM library. This version keeps the same
 * (constraints, success-callback) interface but delegates to the modern
 * promise-based `navigator.mediaDevices.getUserMedia`.
 *
 * @param dictionary media constraints, e.g. `{ audio: true }`
 * @param callback   invoked with the granted MediaStream
 */
function getUserMedia(
    dictionary: MediaStreamConstraints,
    callback: (stream: MediaStream) => void
) {
    try {
        navigator.mediaDevices
            .getUserMedia(dictionary)
            .then(callback)
            .catch((e) => {
                // Mirrors the original error callback: log and continue.
                console.dir(e)
            })
    } catch (e) {
        alert('getUserMedia threw exception :' + e)
    }
}

// Create the AudioContext and analyser, ask for microphone access, wire
// the mic stream into the analyser, and start the render loop.
function connectAudioAPI() {
    try {
        context = new AudioContext()
        analyser = context.createAnalyser()
        analyser.fftSize = 2048

        // Once the user grants access, feed the stream to the analyser
        // and kick off the animation loop.
        const onStream = (stream: MediaStream) => {
            mediaSource = context.createMediaStreamSource(stream)
            mediaSource.connect(analyser)
            animate()
        }

        navigator.mediaDevices
            .getUserMedia({ audio: true, video: false })
            .then(onStream)
            .catch((err) => {
                alert(err)
            })
    } catch (e) {
        alert(e)
    }
}

// Read the current FFT frame and append it to the bottom of the canvas,
// scrolling the existing rows up by one pixel so the canvas becomes a
// waterfall history of the spectrum.
function updateFFT() {
    const timeData = new Uint8Array(analyser.frequencyBinCount)
    analyser.getByteFrequencyData(timeData)

    // Shift everything up one row: copy rows 1..511 back to row 0.
    imageData = ctx.getImageData(0, 1, 256, 511)
    ctx.putImageData(imageData, 0, 0, 0, 0, 256, 512)

    // Paint the newest FFT frame along the bottom edge; the red channel
    // encodes the magnitude of each frequency bin.
    for (let col = 0; col < 256; col++) {
        ctx.fillStyle = `rgb(${timeData[col]}, 0, 0) `
        ctx.fillRect(col, 510, 2, 2)
    }

    // Tell three.js to re-upload the canvas to the GPU.
    texture.needsUpdate = true
}

// FPS counter overlay (three.js stats module), updated every frame in animate().
const stats = Stats()
document.body.appendChild(stats.domElement)

// Per-frame loop, started from connectAudioAPI() once the microphone
// stream is connected: schedule the next frame, refresh the FFT texture,
// render, and update the FPS counter.
function animate() {
    requestAnimationFrame(animate)

    updateFFT()

    render()

    stats.update()
}

// Draw one frame; also called directly from the resize handler.
function render() {
    renderer.render(scene, camera)
}

// Wait for the page to finish loading before prompting for mic access.
window.onload = () => {
    connectAudioAPI()
}