This JavaScript example demonstrates how to use an AudioWorkletNode for custom audio processing.
<!DOCTYPE html>
<html>
  <head>
    <title>XoaX.net JavaScript</title>
    <script src="AudioWorklet.js"></script>
    <link rel="icon" href="data:,">
    <!-- Requires a web server -->
  </head>
  <body onload="Initialize()">
    <h1>AudioWorklet</h1>
    <div style="border:1px solid black;width:300px;padding:5px;">
      <div>
        <label for="idSignal">Signal:</label>
        <input type="range" id="idSignal" min="0.0" max="1.0" step="0.05" value="0.1" />
      </div>
      <div>
        <label for="idNoise">Noise:</label>
        <input type="range" id="idNoise" min="0.0" max="1.0" step="0.05" value="0.1" />
      </div>
      <br />
      <button id="idOnOff">On/Off</button>
    </div>
  </body>
</html>
// Web Audio state shared by the UI handlers below.
let gqAudioContext = null; // AudioContext; null while sound is off (used as the on/off flag)
let gqSignalRange;         // "idSignal" range input controlling the oscillator gain
let gqNoiseRange;          // "idNoise" range input controlling the noise level
let gqGainNode;            // GainNode between the oscillator and the noise worklet
let gqNoiseNode;           // AudioWorkletNode running the "NoiseGenerator" processor
let gqNoiseParams;         // the worklet node's "noise" AudioParam
// Wire up the control panel: hook the On/Off button, attach the slider
// handlers, and keep both sliders disabled until audio is started.
function Initialize() {
  const qOnOffButton = document.getElementById("idOnOff");
  qOnOffButton.addEventListener("click", ToggleSound);

  gqSignalRange = document.getElementById("idSignal");
  gqNoiseRange = document.getElementById("idNoise");

  const kaqBindings = [
    [gqSignalRange, UpdateSignal],
    [gqNoiseRange, UpdateNoise],
  ];
  for (const [qRange, qHandler] of kaqBindings) {
    qRange.oninput = qHandler;
    qRange.disabled = true; // enabled by ToggleSound once audio is running
  }
}
// Lazily create the shared AudioContext and return a "NoiseGenerator"
// AudioWorkletNode, loading the processor module on first use.
async function CreateNoiseGenerator() {
  if (!gqAudioContext) {
    gqAudioContext = new AudioContext();
  }
  let qNode = null;
  try {
    // Succeeds when the worklet module was already added to this context.
    qNode = new AudioWorkletNode(gqAudioContext, "NoiseGenerator");
  } catch (qError) {
    // First use on this context: load the processor module, then retry.
    await gqAudioContext.audioWorklet.addModule("CNoiseGenerator.js");
    qNode = new AudioWorkletNode(gqAudioContext, "NoiseGenerator");
  }
  // The context may start suspended until a user gesture; resume it.
  await gqAudioContext.resume();
  return qNode;
}
// Build the audio graph: 440 Hz square oscillator -> gain -> noise
// worklet -> speakers, then start the oscillator and cache the worklet's
// "noise" AudioParam for the slider handler.
async function StartAudio() {
  gqNoiseNode = await CreateNoiseGenerator();

  gqGainNode = gqAudioContext.createGain();
  gqGainNode.gain.setValueAtTime(gqSignalRange.value, gqAudioContext.currentTime);

  // Test signal: a 440 Hz square wave.
  const qSignal = new OscillatorNode(gqAudioContext);
  qSignal.type = "square";
  qSignal.frequency.setValueAtTime(440, gqAudioContext.currentTime);

  qSignal.connect(gqGainNode);
  gqGainNode.connect(gqNoiseNode);
  gqNoiseNode.connect(gqAudioContext.destination);
  qSignal.start();

  gqNoiseParams = gqNoiseNode.parameters.get("noise");
  gqNoiseParams.setValueAtTime(gqNoiseRange.value, gqAudioContext.currentTime);
}
// Toggle audio on or off. gqAudioContext doubles as the on/off flag:
// null means off. The sliders are only usable while audio is running.
async function ToggleSound(event) {
  if (!gqAudioContext) {
    // FIX: await StartAudio() instead of leaving it as a floating promise.
    // Previously the sliders were enabled before the graph existed, and any
    // startup failure was an unhandled rejection.
    await StartAudio();
    gqSignalRange.disabled = false;
    gqNoiseRange.disabled = false;
  } else {
    gqSignalRange.disabled = true;
    gqNoiseRange.disabled = true;
    await gqAudioContext.close();
    gqAudioContext = null; // marks audio as off for the next toggle
  }
}
// Signal slider handler: apply the slider's value to the oscillator gain.
function UpdateSignal(qEvent) {
  // FIX: use the qEvent parameter — the body previously read the
  // non-standard global `event`, which is undefined in some browsers.
  gqGainNode.gain.setValueAtTime(qEvent.target.value, gqAudioContext.currentTime);
}
// Noise slider handler: apply the slider's value to the worklet's "noise" param.
function UpdateNoise(qEvent) {
  // FIX: use the qEvent parameter — the body previously read the
  // non-standard global `event`, which is undefined in some browsers.
  gqNoiseParams.setValueAtTime(qEvent.target.value, gqAudioContext.currentTime);
}
// AudioWorkletProcessor that passes its input through with white noise
// added, clamped to [-1, 1]. The noise level is the "noise" AudioParam.
class CNoiseGenerator extends AudioWorkletProcessor {
  // Add noise to the input
  process(qaIn, qaOut, qParams) {
    // FIX: the "noise" param array holds 1 value when constant over the
    // render quantum, or one value per sample frame when automated (a-rate).
    // Reading only [0] silently ignored intra-block automation.
    const kadNoise = qParams.noise;
    const kiLength = Math.min(qaIn.length, qaOut.length);
    for (let i = 0; i < kiLength; ++i) {
      const qIn = qaIn[i];
      const qOut = qaOut[i];
      const kiChannels = Math.min(qIn.length, qOut.length);
      for (let c = 0; c < kiChannels; ++c) {
        const kiSamples = qIn[c].length;
        for (let s = 0; s < kiSamples; s++) {
          const kdGain = kadNoise.length > 1 ? kadNoise[s] : kadNoise[0];
          // Random value in [-1, 1), scaled by the noise gain.
          const dRandom = 2 * (Math.random() - 0.5);
          // Clamp the sum into the valid sample range [-1, 1].
          qOut[c][s] = Math.max(-1.0, Math.min(1.0, qIn[c][s] + dRandom * kdGain));
        }
      }
    }
    return true; // keep the processor alive
  }
  static get parameterDescriptors() {
    return [
      {
        name: "noise",
        defaultValue: 0.1,
        minValue: 0,
        maxValue: 1,
      },
    ];
  }
}
registerProcessor("NoiseGenerator", CNoiseGenerator);
© 2007–2025 XoaX.net LLC. All rights reserved.