I want to record current playing audio using Audio Worklet processer and web audio api and play in real time with possible lag of 100ms current audio sources, but the audio is distorted and not playing correctly so what is correct way to fix the following issues?
In following code a test.mp3 file selected, then played, after that the start processing button clicked to make current playing audio volume to 0.01 and and play the new processed audio in new context. The audio taken from Audio Worklet Processor.
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Real-Time Audio Processing</title>
</head>
<body>
<h1>Real-Time Audio Processing</h1>
<audio id="audio" controls>
<source src="test.mp3" type="audio/mpeg">
Your browser does not support the audio tag.
</audio>
<button id="start">Start Processing</button>
<button id="stop" disabled>Stop Processing</button>
<script>
let originalAudio, audioContext, newAudioContext, workletNode, mediaStreamSource;
let bufferQueue = [];
let isPlaying = false;
let processorNode;
let startTime = 0;
let lastAudioTime = 0;
document.getElementById('start').addEventListener('click', async () => {
originalAudio = document.getElementById('audio');
originalAudio.volume = 0.01; // Mute original audio to 0.01 but still play
const stream = originalAudio.captureStream();
audioContext = new AudioContext();
newAudioContext = new AudioContext();
// Register WorkletProcessor
await audioContext.audioWorklet.addModule(URL.createObjectURL(new Blob([`
class RecorderProcessor extends AudioWorkletProcessor {
constructor() {
super();
this.port.start();
}
process(inputs) {
const input = inputs[0];
if (input.length > 0) {
const outputBuffer = input[0]; // First channel data
this.port.postMessage(outputBuffer); // Send to main thread
}
return true;
}
}
registerProcessor("recorder-processor", RecorderProcessor);
`], { type: "application/javascript" })));
workletNode = new AudioWorkletNode(audioContext, "recorder-processor");
workletNode.port.onmessage = (event) => {
const data = event.data;
bufferQueue.push(data);
if (!isPlaying) {
playBufferedAudio();
}
};
mediaStreamSource = audioContext.createMediaStreamSource(stream);
mediaStreamSource.connect(workletNode);
workletNode.connect(audioContext.destination);
document.getElementById('start').disabled = true;
document.getElementById('stop').disabled = false;
});
function playBufferedAudio() {
if (bufferQueue.length === 0) {
isPlaying = false;
return;
}
isPlaying = true;
const data = bufferQueue.shift();
const buffer = newAudioContext.createBuffer(1, data.length, newAudioContext.sampleRate);
buffer.copyToChannel(new Float32Array(data), 0);
const source = newAudioContext.createBufferSource();
source.buffer = buffer;
source.connect(newAudioContext.destination);
if (startTime === 0) {
startTime = newAudioContext.currentTime + 0.02; // Add slight delay to sync
} else {
startTime = Math.max(newAudioContext.currentTime, lastAudioTime);
}
lastAudioTime = startTime + buffer.duration;
source.start(startTime);
source.onended = playBufferedAudio;
}
document.getElementById('stop').addEventListener('click', () => {
audioContext.close();
newAudioContext.close();
bufferQueue = [];
isPlaying = false;
console.log("Stopped processing audio");
});
</script>
</body>
</html>