Why is this AudioWorklet to MP3 code producing different results on Chromium and Firefox?

I’m running the same code on Firefox 125 and Chromium 124.

The code, when run on Firefox, produces a Blob that is a correctly encoded MP3 file – though it briefly chops up whatever audio is playing in Firefox while the encoding is being finalized.

The Chromium version produces an MP3 that is full of glitches and clipping, and that plays back faster than the original.

Here’s a link to a ZIP file containing the result examples https://github.com/guest271314/MP3Recorder/files/14625262/firefox125_chromium124_audioworklet_to_mp3.zip.

Here’s a link to the source code https://github.com/guest271314/MP3Recorder/blob/main/MP3Recorder.js.

In pertinent part:

// https://www.iis.fraunhofer.de/en/ff/amm/consumer-electronics/mp3.html
// https://www.audioblog.iis.fraunhofer.com/mp3-software-patents-licenses
// Records an audio MediaStreamTrack to MP3 by routing it through an
// AudioWorkletProcessor that runs the lamejs encoder inside the
// AudioWorkletGlobalScope, then transferring a ReadableStream of encoded
// MP3 chunks back to the main thread and collecting it into a Blob.
// NOTE(review): the constructor returns a Promise (the result of
// ac.suspend().then(...)), so callers must `await new MP3Recorder(track)`.
// A static async factory (e.g. MP3Recorder.create()) would be the more
// conventional shape for this.
class MP3Recorder {
  constructor(audioTrack) {
    // Identity TransformStream configured as a 64 KiB byte queue. Its
    // readable/writable sides (and this.writer below) are stored on the
    // instance but are not consumed anywhere in the code shown here —
    // TODO confirm they are used elsewhere, otherwise dead plumbing.
    const {
      readable,
      writable
    } = new TransformStream({}, {}, {
      highWaterMark: 65536,
    });
    Object.assign(this, {
      readable,
      writable,
      audioTrack,
    });
    this.writer = this.writable.getWriter();
    // Finalize the recording automatically if the track ends on its own.
    this.audioTrack.onended = this.stop.bind(this);

    // NOTE(review): AudioContextOptions defines only `latencyHint` and
    // `sampleRate`; `numberOfChannels` is not a member and browsers will
    // silently ignore it — verify against the Web Audio spec.
    this.ac = new AudioContext({
      latencyHint: .2,
      sampleRate: 44100,
      numberOfChannels: 2,
    });

    // Deferred promise resolved with the final MP3 Blob inside
    // aw.port.onmessage below; stop() hands it back to the caller.
    const {
      resolve,
      promise
    } = Promise.withResolvers();
    this.promise = promise;

    this.ac.onstatechange = async (e) => {
      console.log(e.target.state);
    };
    // The context starts suspended; start() resumes it once everything is
    // wired up, so no audio is processed before the graph is complete.
    return this.ac.suspend().then(async () => {
      // ...
      // `file` is defined in the elided code above (TODO confirm) — its text
      // is the lamejs library source, prepended to the worklet module so the
      // encoder is available inside the AudioWorkletGlobalScope.
      const lamejs = await file.text();
      // const {lamejs} = await import(url);
      // NOTE(review): in the worklet below, once `done` is set, process()
      // writes the current input and flushes; but if flush() returns an
      // empty buffer, control falls through the `if` to the trailing
      // this.write(inputs.flat()) and encodes the same input block a second
      // time in one callback — verify that path.
      const processor = `${lamejs}
class AudioWorkletStream extends AudioWorkletProcessor {
  constructor(options) {
    super(options);
    this.mp3encoder = new lamejs.Mp3Encoder(2, 44100, 128);
    this.done = false;
    this.transferred = false;
    this.controller = void 0;
    this.readable = new ReadableStream({
      start: (c) => {
        return this.controller = c;
      }
    });
    this.port.onmessage = (e) => {
      this.done = true;
    }
  }
  write(channels) {
    const [left, right] = channels;
    let leftChannel, rightChannel;
    // https://github.com/zhuker/lamejs/commit/e18447fefc4b581e33a89bd6a51a4fbf1b3e1660
    leftChannel = new Int32Array(left.length);
    rightChannel = new Int32Array(right.length);
    for (let i = 0; i < left.length; i++) {
      leftChannel[i] = left[i] < 0 ? left[i] * 32768 : left[i] * 32767;
      rightChannel[i] = right[i] < 0 ? right[i] * 32768 : right[i] * 32767;
    }
    const mp3buffer = this.mp3encoder.encodeBuffer(leftChannel, rightChannel);
    if (mp3buffer.length > 0) {
      this.controller.enqueue(new Uint8Array(mp3buffer));
    }
  }
  process(inputs, outputs) {
    if (this.done) {
      try {
      this.write(inputs.flat());
      const mp3buffer = this.mp3encoder.flush();
      if (mp3buffer.length > 0) {
        this.controller.enqueue(new Uint8Array(mp3buffer));
        this.controller.close();
        this.port.postMessage(this.readable, [this.readable]);
        this.port.close();
        return false;
      }
      } catch (e) {
        this.port.close();
        return false;
      }
    }
    this.write(inputs.flat());
    return true;
  }
};
registerProcessor(
  "audio-worklet-stream",
  AudioWorkletStream
)`;
      // Load the worklet module from a Blob URL built from the string above
      // (lamejs source + processor class concatenated).
      this.worklet = URL.createObjectURL(new Blob([processor], {
        type: "text/javascript",
      }));
      await this.ac.audioWorklet.addModule(this.worklet);
      // NOTE(review): two stereo outputs are declared but never connected to
      // ac.destination in the code shown — confirm both are actually needed.
      this.aw = new AudioWorkletNode(this.ac, "audio-worklet-stream", {
        numberOfInputs: 1,
        numberOfOutputs: 2,
        outputChannelCount: [2, 2],
      });
      this.aw.onprocessorerror = (e) => {
        console.error(e);
        console.trace();
      };
      // The worklet transfers its ReadableStream of MP3 chunks here once
      // encoding finishes: drain it into a Blob, resolve the deferred
      // promise, then tear down the track, graph, and context.
      this.aw.port.onmessage = async (e) => {
        console.log(e.data);
        if (e.data instanceof ReadableStream) {
          const blob = new Blob([await new Response(e.data).arrayBuffer()], {
            type: "audio/mp3",
          });
          resolve(blob);
          console.log(blob);
          this.audioTrack.stop();
          this.msasn.disconnect();
          this.aw.disconnect();
          this.aw.port.close();
          this.aw.port.onmessage = null;
          await this.ac.close();
        }
      };
      // Feed the captured track into the worklet node.
      this.msasn = new MediaStreamAudioSourceNode(this.ac, {
        mediaStream: new MediaStream([this.audioTrack]),
      })
      this.msasn.connect(this.aw);
      return this;
    }).catch(e => console.log(e));
  }
  // Resumes the suspended AudioContext so the worklet's process() starts
  // firing; resolves with the audio track being recorded.
  async start() {
    return this.ac.resume().then(() => this.audioTrack).catch(e => console.log(e));
  }
  // Signals the worklet to flush the encoder (any message sets `done`) and
  // returns the promise that resolves with the final MP3 Blob.
  async stop(e) {
    this.aw.port.postMessage(null);
    return this.promise;
  }
}

Here’s how I use the code; the setTimeout() call shows how to reproduce the results I’m getting in my tests:

// Capture a raw stereo microphone track (all input processing disabled),
// record it through MP3Recorder, and after ten seconds stop the recording
// and log an object URL for the resulting MP3 Blob.
const constraints = {
  audio: {
    channelCount: 2,
    noiseSuppression: false,
    autoGainControl: false,
    echoCancellation: false,
  },
};
var stream = await navigator.mediaDevices.getUserMedia(constraints);
var audioTrack = stream.getAudioTracks()[0];
var recorder = await new MP3Recorder(audioTrack);
var start = await recorder.start();
setTimeout(async () => {
  try {
    const blob = await recorder.stop();
    console.log(URL.createObjectURL(blob));
  } catch (e) {
    console.error(e);
  }
}, 10000);

What’s the issue on Chrome/Chromium?