WebRTC audio – Don't know how to resume streaming after closing/stopping it

I was trying WebRTC for a chatbot with voice prompts. I managed to incorporate the voice prompts into the chat, but I have a problem: I noticed that mediaRecorder.stop() doesn't stop the stream, so the red recording circle in the browser window never disappears. I only want the microphone to be streaming while the user is recording a message.
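
If it helps, this is how I understand what's going on (my own interpretation, I may be wrong): stopping the recorder ends the recording, but the underlying audio track stays live, which is why the indicator never goes away.

  // My understanding (could be wrong): stopping the recorder does not end
  // the underlying audio track, so the browser keeps showing the mic as in use.
  mediaRecorder.stop();
  console.log(mediaRecorder.stream.getAudioTracks()[0].readyState); // still "live"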

What I tried:
I searched for other solutions and found that this line successfully ends the stream:
stream.getTracks().forEach(track => track.stop());
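
As far as I can tell this works because it ends the audio track itself, not just the recording (again, my own interpretation):

  // After stopping the tracks they are permanently "ended" (my understanding),
  // so the same stream/MediaRecorder can't simply be started again later.
  stream.getTracks().forEach((track) => track.stop());
  console.log(stream.getAudioTracks()[0].readyState); // "ended"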

The problem is that I don't know how to resume streaming after that: if I uncomment that line at the end of my onstop handler below, the red circle goes away, but the record button then becomes unresponsive and I can't record a second message. (There's a stripped-down sketch of what I'm aiming for after my code.)

This is my entire code. Sorry for not providing a more compact example; I'm not comfortable enough with JavaScript to know what I can safely leave out:

  let mediaRecorder;
  let recordedChunks = [];
  let isRecording = false; // Track the recording state

  // Function to start recording
  function startRecording() {
    if (mediaRecorder && mediaRecorder.state === 'inactive') {
      mediaRecorder.start();
      isRecording = true;
    }
  }

  // Function to stop recording
  function stopRecording() {
    if (mediaRecorder && mediaRecorder.state === 'recording') {
      mediaRecorder.stop();
      isRecording = false;
    }
  }
  // Function to initialize the MediaRecorder
  function initializeMediaRecorder() {
    if (!mediaRecorder) {
      // Initialize the MediaRecorder with audio constraints
      navigator.mediaDevices
        .getUserMedia({ audio: true })
        .then((stream) => {
          mediaRecorder = new MediaRecorder(stream);

          // Add data to the recordedChunks array when data is available
          mediaRecorder.ondataavailable = (e) => {
            if (e.data.size > 0) {
              recordedChunks.push(e.data);
            }
          };

          // When the recording is stopped, you can process the recorded data
          mediaRecorder.onstop = () => {
            const audioBlob = new Blob(recordedChunks, { type: 'audio/wav' });
            const audioSize = audioBlob.size;
            recordedChunks = [];

            // Example: Send the audio data to the server or do something else with it
            const messageItem = document.createElement('li');
            messageItem.classList.add('message', 'sent');
            messageItem.innerHTML = `
                <div class="message-text">
                    <div class="message-sender">
                        <b>You</b>
                    </div>
                    <div class="message-content">
                        Audio size: ${audioSize}. Recording state: ${mediaRecorder.state}
                    </div>
                </div>`;
            messagesList.appendChild(messageItem);
            //stream.getTracks().forEach( track => track.stop() );
          };
        })
        .catch((error) => {
          console.error('Error accessing audio:', error);
        });
    }
  }

  // Attach click event handler to the "Record" button
  const recordButton = document.querySelector('.btn-record');
  recordButton.addEventListener('click', () => {
    if (!isRecording) {
      // Start recording
      startRecording();
      initializeMediaRecorder(); // Initialize the MediaRecorder 

    } else {
      // Stop recording
      stopRecording();
    }
  });

I know some of the conditions are redundant; they're left over from debugging.
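
To show what I'm aiming for, here is a stripped-down sketch of the flow I have in mind (the beginRecording/endRecording names are just for this sketch, they're not in my code): request the microphone only when recording starts, and release it completely when recording stops. What I can't figure out is how to fold this back into my button logic without the button going dead after the first recording.

  // Sketch only – simplified, no chat UI, names are placeholders.
  let recorder = null;
  let chunks = [];

  async function beginRecording() {
    // Ask for the mic again for every recording, so the red circle
    // only shows while the user is actually recording.
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
    recorder = new MediaRecorder(stream);
    recorder.ondataavailable = (e) => {
      if (e.data.size > 0) chunks.push(e.data);
    };
    recorder.onstop = () => {
      const audioBlob = new Blob(chunks, { type: recorder.mimeType });
      chunks = [];
      console.log('recorded', audioBlob.size, 'bytes');
      // Release the mic so the red circle disappears...
      stream.getTracks().forEach((track) => track.stop());
      // ...and forget the recorder so the next click creates a fresh one.
      recorder = null;
    };
    recorder.start();
  }

  function endRecording() {
    if (recorder && recorder.state === 'recording') {
      recorder.stop();
    }
  }
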
Thanks.