I am trying to build a custom video conferencing app using mediasoup. I am totally new to WebRTC and mediasoup. I have already built my own SFU script that handles things like producers, consumers, whiteboard events, etc.
I then started building a simple mediasoup-client script to connect to a room and share events among the peers in that room. My problem is with screen sharing: the share event is sent successfully, the producer is created, and the other users receive the media track the producer is sharing. On each receiving side I create a video element and set its srcObject to a stream containing that track. Everything is set without errors, but when I test locally with two different tabs, the video element has its srcObject set and still nothing shows in the video, not even a black screen.
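For context, the device and transport setup follows the standard mediasoup-client flow. This is a simplified sketch rather than my exact code; request() is a placeholder for my socket signaling helper, which returns a promise with the server's response:

const mediasoupClient = require('mediasoup-client'); // bundled for the browser

let device, sendTransport, recvTransport;

async function joinRoom(socket) {
  // The server replies with router.rtpCapabilities for this room.
  const routerRtpCapabilities = await request(socket, 'getRouterRtpCapabilities');

  device = new mediasoupClient.Device();
  await device.load({ routerRtpCapabilities });

  // The server replies with { id, iceParameters, iceCandidates, dtlsParameters }
  // from its WebRtcTransport creation.
  sendTransport = device.createSendTransport(
    await request(socket, 'createTransport', { direction: 'send' }));
  recvTransport = device.createRecvTransport(
    await request(socket, 'createTransport', { direction: 'recv' }));

  // Forward the DTLS connect handshake to the server for both transports.
  for (const transport of [sendTransport, recvTransport]) {
    transport.on('connect', ({ dtlsParameters }, callback, errback) => {
      request(socket, 'connectTransport', { transportId: transport.id, dtlsParameters })
        .then(callback)
        .catch(errback);
    });
  }
}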
mediasoup-config.js:
module.exports = {
  mediaCodecs: [
    {
      kind: "audio",
      mimeType: "audio/opus",
      clockRate: 48000,
      channels: 2,
    },
    {
      kind: "video",
      mimeType: "video/VP8",
      clockRate: 90000,
      parameters: {
        "x-google-start-bitrate": 1000,
      },
    },
  ],
};
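On the server, these codecs are passed when creating the router. A minimal sketch, assuming a single worker (variable names are illustrative):

const mediasoup = require('mediasoup');
const { mediaCodecs } = require('./mediasoup-config');

async function createRouter() {
  const worker = await mediasoup.createWorker();
  // The router only routes media whose codecs appear in mediaCodecs.
  return worker.createRouter({ mediaCodecs });
}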
shareScreen function:
async function shareScreen() {
  const screenStream = await navigator.mediaDevices.getDisplayMedia({
    video: true,
  });
  const screenTrack = screenStream.getVideoTracks()[0];

  // produce() resolves once the server has acknowledged the new producer.
  const producer = await sendTransport.produce({
    track: screenTrack,
    appData: { mediaType: 'screen' },
  });

  // Close the producer if the user stops sharing from the browser UI.
  screenTrack.addEventListener('ended', () => producer.close());

  console.log('Shared screen track readyState:', screenTrack.readyState);
  console.log('Screen track:', screenTrack);

  // Local preview: set playback attributes before attaching the stream.
  localVideo.autoplay = true;
  localVideo.playsInline = true;
  localVideo.srcObject = screenStream;
}
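Note that sendTransport.produce() only resolves after the transport's 'produce' event has been answered with the server-side producer id, so my handler for that event roughly follows the standard pattern. A simplified sketch (the socket event names and ack-callback style are just how I signal, not mediasoup APIs):

sendTransport.on('produce', ({ kind, rtpParameters, appData }, callback, errback) => {
  // Ask the server to create the Producer on my WebRtcTransport
  // and return its id so mediasoup-client can resolve produce().
  socket.emit('produce', { transportId: sendTransport.id, kind, rtpParameters, appData },
    ({ id, error }) => {
      if (error) return errback(new Error(error));
      callback({ id });
    });
});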
handleNewConsumer function:
function handleNewConsumer(consumerData) {
  console.log('New consumer', consumerData);
  const { id, kind, rtpParameters, producerId } = consumerData;

  recvTransport.consume({
    id,
    producerId,
    kind,
    rtpParameters,
  }).then(consumer => {
    const stream = new MediaStream();
    stream.addTrack(consumer.track);
    console.log('Remote stream tracks:', stream.getTracks());
    console.log('Consumer track:', consumer.track);
    console.log('Track readyState:', consumer.track.readyState);
    console.log('Track muted:', consumer.track.muted);

    const remoteVideo = document.createElement('video');
    remoteVideo.autoplay = true;
    remoteVideo.playsInline = true;

    remoteVideo.onerror = (event) => {
      console.error('Video playback error:', event, remoteVideo.error);
    };

    remoteVideo.addEventListener('loadedmetadata', () => {
      console.log('Remote video loaded metadata:', remoteVideo.videoWidth, remoteVideo.videoHeight, remoteVideo.duration);
      remoteVideo.play().catch(err => {
        console.error('Auto-play failed:', err);
      });
    });

    remoteVideo.addEventListener('resize', () => {
      console.log('Remote video resized to:', remoteVideo.videoWidth, remoteVideo.videoHeight);
    });

    // The remote track usually arrives muted and only unmutes once RTP
    // actually flows, so attach the stream on 'unmute' (or immediately if
    // it is already live and unmuted).
    consumer.track.addEventListener('unmute', () => {
      console.log('Track unmuted, setting srcObject');
      remoteVideo.srcObject = stream;
    });

    if (consumer.track.readyState === 'live' && !consumer.track.muted) {
      console.log('Track already live and unmuted, setting srcObject');
      remoteVideo.srcObject = stream;
    }

    remoteVideos.appendChild(remoteVideo);
    console.log('Remote video element appended:', remoteVideo);
  });
}
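For completeness, the mediasoup docs recommend creating server-side video consumers with paused: true and resuming them only after the client-side consume() has completed. A minimal sketch of that server flow, with placeholder names (router, recvWebRtcTransport, consumersById) rather than my exact code:

socket.on('consume', async ({ producerId, rtpCapabilities }, callback) => {
  // Refuse if the client cannot consume this producer with its RTP capabilities.
  if (!router.canConsume({ producerId, rtpCapabilities })) {
    return callback({ error: 'cannot consume' });
  }

  const consumer = await recvWebRtcTransport.consume({
    producerId,
    rtpCapabilities,
    paused: true, // recommended: resume only after the client has consumed
  });
  consumersById.set(consumer.id, consumer);

  callback({
    id: consumer.id,
    producerId,
    kind: consumer.kind,
    rtpParameters: consumer.rtpParameters,
  });
});

socket.on('resumeConsumer', async ({ consumerId }, callback) => {
  await consumersById.get(consumerId).resume();
  callback();
});

The client would then signal 'resumeConsumer' right after recvTransport.consume() resolves. Could a missing resume on the server explain the consumer track staying muted and the video showing nothing?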


