I am writing a very basic Chrome extension that captures a video stream and sends it to a Node.js server, which in turn forwards it to YouTube's live-streaming server.
Here is my backend implementation, which receives the stream via WebRTC and pushes it to YouTube using FFmpeg:
const express = require('express');
const cors = require('cors');
const { RTCPeerConnection, RTCSessionDescription } = require('@roamhq/wrtc');
const { spawn } = require('child_process');

const app = express();
app.use(express.json());
app.use(cors());

app.post('/webrtc', async (req, res) => {
  const peerConnection = new RTCPeerConnection();

  // Start ffmpeg process for streaming
  const ffmpeg = spawn('ffmpeg', [
    '-f', 'flv',
    '-i', 'pipe:0',
    '-c:v', 'libx264',
    '-preset', 'veryfast',
    '-maxrate', '3000k',
    '-bufsize', '6000k',
    '-pix_fmt', 'yuv420p',
    '-g', '50',
    '-f', 'flv',
    'rtmp://a.rtmp.youtube.com/live2/MY_KEY'
  ]);

  ffmpeg.on('error', (err) => {
    console.error('FFmpeg error:', err);
  });
  ffmpeg.stderr.on('data', (data) => {
    console.error('FFmpeg stderr:', data.toString());
  });
  ffmpeg.stdout.on('data', (data) => {
    console.log('FFmpeg stdout:', data.toString());
  });

  // Handle incoming tracks
  peerConnection.ontrack = (event) => {
    console.log('Track received:', event.track.kind);
    const track = event.track;

    // Stream the incoming track to FFmpeg
    track.onunmute = () => {
      console.log('Track unmuted:', track.kind);
      const reader = track.createReadStream();
      reader.on('data', (chunk) => {
        console.log('Forwarding chunk to FFmpeg:', chunk.length);
        ffmpeg.stdin.write(chunk);
      });
      reader.on('end', () => {
        console.log('Stream ended');
        ffmpeg.stdin.end();
      });
    };
    track.onmute = () => {
      console.log('Track muted:', track.kind);
    };
  };

  // Set the remote description (offer) received from the client
  await peerConnection.setRemoteDescription(new RTCSessionDescription(req.body.sdp));

  // Create an answer and send it back to the client
  const answer = await peerConnection.createAnswer();
  await peerConnection.setLocalDescription(answer);
  res.json({ sdp: peerConnection.localDescription });
});

app.listen(3000, () => {
  console.log('WebRTC to RTMP server running on port 3000');
});
This is the output I get, but nothing gets sent to YouTube:
FFmpeg stderr: ffmpeg version 7.0.2 Copyright (c) 2000-2024 the FFmpeg developers
               built with Apple clang version 15.0.0 (clang-1500.3.9.4)
FFmpeg stderr: configuration: --prefix=/opt/homebrew/Cellar/ffmpeg/7.0.2_1 --enable-shared --enable-pthreads --enable-version3 --cc=clang --host-cflags= --host-ldflags='-Wl,-ld_classic' --enable-ffplay --enable-gnutls --enable-gpl --enable-libaom --enable-libaribb24 --enable-libbluray --enable-libdav1d --enable-libharfbuzz --enable-libjxl --enable-libmp3lame --enable-libopus --enable-librav1e --enable-librist --enable-librubberband --enable-libsnappy --enable-libsrt --enable-libssh --enable-libsvtav1 --enable-libtesseract --enable-libtheora --enable-libvidstab --enable-libvmaf --enable-libvorbis --enable-libvpx --enable-libwebp --enable-libx264 --enable-libx265 --enable-libxml2 --enable-libxvid --enable-lzma --enable-libfontconfig --enable-libfreetype --enable-frei0r --enable-libass --enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenjpeg --enable-libspeex --enable-libsoxr --enable-libzmq --enable-libzimg --disable-libjack --disable-indev=jack --enable-videotoolbox --enable-audiotoolbox --enable-neon
FFmpeg stderr: libavutil 59. 8.100 / 59. 8.100
               libavcodec 61. 3.100 / 61. 3.100
               libavformat 61. 1.100 / 61. 1.100
               libavdevice 61. 1.100 / 61. 1.100
FFmpeg stderr: libavfilter 10. 1.100 / 10. 1.100
               libswscale 8. 1.100 / 8. 1.100
               libswresample 5. 1.100 / 5. 1.100
               libpostproc 58. 1.100 / 58. 1.100
Notably, none of my own logs ("Track received", "Track unmuted", "Forwarding chunk to FFmpeg") ever appear, so it looks like ontrack never fires. I do not understand what I am doing wrong; any help would be appreciated.
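For what it's worth, I believe the FFmpeg-to-RTMP leg can be sanity-checked on its own with a synthetic lavfi source, independent of WebRTC. This is just a test sketch I put together (MY_KEY is still a placeholder for my real stream key, and I added a silent audio track since YouTube ingest reportedly expects one):
// test-rtmp.js - standalone check of the FFmpeg -> YouTube leg.
// If the test pattern shows up in YouTube Studio, the RTMP side works
// and the problem is in the WebRTC -> stdin path.
const { spawn } = require('child_process');

const test = spawn('ffmpeg', [
  '-re',
  '-f', 'lavfi', '-i', 'testsrc=size=1280x720:rate=30',                     // synthetic video
  '-f', 'lavfi', '-i', 'anullsrc=channel_layout=stereo:sample_rate=44100',  // silent audio
  '-c:v', 'libx264', '-preset', 'veryfast', '-pix_fmt', 'yuv420p', '-g', '50',
  '-c:a', 'aac', '-b:a', '128k',
  '-t', '30',                                                               // stop after 30 seconds
  '-f', 'flv',
  'rtmp://a.rtmp.youtube.com/live2/MY_KEY'
]);

test.stderr.on('data', (d) => console.error(d.toString()));
test.on('close', (code) => console.log('ffmpeg exited with code', code));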
Optionally, here is the frontend code from the extension, which (to me) appears to be capturing and sending the stream:
popup.js & popup.html
document.addEventListener('DOMContentLoaded', () => {
  document.getElementById('openCapturePage').addEventListener('click', () => {
    chrome.tabs.create({
      url: chrome.runtime.getURL('capture.html')
    });
  });
});
<!DOCTYPE html>
<html lang="en">
<head>
  <title>StreamSavvy Popup</title>
  <script src="popup.js"></script>
</head>
<body>
  <h2>StreamSavvy</h2>
  <button id="openCapturePage">Open Capture Page</button>
</body>
</html>
capture.js & capture.html
let peerConnection;

async function startStreaming() {
  try {
    const stream = await navigator.mediaDevices.getDisplayMedia({
      video: { cursor: "always" },
      audio: false
    });

    peerConnection = new RTCPeerConnection({
      iceServers: [{ urls: 'stun:stun.l.google.com:19302' }]
    });

    stream.getTracks().forEach(track => peerConnection.addTrack(track, stream));

    const offer = await peerConnection.createOffer();
    await peerConnection.setLocalDescription(offer);

    const response = await fetch('http://localhost:3000/webrtc', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ sdp: peerConnection.localDescription })
    });

    const { sdp } = await response.json();
    await peerConnection.setRemoteDescription(new RTCSessionDescription(sdp));
    console.log("Streaming to server via WebRTC...");
  } catch (error) {
    console.error("Error starting streaming:", error.name, error.message);
  }
}

async function stopStreaming() {
  if (peerConnection) {
    // Stop all media tracks
    peerConnection.getSenders().forEach(sender => {
      if (sender.track) {
        sender.track.stop();
      }
    });
    // Close the peer connection
    peerConnection.close();
    peerConnection = null;
    console.log("Streaming stopped");
  }
}

document.addEventListener('DOMContentLoaded', () => {
  document.getElementById('startCapture').addEventListener('click', startStreaming);
  document.getElementById('stopCapture').addEventListener('click', stopStreaming);
});
<!DOCTYPE html>
<html lang="en">
<head>
  <title>StreamSavvy Capture</title>
  <script src="capture.js"></script>
</head>
<body>
  <h2>StreamSavvy Capture</h2>
  <button id="startCapture">Start Capture</button>
  <button id="stopCapture">Stop Capture</button>
</body>
</html>
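One thing I am not sure about on the client side: I POST the offer immediately after setLocalDescription(), and I never exchange individual ICE candidates with the server, so the SDP on either side may not contain any candidates yet. A variant I have been considering (untested sketch) waits for gathering to complete before sending the offer:
// Sketch: resolve once ICE gathering is done, so the offer SDP
// already carries all gathered candidates (no trickle ICE needed).
function waitForIceGathering(pc) {
  if (pc.iceGatheringState === 'complete') return Promise.resolve();
  return new Promise((resolve) => {
    pc.addEventListener('icegatheringstatechange', function onChange() {
      if (pc.iceGatheringState === 'complete') {
        pc.removeEventListener('icegatheringstatechange', onChange);
        resolve();
      }
    });
  });
}

// In startStreaming(), after setLocalDescription(offer):
//   await waitForIceGathering(peerConnection);
//   // ...then POST peerConnection.localDescription as before.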
background.js (service worker)
chrome.runtime.onInstalled.addListener(() => {
  console.log("StreamSavvy Extension Installed");
});

chrome.runtime.onMessage.addListener((message, sender, sendResponse) => {
  if (message.type === 'startStreaming') {
    chrome.tabs.create({
      url: chrome.runtime.getURL('capture.html')
    });
    sendResponse({ status: 'streaming' });
  } else if (message.type === 'stopStreaming') {
    chrome.tabs.query({ url: chrome.runtime.getURL('capture.html') }, (tabs) => {
      if (tabs.length > 0) {
        chrome.tabs.sendMessage(tabs[0].id, { type: 'stopStreaming' });
        sendResponse({ status: 'stopped' });
      }
    });
  }
  return true; // Keep the message channel open for sendResponse
});
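I also noticed that capture.js never registers a chrome.runtime.onMessage listener, so the stopStreaming message that background.js sends to the capture tab currently has no receiver. I assume something along these lines would be needed in capture.js for that path to do anything (sketch, not yet wired in):
// Sketch for capture.js: handle the stop signal forwarded by background.js.
chrome.runtime.onMessage.addListener((message) => {
  if (message.type === 'stopStreaming') {
    stopStreaming();
  }
});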