audio support

This commit is contained in:
david 2025-03-31 16:40:05 -07:00
parent 795fd0cd38
commit b69b0dbcec
2 changed files with 83 additions and 42 deletions

View File

@@ -44,14 +44,25 @@ function startFFmpeg(): ChildProcessWithoutNullStreams {
const p = spawn('ffmpeg', [ const p = spawn('ffmpeg', [
'-loglevel', 'debug', '-loglevel', 'debug',
'-i', VIDEO_DEVICE, // Input device '-i', VIDEO_DEVICE, // Input device
'-map', '0:v:0',
'-vf', `scale=${WIDTH}:${HEIGHT}`, // Scale video resolution '-vf', `scale=${WIDTH}:${HEIGHT}`, // Scale video resolution
'-vcodec', 'rawvideo', // Output raw video codec '-vcodec', 'rawvideo', // Output raw video codec
'-pix_fmt', 'yuv420p', // Pixel format for WebRTC '-pix_fmt', 'yuv420p', // Pixel format for WebRTC
'-f', 'rawvideo', // Output format '-f', 'rawvideo', // Output format
'pipe:1' // Pipe to stdout 'pipe:3', // Pipe to stdout
// Audio
'-map', '0:a:0',
'-acodec', 'pcm_s16le',
'-ac', '1',
'-ar', '48000',
'-f', 's16le',
'pipe:4'
], { ], {
// stdio: ['ignore', 'pipe', 'pipe'], stdio: ['ignore', 'pipe', 'pipe', 'pipe', 'pipe']
// detached: true // detached: true
}); });
process.on('SIGINT', () => { process.on('SIGINT', () => {
console.log('🔻 Server shutting down... KILLING'); console.log('🔻 Server shutting down... KILLING');
@@ -68,7 +79,7 @@ function startFFmpeg(): ChildProcessWithoutNullStreams {
process.on('exit', () => { process.on('exit', () => {
p.kill('SIGHUP'); //this one p.kill('SIGHUP'); //this one
let b = p.kill('SIGTERM'); let b = p.kill('SIGTERM');
console.log("b ",b) console.log("b ", b)
}); });
return p; return p;
@@ -84,23 +95,17 @@ const ffmpegProcess = startFFmpeg();
const videoSource = new nonstandard.RTCVideoSource(); const videoSource = new nonstandard.RTCVideoSource();
const audioSource = new nonstandard.RTCAudioSource();
// Function to create a WebRTC PeerConnection // Function to create a WebRTC PeerConnection
async function createPeerConnection(): Promise<RTCPeerConnection> { async function createPeerConnection(): Promise<RTCPeerConnection> {
const peerConnection = new RTCPeerConnection({iceServers: []} ); const peerConnection = new RTCPeerConnection({ iceServers: [] });
// Create a video source
// const videoStream = new VideoStream('/dev/video0');
// track.addEventListener('')
const videoStream = ffmpegProcess.stdio[3]; // pipe:3
const audioStream = ffmpegProcess.stdio[4]; // pipe:4
// Start FFmpeg and pipe video frames to the source // Start FFmpeg and pipe video frames to the source
videoStream.on('data', (chunk: Buffer) => {
ffmpegProcess.stdout.on('data', (chunk: Buffer) => {
// Push video frames to the RTCVideoSource // Push video frames to the RTCVideoSource
frameBuffer = Buffer.concat([frameBuffer, chunk]); frameBuffer = Buffer.concat([frameBuffer, chunk]);
@@ -120,13 +125,49 @@ async function createPeerConnection(): Promise<RTCPeerConnection> {
} }
}); });
ffmpegProcess.stderr.on('data', (data: Buffer) => { videoStream.on('data', (data: Buffer) => {
// console.error('FFmpeg Error:', data.toString()); // console.error('FFmpeg Error:', data.toString());
}); });
ffmpegProcess.on('exit', (code) => { videoStream.on('exit', (code) => {
console.log(`FFmpeg exited with code ${code}`);
});
// --- AUDIO handling ---
const AUDIO_FRAME_SIZE = 480 * 2; // 480 samples * 2 bytes (s16le)
let audioBuffer = Buffer.alloc(0);
audioStream.on('data', (chunk: Buffer) => {
audioBuffer = Buffer.concat([audioBuffer, chunk]);
while (audioBuffer.length >= AUDIO_FRAME_SIZE) {
const frameData = audioBuffer.slice(0, AUDIO_FRAME_SIZE);
// const sampleBuffer = Buffer.from(frameData.buffer.; // makes an isolated buffer
audioBuffer = audioBuffer.slice(AUDIO_FRAME_SIZE);
const samples = new Int16Array(480);
for (let i = 0; i < 480; i++) {
samples[i] = frameData.readInt16LE(i * 2);
}
audioSource.onData({
samples: samples,
sampleRate: 48000,
bitsPerSample: 16,
channelCount: 1,
numberOfFrames: 480
});
}
});
audioStream.on('data', (data: Buffer) => {
// console.error('FFmpeg Error:', data.toString());
});
audioStream.on('exit', (code) => {
console.log(`FFmpeg exited with code ${code}`); console.log(`FFmpeg exited with code ${code}`);
}); });
@@ -134,17 +175,16 @@ async function createPeerConnection(): Promise<RTCPeerConnection> {
// Add the track to the PeerConnection // Add the track to the PeerConnection
const track: MediaStreamTrack = videoSource.createTrack(); const track: MediaStreamTrack = videoSource.createTrack();
console.log('vdei src ',videoSource.isScreencast) const track1 = audioSource.createTrack();
console.log('vdei src ', videoSource.isScreencast)
const stream = new MediaStream() const stream = new MediaStream()
stream.addTrack(track) stream.addTrack(track)
console.log('enabled ',track.enabled, track.id, track.kind, track.label, track.readyState); stream.addTrack(track1);
console.log('enabled ', track.enabled, track.id, track.kind, track.label, track.readyState);
// track. // track.
console.log('get',stream.getVideoTracks()[0].id) console.log('get', stream.getVideoTracks()[0].id)
peerConnection.addTrack(track, stream) peerConnection.addTrack(track, stream);
// peerConnection.addTransceiver(track, { direction: 'sendonly' }); // peerConnection.add peerConnection.addTrack(track1, stream);
// peerConnection.addIceCandidate();
// peerConnection
// console.log('Stream with track:', s.track.);
return peerConnection; return peerConnection;
} }
@@ -159,10 +199,10 @@ wss.on('connection', async (ws: ws.WebSocket) => {
console.log('Client connected'); console.log('Client connected');
ws.on('message', async (message: Buffer) => { ws.on('message', async (message: Buffer) => {
const { type, data} = JSON.parse(message.toString()); const { type, data } = JSON.parse(message.toString());
console.log("message type", type) console.log("message type", type)
if(type == 'offer') { if (type == 'offer') {
await peerConnection.setRemoteDescription(data); await peerConnection.setRemoteDescription(data);
const answer = await peerConnection.createAnswer(); const answer = await peerConnection.createAnswer();
await peerConnection.setLocalDescription(answer); await peerConnection.setLocalDescription(answer);

View File

@@ -28,16 +28,17 @@
pc.ontrack = (event) => { pc.ontrack = (event) => {
console.log("Received track event", event.streams); console.log("Received track event", event.streams);
video.srcObject = event.streams[0]; video.srcObject = event.streams[0];
// video.muted = false;
}; };
pc.onicecandidate = ({ candidate }) => { pc.onicecandidate = ({ candidate }) => {
console.log("pc.onicecandidate") // console.log("pc.onicecandidate")
if (candidate) { if (candidate) {
ws.send(JSON.stringify({ type: 'ice-candidate', data: candidate })); // Use 'candidate' instead of 'ice-candidate' ws.send(JSON.stringify({ type: 'ice-candidate', data: candidate })); // Use 'candidate' instead of 'ice-candidate'
} }
}; };
pc.onicegatheringstatechange = () => { pc.onicegatheringstatechange = () => {
console.log('ICE state:', pc.iceGatheringState); // console.log('ICE state:', pc.iceGatheringState);
}; };
ws.onopen = async () => { ws.onopen = async () => {