audio support

This commit is contained in:
david 2025-03-31 16:40:05 -07:00
parent 795fd0cd38
commit b69b0dbcec
2 changed files with 83 additions and 42 deletions

View File

@@ -44,13 +44,24 @@ function startFFmpeg(): ChildProcessWithoutNullStreams {
const p = spawn('ffmpeg', [
'-loglevel', 'debug',
'-i', VIDEO_DEVICE, // Input device
'-map', '0:v:0',
'-vf', `scale=${WIDTH}:${HEIGHT}`, // Scale video resolution
'-vcodec', 'rawvideo', // Output raw video codec
'-pix_fmt', 'yuv420p', // Pixel format for WebRTC
'-f', 'rawvideo', // Output format
'pipe:1' // Pipe to stdout
'pipe:3', // Pipe raw video to extra fd 3 (read as ffmpegProcess.stdio[3]), not stdout
// Audio
'-map', '0:a:0',
'-acodec', 'pcm_s16le',
'-ac', '1',
'-ar', '48000',
'-f', 's16le',
'pipe:4'
], {
// stdio: ['ignore', 'pipe', 'pipe'],
stdio: ['ignore', 'pipe', 'pipe', 'pipe', 'pipe']
// detached: true
});
process.on('SIGINT', () => {
@@ -84,23 +95,17 @@ const ffmpegProcess = startFFmpeg();
const videoSource = new nonstandard.RTCVideoSource();
const audioSource = new nonstandard.RTCAudioSource();
// Function to create a WebRTC PeerConnection
async function createPeerConnection(): Promise<RTCPeerConnection> {
const peerConnection = new RTCPeerConnection({ iceServers: [] });
// Create a video source
// const videoStream = new VideoStream('/dev/video0');
// track.addEventListener('')
const videoStream = ffmpegProcess.stdio[3]; // pipe:3
const audioStream = ffmpegProcess.stdio[4]; // pipe:4
// Start FFmpeg and pipe video frames to the source
ffmpegProcess.stdout.on('data', (chunk: Buffer) => {
videoStream.on('data', (chunk: Buffer) => {
// Push video frames to the RTCVideoSource
frameBuffer = Buffer.concat([frameBuffer, chunk]);
@@ -120,13 +125,49 @@ async function createPeerConnection(): Promise<RTCPeerConnection> {
}
});
ffmpegProcess.stderr.on('data', (data: Buffer) => {
videoStream.on('data', (data: Buffer) => {
// console.error('FFmpeg Error:', data.toString());
});
ffmpegProcess.on('exit', (code) => {
videoStream.on('exit', (code) => {
console.log(`FFmpeg exited with code ${code}`);
});
// --- AUDIO handling ---
const AUDIO_FRAME_SIZE = 480 * 2; // 480 samples * 2 bytes (s16le)
let audioBuffer = Buffer.alloc(0);
audioStream.on('data', (chunk: Buffer) => {
audioBuffer = Buffer.concat([audioBuffer, chunk]);
while (audioBuffer.length >= AUDIO_FRAME_SIZE) {
const frameData = audioBuffer.slice(0, AUDIO_FRAME_SIZE);
// const sampleBuffer = Buffer.from(frameData.buffer.; // makes an isolated buffer
audioBuffer = audioBuffer.slice(AUDIO_FRAME_SIZE);
const samples = new Int16Array(480);
for (let i = 0; i < 480; i++) {
samples[i] = frameData.readInt16LE(i * 2);
}
audioSource.onData({
samples: samples,
sampleRate: 48000,
bitsPerSample: 16,
channelCount: 1,
numberOfFrames: 480
});
}
});
audioStream.on('data', (data: Buffer) => {
// console.error('FFmpeg Error:', data.toString());
});
audioStream.on('exit', (code) => {
console.log(`FFmpeg exited with code ${code}`);
});
@@ -134,17 +175,16 @@ async function createPeerConnection(): Promise<RTCPeerConnection> {
// Add the track to the PeerConnection
const track: MediaStreamTrack = videoSource.createTrack();
const track1 = audioSource.createTrack();
console.log('vdei src ', videoSource.isScreencast)
const stream = new MediaStream()
stream.addTrack(track)
stream.addTrack(track1);
console.log('enabled ', track.enabled, track.id, track.kind, track.label, track.readyState);
// track.
console.log('get', stream.getVideoTracks()[0].id)
peerConnection.addTrack(track, stream)
// peerConnection.addTransceiver(track, { direction: 'sendonly' }); // peerConnection.add
// peerConnection.addIceCandidate();
// peerConnection
// console.log('Stream with track:', s.track.);
peerConnection.addTrack(track, stream);
peerConnection.addTrack(track1, stream);
return peerConnection;
}

View File

@@ -28,16 +28,17 @@
pc.ontrack = (event) => {
console.log("Received track event", event.streams);
video.srcObject = event.streams[0];
// video.muted = false;
};
pc.onicecandidate = ({ candidate }) => {
console.log("pc.onicecandidate")
// console.log("pc.onicecandidate")
if (candidate) {
ws.send(JSON.stringify({ type: 'ice-candidate', data: candidate })); // Use 'candidate' instead of 'ice-candidate'
}
};
pc.onicegatheringstatechange = () => {
console.log('ICE state:', pc.iceGatheringState);
// console.log('ICE state:', pc.iceGatheringState);
};
ws.onopen = async () => {