code tidy up
parent b69b0dbcec
commit 3610c37585
113  src/server.ts
@@ -1,56 +1,26 @@
-// @ts-ignore
-import { nonstandard, RTCPeerConnection, MediaStreamTrack, MediaStream, RTCSessionDescription } from '@roamhq/wrtc';
-import { spawn, ChildProcessWithoutNullStreams } from 'child_process';
-import { Readable } from 'stream';
-import * as ws from 'ws'
-import * as fs from 'fs';
-import { i420ToRgba, RTCVideoSource } from '@roamhq/wrtc/types/nonstandard';
-// import { RTCVideoSource } from '@roamhq/wrtc/types/nonstandard';
-
-
-
-// type RTCVideoSource = nonstandard.RTCVideoSource;
-// const RTCVideoSource = wrtc.nonstandard.RTCVideoSource;
-// const {mediaDevices} = wrtc
+import { MediaStream, MediaStreamTrack, nonstandard, RTCPeerConnection } from '@roamhq/wrtc';
+import { ChildProcessWithoutNullStreams, spawn } from 'child_process';
+import * as ws from 'ws';
 
 // Constants
 // const VIDEO_DEVICE = '/dev/video0'; // Video source device
 const VIDEO_DEVICE = '/dev/dvb/adapter0/dvr0'; // Video source device
 const WIDTH = 640; // Video width
 const HEIGHT = 480; // Video height
 const FRAME_SIZE = WIDTH * HEIGHT * 1.5; // YUV420p frame size (460800 bytes)
 
-class VideoStream extends Readable {
-  private device: fs.ReadStream;
-
-  constructor(devicePath: string) {
-    super();
-    this.device = fs.createReadStream(devicePath);
-  }
-
-  _read(size: number): void {
-    const chunk = this.device.read(size);
-    if (chunk === null) {
-      this.push(null); // Signal end of stream
-    } else {
-      this.push(chunk);
-    }
-  }
-}
-
 
 // Function to start FFmpeg and capture raw video
-function startFFmpeg(): ChildProcessWithoutNullStreams {
+const startFFmpeg = (): ChildProcessWithoutNullStreams => {
   const p = spawn('ffmpeg', [
     '-loglevel', 'debug',
-    '-i', VIDEO_DEVICE, // Input device
+    '-i', VIDEO_DEVICE,
 
     // Video
     '-map', '0:v:0',
-    '-vf', `scale=${WIDTH}:${HEIGHT}`, // Scale video resolution
-    '-vcodec', 'rawvideo', // Output raw video codec
-    '-pix_fmt', 'yuv420p', // Pixel format for WebRTC
-    '-f', 'rawvideo', // Output format
-    'pipe:3', // Pipe to stdout
+    '-vf', `scale=${WIDTH}:${HEIGHT}`,
+    '-vcodec', 'rawvideo',
+    '-pix_fmt', 'yuv420p',
+    '-f', 'rawvideo',
+    'pipe:3',
 
     // Audio
     '-map', '0:a:0',
@@ -62,12 +32,10 @@ function startFFmpeg(): ChildProcessWithoutNullStreams {
 
   ], {
     stdio: ['ignore', 'pipe', 'pipe', 'pipe', 'pipe']
-    // detached: true
   });
   process.on('SIGINT', () => {
-    console.log('🔻 Server shutting down... KILLING');
-    let b = p.kill('SIGINT');
-    // let b = process.kill(p.pid)
+    console.log('🔻 Server shutting down...');
+    p.kill('SIGINT');
     process.exit(0);
   });
 
@@ -78,49 +46,33 @@ function startFFmpeg(): ChildProcessWithoutNullStreams {
   });
   process.on('exit', () => {
-    p.kill('SIGHUP'); //this one
-    let b = p.kill('SIGTERM');
-    console.log("b ", b)
+    p.kill('SIGTERM');
   });
 
   return p;
 }
 
-// const videoSource =
-
-
-
 
 let frameBuffer = Buffer.alloc(0);
 const ffmpegProcess = startFFmpeg();
 
 
 const videoSource = new nonstandard.RTCVideoSource();
 const audioSource = new nonstandard.RTCAudioSource();
 // Function to create a WebRTC PeerConnection
-async function createPeerConnection(): Promise<RTCPeerConnection> {
+const createPeerConnection = async (): Promise<RTCPeerConnection> => {
 
   const peerConnection = new RTCPeerConnection({ iceServers: [] });
 
   const videoStream = ffmpegProcess.stdio[3]; // pipe:3
   const audioStream = ffmpegProcess.stdio[4]; // pipe:4
 
   // Start FFmpeg and pipe video frames to the source
   videoStream.on('data', (chunk: Buffer) => {
     // Push video frames to the RTCVideoSource
 
     frameBuffer = Buffer.concat([frameBuffer, chunk]);
     while (frameBuffer.length >= FRAME_SIZE) {
 
       const frameData = frameBuffer.slice(0, FRAME_SIZE);
-      frameBuffer = frameBuffer.slice(FRAME_SIZE); // Keep remaining data
-
-
+      frameBuffer = frameBuffer.slice(FRAME_SIZE);
       const frame: nonstandard.RTCVideoFrame = {
         width: WIDTH,
         height: HEIGHT,
         data: new Uint8Array(frameData),
       }
 
       videoSource.onFrame(frame);
     }
   });
@@ -129,8 +81,6 @@ async function createPeerConnection(): Promise<RTCPeerConnection> {
     // console.error('FFmpeg Error:', data.toString());
   });
 
-
-
   videoStream.on('exit', (code) => {
     console.log(`FFmpeg exited with code ${code}`);
   });
@@ -143,16 +93,13 @@ async function createPeerConnection(): Promise<RTCPeerConnection> {
     audioBuffer = Buffer.concat([audioBuffer, chunk]);
     while (audioBuffer.length >= AUDIO_FRAME_SIZE) {
       const frameData = audioBuffer.slice(0, AUDIO_FRAME_SIZE);
-      // const sampleBuffer = Buffer.from(frameData.buffer.; // makes an isolated buffer
       audioBuffer = audioBuffer.slice(AUDIO_FRAME_SIZE);
 
       const samples = new Int16Array(480);
       for (let i = 0; i < 480; i++) {
         samples[i] = frameData.readInt16LE(i * 2);
       }
 
       audioSource.onData({
-        samples: samples,
+        samples,
         sampleRate: 48000,
         bitsPerSample: 16,
         channelCount: 1,
@@ -165,26 +112,18 @@ async function createPeerConnection(): Promise<RTCPeerConnection> {
     // console.error('FFmpeg Error:', data.toString());
   });
 
-
-
   audioStream.on('exit', (code) => {
     console.log(`FFmpeg exited with code ${code}`);
   });
 
-
-
   // Add the track to the PeerConnection
-  const track: MediaStreamTrack = videoSource.createTrack();
-  const track1 = audioSource.createTrack();
-  console.log('vdei src ', videoSource.isScreencast)
+  const videoTrack: MediaStreamTrack = videoSource.createTrack();
+  const audioTrack: MediaStreamTrack = audioSource.createTrack();
   const stream = new MediaStream()
-  stream.addTrack(track)
-  stream.addTrack(track1);
-  console.log('enabled ', track.enabled, track.id, track.kind, track.label, track.readyState);
-  // track.
-  console.log('get', stream.getVideoTracks()[0].id)
-  peerConnection.addTrack(track, stream);
-  peerConnection.addTrack(track1, stream);
+  stream.addTrack(videoTrack)
+  stream.addTrack(audioTrack);
+  peerConnection.addTrack(videoTrack, stream);
+  peerConnection.addTrack(audioTrack, stream);
   return peerConnection;
 }
 
@@ -195,12 +134,9 @@ const wss = new ws.WebSocketServer({ port: 8080 });
 
 wss.on('connection', async (ws: ws.WebSocket) => {
   const peerConnection: RTCPeerConnection = await createPeerConnection();
-  // const source = new RTCVideoSource();
-
   console.log('Client connected');
   ws.on('message', async (message: Buffer) => {
     const { type, data } = JSON.parse(message.toString());
     console.log("message type", type)
 
     if (type == 'offer') {
       await peerConnection.setRemoteDescription(data);
@@ -210,14 +146,12 @@ wss.on('connection', async (ws: ws.WebSocket) => {
     }
 
     if (type === 'ice-candidate') {
-      console.log('type ice')
       await peerConnection.addIceCandidate(data);
     }
 
  });
 
  peerConnection.oniceconnectionstatechange = () => {
    console.log('ICE connection state:', peerConnection.iceConnectionState);
    if (peerConnection.iceConnectionState === 'failed') {
      console.error('ICE connection failed');
    }
@@ -226,7 +160,6 @@ wss.on('connection', async (ws: ws.WebSocket) => {
 
  // Send ICE candidates to the client
  peerConnection.onicecandidate = ({ candidate }) => {
-   console.log("onicecandidate")
    if (candidate) {
      ws.send(JSON.stringify({ type: 'ice-candidate', data: candidate }));
    }
@@ -28,11 +28,9 @@
 pc.ontrack = (event) => {
   console.log("Received track event", event.streams);
   video.srcObject = event.streams[0];
-  // video.muted = false;
 };
 
 pc.onicecandidate = ({ candidate }) => {
-  // console.log("pc.onicecandidate")
   if (candidate) {
     ws.send(JSON.stringify({ type: 'ice-candidate', data: candidate })); // Use 'candidate' instead of 'ice-candidate'
   }
@@ -46,15 +44,11 @@
   pc.addTransceiver('audio', { direction: 'recvonly' })
   const offer = await pc.createOffer();
   await pc.setLocalDescription(offer);
-  // ws.send(JSON.stringify(offer));
-  console.log("on open ")
-  // ws.send(JSON.stringify(offer));
   ws.send(JSON.stringify({ type: 'offer', data: offer }));
 }
 
 ws.onmessage = async (message) => {
   const msg = JSON.parse(message.data);
   console.log("onmessage type:", msg.type, msg)
 
   if (msg.type === 'answer') {
     await pc.setRemoteDescription(msg.data);