import { Identity } from "spacetimedb";
import type { Peer, WebRTCStats } from "./types";
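
// ICE configuration: a single public STUN server and no TURN fallback, so
// peers behind restrictive (e.g. symmetric) NATs may fail to connect.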
const ICE_SERVERS: RTCConfiguration = {
  iceServers: [{ urls: "stun:stun.l.google.com:19302" }],
};
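
/**
 * Manages WebRTC peers for a single voice or screen-share session:
 * RTCPeerConnection lifecycle, signaling callbacks (negotiation/ICE),
 * per-peer audio routing (Web Audio GainNode with an HTMLAudioElement
 * fallback), and periodic inbound-RTP stats collection.
 *
 * Reactive fields use Svelte 5 `$state`/`$effect` runes, so instances must
 * be created where effects can be set up (component init or an effect root).
 */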
export class PeerManagerService {
  peers = $state<Map<string, Peer>>(new Map());
  peerStatuses = $state<Map<string, string>>(new Map());
  peerStats = $state<Map<string, WebRTCStats>>(new Map());

  peerAudioPreferences = new Map<string, { volume: number; muted: boolean }>();
  audioContext: AudioContext | null = null;

  identity = $state<Identity | null>(null);
  mediaType = $state<"voice" | "screen">("voice");
  isDeafened = $state(false);
  onNegotiationNeeded: (peerIdHex: string, pc: RTCPeerConnection) => void;
  onIceCandidate: (peerIdHex: string, candidate: RTCIceCandidate) => void;

  constructor(
    identity: Identity | null,
    mediaType: "voice" | "screen",
    isDeafened: boolean,
    onNegotiationNeeded: (peerIdHex: string, pc: RTCPeerConnection) => void,
    onIceCandidate: (peerIdHex: string, candidate: RTCIceCandidate) => void
  ) {
    this.identity = identity;
    this.mediaType = mediaType;
    this.isDeafened = isDeafened;
    this.onNegotiationNeeded = onNegotiationNeeded;
    this.onIceCandidate = onIceCandidate;
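
    // Reactively apply deafen state and per-peer volume/mute preferences to
    // each peer's GainNode (or fallback <audio> element) in voice sessions.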
    $effect(() => {
      if (this.mediaType === "voice") {
        this.peers.forEach((peer, peerIdHex) => {
          const pref = this.peerAudioPreferences.get(peerIdHex) || {
            volume: 1,
            muted: false,
          };
          if (peer.gainNode) {
            peer.gainNode.gain.value = this.isDeafened || pref.muted ? 0 : pref.volume;
          } else if (peer.audio) {
            peer.audio.muted = this.isDeafened || pref.muted;
          }
        });
      }
    });
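
    // Poll inbound-RTP stats for every peer on a 2s interval, deriving
    // per-kind bitrate from byte/timestamp deltas against the previous
    // sample; the map is cleared once no peers remain.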
    $effect(() => {
      if (this.peers.size === 0) {
        if (this.peerStats.size > 0) {
          this.peerStats = new Map();
        }
        return;
      }

      const interval = setInterval(async () => {
        const newStats = new Map(this.peerStats);
        for (const [peerIdHex, peer] of this.peers.entries()) {
          try {
            const stats = await peer.pc.getStats();
            const prevStats = this.peerStats.get(peerIdHex);
            const currentStats: WebRTCStats = {
              audio: { bytesReceived: 0, jitter: 0, packetsLost: 0, bitrate: 0 },
              video: {
                bytesReceived: 0,
                frameWidth: 0,
                frameHeight: 0,
                framesPerSecond: 0,
                bitrate: 0,
              },
              timestamp: Date.now(),
            };

            stats.forEach((report) => {
              if (report.type === "inbound-rtp") {
                const kind = report.kind;
                if (kind === "audio" || kind === "video") {
                  const target = kind === "audio" ? currentStats.audio : currentStats.video;
                  target.bytesReceived = report.bytesReceived || 0;
                  if (kind === "audio") {
                    currentStats.audio.jitter = report.jitter || 0;
                    currentStats.audio.packetsLost = report.packetsLost || 0;
                  } else {
                    currentStats.video.frameWidth = report.frameWidth || 0;
                    currentStats.video.frameHeight = report.frameHeight || 0;
                    currentStats.video.framesPerSecond = report.framesPerSecond || 0;
                  }

                  if (prevStats) {
                    const prevTarget = kind === "audio" ? prevStats.audio : prevStats.video;
                    const deltaBytes = target.bytesReceived - prevTarget.bytesReceived;
                    const deltaTime = (currentStats.timestamp - prevStats.timestamp) / 1000;
                    if (deltaTime > 0) {
                      target.bitrate = Math.max(0, (deltaBytes * 8) / deltaTime);
                    }
                  }
                }
              }
            });
            newStats.set(peerIdHex, currentStats);
          } catch (e) {
            console.warn(`[WebRTC][${this.mediaType}] Failed to get stats for ${peerIdHex}`, e);
          }
        }
        this.peerStats = newStats;
      }, 2000);

      return () => clearInterval(interval);
    });
  }
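
  /** Lazily create the shared AudioContext, resuming it if the browser has suspended it. */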
  getAudioContext = () => {
    if (!this.audioContext) {
      this.audioContext = new (window.AudioContext || (window as any).webkitAudioContext)();
    }
    if (this.audioContext.state === "suspended") {
      this.audioContext.resume();
    }
    return this.audioContext;
  };
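
  /** Persist a peer's volume/mute preference and apply it to the live audio path immediately. */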
  setPeerAudioPreference = (peerIdHex: string, preference: { volume?: number; muted?: boolean }) => {
    const current = this.peerAudioPreferences.get(peerIdHex) || {
      volume: 1,
      muted: false,
    };
    const next = { ...current, ...preference };
    this.peerAudioPreferences.set(peerIdHex, next);

    const ctx = this.audioContext;
    if (ctx && ctx.state === "suspended") {
      ctx.resume();
    }

    const peer = this.peers.get(peerIdHex);
    if (peer) {
      if (peer.gainNode) {
        peer.gainNode.gain.value = this.isDeafened || next.muted ? 0 : next.volume;
      } else if (peer.audio) {
        peer.audio.volume = Math.min(1, next.volume);
        peer.audio.muted = this.isDeafened || next.muted;
      }
    }
  };
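
  /** Tear down a peer: close its RTCPeerConnection, release audio resources, and drop its reactive state. */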
  closePeer = (peerIdHex: string) => {
    const peer = this.peers.get(peerIdHex);
    if (peer) {
      console.log(`[WebRTC][${this.mediaType}] Closing peer connection for ${peerIdHex}`);
      peer.pc.close();
      if (peer.gainNode) {
        peer.gainNode.disconnect();
      }
      if (peer.audio) {
        peer.audio.pause();
        peer.audio.srcObject = null;
      }
      const nextPeers = new Map(this.peers);
      nextPeers.delete(peerIdHex);
      this.peers = nextPeers;

      const nextStatuses = new Map(this.peerStatuses);
      nextStatuses.delete(peerIdHex);
      this.peerStatuses = nextStatuses;
    }
  };
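
  /**
   * Create (or return the existing) RTCPeerConnection for a peer, wiring
   * signaling callbacks, state logging, track handling, and pre-allocated
   * transceivers so initial tracks can be attached by index.
   */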
  createPeerConnection = (peerIdHex: string, initialTracks: (MediaStreamTrack | null)[] = []) => {
    if (this.peers.has(peerIdHex)) return this.peers.get(peerIdHex)!.pc;

    if (this.identity && peerIdHex === this.identity.toHexString()) {
      console.warn(`[WebRTC][${this.mediaType}] Attempted to create a PeerConnection to self (${peerIdHex}). Ignoring.`);
      return null as any;
    }

    console.log(`[WebRTC][${this.mediaType}] Creating new PeerConnection for ${peerIdHex}`);
    const pc = new RTCPeerConnection(ICE_SERVERS);

    pc.onnegotiationneeded = () => {
      console.log(`[WebRTC][${this.mediaType}] onnegotiationneeded fired for ${peerIdHex}`);
      this.onNegotiationNeeded(peerIdHex, pc);
    };

    pc.onicecandidate = (event) => {
      if (event.candidate) {
        this.onIceCandidate(peerIdHex, event.candidate);
      }
    };

    pc.oniceconnectionstatechange = () => {
      console.log(`[WebRTC][${this.mediaType}] ICE state for ${peerIdHex}: ${pc.iceConnectionState}`);
      const nextStatuses = new Map(this.peerStatuses);
      nextStatuses.set(peerIdHex, `${pc.iceConnectionState}/${pc.signalingState}`);
      this.peerStatuses = nextStatuses;

      if (pc.iceConnectionState === "failed") {
        console.log(`[WebRTC][${this.mediaType}] ICE failed for ${peerIdHex}, closing peer for retry`);
        this.closePeer(peerIdHex);
      }
    };

    pc.onsignalingstatechange = () => {
      console.log(`[WebRTC][${this.mediaType}] Signaling state for ${peerIdHex}: ${pc.signalingState}`);
      const nextStatuses = new Map(this.peerStatuses);
      nextStatuses.set(peerIdHex, `${pc.iceConnectionState}/${pc.signalingState}`);
      this.peerStatuses = nextStatuses;
    };

    pc.onconnectionstatechange = () => {
      console.log(`[WebRTC][${this.mediaType}] Connection state for ${peerIdHex}: ${pc.connectionState}`);
      if (pc.connectionState === "failed" || pc.connectionState === "closed") {
        console.log(`[WebRTC][${this.mediaType}] Connection ${pc.connectionState} for ${peerIdHex}, cleaning up`);
        this.closePeer(peerIdHex);
      }
    };
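
    // Incoming tracks: remote voice audio is routed through a GainNode for
    // per-peer volume/deafen control; a muted <audio> element stays attached
    // to the stream (a common workaround for browsers that otherwise do not
    // render WebRTC audio piped only through Web Audio), with a plain
    // autoplaying HTMLAudioElement as the fallback path. Audio received in a
    // screen-share session and all video tracks are merged into videoStream.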
    pc.ontrack = (event) => {
      console.log(`[WebRTC][${this.mediaType}] Received track from ${peerIdHex}: ${event.track.kind} (id: ${event.track.id})`);
      const nextPeers = new Map(this.peers);
      const existingPeer = { ...(nextPeers.get(peerIdHex) || { pc }) };

      if (event.track.kind === "audio") {
        if (this.mediaType === "voice") {
          const pref = this.peerAudioPreferences.get(peerIdHex) || {
            volume: 1,
            muted: false,
          };

          try {
            const ctx = this.getAudioContext();
            const stream = new MediaStream([event.track]);

            if (!existingPeer.audio) {
              existingPeer.audio = new Audio();
              existingPeer.audio.muted = true;
            }
            existingPeer.audio.srcObject = stream;
            existingPeer.audio.play().catch((err) => {
              if (err.name !== "AbortError")
                console.warn(`[WebRTC][voice] Dummy audio play failed for ${peerIdHex}`, err);
            });

            const source = ctx.createMediaStreamSource(stream);
            const gainNode = ctx.createGain();

            gainNode.gain.value = this.isDeafened || pref.muted ? 0 : pref.volume;
            source.connect(gainNode);
            gainNode.connect(ctx.destination);

            existingPeer.gainNode = gainNode;
            console.log(`[WebRTC][voice] Web Audio graph connected for ${peerIdHex} (volume: ${pref.volume})`);
          } catch (e) {
            console.error(`[WebRTC][voice] Failed to setup Web Audio for ${peerIdHex}, falling back to HTMLAudioElement`, e);
            if (!existingPeer.audio) {
              existingPeer.audio = new Audio();
              existingPeer.audio.autoplay = true;

              const pref = this.peerAudioPreferences.get(peerIdHex) || {
                volume: 1,
                muted: false,
              };
              existingPeer.audio.volume = Math.min(1, pref.volume);
              existingPeer.audio.muted = this.isDeafened || pref.muted;
            }

            const stream = new MediaStream([event.track]);
            existingPeer.audio.srcObject = stream;
            existingPeer.audio.play().catch((err) => {
              if (err.name !== "AbortError")
                console.error(`[WebRTC][voice] Error playing audio for ${peerIdHex}`, err);
            });
          }
        } else {
          const currentStream = existingPeer.videoStream || new MediaStream();
          if (!currentStream.getTracks().find((t) => t.id === event.track.id)) {
            currentStream.addTrack(event.track);
          }
          existingPeer.videoStream = new MediaStream(currentStream.getTracks());
        }
      } else if (event.track.kind === "video") {
        const currentStream = existingPeer.videoStream || new MediaStream();
        if (!currentStream.getTracks().find((t) => t.id === event.track.id)) {
          currentStream.addTrack(event.track);
        }
        existingPeer.videoStream = new MediaStream(currentStream.getTracks());
      }

      nextPeers.set(peerIdHex, existingPeer);
      this.peers = nextPeers;
    };
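
    // Pre-allocate transceivers in a fixed order (voice: [audio]; screen:
    // [video, audio]) so initialTracks can be attached positionally below.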
if (this.mediaType === "voice") {
|
|
pc.addTransceiver("audio", { direction: "sendrecv" });
|
|
} else {
|
|
pc.addTransceiver("video", { direction: "sendrecv" });
|
|
pc.addTransceiver("audio", { direction: "sendrecv" });
|
|
}
|
|
|
|
const transceivers = pc.getTransceivers();
|
|
initialTracks.forEach((track, i) => {
|
|
if (track && transceivers[i]) {
|
|
console.log(`[WebRTC][${this.mediaType}] Attaching initial track ${i} to ${peerIdHex}`);
|
|
transceivers[i].sender.replaceTrack(track);
|
|
}
|
|
});
|
|
|
|
const nextPeers = new Map(this.peers);
|
|
nextPeers.set(peerIdHex, { pc });
|
|
this.peers = nextPeers;
|
|
return pc;
|
|
};
|
|
|
|

  getPeer = (peerIdHex: string) => this.peers.get(peerIdHex);
}