feat: add voice call support for two-person practice sessions

- Backend: extend SSE to relay WebRTC signaling messages (see the signaling sketch below, before the diff)
- Frontend: add a WebRTC connection management module (webrtc.ts)
- Frontend: add a useVoiceCall composable
- Frontend: add voice call UI to the practice room
- Integrate the Web Speech API for speech-to-text
yuliang_guo
2026-01-28 15:45:47 +08:00
parent c27ad55e95
commit c5d460b413
6 changed files with 1254 additions and 19 deletions
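As a rough illustration of the signaling flow described in the commit message, here is a minimal sketch. The endpoint paths (/api/rooms/:id/signal, /api/rooms/:id/events), the webrtc-signal event name, and the message shape are assumptions made for illustration only; the actual API introduced by this commit may differ.

// Hypothetical signaling message relayed through the backend SSE channel
interface SignalMessage {
  type: 'offer' | 'answer' | 'ice-candidate'
  payload: RTCSessionDescriptionInit | RTCIceCandidateInit
}

// Send a signaling message to the peer via the backend (assumed endpoint)
async function sendSignal(roomId: string, msg: SignalMessage): Promise<void> {
  await fetch(`/api/rooms/${roomId}/signal`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(msg)
  })
}

// Receive signaling messages forwarded by the backend over SSE (assumed endpoint and event name)
function listenForSignals(roomId: string, onSignal: (msg: SignalMessage) => void): EventSource {
  const source = new EventSource(`/api/rooms/${roomId}/events`)
  source.addEventListener('webrtc-signal', (event) => {
    onSignal(JSON.parse((event as MessageEvent).data) as SignalMessage)
  })
  return source
}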


@@ -0,0 +1,324 @@
/**
* WebRTC connection management module
*
* Responsibilities:
* - Manage the RTCPeerConnection lifecycle
* - Handle the SDP offer/answer exchange
* - Handle ICE candidate gathering
* - Manage local and remote audio streams
*/
export type ConnectionState = 'idle' | 'connecting' | 'connected' | 'disconnected' | 'failed'
export interface WebRTCConfig {
iceServers?: RTCIceServer[]
onLocalStream?: (stream: MediaStream) => void
onRemoteStream?: (stream: MediaStream) => void
onConnectionStateChange?: (state: ConnectionState) => void
onIceCandidate?: (candidate: RTCIceCandidate) => void
onError?: (error: Error) => void
}
// Default ICE server configuration
const DEFAULT_ICE_SERVERS: RTCIceServer[] = [
{ urls: 'stun:stun.l.google.com:19302' },
{ urls: 'stun:stun1.l.google.com:19302' },
{ urls: 'stun:stun2.l.google.com:19302' }
]
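// Note: the STUN servers above only provide public-address discovery; peers behind
// symmetric NATs or strict firewalls usually also need a TURN relay server in this list.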
export class WebRTCManager {
private peerConnection: RTCPeerConnection | null = null
private localStream: MediaStream | null = null
private remoteStream: MediaStream | null = null
private config: WebRTCConfig
private connectionState: ConnectionState = 'idle'
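// ICE candidates that arrive before the remote description is set are buffered here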
private pendingIceCandidates: RTCIceCandidate[] = []
constructor(config: WebRTCConfig = {}) {
this.config = {
iceServers: DEFAULT_ICE_SERVERS,
...config
}
}
/**
* Get the current connection state
*/
getConnectionState(): ConnectionState {
return this.connectionState
}
/**
* Get the local audio stream
*/
getLocalStream(): MediaStream | null {
return this.localStream
}
/**
* Get the remote audio stream
*/
getRemoteStream(): MediaStream | null {
return this.remoteStream
}
/**
* Initialize the local audio stream (requests microphone access)
*/
async initLocalStream(): Promise<MediaStream> {
try {
this.localStream = await navigator.mediaDevices.getUserMedia({
audio: {
echoCancellation: true,
noiseSuppression: true,
autoGainControl: true
},
video: false
})
this.config.onLocalStream?.(this.localStream)
return this.localStream
} catch (error) {
const err = error instanceof Error ? error : new Error('获取麦克风权限失败')
this.config.onError?.(err)
throw err
}
}
/**
* Create the PeerConnection
*/
private createPeerConnection(): RTCPeerConnection {
const pc = new RTCPeerConnection({
iceServers: this.config.iceServers
})
// Listen for ICE candidates
pc.onicecandidate = (event) => {
if (event.candidate) {
console.log('[WebRTC] ICE candidate:', event.candidate.candidate?.substring(0, 50))
this.config.onIceCandidate?.(event.candidate)
}
}
// Listen for connection state changes
pc.onconnectionstatechange = () => {
console.log('[WebRTC] Connection state:', pc.connectionState)
this.updateConnectionState(pc.connectionState)
}
// Listen for ICE connection state changes
pc.oniceconnectionstatechange = () => {
console.log('[WebRTC] ICE connection state:', pc.iceConnectionState)
if (pc.iceConnectionState === 'failed') {
this.updateConnectionState('failed')
}
}
// Listen for the remote stream
pc.ontrack = (event) => {
console.log('[WebRTC] Remote track received')
if (event.streams && event.streams[0]) {
this.remoteStream = event.streams[0]
this.config.onRemoteStream?.(this.remoteStream)
}
}
return pc
}
/**
* Update the connection state
*/
private updateConnectionState(state: RTCPeerConnectionState | string) {
const stateMap: Record<string, ConnectionState> = {
'new': 'connecting',
'connecting': 'connecting',
'connected': 'connected',
'disconnected': 'disconnected',
'failed': 'failed',
'closed': 'disconnected'
}
this.connectionState = stateMap[state] || 'idle'
this.config.onConnectionStateChange?.(this.connectionState)
}
/**
* Create an offer (called by the initiating peer)
*/
async createOffer(): Promise<RTCSessionDescriptionInit> {
if (!this.localStream) {
await this.initLocalStream()
}
this.peerConnection = this.createPeerConnection()
this.updateConnectionState('connecting')
// Add the local audio tracks
this.localStream!.getTracks().forEach(track => {
this.peerConnection!.addTrack(track, this.localStream!)
})
// Create the offer
const offer = await this.peerConnection.createOffer()
await this.peerConnection.setLocalDescription(offer)
console.log('[WebRTC] Offer created')
return offer
}
/**
* Handle an offer (called by the receiving peer)
*/
async handleOffer(offer: RTCSessionDescriptionInit): Promise<RTCSessionDescriptionInit> {
if (!this.localStream) {
await this.initLocalStream()
}
this.peerConnection = this.createPeerConnection()
this.updateConnectionState('connecting')
// Add the local audio tracks
this.localStream!.getTracks().forEach(track => {
this.peerConnection!.addTrack(track, this.localStream!)
})
// Set the remote description
await this.peerConnection.setRemoteDescription(new RTCSessionDescription(offer))
// Flush any ICE candidates queued before the remote description was set
for (const candidate of this.pendingIceCandidates) {
await this.peerConnection.addIceCandidate(candidate)
}
this.pendingIceCandidates = []
// Create the answer
const answer = await this.peerConnection.createAnswer()
await this.peerConnection.setLocalDescription(answer)
console.log('[WebRTC] Answer created')
return answer
}
/**
* Handle an answer (called by the initiating peer)
*/
async handleAnswer(answer: RTCSessionDescriptionInit): Promise<void> {
if (!this.peerConnection) {
throw new Error('PeerConnection not initialized')
}
await this.peerConnection.setRemoteDescription(new RTCSessionDescription(answer))
// Flush any ICE candidates queued before the remote description was set
for (const candidate of this.pendingIceCandidates) {
await this.peerConnection.addIceCandidate(candidate)
}
this.pendingIceCandidates = []
console.log('[WebRTC] Answer handled')
}
/**
* Add an ICE candidate received from the peer
*/
async addIceCandidate(candidate: RTCIceCandidateInit): Promise<void> {
const iceCandidate = new RTCIceCandidate(candidate)
if (this.peerConnection && this.peerConnection.remoteDescription) {
await this.peerConnection.addIceCandidate(iceCandidate)
console.log('[WebRTC] ICE candidate added')
} else {
// Queue the candidate until the remote description has been set
this.pendingIceCandidates.push(iceCandidate)
console.log('[WebRTC] ICE candidate queued')
}
}
/**
* Mute or unmute the local audio
*/
setMuted(muted: boolean): void {
if (this.localStream) {
this.localStream.getAudioTracks().forEach(track => {
track.enabled = !muted
})
}
}
/**
* Check whether the local audio is muted
*/
isMuted(): boolean {
if (this.localStream) {
const audioTrack = this.localStream.getAudioTracks()[0]
return audioTrack ? !audioTrack.enabled : true
}
return true
}
/**
* Get the audio level of a stream (used for the volume indicator)
*/
async getAudioLevel(stream: MediaStream): Promise<number> {
const audioContext = new AudioContext()
const analyser = audioContext.createAnalyser()
const source = audioContext.createMediaStreamSource(stream)
source.connect(analyser)
analyser.fftSize = 256
const dataArray = new Uint8Array(analyser.frequencyBinCount)
// Give the analyser a moment to process samples; reading immediately would return all zeros
await new Promise(resolve => setTimeout(resolve, 100))
analyser.getByteFrequencyData(dataArray)
// Compute the average volume
const average = dataArray.reduce((a, b) => a + b, 0) / dataArray.length
await audioContext.close()
return average / 255 // normalize to the 0-1 range
}
/**
* Close the connection
*/
close(): void {
console.log('[WebRTC] Closing connection')
// Stop the local stream
if (this.localStream) {
this.localStream.getTracks().forEach(track => track.stop())
this.localStream = null
}
// Stop the remote stream
if (this.remoteStream) {
this.remoteStream.getTracks().forEach(track => track.stop())
this.remoteStream = null
}
// Close the PeerConnection
if (this.peerConnection) {
this.peerConnection.close()
this.peerConnection = null
}
this.pendingIceCandidates = []
this.updateConnectionState('disconnected')
}
/**
* Reset the manager to the idle state
*/
reset(): void {
this.close()
this.connectionState = 'idle'
}
}
// Factory function for creating WebRTCManager instances
export function createWebRTCManager(config?: WebRTCConfig): WebRTCManager {
return new WebRTCManager(config)
}
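For context, here is a minimal usage sketch of the initiating side. The relative import path, the sendSignal helper, and roomId are assumptions carried over from the signaling sketch above; none of them are part of this file.

// Hypothetical wiring of WebRTCManager to a signaling channel; sendSignal and roomId are
// assumed to come from the signaling sketch above, not from this commit.
import { createWebRTCManager } from './webrtc'

declare function sendSignal(roomId: string, msg: { type: string; payload: unknown }): Promise<void>
declare const roomId: string

const manager = createWebRTCManager({
  onRemoteStream: (stream) => {
    // Play the peer's audio
    const audio = new Audio()
    audio.srcObject = stream
    void audio.play()
  },
  onIceCandidate: (candidate) => {
    // Relay each local ICE candidate to the peer
    void sendSignal(roomId, { type: 'ice-candidate', payload: candidate.toJSON() })
  },
  onConnectionStateChange: (state) => console.log('[VoiceCall] state:', state)
})

// Initiating side: create an offer and send it to the peer
const offer = await manager.createOffer()
await sendSignal(roomId, { type: 'offer', payload: offer })

// When the peer's answer / ICE candidates arrive over SSE:
//   await manager.handleAnswer(answerFromPeer)
//   await manager.addIceCandidate(candidateFromPeer)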