Files
012-kaopeilian/frontend/src/utils/cozeVoiceClient.ts
111 998211c483 feat: 初始化考培练系统项目
- 从服务器拉取完整代码
- 按框架规范整理项目结构
- 配置 Drone CI 测试环境部署
- 包含后端(FastAPI)、前端(Vue3)、管理端

技术栈: Vue3 + TypeScript + FastAPI + MySQL
2026-01-24 19:33:28 +08:00

306 lines
9.2 KiB
TypeScript
Raw Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
/**
* Coze语音客户端基于@coze/api官方SDK
*
* 完全参考coze-chat-frontend/src/stores/TrainingStore.ts
* 保持最简洁的实现,避免过度配置
*/
import {
WsChatClient,
WsChatEventNames,
WsToolsUtils,
ClientEventType,
type WsChatEventData
} from '@coze/api/ws-tools'
import {
WebsocketsEventType,
type ConversationAudioTranscriptCompletedEvent
} from '@coze/api'
/**
 * Connection settings for a Coze voice session.
 */
export interface CozeVoiceConfig {
token: string // Coze Personal Access Token (PAT) used to authenticate the WS connection
botId: string // id of the Coze bot to talk to
userId: string // end-user identifier forwarded to the bot
scenePrompt?: string // scene prompt sent as a text message right after the first connect
conversationId?: string // not supported yet; how to pass it through the WS SDK is still unclear
}
/**
 * Event names surfaced by CozeVoiceClient to its consumers.
 * Mirrors the lifecycle of one voice exchange: connection, user speech
 * transcript, incremental AI text, AI audio playback, and errors.
 */
export type VoiceEventType =
| 'connected'
| 'disconnected'
| 'user_transcript'
| 'ai_message_delta'
| 'ai_speaking'
| 'ai_speech_end'
| 'error'
// Callback signature for all voice events; payload shape depends on the event type.
export type EventCallback = (data: any) => void
/**
 * Coze voice WebSocket client built on the official @coze/api SDK.
 *
 * Closely follows coze-chat-frontend/src/stores/TrainingStore.ts and
 * deliberately keeps configuration minimal.
 */
export class CozeVoiceClient {
  // Underlying SDK client; null while disconnected.
  private client: WsChatClient | null = null
  // Consumer callbacks, keyed by event type.
  private eventHandlers: Map<VoiceEventType, EventCallback[]> = new Map()
  // Accumulated AI text of the in-flight message (reset on completion).
  private tempContent: string = ''
  // True until the first delta of the current AI message has been emitted.
  private isFirstDelta: boolean = true

  /**
   * Initialize and connect (mirrors TrainingStore.ts).
   *
   * @param config connection settings (token, bot id, optional scene prompt)
   * @throws Error when microphone permission is denied or the WebSocket
   *         connection fails; the client is reset to null in that case.
   */
  async connect(config: CozeVoiceConfig): Promise<void> {
    try {
      // 1. Microphone permission is mandatory for voice training.
      const permission = await WsToolsUtils.checkDevicePermission()
      if (!permission.audio) {
        throw new Error('需要麦克风权限才能使用语音陪练')
      }
      console.log('[CozeVoice] ✅ 麦克风权限检查通过')
      // 2. Create the WebSocket client (cf. TrainingStore lines 94-100).
      this.client = new WsChatClient({
        token: config.token,
        baseWsURL: 'wss://ws.coze.cn',
        allowPersonalAccessTokenInBrowser: true,
        botId: config.botId,
        audioMutedDefault: false, // start unmuted
        // ⚠️ Leave enableLocalLoopback unset so the SDK decides (desktop default: false).
        // enableLocalLoopback: true would make the user hear their own voice.
        playbackVolumeDefault: 1.0, // default playback volume
        debug: true
      })
      console.log('[CozeVoice] ✅ WebSocket客户端已创建')
      // 3. Register listeners before connecting so no early event is missed.
      this.setupEventListeners()
      // 4. Open the WebSocket (cf. TrainingStore line 220).
      await this.client.connect()
      console.log('[CozeVoice] ✅ WebSocket连接成功')
      // 5. Set playback volume if this SDK version exposes the setter
      //    (cf. TrainingStore lines 225-227).
      if (this.client && typeof (this.client as any).setPlaybackVolume === 'function') {
        (this.client as any).setPlaybackVolume(1)
        console.log('[CozeVoice] ✅ 已设置播放音量=1')
      }
      // 6. Optionally send the scene prompt so the AI opens the conversation.
      //    (The reference implementation instead waits for the user to speak first.)
      if (config.scenePrompt) {
        this.client.sendTextMessage(config.scenePrompt)
        console.log('[CozeVoice] ✅ 场景提示词已发送')
      }
      console.log('[CozeVoice] 🎉 初始化完成,等待对话...')
    } catch (error) {
      // FIX: discard the half-initialized client so isConnected() does not
      // keep reporting true after a failed connect.
      this.client = null
      console.error('[CozeVoice] ❌ 连接失败:', error)
      throw error
    }
  }

  /**
   * Wire up SDK event listeners (cf. TrainingStore lines 108-136).
   */
  private setupEventListeners(): void {
    if (!this.client) return
    // Connection established.
    this.client.on(WsChatEventNames.CONNECTED, () => {
      console.log('[CozeVoice] [事件] WebSocket连接已建立')
      this.emit('connected', {})
    })
    // Connection closed.
    this.client.on(WsChatEventNames.DISCONNECTED, () => {
      console.log('[CozeVoice] [事件] WebSocket已断开')
      this.emit('disconnected', {})
    })
    // Server-side error.
    this.client.on(WsChatEventNames.SERVER_ERROR, (_: string, event: unknown) => {
      console.error('[CozeVoice] [事件] 服务器错误:', event)
      this.emit('error', { error: '服务器错误' })
    })
    // Catch-all message events (cf. TrainingStore line 135).
    this.client.on(WsChatEventNames.ALL, (eventName: string, event: WsChatEventData) => {
      this.handleMessageEvent(eventName, event)
    })
  }

  /**
   * Dispatch one raw SDK event to the matching high-level voice event
   * (cf. TrainingStore lines 139-203).
   */
  private handleMessageEvent(eventName: string, event: WsChatEventData): void {
    // Skip the CONNECTED pseudo-event (cf. TrainingStore lines 140-143).
    if (eventName === WsChatEventNames.CONNECTED) {
      return
    }
    if (!event) return
    switch (event.event_type) {
      // User started speaking — mute local loopback so the speaker does not
      // play the user's own voice back.
      case 'input_audio_buffer.speech_started': {
        console.log('[CozeVoice] 🎤 用户开始说话')
        const wavStreamPlayer = (this.client as any)?.wavStreamPlayer
        if (wavStreamPlayer && typeof wavStreamPlayer.setLocalLoopbackEnable === 'function') {
          wavStreamPlayer.setLocalLoopbackEnable(false)
          console.log('[CozeVoice] 🔇 已关闭扬声器本地回放')
        }
        break
      }
      // User stopped speaking; transcription is pending.
      case 'input_audio_buffer.speech_stopped': {
        console.log('[CozeVoice] 🎤 用户停止说话,等待识别...')
        break
      }
      // Speech-to-text finished for the user's utterance
      // (cf. TrainingStore lines 148-158).
      case WebsocketsEventType.CONVERSATION_AUDIO_TRANSCRIPT_COMPLETED: {
        const { content } = (event as ConversationAudioTranscriptCompletedEvent).data
        console.log('[CozeVoice] 🎤 用户说:', content)
        this.emit('user_transcript', { content })
        break
      }
      // AI audio sentence starting — keep loopback off: play the AI only,
      // never the user.
      case 'conversation.audio.sentence_start': {
        const wavStreamPlayer = (this.client as any)?.wavStreamPlayer
        if (wavStreamPlayer && typeof wavStreamPlayer.setLocalLoopbackEnable === 'function') {
          wavStreamPlayer.setLocalLoopbackEnable(false)
          console.log('[CozeVoice] 🔊 AI开始说话')
        }
        break
      }
      // Incremental AI text (cf. TrainingStore lines 160-180).
      case WebsocketsEventType.CONVERSATION_MESSAGE_DELTA: {
        if (event.data.content) {
          const content = this.tempContent + event.data.content
          this.tempContent = content
          this.emit('ai_message_delta', {
            content: event.data.content,
            fullContent: content,
            isFirst: this.isFirstDelta
          })
          if (this.isFirstDelta) {
            this.isFirstDelta = false
          }
        }
        break
      }
      // AI message finished — reset accumulation state
      // (cf. TrainingStore lines 181-187).
      case WebsocketsEventType.CONVERSATION_MESSAGE_COMPLETED: {
        this.isFirstDelta = true
        this.tempContent = ''
        break
      }
      // AI audio chunk received (cf. TrainingStore lines 189-193).
      case WebsocketsEventType.CONVERSATION_AUDIO_DELTA: {
        this.emit('ai_speaking', {})
        break
      }
      // Audio playback finished (cf. TrainingStore lines 195-199).
      case ClientEventType.AUDIO_SENTENCE_PLAYBACK_ENDED: {
        this.emit('ai_speech_end', {})
        break
      }
    }
  }

  /**
   * Register a callback for a voice event.
   */
  on(eventType: VoiceEventType, callback: EventCallback): void {
    if (!this.eventHandlers.has(eventType)) {
      this.eventHandlers.set(eventType, [])
    }
    this.eventHandlers.get(eventType)!.push(callback)
  }

  /**
   * Remove a previously registered callback.
   */
  off(eventType: VoiceEventType, callback: EventCallback): void {
    const handlers = this.eventHandlers.get(eventType)
    if (handlers) {
      const index = handlers.indexOf(callback)
      if (index > -1) {
        handlers.splice(index, 1)
      }
    }
  }

  /**
   * Fire an event to all registered callbacks.
   */
  private emit(eventType: VoiceEventType, data: any): void {
    const handlers = this.eventHandlers.get(eventType)
    if (handlers) {
      // FIX: iterate over a snapshot so callbacks may safely call on()/off(),
      // and isolate failures so one throwing callback cannot starve the rest.
      for (const callback of [...handlers]) {
        try {
          callback(data)
        } catch (err) {
          console.error('[CozeVoice] 事件回调执行失败:', err)
        }
      }
    }
  }

  /**
   * Interrupt the AI's current reply (cf. TrainingStore lines 206-212).
   */
  interrupt(): void {
    if (this.client) {
      this.client.interrupt()
      console.log('[CozeVoice] ⏸️ 已打断AI回复')
    }
  }

  /**
   * Close the connection and reset all state
   * (cf. TrainingStore lines 235-244).
   */
  async disconnect(): Promise<void> {
    if (this.client) {
      await this.client.disconnect()
      this.client = null
      console.log('[CozeVoice] ✅ 已断开连接')
    }
    // Drop all consumer callbacks and message-accumulation state.
    this.eventHandlers.clear()
    this.tempContent = ''
    this.isFirstDelta = true
  }

  /**
   * Whether a client currently exists. Presence-based: true from a
   * successful connect() until disconnect() (failed connects reset it).
   */
  isConnected(): boolean {
    return this.client !== null
  }
}
/**
 * Check microphone permission via the @coze/api device utilities.
 *
 * @returns true only when audio capture is explicitly permitted; any
 *          failure of the permission check itself resolves to false.
 */
export async function checkMicrophonePermission(): Promise<boolean> {
  let granted = false
  try {
    const { audio } = await WsToolsUtils.checkDevicePermission()
    granted = audio === true
  } catch (error) {
    // Treat a failing permission API the same as a denial.
    console.error('麦克风权限检查失败:', error)
  }
  return granted
}