feat: Initialize the 考培练 (exam-training-practice) system project

- Pull the complete codebase from the server
- Reorganize the project structure to follow the framework conventions
- Configure Drone CI deployment for the test environment
- Includes the backend (FastAPI), the frontend (Vue3), and the admin console

Tech stack: Vue3 + TypeScript + FastAPI + MySQL
2026-01-24 19:33:28 +08:00
commit 998211c483
1197 changed files with 228429 additions and 0 deletions


@@ -0,0 +1,190 @@
"""
与课程对话 API
使用 Python 原生 AI 服务实现
"""
import json
import logging
from typing import Optional, Any

from fastapi import APIRouter, HTTPException, Depends
from fastapi.responses import StreamingResponse
from pydantic import BaseModel, Field
from sqlalchemy.ext.asyncio import AsyncSession

from app.core.deps import get_db, get_current_user
from app.models.user import User
from app.services.ai.course_chat_service import course_chat_service_v2

router = APIRouter()
logger = logging.getLogger(__name__)


class CourseChatRequest(BaseModel):
    """Request body for chatting with a course."""

    course_id: int = Field(..., description="Course ID")
    query: str = Field(..., description="User question")
    conversation_id: Optional[str] = Field(
        None, description="Conversation ID; pass it to continue an existing conversation"
    )


class ResponseModel(BaseModel):
    """Generic response envelope."""

    code: int = 200
    message: str = "success"
    data: Optional[Any] = None


async def _chat_with_course(
    request: CourseChatRequest,
    current_user: User,
    db: AsyncSession,
):
    """Streaming chat, implemented with the native Python AI service."""
    logger.info(
        f"User {current_user.username} is chatting with course {request.course_id}: "
        f"{request.query[:50]}..."
    )

    async def generate_stream():
        """Generate the SSE stream."""
        try:
            async for event_type, data in course_chat_service_v2.chat_stream(
                db=db,
                course_id=request.course_id,
                query=request.query,
                user_id=current_user.id,
                conversation_id=request.conversation_id,
            ):
                if event_type == "conversation_started":
                    yield f"data: {json.dumps({'event': 'conversation_started', 'conversation_id': data})}\n\n"
                    logger.info(f"Conversation created: {data}")
                elif event_type == "chunk":
                    yield f"data: {json.dumps({'event': 'message_chunk', 'chunk': data})}\n\n"
                elif event_type == "done":
                    yield f"data: {json.dumps({'event': 'message_end', 'message': data})}\n\n"
                    logger.info(f"Chat finished, total length: {len(data)}")
                elif event_type == "error":
                    yield f"data: {json.dumps({'event': 'error', 'message': data})}\n\n"
                    logger.error(f"Chat error: {data}")
        except Exception as e:
            logger.error(f"Streaming chat failed: {e}", exc_info=True)
            yield f"data: {json.dumps({'event': 'error', 'message': str(e)})}\n\n"

    return StreamingResponse(
        generate_stream(),
        media_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
            "X-Accel-Buffering": "no",  # disable proxy buffering so chunks flush immediately
        },
    )
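
# For illustration, the events yielded by generate_stream() reach the client as
# Server-Sent Events shaped roughly like this (payload values are placeholders):
#
#   data: {"event": "conversation_started", "conversation_id": "<conversation id>"}
#   data: {"event": "message_chunk", "chunk": "<partial text>"}
#   data: {"event": "message_end", "message": "<full reply>"}
#   data: {"event": "error", "message": "<error detail>"}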


@router.post("/chat")
async def chat_with_course(
    request: CourseChatRequest,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """
    Chat with a course (streaming response).

    Implemented with the native Python AI service; supports multi-turn conversations.
    """
    return await _chat_with_course(request, current_user, db)


@router.get("/conversations")
async def get_conversations(
    course_id: Optional[int] = None,
    limit: int = 20,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """
    List conversations.

    Returns the current user's conversation history.
    """
    try:
        conversations = await course_chat_service_v2.get_conversations(
            user_id=current_user.id,
            course_id=course_id,
            limit=limit,
        )
        return ResponseModel(
            code=200,
            message="Conversation list retrieved successfully",
            data={
                "conversations": conversations,
                "total": len(conversations),
            },
        )
    except Exception as e:
        logger.error(f"Failed to get the conversation list: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to get the conversation list: {str(e)}")


@router.get("/messages")
async def get_messages(
    conversation_id: str,
    limit: int = 50,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """
    Get message history.

    Returns the message history of the given conversation.
    """
    try:
        messages = await course_chat_service_v2.get_messages(
            conversation_id=conversation_id,
            user_id=current_user.id,
            limit=limit,
        )
        return ResponseModel(
            code=200,
            message="Message history retrieved successfully",
            data={
                "messages": messages,
                "total": len(messages),
            },
        )
    except Exception as e:
        logger.error(f"Failed to get message history: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to get message history: {str(e)}")


@router.get("/engines")
async def list_chat_engines():
    """List the available chat engines."""
    return ResponseModel(
        code=200,
        message="Chat engine list retrieved successfully",
        data={
            "engines": [
                {
                    "id": "native",
                    "name": "Native Python implementation",
                    "description": (
                        "Uses local AI services (4sapi.com + OpenRouter); "
                        "supports streaming output and multi-turn conversations"
                    ),
                    "default": True,
                }
            ],
            "default_engine": "native",
        },
    )
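
For reference, below is a minimal client-side sketch of how a script might consume the endpoints above: it streams an answer from POST /chat by parsing the SSE frames, then fetches the conversation list from GET /conversations. The URL prefix (/api/v1/course-chat) and the bearer-token header are assumptions about how this router is mounted and secured; the diff does not specify them.

# Minimal client sketch. Assumptions (not specified by the diff): the router is
# mounted under /api/v1/course-chat and requests carry a bearer token.
import json
from typing import Optional

import httpx

BASE_URL = "http://localhost:8000/api/v1/course-chat"  # assumed dev address + prefix
HEADERS = {"Authorization": "Bearer <access token>"}    # placeholder credential


def stream_chat(course_id: int, query: str, conversation_id: Optional[str] = None) -> str:
    """POST /chat, print chunks as they stream in, and return the full reply."""
    payload = {"course_id": course_id, "query": query, "conversation_id": conversation_id}
    reply = ""
    with httpx.stream("POST", f"{BASE_URL}/chat", json=payload, headers=HEADERS, timeout=None) as resp:
        resp.raise_for_status()
        for line in resp.iter_lines():
            if not line.startswith("data: "):
                continue  # skip the blank separator lines between SSE frames
            event = json.loads(line[len("data: "):])
            if event["event"] == "message_chunk":
                print(event["chunk"], end="", flush=True)
            elif event["event"] == "message_end":
                reply = event["message"]
            elif event["event"] == "error":
                raise RuntimeError(event["message"])
    return reply


def list_conversations(course_id: Optional[int] = None, limit: int = 20) -> list:
    """GET /conversations and return the conversations array from the envelope."""
    params = {"limit": limit}
    if course_id is not None:
        params["course_id"] = course_id
    resp = httpx.get(f"{BASE_URL}/conversations", params=params, headers=HEADERS)
    resp.raise_for_status()
    return resp.json()["data"]["conversations"]

To continue a conversation, a client would capture the conversation_id delivered in the conversation_started frame and pass it back into stream_chat on the next turn.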