fix:优化数据
This commit is contained in:
300
backend/app/routers/ai_settings.py
Normal file
300
backend/app/routers/ai_settings.py
Normal file
@@ -0,0 +1,300 @@
|
||||
"""
|
||||
AI 模型配置:支持多套配置,持久化在 data/ai_configs.json,可选用当前生效配置。
|
||||
GET /settings/ai 当前选用配置;GET /settings/ai/list 列表;POST 新增;PUT /:id 更新;DELETE /:id 删除;POST /:id/activate 选用。
|
||||
"""
|
||||
import json
|
||||
import uuid
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List
|
||||
|
||||
from fastapi import APIRouter, HTTPException, Query, status
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from backend.app.services.ai_service import get_active_ai_config, test_connection_with_config
|
||||
|
||||
# Router for all AI model configuration endpoints, mounted under /settings/ai.
router = APIRouter(prefix="/settings/ai", tags=["ai-settings"])

# Multi-config store (current format): {"configs": [...], "active_id": "..."}.
CONFIGS_PATH = Path("data/ai_configs.json")
# Single-config store from the previous version; migrated on first access
# by _migrate_from_legacy().
LEGACY_CONFIG_PATH = Path("data/ai_config.json")

# Canonical config fields with their fallback values; used when a stored
# config is missing keys and as the default shape during legacy migration.
DEFAULT_FIELDS: Dict[str, Any] = {
    "provider": "OpenAI",
    "api_key": "",
    "base_url": "",
    "model_name": "gpt-4o-mini",
    "temperature": 0.2,
    "system_prompt_override": "",
}
|
||||
|
||||
|
||||
class AIConfigRead(BaseModel):
    """Full AI configuration payload, including the raw API key.

    Returned by endpoints that feed the edit form (and the legacy-compatible
    GET /settings/ai). Field defaults mirror DEFAULT_FIELDS so a partial
    stored config still produces a valid response.
    """

    # Allow field names starting with "model_" (pydantic v2 reserves that
    # namespace by default and would warn about model_name otherwise).
    model_config = {"protected_namespaces": ()}

    # 8-char id assigned at creation time; "" when not known.
    id: str = ""
    name: str = ""
    provider: str = "OpenAI"
    # NOTE: the API key is returned verbatim here — callers must treat this
    # response as secret material.
    api_key: str = ""
    base_url: str = ""
    model_name: str = "gpt-4o-mini"
    temperature: float = 0.2
    system_prompt_override: str = ""
|
||||
|
||||
|
||||
class AIConfigListItem(BaseModel):
    """List-view item: omits the full api_key, only flags whether one is set."""

    # Allow the model_name field: pydantic v2 reserves the "model_" prefix and
    # warns without this. The three sibling models already declare it; adding
    # it here makes the module consistent and silences the warning.
    model_config = {"protected_namespaces": ()}

    id: str
    name: str
    provider: str
    model_name: str
    # Truncated to 64 chars by the list endpoint to keep payloads small.
    base_url: str = ""
    # True when a non-blank api_key is stored for this config.
    api_key_configured: bool = False
    # True for the currently selected (active) config.
    is_active: bool = False
|
||||
|
||||
|
||||
class AIConfigCreate(BaseModel):
    """Request body for creating a new model configuration.

    All fields are optional in practice: blanks are normalized/defaulted by
    the create endpoint (e.g. an empty name is auto-generated from
    provider + model_name).
    """

    # Allow field names starting with "model_" (pydantic v2 reserved prefix).
    model_config = {"protected_namespaces": ()}

    name: str = Field("", max_length=64)
    provider: str = "OpenAI"
    api_key: str = ""
    base_url: str = ""
    model_name: str = "gpt-4o-mini"
    temperature: float = 0.2
    system_prompt_override: str = ""
|
||||
|
||||
|
||||
class AIConfigUpdate(BaseModel):
    """Request body for partially updating a configuration.

    Every field defaults to None, meaning "leave unchanged"; the update
    endpoint only applies fields that are explicitly present.
    """

    # Allow field names starting with "model_" (pydantic v2 reserved prefix).
    model_config = {"protected_namespaces": ()}

    name: str | None = Field(None, max_length=64)
    provider: str | None = None
    api_key: str | None = None
    base_url: str | None = None
    model_name: str | None = None
    temperature: float | None = None
    system_prompt_override: str | None = None
|
||||
|
||||
|
||||
def _load_configs_file() -> Dict[str, Any]:
    """Load the multi-config store from CONFIGS_PATH.

    Returns ``{"configs": [...], "active_id": "..."}``. A missing, corrupt,
    or unexpectedly shaped file yields an empty store instead of raising,
    so every endpoint degrades gracefully (best-effort by design).
    """
    configs: List[Dict] = []
    active_id = ""
    if CONFIGS_PATH.exists():
        try:
            raw = json.loads(CONFIGS_PATH.read_text(encoding="utf-8"))
            configs = raw.get("configs", [])
            active_id = raw.get("active_id", "") or ""
        except Exception:
            # Unreadable JSON or a non-dict top level: treat as empty store.
            configs, active_id = [], ""
    return {"configs": configs, "active_id": active_id}
|
||||
|
||||
|
||||
def _migrate_from_legacy() -> None:
    """One-time migration: wrap the legacy single-config file into the
    multi-config store.

    No-op when the new store already exists or no legacy file is present.
    A corrupt legacy file is silently ignored (best-effort, matching the
    rest of this module's file handling).
    """
    if CONFIGS_PATH.exists() or not LEGACY_CONFIG_PATH.exists():
        return
    try:
        legacy = json.loads(LEGACY_CONFIG_PATH.read_text(encoding="utf-8"))
    except Exception:
        return
    if not isinstance(legacy, dict):
        # Valid JSON but not an object (e.g. a list) would previously crash
        # the {**DEFAULT_FIELDS, **legacy} merge uncaught and 500 every
        # endpoint; treat it like any other unreadable legacy file.
        return
    # DEFAULT_FIELDS supplies every canonical key, so the merge alone is
    # enough — no per-field .get(..., default) re-defaulting needed. Extra
    # keys in the legacy file are dropped, as before.
    merged = {**DEFAULT_FIELDS, **legacy}
    new_id = str(uuid.uuid4())[:8]
    entry: Dict[str, Any] = {"id": new_id, "name": "默认配置"}
    entry.update({key: merged[key] for key in DEFAULT_FIELDS})
    # Persist through the shared writer (creates data/, same JSON layout).
    _save_configs([entry], new_id)
|
||||
|
||||
|
||||
def _save_configs(configs: List[Dict], active_id: str) -> None:
    """Persist the config list and active selection to CONFIGS_PATH.

    Creates the parent directory on first use. Not atomic — a crash
    mid-write can leave a partial file (tolerated by _load_configs_file).
    """
    payload = {"configs": configs, "active_id": active_id}
    CONFIGS_PATH.parent.mkdir(parents=True, exist_ok=True)
    serialized = json.dumps(payload, ensure_ascii=False, indent=2)
    CONFIGS_PATH.write_text(serialized, encoding="utf-8")
|
||||
|
||||
|
||||
@router.get("", response_model=AIConfigRead)
async def get_current_ai_settings():
    """Return the currently selected AI config (edit form / legacy-compatible endpoint)."""
    _migrate_from_legacy()
    cfg = get_active_ai_config()
    # DEFAULT_FIELDS carries exactly the canonical keys and fallbacks the
    # response needs; temperature is coerced to float as before.
    fields = {key: cfg.get(key, default) for key, default in DEFAULT_FIELDS.items()}
    fields["temperature"] = float(fields["temperature"])
    return AIConfigRead(id=cfg.get("id", ""), name=cfg.get("name", ""), **fields)
|
||||
|
||||
|
||||
@router.get("/list", response_model=List[AIConfigListItem])
async def list_ai_configs():
    """List all stored configs for display/selection.

    The api_key is never returned here — only a boolean flag indicating
    whether a non-blank key is configured.
    """
    _migrate_from_legacy()
    data = _load_configs_file()
    active_id = data.get("active_id") or ""
    # Comprehension instead of a manual append loop; the old trailing
    # `or ""` after the slice was redundant (the slice of a str is a str).
    return [
        AIConfigListItem(
            id=c.get("id", ""),
            name=c.get("name", "未命名"),
            provider=c.get("provider", "OpenAI"),
            model_name=c.get("model_name", ""),
            # Truncated to keep the list payload small; full URL via GET /{id}.
            base_url=(c.get("base_url") or "")[:64],
            api_key_configured=bool((c.get("api_key") or "").strip()),
            is_active=c.get("id") == active_id,
        )
        for c in (data.get("configs") or [])
    ]
|
||||
|
||||
|
||||
@router.get("/{config_id}", response_model=AIConfigRead)
async def get_ai_config_by_id(config_id: str):
    """Fetch a single config by id (for the edit form); 404 when absent."""
    _migrate_from_legacy()
    store = _load_configs_file()
    match = next(
        (c for c in store.get("configs") or [] if c.get("id") == config_id),
        None,
    )
    if match is None:
        raise HTTPException(status_code=404, detail="配置不存在")
    return AIConfigRead(
        id=match.get("id", ""),
        name=match.get("name", ""),
        provider=match.get("provider", "OpenAI"),
        api_key=match.get("api_key", ""),
        base_url=match.get("base_url", ""),
        model_name=match.get("model_name", "gpt-4o-mini"),
        temperature=float(match.get("temperature", 0.2)),
        system_prompt_override=match.get("system_prompt_override", ""),
    )
|
||||
|
||||
|
||||
@router.post("", response_model=AIConfigRead, status_code=status.HTTP_201_CREATED)
async def create_ai_config(payload: AIConfigCreate):
    """Create a new model configuration.

    Blank names are auto-generated from provider + model_name. When no
    config was active yet (first config ever), the new one becomes active.
    """
    _migrate_from_legacy()
    data = _load_configs_file()
    configs = list(data.get("configs") or [])
    active_id = data.get("active_id") or ""
    new_id = str(uuid.uuid4())[:8]
    # Auto-name from provider/model when the user leaves the name blank.
    name = (payload.name or "").strip() or f"{payload.provider} - {payload.model_name}"
    new_cfg = {
        "id": new_id,
        "name": name[:64],
        "provider": payload.provider or "OpenAI",
        "api_key": payload.api_key or "",
        "base_url": (payload.base_url or "").strip(),
        "model_name": (payload.model_name or "gpt-4o-mini").strip(),
        # AIConfigCreate.temperature is a non-optional float (default 0.2),
        # so it can never be None — the previous None-check was dead code.
        "temperature": float(payload.temperature),
        "system_prompt_override": (payload.system_prompt_override or "").strip(),
    }
    configs.append(new_cfg)
    # Adopt the new config as active when nothing was selected before.
    _save_configs(configs, active_id or new_id)
    return AIConfigRead(**new_cfg)
|
||||
|
||||
|
||||
@router.put("/{config_id}", response_model=AIConfigRead)
async def update_ai_config(config_id: str, payload: AIConfigUpdate):
    """Partially update one config: only fields present (non-None) in the
    payload are applied. 404 when the id is unknown."""
    _migrate_from_legacy()
    store = _load_configs_file()
    configs = store.get("configs") or []
    target = next((c for c in configs if c.get("id") == config_id), None)
    if target is None:
        raise HTTPException(status_code=404, detail="配置不存在")

    if payload.name is not None:
        # A name that is blank after stripping keeps the existing one.
        target["name"] = (payload.name or "").strip()[:64] or target.get("name", "")
    if payload.provider is not None:
        target["provider"] = payload.provider
    if payload.api_key is not None:
        target["api_key"] = payload.api_key
    if payload.base_url is not None:
        target["base_url"] = (payload.base_url or "").strip()
    if payload.model_name is not None:
        target["model_name"] = (payload.model_name or "").strip()
    if payload.temperature is not None:
        target["temperature"] = float(payload.temperature)
    if payload.system_prompt_override is not None:
        target["system_prompt_override"] = (payload.system_prompt_override or "").strip()

    # target is an element of configs, so the in-place edits are persisted.
    _save_configs(configs, store.get("active_id", ""))
    return AIConfigRead(
        id=target.get("id", ""),
        name=target.get("name", ""),
        provider=target.get("provider", "OpenAI"),
        api_key=target.get("api_key", ""),
        base_url=target.get("base_url", ""),
        model_name=target.get("model_name", "gpt-4o-mini"),
        temperature=float(target.get("temperature", 0.2)),
        system_prompt_override=target.get("system_prompt_override", ""),
    )
|
||||
|
||||
|
||||
@router.delete("/{config_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_ai_config(config_id: str):
    """Delete a config; if it was the active one, fall back to the first
    remaining config (or none). Deleting an unknown id is a silent no-op."""
    _migrate_from_legacy()
    store = _load_configs_file()
    remaining = [c for c in (store.get("configs") or []) if c.get("id") != config_id]
    active_id = store.get("active_id", "")
    if active_id == config_id:
        active_id = remaining[0].get("id", "") if remaining else ""
    _save_configs(remaining, active_id)
|
||||
|
||||
|
||||
@router.post("/test")
async def test_ai_connection(config_id: str | None = Query(None, description="指定配置 ID,不传则用当前选用")):
    """Test connectivity; uses the active config when config_id is omitted.

    Raises 404 for an unknown config_id and 400 (with the provider's error
    message) when the connection test fails.
    """
    # Run the legacy migration first, like every other endpoint here, so a
    # freshly migrated config id can be tested immediately instead of 404ing.
    _migrate_from_legacy()
    if config_id:
        data = _load_configs_file()
        cfg = next(
            (c for c in data.get("configs") or [] if c.get("id") == config_id),
            None,
        )
        if cfg is None:
            raise HTTPException(status_code=404, detail="配置不存在")
    else:
        cfg = get_active_ai_config()
    # Single try/except replaces the two duplicated blocks of the original.
    try:
        result = await test_connection_with_config(cfg)
    except Exception as e:
        # Surface provider/connection errors as 400 so the UI can show them.
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e)) from e
    return {"status": "ok", "message": result}
|
||||
|
||||
|
||||
@router.post("/{config_id}/activate", response_model=AIConfigRead)
async def activate_ai_config(config_id: str):
    """Mark the given config as active and return it; 404 when unknown."""
    _migrate_from_legacy()
    store = _load_configs_file()
    known_ids = {c.get("id") for c in (store.get("configs") or [])}
    if config_id not in known_ids:
        raise HTTPException(status_code=404, detail="配置不存在")
    _save_configs(store.get("configs", []), config_id)
    # Re-read through the service so the response reflects what is now active.
    cfg = get_active_ai_config()
    fields = {key: cfg.get(key, default) for key, default in DEFAULT_FIELDS.items()}
    fields["temperature"] = float(fields["temperature"])
    return AIConfigRead(id=cfg.get("id", ""), name=cfg.get("name", ""), **fields)
|
||||
Reference in New Issue
Block a user