Files
Airtep/gig-poc/apps/api/app/services/llm_client.py
2026-03-30 20:49:40 +08:00

35 lines
1.2 KiB
Python

from __future__ import annotations
import json
import httpx
from app.core.config import Settings
from app.domain.schemas import PromptOutput
class LLMClient:
    """Thin client for an OpenAI-compatible chat-completions endpoint.

    Sends a system + user message pair and parses the model's JSON reply
    into a ``PromptOutput``.
    """

    def __init__(self, settings: Settings):
        # Settings are expected to carry llm_enabled, llm_base_url,
        # llm_api_key and llm_model (all read in extract_json).
        self.settings = settings

    def extract_json(
        self,
        system_prompt: str,
        user_text: str,
        *,
        timeout: float = 30.0,
    ) -> PromptOutput | None:
        """Call the LLM and return its JSON reply parsed into a PromptOutput.

        Args:
            system_prompt: Instructions sent as the "system" message.
            user_text: Content sent as the "user" message.
            timeout: HTTP timeout in seconds (default preserves the
                previously hard-coded 30.0).

        Returns:
            ``None`` when the LLM integration is disabled or not fully
            configured; otherwise a ``PromptOutput`` holding both the parsed
            JSON and the raw response text.

        Raises:
            httpx.HTTPStatusError: on a non-2xx response.
            json.JSONDecodeError, KeyError, IndexError: if the response body
                does not match the expected chat-completions shape.
        """
        settings = self.settings
        # Treat missing configuration as "feature off" rather than an error.
        if not settings.llm_enabled or not settings.llm_base_url or not settings.llm_api_key:
            return None

        payload = {
            "model": settings.llm_model,
            "messages": [
                {"role": "system", "content": system_prompt},
                {"role": "user", "content": user_text},
            ],
            # Low temperature: we want deterministic, structured extraction.
            "temperature": 0.1,
            # Ask the server to constrain output to a single JSON object.
            "response_format": {"type": "json_object"},
        }
        headers = {"Authorization": f"Bearer {settings.llm_api_key}"}
        url = f"{settings.llm_base_url.rstrip('/')}/chat/completions"
        with httpx.Client(timeout=timeout) as client:
            response = client.post(url, json=payload, headers=headers)
            response.raise_for_status()
            data = response.json()
        raw_text = data["choices"][0]["message"]["content"]
        return PromptOutput(content=json.loads(raw_text), raw_text=raw_text)