feat: new file

This commit is contained in:
Daniel
2026-03-18 18:57:58 +08:00
commit d0ff049899
31 changed files with 1507 additions and 0 deletions

View File

@@ -0,0 +1 @@

24
backend/app/routes/ai.py Normal file
View File

@@ -0,0 +1,24 @@
from __future__ import annotations
from fastapi import APIRouter
from pydantic import BaseModel, Field
from ..services.ai_insight import generate_insight
router = APIRouter()
class InsightRequest(BaseModel):
    """Request payload for POST /insight (vector retrieval + optional LLM insight)."""
    # Free-text question driving the retrieval; length-bounded (1..2000 chars).
    query: str = Field(..., min_length=1, max_length=2000)
    # Optional product scope; None presumably means "search across all products" — TODO confirm in generate_insight.
    product_id: str | None = None
    # Number of retrieved candidates passed downstream, clamped to 1..20 (default 6).
    top_k: int = Field(6, ge=1, le=20)
@router.post("/insight")
def insight(req: InsightRequest):
    """Produce a "hot-product discovery -> data validation -> resell decision"
    recommendation, backed by vector retrieval plus an optional LLM pass.

    Delegates entirely to ``generate_insight``; this handler only unpacks
    the validated request model.
    """
    suggestion = generate_insight(
        query=req.query,
        product_id=req.product_id,
        top_k=req.top_k,
    )
    return suggestion

View File

@@ -0,0 +1,18 @@
from __future__ import annotations
from fastapi import APIRouter, Query
from ..services.db_sample import print_db_sample_to_logs
router = APIRouter()
@router.post("/db-sample")
def db_sample(limit: int = Query(10, ge=1, le=200)):
    """Trigger one "print a database sample to the backend logs" pass,
    useful for quickly inspecting the data's content and structure.

    Args:
        limit: Maximum number of rows to sample, clamped to 1..200.
    """
    print_db_sample_to_logs(limit=limit)
    ack = {"ok": True, "printed": True, "limit": limit}
    return ack

View File

@@ -0,0 +1,56 @@
from __future__ import annotations
from datetime import datetime, timedelta
import pandas as pd
from fastapi import APIRouter, HTTPException, Query
from sqlalchemy import text
from ..db import get_engine
from ..services.schema_discovery import discover_schema
from ..services.timeseries import normalize_timeseries
router = APIRouter()
@router.get("/overview")
def overview():
    """Return the headline metrics for the BI dashboard cards.

    The metrics query is inferred automatically from the existing tables
    via ``discover_schema``; responds 422 when no usable sales/order
    detail table could be found.
    """
    engine = get_engine()
    schema = discover_schema(engine)
    if not schema.sales_table:
        raise HTTPException(status_code=422, detail="未发现可用销量/订单明细表(需要至少包含 product_id + 时间 + 数量/金额)")
    with engine.connect() as conn:
        first_row = conn.execute(text(schema.overview_sql)).mappings().first()
    metrics = dict(first_row) if first_row else {}
    return {"schema": schema.model_dump(), "metrics": metrics}
@router.get("/sales/timeseries")
def sales_timeseries(
    product_id: str = Query(..., min_length=1),
    days: int = Query(30, ge=1, le=365),
):
    """Return the normalized per-day sales series for one product.

    Args:
        product_id: Product identifier used as the SQL filter parameter.
        days: Look-back window in days (1..365).

    Returns:
        ``{"product_id": ..., "points": [...]}`` where points come from
        ``normalize_timeseries`` over the ``ds`` / ``units`` / ``gmv`` columns,
        or an empty list when the query matched no rows.

    Raises:
        HTTPException: 422 when no usable sales/order detail table was discovered.
    """
    from datetime import timezone  # local import keeps this fix self-contained

    engine = get_engine()
    schema = discover_schema(engine)
    if not schema.sales_table:
        raise HTTPException(status_code=422, detail="未发现可用销量/订单明细表")
    # datetime.utcnow() is deprecated (Python 3.12+); build the same *naive*
    # UTC timestamp from an aware "now" so comparisons against naive DB
    # columns behave exactly as before.
    now_utc = datetime.now(timezone.utc).replace(tzinfo=None)
    since = now_utc - timedelta(days=days)
    q = text(schema.timeseries_sql)
    with engine.connect() as conn:
        df = pd.read_sql(
            q,
            conn,
            params={"product_id": product_id, "since": since},
        )
    if df.empty:
        return {"product_id": product_id, "points": []}
    points = normalize_timeseries(df, ts_col="ds", value_cols=["units", "gmv"])
    return {"product_id": product_id, "points": points}

View File

@@ -0,0 +1,64 @@
from __future__ import annotations
import math
from datetime import datetime, timedelta
import pandas as pd
from fastapi import APIRouter, HTTPException, Query
from sqlalchemy import text
from ..db import get_engine
from ..services.forecast import forecast_next_n
from ..services.schema_discovery import discover_schema
from ..services.trend_engine import compute_trend_scores
router = APIRouter()
@router.get("/potential-winners")
def potential_winners(days: int = Query(14, ge=3, le=60), limit: int = Query(50, ge=1, le=200)):
    """Rank products by trend "potential" over a recent window.

    Args:
        days: Look-back window in days (3..60) for candidate selection.
        limit: Maximum number of ranked items to return (1..200).

    Returns:
        ``{"items": [...]}`` sorted by ``potential_score`` descending, or an
        empty list when no candidates matched.

    Raises:
        HTTPException: 422 when no usable sales/order detail table was discovered.
    """
    from datetime import timezone  # local import keeps this fix self-contained

    engine = get_engine()
    schema = discover_schema(engine)
    if not schema.sales_table:
        raise HTTPException(status_code=422, detail="未发现可用销量/订单明细表")
    # datetime.utcnow() is deprecated (Python 3.12+); produce the same naive
    # UTC value via an aware "now" stripped of its tzinfo.
    since = datetime.now(timezone.utc).replace(tzinfo=None) - timedelta(days=days)
    q = text(schema.trend_candidates_sql)
    with engine.connect() as conn:
        # Over-fetch (5x) so scoring has headroom before the final head(limit).
        df = pd.read_sql(q, conn, params={"since": since, "limit": limit * 5})
    if df.empty:
        return {"items": []}
    scored = compute_trend_scores(df)
    scored = scored.sort_values("potential_score", ascending=False).head(limit)
    return {"items": scored.to_dict(orient="records")}
@router.get("/forecast")
def forecast(
    product_id: str = Query(..., min_length=1),
    days: int = Query(30, ge=7, le=180),
    horizon: int = Query(14, ge=1, le=60),
):
    """Forecast daily units for one product over the next ``horizon`` days.

    Args:
        product_id: Product identifier used as the SQL filter parameter.
        days: Look-back window in days (7..180) of history fed to the model.
        horizon: Number of future days to predict (1..60).

    Returns:
        ``{"product_id": ..., "forecast": [{"ds": iso_ts, "units_hat": float}, ...]}``;
        predictions are clamped at zero (negative units make no sense).

    Raises:
        HTTPException: 422 when no usable sales/order detail table was discovered.
    """
    from datetime import timezone  # local import keeps this fix self-contained

    engine = get_engine()
    schema = discover_schema(engine)
    if not schema.sales_table:
        raise HTTPException(status_code=422, detail="未发现可用销量/订单明细表")
    # datetime.utcnow() is deprecated (Python 3.12+); same naive-UTC value,
    # built from an aware "now" with tzinfo stripped.
    since = datetime.now(timezone.utc).replace(tzinfo=None) - timedelta(days=days)
    q = text(schema.timeseries_sql)
    with engine.connect() as conn:
        df = pd.read_sql(q, conn, params={"product_id": product_id, "since": since})
    if df.empty:
        return {"product_id": product_id, "forecast": []}
    df = df.sort_values("ds")
    y = df["units"].astype(float).fillna(0.0).values
    yhat = forecast_next_n(y, n=horizon)
    # Future dates start the day after the last observed point.
    start = pd.to_datetime(df["ds"]).max()
    out = [
        {
            "ds": (start + pd.Timedelta(days=step)).to_pydatetime().isoformat(),
            "units_hat": float(max(0.0, value)),
        }
        for step, value in enumerate(yhat, start=1)
    ]
    return {"product_id": product_id, "forecast": out}