fix:bug
This commit is contained in:
1
backend/__init__.py
Normal file
1
backend/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
__all__ = []
|
||||
1
backend/app/__init__.py
Normal file
1
backend/app/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
__all__ = []
|
||||
38
backend/app/db.py
Normal file
38
backend/app/db.py
Normal file
@@ -0,0 +1,38 @@
|
||||
import os
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker, DeclarativeBase
|
||||
|
||||
|
||||
class Base(DeclarativeBase):
    """Declarative base shared by every ORM model in the application."""
|
||||
|
||||
|
||||
def get_database_url() -> str:
    """Return the SQLAlchemy database URL.

    Configurable through the DATABASE_URL environment variable; falls back
    to a local SQLite file, e.g. sqlite:///./data/ops_core.db.
    """
    return os.environ.get("DATABASE_URL", "sqlite:///./data/ops_core.db")
|
||||
|
||||
|
||||
# Resolve the URL once at import time so the engine and sessions agree.
DATABASE_URL = get_database_url()

# SQLite connections are bound to their creating thread by default;
# FastAPI may service a request on another thread, so disable the check.
_connect_args = (
    {"check_same_thread": False} if DATABASE_URL.startswith("sqlite") else {}
)

engine = create_engine(DATABASE_URL, connect_args=_connect_args)

SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
||||
|
||||
|
||||
def get_db():
    """FastAPI dependency that yields a DB session.

    The session is always closed after the request, even when the
    endpoint raises.
    """
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
|
||||
|
||||
48
backend/app/main.py
Normal file
48
backend/app/main.py
Normal file
@@ -0,0 +1,48 @@
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
|
||||
from fastapi import FastAPI
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
|
||||
from backend.app.routers import customers, projects, finance
|
||||
|
||||
|
||||
def create_app() -> FastAPI:
    """Build and configure the Ops-Core FastAPI application."""
    application = FastAPI(
        title="Ops-Core",
        description="Monolithic automation & business ops platform",
        version="0.1.0",
    )

    # CORS: comma-separated CORS_ORIGINS env var, or allow everything.
    configured = os.getenv("CORS_ORIGINS")
    if configured:
        allowed: List[str] = [
            item.strip() for item in configured.split(",") if item.strip()
        ]
    else:
        allowed = ["*"]

    application.add_middleware(
        CORSMiddleware,
        allow_origins=allowed,
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )

    # Feature routers.
    for feature_router in (customers.router, projects.router, finance.router):
        application.include_router(feature_router)

    # Static mount serving generated artifacts (quotes, contracts,
    # finance archives, ...).
    static_root = Path("data")
    static_root.mkdir(parents=True, exist_ok=True)
    application.mount("/data", StaticFiles(directory=str(static_root)), name="data")

    return application
|
||||
|
||||
|
||||
app = create_app()
|
||||
|
||||
83
backend/app/models.py
Normal file
83
backend/app/models.py
Normal file
@@ -0,0 +1,83 @@
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from sqlalchemy import (
|
||||
Column,
|
||||
DateTime,
|
||||
ForeignKey,
|
||||
Integer,
|
||||
Numeric,
|
||||
String,
|
||||
Text,
|
||||
)
|
||||
from sqlalchemy.orm import relationship, Mapped, mapped_column
|
||||
|
||||
from .db import Base
|
||||
|
||||
|
||||
class Customer(Base):
    """A customer who owns zero or more projects."""

    __tablename__ = "customers"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, index=True)
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    contact_info: Mapped[str | None] = mapped_column(String(512), nullable=True)
    # Timezone-aware UTC default: datetime.utcnow is deprecated and would
    # put a naive value into a DateTime(timezone=True) column.
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        default=lambda: datetime.now(timezone.utc),
        nullable=False,
    )

    # Deleting a customer also removes their projects (delete-orphan).
    projects: Mapped[list["Project"]] = relationship(
        "Project", back_populates="customer", cascade="all, delete-orphan"
    )
|
||||
|
||||
|
||||
class Project(Base):
    """
    Project Archive: stores original requirement text and AI-generated solution.
    """

    __tablename__ = "projects"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, index=True)
    # DB-level cascade when the owning customer is deleted.
    customer_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("customers.id", ondelete="CASCADE"), nullable=False, index=True
    )
    raw_requirement: Mapped[str] = mapped_column(Text, nullable=False)
    ai_solution_md: Mapped[str | None] = mapped_column(Text, nullable=True)
    status: Mapped[str] = mapped_column(String(50), default="draft", nullable=False)
    # Timezone-aware UTC default; datetime.utcnow is deprecated and naive.
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        default=lambda: datetime.now(timezone.utc),
        nullable=False,
    )

    customer: Mapped[Customer] = relationship("Customer", back_populates="projects")
    quotes: Mapped[list["Quote"]] = relationship(
        "Quote", back_populates="project", cascade="all, delete-orphan"
    )
|
||||
|
||||
|
||||
class Quote(Base):
    """A generated quote (PDF on disk) belonging to one project."""

    __tablename__ = "quotes"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, index=True)
    project_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("projects.id", ondelete="CASCADE"), nullable=False, index=True
    )
    total_amount: Mapped[Numeric] = mapped_column(Numeric(12, 2), nullable=False)
    # Path of the generated quote document on the server filesystem.
    file_path: Mapped[str] = mapped_column(String(512), nullable=False)
    # Timezone-aware UTC default; datetime.utcnow is deprecated and naive.
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        default=lambda: datetime.now(timezone.utc),
        nullable=False,
    )

    project: Mapped[Project] = relationship("Project", back_populates="quotes")
|
||||
|
||||
|
||||
class FinanceRecord(Base):
    """A finance document (invoice, bank receipt, ...) archived from email."""

    __tablename__ = "finance_records"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, index=True)
    month: Mapped[str] = mapped_column(String(7), nullable=False, index=True)  # YYYY-MM
    type: Mapped[str] = mapped_column(String(50), nullable=False)  # invoice / bank_receipt / ...
    file_name: Mapped[str] = mapped_column(String(255), nullable=False)
    file_path: Mapped[str] = mapped_column(String(512), nullable=False)
    # Timezone-aware UTC default; datetime.utcnow is deprecated and naive.
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        default=lambda: datetime.now(timezone.utc),
        nullable=False,
    )
|
||||
|
||||
1
backend/app/routers/__init__.py
Normal file
1
backend/app/routers/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
__all__ = []
|
||||
71
backend/app/routers/customers.py
Normal file
71
backend/app/routers/customers.py
Normal file
@@ -0,0 +1,71 @@
|
||||
from typing import List
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from backend.app.db import get_db
|
||||
from backend.app import models
|
||||
from backend.app.schemas import (
|
||||
CustomerCreate,
|
||||
CustomerRead,
|
||||
CustomerUpdate,
|
||||
)
|
||||
|
||||
|
||||
router = APIRouter(prefix="/customers", tags=["customers"])
|
||||
|
||||
|
||||
@router.get("/", response_model=List[CustomerRead])
async def list_customers(db: Session = Depends(get_db)):
    """Return all customers, newest first."""
    return (
        db.query(models.Customer)
        .order_by(models.Customer.created_at.desc())
        .all()
    )
|
||||
|
||||
|
||||
@router.post("/", response_model=CustomerRead, status_code=status.HTTP_201_CREATED)
async def create_customer(payload: CustomerCreate, db: Session = Depends(get_db)):
    """Persist a new customer and return it with its generated id."""
    record = models.Customer(
        name=payload.name,
        contact_info=payload.contact_info,
    )
    db.add(record)
    db.commit()
    db.refresh(record)  # populate id / created_at from the database
    return record
|
||||
|
||||
|
||||
@router.get("/{customer_id}", response_model=CustomerRead)
async def get_customer(customer_id: int, db: Session = Depends(get_db)):
    """Return one customer by primary key, or 404 if it does not exist."""
    # Session.get replaces the legacy Query.get (deprecated since SQLAlchemy 1.4).
    customer = db.get(models.Customer, customer_id)
    if not customer:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Customer not found")
    return customer
|
||||
|
||||
|
||||
@router.put("/{customer_id}", response_model=CustomerRead)
async def update_customer(
    customer_id: int, payload: CustomerUpdate, db: Session = Depends(get_db)
):
    """Partially update a customer; omitted (None) fields are left unchanged."""
    # Session.get replaces the legacy Query.get (deprecated since SQLAlchemy 1.4).
    customer = db.get(models.Customer, customer_id)
    if not customer:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Customer not found")

    if payload.name is not None:
        customer.name = payload.name
    if payload.contact_info is not None:
        customer.contact_info = payload.contact_info

    db.commit()
    db.refresh(customer)
    return customer
|
||||
|
||||
|
||||
@router.delete("/{customer_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_customer(customer_id: int, db: Session = Depends(get_db)):
    """Delete a customer (and, via cascade, its projects); 404 if missing."""
    # Session.get replaces the legacy Query.get (deprecated since SQLAlchemy 1.4).
    customer = db.get(models.Customer, customer_id)
    if not customer:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Customer not found")

    db.delete(customer)
    db.commit()
    return None
|
||||
|
||||
37
backend/app/routers/finance.py
Normal file
37
backend/app/routers/finance.py
Normal file
@@ -0,0 +1,37 @@
|
||||
from fastapi import APIRouter, HTTPException
|
||||
from fastapi.responses import FileResponse
|
||||
|
||||
from backend.app.schemas import FinanceSyncResponse, FinanceSyncResult
|
||||
from backend.app.services.email_service import create_monthly_zip, sync_finance_emails
|
||||
|
||||
|
||||
router = APIRouter(prefix="/finance", tags=["finance"])
|
||||
|
||||
|
||||
@router.post("/sync", response_model=FinanceSyncResponse)
async def sync_finance():
    """Pull finance attachments from the mailbox and record what was stored."""
    try:
        raw_records = await sync_finance_emails()
    except RuntimeError as exc:
        # Configuration / IMAP failures surface as a 500 with the message.
        raise HTTPException(status_code=500, detail=str(exc)) from exc

    return FinanceSyncResponse(
        items=[FinanceSyncResult(**record) for record in raw_records]
    )
|
||||
|
||||
|
||||
@router.get("/download/{month}")
async def download_finance_month(month: str):
    """
    Download a zipped archive for a given month (YYYY-MM).
    """
    try:
        archive = await create_monthly_zip(month)
    except FileNotFoundError as exc:
        raise HTTPException(status_code=404, detail=str(exc)) from exc

    return FileResponse(
        path=archive,
        media_type="application/zip",
        filename=f"finance_{month}.zip",
    )
|
||||
|
||||
256
backend/app/routers/projects.py
Normal file
256
backend/app/routers/projects.py
Normal file
@@ -0,0 +1,256 @@
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from backend.app import models
|
||||
from backend.app.db import get_db
|
||||
from backend.app.schemas import (
|
||||
ContractGenerateRequest,
|
||||
ContractGenerateResponse,
|
||||
ProjectRead,
|
||||
ProjectUpdate,
|
||||
QuoteGenerateResponse,
|
||||
RequirementAnalyzeRequest,
|
||||
RequirementAnalyzeResponse,
|
||||
)
|
||||
from backend.app.services.ai_service import analyze_requirement
|
||||
from backend.app.services.doc_service import (
|
||||
generate_contract_word,
|
||||
generate_quote_excel,
|
||||
generate_quote_pdf_from_data,
|
||||
)
|
||||
|
||||
|
||||
router = APIRouter(prefix="/projects", tags=["projects"])
|
||||
|
||||
|
||||
def _build_markdown_from_analysis(data: Dict[str, Any]) -> str:
|
||||
"""
|
||||
Convert structured AI analysis JSON into a human-editable Markdown document.
|
||||
"""
|
||||
lines: list[str] = []
|
||||
lines.append("# 项目方案草稿")
|
||||
lines.append("")
|
||||
|
||||
total_hours = data.get("total_estimated_hours")
|
||||
total_amount = data.get("total_amount")
|
||||
if total_hours is not None or total_amount is not None:
|
||||
lines.append("## 概要")
|
||||
if total_hours is not None:
|
||||
lines.append(f"- 建议总工时:**{total_hours}**")
|
||||
if total_amount is not None:
|
||||
lines.append(f"- 建议总报价:**{total_amount}**")
|
||||
lines.append("")
|
||||
|
||||
modules = data.get("modules") or []
|
||||
if modules:
|
||||
lines.append("## 功能模块与技术实现")
|
||||
for idx, module in enumerate(modules, start=1):
|
||||
name = module.get("name", f"模块 {idx}")
|
||||
desc = module.get("description") or ""
|
||||
tech = module.get("technical_approach") or ""
|
||||
hours = module.get("estimated_hours")
|
||||
unit_price = module.get("unit_price")
|
||||
subtotal = module.get("subtotal")
|
||||
|
||||
lines.append(f"### {idx}. {name}")
|
||||
if desc:
|
||||
lines.append(desc)
|
||||
lines.append("")
|
||||
if tech:
|
||||
lines.append("**技术实现思路:**")
|
||||
lines.append(tech)
|
||||
lines.append("")
|
||||
if hours is not None or unit_price is not None or subtotal is not None:
|
||||
lines.append("**工时与报价:**")
|
||||
if hours is not None:
|
||||
lines.append(f"- 预估工时:{hours}")
|
||||
if unit_price is not None:
|
||||
lines.append(f"- 单价:{unit_price}")
|
||||
if subtotal is not None:
|
||||
lines.append(f"- 小计:{subtotal}")
|
||||
lines.append("")
|
||||
|
||||
notes = data.get("notes")
|
||||
if notes:
|
||||
lines.append("## 备注")
|
||||
lines.append(notes)
|
||||
lines.append("")
|
||||
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
@router.get("/", response_model=list[ProjectRead])
async def list_projects(db: Session = Depends(get_db)):
    """Return every project, newest first."""
    return (
        db.query(models.Project).order_by(models.Project.created_at.desc()).all()
    )
|
||||
|
||||
|
||||
@router.get("/{project_id}", response_model=ProjectRead)
async def get_project(project_id: int, db: Session = Depends(get_db)):
    """Return one project by primary key, or 404 if it does not exist."""
    # Session.get replaces the legacy Query.get (deprecated since SQLAlchemy 1.4).
    project = db.get(models.Project, project_id)
    if not project:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Project not found")
    return project
|
||||
|
||||
|
||||
@router.patch("/{project_id}", response_model=ProjectRead)
async def update_project(
    project_id: int,
    payload: ProjectUpdate,
    db: Session = Depends(get_db),
):
    """Partially update a project's solution Markdown and/or status."""
    # Session.get replaces the legacy Query.get (deprecated since SQLAlchemy 1.4).
    project = db.get(models.Project, project_id)
    if not project:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Project not found")
    if payload.ai_solution_md is not None:
        project.ai_solution_md = payload.ai_solution_md
    if payload.status is not None:
        project.status = payload.status
    db.commit()
    db.refresh(project)
    return project
|
||||
|
||||
|
||||
@router.post("/analyze", response_model=RequirementAnalyzeResponse)
async def analyze_project_requirement(
    payload: RequirementAnalyzeRequest,
    db: Session = Depends(get_db),
):
    """Run AI analysis on raw requirement text and archive it as a new project."""
    # Ensure customer exists.  Session.get replaces the legacy Query.get
    # (deprecated since SQLAlchemy 1.4).
    customer = db.get(models.Customer, payload.customer_id)
    if not customer:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Customer not found")

    analysis = await analyze_requirement(payload.raw_text)
    ai_solution_md = _build_markdown_from_analysis(analysis)

    project = models.Project(
        customer_id=payload.customer_id,
        raw_requirement=payload.raw_text,
        ai_solution_md=ai_solution_md,
        status="draft",
    )
    db.add(project)
    db.commit()
    db.refresh(project)

    return RequirementAnalyzeResponse(
        project_id=project.id,
        ai_solution_md=ai_solution_md,
        ai_solution_json=analysis,
    )
|
||||
|
||||
|
||||
@router.post("/{project_id}/generate_quote", response_model=QuoteGenerateResponse)
async def generate_project_quote(
    project_id: int,
    db: Session = Depends(get_db),
):
    """Generate Excel + PDF quote files for a project and record the quote row."""
    # Session.get replaces the legacy Query.get (deprecated since SQLAlchemy 1.4).
    project = db.get(models.Project, project_id)
    if not project:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Project not found")

    # Re-analyze to get fresh structured data (the UI will allow user to edit
    # Markdown separately).
    analysis = await analyze_requirement(project.raw_requirement)

    # Output locations under data/quotes.
    base_dir = Path("data/quotes")
    base_dir.mkdir(parents=True, exist_ok=True)
    excel_path = base_dir / f"quote_project_{project.id}.xlsx"
    pdf_path = base_dir / f"quote_project_{project.id}.pdf"

    template_path = Path("templates/quote_template.xlsx")
    if not template_path.exists():
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Quote template file not found on server.",
        )

    await generate_quote_excel(analysis, str(template_path), str(excel_path))
    await generate_quote_pdf_from_data(analysis, str(pdf_path))

    # Missing/None total falls back to 0.0.
    total_amount = float(analysis.get("total_amount") or 0.0)

    quote = models.Quote(
        project_id=project.id,
        total_amount=total_amount,
        file_path=str(pdf_path),
    )
    db.add(quote)
    db.commit()
    db.refresh(quote)

    return QuoteGenerateResponse(
        quote_id=quote.id,
        project_id=project.id,
        total_amount=total_amount,
        excel_path=str(excel_path),
        pdf_path=str(pdf_path),
    )
|
||||
|
||||
|
||||
@router.post(
    "/{project_id}/generate_contract",
    response_model=ContractGenerateResponse,
)
async def generate_project_contract(
    project_id: int,
    payload: ContractGenerateRequest,
    db: Session = Depends(get_db),
):
    """Fill the contract template for a project, priced from its latest quote."""
    # Session.get replaces the legacy Query.get (deprecated since SQLAlchemy 1.4).
    project = db.get(models.Project, project_id)
    if not project:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Project not found")

    customer = project.customer
    if not customer:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST, detail="Project has no associated customer"
        )

    # Use the latest quote for this project to determine total price.
    latest_quote = (
        db.query(models.Quote)
        .filter(models.Quote.project_id == project.id)
        .order_by(models.Quote.created_at.desc())
        .first()
    )
    if not latest_quote:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="No quote found for this project.",
        )

    contracts_dir = Path("data/contracts")
    contracts_dir.mkdir(parents=True, exist_ok=True)
    output_path = contracts_dir / f"contract_project_{project.id}.docx"

    template_path = Path("templates/contract_template.docx")
    if not template_path.exists():
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Contract template file not found on server.",
        )

    # Build placeholder mapping; caller-supplied extras may override defaults.
    mapping: Dict[str, str] = {
        "{{CUSTOMER_NAME}}": customer.name,
        "{{TOTAL_PRICE}}": str(latest_quote.total_amount),
        "{{DELIVERY_DATE}}": payload.delivery_date,
    }
    mapping.update(payload.extra_placeholders)

    await generate_contract_word(mapping, str(template_path), str(output_path))

    return ContractGenerateResponse(project_id=project.id, contract_path=str(output_path))
|
||||
|
||||
88
backend/app/schemas.py
Normal file
88
backend/app/schemas.py
Normal file
@@ -0,0 +1,88 @@
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class CustomerBase(BaseModel):
    """Fields shared by customer create/read schemas."""

    name: str = Field(..., description="Customer name")
    contact_info: Optional[str] = Field(None, description="Contact information")


class CustomerCreate(CustomerBase):
    """Payload for creating a customer."""

    pass


class CustomerUpdate(BaseModel):
    """Partial customer update; None means 'leave unchanged'."""

    name: Optional[str] = None
    contact_info: Optional[str] = None


class CustomerRead(CustomerBase):
    """Customer as returned by the API."""

    id: int
    created_at: datetime

    class Config:
        from_attributes = True


class ProjectRead(BaseModel):
    """Project as returned by the API."""

    id: int
    customer_id: int
    raw_requirement: str
    ai_solution_md: Optional[str] = None
    status: str
    created_at: datetime

    class Config:
        from_attributes = True


class ProjectUpdate(BaseModel):
    """Partial project update; None means 'leave unchanged'."""

    ai_solution_md: Optional[str] = None
    status: Optional[str] = None


class RequirementAnalyzeRequest(BaseModel):
    """Input for the AI requirement-analysis endpoint."""

    customer_id: int = Field(..., description="Related customer id")
    raw_text: str = Field(..., description="Raw requirement text from chat or WeChat")


class RequirementAnalyzeResponse(BaseModel):
    """Analysis result: new project id plus Markdown and raw JSON forms."""

    project_id: int
    ai_solution_md: str
    ai_solution_json: Dict[str, Any]


class QuoteGenerateResponse(BaseModel):
    """Totals and file locations of a freshly generated quote."""

    quote_id: int
    project_id: int
    total_amount: float
    excel_path: str
    pdf_path: str


class ContractGenerateRequest(BaseModel):
    """Input for contract generation."""

    delivery_date: str = Field(..., description="Delivery date, e.g. 2026-03-31")
    extra_placeholders: Dict[str, str] = Field(
        default_factory=dict,
        description="Additional placeholder mappings for the contract template",
    )


class ContractGenerateResponse(BaseModel):
    """Location of the generated contract document."""

    project_id: int
    contract_path: str


class FinanceSyncResult(BaseModel):
    """One finance attachment stored during an email sync run."""

    id: int
    month: str
    type: str
    file_name: str
    file_path: str


class FinanceSyncResponse(BaseModel):
    """All attachments stored by a sync run."""

    items: List[FinanceSyncResult]
|
||||
|
||||
1
backend/app/services/__init__.py
Normal file
1
backend/app/services/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
__all__ = []
|
||||
108
backend/app/services/ai_service.py
Normal file
108
backend/app/services/ai_service.py
Normal file
@@ -0,0 +1,108 @@
|
||||
import json
|
||||
import os
|
||||
from typing import Any, Dict
|
||||
|
||||
from openai import AsyncOpenAI
|
||||
|
||||
|
||||
_client: AsyncOpenAI | None = None
|
||||
|
||||
|
||||
def get_ai_client() -> AsyncOpenAI:
    """Return the process-wide AsyncOpenAI client, creating it on first use.

    Configuration comes from the environment:
    - AI_API_KEY / OPENAI_API_KEY (required)
    - AI_BASE_URL (optional, defaults to the official OpenAI endpoint;
      may point at any OpenAI-compatible service such as DeepSeek or Qwen)
    """
    global _client
    if _client is None:
        key = os.getenv("AI_API_KEY") or os.getenv("OPENAI_API_KEY")
        if not key:
            raise RuntimeError("AI_API_KEY or OPENAI_API_KEY must be set in environment.")

        endpoint = os.getenv("AI_BASE_URL")
        _client = AsyncOpenAI(
            api_key=key,
            base_url=endpoint or None,
        )
    return _client
|
||||
|
||||
|
||||
def _build_requirement_prompt(raw_text: str) -> str:
|
||||
"""
|
||||
Build a clear system/user prompt for requirement analysis.
|
||||
The model must output valid JSON only.
|
||||
"""
|
||||
return (
|
||||
"你是一名资深的系统架构师,请阅读以下来自客户的原始需求文本,"
|
||||
"提炼出清晰的交付方案,并严格按照指定 JSON 结构输出。\n\n"
|
||||
"【要求】\n"
|
||||
"1. 按功能模块拆分需求。\n"
|
||||
"2. 每个模块给出简要说明和技术实现思路。\n"
|
||||
"3. 估算建议工时(以人天或人小时为单位,使用数字)。\n"
|
||||
"4. 可以根据你的经验给出每个模块的单价与小计金额,并给出总金额,"
|
||||
"方便后续生成报价单。\n\n"
|
||||
"【返回格式】请只返回 JSON,不要包含任何额外说明文字:\n"
|
||||
"{\n"
|
||||
' "modules": [\n'
|
||||
" {\n"
|
||||
' "name": "模块名称",\n'
|
||||
' "description": "模块说明(可以为 Markdown 格式)",\n'
|
||||
' "technical_approach": "技术实现思路(Markdown 格式)",\n'
|
||||
' "estimated_hours": 16,\n'
|
||||
' "unit_price": 800,\n'
|
||||
' "subtotal": 12800\n'
|
||||
" }\n"
|
||||
" ],\n"
|
||||
' "total_estimated_hours": 40,\n'
|
||||
' "total_amount": 32000,\n'
|
||||
' "notes": "整体方案备注(可选,Markdown 格式)"\n'
|
||||
"}\n\n"
|
||||
f"【客户原始需求】\n{raw_text}"
|
||||
)
|
||||
|
||||
|
||||
async def analyze_requirement(raw_text: str) -> Dict[str, Any]:
    """Ask the AI model to analyze a customer requirement.

    Returns the parsed dict matching the JSON structure described in
    `_build_requirement_prompt`.  Raises RuntimeError when the model
    replies with something that is not valid JSON.
    """
    client = get_ai_client()
    model_name = os.getenv("AI_MODEL", "gpt-4.1-mini")

    completion = await client.chat.completions.create(
        model=model_name,
        response_format={"type": "json_object"},
        messages=[
            {
                "role": "system",
                "content": (
                    "你是一名严谨的系统架构师,只能输出有效的 JSON,不要输出任何解释文字。"
                ),
            },
            {
                "role": "user",
                "content": _build_requirement_prompt(raw_text),
            },
        ],
        temperature=0.2,
    )

    reply = completion.choices[0].message.content or "{}"
    try:
        parsed: Dict[str, Any] = json.loads(reply)
    except json.JSONDecodeError as exc:
        raise RuntimeError(f"AI 返回的内容不是合法 JSON:{reply}") from exc

    return parsed
|
||||
|
||||
189
backend/app/services/doc_service.py
Normal file
189
backend/app/services/doc_service.py
Normal file
@@ -0,0 +1,189 @@
|
||||
import asyncio
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List
|
||||
|
||||
from docx import Document
|
||||
from openpyxl import load_workbook
|
||||
from reportlab.lib.pagesizes import A4
|
||||
from reportlab.pdfgen import canvas
|
||||
|
||||
|
||||
async def generate_quote_excel(
    project_data: Dict[str, Any],
    template_path: str,
    output_path: str,
) -> str:
    """Fill the Excel quote template from structured AI analysis data.

    `project_data` follows the analyzer's JSON shape: a "modules" list
    (name / description / technical_approach / estimated_hours /
    unit_price / subtotal) plus optional "total_estimated_hours",
    "total_amount" and "notes".  Returns the path of the written workbook.

    Bug fix: `_work` was previously declared `async def` but handed to
    `asyncio.to_thread`, which expects a *sync* callable — the worker thread
    only created a never-awaited coroutine and this function returned that
    coroutine object instead of the path, writing nothing.
    """

    def _work() -> str:
        destination = Path(output_path)
        destination.parent.mkdir(parents=True, exist_ok=True)

        workbook = load_workbook(Path(template_path))
        # Assume the first worksheet is used for the quote.
        sheet = workbook.active

        modules: List[Dict[str, Any]] = project_data.get("modules", [])

        # Template layout assumptions (adjust coordinates to the template):
        # line items start at row 10; columns A=index, B=name,
        # C=description, D=estimated hours, E=unit price, F=subtotal.
        start_row = 10
        for idx, module in enumerate(modules, start=1):
            row = start_row + idx - 1
            sheet[f"A{row}"] = idx
            sheet[f"B{row}"] = module.get("name")
            sheet[f"C{row}"] = module.get("description")
            sheet[f"D{row}"] = module.get("estimated_hours")
            sheet[f"E{row}"] = module.get("unit_price")
            sheet[f"F{row}"] = module.get("subtotal")

        # Summary cells in the template header (adjust as needed).
        total_hours = project_data.get("total_estimated_hours")
        total_amount = project_data.get("total_amount")
        notes = project_data.get("notes")
        if total_hours is not None:
            sheet["D5"] = total_hours
        if total_amount is not None:
            sheet["F5"] = total_amount
        if notes:
            sheet["B6"] = notes

        workbook.save(destination)
        return str(destination)

    return await asyncio.to_thread(_work)
|
||||
|
||||
|
||||
def _replace_in_paragraphs(paragraphs, mapping: Dict[str, str]) -> None:
|
||||
for paragraph in paragraphs:
|
||||
for placeholder, value in mapping.items():
|
||||
if placeholder in paragraph.text:
|
||||
# Rebuild runs to preserve basic formatting as much as possible.
|
||||
inline = paragraph.runs
|
||||
text = paragraph.text.replace(placeholder, value)
|
||||
# Clear existing runs
|
||||
for i in range(len(inline) - 1, -1, -1):
|
||||
paragraph.runs[i].clear()
|
||||
paragraph.runs[i].text = ""
|
||||
# Add a single run with replaced text
|
||||
paragraph.add_run(text)
|
||||
|
||||
|
||||
def _replace_in_tables(tables, mapping: Dict[str, str]) -> None:
    """Apply placeholder replacement to every cell paragraph in *tables*."""
    for tbl in tables:
        for tbl_row in tbl.rows:
            for tbl_cell in tbl_row.cells:
                _replace_in_paragraphs(tbl_cell.paragraphs, mapping)
|
||||
|
||||
|
||||
async def generate_contract_word(
    contract_data: Dict[str, str],
    template_path: str,
    output_path: str,
) -> str:
    """Generate a contract .docx by replacing placeholders in a template.

    `contract_data` maps literal placeholder tokens to replacements, e.g.
    {"{{CUSTOMER_NAME}}": "张三", "{{TOTAL_PRICE}}": "¥32,000",
    "{{DELIVERY_DATE}}": "2026-03-31"}.  Returns the output file path.

    Bug fix: `_work` was previously declared `async def` but handed to
    `asyncio.to_thread`, which expects a *sync* callable — the worker thread
    only created a never-awaited coroutine and this function returned that
    coroutine object instead of the path, writing nothing.
    """

    def _work() -> str:
        output = Path(output_path)
        output.parent.mkdir(parents=True, exist_ok=True)

        doc = Document(str(Path(template_path)))

        _replace_in_paragraphs(doc.paragraphs, contract_data)
        _replace_in_tables(doc.tables, contract_data)

        doc.save(str(output))
        return str(output)

    return await asyncio.to_thread(_work)
|
||||
|
||||
|
||||
async def generate_quote_pdf_from_data(
    project_data: Dict[str, Any],
    output_pdf_path: str,
) -> str:
    """Generate a simple PDF quote summary directly from structured data.

    This does not render the Excel visually, but provides a clean PDF that
    can be sent to customers.  Returns the output file path.

    Bug fix: `_work` was previously declared `async def` but handed to
    `asyncio.to_thread`, which expects a *sync* callable — the worker thread
    only created a never-awaited coroutine and this function returned that
    coroutine object instead of the path, writing nothing.

    NOTE(review): Helvetica has no CJK glyphs; the Chinese labels below may
    not render — consider registering a CID/TTF font. TODO confirm.
    """

    def _work() -> str:
        output = Path(output_pdf_path)
        output.parent.mkdir(parents=True, exist_ok=True)

        c = canvas.Canvas(str(output), pagesize=A4)
        width, height = A4

        y = height - 40
        c.setFont("Helvetica-Bold", 14)
        c.drawString(40, y, "报价单 Quote")
        y -= 30

        c.setFont("Helvetica", 10)

        modules: List[Dict[str, Any]] = project_data.get("modules", [])
        for idx, module in enumerate(modules, start=1):
            name = module.get("name", "")
            hours = module.get("estimated_hours", "")
            subtotal = module.get("subtotal", "")
            c.drawString(40, y, f"{idx}. {name} - 工时: {hours}, 小计: {subtotal}")
            y -= 16
            # Start a fresh page before running off the bottom margin.
            if y < 80:
                c.showPage()
                y = height - 40
                c.setFont("Helvetica", 10)

        total_amount = project_data.get("total_amount")
        total_hours = project_data.get("total_estimated_hours")

        y -= 10
        c.setFont("Helvetica-Bold", 11)
        if total_hours is not None:
            c.drawString(40, y, f"总工时 Total Hours: {total_hours}")
            y -= 18
        if total_amount is not None:
            c.drawString(40, y, f"总金额 Total Amount: {total_amount}")

        c.showPage()
        c.save()
        return str(output)

    return await asyncio.to_thread(_work)
|
||||
|
||||
215
backend/app/services/email_service.py
Normal file
215
backend/app/services/email_service.py
Normal file
@@ -0,0 +1,215 @@
|
||||
import asyncio
|
||||
import email
|
||||
import imaplib
|
||||
import os
|
||||
from datetime import datetime
|
||||
from email.header import decode_header
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Tuple
|
||||
|
||||
from backend.app.db import SessionLocal
|
||||
from backend.app.models import FinanceRecord
|
||||
|
||||
|
||||
FINANCE_BASE_DIR = Path("data/finance")
|
||||
|
||||
|
||||
def _decode_header_value(value: str | None) -> str:
|
||||
if not value:
|
||||
return ""
|
||||
parts = decode_header(value)
|
||||
decoded = ""
|
||||
for text, enc in parts:
|
||||
if isinstance(text, bytes):
|
||||
decoded += text.decode(enc or "utf-8", errors="ignore")
|
||||
else:
|
||||
decoded += text
|
||||
return decoded
|
||||
|
||||
|
||||
def _classify_type(subject: str) -> str:
|
||||
"""
|
||||
Classify finance document type based on subject keywords.
|
||||
"""
|
||||
subject_lower = subject.lower()
|
||||
if any(k in subject for k in ["发票", "invoice"]):
|
||||
return "invoices"
|
||||
if any(k in subject for k in ["流水", "bank", "对账单", "statement"]):
|
||||
return "bank_records"
|
||||
if any(k in subject for k in ["回执", "receipt"]):
|
||||
return "receipts"
|
||||
return "others"
|
||||
|
||||
|
||||
def _ensure_month_dir(month_str: str, doc_type: str) -> Path:
    """Return FINANCE_BASE_DIR/<month_str>/<doc_type>, creating it if needed."""
    target = FINANCE_BASE_DIR / month_str / doc_type
    target.mkdir(parents=True, exist_ok=True)
    return target
|
||||
|
||||
|
||||
def _parse_email_date(msg: email.message.Message) -> datetime:
|
||||
date_tuple = email.utils.parsedate_tz(msg.get("Date"))
|
||||
if date_tuple:
|
||||
dt = datetime.fromtimestamp(email.utils.mktime_tz(date_tuple))
|
||||
else:
|
||||
dt = datetime.utcnow()
|
||||
return dt
|
||||
|
||||
|
||||
def _save_attachment(
    msg: email.message.Message,
    month_str: str,
    doc_type: str,
) -> List[Tuple[str, str]]:
    """
    Save PDF/image attachments of *msg* under data/finance/<month>/<doc_type>.

    Only MIME parts with an "attachment" Content-Disposition whose main
    content type is "application" or "image" are saved. Existing files are
    never overwritten; a numeric suffix is appended instead.

    Returns:
        List of (original_file_name, saved_file_path) tuples.
    """
    saved: List[Tuple[str, str]] = []
    base_dir = _ensure_month_dir(month_str, doc_type)

    for part in msg.walk():
        content_disposition = part.get("Content-Disposition", "")
        if "attachment" not in content_disposition:
            continue

        filename = _decode_header_value(part.get_filename())
        if not filename:
            continue

        # SECURITY FIX: the attachment filename is attacker-controlled.
        # Keep only the final path component (normalizing Windows-style
        # separators first) so names like "../../etc/x" or "..\\x" cannot
        # escape base_dir.
        safe_name = Path(filename.replace("\\", "/")).name
        if not safe_name or safe_name in (".", ".."):
            continue

        # Accept PDFs (application/*) and common images only.
        # (Removed unused `content_type` local from the original.)
        if part.get_content_maintype() not in ("application", "image"):
            continue

        data = part.get_payload(decode=True)
        if not data:
            continue

        file_path = base_dir / safe_name
        # Ensure a unique filename: name.pdf -> name_1.pdf, name_2.pdf, ...
        # (Previously the suffix compounded: name_1.pdf -> name_1_2.pdf.)
        stem, suffix = file_path.stem, file_path.suffix
        counter = 1
        while file_path.exists():
            file_path = base_dir / f"{stem}_{counter}{suffix}"
            counter += 1

        with open(file_path, "wb") as f:
            f.write(data)

        saved.append((filename, str(file_path)))

    return saved
|
||||
|
||||
|
||||
async def sync_finance_emails() -> List[Dict[str, Any]]:
    """
    Connect to IMAP, fetch unread finance-related emails, download attachments,
    save to filesystem and record FinanceRecord entries.

    IMAP connection parameters come from the environment:
    IMAP_HOST / IMAP_USER / IMAP_PASSWORD (required), IMAP_PORT (default 993),
    IMAP_MAILBOX (default "INBOX").

    Returns:
        A list of dicts describing the FinanceRecord rows created in this run
        (keys: id, month, type, file_name, file_path).

    Raises:
        RuntimeError: if any of the required IMAP_* variables are unset.
    """

    def _sync() -> List[Dict[str, Any]]:
        # Blocking IMAP and DB work — executed in a worker thread via
        # asyncio.to_thread at the bottom of the enclosing coroutine.
        host = os.getenv("IMAP_HOST")
        user = os.getenv("IMAP_USER")
        password = os.getenv("IMAP_PASSWORD")
        port = int(os.getenv("IMAP_PORT", "993"))  # 993 = IMAP over TLS
        mailbox = os.getenv("IMAP_MAILBOX", "INBOX")

        if not all([host, user, password]):
            raise RuntimeError("IMAP_HOST, IMAP_USER, IMAP_PASSWORD must be set.")

        results: List[Dict[str, Any]] = []

        with imaplib.IMAP4_SSL(host, port) as imap:
            imap.login(user, password)
            imap.select(mailbox)

            # Search for UNSEEN emails with finance related keywords in subject.
            # Note: IMAP SEARCH is limited; here we search UNSEEN first then filter in Python.
            status, data = imap.search(None, "UNSEEN")
            if status != "OK":
                return results

            # data[0] is a space-separated bytes list of message sequence numbers.
            id_list = data[0].split()
            db = SessionLocal()
            try:
                for msg_id in id_list:
                    status, msg_data = imap.fetch(msg_id, "(RFC822)")
                    if status != "OK":
                        continue

                    # msg_data[0] is (header, raw-message-bytes) for RFC822 fetches.
                    raw_email = msg_data[0][1]
                    msg = email.message_from_bytes(raw_email)

                    subject = _decode_header_value(msg.get("Subject"))
                    doc_type = _classify_type(subject)

                    # Filter by keywords first
                    if doc_type == "others":
                        continue

                    # Bucket the email's files by the month of its Date header.
                    dt = _parse_email_date(msg)
                    month_str = dt.strftime("%Y-%m")

                    saved_files = _save_attachment(msg, month_str, doc_type)
                    for file_name, file_path in saved_files:
                        record = FinanceRecord(
                            month=month_str,
                            type=doc_type,
                            file_name=file_name,
                            file_path=file_path,
                        )
                        # NOTE: created_at defaults at DB layer
                        db.add(record)
                        # Flush so the autogenerated primary key is available
                        # for the result payload below.
                        db.flush()

                        results.append(
                            {
                                "id": record.id,
                                "month": record.month,
                                "type": record.type,
                                "file_name": record.file_name,
                                "file_path": record.file_path,
                            }
                        )

                    # Mark email as seen and flagged to avoid re-processing
                    # NOTE(review): a single commit after the loop means a
                    # failure mid-run loses DB rows for emails already flagged
                    # \Seen (attachments stay on disk) — confirm this
                    # best-effort behavior is intended.
                    imap.store(msg_id, "+FLAGS", "\\Seen \\Flagged")

                db.commit()
            finally:
                db.close()

        return results

    return await asyncio.to_thread(_sync)
|
||||
|
||||
|
||||
async def create_monthly_zip(month_str: str) -> str:
    """
    Zip the finance folder for a given month (YYYY-MM) and return the zip path.

    Archive entries are stored relative to FINANCE_BASE_DIR, so the zip
    contains a top-level "<month>/..." tree.

    Raises:
        FileNotFoundError: if no finance directory exists for *month_str*.
    """
    import zipfile

    def _build_archive() -> str:
        # Blocking filesystem work; executed in a worker thread below.
        source_dir = FINANCE_BASE_DIR / month_str
        if not source_dir.exists():
            raise FileNotFoundError(f"Finance directory for {month_str} not found.")

        FINANCE_BASE_DIR.mkdir(parents=True, exist_ok=True)
        archive_path = FINANCE_BASE_DIR / f"{month_str}.zip"

        with zipfile.ZipFile(archive_path, "w", compression=zipfile.ZIP_DEFLATED) as archive:
            for directory, _subdirs, filenames in os.walk(source_dir):
                for filename in filenames:
                    absolute = Path(directory) / filename
                    archive.write(absolute, arcname=absolute.relative_to(FINANCE_BASE_DIR))

        return str(archive_path)

    return await asyncio.to_thread(_build_archive)
|
||||
|
||||
Reference in New Issue
Block a user