from datetime import datetime
from typing import Literal, Optional

from pydantic import BaseModel, Field
class GenerateRequest(BaseModel):
    """Validated request payload for submitting a video-generation job."""

    # Text prompt describing the desired video (required, non-empty).
    prompt: str = Field(..., min_length=1, max_length=1000)
    # Concepts to steer the generation away from; empty string disables it.
    negative_prompt: str = Field(default="", max_length=1000)
    # Required quality tier; presumably "preview" is a fast low-quality
    # pass and "refine" a full-quality pass — confirm with the backend.
    quality_mode: Literal["preview", "refine"]
    # Clip length in seconds, hard-capped at 5.
    duration_sec: int = Field(default=5, ge=1, le=5)
    # Output resolution; 832x480 defaults look like the models' native
    # resolution — TODO confirm against the generation backends.
    width: int = Field(default=832, ge=64, le=832)
    height: int = Field(default=480, ge=64, le=480)
    # Frame rate of the rendered clip.
    fps: int = Field(default=16, ge=1, le=24)
    # Sampler step count (presumably diffusion denoising steps — verify).
    steps: int = Field(default=8, ge=1, le=100)
    # RNG seed for reproducibility; None means "pick randomly".
    # Bounded to a non-negative signed 32-bit value.
    seed: Optional[int] = Field(default=None, ge=0, le=2**31 - 1)
class TaskStatusResponse(BaseModel):
    """Status snapshot for a previously submitted generation task."""

    # Identifier returned when the task was created.
    task_id: str
    # Task lifecycle state.
    status: Literal["PENDING", "RUNNING", "SUCCEEDED", "FAILED"]
    # Backend and model serving the task; None until the task is assigned.
    backend: Optional[str] = None
    model_name: Optional[str] = None
    # Completion progress; assumed to be a 0.0-1.0 fraction — confirm
    # against whatever writes this field.
    progress: float = 0.0
    # Creation / last-update timestamps (naive vs. aware depends on the
    # producer; not determinable from this module).
    created_at: datetime
    updated_at: datetime
class TaskResultResponse(BaseModel):
    """Final (or in-flight) result of a generation task.

    All artifact paths are None until the task produces them; `error`
    is populated only on failure.
    """

    # Identifier returned when the task was created.
    task_id: str
    # Task lifecycle state; mirrors TaskStatusResponse.status.
    status: Literal["PENDING", "RUNNING", "SUCCEEDED", "FAILED"]
    # Filesystem paths to the produced artifacts, when available.
    video_path: Optional[str] = None
    first_frame_path: Optional[str] = None
    metadata_path: Optional[str] = None
    log_path: Optional[str] = None
    # Human-readable failure reason; None unless the task failed.
    error: Optional[str] = None
class HealthResponse(BaseModel):
    """Service health report: GPU availability and model load state."""

    # Free-form service state string (e.g. "ok" — exact values are set
    # by the health endpoint, not determinable from this module).
    service_status: str
    # Whether CUDA is usable on this host.
    cuda_available: bool
    # GPU device name; None when no GPU is available. Default added so
    # the field is genuinely optional — previously it was nullable but
    # *required* (no default), inconsistent with every other Optional
    # field in this module.
    gpu_name: Optional[str] = None
    # Per-model load flags.
    ltx_loaded: bool
    hunyuan_loaded: bool