Files
AI_A4000/video_worker/app/backends/hunyuan_backend.py
2026-04-07 00:37:39 +08:00

60 lines
2.2 KiB
Python

from pathlib import Path
from typing import Any, Dict
from app.backends.base import BaseVideoBackend
from app.utils.ffmpeg_utils import extract_first_frame, frames_to_video
from app.utils.files import TASK_FIRST_FRAME_NAME, TASK_VIDEO_NAME
from app.utils.image_utils import make_dummy_frame
class HunyuanBackend(BaseVideoBackend):
    """Placeholder backend for HunyuanVideo-1.5 rendering.

    Until the real HunyuanVideo pipeline is wired in, this backend paints
    dummy frames and stitches them into a video so the surrounding task
    flow (video + first-frame artifacts) can be exercised end to end.
    """

    backend_name = "hunyuan_backend"
    model_name = "HunyuanVideo-1.5"

    def __init__(self, model_dir: Path, enable_cpu_offload: bool = True, enable_vae_tiling: bool = True):
        # Memory-optimization flags are stored now; they will matter once
        # the real pipeline replaces the placeholder in load().
        self.model_dir = model_dir
        self.enable_cpu_offload = enable_cpu_offload
        self.enable_vae_tiling = enable_vae_tiling
        self._loaded = False
        self._pipeline = None

    def load(self) -> None:
        """Prepare the (placeholder) pipeline; safe to call repeatedly."""
        if self._loaded:
            return
        # TODO: Replace with real HunyuanVideo loading and memory optimization hooks.
        # Example hooks: self._pipeline.enable_model_cpu_offload(), self._pipeline.vae.enable_tiling()
        self.model_dir.mkdir(parents=True, exist_ok=True)
        self._pipeline = "hunyuan_pipeline_placeholder"
        self._loaded = True

    def is_loaded(self) -> bool:
        """Return True once load() has completed."""
        return self._loaded

    def generate(self, task_id: str, request_data: Dict[str, Any], output_dir: str) -> Dict[str, str]:
        """Render a dummy clip for the task and return absolute artifact paths.

        Returns a dict with "video_path" and "first_frame_path" keys, both
        resolved to absolute filesystem paths under *output_dir*.
        """
        self.load()

        out_root = Path(output_dir)
        frame_dir = out_root / "frames"
        frame_dir.mkdir(parents=True, exist_ok=True)

        # Pull request fields; presumably validated upstream — TODO confirm.
        seconds = int(request_data["duration_sec"])
        frame_rate = int(request_data["fps"])
        frame_w = int(request_data["width"])
        frame_h = int(request_data["height"])
        text_prompt = request_data["prompt"]

        # One labelled dummy frame per output frame.
        frame_count = seconds * frame_rate
        for idx in range(frame_count):
            target = frame_dir / f"frame_{idx:04d}.jpg"
            make_dummy_frame(target, frame_w, frame_h, f"Hunyuan refine | {text_prompt[:60]}", idx)

        # Stitch frames into the task video, then pull the first frame back out.
        clip_path = out_root / TASK_VIDEO_NAME
        frames_to_video(str(frame_dir / "frame_%04d.jpg"), frame_rate, clip_path)
        still_path = out_root / TASK_FIRST_FRAME_NAME
        extract_first_frame(clip_path, still_path)

        return {
            "video_path": str(clip_path.resolve()),
            "first_frame_path": str(still_path.resolve()),
        }