79 lines
2.5 KiB
Python
79 lines
2.5 KiB
Python
from __future__ import annotations
|
|
|
|
from dataclasses import dataclass
|
|
from pathlib import Path
|
|
|
|
from moviepy import AudioFileClip, CompositeVideoClip, TextClip, VideoFileClip, concatenate_videoclips, vfx
|
|
|
|
from .config import AppConfig
|
|
|
|
|
|
@dataclass(frozen=True)
class Segment:
    """One unit of the final video: a source clip, its narration audio, and the narration text."""

    # Path to the source video clip for this segment.
    video_path: Path
    # Path to the narration audio file that accompanies the clip.
    audio_path: Path
    # Narration text; rendered as a subtitle over the clip in render_final.
    narration: str
|
|
|
|
|
|
def _fit_video_to_audio(video: VideoFileClip, audio: AudioFileClip) -> VideoFileClip:
    """Match the video's duration to the audio's, then attach the audio track.

    Loops the footage when the narration runs longer, trims it when shorter,
    and leaves it untouched when either duration is unknown.
    """
    a_dur = audio.duration
    v_dur = video.duration
    if a_dur is not None and v_dur is not None:
        if a_dur > v_dur:
            # Narration outlasts the footage: repeat the video to cover it.
            video = video.with_effects([vfx.Loop(duration=a_dur)])
        elif v_dur > a_dur:
            # Footage outlasts the narration: cut it to the audio length.
            video = video.subclipped(0, a_dur)
    return video.with_audio(audio)
|
|
|
|
|
|
def _subtitle_clip(text: str, size: tuple[int, int], duration: float) -> TextClip:
    """Build a bottom-centered caption clip for *text*, wrapped to ~92% of the frame width."""
    # MoviePy 2 uses Pillow for text rendering by default on most setups.
    caption = TextClip(
        text=text,
        font_size=44,
        color="white",
        stroke_color="black",
        stroke_width=2,
        size=(int(size[0] * 0.92), None),
        method="caption",
    )
    caption = caption.with_position(("center", "bottom"))
    caption = caption.with_duration(duration)
    return caption.with_opacity(0.95)
|
|
|
|
|
|
def render_final(segments: list[Segment], cfg: AppConfig, output_path: str | Path | None = None) -> Path:
    """Compose all segments into a single video file and return its path.

    Each segment's clip is fitted to its narration audio, overlaid with a
    subtitle, optionally faded in/out, then all segments are concatenated
    and encoded to H.264/AAC.

    Args:
        segments: Ordered segments to render; must be non-empty.
        cfg: Application config (transition length, default output path,
            fallback fps).
        output_path: Explicit output file; defaults to ``video.final_output``
            from *cfg*.

    Returns:
        Path of the written video file.

    Raises:
        ValueError: If ``segments`` is empty.
    """
    if not segments:
        # Fail fast with a clear message instead of the opaque error
        # concatenate_videoclips([]) would raise later.
        raise ValueError("render_final requires at least one segment")

    transition_s = float(cfg.get("video.transition_seconds", 0.25))
    out = Path(output_path or str(cfg.get("video.final_output", "./final_poc.mp4")))
    out.parent.mkdir(parents=True, exist_ok=True)

    clips = []
    try:
        for seg in segments:
            v = VideoFileClip(str(seg.video_path))
            a = AudioFileClip(str(seg.audio_path))
            v2 = _fit_video_to_audio(v, a)

            w, h = v2.size
            sub = _subtitle_clip(seg.narration, (w, h), v2.duration or a.duration or 0)
            comp = CompositeVideoClip([v2, sub])
            if transition_s > 0:
                comp = comp.with_effects([vfx.FadeIn(transition_s), vfx.FadeOut(transition_s)])
            clips.append(comp)

        final = concatenate_videoclips(clips, method="compose")
        try:
            final.write_videofile(
                str(out),
                codec="libx264",
                audio_codec="aac",
                # Prefer the first clip's fps; fall back to the configured mock fps.
                fps=clips[0].fps if clips[0].fps else int(cfg.get("video.mock_fps", 24)),
                threads=4,
                preset="medium",
            )
        finally:
            final.close()
    finally:
        # Release every composited clip even when construction or rendering
        # raised mid-way; the original only closed clips after a successful
        # build, leaking open file handles on failure.
        for c in clips:
            c.close()
    return out
|