render_video/src/render/mod.rs

439 lines
11 KiB
Rust
Raw Normal View History

2023-10-28 21:38:17 +00:00
#![allow(warnings)]
pub mod ffmpeg;
2023-10-30 15:05:21 +00:00
mod filter;
2023-10-28 21:38:17 +00:00
2023-10-30 16:32:21 +00:00
use self::filter::Filter;
2023-10-28 21:38:17 +00:00
use crate::{
2023-10-30 16:32:21 +00:00
iotro::{intro, outro},
render::ffmpeg::{Ffmpeg, FfmpegInput},
2023-10-28 21:38:17 +00:00
time::{format_date, Time},
2023-11-02 20:33:21 +00:00
Project, ProjectSourceMetadata, Resolution, MEM_LIMIT
2023-10-28 21:38:17 +00:00
};
use anyhow::{bail, Context};
use camino::{Utf8Path as Path, Utf8PathBuf as PathBuf};
use rational::Rational;
use std::{
2023-10-30 20:26:17 +00:00
borrow::Cow,
collections::VecDeque,
2023-10-28 21:38:17 +00:00
fs::{self, File},
io::Write as _,
process::{Command, Stdio}
};
/// Length of the rendered intro video.
const INTRO_LEN: Time = Time {
	seconds: 3,
	micros: 0
};
/// Length of the rendered outro video.
const OUTRO_LEN: Time = Time {
	seconds: 5,
	micros: 0
};
/// Duration of the fade in/out transitions between intro, recording and outro.
const TRANSITION_LEN: Time = Time {
	seconds: 0,
	micros: 200_000
};
/// Speed-up factor applied to fast-forwarded sections of the recording.
const FF_MULTIPLIER: usize = 8;
// logo sizes at full hd, will be scaled to source resolution
const FF_LOGO_SIZE: usize = 128;
const LOGO_SIZE: usize = 96;
2023-10-28 21:38:17 +00:00
fn cmd() -> Command {
2023-11-02 09:23:31 +00:00
// we use systemd-run to limit the process memory
// I tried others like ulimit, chpst or isolate, but none worked
let mut cmd = Command::new("systemd-run");
cmd.arg("--scope")
.arg("-q")
.arg("--expand-environment=no")
.arg("-p")
2023-11-02 20:33:21 +00:00
.arg(format!("MemoryMax={}", MEM_LIMIT.read().unwrap()))
2023-11-02 09:23:31 +00:00
.arg("--user");
// we use busybox ash for having a shell that outputs commands with -x
cmd.arg("busybox")
.arg("ash")
2023-10-28 21:38:17 +00:00
.arg("-exuo")
.arg("pipefail")
.arg("-c")
2023-10-30 19:28:17 +00:00
.arg("exec \"$0\" \"${@}\"");
2023-10-28 21:38:17 +00:00
cmd
}
/// Build an `ffprobe` invocation (via [`cmd`]) that prints only raw values,
/// without section wrappers or keys, and stays quiet unless an error occurs.
fn ffprobe() -> Command {
	let mut probe = cmd();
	probe
		.arg("ffprobe")
		.arg("-v")
		.arg("error")
		.arg("-of")
		.arg("default=noprint_wrappers=1:nokey=1");
	probe
}
fn read_output(cmd: &mut Command) -> anyhow::Result<String> {
let out = cmd.stderr(Stdio::inherit()).output()?;
if !out.status.success() {
bail!(
"Executed command failed with exit status {:?}",
out.status.code()
);
}
2023-10-30 19:28:17 +00:00
String::from_utf8(out.stdout)
.context("Command returned non-utf8 output")
.map(|str| str.trim().into())
2023-10-28 21:38:17 +00:00
}
/// Probe the first video stream (`v:0`) of `input` for the entries named by
/// `query` (e.g. `"stream=width"`) and return the raw value as a string.
fn ffprobe_video(query: &str, input: &Path) -> anyhow::Result<String> {
	let mut probe = ffprobe();
	probe
		.arg("-select_streams")
		.arg("v:0")
		.arg("-show_entries")
		.arg(query)
		.arg(input);
	read_output(&mut probe)
}
/// Probe the first audio stream (`a:0`) of a concat-demuxer playlist file for
/// the entries named by `query` and return the raw value as a string.
///
/// `concat_input` is read with ffmpeg's concat demuxer (`-f concat -safe 0`),
/// so it must be a text file listing the actual media files.
fn ffprobe_audio(query: &str, concat_input: &Path) -> anyhow::Result<String> {
	let mut probe = ffprobe();
	probe
		.arg("-select_streams")
		.arg("a:0")
		.arg("-show_entries")
		.arg(query)
		.arg("-safe")
		.arg("0")
		.arg("-f")
		.arg("concat")
		.arg(concat_input);
	read_output(&mut probe)
}
/// State for rendering one lecture project: the source directory, the
/// project's slug and the per-project target directory all outputs go to.
pub(crate) struct Renderer<'a> {
	/// The directory with all the sources.
	directory: &'a Path,
	/// The slug (i.e. 23ws-malo2-231016).
	slug: String,
	/// The target directory.
	target: PathBuf
}
2023-10-30 16:32:21 +00:00
/// Render a still SVG into an mp4 of the given `duration` by looping the
/// image and pairing it with a generated silent audio track.
fn svg2mp4(svg: PathBuf, mp4: PathBuf, duration: Time) -> anyhow::Result<()> {
	let mut encoder = Ffmpeg::new(mp4);
	encoder.add_input(FfmpegInput {
		loop_input: true,
		..FfmpegInput::new(svg)
	});
	// the looped image has no audio stream; synthesize a silent one
	encoder.add_filter(Filter::GenerateSilence {
		video: "0".into(),
		output: "out".into()
	});
	encoder.set_filter_output("out");
	encoder.set_duration(duration);
	encoder.run()
}
2023-10-30 16:55:53 +00:00
fn svg2png(svg: &Path, png: &Path, size: usize) -> anyhow::Result<()> {
let mut cmd = cmd();
let size = size.to_string();
2023-10-30 19:28:17 +00:00
cmd.arg("inkscape")
.arg("-w")
.arg(&size)
.arg("-h")
.arg(&size);
2023-10-30 16:55:53 +00:00
cmd.arg(svg).arg("-o").arg(png);
2023-10-30 19:28:17 +00:00
2023-10-30 16:55:53 +00:00
let status = cmd.status()?;
2023-10-30 19:28:17 +00:00
if status.success() {
Ok(())
} else {
bail!("inkscape failed with exit code {:?}", status.code())
}
2023-10-30 16:55:53 +00:00
}
2023-10-28 21:38:17 +00:00
impl<'a> Renderer<'a> {
pub(crate) fn new(directory: &'a Path, project: &Project) -> anyhow::Result<Self> {
let slug = format!(
"{}-{}",
project.lecture.course,
format_date(project.lecture.date)
);
let target = directory.join(&slug);
2023-10-30 19:28:17 +00:00
fs::create_dir_all(&target)?;
2023-10-28 21:38:17 +00:00
Ok(Self {
directory,
slug,
target
})
}
2023-10-30 19:28:17 +00:00
pub(crate) fn recording_mp4(&self) -> PathBuf {
self.target.join("recording.mp4")
}
2023-10-28 21:38:17 +00:00
pub(crate) fn preprocess(&self, project: &mut Project) -> anyhow::Result<()> {
assert!(!project.progress.preprocessed);
let recording_txt = self.target.join("recording.txt");
2023-10-30 16:32:21 +00:00
let mut file = File::create(&recording_txt)?;
2023-10-28 21:38:17 +00:00
for filename in &project.source.files {
2023-10-30 19:28:17 +00:00
writeln!(file, "file '{}'", self.directory.join(filename).to_string());
2023-10-28 21:38:17 +00:00
}
drop(file);
2023-10-30 19:28:17 +00:00
println!("\x1B[1m ==> Concatenating Video and Normalising Audio ...\x1B[0m");
2023-10-30 16:32:21 +00:00
let source_sample_rate =
ffprobe_audio("stream=sample_rate", &recording_txt)?.parse()?;
2023-10-30 19:28:17 +00:00
let recording_mp4 = self.recording_mp4();
2023-10-30 16:32:21 +00:00
let mut ffmpeg = Ffmpeg::new(recording_mp4.clone());
ffmpeg.add_input(FfmpegInput {
concat: true,
..FfmpegInput::new(recording_txt)
});
ffmpeg.enable_loudnorm();
ffmpeg.run()?;
let width = ffprobe_video("stream=width", &recording_mp4)?.parse()?;
let height = ffprobe_video("stream=height", &recording_mp4)?.parse()?;
let source_res = match (width, height) {
(3840, 2160) => Resolution::UHD,
(2560, 1440) => Resolution::WQHD,
(1920, 1080) => Resolution::FullHD,
(1280, 720) => Resolution::HD,
(640, 360) => Resolution::nHD,
(width, height) => bail!("Unknown resolution: {width}x{height}")
};
project.source.metadata = Some(ProjectSourceMetadata {
source_duration: ffprobe_video("format=duration", &recording_mp4)?.parse()?,
source_fps: ffprobe_video("stream=r_frame_rate", &recording_mp4)?.parse()?,
source_tbn: ffprobe_video("stream=time_base", &recording_mp4)?.parse()?,
source_res,
source_sample_rate
});
2023-10-28 21:38:17 +00:00
2023-10-30 19:28:17 +00:00
println!("\x1B[1m ==> Preparing assets ...\x1B[0m");
2023-10-30 16:55:53 +00:00
// render intro to svg then mp4
2023-10-28 21:38:17 +00:00
let intro_svg = self.target.join("intro.svg");
2023-10-30 16:32:21 +00:00
fs::write(
&intro_svg,
intro(source_res, project.lecture.date)
.to_string_pretty()
.into_bytes()
)?;
2023-10-28 21:38:17 +00:00
let intro_mp4 = self.target.join("intro.mp4");
2023-10-30 20:38:16 +00:00
svg2mp4(intro_svg, intro_mp4, INTRO_LEN)?;
2023-10-30 16:32:21 +00:00
2023-10-30 16:55:53 +00:00
// render outro to svg then mp4
2023-10-30 16:32:21 +00:00
let outro_svg = self.target.join("outro.svg");
fs::write(
&outro_svg,
outro(source_res).to_string_pretty().into_bytes()
)?;
let outro_mp4 = self.target.join("outro.mp4");
2023-10-30 20:38:16 +00:00
svg2mp4(outro_svg, outro_mp4, OUTRO_LEN)?;
2023-10-28 21:38:17 +00:00
2023-10-30 16:55:53 +00:00
// copy logo then render to png
let logo_svg = self.target.join("logo.svg");
fs::write(
&logo_svg,
include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/assets/logo.svg"))
)?;
let logo_png = self.target.join("logo.png");
svg2png(&logo_svg, &logo_png, LOGO_SIZE * source_res.width() / 1920)?;
2023-10-30 16:55:53 +00:00
// copy fastforward then render to png
let fastforward_svg = self.target.join("fastforward.svg");
fs::write(
&fastforward_svg,
include_bytes!(concat!(
env!("CARGO_MANIFEST_DIR"),
"/assets/fastforward.svg"
))
)?;
let fastforward_png = self.target.join("fastforward.png");
2023-10-30 19:28:17 +00:00
svg2png(
&fastforward_svg,
&fastforward_png,
FF_LOGO_SIZE * source_res.width() / 1920
2023-10-30 19:28:17 +00:00
)?;
2023-10-30 16:55:53 +00:00
2023-10-28 21:38:17 +00:00
Ok(())
}
2023-10-30 20:26:17 +00:00
pub(crate) fn render(&self, project: &mut Project) -> anyhow::Result<PathBuf> {
let mut output = self.target.join(format!(
"{}-{}p.mp4",
self.slug,
project
.source
.metadata
.as_ref()
.unwrap()
.source_res
.height()
2023-10-30 20:26:17 +00:00
));
let mut ffmpeg = Ffmpeg::new(output.clone());
// add all of our inputs
let intro = ffmpeg.add_input(FfmpegInput::new(self.target.join("intro.mp4")));
2023-11-02 10:07:35 +00:00
let rec_file = self.target.join("recording.mp4");
2023-10-30 20:26:17 +00:00
let outro = ffmpeg.add_input(FfmpegInput::new(self.target.join("outro.mp4")));
let logo = ffmpeg.add_input(FfmpegInput::new(self.target.join("logo.png")));
let ff = ffmpeg.add_input(FfmpegInput::new(self.target.join("fastforward.png")));
let mut part1: Cow<'static, str> = intro.into();
let mut part3: Cow<'static, str> = outro.into();
// the recording is fun because of all the fast forwarding
let mut part2 = VecDeque::new();
let mut part2_start_of_the_end = None;
let mut part2_end_of_the_start = None;
// ok so ff is fun. we will add the ff'ed section as well as the part between
// the previous ff'ed section and our new section, unless we are the first
project.source.fast.sort();
for (i, (ff_st, ff_end)) in project.source.fast.iter().rev().enumerate() {
if let Some(prev_end) = part2_end_of_the_start {
2023-11-02 10:07:35 +00:00
let recffbetween = ffmpeg.add_input(FfmpegInput {
start: Some(*ff_end),
2023-11-02 10:07:35 +00:00
duration: Some(prev_end - *ff_end),
..FfmpegInput::new(rec_file.clone())
});
part2.push_front(recffbetween.into());
} else {
part2_start_of_the_end = Some(*ff_end);
}
part2_end_of_the_start = Some(*ff_st);
2023-11-02 10:07:35 +00:00
let recffpart = ffmpeg.add_input(FfmpegInput {
start: Some(*ff_st),
duration: Some(*ff_end - *ff_st),
2023-11-02 10:07:35 +00:00
..FfmpegInput::new(rec_file.clone())
});
let recff = format!("recff{i}");
ffmpeg.add_filter(Filter::FastForward {
input: recffpart.into(),
ffinput: ff.clone().into(),
multiplier: FF_MULTIPLIER,
output: recff.clone().into()
});
part2.push_front(recff.into());
}
// if the recording was not ff'ed, perform a normal trim
2023-10-30 20:26:17 +00:00
let start = project.source.start.unwrap();
let end = project.source.end.unwrap();
let part2_last_part_duration;
if part2.is_empty() {
part2_last_part_duration = end - start;
2023-11-02 10:07:35 +00:00
let rectrim = ffmpeg.add_input(FfmpegInput {
start: Some(start),
duration: Some(part2_last_part_duration),
2023-11-02 10:07:35 +00:00
..FfmpegInput::new(rec_file.clone())
});
part2.push_back(rectrim.into());
}
// otherwise add the first and last parts separately
else {
2023-11-02 10:07:35 +00:00
let rectrimst = ffmpeg.add_input(FfmpegInput {
start: Some(start),
duration: Some(part2_end_of_the_start.unwrap() - start),
2023-11-02 10:07:35 +00:00
..FfmpegInput::new(rec_file.clone())
});
part2.push_front(rectrimst.into());
part2_last_part_duration = end - part2_start_of_the_end.unwrap();
2023-11-02 10:07:35 +00:00
let rectrimend = ffmpeg.add_input(FfmpegInput {
start: Some(part2_start_of_the_end.unwrap()),
duration: Some(part2_last_part_duration),
2023-11-02 10:07:35 +00:00
..FfmpegInput::new(rec_file.clone())
});
part2.push_back(rectrimend.into());
}
2023-10-30 20:26:17 +00:00
2023-10-30 20:46:17 +00:00
// fade out the intro
let introfade = "introfade";
ffmpeg.add_filter(Filter::Fade {
input: part1,
direction: "out",
start: INTRO_LEN - TRANSITION_LEN,
duration: TRANSITION_LEN,
output: introfade.into()
});
part1 = introfade.into();
// fade in the recording
let recfadein = "recfadein";
ffmpeg.add_filter(Filter::Fade {
input: part2.pop_front().unwrap(),
2023-10-30 20:46:17 +00:00
direction: "in",
start: Time {
seconds: 0,
micros: 0
},
duration: TRANSITION_LEN,
output: recfadein.into()
});
part2.push_front(recfadein.into());
2023-10-30 20:46:17 +00:00
// fade out the recording
let recfadeout = "recfadeout";
ffmpeg.add_filter(Filter::Fade {
input: part2.pop_back().unwrap(),
2023-10-30 20:46:17 +00:00
direction: "out",
start: part2_last_part_duration - TRANSITION_LEN,
2023-10-30 20:46:17 +00:00
duration: TRANSITION_LEN,
output: recfadeout.into()
});
part2.push_back(recfadeout.into());
2023-10-30 20:46:17 +00:00
// fade in the outro
let outrofade = "outrofade";
ffmpeg.add_filter(Filter::Fade {
input: part3,
direction: "in",
start: Time {
seconds: 0,
micros: 0
},
duration: TRANSITION_LEN,
output: outrofade.into()
});
part3 = outrofade.into();
2023-10-30 20:26:17 +00:00
// concatenate everything
let mut parts = part2;
parts.push_front(part1);
parts.push_back(part3);
2023-10-30 20:26:17 +00:00
let concat = "concat";
ffmpeg.add_filter(Filter::Concat {
inputs: parts,
2023-10-30 20:26:17 +00:00
output: concat.into()
});
// overlay the logo
2023-11-02 22:48:56 +00:00
let logoalpha = "logoalpha";
ffmpeg.add_filter(Filter::Alpha {
input: logo.into(),
alpha: 0.5,
output: logoalpha.into()
});
2023-10-30 20:26:17 +00:00
let overlay = "overlay";
ffmpeg.add_filter(Filter::Overlay {
video_input: concat.into(),
2023-11-02 22:48:56 +00:00
overlay_input: logoalpha.into(),
2023-10-30 20:26:17 +00:00
x: "main_w-overlay_w-130".into(),
y: "main_h-overlay_h-65".into(),
output: overlay.into()
});
// we're done :)
ffmpeg.set_filter_output(overlay);
ffmpeg.run()?;
Ok(output)
}
2023-10-28 21:38:17 +00:00
}