// render_video/src/render/mod.rs
// (473 lines, 12 KiB, Rust — listing header from the original file view)

#![allow(warnings)]
pub mod ffmpeg;
mod filter;
use self::{ffmpeg::FfmpegOutput, filter::Filter};
use crate::{
iotro::{intro, outro},
render::ffmpeg::{Ffmpeg, FfmpegInput},
time::{format_date, Time},
Project, ProjectSourceMetadata, Resolution, MEM_LIMIT
};
use anyhow::{bail, Context};
use camino::{Utf8Path as Path, Utf8PathBuf as PathBuf};
use rational::Rational;
use std::{
borrow::Cow,
collections::VecDeque,
fs::{self, File},
io::Write as _,
process::{Command, Stdio}
};
/// Duration of the rendered intro sequence.
const INTRO_LEN: Time = Time {
seconds: 3,
micros: 0
};
/// Duration of the rendered outro sequence.
const OUTRO_LEN: Time = Time {
seconds: 5,
micros: 0
};
/// Length of the fade in/out transitions between intro, recording and outro.
const TRANSITION_LEN: Time = Time {
seconds: 0,
micros: 200_000
};
/// Speed-up factor applied to fast-forwarded sections of the recording.
const FF_MULTIPLIER: usize = 8;
// logo sizes at full hd, will be scaled to source resolution
const FF_LOGO_SIZE: usize = 128;
const LOGO_SIZE: usize = 96;
/// Build the base [`Command`] every external tool is launched through:
/// a systemd-run scope (to cap memory at `MEM_LIMIT`) wrapping busybox ash.
fn cmd() -> Command {
    // we use systemd-run to limit the process memory
    // I tried others like ulimit, chpst or isolate, but none worked
    let mut command = Command::new("systemd-run");
    command.args(["--scope", "-q", "--expand-environment=no", "-p"]);
    command.arg(format!("MemoryMax={}", MEM_LIMIT.read().unwrap()));
    command.arg("--user");
    // we use busybox ash for having a shell that outputs commands with -x;
    // the trailing script execs "$0 $@", i.e. whatever args get appended later
    command.args(["busybox", "ash", "-exuo", "pipefail", "-c"]);
    command.arg("exec \"$0\" \"${@}\"");
    command
}
/// Build a memory-limited `ffprobe` invocation that prints bare values
/// (no section wrappers, no keys) and suppresses everything but errors.
fn ffprobe() -> Command {
    let mut probe = cmd();
    probe.args([
        "ffprobe",
        "-v",
        "error",
        "-of",
        "default=noprint_wrappers=1:nokey=1"
    ]);
    probe
}
/// Run `cmd`, passing stderr through to the terminal, and return its
/// trimmed stdout. Fails if the command exits unsuccessfully or emits
/// non-UTF-8 output.
fn read_output(cmd: &mut Command) -> anyhow::Result<String> {
    let output = cmd.stderr(Stdio::inherit()).output()?;
    if !output.status.success() {
        bail!(
            "Executed command failed with exit status {:?}",
            output.status.code()
        );
    }
    let stdout = String::from_utf8(output.stdout)
        .context("Command returned non-utf8 output")?;
    Ok(stdout.trim().to_owned())
}
/// Probe a single entry (e.g. `"stream=width"`) from the first video
/// stream of `input` and return it as a string.
fn ffprobe_video(query: &str, input: &Path) -> anyhow::Result<String> {
    let mut probe = ffprobe();
    probe
        .args(["-select_streams", "v:0", "-show_entries"])
        .arg(query)
        .arg(input);
    read_output(&mut probe)
}
/// Probe a single entry (e.g. `"stream=sample_rate"`) from the first audio
/// stream of a concat-demuxer list file and return it as a string.
fn ffprobe_audio(query: &str, concat_input: &Path) -> anyhow::Result<String> {
    let mut probe = ffprobe();
    probe
        .args(["-select_streams", "a:0", "-show_entries"])
        .arg(query)
        .args(["-safe", "0", "-f", "concat"])
        .arg(concat_input);
    read_output(&mut probe)
}
/// Paths and naming scheme for rendering one recorded lecture project.
pub(crate) struct Renderer<'a> {
/// The directory with all the sources.
directory: &'a Path,
/// The slug (i.e. 23ws-malo2-231016).
slug: String,
/// The target directory.
target: PathBuf
}
/// Render an SVG into an mp4 of the given `duration` by looping the image,
/// matching the recording's fps/time base from `meta` and generating a
/// silent audio track so the result can be concatenated with the recording.
fn svg2mp4(
    meta: &ProjectSourceMetadata,
    svg: PathBuf,
    mp4: PathBuf,
    duration: Time
) -> anyhow::Result<()> {
    let out = FfmpegOutput {
        fps: None,
        duration: Some(duration),
        time_base: Some(meta.source_tbn),
        fps_mode_vfr: true,
        faststart: false,
        path: mp4
    };
    let mut encoder = Ffmpeg::new(out);
    // loop the single SVG frame at the source frame rate
    encoder.add_input(FfmpegInput {
        loop_input: true,
        fps: Some(meta.source_fps),
        ..FfmpegInput::new(svg)
    });
    // attach silence as the audio track of stream "0"
    encoder.add_filter(Filter::GenerateSilence {
        video: "0".into(),
        output: "out".into()
    });
    encoder.set_filter_output("out");
    encoder.run()
}
/// Rasterise `svg` into a square `size`×`size` PNG at `png` using inkscape.
fn svg2png(svg: &Path, png: &Path, size: usize) -> anyhow::Result<()> {
    let mut inkscape = cmd();
    let dimension = size.to_string();
    inkscape
        .arg("inkscape")
        .args(["-w", &dimension, "-h", &dimension])
        .arg(svg)
        .arg("-o")
        .arg(png);
    let status = inkscape.status()?;
    if !status.success() {
        bail!("inkscape failed with exit code {:?}", status.code())
    }
    Ok(())
}
impl<'a> Renderer<'a> {
    /// Create a renderer for `project`. Output is written to
    /// `<directory>/<slug>` where the slug is `<course>-<date>`; the target
    /// directory is created if it does not exist yet.
    pub(crate) fn new(directory: &'a Path, project: &Project) -> anyhow::Result<Self> {
        let slug = format!(
            "{}-{}",
            project.lecture.course,
            format_date(project.lecture.date)
        );
        let target = directory.join(&slug);
        fs::create_dir_all(&target)?;
        Ok(Self {
            directory,
            slug,
            target
        })
    }

    /// Path of the concatenated, loudness-normalised recording.
    pub(crate) fn recording_mp4(&self) -> PathBuf {
        self.target.join("recording.mp4")
    }

    /// Concatenate the source files into one loudness-normalised recording,
    /// probe its metadata into `project.source.metadata`, and render the
    /// intro/outro mp4s and the logo/fast-forward pngs used by [`Self::render`].
    pub(crate) fn preprocess(&self, project: &mut Project) -> anyhow::Result<()> {
        assert!(!project.progress.preprocessed);

        // write the ffmpeg concat-demuxer list of all source files
        let recording_txt = self.target.join("recording.txt");
        let mut file = File::create(&recording_txt)?;
        for filename in &project.source.files {
            // propagate write errors: the io::Result was previously discarded,
            // so a failed write could silently yield a truncated concat list
            writeln!(file, "file '{}'", self.directory.join(filename).to_string())?;
        }
        drop(file);

        println!("\x1B[1m ==> Concatenating Video and Normalising Audio ...\x1B[0m");
        let source_sample_rate =
            ffprobe_audio("stream=sample_rate", &recording_txt)?.parse()?;
        let recording_mp4 = self.recording_mp4();
        let mut ffmpeg = Ffmpeg::new(FfmpegOutput::new(recording_mp4.clone()));
        ffmpeg.add_input(FfmpegInput {
            concat: true,
            ..FfmpegInput::new(recording_txt)
        });
        ffmpeg.enable_loudnorm();
        ffmpeg.run()?;

        // probe the normalised recording; only a fixed set of resolutions
        // is supported since the assets are scaled relative to them
        let width = ffprobe_video("stream=width", &recording_mp4)?.parse()?;
        let height = ffprobe_video("stream=height", &recording_mp4)?.parse()?;
        let source_res = match (width, height) {
            (3840, 2160) => Resolution::UHD,
            (2560, 1440) => Resolution::WQHD,
            (1920, 1080) => Resolution::FullHD,
            (1280, 720) => Resolution::HD,
            (640, 360) => Resolution::nHD,
            (width, height) => bail!("Unknown resolution: {width}x{height}")
        };
        project.source.metadata = Some(ProjectSourceMetadata {
            source_duration: ffprobe_video("format=duration", &recording_mp4)?.parse()?,
            source_fps: ffprobe_video("stream=r_frame_rate", &recording_mp4)?.parse()?,
            source_tbn: ffprobe_video("stream=time_base", &recording_mp4)?.parse()?,
            source_res,
            source_sample_rate
        });
        let metadata = project.source.metadata.as_ref().unwrap();

        println!("\x1B[1m ==> Preparing assets ...\x1B[0m");
        // render intro to svg then mp4
        let intro_svg = self.target.join("intro.svg");
        fs::write(
            &intro_svg,
            intro(source_res, project.lecture.date)
                .to_string_pretty()
                .into_bytes()
        )?;
        let intro_mp4 = self.target.join("intro.mp4");
        svg2mp4(metadata, intro_svg, intro_mp4, INTRO_LEN)?;
        // render outro to svg then mp4
        let outro_svg = self.target.join("outro.svg");
        fs::write(
            &outro_svg,
            outro(source_res).to_string_pretty().into_bytes()
        )?;
        let outro_mp4 = self.target.join("outro.mp4");
        svg2mp4(metadata, outro_svg, outro_mp4, OUTRO_LEN)?;
        // copy logo then render to png, scaled from its full-hd size
        let logo_svg = self.target.join("logo.svg");
        fs::write(
            &logo_svg,
            include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/assets/logo.svg"))
        )?;
        let logo_png = self.target.join("logo.png");
        svg2png(&logo_svg, &logo_png, LOGO_SIZE * source_res.width() / 1920)?;
        // copy fastforward then render to png
        let fastforward_svg = self.target.join("fastforward.svg");
        fs::write(
            &fastforward_svg,
            include_bytes!(concat!(
                env!("CARGO_MANIFEST_DIR"),
                "/assets/fastforward.svg"
            ))
        )?;
        let fastforward_png = self.target.join("fastforward.png");
        svg2png(
            &fastforward_svg,
            &fastforward_png,
            FF_LOGO_SIZE * source_res.width() / 1920
        )?;
        Ok(())
    }

    /// Path of the rendered video at resolution `res`.
    fn video_mp4_res(&self, res: Resolution) -> PathBuf {
        self.target
            .join(format!("{}-{}p.mp4", self.slug, res.height()))
    }

    /// Path of the rendered video at the source resolution.
    pub(crate) fn video_mp4(&self, project: &Project) -> PathBuf {
        self.video_mp4_res(project.source.metadata.as_ref().unwrap().source_res)
    }

    /// Render the final video: the trimmed recording (with its fast-forwarded
    /// sections sped up by `FF_MULTIPLIER`) framed by a faded intro and outro,
    /// with the half-transparent logo overlaid. Returns the output path.
    pub(crate) fn render(&self, project: &mut Project) -> anyhow::Result<PathBuf> {
        let output = self.video_mp4(project);
        let mut ffmpeg = Ffmpeg::new(FfmpegOutput::new(output.clone()));
        // add all of our inputs
        let intro = ffmpeg.add_input(FfmpegInput::new(self.target.join("intro.mp4")));
        let rec_file = self.target.join("recording.mp4");
        let outro = ffmpeg.add_input(FfmpegInput::new(self.target.join("outro.mp4")));
        let logo = ffmpeg.add_input(FfmpegInput::new(self.target.join("logo.png")));
        let ff = ffmpeg.add_input(FfmpegInput::new(self.target.join("fastforward.png")));
        let mut part1: Cow<'static, str> = intro.into();
        let mut part3: Cow<'static, str> = outro.into();
        // the recording is fun because of all the fast forwarding
        let mut part2 = VecDeque::new();
        // point where the final (post-last-ff) piece of the recording starts
        let mut part2_start_of_the_end = None;
        // start of the earliest ff'ed section processed so far
        let mut part2_end_of_the_start = None;
        // ok so ff is fun. we will add the ff'ed section as well as the part between
        // the previous ff'ed section and our new section, unless we are the first.
        // iterating in reverse means "previous" is the chronologically later section.
        project.source.fast.sort();
        for (i, (ff_st, ff_end)) in project.source.fast.iter().rev().enumerate() {
            if let Some(prev_end) = part2_end_of_the_start {
                // normal-speed gap between this ff section and the next one
                let recffbetween = ffmpeg.add_input(FfmpegInput {
                    start: Some(*ff_end),
                    duration: Some(prev_end - *ff_end),
                    ..FfmpegInput::new(rec_file.clone())
                });
                part2.push_front(recffbetween.into());
            } else {
                part2_start_of_the_end = Some(*ff_end);
            }
            part2_end_of_the_start = Some(*ff_st);
            // the ff'ed section itself, sped up and badged with the ff logo
            let recffpart = ffmpeg.add_input(FfmpegInput {
                start: Some(*ff_st),
                duration: Some(*ff_end - *ff_st),
                ..FfmpegInput::new(rec_file.clone())
            });
            let recff = format!("recff{i}");
            ffmpeg.add_filter(Filter::FastForward {
                input: recffpart.into(),
                ffinput: ff.clone().into(),
                multiplier: FF_MULTIPLIER,
                output: recff.clone().into()
            });
            part2.push_front(recff.into());
        }
        // if the recording was not ff'ed, perform a normal trim
        let start = project.source.start.unwrap();
        let end = project.source.end.unwrap();
        let part2_last_part_duration;
        if part2.is_empty() {
            part2_last_part_duration = end - start;
            let rectrim = ffmpeg.add_input(FfmpegInput {
                start: Some(start),
                duration: Some(part2_last_part_duration),
                ..FfmpegInput::new(rec_file.clone())
            });
            part2.push_back(rectrim.into());
        }
        // otherwise add the first and last parts separately
        else {
            let rectrimst = ffmpeg.add_input(FfmpegInput {
                start: Some(start),
                duration: Some(part2_end_of_the_start.unwrap() - start),
                ..FfmpegInput::new(rec_file.clone())
            });
            part2.push_front(rectrimst.into());
            part2_last_part_duration = end - part2_start_of_the_end.unwrap();
            let rectrimend = ffmpeg.add_input(FfmpegInput {
                start: Some(part2_start_of_the_end.unwrap()),
                duration: Some(part2_last_part_duration),
                ..FfmpegInput::new(rec_file.clone())
            });
            part2.push_back(rectrimend.into());
        }
        // fade out the intro
        let introfade = "introfade";
        ffmpeg.add_filter(Filter::Fade {
            input: part1,
            direction: "out",
            start: INTRO_LEN - TRANSITION_LEN,
            duration: TRANSITION_LEN,
            output: introfade.into()
        });
        part1 = introfade.into();
        // fade in the recording
        let recfadein = "recfadein";
        ffmpeg.add_filter(Filter::Fade {
            input: part2.pop_front().unwrap(),
            direction: "in",
            start: Time {
                seconds: 0,
                micros: 0
            },
            duration: TRANSITION_LEN,
            output: recfadein.into()
        });
        part2.push_front(recfadein.into());
        // fade out the recording
        let recfadeout = "recfadeout";
        ffmpeg.add_filter(Filter::Fade {
            input: part2.pop_back().unwrap(),
            direction: "out",
            start: part2_last_part_duration - TRANSITION_LEN,
            duration: TRANSITION_LEN,
            output: recfadeout.into()
        });
        part2.push_back(recfadeout.into());
        // fade in the outro
        let outrofade = "outrofade";
        ffmpeg.add_filter(Filter::Fade {
            input: part3,
            direction: "in",
            start: Time {
                seconds: 0,
                micros: 0
            },
            duration: TRANSITION_LEN,
            output: outrofade.into()
        });
        part3 = outrofade.into();
        // concatenate everything
        let mut parts = part2;
        parts.push_front(part1);
        parts.push_back(part3);
        let concat = "concat";
        ffmpeg.add_filter(Filter::Concat {
            inputs: parts,
            output: concat.into()
        });
        // overlay the logo at half opacity in the bottom-right corner
        let logoalpha = "logoalpha";
        ffmpeg.add_filter(Filter::Alpha {
            input: logo.into(),
            alpha: 0.5,
            output: logoalpha.into()
        });
        let overlay = "overlay";
        ffmpeg.add_filter(Filter::Overlay {
            video_input: concat.into(),
            overlay_input: logoalpha.into(),
            x: "main_w-overlay_w-130".into(),
            y: "main_h-overlay_h-65".into(),
            output: overlay.into()
        });
        // we're done :)
        ffmpeg.set_filter_output(overlay);
        // triple bitrate for the master render; rescaled variants use 1x
        ffmpeg.set_video_bitrate(
            project
                .source
                .metadata
                .as_ref()
                .unwrap()
                .source_res
                .bitrate() * 3
        );
        ffmpeg.run()?;
        Ok(output)
    }

    /// Rescale the rendered master video down to `res`, with faststart
    /// enabled for web playback. Returns the output path.
    pub fn rescale(&self, res: Resolution, project: &Project) -> anyhow::Result<PathBuf> {
        let input = self.video_mp4(project);
        let output = self.video_mp4_res(res);
        println!("\x1B[1m ==> Rescaling to {}p\x1B[0m", res.height());
        let mut ffmpeg =
            Ffmpeg::new(FfmpegOutput::new(output.clone()).enable_faststart());
        ffmpeg.add_input(FfmpegInput::new(input));
        ffmpeg.rescale_video(res);
        ffmpeg.set_video_bitrate(res.bitrate());
        ffmpeg.run()?;
        Ok(output)
    }
}