pub mod ffmpeg; mod filter; use self::{ ffmpeg::{FfmpegOutput, FfmpegOutputFormat}, filter::Filter }; use crate::{ iotro::{intro, outro}, project::{Project, ProjectLecture, ProjectSourceMetadata, Resolution}, question::Question, render::ffmpeg::{Ffmpeg, FfmpegInput}, time::{format_date, format_time, Time} }; use anyhow::{bail, Context}; use camino::{Utf8Path as Path, Utf8PathBuf as PathBuf}; use console::style; use std::{ borrow::Cow, collections::VecDeque, fs::{self, File}, io::Write as _, process::{Command, Stdio} }; const INTRO_LEN: Time = Time { seconds: 3, micros: 0 }; const OUTRO_LEN: Time = Time { seconds: 5, micros: 0 }; const TRANSITION_LEN: Time = Time { seconds: 0, micros: 200_000 }; const QUESTION_FADE_LEN: Time = Time { seconds: 0, micros: 400_000 }; const FF_MULTIPLIER: usize = 8; // logo sizes at full hd, will be scaled to source resolution const FF_LOGO_SIZE: usize = 128; const LOGO_SIZE: usize = 96; fn cmd() -> Command { #[cfg(feature = "mem_limit")] let mut cmd = { // we use systemd-run to limit the process memory // I tried others like ulimit, chpst or isolate, but none worked let mut cmd = Command::new("systemd-run"); cmd.arg("--scope") .arg("-q") .arg("--expand-environment=no") .arg("-p") .arg(format!("MemoryMax={}", crate::MEM_LIMIT.read().unwrap())) .arg("--user"); // we use busybox ash for having a shell that outputs commands with -x cmd.arg("busybox"); cmd }; #[cfg(not(feature = "mem_limit"))] let mut cmd = Command::new("busybox"); cmd.arg("ash") .arg("-exuo") .arg("pipefail") .arg("-c") .arg("exec \"$0\" \"${@}\""); cmd } fn ffprobe() -> Command { let mut cmd = cmd(); cmd.arg("ffprobe") .arg("-v") .arg("error") .arg("-of") .arg("default=noprint_wrappers=1:nokey=1"); cmd } fn read_output(cmd: &mut Command) -> anyhow::Result { let out = cmd.stderr(Stdio::inherit()).output()?; if !out.status.success() { bail!( "Executed command failed with exit status {:?}", out.status.code() ); } String::from_utf8(out.stdout) .context("Command returned non-utf8 output") .map(|str| str.trim().into()) } fn ffprobe_video(query: &str, input: &Path) -> anyhow::Result { read_output( ffprobe() .arg("-select_streams") .arg("v:0") .arg("-show_entries") .arg(query) .arg(input) ) } fn ffprobe_audio(query: &str, concat_input: &Path) -> anyhow::Result { read_output( ffprobe() .arg("-select_streams") .arg("a:0") .arg("-show_entries") .arg(query) .arg("-safe") .arg("0") .arg("-f") .arg("concat") .arg(concat_input) ) } pub(crate) struct Renderer<'a> { /// The directory with all the sources. directory: &'a Path, /// The slug (i.e. 23ws-malo2-231016). slug: String, /// The target directory. target: PathBuf, /// The format to use for intermediate products format: FfmpegOutputFormat } fn svg2mkv( meta: &ProjectSourceMetadata, svg: PathBuf, mkv: PathBuf, format: FfmpegOutputFormat, duration: Time ) -> anyhow::Result<()> { let mut ffmpeg = Ffmpeg::new(FfmpegOutput { duration: Some(duration), time_base: Some(meta.source_tbn), fps_mode_vfr: true, ..FfmpegOutput::new(format, mkv) }); ffmpeg.add_input(FfmpegInput { loop_input: true, fps: Some(meta.source_fps), ..FfmpegInput::new(svg) }); ffmpeg.add_filter(Filter::GenerateSilence { video: "0".into(), output: "out".into() }); ffmpeg.set_filter_output("out"); ffmpeg.run() } fn svg2png(svg: &Path, png: &Path, width: usize, height: usize) -> anyhow::Result<()> { let mut cmd = cmd(); cmd.arg("inkscape") .arg("-w") .arg(width.to_string()) .arg("-h") .arg(height.to_string()); cmd.arg(svg).arg("-o").arg(png); let status = cmd.status()?; if status.success() { Ok(()) } else { bail!("inkscape failed with exit code {:?}", status.code()) } } impl<'a> Renderer<'a> { pub(crate) fn new(directory: &'a Path, project: &Project) -> anyhow::Result { let slug = format!( "{}-{}", project.lecture.course, format_date(project.lecture.date) ); let target = directory.join(&slug); fs::create_dir_all(&target)?; let first: PathBuf = directory.join( project .source .files .first() .context("No source files present")? ); let height: u32 = ffprobe_video("stream=height", &first)? .split('\n') .next() .unwrap() .parse()?; let format = if height < 1080 { FfmpegOutputFormat::AvcAac } else { FfmpegOutputFormat::Av1Flac }; Ok(Self { directory, slug, target, format }) } pub(crate) fn recording_mkv(&self) -> PathBuf { self.target.join("recording.mkv") } fn intro_mkv(&self) -> PathBuf { self.target.join("intro.mkv") } fn outro_mkv(&self) -> PathBuf { self.target.join("outro.mkv") } fn question_svg(&self, q_idx: usize) -> PathBuf { self.target.join(format!("question{q_idx}.svg")) } fn question_png(&self, q_idx: usize) -> PathBuf { self.target.join(format!("question{q_idx}.png")) } pub(crate) fn preprocess(&self, project: &mut Project) -> anyhow::Result<()> { assert!(!project.progress.preprocessed); let recording_txt = self.target.join("recording.txt"); let mut file = File::create(&recording_txt)?; for filename in &project.source.files { writeln!(file, "file '{}'", self.directory.join(filename))?; } drop(file); println!(); println!( " {} {}", style("==>").bold().cyan(), style("Concatenating Video and Normalising Audio ...").bold() ); let source_sample_rate = ffprobe_audio("stream=sample_rate", &recording_txt)?.parse()?; let recording_mkv = self.recording_mkv(); let mut ffmpeg = Ffmpeg::new(FfmpegOutput::new( FfmpegOutputFormat::Av1Flac, recording_mkv.clone() )); ffmpeg.add_input(FfmpegInput { concat: true, ..FfmpegInput::new(recording_txt) }); ffmpeg.enable_loudnorm(project.source.stereo); ffmpeg.run()?; let width = ffprobe_video("stream=width", &recording_mkv)?.parse()?; let height = ffprobe_video("stream=height", &recording_mkv)?.parse()?; let source_res = match (width, height) { (3840, 2160) => Resolution::UHD, (2560, 1440) => Resolution::WQHD, (1920, 1080) => Resolution::FullHD, (1280, 720) => Resolution::HD, (640, 360) => Resolution::nHD, (width, height) => bail!("Unknown resolution: {width}x{height}") }; project.source.metadata = Some(ProjectSourceMetadata { source_duration: ffprobe_video("format=duration", &recording_mkv)?.parse()?, source_fps: ffprobe_video("stream=r_frame_rate", &recording_mkv)?.parse()?, source_tbn: ffprobe_video("stream=time_base", &recording_mkv)?.parse()?, source_res, source_sample_rate }); Ok(()) } /// Prepare assets like intro, outro and questions. pub(crate) fn render_assets(&self, project: &Project) -> anyhow::Result<()> { let metadata = project.source.metadata.as_ref().unwrap(); println!(); println!( " {} {}", style("==>").bold().cyan(), style("Preparing assets ...").bold() ); // render intro to svg then mp4 let intro_svg = self.target.join("intro.svg"); fs::write( &intro_svg, intro(metadata.source_res, &project.lecture) .to_string_pretty() .into_bytes() )?; let intro_mkv = self.intro_mkv(); svg2mkv(metadata, intro_svg, intro_mkv, self.format, INTRO_LEN)?; // render outro to svg then mp4 let outro_svg = self.target.join("outro.svg"); fs::write( &outro_svg, outro(&project.lecture.lang, metadata.source_res) .to_string_pretty() .into_bytes() )?; let outro_mkv = self.outro_mkv(); svg2mkv(metadata, outro_svg, outro_mkv, self.format, OUTRO_LEN)?; // copy logo then render to png let logo_svg = self.target.join("logo.svg"); fs::write( &logo_svg, include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/assets/logo.svg")) )?; let logo_png = self.target.join("logo.png"); let logo_size = LOGO_SIZE * metadata.source_res.width() / 1920; svg2png(&logo_svg, &logo_png, logo_size, logo_size)?; // copy fastforward then render to png let fastforward_svg = self.target.join("fastforward.svg"); fs::write( &fastforward_svg, include_bytes!(concat!( env!("CARGO_MANIFEST_DIR"), "/assets/fastforward.svg" )) )?; let fastforward_png = self.target.join("fastforward.png"); let ff_logo_size = FF_LOGO_SIZE * metadata.source_res.width() / 1920; svg2png( &fastforward_svg, &fastforward_png, ff_logo_size, ff_logo_size )?; // write questions then render to png for (q_idx, (_, _, q_text)) in project.source.questions.iter().enumerate() { let q = Question::new(metadata.source_res, &project.lecture.lang, q_text) .finish() .to_string_pretty() .into_bytes(); let q_svg = self.question_svg(q_idx); let q_png = self.question_png(q_idx); fs::write(&q_svg, q)?; svg2png( &q_svg, &q_png, metadata.source_res.width(), metadata.source_res.height() )?; } Ok(()) } /// Get the video file for a specific resolution, completely finished. fn video_file_res(&self, res: Resolution) -> PathBuf { let extension = match res.format() { FfmpegOutputFormat::Av1Flac => "mkv", FfmpegOutputFormat::Av1Opus => "webm", FfmpegOutputFormat::AvcAac => "mp4" }; self.target .join(format!("{}-{}p.{extension}", self.slug, res.height())) } /// Get the video file directly outputed to further transcode. pub(crate) fn video_file_output(&self) -> PathBuf { self.target.join(format!("{}.mkv", self.slug)) } pub(crate) fn render(&self, project: &mut Project) -> anyhow::Result { let source_res = project.source.metadata.as_ref().unwrap().source_res; let output = self.video_file_output(); let mut ffmpeg = Ffmpeg::new(FfmpegOutput { video_bitrate: Some(source_res.bitrate() * 3), ..FfmpegOutput::new(self.format, output.clone()) }); // add all of our inputs let intro = ffmpeg.add_input(FfmpegInput::new(self.intro_mkv())); let rec_file = self.recording_mkv(); let outro = ffmpeg.add_input(FfmpegInput::new(self.outro_mkv())); let logo = ffmpeg.add_input(FfmpegInput::new(self.target.join("logo.png"))); let ff = ffmpeg.add_input(FfmpegInput::new(self.target.join("fastforward.png"))); let mut part1: Cow<'static, str> = intro.into(); let mut part3: Cow<'static, str> = outro.into(); // the recording is fun because of all the fast forwarding let mut part2 = VecDeque::>::new(); let mut part2_ts = VecDeque::new(); let mut part2_start_of_the_end = None; let mut part2_end_of_the_start = None; // ok so ff is fun. we will add the ff'ed section as well as the part between // the previous ff'ed section and our new section, unless we are the first. project.source.fast.sort(); for (i, (ff_st, ff_end)) in project.source.fast.iter().rev().enumerate() { if let Some(prev_end) = part2_end_of_the_start { let duration = prev_end - *ff_end; let recffbetween = ffmpeg.add_input(FfmpegInput { start: Some(*ff_end), duration: Some(duration), ..FfmpegInput::new(rec_file.clone()) }); part2.push_front(recffbetween.into()); part2_ts.push_front(Some((*ff_end, duration))); } else { part2_start_of_the_end = Some(*ff_end); } part2_end_of_the_start = Some(*ff_st); let recffpart = ffmpeg.add_input(FfmpegInput { start: Some(*ff_st), duration: Some(*ff_end - *ff_st), ..FfmpegInput::new(rec_file.clone()) }); let recff = format!("recff{i}"); ffmpeg.add_filter(Filter::FastForward { input: recffpart.into(), ffinput: ff.clone().into(), multiplier: FF_MULTIPLIER, output: recff.clone().into() }); part2.push_front(recff.into()); part2_ts.push_front(None); } // if the recording was not ff'ed, perform a normal trim let start = project.source.start.unwrap(); let end = project.source.end.unwrap(); let part2_last_part_duration; if part2.is_empty() { part2_last_part_duration = end - start; let rectrim = ffmpeg.add_input(FfmpegInput { start: Some(start), duration: Some(part2_last_part_duration), ..FfmpegInput::new(rec_file.clone()) }); part2.push_back(rectrim.into()); part2_ts.push_back(Some((start, part2_last_part_duration))); } // otherwise add the first and last parts separately else { let duration = part2_end_of_the_start.unwrap() - start; let rectrimst = ffmpeg.add_input(FfmpegInput { start: Some(start), duration: Some(duration), ..FfmpegInput::new(rec_file.clone()) }); part2.push_front(rectrimst.into()); part2_ts.push_front(Some((start, duration))); let part2_start_of_the_end = part2_start_of_the_end.unwrap(); part2_last_part_duration = end - part2_start_of_the_end; let rectrimend = ffmpeg.add_input(FfmpegInput { start: Some(part2_start_of_the_end), duration: Some(part2_last_part_duration), ..FfmpegInput::new(rec_file.clone()) }); part2.push_back(rectrimend.into()); part2_ts.push_back(Some((part2_start_of_the_end, part2_last_part_duration))); } // ok now we have a bunch of parts and a bunch of questions that want to get // overlayed over those parts. project.source.questions.sort(); let mut q_idx = 0; for (i, ts) in part2_ts.iter().enumerate() { let Some((start, duration)) = ts else { continue; }; loop { if q_idx >= project.source.questions.len() { break; } let (q_start, q_end, _) = &project.source.questions[q_idx]; if q_start < start { bail!( "Question starting at {} did not fit into the video", format_time(*q_start) ); } if q_start >= start && *q_end <= *start + *duration { // add the question as input to ffmpeg let q_inp = ffmpeg.add_input(FfmpegInput { loop_input: true, fps: Some(project.source.metadata.as_ref().unwrap().source_fps), duration: Some(*q_end - *q_start), ..FfmpegInput::new(self.question_png(q_idx)) }); // fade in the question let q_fadein = format!("q{q_idx}fin"); ffmpeg.add_filter(Filter::FadeAlpha { input: q_inp.into(), direction: "in", start: Time { seconds: 0, micros: 0 }, duration: QUESTION_FADE_LEN, output: q_fadein.clone().into() }); // fade out the question let q_fadeout = format!("q{q_idx}fout"); ffmpeg.add_filter(Filter::FadeAlpha { input: q_fadein.into(), direction: "out", start: *q_end - *q_start - QUESTION_FADE_LEN, duration: QUESTION_FADE_LEN, output: q_fadeout.clone().into() }); // move the question to the correct timestamp let q_pts = format!("q{q_idx}pts"); ffmpeg.add_filter(Filter::VideoOffset { input: q_fadeout.into(), seconds: *q_start - *start, output: q_pts.clone().into() }); // overlay the part in question let q_overlay = format!("q{q_idx}o"); ffmpeg.add_filter(Filter::Overlay { video_input: part2[i].clone(), overlay_input: q_pts.into(), x: "0".into(), y: "0".into(), repeatlast: false, output: q_overlay.clone().into() }); part2[i] = q_overlay.into(); q_idx += 1; continue; } break; } } if q_idx < project.source.questions.len() { bail!( "Question starting at {} did not fit into the video before it was over", format_time(project.source.questions[q_idx].0) ); } // fade out the intro let introfade = "introfade"; ffmpeg.add_filter(Filter::Fade { input: part1, direction: "out", start: INTRO_LEN - TRANSITION_LEN, duration: TRANSITION_LEN, output: introfade.into() }); part1 = introfade.into(); // fade in the recording let recfadein = "recfadein"; ffmpeg.add_filter(Filter::Fade { input: part2.pop_front().unwrap(), direction: "in", start: Time { seconds: 0, micros: 0 }, duration: TRANSITION_LEN, output: recfadein.into() }); part2.push_front(recfadein.into()); // fade out the recording let recfadeout = "recfadeout"; ffmpeg.add_filter(Filter::Fade { input: part2.pop_back().unwrap(), direction: "out", start: part2_last_part_duration - TRANSITION_LEN, duration: TRANSITION_LEN, output: recfadeout.into() }); part2.push_back(recfadeout.into()); // fade in the outro let outrofade = "outrofade"; ffmpeg.add_filter(Filter::Fade { input: part3, direction: "in", start: Time { seconds: 0, micros: 0 }, duration: TRANSITION_LEN, output: outrofade.into() }); part3 = outrofade.into(); // concatenate everything let mut parts = part2; parts.push_front(part1); parts.push_back(part3); let concat = "concat"; ffmpeg.add_filter(Filter::Concat { inputs: parts, output: concat.into() }); // overlay the logo let logoalpha = "logoalpha"; ffmpeg.add_filter(Filter::Alpha { input: logo.into(), alpha: 0.5, output: logoalpha.into() }); let overlay = "overlay"; let overlay_off_x = 130 * source_res.width() / 3840; let overlay_off_y = 65 * source_res.height() / 2160; ffmpeg.add_filter(Filter::Overlay { video_input: concat.into(), overlay_input: logoalpha.into(), x: format!("main_w-overlay_w-{overlay_off_x}").into(), y: format!("main_h-overlay_h-{overlay_off_y}").into(), repeatlast: true, output: overlay.into() }); // we're done :) ffmpeg.set_filter_output(overlay); ffmpeg.run()?; Ok(output) } pub fn rescale( &self, lecture: &ProjectLecture, res: Resolution ) -> anyhow::Result { let input = self.video_file_output(); let output = self.video_file_res(res); println!(); println!( " {} {}", style("==>").bold().cyan(), style(format!("Rescaling to {}p", res.height())).bold() ); let mut ffmpeg = Ffmpeg::new(FfmpegOutput { video_bitrate: Some(res.bitrate()), title: Some(format!( "{} {} {}", lecture.label, lecture.lang.from, (lecture.lang.format_date_long)(lecture.date) )), author: Some(lecture.docent.clone()), album: Some(lecture.course.clone()), year: Some(lecture.date.year.to_string()), comment: Some(lecture.lang.video_created_by_us.into()), language: Some(lecture.lang.lang.into()), ..FfmpegOutput::new(res.format(), output.clone()).enable_faststart() }); ffmpeg.add_input(FfmpegInput::new(input)); ffmpeg.rescale_video(res); ffmpeg.run()?; Ok(output) } }