// render_video/src/main.rs
#![allow(clippy::manual_range_contains)]
#![warn(clippy::unreadable_literal, rust_2018_idioms)]
#![forbid(elided_lifetimes_in_paths, unsafe_code)]
mod iotro;
mod render;
mod time;
use crate::{
render::{ffmpeg::FfmpegOutputFormat, Renderer},
time::{parse_date, parse_time, Date, Time}
};
use camino::Utf8PathBuf as PathBuf;
use clap::Parser;
use iotro::Language;
use rational::Rational;
use serde::{Deserialize, Serialize};
use serde_with::{serde_as, DisplayFromStr};
#[cfg(feature = "mem_limit")]
use std::sync::RwLock;
use std::{
collections::BTreeSet,
fmt::Display,
fs,
io::{self, BufRead as _, Write},
str::FromStr
};
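/// Memory limit passed on to external tools such as ffmpeg; populated from the
/// `--mem-limit` CLI argument when the `mem_limit` feature is enabled.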
#[cfg(feature = "mem_limit")]
static MEM_LIMIT: RwLock<String> = RwLock::new(String::new());
#[derive(Debug, Parser)]
struct Args {
/// The root directory of the project. It should contain the raw video file(s).
#[clap(short = 'C', long, default_value = ".")]
directory: PathBuf,
/// The slug of the course, e.g. "23ws-malo2".
#[clap(short = 'c', long, default_value = "23ws-malo2")]
course: String,
/// The label of the course, e.g. "Mathematische Logik II".
#[clap(short, long, default_value = "Mathematische Logik II")]
label: String,
/// The docent of the course, e.g. "Prof. E. Grädel".
#[clap(short, long, default_value = "Prof. E. Grädel")]
docent: String,
/// The language of the lecture. Used for the intro and outro frame.
#[clap(short = 'L', long, default_value = "de")]
lang: Language<'static>,
#[cfg(feature = "mem_limit")]
/// The memory limit for external tools like ffmpeg.
#[clap(short, long, default_value = "12G")]
mem_limit: String,
/// Transcode the final video down to this minimum resolution. Every supported resolution
/// between `transcode_start` (or the source resolution, if lower) and this value is produced.
#[clap(short, long)]
transcode: Option<Resolution>,
/// Transcode starts at this resolution, or the source resolution, whichever is lower.
#[clap(short = 'T', long, default_value = "1440p")]
transcode_start: Resolution,
/// Treat the audio as stereo. By default, only one channel of the input is used,
/// assuming the other channel is either a backup or identical to the one used.
#[clap(short, long, default_value = "false")]
stereo: bool
}
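// Generates the `Resolution` enum plus helpers for width, height, bitrate, output
// format and `FromStr` parsing ("360p", "720p", ...) from the table below.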
macro_rules! resolutions {
($($res:ident: $width:literal x $height:literal at $bitrate:literal in $format:ident),+) => {
#[allow(non_camel_case_types, clippy::upper_case_acronyms)]
#[derive(Clone, Copy, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Serialize)]
enum Resolution {
$(
#[doc = concat!(stringify!($width), "x", stringify!($height))]
$res
),+
}
const NUM_RESOLUTIONS: usize = {
let mut num = 0;
$(num += 1; stringify!($res);)+
num
};
impl Resolution {
fn values() -> [Self; NUM_RESOLUTIONS] {
[$(Self::$res),+]
}
fn width(self) -> usize {
match self {
$(Self::$res => $width),+
}
}
fn height(self) -> usize {
match self {
$(Self::$res => $height),+
}
}
fn bitrate(self) -> u64 {
match self {
$(Self::$res => $bitrate),+
}
}
fn format(self) -> FfmpegOutputFormat {
match self {
$(Self::$res => FfmpegOutputFormat::$format),+
}
}
}
impl FromStr for Resolution {
type Err = anyhow::Error;
fn from_str(s: &str) -> anyhow::Result<Self> {
Ok(match s {
$(concat!(stringify!($height), "p") => Self::$res,)+
_ => anyhow::bail!("Unknown Resolution: {s:?}")
})
}
}
}
}
resolutions! {
nHD: 640 x 360 at 500_000 in AvcAac,
HD: 1280 x 720 at 1_000_000 in AvcAac,
FullHD: 1920 x 1080 at 750_000 in Av1Opus,
WQHD: 2560 x 1440 at 1_000_000 in Av1Opus,
// TODO: qsx still needs to say how much bitrate is required for 4k
UHD: 3840 x 2160 at 2_000_000 in Av1Opus
}
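/// The contents of `project.toml`, tracking everything known about one lecture recording.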
#[derive(Deserialize, Serialize)]
struct Project {
lecture: ProjectLecture,
source: ProjectSource,
progress: ProjectProgress
}
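/// Metadata about the lecture: course slug, label, docent, date and language.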
#[serde_as]
#[derive(Deserialize, Serialize)]
struct ProjectLecture {
course: String,
label: String,
docent: String,
#[serde_as(as = "DisplayFromStr")]
date: Date,
#[serde(default)]
#[serde_as(as = "DisplayFromStr")]
lang: Language<'static>
}
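/// The source recording(s) and the cut points selected by the user.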
#[serde_as]
#[derive(Deserialize, Serialize)]
struct ProjectSource {
files: Vec<String>,
stereo: bool,
#[serde_as(as = "Option<DisplayFromStr>")]
start: Option<Time>,
#[serde_as(as = "Option<DisplayFromStr>")]
end: Option<Time>,
#[serde_as(as = "Vec<(DisplayFromStr, DisplayFromStr)>")]
fast: Vec<(Time, Time)>,
metadata: Option<ProjectSourceMetadata>
}
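/// Technical properties of the source video and audio.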
#[serde_as]
#[derive(Deserialize, Serialize)]
struct ProjectSourceMetadata {
/// The duration of the source video.
#[serde_as(as = "DisplayFromStr")]
source_duration: Time,
/// The FPS of the source video.
#[serde_as(as = "DisplayFromStr")]
source_fps: Rational,
/// The time base of the source video.
#[serde_as(as = "DisplayFromStr")]
source_tbn: Rational,
/// The resolution of the source video.
source_res: Resolution,
/// The sample rate of the source audio.
source_sample_rate: u32
}
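/// Which processing steps have already been completed, so an interrupted run can
/// resume without repeating work.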
#[derive(Default, Deserialize, Serialize)]
struct ProjectProgress {
preprocessed: bool,
asked_start_end: bool,
asked_fast: bool,
rendered: bool,
transcoded: BTreeSet<Resolution>
}
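/// Print a question, then read and return one trimmed line from stdin.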
fn ask(question: impl Display) -> String {
let mut stdout = io::stdout().lock();
let mut stdin = io::stdin().lock();
writeln!(stdout, "{question}").unwrap();
let mut line = String::new();
write!(stdout, "> ").unwrap();
stdout.flush().unwrap();
stdin.read_line(&mut line).unwrap();
line.trim().to_owned()
}
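/// Like [`ask`], but keep prompting until the input parses as a [`Time`].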
fn ask_time(question: impl Display) -> Time {
let mut stdout = io::stdout().lock();
let mut stdin = io::stdin().lock();
writeln!(stdout, "{question}").unwrap();
let mut line = String::new();
loop {
line.clear();
write!(stdout, "> ").unwrap();
stdout.flush().unwrap();
stdin.read_line(&mut line).unwrap();
let line = line.trim();
match parse_time(line) {
Ok(time) => return time,
Err(err) => writeln!(stdout, "Invalid Input {line:?}: {err}").unwrap()
}
}
}
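// Overall flow: load or initialise `project.toml`, preprocess the source, ask the user
// for start/end and fast-forward times, render the final video and optionally transcode
// it to lower resolutions. Progress is written back to `project.toml` after every step.
// Example invocation (hypothetical binary name and directory): `render_video -C ./2024-01-16 -t 720p`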
fn main() {
let args = Args::parse();
#[cfg(feature = "mem_limit")]
{
*(MEM_LIMIT.write().unwrap()) = args.mem_limit;
}
// process arguments
let directory = args.directory.canonicalize_utf8().unwrap();
let course = args.course;
// let's see if we need to initialise the project
let project_path = directory.join("project.toml");
let mut project = if project_path.exists() {
toml::from_slice(&fs::read(&project_path).unwrap()).unwrap()
} else {
let dirname = directory.file_name().unwrap();
let date =
parse_date(dirname).expect("Directory name is not in the expected format");
let mut files = Vec::new();
for entry in directory.read_dir_utf8().unwrap() {
let entry = entry.unwrap();
let name = entry.file_name();
let lower = name.to_ascii_lowercase();
if (lower.ends_with(".mp4")
|| lower.ends_with(".mts")
|| lower.ends_with(".mkv"))
&& !entry.file_type().unwrap().is_dir()
{
files.push(String::from(name));
}
}
files.sort_unstable();
assert!(!files.is_empty());
print!("I found the following source files:");
for f in &files {
print!(" {f}");
}
println!();
files = ask("Which source files would you like to use? (specify multiple files separated by whitespace)")
.split_ascii_whitespace()
.map(String::from)
.collect();
assert!(!files.is_empty());
let project = Project {
lecture: ProjectLecture {
course,
label: args.label,
docent: args.docent,
date,
lang: args.lang
},
source: ProjectSource {
files,
stereo: args.stereo,
start: None,
end: None,
fast: Vec::new(),
metadata: None
},
progress: Default::default()
};
fs::write(&project_path, toml::to_string(&project).unwrap().as_bytes()).unwrap();
project
};
let renderer = Renderer::new(&directory, &project).unwrap();
let recording = renderer.recording_mkv();
// preprocess the video
if !project.progress.preprocessed {
renderer.preprocess(&mut project).unwrap();
project.progress.preprocessed = true;
fs::write(&project_path, toml::to_string(&project).unwrap().as_bytes()).unwrap();
}
// ask the user about start and end times
if !project.progress.asked_start_end {
project.source.start = Some(ask_time(format_args!(
"Please take a look at the file {recording} and tell me the first second you want included"
)));
project.source.end = Some(ask_time(format_args!(
"Please take a look at the file {recording} and tell me the last second you want included"
)));
project.progress.asked_start_end = true;
fs::write(&project_path, toml::to_string(&project).unwrap().as_bytes()).unwrap();
}
// ask the user about fast forward times
if !project.progress.asked_fast {
loop {
let start = ask_time(format_args!(
"Please take a look at the file {recording} and tell me the first second you want fast-forwarded. You may reply with `0` if there are no more fast-forward sections"
));
if start.seconds == 0 && start.micros == 0 {
break;
}
let end = ask_time(format_args!(
"Please tell me the last second you want fast-forwarded"
));
project.source.fast.push((start, end));
}
project.progress.asked_fast = true;
fs::write(&project_path, toml::to_string(&project).unwrap().as_bytes()).unwrap();
}
// render the video
let mut videos = Vec::new();
videos.push(if project.progress.rendered {
renderer.video_file_output()
} else {
let video = renderer.render(&mut project).unwrap();
project.progress.rendered = true;
fs::write(&project_path, toml::to_string(&project).unwrap().as_bytes()).unwrap();
video
});
// rescale the video
if let Some(lowest_res) = args.transcode {
for res in Resolution::values().into_iter().rev() {
if res > project.source.metadata.as_ref().unwrap().source_res
|| res > args.transcode_start
|| res < lowest_res
{
continue;
}
if !project.progress.transcoded.contains(&res) {
videos.push(renderer.rescale(&project.lecture, res).unwrap());
project.progress.transcoded.insert(res);
fs::write(&project_path, toml::to_string(&project).unwrap().as_bytes())
.unwrap();
}
}
}
println!("\x1B[1m ==> DONE :)\x1B[0m");
println!(" Videos:");
for v in &videos {
println!(" -> {v}");
}
}