preprocessing seems to work

Dominic 2023-10-30 20:28:17 +01:00
parent 469fc5d663
commit 889dbbce5a
Signed by: msrd0
GPG key ID: DCC8C247452E98F9
3 changed files with 124 additions and 56 deletions

View file

@@ -6,7 +6,10 @@ mod iotro;
mod render;
mod time;
use crate::time::{parse_date, parse_time, Date, Time};
use crate::{
render::Renderer,
time::{parse_date, parse_time, Date, Time}
};
use camino::Utf8PathBuf as PathBuf;
use clap::Parser;
use rational::Rational;
@@ -85,10 +88,12 @@ struct ProjectLecture {
struct ProjectSource {
files: Vec<String>,
#[serde_as(as = "DisplayFromStr")]
first_file_start: Time,
#[serde_as(as = "DisplayFromStr")]
last_file_end: Time,
#[serde_as(as = "Option<DisplayFromStr>")]
start: Option<Time>,
#[serde_as(as = "Option<DisplayFromStr>")]
end: Option<Time>,
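/// Sections of the recording to play back fast-forwarded, as (start, end) pairs.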
#[serde_as(as = "Vec<(DisplayFromStr, DisplayFromStr)>")]
fast: Vec<(Time, Time)>,
metadata: Option<ProjectSourceMetadata>
}
@@ -103,7 +108,8 @@ struct ProjectSourceMetadata {
#[serde_as(as = "DisplayFromStr")]
source_fps: Rational,
/// The time base of the source video.
source_tbn: u32,
#[serde_as(as = "DisplayFromStr")]
source_tbn: Rational,
/// The resolution of the source video.
source_res: Resolution,
/// The sample rate of the source audio.
@@ -113,6 +119,8 @@ struct ProjectSourceMetadata {
#[derive(Default, Deserialize, Serialize)]
struct ProjectProgress {
preprocessed: bool,
asked_start_end: bool,
asked_fast: bool,
rendered: bool,
transcoded: BTreeSet<Resolution>
}
@@ -145,7 +153,7 @@ fn main() {
// let's see if we need to initialise the project
let project_path = directory.join("project.toml");
let project = if project_path.exists() {
let mut project = if project_path.exists() {
toml::from_slice(&fs::read(&project_path).unwrap()).unwrap()
} else {
let dirname = directory.file_name().unwrap();
@@ -167,21 +175,13 @@ fn main() {
assert!(!files.is_empty());
println!("I found the following source files: {files:?}");
let first_file_start = ask_time(format_args!(
"Please take a look at the file {} and tell me the first second you want included",
files.first().unwrap()
));
let last_file_end = ask_time(format_args!(
"Please take a look at the file {} and tell me the last second you want included",
files.last().unwrap()
));
let project = Project {
lecture: ProjectLecture { course, date },
source: ProjectSource {
files,
first_file_start,
last_file_end,
start: None,
end: None,
fast: Vec::new(),
metadata: None
},
progress: Default::default()
@@ -191,5 +191,52 @@ fn main() {
};
println!("{}", toml::to_string(&project).unwrap());
let renderer = Renderer::new(&directory, &project).unwrap();
let recording = renderer.recording_mp4();
// preprocess the video
if !project.progress.preprocessed {
renderer.preprocess(&mut project).unwrap();
project.progress.preprocessed = true;
println!("{}", toml::to_string(&project).unwrap());
fs::write(&project_path, toml::to_string(&project).unwrap().as_bytes()).unwrap();
}
// ask the user about start and end times
if !project.progress.asked_start_end {
project.source.start = Some(ask_time(format_args!(
"Please take a look at the file {recording} and tell me the first second you want included"
)));
project.source.end = Some(ask_time(format_args!(
"Please take a look at the file {recording} and tell me the last second you want included"
)));
project.progress.asked_start_end = true;
println!("{}", toml::to_string(&project).unwrap());
fs::write(&project_path, toml::to_string(&project).unwrap().as_bytes()).unwrap();
}
// ask the user about fast forward times
if !project.progress.asked_fast {
loop {
let start = ask_time(format_args!(
"Please take a look at the file {recording} and tell me the first second you want fast-forwarded. You may reply with `0` if there are no more fast-forward sections"
));
if start.seconds == 0 && start.micros == 0 {
break;
}
let end = ask_time(format_args!(
"Please tell me the last second you want fast-forwarded"
));
project.source.fast.push((start, end));
}
project.progress.asked_fast = true;
println!("{}", toml::to_string(&project).unwrap());
fs::write(&project_path, toml::to_string(&project).unwrap().as_bytes()).unwrap();
}
// render(&directory, &project).unwrap();
}
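The prompts above all go through the crate's ask_time helper, which is defined outside this diff. A minimal sketch of what such a prompt-and-parse helper could look like, assuming only the FromStr impl for Time that the DisplayFromStr annotations already require (the real helper may differ):

use std::io::{stdin, stdout, Write as _};

fn ask_time(question: std::fmt::Arguments<'_>) -> Time {
	loop {
		println!("{question}");
		print!("> ");
		stdout().flush().unwrap();
		let mut line = String::new();
		stdin().read_line(&mut line).unwrap();
		// keep asking until the reply parses as a Time (e.g. `0`)
		match line.trim().parse::<Time>() {
			Ok(time) => return time,
			Err(_) => println!("Sorry, I did not understand that. Please try again.")
		}
	}
}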

View file

@@ -4,9 +4,9 @@ use crate::{
time::{format_time, Time}
};
use anyhow::bail;
use camino::{Utf8Path as Path, Utf8PathBuf as PathBuf};
use camino::Utf8PathBuf as PathBuf;
use rational::Rational;
use std::{borrow::Cow, process::Command};
use std::{borrow::Cow, fmt::Write as _, process::Command};
pub(crate) struct FfmpegInput {
pub(crate) concat: bool,
@@ -54,6 +54,7 @@ pub(crate) struct Ffmpeg {
filters: Vec<Filter>,
filters_output: Cow<'static, str>,
loudnorm: bool,
duration: Option<Time>,
output: PathBuf,
filter_idx: usize
@@ -66,6 +67,7 @@ impl Ffmpeg {
filters: Vec::new(),
filters_output: "0".into(),
loudnorm: false,
duration: None,
output,
filter_idx: 0
@@ -97,13 +99,18 @@ impl Ffmpeg {
self
}
pub fn set_duration(&mut self, duration: Time) -> &mut Self {
self.duration = Some(duration);
self
}
pub fn run(mut self) -> anyhow::Result<()> {
let mut cmd = cmd();
cmd.arg("ffmpeg").arg("-hide_banner");
cmd.arg("ffmpeg").arg("-hide_banner").arg("-y");
// determine whether the video and audio need to be re-encoded
let venc = self.filters.iter().any(|f| f.is_video_filter());
let aenc = self.filters.iter().any(|f| f.is_audio_filter()) || self.loudnorm;
let venc = !self.filters.is_empty();
let aenc = !self.filters.is_empty() || self.loudnorm;
// initialise a vaapi device if one exists
let vaapi_device: PathBuf = "/dev/dri/renderD128".into();
@@ -120,7 +127,7 @@ impl Ffmpeg {
// always try to synchronise audio
cmd.arg("-async").arg("1");
// TODO apply filters
// apply filters
match (self.loudnorm, self.filters) {
(true, f) if f.is_empty() => {
cmd.arg("-af").arg("pan=mono|c0=FR,loudnorm,pan=stereo|c0=c0|c1=c0,aformat=sample_rates=48000");
@@ -133,8 +140,17 @@ impl Ffmpeg {
for filter in f {
filter.append_to_complex_filter(&mut complex, &mut self.filter_idx);
}
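// on vaapi, convert the filter graph's software output to nv12 and upload it
// to a GPU surface; otherwise pass it through unchanged via the null filter.
// Either way the final video stream is labelled [v] so it can be mapped below.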
if vaapi {
write!(
complex,
"{}format=nv12,hwupload[v]",
channel('v', &self.filters_output)
);
} else {
write!(complex, "{}null[v]", channel('v', &self.filters_output));
}
cmd.arg("-filter_complex").arg(complex);
cmd.arg("-map").arg(channel('v', &self.filters_output));
cmd.arg("-map").arg("[v]");
cmd.arg("-map").arg(channel('a', &self.filters_output));
}
}
@@ -157,7 +173,11 @@ impl Ffmpeg {
cmd.arg("-c:a").arg("copy");
}
if let Some(duration) = self.duration {
cmd.arg("-t").arg(format_time(duration));
}
cmd.arg(&self.output);
let status = cmd.status()?;
if status.success() {
Ok(())

View file

@@ -39,7 +39,7 @@ fn cmd() -> Command {
.arg("-exuo")
.arg("pipefail")
.arg("-c")
.arg("exec");
.arg("exec \"$0\" \"${@}\"");
cmd
}
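With sh -c, the first argument after the command string is bound to $0 and any further arguments become the positional parameters, so the new wrapper string actually execs the program that callers append via .arg(..) instead of exec'ing nothing. A standalone illustration of the mechanism (assuming a shell that supports -o pipefail, e.g. bash; the crate builds its base Command outside this hunk):

use std::process::Command;

fn main() {
	// runs: bash -exuo pipefail -c 'exec "$0" "${@}"' echo hello world
	// bash binds "echo" to $0 and "hello world" to "$@", then exec replaces
	// the shell with the echo process, which prints "hello world".
	let status = Command::new("bash")
		.arg("-exuo")
		.arg("pipefail")
		.arg("-c")
		.arg("exec \"$0\" \"${@}\"")
		.arg("echo")
		.arg("hello")
		.arg("world")
		.status()
		.unwrap();
	assert!(status.success());
}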
@@ -61,7 +61,9 @@ fn read_output(cmd: &mut Command) -> anyhow::Result<String> {
out.status.code()
);
}
String::from_utf8(out.stdout).context("Command returned non-utf8 output")
String::from_utf8(out.stdout)
.context("Command returned non-utf8 output")
.map(|str| str.trim().into())
}
fn ffprobe_video(query: &str, input: &Path) -> anyhow::Result<String> {
@@ -90,23 +92,6 @@ fn ffprobe_audio(query: &str, concat_input: &Path) -> anyhow::Result<String> {
)
}
fn ffmpeg() -> Command {
let mut cmd = cmd();
cmd.arg("ffmpeg")
.arg("-hide_banner")
.arg("-vaapi_device")
.arg("/dev/dri/renderD128");
cmd
}
fn render_svg(fps: Rational, tbn: u32, input: &Path, duration: Time, output: &Path) {
let mut cmd = ffmpeg();
cmd.arg("-framerate").arg(fps.to_string());
cmd.arg("-loop").arg("1");
cmd.arg("-i").arg(input);
cmd.arg("-c:v").arg("libx264");
}
pub(crate) struct Renderer<'a> {
/// The directory with all the sources.
directory: &'a Path,
@@ -128,13 +113,18 @@ fn svg2mp4(svg: PathBuf, mp4: PathBuf, duration: Time) -> anyhow::Result<()> {
output: "out".into()
});
ffmpeg.set_filter_output("out");
ffmpeg.set_duration(duration);
ffmpeg.run()
}
fn svg2png(svg: &Path, png: &Path, size: usize) -> anyhow::Result<()> {
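// rasterise the SVG to a square size×size PNG using Inkscape's command-line export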
let mut cmd = cmd();
let size = size.to_string();
cmd.arg("inkscape").arg("-w").arg(&size).arg("-h").arg(&size);
cmd.arg("inkscape")
.arg("-w")
.arg(&size)
.arg("-h")
.arg(&size);
cmd.arg(svg).arg("-o").arg(png);
let status = cmd.status()?;
@@ -153,6 +143,7 @@ impl<'a> Renderer<'a> {
format_date(project.lecture.date)
);
let target = directory.join(&slug);
fs::create_dir_all(&target)?;
Ok(Self {
directory,
@@ -161,20 +152,24 @@ impl<'a> Renderer<'a> {
})
}
pub(crate) fn recording_mp4(&self) -> PathBuf {
self.target.join("recording.mp4")
}
pub(crate) fn preprocess(&self, project: &mut Project) -> anyhow::Result<()> {
assert!(!project.progress.preprocessed);
let recording_txt = self.target.join("recording.txt");
let mut file = File::create(&recording_txt)?;
for filename in &project.source.files {
writeln!(file, "file {:?}", self.directory.join(filename).to_string());
writeln!(file, "file '{}'", self.directory.join(filename).to_string());
}
drop(file);
println!("\x1B[1m ==> Concatenating Video and Normalising Audio ...");
println!("\x1B[1m ==> Concatenating Video and Normalising Audio ...\x1B[0m");
let source_sample_rate =
ffprobe_audio("stream=sample_rate", &recording_txt)?.parse()?;
let recording_mp4 = self.target.join("recording.mp4");
let recording_mp4 = self.recording_mp4();
let mut ffmpeg = Ffmpeg::new(recording_mp4.clone());
ffmpeg.add_input(FfmpegInput {
concat: true,
@@ -201,6 +196,8 @@ impl<'a> Renderer<'a> {
source_sample_rate
});
println!("\x1B[1m ==> Preparing assets ...\x1B[0m");
// render intro to svg then mp4
let intro_svg = self.target.join("intro.svg");
fs::write(
@@ -225,7 +222,7 @@ impl<'a> Renderer<'a> {
svg2mp4(outro_svg, outro_mp4, Time {
seconds: 5,
micros: 0
});
})?;
// copy logo then render to png
let logo_svg = self.target.join("logo.svg");
@@ -246,7 +243,11 @@ impl<'a> Renderer<'a> {
))
)?;
let fastforward_png = self.target.join("fastforward.png");
svg2png(&fastforward_svg, &fastforward_png, 128 * 1920 / source_res.width())?;
svg2png(
&fastforward_svg,
&fastforward_png,
128 * 1920 / source_res.width()
)?;
Ok(())
}