almost ready for preprocessing

Dominic 2023-10-30 17:32:21 +01:00
parent a0f1caa34b
commit 45b9173c56
Signed by: msrd0
GPG key ID: DCC8C247452E98F9
5 changed files with 136 additions and 16 deletions

View file

@@ -17,3 +17,6 @@ serde = { version = "1.0.188", features = ["derive"] }
serde_with = "3.4"
svgwriter = "0.1"
toml = { package = "basic-toml", version = "0.1.4" }
[patch.crates-io]
rational = { git = "https://github.com/msrd0/rational", branch = "error" }

View file

@@ -11,7 +11,7 @@ use camino::Utf8PathBuf as PathBuf;
use clap::Parser;
use rational::Rational;
use serde::{Deserialize, Serialize};
use serde_with::{serde_as, DisplayFromStr, FromInto};
use serde_with::{serde_as, DisplayFromStr};
use std::{
collections::BTreeSet,
fmt::Display,
@@ -100,7 +100,7 @@ struct ProjectSourceMetadata {
#[serde_as(as = "DisplayFromStr")]
source_duration: Time,
/// The FPS of the source video.
#[serde_as(as = "FromInto<(i128, i128)>")]
#[serde_as(as = "DisplayFromStr")]
source_fps: Rational,
/// The time base of the source video.
source_tbn: u32,
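
Switching source_fps from FromInto<(i128, i128)> to DisplayFromStr only works if Rational implements Display and FromStr, which is presumably what the patched "error" branch from the Cargo.toml hunk above provides. A minimal sketch of the round-trip this relies on; the From<(i128, i128)> constructor comes from the removed annotation and the "30/1" string form matches what the ffprobe r_frame_rate query returns further down, everything else is illustrative:

use rational::Rational;
use serde::{Deserialize, Serialize};
use serde_with::{serde_as, DisplayFromStr};

#[serde_as]
#[derive(Serialize, Deserialize)]
struct Fps {
    // Serialised as a plain string such as "30/1" instead of a two-element array.
    #[serde_as(as = "DisplayFromStr")]
    fps: Rational
}

fn main() -> anyhow::Result<()> {
    // From<(i128, i128)> is implied by the annotation this commit removes.
    let fps = Rational::from((30_i128, 1_i128));
    // DisplayFromStr serialises via Display and deserialises via FromStr,
    // so the round-trip through the string form has to hold.
    let parsed: Rational = fps.to_string().parse()?;
    assert_eq!(parsed, fps);
    // In the TOML project file (basic-toml, imported as toml) the field now
    // reads like source_fps = "30/1" rather than an array of two integers.
    let from_toml: Fps = toml::from_str("fps = \"30/1\"")?;
    assert_eq!(from_toml.fps, fps);
    Ok(())
}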

View file

@@ -1,5 +1,9 @@
use super::{cmd, filter::Filter};
use crate::time::{format_time, Time};
use crate::{
render::filter::channel,
time::{format_time, Time}
};
use anyhow::bail;
use camino::{Utf8Path as Path, Utf8PathBuf as PathBuf};
use rational::Rational;
use std::{borrow::Cow, process::Command};
@@ -48,6 +52,8 @@ impl FfmpegInput {
pub(crate) struct Ffmpeg {
inputs: Vec<FfmpegInput>,
filters: Vec<Filter>,
filters_output: Cow<'static, str>,
loudnorm: bool,
output: PathBuf,
filter_idx: usize
@@ -58,19 +64,46 @@ impl Ffmpeg {
Self {
inputs: Vec::new(),
filters: Vec::new(),
filters_output: "0".into(),
loudnorm: false,
output,
filter_idx: 0
}
}
pub fn run(self) -> anyhow::Result<()> {
pub fn add_input(&mut self, input: FfmpegInput) -> &mut Self {
self.inputs.push(input);
self
}
pub fn add_filter(&mut self, filter: Filter) -> &mut Self {
assert!(!self.loudnorm);
self.filters.push(filter);
self
}
pub fn set_filter_output<T: Into<Cow<'static, str>>>(
&mut self,
output: T
) -> &mut Self {
self.filters_output = output.into();
self
}
pub fn enable_loudnorm(&mut self) -> &mut Self {
assert!(self.filters.is_empty());
self.loudnorm = true;
self
}
pub fn run(mut self) -> anyhow::Result<()> {
let mut cmd = cmd();
cmd.arg("ffmpeg").arg("-hide_banner");
// determine whether the video needs to be re-encoded
let venc = self.filters.iter().any(|f| f.is_video_filter());
let aenc = self.filters.iter().any(|f| f.is_audio_filter());
let aenc = self.filters.iter().any(|f| f.is_audio_filter()) || self.loudnorm;
// initialise a vaapi device if one exists
let vaapi_device: PathBuf = "/dev/dri/renderD128".into();
@@ -88,6 +121,23 @@ impl Ffmpeg {
cmd.arg("-async").arg("1");
// TODO apply filters
match (self.loudnorm, self.filters) {
(true, f) if f.is_empty() => {
cmd.arg("-af").arg("pan=mono|c0=FR,loudnorm,pan=stereo|c0=c0|c1=c0,aformat=sample_rates=48000");
},
(true, _) => panic!("Filters and loudnorm at the same time are not supported"),
(false, f) if f.is_empty() => {},
(false, f) => {
let mut complex = String::new();
for filter in f {
filter.append_to_complex_filter(&mut complex, &mut self.filter_idx);
}
cmd.arg("-filter_complex").arg(complex);
cmd.arg("-map").arg(channel('v', &self.filters_output));
cmd.arg("-map").arg(channel('a', &self.filters_output));
}
}
// append encoding options
if vaapi {
@@ -107,6 +157,12 @@ impl Ffmpeg {
cmd.arg("-c:a").arg("copy");
}
unimplemented!()
cmd.arg(&self.output);
let status = cmd.status()?;
if status.success() {
Ok(())
} else {
bail!("ffmpeg failed with exit code {:?}", status.code())
}
}
}
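
With these changes Ffmpeg is a small builder: inputs and filters are collected first, and run() assembles and spawns the command. A sketch of the intended call pattern inside the crate, mirroring the concat plus loudnorm invocation added to render.rs further down (the function name and paths are placeholders):

use camino::Utf8PathBuf as PathBuf;

// Concatenate a recording list and normalise its audio, like render() does below.
// Loudnorm and custom filters are mutually exclusive, as the asserts above enforce.
fn normalise(recording_txt: PathBuf, output: PathBuf) -> anyhow::Result<()> {
    let mut ffmpeg = Ffmpeg::new(output);
    ffmpeg.add_input(FfmpegInput {
        concat: true,
        ..FfmpegInput::new(recording_txt)
    });
    ffmpeg.enable_loudnorm();
    ffmpeg.run()
}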

View file

@@ -71,7 +71,11 @@ impl Filter {
)
}
fn append_to_complex_filter(&self, complex: &mut String, filter_idx: &mut usize) {
pub(crate) fn append_to_complex_filter(
&self,
complex: &mut String,
filter_idx: &mut usize
) {
match self {
Self::Trim {
input,
@@ -191,7 +195,7 @@ impl Filter {
}
}
fn channel(channel: char, id: &str) -> String {
pub(super) fn channel(channel: char, id: &str) -> String {
if id.chars().any(|ch| !ch.is_digit(10)) {
format!("[{channel}_{id}]")
} else {
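
Only the named-label branch of channel is visible in this hunk (the numeric-id case is cut off), but it is the branch Ffmpeg::run now depends on when mapping the final filter output; for example, with set_filter_output("out") the -map arguments become:

// Named labels are wrapped as [<kind>_<label>]:
assert_eq!(channel('v', "out"), "[v_out]");
assert_eq!(channel('a', "out"), "[a_out]");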

View file

@@ -3,9 +3,10 @@
pub mod ffmpeg;
mod filter;
use self::filter::Filter;
use crate::{
iotro::intro,
render::ffmpeg::Ffmpeg,
iotro::{intro, outro},
render::ffmpeg::{Ffmpeg, FfmpegInput},
time::{format_date, Time},
Project, ProjectSourceMetadata, Resolution
};
@@ -116,6 +117,20 @@ pub(crate) struct Renderer<'a> {
target: PathBuf
}
fn svg2mp4(svg: PathBuf, mp4: PathBuf, duration: Time) -> anyhow::Result<()> {
let mut ffmpeg = Ffmpeg::new(mp4);
ffmpeg.add_input(FfmpegInput {
loop_input: true,
..FfmpegInput::new(svg)
});
ffmpeg.add_filter(Filter::GenerateSilence {
video: "0".into(),
output: "out".into()
});
ffmpeg.set_filter_output("out");
ffmpeg.run()
}
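
svg2mp4 loops the still SVG as a video input and uses a Filter::GenerateSilence filter to attach a silent audio track under the "out" label, presumably so the intro and outro clips can later be concatenated with the recording, which does carry audio. The variant itself is not part of this diff; inferred only from this call site, its shape is roughly the following (the field types are an assumption):

use std::borrow::Cow;

// Hypothetical sketch; the real definition lives in the filter module and is
// not shown in this commit.
enum Filter {
    // Presumably passes the video stream through and synthesises matching
    // silence, exposing both under the output label (mapped as [v_out]/[a_out]).
    GenerateSilence {
        video: Cow<'static, str>,
        output: Cow<'static, str>
    },
    // ... other variants (Trim, ...) elided
}
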
impl<'a> Renderer<'a> {
pub(crate) fn new(directory: &'a Path, project: &Project) -> anyhow::Result<Self> {
let slug = format!(
@@ -150,23 +165,65 @@ impl<'a> Renderer<'a> {
)?;
let recording_txt = self.target.join("recording.txt");
let mut file = File::create(recording_txt)?;
let mut file = File::create(&recording_txt)?;
for filename in &project.source.files {
writeln!(file, "file {:?}", self.directory.join(filename).to_string())?;
}
drop(file);
println!("\x1B[1m ==> Concatenating Video and Normalising Audio ...");
let source_sample_rate =
ffprobe_audio("stream=sample_rate", &recording_txt)?.parse()?;
let recording_mp4 = self.target.join("recording.mp4");
let mut ffmpeg = Ffmpeg::new(recording_mp4);
let mut ffmpeg = Ffmpeg::new(recording_mp4.clone());
ffmpeg.add_input(FfmpegInput {
concat: true,
..FfmpegInput::new(recording_txt)
});
ffmpeg.enable_loudnorm();
ffmpeg.run()?;
// project.source.metadata = Some(ProjectSourceMetadata {
// source_duration: ffprobe_video("format=duration", input)?.parse()?
// });
let width = ffprobe_video("stream=width", &recording_mp4)?.parse()?;
let height = ffprobe_video("stream=height", &recording_mp4)?.parse()?;
let source_res = match (width, height) {
(3840, 2160) => Resolution::UHD,
(2560, 1440) => Resolution::WQHD,
(1920, 1080) => Resolution::FullHD,
(1280, 720) => Resolution::HD,
(640, 360) => Resolution::nHD,
(width, height) => bail!("Unknown resolution: {width}x{height}")
};
project.source.metadata = Some(ProjectSourceMetadata {
source_duration: ffprobe_video("format=duration", &recording_mp4)?.parse()?,
source_fps: ffprobe_video("stream=r_frame_rate", &recording_mp4)?.parse()?,
source_tbn: ffprobe_video("stream=time_base", &recording_mp4)?.parse()?,
source_res,
source_sample_rate
});
let intro_svg = self.target.join("intro.svg");
// fs::write(&intro_svg, intro(res, date));
fs::write(
&intro_svg,
intro(source_res, project.lecture.date)
.to_string_pretty()
.into_bytes()
)?;
let intro_mp4 = self.target.join("intro.mp4");
svg2mp4(intro_svg, intro_mp4, Time {
seconds: 3,
micros: 0
})?;
let outro_svg = self.target.join("outro.svg");
fs::write(
&outro_svg,
outro(source_res).to_string_pretty().into_bytes()
)?;
let outro_mp4 = self.target.join("outro.mp4");
svg2mp4(outro_svg, outro_mp4, Time {
seconds: 5,
micros: 0
})?;
Ok(())
}