Compare commits
5 commits: 7b1681d85d ... 677c35a6fd

Author | SHA1 | Date
---|---|---
 | 677c35a6fd |
 | b11baf1358 |
 | 8b57b97c80 |
 | 1e7f5f95cd |
 | 01e0758b6a |

7 changed files with 494 additions and 49 deletions

@@ -7,9 +7,13 @@ date = "230101"
 [source]
 files = ["C01.mp4", "C02.mp4", "C03.mp4"]
 stereo = false
-start = "2"
+start = "1"
 end = "12"
-fast = [["5", "7"], ["9", "11"]]
+fast = [["6", "8"], ["10", "11"]]
+questions = [
+	["1.5", "3", "Hallo liebes Publikum. Ich habe leider meine Frage vergessen. Bitte entschuldigt die Störung."],
+	["3.5", "5", "Ah jetzt weiß ich es wieder. Meine Frage war: Was war meine Frage?"]
+]

 [source.metadata]
 source_duration = "12.53000"
@@ -22,5 +26,6 @@ source_sample_rate = 48000
 preprocessed = false
 asked_start_end = true
 asked_fast = true
+asked_questions = true
 rendered = false
 transcoded = []

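Note: each entry in the new `questions` array is a `[start, end, text]` triple, giving the start and end second of the question in the source recording plus the subtitle text, matching the `questions: Vec<(Time, Time, String)>` field added to `ProjectSource` in src/main.rs below.
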
@@ -10,7 +10,10 @@ license = "EPL-2.0"
 [dependencies]
 anyhow = "1.0"
 camino = "1.1"
+console = "0.15"
 clap = { version = "4.4", features = ["derive"] }
+fontconfig = "0.8"
+harfbuzz_rs = "2.0"
 indexmap = "2.2"
 rational = "1.5"
 serde = { version = "1.0.188", features = ["derive"] }

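Note: the three new dependencies back the new features. `console` provides the styled terminal prompts used in src/main.rs and the renderer, while `fontconfig` and `harfbuzz_rs` locate the Noto Sans font and measure text widths for the line layout in the new src/question.rs.
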
@@ -7,7 +7,7 @@ use std::{
 	str::FromStr
 };
 use svgwriter::{
-	tags::{Group, Rect, TagWithPresentationAttributes, Text},
+	tags::{Group, Rect, TagWithPresentationAttributes as _, Text},
 	Graphic
 };

@@ -134,14 +134,14 @@ impl Debug for Language<'_> {
 }

 #[repr(u16)]
-enum FontSize {
+pub(crate) enum FontSize {
 	Huge = 72,
 	Large = 56,
 	Big = 44
 }

 #[repr(u16)]
-enum FontWeight {
+pub(crate) enum FontWeight {
 	Normal = 400,
 	SemiBold = 500,
 	Bold = 700

138 src/main.rs
@@ -3,16 +3,19 @@
 #![forbid(elided_lifetimes_in_paths, unsafe_code)]

 mod iotro;
+mod question;
 mod render;
 mod time;

-use crate::{
+use self::{
+	iotro::Language,
+	question::Question,
 	render::{ffmpeg::FfmpegOutputFormat, Renderer},
 	time::{parse_date, parse_time, Date, Time}
 };
 use camino::Utf8PathBuf as PathBuf;
 use clap::Parser;
-use iotro::Language;
+use console::style;
 use rational::Rational;
 use serde::{Deserialize, Serialize};
 use serde_with::{serde_as, DisplayFromStr};

@@ -169,9 +172,15 @@ struct ProjectSource {
 	start: Option<Time>,
 	#[serde_as(as = "Option<DisplayFromStr>")]
 	end: Option<Time>,

+	#[serde(default)]
 	#[serde_as(as = "Vec<(DisplayFromStr, DisplayFromStr)>")]
 	fast: Vec<(Time, Time)>,

+	#[serde(default)]
+	#[serde_as(as = "Vec<(DisplayFromStr, DisplayFromStr, _)>")]
+	questions: Vec<(Time, Time, String)>,
+
 	metadata: Option<ProjectSourceMetadata>
 }

@@ -195,10 +204,25 @@ struct ProjectSourceMetadata {

 #[derive(Default, Deserialize, Serialize)]
 struct ProjectProgress {
+	#[serde(default)]
 	preprocessed: bool,
+
+	#[serde(default)]
 	asked_start_end: bool,
+
+	#[serde(default)]
 	asked_fast: bool,
+
+	#[serde(default)]
+	asked_questions: bool,
+
+	#[serde(default)]
+	rendered_assets: bool,
+
+	#[serde(default)]
 	rendered: bool,
+
+	#[serde(default)]
 	transcoded: BTreeSet<Resolution>
 }

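Note: with every field of `ProjectProgress` marked `#[serde(default)]`, project files written before this change, which lack the new `asked_questions` and `rendered_assets` flags, still deserialize; the missing flags simply default to `false`.
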
@@ -206,29 +230,44 @@ fn ask(question: impl Display) -> String {
 	let mut stdout = io::stdout().lock();
 	let mut stdin = io::stdin().lock();

-	writeln!(stdout, "{question}").unwrap();
-	let mut line = String::new();
-	write!(stdout, "> ").unwrap();
+	write!(
+		stdout,
+		"{} {} ",
+		style(question).bold().magenta(),
+		style(">").cyan()
+	)
+	.unwrap();
 	stdout.flush().unwrap();
+	let mut line = String::new();
 	stdin.read_line(&mut line).unwrap();
 	line.trim().to_owned()
 }

-fn ask_time(question: impl Display) -> Time {
+fn ask_time(question: impl Display + Copy) -> Time {
 	let mut stdout = io::stdout().lock();
 	let mut stdin = io::stdin().lock();

-	writeln!(stdout, "{question}").unwrap();
 	let mut line = String::new();
 	loop {
 		line.clear();
-		write!(stdout, "> ").unwrap();
+		write!(
+			stdout,
+			"{} {} ",
+			style(question).bold().magenta(),
+			style(">").cyan()
+		)
+		.unwrap();
 		stdout.flush().unwrap();
 		stdin.read_line(&mut line).unwrap();
 		let line = line.trim();
 		match parse_time(line) {
 			Ok(time) => return time,
-			Err(err) => writeln!(stdout, "Invalid Input {line:?}: {err}").unwrap()
+			Err(err) => writeln!(
+				stdout,
+				"{} {line:?}: {err}",
+				style("Invalid Input").bold().red()
+			)
+			.unwrap()
 		}
 	}
 }

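Note: `ask_time` now needs `impl Display + Copy` because `style(question)` takes the prompt by value on every pass through the retry loop; the short `&str` prompts the callers now pass (e.g. `"from"`) satisfy that trivially.
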
@@ -272,10 +311,14 @@ fn main() {

 		print!("I found the following source files:");
 		for f in &files {
-			print!(" {f}");
+			print!(" {}", style(f).bold().yellow());
 		}
 		println!();
-		files = ask("Which source files would you like to use? (specify multiple files separated by whitespace)")
+		println!(
+			"{} Which source files would you like to use? (specify multiple files separated by whitespace)",
+			style("?").bold().yellow()
+		);
+		files = ask("files")
 			.split_ascii_whitespace()
 			.map(String::from)
 			.collect();

@@ -295,6 +338,7 @@ fn main() {
 			start: None,
 			end: None,
 			fast: Vec::new(),
+			questions: Vec::new(),
 			metadata: None
 		},
 		progress: Default::default()

@@ -314,14 +358,21 @@ fn main() {
 		fs::write(&project_path, toml::to_string(&project).unwrap().as_bytes()).unwrap();
 	}

+	println!();
+	println!(
+		" {} Preprocessed video: {}",
+		style("==>").bold().cyan(),
+		style(recording).bold().yellow()
+	);
+
 	// ask the user about start and end times
 	if !project.progress.asked_start_end {
-		project.source.start = Some(ask_time(format_args!(
-			"Please take a look at the file {recording} and tell me the first second you want included"
-		)));
-		project.source.end = Some(ask_time(format_args!(
-			"Please take a look at the file {recording} and tell me the last second you want included"
-		)));
+		println!(
+			"{} What is the first/last second you want included?",
+			style("?").bold().yellow()
+		);
+		project.source.start = Some(ask_time("first"));
+		project.source.end = Some(ask_time("last "));
 		project.progress.asked_start_end = true;

 		fs::write(&project_path, toml::to_string(&project).unwrap().as_bytes()).unwrap();

@@ -329,16 +380,16 @@ fn main() {

 	// ask the user about fast forward times
 	if !project.progress.asked_fast {
+		println!(
+			"{} Which sections of the video do you want fast-forwarded? (0 to finish)",
+			style("?").bold().yellow()
+		);
 		loop {
-			let start = ask_time(format_args!(
-				"Please take a look at the file {recording} and tell me the first second you want fast-forwarded. You may reply with `0` if there are no more fast-forward sections"
-			));
+			let start = ask_time("from");
 			if start.seconds == 0 && start.micros == 0 {
 				break;
 			}
-			let end = ask_time(format_args!(
-				"Please tell me the last second you want fast-forwarded"
-			));
+			let end = ask_time("to ");
 			project.source.fast.push((start, end));
 		}
 		project.progress.asked_fast = true;

@@ -346,6 +397,34 @@ fn main() {
 		fs::write(&project_path, toml::to_string(&project).unwrap().as_bytes()).unwrap();
 	}

+	// ask the user about questions from the audience that should be subtitled
+	if !project.progress.asked_questions {
+		println!(
+			"{} In which sections of the video were questions asked you want subtitles for? (0 to finish)",
+			style("?").bold().yellow()
+		);
+		loop {
+			let start = ask_time("from");
+			if start.seconds == 0 && start.micros == 0 {
+				break;
+			}
+			let end = ask_time("to ");
+			let text = ask("text");
+			project.source.questions.push((start, end, text));
+		}
+		project.progress.asked_questions = true;
+
+		fs::write(&project_path, toml::to_string(&project).unwrap().as_bytes()).unwrap();
+	}
+
+	// render the assets
+	if !project.progress.rendered_assets {
+		renderer.render_assets(&project).unwrap();
+		project.progress.rendered_assets = true;
+
+		fs::write(&project_path, toml::to_string(&project).unwrap().as_bytes()).unwrap();
+	}
+
 	// render the video
 	let mut videos = Vec::new();
 	videos.push(if project.progress.rendered {

@@ -378,9 +457,18 @@ fn main() {
 		}
 	}

-	println!("\x1B[1m ==> DONE :)\x1B[0m");
+	println!();
+	println!(
+		" {} {}",
+		style("==>").bold().cyan(),
+		style("DONE :)").bold()
+	);
 	println!(" Videos:");
 	for v in &videos {
-		println!(" -> {v}");
+		println!(
+			" {} {}",
+			style("->").bold().cyan(),
+			style(v).bold().yellow()
+		);
 	}
 }

152 src/question.rs (new file)
@@ -0,0 +1,152 @@
+use crate::{iotro::Language, Resolution};
+use fontconfig::Fontconfig;
+use harfbuzz_rs::{Face, Font, Owned, UnicodeBuffer};
+use std::sync::OnceLock;
+use svgwriter::{
+	tags::{Group, Path, Rect, TSpan, TagWithPresentationAttributes as _, Text},
+	Data, Graphic, Transform
+};
+
+pub(crate) struct Question {
+	res: Resolution,
+	g: Group
+}
+
+impl Question {
+	pub(crate) fn new(res: Resolution, lang: &Language<'_>, str: &str) -> Self {
+		static FONT: OnceLock<Owned<Font<'static>>> = OnceLock::new();
+		let font = FONT.get_or_init(|| {
+			let fc = Fontconfig::new().unwrap();
+			let font_path = fc.find("Noto Sans", None).unwrap().path;
+			let face = Face::from_file(font_path, 0).unwrap();
+			Font::new(face)
+		});
+		let upem = font.face().upem();
+
+		// constants
+		let border_r = 12;
+		let font_size = 44;
+		let line_height = font_size * 6 / 5;
+		let padding = font_size / 2;
+		let margin_x = 240;
+		let margin_y = padding * 3 / 2;
+		let question_offset = 64;
+		let question_width = 240;
+
+		// calculated
+		let box_width = 1920 - 2 * margin_x;
+		let text_width = box_width - 2 * padding;
+
+		// calculates the width of the given string
+		let width_of = |s: &str| {
+			let width: i32 =
+				harfbuzz_rs::shape(font, UnicodeBuffer::new().add_str(s), &[])
+					.get_glyph_positions()
+					.iter()
+					.map(|glyph_pos| glyph_pos.x_advance)
+					.sum();
+			(width * font_size) / upem as i32
+		};
+		let space_width = width_of(" ");
+
+		// lay out the text
+		let mut text = Text::new()
+			.with_dominant_baseline("hanging")
+			.with_transform(
+				Transform::new().translate(padding, padding + font_size / 2 + border_r)
+			);
+		let words = str.split_whitespace();
+		let mut text_height = 0;
+		let mut text_x = 0;
+		for word in words {
+			let word_width = width_of(word);
+			if text_x + word_width > text_width {
+				text_x = 0;
+				text_height += line_height;
+			}
+			text.push(
+				TSpan::new()
+					.with_x(text_x)
+					.with_y(text_height)
+					.append(word.to_owned())
+			);
+			text_x += word_width + space_width;
+		}
+		text_height += font_size;
+
+		// calculated
+		let box_height = text_height + 2 * padding + font_size / 2 + border_r;
+
+		let mut g = Group::new()
+			.with_fill("white")
+			.with_font_family("Noto Sans")
+			.with_font_size(font_size)
+			.with_transform(
+				Transform::new().translate(margin_x, 1080 - margin_y - box_height)
+			);
+
+		let mut outline = Data::new();
+		outline.move_by(border_r, 0).horiz_line_to(question_offset);
+		outline
+			.vert_line_by(-font_size / 2)
+			.arc_by(border_r, border_r, 0, false, true, border_r, -border_r)
+			.horiz_line_by(question_width)
+			.arc_by(border_r, border_r, 0, false, true, border_r, border_r)
+			.vert_line_by(font_size)
+			.arc_by(border_r, border_r, 0, false, true, -border_r, border_r)
+			.horiz_line_by(-question_width)
+			.arc_by(border_r, border_r, 0, false, true, -border_r, -border_r)
+			.vert_line_by(-font_size / 2)
+			.move_by(question_width + 2 * border_r, 0);
+		outline
+			.horiz_line_to(box_width - border_r)
+			.arc_by(border_r, border_r, 0, false, true, border_r, border_r)
+			.vert_line_by(box_height - 2 * border_r)
+			.arc_by(border_r, border_r, 0, false, true, -border_r, border_r)
+			.horiz_line_to(border_r)
+			.arc_by(border_r, border_r, 0, false, true, -border_r, -border_r)
+			.vert_line_to(border_r)
+			.arc_by(border_r, border_r, 0, false, true, border_r, -border_r);
+		g.push(
+			Path::new()
+				.with_stroke("#fff")
+				.with_stroke_width(3)
+				.with_fill("#000")
+				.with_fill_opacity(".3")
+				.with_d(outline)
+		);
+		g.push(
+			Text::new()
+				.with_x(question_offset + question_width / 2 + border_r)
+				.with_y(0)
+				.with_dominant_baseline("middle")
+				.with_text_anchor("middle")
+				.with_font_weight(600)
+				.append("Question")
+		);
+		g.push(text);
+
+		Self { res, g }
+	}
+
+	pub(crate) fn finish(self) -> Graphic {
+		let mut svg = Graphic::new();
+		svg.set_width(self.res.width());
+		svg.set_height(self.res.height());
+		svg.set_view_box("0 0 1920 1080");
+		svg.push(self.g);
+		svg
+	}
+}
+
+#[cfg(test)]
+#[test]
+fn question() {
+	let svg = Question::new(
+		Resolution::FullHD,
+		&Language::default(),
+		"Hallo Welt! Dies ist eine sehr kluge Frage aus dem Publikum. Die Frage ist nämlich: Was ist eigentlich die Frage?".into()
+	)
+	.finish();
+	std::fs::write("question.svg", svg.to_string_pretty()).unwrap();
+}

@@ -19,6 +19,7 @@ pub(crate) enum Filter {
 		overlay_input: Cow<'static, str>,
 		x: Cow<'static, str>,
 		y: Cow<'static, str>,
+		repeatlast: bool,
 		output: Cow<'static, str>
 	},

@@ -37,6 +38,22 @@ pub(crate) enum Filter {
 		output: Cow<'static, str>
 	},

+	/// Fade only video using the alpha channel.
+	FadeAlpha {
+		input: Cow<'static, str>,
+		direction: &'static str,
+		start: Time,
+		duration: Time,
+		output: Cow<'static, str>
+	},
+
+	/// Offset the PTS of the video by the amount of seconds.
+	VideoOffset {
+		input: Cow<'static, str>,
+		seconds: Time,
+		output: Cow<'static, str>
+	},
+
 	/// Generate silence. The video is copied.
 	GenerateSilence {
 		video: Cow<'static, str>,

@@ -77,11 +94,13 @@ impl Filter {
 				overlay_input,
 				x,
 				y,
+				repeatlast,
 				output
 			} => {
+				let repeatlast: u8 = (*repeatlast).into();
 				writeln!(
 					complex,
-					"{}{}overlay=x={x}:y={y}{};",
+					"{}{}overlay=x={x}:y={y}:repeatlast={repeatlast}:eval=init{};",
 					channel('v', video_input),
 					channel('v', overlay_input),
 					channel('v', output)

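Note: in the generated filtergraph, `repeatlast=0` makes the overlay vanish once the overlaid input (here a question card) runs out of frames instead of holding its last frame for the rest of the video, and `eval=init` evaluates the `x`/`y` expressions only once at initialisation. The logo overlay further down keeps the old behaviour by passing `repeatlast: true`.
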
@@ -129,6 +148,34 @@ impl Filter {
 				)?;
 			},

+			Self::FadeAlpha {
+				input,
+				direction,
+				start,
+				duration,
+				output
+			} => {
+				writeln!(
+					complex,
+					"{}fade={direction}:st={start}:d={duration}:alpha=1{};",
+					channel('v', input),
+					channel('v', output)
+				)?;
+			},
+
+			Self::VideoOffset {
+				input,
+				seconds,
+				output
+			} => {
+				writeln!(
+					complex,
+					"{}setpts=PTS+{seconds}/TB{};",
+					channel('v', input),
+					channel('v', output)
+				)?;
+			},
+
 			Self::GenerateSilence { video, output } => {
 				writeln!(
 					complex,

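Note: both new variants map to standard ffmpeg filters. `fade=...:alpha=1` ramps only the alpha channel, so the question card fades in and out without darkening the video underneath, and `setpts=PTS+{seconds}/TB` shifts the card's timestamps so it becomes visible the given number of seconds into the part it is overlaid onto.
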
@@ -7,12 +7,14 @@ use self::{
 };
 use crate::{
 	iotro::{intro, outro},
+	question::Question,
 	render::ffmpeg::{Ffmpeg, FfmpegInput},
-	time::{format_date, Time},
+	time::{format_date, format_time, Time},
 	Project, ProjectLecture, ProjectSourceMetadata, Resolution
 };
 use anyhow::{bail, Context};
 use camino::{Utf8Path as Path, Utf8PathBuf as PathBuf};
+use console::style;
 use std::{
 	borrow::Cow,
 	collections::VecDeque,

@@ -33,6 +35,10 @@ const TRANSITION_LEN: Time = Time {
 	seconds: 0,
 	micros: 200_000
 };
+const QUESTION_FADE_LEN: Time = Time {
+	seconds: 0,
+	micros: 400_000
+};
 const FF_MULTIPLIER: usize = 8;
 // logo sizes at full hd, will be scaled to source resolution
 const FF_LOGO_SIZE: usize = 128;

|
||||||
ffmpeg.run()
|
ffmpeg.run()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn svg2png(svg: &Path, png: &Path, size: usize) -> anyhow::Result<()> {
|
fn svg2png(svg: &Path, png: &Path, width: usize, height: usize) -> anyhow::Result<()> {
|
||||||
let mut cmd = cmd();
|
let mut cmd = cmd();
|
||||||
let size = size.to_string();
|
|
||||||
cmd.arg("inkscape")
|
cmd.arg("inkscape")
|
||||||
.arg("-w")
|
.arg("-w")
|
||||||
.arg(&size)
|
.arg(width.to_string())
|
||||||
.arg("-h")
|
.arg("-h")
|
||||||
.arg(&size);
|
.arg(height.to_string());
|
||||||
cmd.arg(svg).arg("-o").arg(png);
|
cmd.arg(svg).arg("-o").arg(png);
|
||||||
|
|
||||||
let status = cmd.status()?;
|
let status = cmd.status()?;
|
||||||
|
@@ -217,6 +222,14 @@ impl<'a> Renderer<'a> {
 		self.target.join("outro.mkv")
 	}

+	fn question_svg(&self, q_idx: usize) -> PathBuf {
+		self.target.join(format!("question{q_idx}.svg"))
+	}
+
+	fn question_png(&self, q_idx: usize) -> PathBuf {
+		self.target.join(format!("question{q_idx}.png"))
+	}
+
 	pub(crate) fn preprocess(&self, project: &mut Project) -> anyhow::Result<()> {
 		assert!(!project.progress.preprocessed);

@@ -227,7 +240,13 @@ impl<'a> Renderer<'a> {
 		}
 		drop(file);

-		println!("\x1B[1m ==> Concatenating Video and Normalising Audio ...\x1B[0m");
+		println!();
+		println!(
+			" {} {}",
+			style("==>").bold().cyan(),
+			style("Concatenating Video and Normalising Audio ...").bold()
+		);
+
 		let source_sample_rate =
 			ffprobe_audio("stream=sample_rate", &recording_txt)?.parse()?;
 		let recording_mkv = self.recording_mkv();

@@ -259,15 +278,26 @@ impl<'a> Renderer<'a> {
 			source_res,
 			source_sample_rate
 		});

+		Ok(())
+	}
+
+	/// Prepare assets like intro, outro and questions.
+	pub(crate) fn render_assets(&self, project: &Project) -> anyhow::Result<()> {
 		let metadata = project.source.metadata.as_ref().unwrap();

-		println!("\x1B[1m ==> Preparing assets ...\x1B[0m");
+		println!();
+		println!(
+			" {} {}",
+			style("==>").bold().cyan(),
+			style("Preparing assets ...").bold()
+		);
+
 		// render intro to svg then mp4
 		let intro_svg = self.target.join("intro.svg");
 		fs::write(
 			&intro_svg,
-			intro(source_res, &project.lecture)
+			intro(metadata.source_res, &project.lecture)
 				.to_string_pretty()
 				.into_bytes()
 		)?;

@@ -278,7 +308,7 @@ impl<'a> Renderer<'a> {
 		let outro_svg = self.target.join("outro.svg");
 		fs::write(
 			&outro_svg,
-			outro(&project.lecture.lang, source_res)
+			outro(&project.lecture.lang, metadata.source_res)
 				.to_string_pretty()
 				.into_bytes()
 		)?;

@@ -292,7 +322,8 @@ impl<'a> Renderer<'a> {
 			include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/assets/logo.svg"))
 		)?;
 		let logo_png = self.target.join("logo.png");
-		svg2png(&logo_svg, &logo_png, LOGO_SIZE * source_res.width() / 1920)?;
+		let logo_size = LOGO_SIZE * metadata.source_res.width() / 1920;
+		svg2png(&logo_svg, &logo_png, logo_size, logo_size)?;

 		// copy fastforward then render to png
 		let fastforward_svg = self.target.join("fastforward.svg");

@@ -304,12 +335,31 @@ impl<'a> Renderer<'a> {
 			))
 		)?;
 		let fastforward_png = self.target.join("fastforward.png");
+		let ff_logo_size = FF_LOGO_SIZE * metadata.source_res.width() / 1920;
 		svg2png(
 			&fastforward_svg,
 			&fastforward_png,
-			FF_LOGO_SIZE * source_res.width() / 1920
+			ff_logo_size,
+			ff_logo_size
 		)?;

+		// write questions then render to png
+		for (q_idx, (_, _, q_text)) in project.source.questions.iter().enumerate() {
+			let q = Question::new(metadata.source_res, &project.lecture.lang, q_text)
+				.finish()
+				.to_string_pretty()
+				.into_bytes();
+			let q_svg = self.question_svg(q_idx);
+			let q_png = self.question_png(q_idx);
+			fs::write(&q_svg, q)?;
+			svg2png(
+				&q_svg,
+				&q_png,
+				metadata.source_res.width(),
+				metadata.source_res.height()
+			)?;
+		}
+
 		Ok(())
 	}

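Note: unlike the square logo and fast-forward icons, the question cards are rendered at the full source resolution, which is why `svg2png` now takes separate width and height arguments.
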
@@ -349,21 +399,24 @@ impl<'a> Renderer<'a> {
 		let mut part3: Cow<'static, str> = outro.into();

 		// the recording is fun because of all the fast forwarding
-		let mut part2 = VecDeque::new();
+		let mut part2 = VecDeque::<Cow<'static, str>>::new();
+		let mut part2_ts = VecDeque::new();
 		let mut part2_start_of_the_end = None;
 		let mut part2_end_of_the_start = None;

 		// ok so ff is fun. we will add the ff'ed section as well as the part between
-		// the previous ff'ed section and our new section, unless we are the first
+		// the previous ff'ed section and our new section, unless we are the first.
 		project.source.fast.sort();
 		for (i, (ff_st, ff_end)) in project.source.fast.iter().rev().enumerate() {
 			if let Some(prev_end) = part2_end_of_the_start {
+				let duration = prev_end - *ff_end;
 				let recffbetween = ffmpeg.add_input(FfmpegInput {
 					start: Some(*ff_end),
-					duration: Some(prev_end - *ff_end),
+					duration: Some(duration),
 					..FfmpegInput::new(rec_file.clone())
 				});
 				part2.push_front(recffbetween.into());
+				part2_ts.push_front(Some((*ff_end, duration)));
 			} else {
 				part2_start_of_the_end = Some(*ff_end);
 			}

@@ -383,6 +436,7 @@ impl<'a> Renderer<'a> {
 				output: recff.clone().into()
 			});
 			part2.push_front(recff.into());
+			part2_ts.push_front(None);
 		}

 		// if the recording was not ff'ed, perform a normal trim

@@ -397,23 +451,112 @@ impl<'a> Renderer<'a> {
 				..FfmpegInput::new(rec_file.clone())
 			});
 			part2.push_back(rectrim.into());
+			part2_ts.push_back(Some((start, part2_last_part_duration)));
 		}
 		// otherwise add the first and last parts separately
 		else {
+			let duration = part2_end_of_the_start.unwrap() - start;
 			let rectrimst = ffmpeg.add_input(FfmpegInput {
 				start: Some(start),
-				duration: Some(part2_end_of_the_start.unwrap() - start),
+				duration: Some(duration),
 				..FfmpegInput::new(rec_file.clone())
 			});
 			part2.push_front(rectrimst.into());
+			part2_ts.push_front(Some((start, duration)));

-			part2_last_part_duration = end - part2_start_of_the_end.unwrap();
+			let part2_start_of_the_end = part2_start_of_the_end.unwrap();
+			part2_last_part_duration = end - part2_start_of_the_end;
 			let rectrimend = ffmpeg.add_input(FfmpegInput {
-				start: Some(part2_start_of_the_end.unwrap()),
+				start: Some(part2_start_of_the_end),
 				duration: Some(part2_last_part_duration),
 				..FfmpegInput::new(rec_file.clone())
 			});
 			part2.push_back(rectrimend.into());
+			part2_ts.push_back(Some((part2_start_of_the_end, part2_last_part_duration)));
+		}
+
+		// ok now we have a bunch of parts and a bunch of questions that want to get
+		// overlayed over those parts.
+		project.source.questions.sort();
+		let mut q_idx = 0;
+		for (i, ts) in part2_ts.iter().enumerate() {
+			let Some((start, duration)) = ts else {
+				continue;
+			};
+			loop {
+				if q_idx >= project.source.questions.len() {
+					break;
+				}
+				let (q_start, q_end, _) = &project.source.questions[q_idx];
+				if q_start < start {
+					bail!(
+						"Question starting at {} did not fit into the video",
+						format_time(*q_start)
+					);
+				}
+				if q_start >= start && *q_end <= *start + *duration {
+					// add the question as input to ffmpeg
+					let q_inp = ffmpeg.add_input(FfmpegInput {
+						loop_input: true,
+						fps: Some(project.source.metadata.as_ref().unwrap().source_fps),
+						duration: Some(*q_end - *q_start),
+						..FfmpegInput::new(self.question_png(q_idx))
+					});
+
+					// fade in the question
+					let q_fadein = format!("q{q_idx}fin");
+					ffmpeg.add_filter(Filter::FadeAlpha {
+						input: q_inp.into(),
+						direction: "in",
+						start: Time {
+							seconds: 0,
+							micros: 0
+						},
+						duration: QUESTION_FADE_LEN,
+						output: q_fadein.clone().into()
+					});
+
+					// fade out the question
+					let q_fadeout = format!("q{q_idx}fout");
+					ffmpeg.add_filter(Filter::FadeAlpha {
+						input: q_fadein.into(),
+						direction: "out",
+						start: *q_end - *q_start - QUESTION_FADE_LEN,
+						duration: QUESTION_FADE_LEN,
+						output: q_fadeout.clone().into()
+					});
+
+					// move the question to the correct timestamp
+					let q_pts = format!("q{q_idx}pts");
+					ffmpeg.add_filter(Filter::VideoOffset {
+						input: q_fadeout.into(),
+						seconds: *q_start - *start,
+						output: q_pts.clone().into()
+					});
+
+					// overlay the part in question
+					let q_overlay = format!("q{q_idx}o");
+					ffmpeg.add_filter(Filter::Overlay {
+						video_input: part2[i].clone(),
+						overlay_input: q_pts.into(),
+						x: "0".into(),
+						y: "0".into(),
+						repeatlast: false,
+						output: q_overlay.clone().into()
+					});
+					part2[i] = q_overlay.into();
+
+					q_idx += 1;
+					continue;
+				}
+				break;
+			}
+		}
+		if q_idx < project.source.questions.len() {
+			bail!(
+				"Question starting at {} did not fit into the video before it was over",
+				format_time(project.source.questions[q_idx].0)
+			);
 		}

 		// fade out the intro

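Note: the new `part2_ts` deque records `(start, duration)` for every trimmed part of the recording (and `None` for the fast-forwarded ones), so each question can be matched to the part it falls into. Per question the chain is: a looped PNG input, faded in and out with `FadeAlpha`, shifted to the question's offset within its part with `VideoOffset`, then placed on top of that part with `Overlay { repeatlast: false, .. }`; the overlay's output label replaces the part's entry in `part2`. A question that does not fully fit into any part aborts the render via `bail!`.
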
@@ -491,6 +634,7 @@ impl<'a> Renderer<'a> {
 			overlay_input: logoalpha.into(),
 			x: format!("main_w-overlay_w-{overlay_off_x}").into(),
 			y: format!("main_h-overlay_h-{overlay_off_y}").into(),
+			repeatlast: true,
 			output: overlay.into()
 		});

@@ -508,7 +652,13 @@ impl<'a> Renderer<'a> {
 	) -> anyhow::Result<PathBuf> {
 		let input = self.video_file_output();
 		let output = self.video_file_res(res);
-		println!("\x1B[1m ==> Rescaling to {}p\x1B[0m", res.height());
+
+		println!();
+		println!(
+			" {} {}",
+			style("==>").bold().cyan(),
+			style(format!("Rescaling to {}p", res.height())).bold()
+		);

 		let mut ffmpeg = Ffmpeg::new(FfmpegOutput {
 			video_bitrate: Some(res.bitrate()),