Compare commits

...

48 commits
240517 ... main

Author SHA1 Message Date
d5bb7a4bdc
only use vaapi when feature is enabled
All checks were successful
Trigger quay.io Webhook / run (push) Successful in 4s
2024-07-08 13:18:26 +02:00
5e1f5e8829
add readme
[skip ci] no need
2024-06-29 11:21:06 +02:00
5808bff395
fix still defaulting to av1, allow aac to be paired with flac
All checks were successful
Trigger quay.io Webhook / run (push) Successful in 4s
2024-06-28 18:19:08 +02:00
330515d6b4
fix toml serialiser being stupid
All checks were successful
Trigger quay.io Webhook / run (push) Successful in 5s
2024-06-28 16:56:07 +02:00
7662150b89
fix codec comparison; encode with higher quality for intermediate results
All checks were successful
Trigger quay.io Webhook / run (push) Successful in 5s
2024-06-26 12:05:40 +02:00
b6fb0fa184
uncommitted stuff: some bitrate changes [skip ci] 2024-06-23 23:59:23 +02:00
6e56452f78
limit webhook ci to main branch
All checks were successful
Trigger quay.io Webhook / run (push) Successful in 5s
2024-06-23 17:54:18 +02:00
680ea8f4e5
Refactor the code into a binary and library (#1)
All checks were successful
Trigger quay.io Webhook / run (push) Successful in 5s
Reviewed-on: #1
Co-authored-by: Dominic <git@msrd0.de>
Co-committed-by: Dominic <git@msrd0.de>
2024-06-23 15:53:45 +00:00
13c03559d0
set svt-av1 preset to 7 and quality to 28
All checks were successful
Trigger quay.io Webhook / run (push) Successful in 6s
2024-06-13 15:53:40 +02:00
f9129b2351
we are using av1 for 1080p
All checks were successful
Trigger quay.io Webhook / run (push) Successful in 5s
2024-06-04 09:51:45 +02:00
9a58e39bf8
fix typo, thanks Dorian
All checks were successful
Trigger quay.io Webhook / run (push) Successful in 7s
This code path is currently unused given that we accumulate the input
into a recording.mkv file that doesn't have the cursed mp4/mov behaviour.
2024-06-04 09:47:58 +02:00
2fdb653496
seek_streams_individually can also happen with .mov files
All checks were successful
Trigger quay.io Webhook / run (push) Successful in 6s
2024-05-30 11:13:12 +02:00
cbdf55335a
detect .mov files
All checks were successful
Trigger quay.io Webhook / run (push) Successful in 6s
2024-05-27 20:05:41 +02:00
6934012c11
take audio from left instead of right channel
All checks were successful
Trigger quay.io Webhook / run (push) Successful in 6s
2024-05-27 20:05:07 +02:00
52c89dc95a
fix british dates
All checks were successful
Trigger quay.io Webhook / run (push) Successful in 5s
2024-05-26 16:15:36 +02:00
4aefb5a647
presets 2024-05-26 16:15:07 +02:00
14daa1c9f9
add missing file 2024-05-26 15:23:31 +02:00
78609dec9a
translate question 2024-05-26 15:23:23 +02:00
1dfe835587
move project structs into their own mod 2024-05-26 15:20:40 +02:00
b6bd1be12e
docker: don't copy Cargo.lock; it's not part of the repo
All checks were successful
Trigger quay.io Webhook / run (push) Successful in 5s
2024-05-26 12:47:25 +02:00
4f4711cf31
CI: fix #5
All checks were successful
Trigger quay.io Webhook / run (push) Successful in 5s
2024-05-26 12:37:56 +02:00
e55886a0df
CI: fix #4
Some checks failed
Trigger quay.io Webhook / run (push) Failing after 5s
2024-05-26 12:34:48 +02:00
867544f12e
CI: fix #3
All checks were successful
Trigger quay.io Webhook / run (push) Successful in 5s
2024-05-26 12:29:51 +02:00
2ba0f6a075
CI: apparently forgejo prefers github vendor prefix
All checks were successful
Trigger quay.io Webhook / run (push) Successful in 5s
2024-05-26 12:21:54 +02:00
17d961bc0d
CI: fix #2
All checks were successful
Trigger quay.io Webhook / run (push) Successful in 4s
2024-05-26 12:20:07 +02:00
be787b6c9c
CI: fix
Some checks failed
Trigger quay.io Webhook / run (push) Failing after 5s
2024-05-26 12:19:22 +02:00
fad24597fd
CI: add webhook triggering quay.io 2024-05-26 12:18:17 +02:00
8147ffd231
update alpine to 3.20 2024-05-26 11:47:53 +02:00
5b2d6653dc
add dockerignore 2024-05-26 11:33:42 +02:00
97a7268d4a
fix docker not compiling 2024-05-26 11:33:25 +02:00
677c35a6fd
remove test files 2024-05-25 16:21:04 +02:00
b11baf1358
question overlays work :) 2024-05-25 16:20:43 +02:00
8b57b97c80
add asked questions to project.toml 2024-05-25 14:25:23 +02:00
1e7f5f95cd
clean up console output a little 2024-05-25 14:17:07 +02:00
01e0758b6a
build question overlay svg 2024-05-25 13:53:15 +02:00
7b1681d85d
create a fancy new fast forward logo based on our videoag logo 2024-05-24 19:58:52 +02:00
9ae95fefb6
find a tool that optimizes svg's even further 2024-05-24 19:40:08 +02:00
05b650dfd7
use readable logo 2024-05-24 12:39:43 +02:00
410a4eaf96
strip the logo of any useless crap and make it actually nice to read/edit 2024-05-24 12:39:00 +02:00
3798b3382a
start understanding the logo svg 2024-05-24 12:29:10 +02:00
6de519c980
create a readable version of the logo svg 2024-05-24 12:21:49 +02:00
98f415ade7
add metadata to transcoded videos 2024-05-24 12:17:40 +02:00
9a4b3142ff
optimize some libsvtav1 params 2024-05-24 12:01:45 +02:00
d323915aed
install fonts in docker 2024-05-20 17:34:08 +02:00
0de4f35311
start putting together a dockerfile
is still missing font installation plus potentially more stuff
2024-05-18 23:24:09 +02:00
cae7b9b99b
add default language 2024-05-18 15:21:14 +02:00
4612cbdfaa
allow compiling out systemd-run support 2024-05-18 12:52:08 +02:00
0d98ab21f0
use libsvtav1 instead of av1 when vaapi is disabled 2024-05-18 12:40:18 +02:00
18 changed files with 1169 additions and 532 deletions

3
.dockerignore Normal file
View file

@ -0,0 +1,3 @@
**/target/
23*/
24*/

View file

@ -0,0 +1,18 @@
name: Trigger quay.io Webhook
on:
push:
branches: [main]
jobs:
run:
runs-on: alpine-latest
steps:
- run: |
apk add ca-certificates curl
curl -D - --fail-with-body -X POST -H 'Content-Type: application/json' --data '{
"commit": "${{github.sha}}",
"ref": "${{github.ref}}",
"default_branch": "main"
}' 'https://$token:${{secrets.quay_token}}@quay.io/webhooks/push/trigger/f21fe844-3a4b-43b0-a92f-7871d7d7ea68'
shell: ash -eo pipefail {0}

View file

@ -7,9 +7,13 @@ date = "230101"
[source]
files = ["C01.mp4", "C02.mp4", "C03.mp4"]
stereo = false
start = "2"
start = "1"
end = "12"
fast = [["5", "7"], ["9", "11"]]
fast = [["6", "8"], ["10", "11"]]
questions = [
["1.5", "3", "Hallo liebes Publikum. Ich habe leider meine Frage vergessen. Bitte entschuldigt die Störung."],
["3.5", "5", "Ah jetzt weiß ich es wieder. Meine Frage war: Was war meine Frage?"]
]
[source.metadata]
source_duration = "12.53000"
@ -22,5 +26,6 @@ source_sample_rate = 48000
preprocessed = false
asked_start_end = true
asked_fast = true
asked_questions = true
rendered = false
transcoded = []

View file

@ -10,10 +10,18 @@ license = "EPL-2.0"
[dependencies]
anyhow = "1.0"
camino = "1.1"
console = "0.15"
clap = { version = "4.4", features = ["derive"] }
fontconfig = "0.8"
harfbuzz_rs = "2.0"
indexmap = "2.2"
rational = "1.5"
serde = { version = "1.0.188", features = ["derive"] }
serde_with = "3.4"
svgwriter = "0.1"
toml = { package = "basic-toml", version = "0.1.4" }
[features]
default = ["mem_limit"]
mem_limit = []
vaapi = []

64
Dockerfile Normal file
View file

@ -0,0 +1,64 @@
FROM alpine:3.20
ARG ffmpeg_ver=7.0
RUN mkdir -p /usr/local/src/render_video
COPY LICENSE /usr/local/src/render_video/LICENSE
COPY Cargo.toml /usr/local/src/render_video/Cargo.toml
COPY src /usr/local/src/render_video/src
COPY assets /usr/local/src/render_video/assets
RUN apk add --no-cache \
dav1d fontconfig freetype harfbuzz librsvg libva lilv-libs opus svt-av1 x264-libs x265 \
font-noto inkscape libarchive-tools libgcc \
&& apk add --no-cache --virtual .build-deps \
build-base cargo pkgconf \
dav1d-dev fontconfig-dev freetype-dev harfbuzz-dev librsvg-dev libva-dev lilv-dev opus-dev svt-av1-dev x264-dev x265-dev \
# build the render_video project
&& cargo install --path /usr/local/src/render_video --root /usr/local --no-default-features \
&& rm -rf ~/.cargo \
# we install ffmpeg ourselves to get libsvtav1 support which is not part of the alpine package \
&& wget -q https://ffmpeg.org/releases/ffmpeg-${ffmpeg_ver}.tar.bz2 \
&& tar xfa ffmpeg-${ffmpeg_ver}.tar.bz2 \
&& cd ffmpeg-${ffmpeg_ver} \
&& ./configure \
--prefix=/usr/local \
--disable-asm \
--disable-librtmp \
--disable-lzma \
--disable-static \
--enable-avfilter \
--enable-gpl \
--enable-libdav1d \
--enable-libfontconfig \
--enable-libfreetype \
--enable-libharfbuzz \
--enable-libopus \
--enable-librsvg \
--enable-libsvtav1 \
--enable-libx264 \
--enable-libx265 \
--enable-lto=auto \
--enable-lv2 \
--enable-pic \
--enable-postproc \
--enable-pthreads \
--enable-shared \
--enable-vaapi \
--enable-version3 \
--optflags="-O3" \
&& make -j$(nproc) install \
&& apk del --no-cache .build-deps \
&& cd .. \
&& rm -r ffmpeg-${ffmpeg_ver} ffmpeg-${ffmpeg_ver}.tar.bz2 \
# we need Arial Black for the VideoAG logo \
&& wget -q https://www.freedesktop.org/software/fontconfig/webfonts/webfonts.tar.gz \
&& tar xfa webfonts.tar.gz \
&& cd msfonts \
&& for file in *.exe; do bsdtar xf "$file"; done \
&& install -Dm644 -t /usr/share/fonts/msfonts/ *.ttf *.TTF \
&& install -Dm644 -t /usr/share/licenses/msfonts/ Licen.TXT \
&& cd .. \
&& rm -r msfonts webfonts.tar.gz
ENTRYPOINT ["/usr/local/bin/render_video"]

17
README.md Normal file
View file

@ -0,0 +1,17 @@
**ACHTUNG!** This repository might be mirrored at different places, but the main repository is and remains at [msrd0.dev/msrd0/render_video](https://msrd0.dev/msrd0/render_video). Please redirect all issues and pull requests there.
# render_video
This "script" is an extremely fancy wrapper around ffmpeg to cut/render videos for the [VideoAG](https://video.fsmpi.rwth-aachen.de) of the [Fachschaft I/1 der RWTH Aachen University](https://fsmpi.rwth-aachen.de).
You can find a ready-to-use docker image at [`quay.io/msrd0/render_video`](https://quay.io/msrd0/render_video).
## Features
- **Extract a single audio channel from stereo recording.** We use that with one of our cameras that supports plugging a lavalier microphone (mono source) into one channel of the stereo recording, and using the camera microphone (mono source) for the other channel of the stereo recording.
- **Cut away before/after the lecture.** We don't hit the start record button the exact time that the lecture starts, and don't hit the stop button exactly when the lecture ends, so we need to cut away those unwanted bits.
- **Fast-forward through _Tafelwischpausen_.** Sometimes docents still use blackboards and need to wipe those, which can be fast-forwarded by this tool.
- **Overlay questions from the audience.** Sometimes people in the audience have questions, and those are usually poorly understood on the lavalier microphones. Therefore you can subtitle these using the other microphones in the room that don't make it into the final video and have those overlayed.
- **Add intro and outro.** We add intro and outro slides at the start/end at all lectures, which this tool can do for you.
- **Add our logo watermark.** We add a logo watermark in the bottom right corner of all videos, which this tool can do for you.
- **Rescale to lower resolutions.** We usually published videos at different resolutions, and this tool can rescale your video for all resolutions you want.

View file

@ -1,8 +1,25 @@
<svg viewBox="0 0 16 16" xmlns="http://www.w3.org/2000/svg">
<style type="text/css" id="current-color-scheme">
.ColorScheme-Text {
color:#eff0f1;
}
</style>
<path d="m8 2v12l7-6zm-7 0v12l7-6z" class="ColorScheme-Text" fill="currentColor"/>
<svg
xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"
width="300" height="300"
version="1.0">
<defs>
<path id="arrows" d="M160 60v180l105-90zM55 60v180l105-90z"/>
<clipPath id="background-cut">
<path d="M136.856 78.357a37.5 37.5 0 1 1-75 0 37.5 37.5 0 1 1 75 0"/>
<path d="M149.826-.002A150 150-26.33 1 1 0 150.17l150-.172z"/>
</clipPath>
</defs>
<g fill="#000" opacity=".4">
<path fill="#fff" d="M149.826-.002a150 150-26.33 1 1 .345 300 150 150-26.33 1 1-.345-300"/>
<path d="M136.856 78.357a37.5 37.5 0 1 1-75 0 37.5 37.5 0 1 1 75 0"/>
<path d="M149.826-.002A150 150-26.33 1 1 0 150.17l150-.172z"/>
<use href="#arrows"/>
</g>
<use href="#arrows" fill="#fff" clip-path="url(#background-cut)"/>
</svg>

Before

Width:  |  Height:  |  Size: 287 B

After

Width:  |  Height:  |  Size: 801 B

View file

@ -1,199 +1,36 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
width="299.99982"
height="300.00003"
id="svg2"
sodipodi:version="0.32"
inkscape:version="0.46"
sodipodi:docbase="C:\Eigene Dateien\Video AG"
sodipodi:docname="logo.svg"
inkscape:output_extension="org.inkscape.output.svg.inkscape"
version="1.0"
inkscape:export-filename="Q:\video AG\fs-pub-video\folien\logo-1024.png"
inkscape:export-xdpi="307.20001"
inkscape:export-ydpi="307.20001">
<defs
id="defs4">
<inkscape:perspective
sodipodi:type="inkscape:persp3d"
inkscape:vp_x="0 : 526.18109 : 1"
inkscape:vp_y="0 : 1000 : 0"
inkscape:vp_z="744.09448 : 526.18109 : 1"
inkscape:persp3d-origin="372.04724 : 350.78739 : 1"
id="perspective21" />
<linearGradient
id="linearGradient2041">
<stop
style="stop-color:#ffffff;stop-opacity:1.0000000;"
offset="0.00000000"
id="stop2043" />
<stop
style="stop-color:#7d7d7d;stop-opacity:1.0000000;"
offset="1.0000000"
id="stop2045" />
xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"
width="300" height="300"
version="1.0">
<defs>
<linearGradient id="a">
<stop offset="0" stop-color="#fff"/>
<stop offset="1" stop-color="#7d7d7d"/>
</linearGradient>
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient2041"
id="linearGradient2047"
x1="194.17578"
y1="139.59265"
x2="250.03906"
y2="139.59265"
gradientUnits="userSpaceOnUse"
gradientTransform="translate(317,-57)" />
<linearGradient xlink:href="#a" id="b" x1="194.176" x2="250.039" y1="139.593" y2="139.593" gradientTransform="translate(317 -57)" gradientUnits="userSpaceOnUse"/>
</defs>
<sodipodi:namedview
id="base"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:pageopacity="0"
inkscape:pageshadow="2"
inkscape:zoom="1.4889823"
inkscape:cx="71.681092"
inkscape:cy="217.8489"
inkscape:document-units="px"
inkscape:current-layer="layer1"
inkscape:window-width="1400"
inkscape:window-height="988"
inkscape:window-x="-8"
inkscape:window-y="-8"
showgrid="false"
inkscape:snap-bbox="false"
inkscape:snap-nodes="true"
inkscape:object-paths="false"
inkscape:object-nodes="true"
objecttolerance="10"
gridtolerance="10000"
guidetolerance="10000"
showborder="true"
inkscape:showpageshadow="true"
borderlayer="false" />
<metadata
id="metadata7">
<rdf:RDF>
<cc:Work
rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
</cc:Work>
</rdf:RDF>
</metadata>
<g
inkscape:label="Ebene 1"
inkscape:groupmode="layer"
id="layer1"
transform="translate(-438.99979,0.6379836)">
<path
sodipodi:type="arc"
style="opacity:1;fill:#ffffff;fill-opacity:1;stroke-width:1;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0"
id="path2393"
sodipodi:cx="454"
sodipodi:cy="112.36218"
sodipodi:rx="87"
sodipodi:ry="56"
d="M 541,112.36218 A 87,56 0 1 1 367,112.36218 A 87,56 0 1 1 541,112.36218 z"
transform="matrix(-1.977323e-3,-1.724137,2.678569,-3.071905e-3,288.9275,932.4654)"
inkscape:export-filename="C:\Eigene Dateien\Video AG\logo.png"
inkscape:export-xdpi="90.000000"
inkscape:export-ydpi="90.000000" />
<path
sodipodi:type="arc"
style="opacity:1;fill:#000000;fill-opacity:1;stroke-width:1;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0"
id="path1306"
sodipodi:cx="281.42856"
sodipodi:cy="270.93362"
sodipodi:rx="38.57143"
sodipodi:ry="44.285713"
d="M 319.99999,270.93362 A 38.57143,44.285713 0 1 1 242.85713,270.93362 A 38.57143,44.285713 0 1 1 319.99999,270.93362 z"
transform="matrix(0.972222,0,0,0.846774,264.7456,-151.7001)"
inkscape:export-filename="C:\Eigene Dateien\Video AG\logo.png"
inkscape:export-xdpi="90.000000"
inkscape:export-ydpi="90.000000" />
<text
xml:space="preserve"
style="font-size:72px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:start;line-height:125%;writing-mode:lr-tb;text-anchor:start;fill:url(#linearGradient2047);fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Arial Black"
x="511"
y="108.36218"
id="text1308"
sodipodi:linespacing="125%"
inkscape:export-filename="C:\Eigene Dateien\Video AG\logo.png"
inkscape:export-xdpi="90.000000"
inkscape:export-ydpi="90.000000"><tspan
sodipodi:role="line"
id="tspan1312"
x="511"
y="108.36218"
style="fill:url(#linearGradient2047);fill-opacity:1">V</tspan></text>
<path
sodipodi:type="arc"
style="opacity:1;fill:#000000;fill-opacity:1;stroke-width:1;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0"
id="path3517"
sodipodi:cx="454"
sodipodi:cy="112.36218"
sodipodi:rx="87"
sodipodi:ry="56"
d="M 541,112.36218 A 87,56 0 1 1 454,56.362183 L 454,112.36218 z"
transform="matrix(-1.977323e-3,-1.724137,2.678569,-3.071905e-3,288.9275,932.4654)"
sodipodi:start="0"
sodipodi:end="4.712389"
inkscape:export-filename="C:\Eigene Dateien\Video AG\logo.png"
inkscape:export-xdpi="90.000000"
inkscape:export-ydpi="90.000000" />
<text
xml:space="preserve"
style="font-size:28px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:start;line-height:125%;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Arial Black"
x="598"
y="105.36218"
id="text2055"
sodipodi:linespacing="125%"
inkscape:export-filename="C:\Eigene Dateien\Video AG\logo.png"
inkscape:export-xdpi="90.000000"
inkscape:export-ydpi="90.000000"><tspan
sodipodi:role="line"
id="tspan2785"
x="598"
y="105.36218"
style="font-size:36px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:start;line-height:125%;writing-mode:lr-tb;text-anchor:start;font-family:Arial Black">ideo</tspan></text>
<text
xml:space="preserve"
style="font-size:100px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:start;line-height:125%;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Arial Black"
x="511"
y="243.36218"
id="text2051"
sodipodi:linespacing="125%"
inkscape:export-filename="C:\Eigene Dateien\Video AG\logo.png"
inkscape:export-xdpi="90.000000"
inkscape:export-ydpi="90.000000"><tspan
sodipodi:role="line"
id="tspan2053"
x="511"
y="243.36218"
style="font-size:100px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:start;line-height:125%;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;font-family:Arial Black">AG</tspan></text>
<text
xml:space="preserve"
style="font-size:72px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:start;line-height:125%;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:0.25098039;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Arial Black"
x="471"
y="108.36218"
id="text4979"
sodipodi:linespacing="125%"
inkscape:export-filename="C:\Eigene Dateien\Video AG\logo.png"
inkscape:export-xdpi="90.000000"
inkscape:export-ydpi="90.000000"><tspan
sodipodi:role="line"
id="tspan4981"
x="471"
y="108.36218">V</tspan></text>
<!-- this creates the small circle inside the cutout. the first path seems to do nothing. -->
<path fill="#fff" d="M149.826-.002a150 150-26.33 1 1 .345 300 150 150-26.33 1 1-.345-300"/>
<path d="M136.856 78.357a37.5 37.5 0 1 1-75 0 37.5 37.5 0 1 1 75 0"/>
<!-- this is the big 270-degree circle -->
<path d="M149.826-.002A150 150-26.33 1 1 0 150.17l150-.172z"/>
<g font-family="Arial Black" style="line-height:125%" transform="translate(-439 .638)">
<text font-size="72">
<!-- this text is the "V" with linear gradient inside the small circle. -->
<tspan x="511" y="108.362" fill="url(#b)">V</tspan>
<!-- this is the "V" to the left of the small circle -->
<tspan x="471" y="108.362" fill="#000" fill-opacity=".251">V</tspan>
</text>
<!-- this is the "ideo" text in the upper line -->
<text x="598" y="105.362" font-size="36" fill="#fff">ideo</text>
<!-- this is the "AG" text in the lower line -->
<text x="511" y="243.362" font-size="100" fill="#fff">AG</text>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 8.2 KiB

After

Width:  |  Height:  |  Size: 1.5 KiB

54
src/cli.rs Normal file
View file

@ -0,0 +1,54 @@
//! This module contains helper functions for implementing CLI/TUI.
use crate::time::{parse_time, Time};
use console::style;
use std::{
fmt::Display,
io::{self, BufRead as _, Write as _}
};
pub fn ask(question: impl Display) -> String {
let mut stdout = io::stdout().lock();
let mut stdin = io::stdin().lock();
write!(
stdout,
"{} {} ",
style(question).bold().magenta(),
style(">").cyan()
)
.unwrap();
stdout.flush().unwrap();
let mut line = String::new();
stdin.read_line(&mut line).unwrap();
line.trim().to_owned()
}
pub fn ask_time(question: impl Display + Copy) -> Time {
let mut stdout = io::stdout().lock();
let mut stdin = io::stdin().lock();
let mut line = String::new();
loop {
line.clear();
write!(
stdout,
"{} {} ",
style(question).bold().magenta(),
style(">").cyan()
)
.unwrap();
stdout.flush().unwrap();
stdin.read_line(&mut line).unwrap();
let line = line.trim();
match parse_time(line) {
Ok(time) => return time,
Err(err) => writeln!(
stdout,
"{} {line:?}: {err}",
style("Invalid Input").bold().red()
)
.unwrap()
}
}
}

View file

@ -1,28 +1,35 @@
//! A module for writing intros and outros
use crate::{time::Date, ProjectLecture, Resolution};
use crate::{
project::{ProjectLecture, Resolution},
time::Date
};
use anyhow::anyhow;
use std::{
fmt::{self, Debug, Display, Formatter},
str::FromStr
};
use svgwriter::{
tags::{Group, Rect, TagWithPresentationAttributes, Text},
tags::{Group, Rect, TagWithPresentationAttributes as _, Text},
Graphic
};
#[derive(Clone)]
pub struct Language<'a> {
lang: &'a str,
format_date_long: fn(Date) -> String,
pub(crate) lang: &'a str,
pub(crate) format_date_long: fn(Date) -> String,
// intro
lecture_from: &'a str,
video_created_by_us: &'a str,
pub(crate) video_created_by_us: &'a str,
// outro
video_created_by: &'a str,
our_website: &'a str,
download_videos: &'a str,
questions_feedback: &'a str
questions_feedback: &'a str,
// metadata
pub(crate) from: &'a str,
// questions
pub(crate) question: &'a str
}
pub const GERMAN: Language<'static> = Language {
@ -54,7 +61,11 @@ pub const GERMAN: Language<'static> = Language {
video_created_by: "Video erstellt von der",
our_website: "Website der Fachschaft",
download_videos: "Videos herunterladen",
questions_feedback: "Fragen, Vorschläge und Feedback"
questions_feedback: "Fragen, Vorschläge und Feedback",
from: "vom",
question: "Frage"
};
pub const BRITISH: Language<'static> = Language {
@ -83,17 +94,28 @@ pub const BRITISH: Language<'static> = Language {
3 | 23 => "rd",
_ => "th"
};
format!("{:02}{th} {month} {:04}", d.day, d.year)
format!("{}{th} {month} {:04}", d.day, d.year)
},
lecture_from: "Lecture from",
video_created_by_us: "Video created by the Video AG, Fachschaft I/1",
video_created_by: "Video created by the",
our_website: "The Fachschaft's website",
download_videos: "Download videos",
questions_feedback: "Questions, Suggestions and Feedback"
questions_feedback: "Questions, Suggestions and Feedback",
from: "from",
question: "Question"
};
impl Default for Language<'static> {
fn default() -> Self {
GERMAN
}
}
impl FromStr for Language<'static> {
type Err = anyhow::Error;
@ -121,14 +143,14 @@ impl Debug for Language<'_> {
}
#[repr(u16)]
enum FontSize {
pub(crate) enum FontSize {
Huge = 72,
Large = 56,
Big = 44
}
#[repr(u16)]
enum FontWeight {
pub(crate) enum FontWeight {
Normal = 400,
SemiBold = 500,
Bold = 700

17
src/lib.rs Normal file
View file

@ -0,0 +1,17 @@
#![allow(clippy::manual_range_contains)]
#![warn(clippy::unreadable_literal, rust_2018_idioms)]
#![forbid(elided_lifetimes_in_paths, unsafe_code)]
pub mod cli;
pub mod iotro;
pub mod preset;
pub mod project;
pub mod question;
pub mod render;
pub mod time;
#[cfg(feature = "mem_limit")]
use std::sync::RwLock;
#[cfg(feature = "mem_limit")]
pub static MEM_LIMIT: RwLock<String> = RwLock::new(String::new());

View file

@ -2,30 +2,17 @@
#![warn(clippy::unreadable_literal, rust_2018_idioms)]
#![forbid(elided_lifetimes_in_paths, unsafe_code)]
mod iotro;
mod render;
mod time;
use crate::{
render::{ffmpeg::FfmpegOutputFormat, Renderer},
time::{parse_date, parse_time, Date, Time}
};
use camino::Utf8PathBuf as PathBuf;
use clap::Parser;
use iotro::Language;
use rational::Rational;
use serde::{Deserialize, Serialize};
use serde_with::{serde_as, DisplayFromStr};
use std::{
collections::BTreeSet,
fmt::Display,
fs,
io::{self, BufRead as _, Write},
str::FromStr,
sync::RwLock
use console::style;
use render_video::{
cli::{ask, ask_time},
preset::Preset,
project::{Project, ProjectLecture, ProjectSource, Resolution},
render::Renderer,
time::parse_date
};
static MEM_LIMIT: RwLock<String> = RwLock::new(String::new());
use std::fs;
#[derive(Debug, Parser)]
struct Args {
@ -33,33 +20,24 @@ struct Args {
#[clap(short = 'C', long, default_value = ".")]
directory: PathBuf,
/// The slug of the course, e.g. "23ws-malo2".
#[clap(short = 'c', long, default_value = "23ws-malo2")]
course: String,
/// The label of the course, e.g. "Mathematische Logik II".
#[clap(short, long, default_value = "Mathematische Logik II")]
label: String,
/// The docent of the course, e.g. "Prof. E. Grädel".
#[clap(short, long, default_value = "Prof. E. Grädel")]
docent: String,
/// The language of the lecture. Used for the intro and outro frame.
#[clap(short = 'L', long, default_value = "de")]
lang: Language<'static>,
/// The preset of the lecture. Can be a toml file or a known course slug.
#[clap(short, long)]
preset: String,
#[cfg(feature = "mem_limit")]
/// The memory limit for external tools like ffmpeg.
#[clap(short, long, default_value = "12G")]
mem_limit: String,
/// Transcode the final video clip down to the minimum resolution specified.
/// Transcode the final video clip down to the minimum resolution specified. If not
/// specified, the default value from the preset is used.
#[clap(short, long)]
transcode: Option<Resolution>,
/// Transcode starts at this resolution, or the source resolution, whichever is lower.
#[clap(short = 'T', long, default_value = "1440p")]
transcode_start: Resolution,
/// If not specified, the default value from the preset is used.
#[clap(short = 'T', long)]
transcode_start: Option<Resolution>,
/// Treat the audio as stereo. By default, only one channel from the input stereo will
/// be used, assuming either the other channel is backup or the same as the used.
@ -67,175 +45,18 @@ struct Args {
stereo: bool
}
macro_rules! resolutions {
($($res:ident: $width:literal x $height:literal at $bitrate:literal in $format:ident),+) => {
#[allow(non_camel_case_types, clippy::upper_case_acronyms)]
#[derive(Clone, Copy, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Serialize)]
enum Resolution {
$(
#[doc = concat!(stringify!($width), "x", stringify!($height))]
$res
),+
}
const NUM_RESOLUTIONS: usize = {
let mut num = 0;
$(num += 1; stringify!($res);)+
num
};
impl Resolution {
fn values() -> [Self; NUM_RESOLUTIONS] {
[$(Self::$res),+]
}
fn width(self) -> usize {
match self {
$(Self::$res => $width),+
}
}
fn height(self) -> usize {
match self {
$(Self::$res => $height),+
}
}
fn bitrate(self) -> u64 {
match self {
$(Self::$res => $bitrate),+
}
}
fn format(self) -> FfmpegOutputFormat {
match self {
$(Self::$res => FfmpegOutputFormat::$format),+
}
}
}
impl FromStr for Resolution {
type Err = anyhow::Error;
fn from_str(s: &str) -> anyhow::Result<Self> {
Ok(match s {
$(concat!(stringify!($height), "p") => Self::$res,)+
_ => anyhow::bail!("Unknown Resolution: {s:?}")
})
}
}
}
}
resolutions! {
nHD: 640 x 360 at 500_000 in AvcAac,
HD: 1280 x 720 at 1_000_000 in AvcAac,
FullHD: 1920 x 1080 at 2_000_000 in Av1Opus,
WQHD: 2560 x 1440 at 3_000_000 in Av1Opus,
// TODO qsx muss mal sagen wieviel bitrate für 4k
UHD: 3840 x 2160 at 4_000_000 in Av1Opus
}
#[derive(Deserialize, Serialize)]
struct Project {
lecture: ProjectLecture,
source: ProjectSource,
progress: ProjectProgress
}
#[serde_as]
#[derive(Deserialize, Serialize)]
struct ProjectLecture {
course: String,
label: String,
docent: String,
#[serde_as(as = "DisplayFromStr")]
date: Date,
#[serde_as(as = "DisplayFromStr")]
lang: Language<'static>
}
#[serde_as]
#[derive(Deserialize, Serialize)]
struct ProjectSource {
files: Vec<String>,
stereo: bool,
#[serde_as(as = "Option<DisplayFromStr>")]
start: Option<Time>,
#[serde_as(as = "Option<DisplayFromStr>")]
end: Option<Time>,
#[serde_as(as = "Vec<(DisplayFromStr, DisplayFromStr)>")]
fast: Vec<(Time, Time)>,
metadata: Option<ProjectSourceMetadata>
}
#[serde_as]
#[derive(Deserialize, Serialize)]
struct ProjectSourceMetadata {
/// The duration of the source video.
#[serde_as(as = "DisplayFromStr")]
source_duration: Time,
/// The FPS of the source video.
#[serde_as(as = "DisplayFromStr")]
source_fps: Rational,
/// The time base of the source video.
#[serde_as(as = "DisplayFromStr")]
source_tbn: Rational,
/// The resolution of the source video.
source_res: Resolution,
/// The sample rate of the source audio.
source_sample_rate: u32
}
#[derive(Default, Deserialize, Serialize)]
struct ProjectProgress {
preprocessed: bool,
asked_start_end: bool,
asked_fast: bool,
rendered: bool,
transcoded: BTreeSet<Resolution>
}
fn ask(question: impl Display) -> String {
let mut stdout = io::stdout().lock();
let mut stdin = io::stdin().lock();
writeln!(stdout, "{question}").unwrap();
let mut line = String::new();
write!(stdout, "> ").unwrap();
stdout.flush().unwrap();
stdin.read_line(&mut line).unwrap();
line.trim().to_owned()
}
fn ask_time(question: impl Display) -> Time {
let mut stdout = io::stdout().lock();
let mut stdin = io::stdin().lock();
writeln!(stdout, "{question}").unwrap();
let mut line = String::new();
loop {
line.clear();
write!(stdout, "> ").unwrap();
stdout.flush().unwrap();
stdin.read_line(&mut line).unwrap();
let line = line.trim();
match parse_time(line) {
Ok(time) => return time,
Err(err) => writeln!(stdout, "Invalid Input {line:?}: {err}").unwrap()
}
}
}
fn main() {
let args = Args::parse();
*(MEM_LIMIT.write().unwrap()) = args.mem_limit;
#[cfg(feature = "mem_limit")]
{
*(render_video::MEM_LIMIT.write().unwrap()) = args.mem_limit;
}
// process arguments
let directory = args.directory.canonicalize_utf8().unwrap();
let course = args.course;
let preset = Preset::find(&args.preset).unwrap();
let course = preset.course;
// let's see if we need to initialise the project
let project_path = directory.join("project.toml");
@ -253,7 +74,8 @@ fn main() {
let lower = name.to_ascii_lowercase();
if (lower.ends_with(".mp4")
|| lower.ends_with(".mts")
|| lower.ends_with(".mkv"))
|| lower.ends_with(".mkv")
|| lower.ends_with(".mov"))
&& !entry.file_type().unwrap().is_dir()
{
files.push(String::from(name));
@ -264,10 +86,14 @@ fn main() {
print!("I found the following source files:");
for f in &files {
print!(" {f}");
print!(" {}", style(f).bold().yellow());
}
println!();
files = ask("Which source files would you like to use? (specify multiple files separated by whitespace)")
println!(
"{} Which source files would you like to use? (specify multiple files separated by whitespace)",
style("?").bold().yellow()
);
files = ask("files")
.split_ascii_whitespace()
.map(String::from)
.collect();
@ -276,10 +102,10 @@ fn main() {
let project = Project {
lecture: ProjectLecture {
course,
label: args.label,
docent: args.docent,
label: preset.label,
docent: preset.docent,
date,
lang: args.lang
lang: preset.lang
},
source: ProjectSource {
files,
@ -287,6 +113,7 @@ fn main() {
start: None,
end: None,
fast: Vec::new(),
questions: Vec::new(),
metadata: None
},
progress: Default::default()
@ -295,7 +122,7 @@ fn main() {
project
};
let renderer = Renderer::new(&directory, &project).unwrap();
let mut renderer = Renderer::new(&directory, &project).unwrap();
let recording = renderer.recording_mkv();
// preprocess the video
@ -306,14 +133,21 @@ fn main() {
fs::write(&project_path, toml::to_string(&project).unwrap().as_bytes()).unwrap();
}
println!();
println!(
" {} Preprocessed video: {}",
style("==>").bold().cyan(),
style(recording).bold().yellow()
);
// ask the user about start and end times
if !project.progress.asked_start_end {
project.source.start = Some(ask_time(format_args!(
"Please take a look at the file {recording} and tell me the first second you want included"
)));
project.source.end = Some(ask_time(format_args!(
"Please take a look at the file {recording} and tell me the last second you want included"
)));
println!(
"{} What is the first/last second you want included?",
style("?").bold().yellow()
);
project.source.start = Some(ask_time("first"));
project.source.end = Some(ask_time("last "));
project.progress.asked_start_end = true;
fs::write(&project_path, toml::to_string(&project).unwrap().as_bytes()).unwrap();
@ -321,16 +155,16 @@ fn main() {
// ask the user about fast forward times
if !project.progress.asked_fast {
println!(
"{} Which sections of the video do you want fast-forwarded? (0 to finish)",
style("?").bold().yellow()
);
loop {
let start = ask_time(format_args!(
"Please take a look at the file {recording} and tell me the first second you want fast-forwarded. You may reply with `0` if there are no more fast-forward sections"
));
let start = ask_time("from");
if start.seconds == 0 && start.micros == 0 {
break;
}
let end = ask_time(format_args!(
"Please tell me the last second you want fast-forwarded"
));
let end = ask_time("to ");
project.source.fast.push((start, end));
}
project.progress.asked_fast = true;
@ -338,6 +172,34 @@ fn main() {
fs::write(&project_path, toml::to_string(&project).unwrap().as_bytes()).unwrap();
}
// ask the user about questions from the audience that should be subtitled
if !project.progress.asked_questions {
println!(
"{} In which sections of the video were questions asked you want subtitles for? (0 to finish)",
style("?").bold().yellow()
);
loop {
let start = ask_time("from");
if start.seconds == 0 && start.micros == 0 {
break;
}
let end = ask_time("to ");
let text = ask("text");
project.source.questions.push((start, end, text));
}
project.progress.asked_questions = true;
fs::write(&project_path, toml::to_string(&project).unwrap().as_bytes()).unwrap();
}
// render the assets
if !project.progress.rendered_assets {
renderer.render_assets(&project).unwrap();
project.progress.rendered_assets = true;
fs::write(&project_path, toml::to_string(&project).unwrap().as_bytes()).unwrap();
}
// render the video
let mut videos = Vec::new();
videos.push(if project.progress.rendered {
@ -352,16 +214,16 @@ fn main() {
});
// rescale the video
if let Some(lowest_res) = args.transcode {
for res in Resolution::values().into_iter().rev() {
if let Some(lowest_res) = args.transcode.or(preset.transcode) {
for res in Resolution::STANDARD_RESOLUTIONS.into_iter().rev() {
if res > project.source.metadata.as_ref().unwrap().source_res
|| res > args.transcode_start
|| res > args.transcode_start.unwrap_or(preset.transcode_start)
|| res < lowest_res
{
continue;
}
if !project.progress.transcoded.contains(&res) {
videos.push(renderer.rescale(res).unwrap());
videos.push(renderer.rescale(&project.lecture, res).unwrap());
project.progress.transcoded.insert(res);
fs::write(&project_path, toml::to_string(&project).unwrap().as_bytes())
@ -370,9 +232,18 @@ fn main() {
}
}
println!("\x1B[1m ==> DONE :)\x1B[0m");
println!();
println!(
" {} {}",
style("==>").bold().cyan(),
style("DONE :)").bold()
);
println!(" Videos:");
for v in &videos {
println!(" -> {v}");
println!(
" {} {}",
style("->").bold().cyan(),
style(v).bold().yellow()
);
}
}

79
src/preset.rs Normal file
View file

@ -0,0 +1,79 @@
//! This struct defines presets.
use crate::{
iotro::{Language, BRITISH, GERMAN},
project::Resolution
};
use anyhow::bail;
use serde::{Deserialize, Serialize};
use serde_with::{serde_as, DisplayFromStr};
use std::{fs, io};
/// A per-course preset bundling intro-slide and transcoding settings, so they
/// don't have to be passed on the command line for every recording.
#[serde_as]
#[derive(Deserialize, Serialize)]
pub struct Preset {
	// options for the intro slide
	/// The course slug, e.g. "23ws-malo2".
	pub course: String,
	/// The human-readable course name, e.g. "Mathematische Logik II".
	pub label: String,
	/// The docent of the course.
	pub docent: String,
	/// Course language
	#[serde(default)]
	#[serde_as(as = "DisplayFromStr")]
	pub lang: Language<'static>,
	// coding options
	/// The highest resolution to produce transcodes for.
	#[serde_as(as = "DisplayFromStr")]
	pub transcode_start: Resolution,
	/// The lowest resolution to transcode down to; `None` disables transcoding
	/// unless overridden on the command line.
	#[serde_as(as = "Option<DisplayFromStr>")]
	pub transcode: Option<Resolution>
}
/// Built-in preset for the 2023 winter semester "Mathematische Logik II"
/// lecture (German, transcoded from 1440p down to 360p).
pub fn preset_23ws_malo2() -> Preset {
	let transcode_start = "1440p".parse().unwrap();
	let transcode = Some("360p".parse().unwrap());
	Preset {
		course: "23ws-malo2".into(),
		label: "Mathematische Logik II".into(),
		docent: "Prof. E. Grädel".into(),
		lang: GERMAN,
		transcode_start,
		transcode
	}
}
/// Built-in preset for the 2024 summer semester "Algorithmische
/// Modelltheorie" lecture (German, transcoded from 1440p down to 720p).
pub fn preset_24ss_algomod() -> Preset {
	let transcode_start = "1440p".parse().unwrap();
	let transcode = Some("720p".parse().unwrap());
	Preset {
		course: "24ss-algomod".into(),
		label: "Algorithmische Modelltheorie".into(),
		docent: "Prof. E. Grädel".into(),
		lang: GERMAN,
		transcode_start,
		transcode
	}
}
/// Built-in preset for the 2024 summer semester "Introduction to Quantum
/// Computing" lecture (British English, transcoded from 1440p down to 720p).
pub fn preset_24ss_qc() -> Preset {
	let transcode_start = "1440p".parse().unwrap();
	let transcode = Some("720p".parse().unwrap());
	Preset {
		course: "24ss-qc".into(),
		label: "Introduction to Quantum Computing".into(),
		docent: "Prof. D. Unruh".into(),
		lang: BRITISH,
		transcode_start,
		transcode
	}
}
impl Preset {
	/// Look up a preset by name.
	///
	/// The name is first treated as a path: if a file of that name exists, it
	/// is parsed as a TOML [`Preset`]. Otherwise, the built-in presets are
	/// consulted.
	///
	/// # Errors
	/// Fails if the file exists but cannot be read or parsed, or if the name
	/// matches neither a file nor a built-in preset.
	pub fn find(name: &str) -> anyhow::Result<Self> {
		// `read_to_string` + `from_str` instead of `read` + `from_slice`:
		// `toml::from_slice` was removed in toml 0.8, while `from_str` is the
		// stable API, and a non-UTF-8 file now surfaces as a read error
		// instead of an opaque parse failure.
		match fs::read_to_string(name) {
			Ok(buf) => return Ok(toml::from_str(&buf)?),
			Err(err) if err.kind() == io::ErrorKind::NotFound => {},
			Err(err) => return Err(err.into())
		}
		Ok(match name {
			"23ws-malo2" => preset_23ws_malo2(),
			"24ss-algomod" => preset_24ss_algomod(),
			"24ss-qc" => preset_24ss_qc(),
			_ => bail!("Unknown preset {name:?}")
		})
	}
}

206
src/project.rs Normal file
View file

@ -0,0 +1,206 @@
//! Defines the structure of the `project.toml` file.
use crate::{
iotro::Language,
render::ffmpeg::FfmpegOutputFormat,
time::{Date, Time}
};
use rational::Rational;
use serde::{Deserialize, Serialize};
use serde_with::{serde_as, DisplayFromStr};
use std::{
cmp,
collections::BTreeSet,
fmt::{self, Display, Formatter},
str::FromStr
};
/// A video resolution, stored as (width, height) in pixels.
#[derive(Clone, Copy, Debug, Deserialize, Serialize)]
pub struct Resolution(u32, u32);
impl Resolution {
	/// The standard 16:9 resolutions that transcoding iterates over.
	pub const STANDARD_RESOLUTIONS: [Self; 5] = [
		Self(640, 360),
		Self(1280, 720),
		Self(1920, 1080),
		Self(2560, 1440),
		Self(3840, 2160)
	];

	/// Create a resolution from a width and height in pixels.
	pub fn new(width: u32, height: u32) -> Self {
		Self(width, height)
	}

	/// The width in pixels.
	pub fn width(self) -> u32 {
		self.0
	}

	/// The height in pixels.
	pub fn height(self) -> u32 {
		self.1
	}

	/// The target video bitrate for this resolution, picked by width bucket.
	pub(crate) fn bitrate(self) -> u64 {
		match self.width() {
			// 640 x 360: 500k
			0..=640 => 500_000,
			// 1280 x 720: 1M
			641..=1280 => 1_000_000,
			// 1920 x 1080: 2M
			1281..=1920 => 2_000_000,
			// 2560 x 1440: 3M
			1921..=2560 => 3_000_000,
			// 3840 x 2160: 4M
			// TODO the 4M bitrate is completely made up
			2561..=3840 => 4_000_000,
			// we'll cap everything else at 5M for no apparent reason
			_ => 5_000_000
		}
	}

	/// The default codec pair: AV1/Opus above Full HD, H.264/AAC otherwise.
	pub(crate) fn default_codec(self) -> FfmpegOutputFormat {
		if self.width() > 1920 {
			FfmpegOutputFormat::Av1Opus
		} else {
			FfmpegOutputFormat::AvcAac
		}
	}
}
impl Display for Resolution {
	/// Formats the resolution by its height, e.g. `1080p`.
	fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
		let height = self.height();
		write!(f, "{height}p")
	}
}
impl FromStr for Resolution {
	type Err = anyhow::Error;

	/// Parse common resolution names, case-insensitively: either the
	/// "<height>p" form ("1080p") or a marketing name ("fhd", "4k", ...).
	fn from_str(s: &str) -> anyhow::Result<Self> {
		let lower = s.to_lowercase();
		Ok(match lower.as_str() {
			"360p" | "nhd" => Self::new(640, 360),
			"540p" | "qhd" => Self::new(960, 540),
			"720p" | "hd" => Self::new(1280, 720),
			"900p" | "hd+" => Self::new(1600, 900),
			"1080p" | "fhd" | "fullhd" => Self::new(1920, 1080),
			"1440p" | "wqhd" => Self::new(2560, 1440),
			"2160p" | "4k" | "uhd" => Self::new(3840, 2160),
			_ => anyhow::bail!("Unknown Resolution: {s:?}")
		})
	}
}
impl Ord for Resolution {
	/// Order resolutions by their total pixel count.
	fn cmp(&self, other: &Self) -> cmp::Ordering {
		// Widen to u64 before multiplying: `u32 * u32` can overflow (panic in
		// debug builds, silent wraparound in release) for large dimensions.
		let pixels = |r: &Self| u64::from(r.0) * u64::from(r.1);
		pixels(self).cmp(&pixels(other))
	}
}
// Equality and ordering are both derived from `Ord`, i.e. two resolutions
// with the same pixel count compare equal.
impl Eq for Resolution {}

impl PartialOrd for Resolution {
	fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
		Some(self.cmp(other))
	}
}

impl PartialEq for Resolution {
	fn eq(&self, other: &Self) -> bool {
		self.cmp(other).is_eq()
	}
}
/// The contents of the `project.toml` file.
#[derive(Debug, Deserialize, Serialize)]
pub struct Project {
	/// Information about the lecture, used e.g. for the intro slide.
	pub lecture: ProjectLecture,
	/// The source recordings and the editing decisions for them.
	pub source: ProjectSource,
	/// Which processing steps have already been completed.
	pub progress: ProjectProgress
}
/// Information about the lecture being recorded.
#[serde_as]
#[derive(Debug, Deserialize, Serialize)]
pub struct ProjectLecture {
	/// The course slug, e.g. "23ws-malo2".
	pub course: String,
	/// The human-readable course name.
	pub label: String,
	/// The docent of the course.
	pub docent: String,
	/// The date of the recording.
	#[serde_as(as = "DisplayFromStr")]
	pub date: Date,
	/// The language of the course.
	// `#[serde(default)]` is equivalent to `default = "Default::default"` and
	// matches the spelling used by the `Preset` struct.
	#[serde(default)]
	#[serde_as(as = "DisplayFromStr")]
	pub lang: Language<'static>
}
/// The source recordings of a project and the cutting decisions for them.
#[serde_as]
#[derive(Debug, Deserialize, Serialize)]
pub struct ProjectSource {
	/// The recording files, relative to the project directory.
	pub files: Vec<String>,
	// NOTE(review): presumably whether the recorded audio is stereo — confirm
	// against the loudnorm/pan filter setup in the ffmpeg module.
	pub stereo: bool,
	/// The first second of the recording to include.
	#[serde_as(as = "Option<DisplayFromStr>")]
	pub start: Option<Time>,
	/// The last second of the recording to include.
	#[serde_as(as = "Option<DisplayFromStr>")]
	pub end: Option<Time>,
	/// Sections given as (start, end) that should be fast-forwarded.
	#[serde(default)]
	#[serde_as(as = "Vec<(DisplayFromStr, DisplayFromStr)>")]
	pub fast: Vec<(Time, Time)>,
	/// Audience questions as (start, end, text) to render as overlays.
	#[serde(default)]
	#[serde_as(as = "Vec<(DisplayFromStr, DisplayFromStr, _)>")]
	pub questions: Vec<(Time, Time, String)>,
	/// Metadata probed from the source; `None` until preprocessing has run.
	pub metadata: Option<ProjectSourceMetadata>
}
/// Metadata probed (via ffprobe) from the concatenated source recording
/// during preprocessing.
#[serde_as]
#[derive(Debug, Deserialize, Serialize)]
pub struct ProjectSourceMetadata {
	/// The duration of the source video.
	#[serde_as(as = "DisplayFromStr")]
	pub source_duration: Time,
	/// The FPS of the source video.
	#[serde_as(as = "DisplayFromStr")]
	pub source_fps: Rational,
	/// The time base of the source video.
	#[serde_as(as = "DisplayFromStr")]
	pub source_tbn: Rational,
	/// The resolution of the source video.
	pub source_res: Resolution,
	/// The sample rate of the source audio.
	pub source_sample_rate: u32
}
/// Tracks which processing steps have completed, so an interrupted run can
/// resume. Every field defaults, keeping older project files readable when
/// new steps are added.
#[serde_as]
#[derive(Debug, Default, Deserialize, Serialize)]
pub struct ProjectProgress {
	/// The sources were concatenated and the audio was normalised.
	#[serde(default)]
	pub preprocessed: bool,
	/// The user was asked for the start/end time of the recording.
	#[serde(default)]
	pub asked_start_end: bool,
	/// The user was asked for fast-forward sections.
	#[serde(default)]
	pub asked_fast: bool,
	/// The user was asked for audience questions to subtitle.
	#[serde(default)]
	pub asked_questions: bool,
	/// Assets (intro, outro, logo, questions) were rendered.
	#[serde(default)]
	pub rendered_assets: bool,
	/// The main video was rendered.
	#[serde(default)]
	pub rendered: bool,
	/// The resolutions the video has already been transcoded to.
	#[serde_as(as = "BTreeSet<DisplayFromStr>")]
	#[serde(default)]
	pub transcoded: BTreeSet<Resolution>
}

140
src/question.rs Normal file
View file

@ -0,0 +1,140 @@
use crate::{iotro::Language, project::Resolution};
use fontconfig::Fontconfig;
use harfbuzz_rs::{Face, Font, Owned, UnicodeBuffer};
use std::sync::OnceLock;
use svgwriter::{
tags::{Group, Path, TSpan, TagWithPresentationAttributes as _, Text},
Data, Graphic, Transform
};
/// An SVG overlay graphic that displays an audience question in a rounded,
/// semi-transparent box with a small caption tab.
pub(crate) struct Question {
	// the target resolution of the produced SVG
	res: Resolution,
	// the assembled SVG group (outline, caption and laid-out question text)
	g: Group
}
impl Question {
	/// Lay out the question text `str` into a rounded box with a caption tab
	/// and build the corresponding SVG group. All layout constants are in the
	/// fixed 1920x1080 coordinate system; scaling to `res` happens in
	/// [`Self::finish`] via the view box.
	pub(crate) fn new(res: Resolution, lang: &Language<'_>, str: &str) -> Self {
		// Load the font once; shaping below needs its glyph metrics.
		static FONT: OnceLock<Owned<Font<'static>>> = OnceLock::new();
		let font = FONT.get_or_init(|| {
			let fc = Fontconfig::new().unwrap();
			let font_path = fc.find("Noto Sans", None).unwrap().path;
			let face = Face::from_file(font_path, 0).unwrap();
			Font::new(face)
		});
		// units-per-em, used to scale shaped glyph advances to font_size
		let upem = font.face().upem();

		// constants
		let border_r = 12;
		let font_size = 44;
		let line_height = font_size * 6 / 5;
		let padding = font_size / 2;
		let margin_x = 240;
		let margin_y = padding * 3 / 2;
		let question_offset = 64;
		let question_width = 240;

		// calculated
		let box_width = 1920 - 2 * margin_x;
		let text_width = box_width - 2 * padding;

		// calculates the width of the given string by shaping it with
		// harfbuzz and summing the glyph advances, scaled from font units
		// to the chosen font size
		let width_of = |s: &str| {
			let width: i32 =
				harfbuzz_rs::shape(font, UnicodeBuffer::new().add_str(s), &[])
					.get_glyph_positions()
					.iter()
					.map(|glyph_pos| glyph_pos.x_advance)
					.sum();
			(width * font_size) / upem as i32
		};
		let space_width = width_of(" ");

		// lay out the text: greedy word wrap, breaking to a new line as soon
		// as the next word would exceed the usable text width
		let mut text = Text::new()
			.with_dominant_baseline("hanging")
			.with_transform(
				Transform::new().translate(padding, padding + font_size / 2 + border_r)
			);
		let words = str.split_whitespace();
		let mut text_height = 0;
		let mut text_x = 0;
		for word in words {
			let word_width = width_of(word);
			if text_x + word_width > text_width {
				// word doesn't fit — wrap to the start of the next line
				text_x = 0;
				text_height += line_height;
			}
			text.push(
				TSpan::new()
					.with_x(text_x)
					.with_y(text_height)
					.append(word.to_owned())
			);
			text_x += word_width + space_width;
		}
		// account for the height of the last line
		text_height += font_size;

		// calculated
		let box_height = text_height + 2 * padding + font_size / 2 + border_r;

		// the group is positioned near the bottom of the 1080p frame
		let mut g = Group::new()
			.with_fill("white")
			.with_font_family("Noto Sans")
			.with_font_size(font_size)
			.with_transform(
				Transform::new().translate(margin_x, 1080 - margin_y - box_height)
			);
		// Trace the box outline as one path: first the small raised caption
		// tab on the top edge, then the rounded main box. All corners use
		// quarter-circle arcs of radius border_r.
		let mut outline = Data::new();
		outline.move_by(border_r, 0).horiz_line_to(question_offset);
		outline
			.vert_line_by(-font_size / 2)
			.arc_by(border_r, border_r, 0, false, true, border_r, -border_r)
			.horiz_line_by(question_width)
			.arc_by(border_r, border_r, 0, false, true, border_r, border_r)
			.vert_line_by(font_size)
			.arc_by(border_r, border_r, 0, false, true, -border_r, border_r)
			.horiz_line_by(-question_width)
			.arc_by(border_r, border_r, 0, false, true, -border_r, -border_r)
			.vert_line_by(-font_size / 2)
			.move_by(question_width + 2 * border_r, 0);
		outline
			.horiz_line_to(box_width - border_r)
			.arc_by(border_r, border_r, 0, false, true, border_r, border_r)
			.vert_line_by(box_height - 2 * border_r)
			.arc_by(border_r, border_r, 0, false, true, -border_r, border_r)
			.horiz_line_to(border_r)
			.arc_by(border_r, border_r, 0, false, true, -border_r, -border_r)
			.vert_line_to(border_r)
			.arc_by(border_r, border_r, 0, false, true, border_r, -border_r);
		// white outline, dark translucent fill
		g.push(
			Path::new()
				.with_stroke("#fff")
				.with_stroke_width(3)
				.with_fill("#000")
				.with_fill_opacity(".3")
				.with_d(outline)
		);
		// the localised caption (e.g. "Question"), centred in the tab
		g.push(
			Text::new()
				.with_x(question_offset + question_width / 2 + border_r)
				.with_y(0)
				.with_dominant_baseline("middle")
				.with_text_anchor("middle")
				.with_font_weight(600)
				.append(lang.question.to_owned())
		);
		g.push(text);

		Self { res, g }
	}

	/// Wrap the assembled group into a complete SVG document sized to the
	/// target resolution, keeping the 1920x1080 internal coordinate system
	/// via the view box.
	pub(crate) fn finish(self) -> Graphic {
		let mut svg = Graphic::new();
		svg.set_width(self.res.width());
		svg.set_height(self.res.height());
		svg.set_view_box("0 0 1920 1080");
		svg.push(self.g);
		svg
	}
}

View file

@ -1,8 +1,8 @@
use super::{cmd, filter::Filter};
use crate::{
project::Resolution,
render::filter::channel,
time::{format_time, Time},
Resolution
time::{format_time, Time}
};
use anyhow::bail;
use camino::Utf8PathBuf as PathBuf;
@ -41,8 +41,8 @@ impl FfmpegInput {
cmd.arg("-r").arg(fps.to_string());
}
if let Some(start) = self.start {
if self.path.ends_with(".mp4") {
cmd.arg("-seek_streams_individualy").arg("false");
if self.path.ends_with(".mp4") || self.path.ends_with(".mov") {
cmd.arg("-seek_streams_individually").arg("false");
}
cmd.arg("-ss").arg(format_time(start));
}
@ -59,12 +59,30 @@ pub(crate) enum FfmpegOutputFormat {
Av1Flac,
/// AV1 / OPUS
Av1Opus,
/// AVC (H.264) / FLAC
AvcFlac,
/// AVC (H.264) / AAC
AvcAac
}
impl FfmpegOutputFormat {
pub(crate) fn with_flac_audio(self) -> Self {
match self {
Self::Av1Flac | Self::AvcFlac => self,
Self::Av1Opus => Self::Av1Flac,
Self::AvcAac => Self::AvcFlac
}
}
}
pub(crate) enum FfmpegOutputQuality {
Default,
VisuallyLossless
}
pub(crate) struct FfmpegOutput {
pub(crate) format: FfmpegOutputFormat,
pub(crate) quality: FfmpegOutputQuality,
pub(crate) audio_bitrate: Option<u64>,
pub(crate) video_bitrate: Option<u64>,
@ -74,6 +92,14 @@ pub(crate) struct FfmpegOutput {
pub(crate) fps_mode_vfr: bool,
pub(crate) faststart: bool,
// video container metadata
pub(crate) title: Option<String>,
pub(crate) author: Option<String>,
pub(crate) album: Option<String>,
pub(crate) year: Option<String>,
pub(crate) comment: Option<String>,
pub(crate) language: Option<String>,
pub(crate) path: PathBuf
}
@ -81,13 +107,23 @@ impl FfmpegOutput {
pub(crate) fn new(format: FfmpegOutputFormat, path: PathBuf) -> Self {
Self {
format,
quality: FfmpegOutputQuality::Default,
audio_bitrate: None,
video_bitrate: None,
fps: None,
duration: None,
time_base: None,
fps_mode_vfr: false,
faststart: false,
title: None,
author: None,
album: None,
year: None,
comment: None,
language: None,
path
}
}
@ -101,31 +137,48 @@ impl FfmpegOutput {
}
fn append_to_cmd(self, cmd: &mut Command, venc: bool, _aenc: bool, vaapi: bool) {
// select codec and bitrate
const QUALITY: &str = "22";
// select codec and bitrate/crf
if venc {
let mut vcodec: String = match self.format {
FfmpegOutputFormat::Av1Flac | FfmpegOutputFormat::Av1Opus => "av1".into(),
FfmpegOutputFormat::AvcAac => "h264".into()
let vcodec = match (self.format, vaapi) {
(FfmpegOutputFormat::Av1Flac, false)
| (FfmpegOutputFormat::Av1Opus, false) => "libsvtav1",
(FfmpegOutputFormat::Av1Flac, true)
| (FfmpegOutputFormat::Av1Opus, true) => "av1_vaapi",
(FfmpegOutputFormat::AvcAac, false)
| (FfmpegOutputFormat::AvcFlac, false) => "h264",
(FfmpegOutputFormat::AvcAac, true)
| (FfmpegOutputFormat::AvcFlac, true) => "h264_vaapi"
};
if vaapi {
vcodec = format!("{vcodec}_vaapi");
}
cmd.arg("-c:v").arg(vcodec);
if let Some(bv) = self.video_bitrate {
if vcodec == "libsvtav1" {
cmd.arg("-svtav1-params").arg("fast-decode=1");
cmd.arg("-preset").arg("7");
cmd.arg("-crf").arg(match self.quality {
FfmpegOutputQuality::Default => "28",
FfmpegOutputQuality::VisuallyLossless => "18"
});
} else if vcodec == "h264" {
match self.quality {
FfmpegOutputQuality::Default => {
cmd.arg("-preset").arg("slow");
cmd.arg("-crf").arg("21");
},
FfmpegOutputQuality::VisuallyLossless => {
// the quality is not impacted by speed, only the bitrate, and
// for this setting we don't really care about bitrate
cmd.arg("-preset").arg("veryfast");
cmd.arg("-crf").arg("17");
}
}
} else if let Some(bv) = self.video_bitrate {
cmd.arg("-b:v").arg(bv.to_string());
} else if vaapi {
cmd.arg("-rc_mode").arg("CQP");
cmd.arg("-global_quality").arg(QUALITY);
} else {
cmd.arg("-crf").arg(QUALITY);
}
} else {
cmd.arg("-c:v").arg("copy");
}
cmd.arg("-c:a").arg(match self.format {
FfmpegOutputFormat::Av1Flac => "flac",
FfmpegOutputFormat::Av1Flac | FfmpegOutputFormat::AvcFlac => "flac",
FfmpegOutputFormat::Av1Opus => "libopus",
FfmpegOutputFormat::AvcAac => "aac"
});
@ -151,6 +204,17 @@ impl FfmpegOutput {
if self.faststart {
cmd.arg("-movflags").arg("+faststart");
}
// metadata
macro_rules! add_meta {
($this:ident, $cmd:ident: $($meta:ident),+) => {
$(if let Some(value) = $this.$meta.as_deref() {
$cmd.arg("-metadata").arg(format!("{}={}", stringify!($meta), value));
})+
}
}
add_meta!(self, cmd: title, author, album, year, comment, language);
cmd.arg(self.path);
}
}
@ -258,7 +322,7 @@ impl Ffmpeg {
// initialise a vaapi device if one exists
let vaapi_device: PathBuf = "/dev/dri/renderD128".into();
let vaapi = vaapi_device.exists();
let vaapi = cfg!(feature = "vaapi") && vaapi_device.exists();
if vaapi && venc {
if vdec {
cmd.arg("-hwaccel").arg("vaapi");
@ -297,7 +361,7 @@ impl Ffmpeg {
},
FfmpegFilter::Loudnorm { stereo: false } => {
cmd.arg("-af").arg(concat!(
"pan=mono|c0=FR,",
"pan=mono|c0=FL,",
"loudnorm=dual_mono=true:print_format=summary,",
"pan=stereo|c0=c0|c1=c0,",
"aformat=sample_rates=48000"

View file

@ -19,6 +19,7 @@ pub(crate) enum Filter {
overlay_input: Cow<'static, str>,
x: Cow<'static, str>,
y: Cow<'static, str>,
repeatlast: bool,
output: Cow<'static, str>
},
@ -37,6 +38,22 @@ pub(crate) enum Filter {
output: Cow<'static, str>
},
/// Fade only video using the alpha channel.
FadeAlpha {
input: Cow<'static, str>,
direction: &'static str,
start: Time,
duration: Time,
output: Cow<'static, str>
},
/// Offset the PTS of the video by the amount of seconds.
VideoOffset {
input: Cow<'static, str>,
seconds: Time,
output: Cow<'static, str>
},
/// Generate silence. The video is copied.
GenerateSilence {
video: Cow<'static, str>,
@ -77,11 +94,13 @@ impl Filter {
overlay_input,
x,
y,
repeatlast,
output
} => {
let repeatlast: u8 = (*repeatlast).into();
writeln!(
complex,
"{}{}overlay=x={x}:y={y}{};",
"{}{}overlay=x={x}:y={y}:repeatlast={repeatlast}:eval=init{};",
channel('v', video_input),
channel('v', overlay_input),
channel('v', output)
@ -129,6 +148,34 @@ impl Filter {
)?;
},
Self::FadeAlpha {
input,
direction,
start,
duration,
output
} => {
writeln!(
complex,
"{}fade={direction}:st={start}:d={duration}:alpha=1{};",
channel('v', input),
channel('v', output)
)?;
},
Self::VideoOffset {
input,
seconds,
output
} => {
writeln!(
complex,
"{}setpts=PTS+{seconds}/TB{};",
channel('v', input),
channel('v', output)
)?;
},
Self::GenerateSilence { video, output } => {
writeln!(
complex,

View file

@ -7,12 +7,15 @@ use self::{
};
use crate::{
iotro::{intro, outro},
project::{Project, ProjectLecture, ProjectSourceMetadata, Resolution},
question::Question,
render::ffmpeg::{Ffmpeg, FfmpegInput},
time::{format_date, Time},
Project, ProjectSourceMetadata, Resolution, MEM_LIMIT
time::{format_date, format_time, Time}
};
use anyhow::{bail, Context};
use camino::{Utf8Path as Path, Utf8PathBuf as PathBuf};
use console::style;
use ffmpeg::FfmpegOutputQuality;
use std::{
borrow::Cow,
collections::VecDeque,
@ -33,24 +36,34 @@ const TRANSITION_LEN: Time = Time {
seconds: 0,
micros: 200_000
};
const QUESTION_FADE_LEN: Time = Time {
seconds: 0,
micros: 400_000
};
const FF_MULTIPLIER: usize = 8;
// logo sizes at full hd, will be scaled to source resolution
const FF_LOGO_SIZE: usize = 128;
const LOGO_SIZE: usize = 96;
const FF_LOGO_SIZE: u32 = 128;
const LOGO_SIZE: u32 = 96;
fn cmd() -> Command {
// we use systemd-run to limit the process memory
// I tried others like ulimit, chpst or isolate, but none worked
let mut cmd = Command::new("systemd-run");
cmd.arg("--scope")
.arg("-q")
.arg("--expand-environment=no")
.arg("-p")
.arg(format!("MemoryMax={}", MEM_LIMIT.read().unwrap()))
.arg("--user");
// we use busybox ash for having a shell that outputs commands with -x
cmd.arg("busybox")
.arg("ash")
#[cfg(feature = "mem_limit")]
let mut cmd = {
// we use systemd-run to limit the process memory
// I tried others like ulimit, chpst or isolate, but none worked
let mut cmd = Command::new("systemd-run");
cmd.arg("--scope")
.arg("-q")
.arg("--expand-environment=no")
.arg("-p")
.arg(format!("MemoryMax={}", crate::MEM_LIMIT.read().unwrap()))
.arg("--user");
// we use busybox ash for having a shell that outputs commands with -x
cmd.arg("busybox");
cmd
};
#[cfg(not(feature = "mem_limit"))]
let mut cmd = Command::new("busybox");
cmd.arg("ash")
.arg("-exuo")
.arg("pipefail")
.arg("-c")
@ -107,7 +120,7 @@ fn ffprobe_audio(query: &str, concat_input: &Path) -> anyhow::Result<String> {
)
}
pub(crate) struct Renderer<'a> {
pub struct Renderer<'a> {
/// The directory with all the sources.
directory: &'a Path,
@ -127,6 +140,7 @@ fn svg2mkv(
duration: Time
) -> anyhow::Result<()> {
let mut ffmpeg = Ffmpeg::new(FfmpegOutput {
quality: FfmpegOutputQuality::VisuallyLossless,
duration: Some(duration),
time_base: Some(meta.source_tbn),
fps_mode_vfr: true,
@ -145,14 +159,13 @@ fn svg2mkv(
ffmpeg.run()
}
fn svg2png(svg: &Path, png: &Path, size: usize) -> anyhow::Result<()> {
fn svg2png(svg: &Path, png: &Path, width: u32, height: u32) -> anyhow::Result<()> {
let mut cmd = cmd();
let size = size.to_string();
cmd.arg("inkscape")
.arg("-w")
.arg(&size)
.arg(width.to_string())
.arg("-h")
.arg(&size);
.arg(height.to_string());
cmd.arg(svg).arg("-o").arg(png);
let status = cmd.status()?;
@ -164,7 +177,7 @@ fn svg2png(svg: &Path, png: &Path, size: usize) -> anyhow::Result<()> {
}
impl<'a> Renderer<'a> {
pub(crate) fn new(directory: &'a Path, project: &Project) -> anyhow::Result<Self> {
pub fn new(directory: &'a Path, project: &Project) -> anyhow::Result<Self> {
let slug = format!(
"{}-{}",
project.lecture.course,
@ -173,23 +186,21 @@ impl<'a> Renderer<'a> {
let target = directory.join(&slug);
fs::create_dir_all(&target)?;
let first: PathBuf = directory.join(
project
.source
.files
.first()
.context("No source files present")?
);
let height: u32 = ffprobe_video("stream=height", &first)?
.split('\n')
.next()
.unwrap()
.parse()?;
let format = if height <= 1080 {
FfmpegOutputFormat::AvcAac
} else {
FfmpegOutputFormat::Av1Flac
};
// Ensure we have at least one input file.
project
.source
.files
.first()
.context("No source files present")?;
// In case we don't have a resolution yet, we'll assign this after preprocessing.
let format = project
.source
.metadata
.as_ref()
.map(|meta| meta.source_res.default_codec())
.unwrap_or(FfmpegOutputFormat::Av1Flac)
.with_flac_audio();
Ok(Self {
directory,
@ -199,7 +210,7 @@ impl<'a> Renderer<'a> {
})
}
pub(crate) fn recording_mkv(&self) -> PathBuf {
pub fn recording_mkv(&self) -> PathBuf {
self.target.join("recording.mkv")
}
@ -211,7 +222,15 @@ impl<'a> Renderer<'a> {
self.target.join("outro.mkv")
}
pub(crate) fn preprocess(&self, project: &mut Project) -> anyhow::Result<()> {
fn question_svg(&self, q_idx: usize) -> PathBuf {
self.target.join(format!("question{q_idx}.svg"))
}
fn question_png(&self, q_idx: usize) -> PathBuf {
self.target.join(format!("question{q_idx}.png"))
}
pub fn preprocess(&mut self, project: &mut Project) -> anyhow::Result<()> {
assert!(!project.progress.preprocessed);
let recording_txt = self.target.join("recording.txt");
@ -221,7 +240,13 @@ impl<'a> Renderer<'a> {
}
drop(file);
println!("\x1B[1m ==> Concatenating Video and Normalising Audio ...\x1B[0m");
println!();
println!(
" {} {}",
style("==>").bold().cyan(),
style("Concatenating Video and Normalising Audio ...").bold()
);
let source_sample_rate =
ffprobe_audio("stream=sample_rate", &recording_txt)?.parse()?;
let recording_mkv = self.recording_mkv();
@ -238,14 +263,7 @@ impl<'a> Renderer<'a> {
let width = ffprobe_video("stream=width", &recording_mkv)?.parse()?;
let height = ffprobe_video("stream=height", &recording_mkv)?.parse()?;
let source_res = match (width, height) {
(3840, 2160) => Resolution::UHD,
(2560, 1440) => Resolution::WQHD,
(1920, 1080) => Resolution::FullHD,
(1280, 720) => Resolution::HD,
(640, 360) => Resolution::nHD,
(width, height) => bail!("Unknown resolution: {width}x{height}")
};
let source_res = Resolution::new(width, height);
project.source.metadata = Some(ProjectSourceMetadata {
source_duration: ffprobe_video("format=duration", &recording_mkv)?.parse()?,
source_fps: ffprobe_video("stream=r_frame_rate", &recording_mkv)?.parse()?,
@ -253,15 +271,27 @@ impl<'a> Renderer<'a> {
source_res,
source_sample_rate
});
self.format = source_res.default_codec().with_flac_audio();
Ok(())
}
/// Prepare assets like intro, outro and questions.
pub fn render_assets(&self, project: &Project) -> anyhow::Result<()> {
let metadata = project.source.metadata.as_ref().unwrap();
println!("\x1B[1m ==> Preparing assets ...\x1B[0m");
println!();
println!(
" {} {}",
style("==>").bold().cyan(),
style("Preparing assets ...").bold()
);
// render intro to svg then mp4
let intro_svg = self.target.join("intro.svg");
fs::write(
&intro_svg,
intro(source_res, &project.lecture)
intro(metadata.source_res, &project.lecture)
.to_string_pretty()
.into_bytes()
)?;
@ -272,7 +302,7 @@ impl<'a> Renderer<'a> {
let outro_svg = self.target.join("outro.svg");
fs::write(
&outro_svg,
outro(&project.lecture.lang, source_res)
outro(&project.lecture.lang, metadata.source_res)
.to_string_pretty()
.into_bytes()
)?;
@ -286,7 +316,8 @@ impl<'a> Renderer<'a> {
include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/assets/logo.svg"))
)?;
let logo_png = self.target.join("logo.png");
svg2png(&logo_svg, &logo_png, LOGO_SIZE * source_res.width() / 1920)?;
let logo_size = LOGO_SIZE * metadata.source_res.width() / 1920;
svg2png(&logo_svg, &logo_png, logo_size, logo_size)?;
// copy fastforward then render to png
let fastforward_svg = self.target.join("fastforward.svg");
@ -298,19 +329,38 @@ impl<'a> Renderer<'a> {
))
)?;
let fastforward_png = self.target.join("fastforward.png");
let ff_logo_size = FF_LOGO_SIZE * metadata.source_res.width() / 1920;
svg2png(
&fastforward_svg,
&fastforward_png,
FF_LOGO_SIZE * source_res.width() / 1920
ff_logo_size,
ff_logo_size
)?;
// write questions then render to png
for (q_idx, (_, _, q_text)) in project.source.questions.iter().enumerate() {
let q = Question::new(metadata.source_res, &project.lecture.lang, q_text)
.finish()
.to_string_pretty()
.into_bytes();
let q_svg = self.question_svg(q_idx);
let q_png = self.question_png(q_idx);
fs::write(&q_svg, q)?;
svg2png(
&q_svg,
&q_png,
metadata.source_res.width(),
metadata.source_res.height()
)?;
}
Ok(())
}
/// Get the video file for a specific resolution, completely finished.
fn video_file_res(&self, res: Resolution) -> PathBuf {
let extension = match res.format() {
FfmpegOutputFormat::Av1Flac => "mkv",
let extension = match res.default_codec() {
FfmpegOutputFormat::Av1Flac | FfmpegOutputFormat::AvcFlac => "mkv",
FfmpegOutputFormat::Av1Opus => "webm",
FfmpegOutputFormat::AvcAac => "mp4"
};
@ -319,15 +369,16 @@ impl<'a> Renderer<'a> {
}
/// Get the video file directly outputed to further transcode.
pub(crate) fn video_file_output(&self) -> PathBuf {
pub fn video_file_output(&self) -> PathBuf {
self.target.join(format!("{}.mkv", self.slug))
}
pub(crate) fn render(&self, project: &mut Project) -> anyhow::Result<PathBuf> {
pub fn render(&self, project: &mut Project) -> anyhow::Result<PathBuf> {
let source_res = project.source.metadata.as_ref().unwrap().source_res;
let output = self.video_file_output();
let mut ffmpeg = Ffmpeg::new(FfmpegOutput {
quality: FfmpegOutputQuality::VisuallyLossless,
video_bitrate: Some(source_res.bitrate() * 3),
..FfmpegOutput::new(self.format, output.clone())
});
@ -343,21 +394,24 @@ impl<'a> Renderer<'a> {
let mut part3: Cow<'static, str> = outro.into();
// the recording is fun because of all the fast forwarding
let mut part2 = VecDeque::new();
let mut part2 = VecDeque::<Cow<'static, str>>::new();
let mut part2_ts = VecDeque::new();
let mut part2_start_of_the_end = None;
let mut part2_end_of_the_start = None;
// ok so ff is fun. we will add the ff'ed section as well as the part between
// the previous ff'ed section and our new section, unless we are the first
// the previous ff'ed section and our new section, unless we are the first.
project.source.fast.sort();
for (i, (ff_st, ff_end)) in project.source.fast.iter().rev().enumerate() {
if let Some(prev_end) = part2_end_of_the_start {
let duration = prev_end - *ff_end;
let recffbetween = ffmpeg.add_input(FfmpegInput {
start: Some(*ff_end),
duration: Some(prev_end - *ff_end),
duration: Some(duration),
..FfmpegInput::new(rec_file.clone())
});
part2.push_front(recffbetween.into());
part2_ts.push_front(Some((*ff_end, duration)));
} else {
part2_start_of_the_end = Some(*ff_end);
}
@ -377,6 +431,7 @@ impl<'a> Renderer<'a> {
output: recff.clone().into()
});
part2.push_front(recff.into());
part2_ts.push_front(None);
}
// if the recording was not ff'ed, perform a normal trim
@ -391,23 +446,112 @@ impl<'a> Renderer<'a> {
..FfmpegInput::new(rec_file.clone())
});
part2.push_back(rectrim.into());
part2_ts.push_back(Some((start, part2_last_part_duration)));
}
// otherwise add the first and last parts separately
else {
let duration = part2_end_of_the_start.unwrap() - start;
let rectrimst = ffmpeg.add_input(FfmpegInput {
start: Some(start),
duration: Some(part2_end_of_the_start.unwrap() - start),
duration: Some(duration),
..FfmpegInput::new(rec_file.clone())
});
part2.push_front(rectrimst.into());
part2_ts.push_front(Some((start, duration)));
part2_last_part_duration = end - part2_start_of_the_end.unwrap();
let part2_start_of_the_end = part2_start_of_the_end.unwrap();
part2_last_part_duration = end - part2_start_of_the_end;
let rectrimend = ffmpeg.add_input(FfmpegInput {
start: Some(part2_start_of_the_end.unwrap()),
start: Some(part2_start_of_the_end),
duration: Some(part2_last_part_duration),
..FfmpegInput::new(rec_file.clone())
});
part2.push_back(rectrimend.into());
part2_ts.push_back(Some((part2_start_of_the_end, part2_last_part_duration)));
}
// ok now we have a bunch of parts and a bunch of questions that want to get
// overlayed over those parts.
project.source.questions.sort();
let mut q_idx = 0;
for (i, ts) in part2_ts.iter().enumerate() {
let Some((start, duration)) = ts else {
continue;
};
loop {
if q_idx >= project.source.questions.len() {
break;
}
let (q_start, q_end, _) = &project.source.questions[q_idx];
if q_start < start {
bail!(
"Question starting at {} did not fit into the video",
format_time(*q_start)
);
}
if q_start >= start && *q_end <= *start + *duration {
// add the question as input to ffmpeg
let q_inp = ffmpeg.add_input(FfmpegInput {
loop_input: true,
fps: Some(project.source.metadata.as_ref().unwrap().source_fps),
duration: Some(*q_end - *q_start),
..FfmpegInput::new(self.question_png(q_idx))
});
// fade in the question
let q_fadein = format!("q{q_idx}fin");
ffmpeg.add_filter(Filter::FadeAlpha {
input: q_inp.into(),
direction: "in",
start: Time {
seconds: 0,
micros: 0
},
duration: QUESTION_FADE_LEN,
output: q_fadein.clone().into()
});
// fade out the question
let q_fadeout = format!("q{q_idx}fout");
ffmpeg.add_filter(Filter::FadeAlpha {
input: q_fadein.into(),
direction: "out",
start: *q_end - *q_start - QUESTION_FADE_LEN,
duration: QUESTION_FADE_LEN,
output: q_fadeout.clone().into()
});
// move the question to the correct timestamp
let q_pts = format!("q{q_idx}pts");
ffmpeg.add_filter(Filter::VideoOffset {
input: q_fadeout.into(),
seconds: *q_start - *start,
output: q_pts.clone().into()
});
// overlay the faded/offset question image onto this part
let q_overlay = format!("q{q_idx}o");
ffmpeg.add_filter(Filter::Overlay {
video_input: part2[i].clone(),
overlay_input: q_pts.into(),
x: "0".into(),
y: "0".into(),
repeatlast: false,
output: q_overlay.clone().into()
});
part2[i] = q_overlay.into();
q_idx += 1;
continue;
}
break;
}
}
if q_idx < project.source.questions.len() {
bail!(
"Question starting at {} did not fit into the video before it was over",
format_time(project.source.questions[q_idx].0)
);
}
// fade out the intro
@ -485,6 +629,7 @@ impl<'a> Renderer<'a> {
overlay_input: logoalpha.into(),
x: format!("main_w-overlay_w-{overlay_off_x}").into(),
y: format!("main_h-overlay_h-{overlay_off_y}").into(),
repeatlast: true,
output: overlay.into()
});
@ -495,14 +640,37 @@ impl<'a> Renderer<'a> {
Ok(output)
}
pub fn rescale(&self, res: Resolution) -> anyhow::Result<PathBuf> {
pub fn rescale(
&self,
lecture: &ProjectLecture,
res: Resolution
) -> anyhow::Result<PathBuf> {
let input = self.video_file_output();
let output = self.video_file_res(res);
println!("\x1B[1m ==> Rescaling to {}p\x1B[0m", res.height());
println!();
println!(
" {} {}",
style("==>").bold().cyan(),
style(format!("Rescaling to {}p", res.height())).bold()
);
let mut ffmpeg = Ffmpeg::new(FfmpegOutput {
video_bitrate: Some(res.bitrate()),
..FfmpegOutput::new(res.format(), output.clone()).enable_faststart()
title: Some(format!(
"{} {} {}",
lecture.label,
lecture.lang.from,
(lecture.lang.format_date_long)(lecture.date)
)),
author: Some(lecture.docent.clone()),
album: Some(lecture.course.clone()),
year: Some(lecture.date.year.to_string()),
comment: Some(lecture.lang.video_created_by_us.into()),
language: Some(lecture.lang.lang.into()),
..FfmpegOutput::new(res.default_codec(), output.clone()).enable_faststart()
});
ffmpeg.add_input(FfmpegInput::new(input));
ffmpeg.rescale_video(res);