From b9539ee3afbf1440b8628bf0609dc0e24aed116c Mon Sep 17 00:00:00 2001 From: metamuffin Date: Sun, 2 Mar 2025 13:43:59 +0100 Subject: change things --- server/src/routes/compat/jellyfin/mod.rs | 27 ++++++++++++--------------- 1 file changed, 12 insertions(+), 15 deletions(-) (limited to 'server/src') diff --git a/server/src/routes/compat/jellyfin/mod.rs b/server/src/routes/compat/jellyfin/mod.rs index ab36a8c..6066760 100644 --- a/server/src/routes/compat/jellyfin/mod.rs +++ b/server/src/routes/compat/jellyfin/mod.rs @@ -21,7 +21,7 @@ use crate::routes::{ use anyhow::{anyhow, Context}; use jellybase::{database::Database, CONF}; use jellycommon::{ - stream::{StreamFormat, StreamSpec}, + stream::{StreamContainer, StreamSpec}, user::{NodeUserData, WatchedState}, MediaInfo, Node, NodeID, NodeKind, SourceTrack, SourceTrackKind, Visibility, }; @@ -446,16 +446,12 @@ pub fn r_jellyfin_video_stream( .get_node_slug(id)? .ok_or(anyhow!("node does not exist"))?; let media = node.media.as_ref().ok_or(anyhow!("node has no media"))?; - Ok(Redirect::temporary(rocket::uri!(r_stream( - id, - StreamSpec { - format: StreamFormat::Matroska, - webm: Some(true), - track: (0..media.tracks.len()).collect(), - index: None, - profile: None, - } - )))) + let params = StreamSpec::Remux { + tracks: (0..media.tracks.len()).collect(), + container: StreamContainer::WebM, + } + .to_query(); + Ok(Redirect::temporary(format!("/n/{id}/stream{params}"))) } #[derive(Deserialize)] @@ -498,9 +494,7 @@ pub fn r_jellyfin_playback_bitratetest(_session: Session, Size: usize) -> Vec JellyfinItem { location_type: node.media.as_ref().map(|_| "FileSystem".to_owned()), play_access: node.media.as_ref().map(|_| "Full".to_owned()), container: node.media.as_ref().map(|_| "webm".to_owned()), - run_time_ticks: node.media.as_ref().map(|m| (m.duration * 10_000_000.) as i64), + run_time_ticks: node + .media + .as_ref() + .map(|m| (m.duration * 10_000_000.) as i64), media_sources: media_source.as_ref().map(|s| vec![s.clone()]), media_streams: media_source.as_ref().map(|s| s.media_streams.clone()), path: node -- cgit v1.2.3-70-g09d2 From 8ee25c9ddd5ba5b6f74f7ec3b212020886e366c1 Mon Sep 17 00:00:00 2001 From: metamuffin Date: Sun, 2 Mar 2025 21:01:28 +0100 Subject: a --- common/src/stream.rs | 39 ++++++++++++++- server/src/routes/stream.rs | 111 +++++++++++++++++++++-------------------- server/src/routes/ui/player.rs | 60 ++++++++++------------ 3 files changed, 123 insertions(+), 87 deletions(-) (limited to 'server/src') diff --git a/common/src/stream.rs b/common/src/stream.rs index 0e8f810..9a00ce0 100644 --- a/common/src/stream.rs +++ b/common/src/stream.rs @@ -4,7 +4,7 @@ Copyright (C) 2025 metamuffin */ use serde::{Deserialize, Serialize}; -use std::fmt::Display; +use std::{collections::BTreeMap, fmt::Display, str::FromStr}; #[derive(Debug, Clone, Deserialize, Serialize)] pub enum StreamSpec { @@ -106,6 +106,31 @@ impl StreamSpec { } => format!("?fragment&segment={segment}&track={track}&index={index}&container={container}&format={format}"), } } + pub fn from_query_kv(query: &BTreeMap) -> Result { + let get_num = |k: &'static str| { + query + .get(k) + .ok_or(k) + .and_then(|a| a.parse().map_err(|_| "invalid number")) + }; + let get_container = || { + query + .get("container") + .ok_or("container") + .and_then(|s| s.parse().map_err(|()| "unknown container")) + }; + if query.contains_key("fragment") { + Ok(Self::Fragment { + segment: get_num("segment")?, + track: get_num("track")? 
as usize, + index: get_num("index")?, + container: get_container()?, + format: get_num("format")? as usize, + }) + } else { + Err("invalid stream spec") + } + } } impl Display for StreamContainer { @@ -118,3 +143,15 @@ impl Display for StreamContainer { }) } } +impl FromStr for StreamContainer { + type Err = (); + fn from_str(s: &str) -> Result { + Ok(match s { + "webm" => StreamContainer::WebM, + "matroska" => StreamContainer::Matroska, + "webvtt" => StreamContainer::WebVTT, + "jvtt" => StreamContainer::JVTT, + _ => return Err(()), + }) + } +} diff --git a/server/src/routes/stream.rs b/server/src/routes/stream.rs index 1fb136c..d65b346 100644 --- a/server/src/routes/stream.rs +++ b/server/src/routes/stream.rs @@ -21,7 +21,11 @@ use rocket::{ response::{self, Redirect, Responder}, Either, Request, Response, State, }; -use std::{collections::HashSet, ops::Range}; +use std::{ + collections::{BTreeMap, HashSet}, + ops::Range, + sync::Arc, +}; use tokio::io::{duplex, DuplexStream}; #[head("/n/<_id>/stream?")] @@ -46,65 +50,66 @@ pub async fn r_stream( db: &State, id: &str, range: Option, - spec: StreamSpec, + spec: BTreeMap, ) -> Result, MyError> { // TODO perm let node = db .get_node_slug(id)? .ok_or(anyhow!("node does not exist"))?; - let media = node - .media - .as_ref() - .ok_or(anyhow!("item does not contain media"))?; + let media = Arc::new( + node.media + .clone() + .ok_or(anyhow!("item does not contain media"))?, + ); // TODO its unclear how requests with multiple tracks should be handled. - if spec.track.len() == 1 { - let ti = spec.track[0]; - if let TrackSource::Remote(remote_index) = media.tracks[ti].source { - session - .user - .permissions - .assert(&UserPermission::FederatedContent)?; - - let track = &node.media.as_ref().ok_or(anyhow!("no media"))?.tracks[ti]; - let host = track - .federated - .last() - .ok_or(anyhow!("federation inconsistent"))?; - - let FederationAccount { - password, username, .. - } = SECRETS - .federation - .get(host) - .ok_or(anyhow!("no credentials on the server-side"))?; - - info!("creating session on {host}"); - let instance = federation.get_instance(host)?.to_owned(); - let session = instance - .login(CreateSessionParams { - username: username.to_owned(), - password: password.to_owned(), - expire: Some(60), - drop_permissions: Some(HashSet::from_iter([ - UserPermission::ManageSelf, - UserPermission::Admin, // in case somebody federated the admin :))) - ])), - }) - .await?; - - let uri = session.stream_url( - node.slug.clone().into(), - &StreamSpec { - track: vec![remote_index], - ..spec - }, - ); - info!("federation redirect"); - return Ok(Either::Right(RedirectResponse(uri))); - } - } + // if spec.track.len() == 1 { + // let ti = spec.track[0]; + // if let TrackSource::Remote(remote_index) = media.tracks[ti].source { + // session + // .user + // .permissions + // .assert(&UserPermission::FederatedContent)?; + + // let track = &node.media.as_ref().ok_or(anyhow!("no media"))?.tracks[ti]; + // let host = track + // .federated + // .last() + // .ok_or(anyhow!("federation inconsistent"))?; + + // let FederationAccount { + // password, username, .. 
+ // } = SECRETS + // .federation + // .get(host) + // .ok_or(anyhow!("no credentials on the server-side"))?; + + // info!("creating session on {host}"); + // let instance = federation.get_instance(host)?.to_owned(); + // let session = instance + // .login(CreateSessionParams { + // username: username.to_owned(), + // password: password.to_owned(), + // expire: Some(60), + // drop_permissions: Some(HashSet::from_iter([ + // UserPermission::ManageSelf, + // UserPermission::Admin, // in case somebody federated the admin :))) + // ])), + // }) + // .await?; + + // let uri = session.stream_url( + // node.slug.clone().into(), + // &StreamSpec { + // track: vec![remote_index], + // ..spec + // }, + // ); + // info!("federation redirect"); + // return Ok(Either::Right(RedirectResponse(uri))); + // } + // } info!( "stream request (range={})", @@ -124,7 +129,7 @@ pub async fn r_stream( let head = jellystream::stream_head(&spec); - match jellystream::stream(node, spec, urange, &session.user.permissions).await { + match jellystream::stream(media, spec, urange, &session.user.permissions).await { Ok(stream) => Ok(Either::Left(StreamResponse { stream, range, diff --git a/server/src/routes/ui/player.rs b/server/src/routes/ui/player.rs index 2f28f74..aa567ab 100644 --- a/server/src/routes/ui/player.rs +++ b/server/src/routes/ui/player.rs @@ -9,16 +9,14 @@ use super::{ }; use crate::{ database::Database, - routes::{ - stream::rocket_uri_macro_r_stream, - ui::{assets::rocket_uri_macro_r_item_backdrop, error::MyResult, layout::DynLayoutPage}, + routes::ui::{ + assets::rocket_uri_macro_r_item_backdrop, error::MyResult, layout::DynLayoutPage, }, uri, }; use anyhow::anyhow; use jellybase::{permission::PermissionSetExt, CONF}; use jellycommon::{ - stream::{StreamFormat, StreamSpec}, user::{PermissionSet, PlayerKind, UserPermission}, Node, NodeID, SourceTrackKind, TrackID, }; @@ -47,13 +45,15 @@ impl PlayerConfig { fn jellynative_url(action: &str, seek: f64, secret: &str, node: &str, session: &str) -> String { let protocol = if CONF.tls { "https" } else { "http" }; let host = &CONF.hostname; - let stream_url = uri!(r_stream( - node, - StreamSpec { - format: StreamFormat::HlsMaster, - ..Default::default() - } - )); + let stream_url = ""; + // TODO + // uri!(r_stream( + // node, + // StreamSpec { + // format: StreamFormat::HlsMaster, + // ..Default::default() + // } + // )); format!("jellynative://{action}/{secret}/{session}/{seek}/{protocol}://{host}{stream_url}",) } @@ -67,14 +67,6 @@ pub fn r_player( let node = db.get_node(id)?.ok_or(anyhow!("node does not exist"))?; let native_session = |action: &str| { - let perm = [ - UserPermission::StreamFormat(StreamFormat::HlsMaster), - UserPermission::StreamFormat(StreamFormat::HlsVariant), - UserPermission::StreamFormat(StreamFormat::Fragment), - ]; - for perm in &perm { - sess.user.permissions.assert(perm)?; - } Ok(Either::Right(Redirect::temporary(jellynative_url( action, conf.t.unwrap_or(0.), @@ -82,7 +74,7 @@ pub fn r_player( &id.to_string(), &token::create( sess.user.name, - PermissionSet(perm.map(|e| (e, true)).into()), + PermissionSet::default(), // TODO chrono::Duration::hours(24), ), )))) @@ -98,19 +90,20 @@ pub fn r_player( } } - let spec = StreamSpec { - track: None - .into_iter() - .chain(conf.v) - .chain(conf.a) - .chain(conf.s) - .collect::>(), - format: StreamFormat::Matroska, - webm: Some(true), - ..Default::default() - }; + // TODO + // let spec = StreamSpec { + // track: None + // .into_iter() + // .chain(conf.v) + // .chain(conf.a) + // .chain(conf.s) 
+ // .collect::>(), + // format: StreamFormat::Matroska, + // webm: Some(true), + // ..Default::default() + // }; - let playing = !spec.track.is_empty(); + let playing = false; // !spec.track.is_empty(); let conf = player_conf(node.clone(), playing)?; Ok(Either::Left(LayoutPage { @@ -118,7 +111,8 @@ pub fn r_player( class: Some("player"), content: markup::new! { @if playing { - video[src=uri!(r_stream(&node.slug, &spec)), controls, preload="auto"]{} + // TODO + // video[src=uri!(r_stream(&node.slug, &spec)), controls, preload="auto"]{} } else { img.backdrop[src=uri!(r_item_backdrop(&node.slug, Some(2048))).to_string()]; } -- cgit v1.2.3-70-g09d2 From 80343d02e9e29e4bc55d790b491ce0d0c7bff201 Mon Sep 17 00:00:00 2001 From: metamuffin Date: Mon, 3 Mar 2025 16:58:59 +0100 Subject: a --- server/src/routes/stream.rs | 8 +++++--- stream/src/lib.rs | 3 ++- transcoder/src/fragment.rs | 2 +- 3 files changed, 8 insertions(+), 5 deletions(-) (limited to 'server/src') diff --git a/server/src/routes/stream.rs b/server/src/routes/stream.rs index d65b346..4b3d02e 100644 --- a/server/src/routes/stream.rs +++ b/server/src/routes/stream.rs @@ -28,12 +28,13 @@ use std::{ }; use tokio::io::{duplex, DuplexStream}; -#[head("/n/<_id>/stream?")] +#[head("/n/<_id>/stream?")] pub async fn r_stream_head( _sess: Session, _id: &str, - spec: StreamSpec, + spec: BTreeMap, ) -> Result, MyError> { + let spec = StreamSpec::from_query_kv(&spec).map_err(|x| anyhow!("spec invalid: {x}"))?; let head = jellystream::stream_head(&spec); Ok(Either::Left(StreamResponse { stream: duplex(0).0, @@ -52,6 +53,7 @@ pub async fn r_stream( range: Option, spec: BTreeMap, ) -> Result, MyError> { + let spec = StreamSpec::from_query_kv(&spec).map_err(|x| anyhow!("spec invalid: {x}"))?; // TODO perm let node = db .get_node_slug(id)? @@ -129,7 +131,7 @@ pub async fn r_stream( let head = jellystream::stream_head(&spec); - match jellystream::stream(media, spec, urange, &session.user.permissions).await { + match jellystream::stream(media, spec, urange).await { Ok(stream) => Ok(Either::Left(StreamResponse { stream, range, diff --git a/stream/src/lib.rs b/stream/src/lib.rs index 68b7e44..1f32239 100644 --- a/stream/src/lib.rs +++ b/stream/src/lib.rs @@ -148,8 +148,9 @@ async fn copy_stream(mut inp: File, mut out: DuplexStream, mut amount: usize) -> // TODO functions to test seekability, get live status and enumate segments trait MediaSource { + fn loaded_ranges(&self) -> Result>>; /// Seeks to some position close to, but before, `time` ticks. - fn seek(&mut self, time: u64) -> Result<()>; + fn seek(&mut self, segment: u64, time: u64) -> Result<()>; /// Retrieve headers (info and tracks) for some segment. fn segment_headers(&mut self, seg: u64) -> Result<(Info, Tracks)>; /// Returns the next block and the current segment index diff --git a/transcoder/src/fragment.rs b/transcoder/src/fragment.rs index 8822fa2..ff6a9db 100644 --- a/transcoder/src/fragment.rs +++ b/transcoder/src/fragment.rs @@ -17,7 +17,7 @@ use tokio::{ }; // TODO odd video resolutions can cause errors when transcoding to YUV42{0,2} -// TODO with an implementation that cant handle it (SVT-AV1 such an impl). +// TODO with an implementation that cant handle it (SVT-AV1 is such an impl). 
pub async fn transcode( key: &str, -- cgit v1.2.3-70-g09d2 From 7acb520f552bd1edde5c29fbf5baf6643ec4b14e Mon Sep 17 00:00:00 2001 From: metamuffin Date: Sun, 6 Apr 2025 15:40:58 +0200 Subject: a bit more progress on new streaming api --- Cargo.lock | 3 +- client/src/lib.rs | 5 +- common/src/stream.rs | 47 +++++++++++++++++- import/Cargo.toml | 5 +- import/src/lib.rs | 3 +- import/src/matroska.rs | 116 -------------------------------------------- remuxer/Cargo.toml | 4 ++ remuxer/src/lib.rs | 1 + remuxer/src/metadata.rs | 116 ++++++++++++++++++++++++++++++++++++++++++++ server/src/routes/stream.rs | 25 +++++++--- stream/src/hls.rs | 2 +- stream/src/lib.rs | 113 +++++++++++++++++++++++++++++++++--------- 12 files changed, 282 insertions(+), 158 deletions(-) delete mode 100644 import/src/matroska.rs create mode 100644 remuxer/src/metadata.rs (limited to 'server/src') diff --git a/Cargo.lock b/Cargo.lock index 9c9b347..aabeff6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1760,10 +1760,10 @@ dependencies = [ "base64", "bincode", "crossbeam-channel", - "ebml-struct", "futures", "jellybase", "jellyclient", + "jellyremuxer", "log", "rayon", "regex", @@ -1791,6 +1791,7 @@ version = "0.1.0" dependencies = [ "anyhow", "bincode", + "ebml-struct", "jellybase", "jellymatroska", "log", diff --git a/client/src/lib.rs b/client/src/lib.rs index 1497e45..d3172fd 100644 --- a/client/src/lib.rs +++ b/client/src/lib.rs @@ -165,11 +165,10 @@ impl Session { pub fn stream_url(&self, id: NodeIDOrSlug, stream_spec: &StreamSpec) -> String { format!( - "{}/n/{}/stream?{}&{}", + "{}/n/{}/stream{}&{}", self.instance.base(), id, - todo!(), - // stream_spec.to_query(), + stream_spec.to_query(), self.session_param() ) } diff --git a/common/src/stream.rs b/common/src/stream.rs index 9a00ce0..a06dad5 100644 --- a/common/src/stream.rs +++ b/common/src/stream.rs @@ -51,6 +51,47 @@ pub enum StreamSpec { }, } +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct StreamInfo { + pub name: Option, + pub segments: Vec, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct StreamSegmentInfo { + pub name: Option, + pub duration: u64, + pub tracks: Vec, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct StreamTrackInfo { + pub name: Option, + pub kind: TrackKind, + pub formats: Vec, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +#[serde(rename_all = "snake_case")] +pub enum TrackKind { + Video, + Audio, + Subtitle, +} + +#[derive(Debug, Clone, Deserialize, Serialize, Default)] +pub struct StreamFormatInfo { + pub codec: String, + pub byterate: f64, + pub remux: bool, + pub containers: Vec, + + pub pixel_count: Option, + pub samplerate: Option, + pub channels: Option, + pub bit_depth: Option, +} + #[derive(Debug, Clone, Copy, Deserialize, Serialize)] #[serde(rename_all = "lowercase")] pub enum StreamContainer { @@ -119,7 +160,11 @@ impl StreamSpec { .ok_or("container") .and_then(|s| s.parse().map_err(|()| "unknown container")) }; - if query.contains_key("fragment") { + if query.contains_key("info") { + Ok(Self::Info { + segment: get_num("segment").ok(), + }) + } else if query.contains_key("fragment") { Ok(Self::Fragment { segment: get_num("segment")?, track: get_num("track")? 
as usize, diff --git a/import/Cargo.toml b/import/Cargo.toml index 645326d..37b5a77 100644 --- a/import/Cargo.toml +++ b/import/Cargo.toml @@ -6,10 +6,7 @@ edition = "2021" [dependencies] jellybase = { path = "../base" } jellyclient = { path = "../client" } - -ebml-struct = { git = "https://codeberg.org/metamuffin/ebml-struct", features = [ - "bincode", -] } +jellyremuxer = { path = "../remuxer" } rayon = "1.10.0" crossbeam-channel = "0.5.14" diff --git a/import/src/lib.rs b/import/src/lib.rs index 3226a0a..d7f9dd7 100644 --- a/import/src/lib.rs +++ b/import/src/lib.rs @@ -15,8 +15,8 @@ use jellybase::{ CONF, SECRETS, }; use jellyclient::{Appearance, PeopleGroup, TmdbKind, TraktKind, Visibility}; +use jellyremuxer::metadata::matroska_metadata; use log::info; -use matroska::matroska_metadata; use rayon::iter::{ParallelBridge, ParallelIterator}; use regex::Regex; use std::{ @@ -36,7 +36,6 @@ use tokio::{ use trakt::Trakt; pub mod infojson; -pub mod matroska; pub mod tmdb; pub mod trakt; diff --git a/import/src/matroska.rs b/import/src/matroska.rs deleted file mode 100644 index 1593463..0000000 --- a/import/src/matroska.rs +++ /dev/null @@ -1,116 +0,0 @@ -/* - This file is part of jellything (https://codeberg.org/metamuffin/jellything) - which is licensed under the GNU Affero General Public License (version 3); see /COPYING. - Copyright (C) 2025 metamuffin -*/ -use anyhow::{Context, Result}; -use bincode::{Decode, Encode}; -use ebml_struct::{ - ids::*, - matroska::*, - read::{EbmlReadExt, TagRead}, -}; -use jellybase::{ - assetfed::AssetInner, - cache::{cache_file, cache_memory}, - common::Asset, -}; -use log::{info, warn}; -use std::{ - fs::File, - io::{BufReader, ErrorKind, Read, Write}, - path::Path, - sync::Arc, -}; - -#[derive(Encode, Decode, Clone)] -pub(crate) struct MatroskaMetadata { - pub info: Option, - pub tracks: Option, - pub cover: Option, - pub chapters: Option, - pub tags: Option, - pub infojson: Option>, -} -pub(crate) fn matroska_metadata(path: &Path) -> Result>> { - cache_memory(&["mkmeta-v2", path.to_string_lossy().as_ref()], || { - let mut magic = [0; 4]; - File::open(path)?.read_exact(&mut magic).ok(); - if !matches!(magic, [0x1A, 0x45, 0xDF, 0xA3]) { - return Ok(None); - } - - info!("reading {path:?}"); - let mut file = BufReader::new(File::open(path)?); - let mut file = file.by_ref().take(u64::MAX); - - let (x, mut ebml) = file.read_tag()?; - assert_eq!(x, EL_EBML); - let ebml = Ebml::read(&mut ebml).unwrap(); - assert!(ebml.doc_type == "matroska" || ebml.doc_type == "webm"); - let (x, mut segment) = file.read_tag()?; - assert_eq!(x, EL_SEGMENT); - - let mut info = None; - let mut infojson = None; - let mut tracks = None; - let mut cover = None; - let mut chapters = None; - let mut tags = None; - loop { - let (x, mut seg) = match segment.read_tag() { - Ok(o) => o, - Err(e) if e.kind() == ErrorKind::UnexpectedEof => break, - Err(e) => return Err(e.into()), - }; - match x { - EL_INFO => info = Some(Info::read(&mut seg).context("info")?), - EL_TRACKS => tracks = Some(Tracks::read(&mut seg).context("tracks")?), - EL_CHAPTERS => chapters = Some(Chapters::read(&mut seg).context("chapters")?), - EL_TAGS => tags = Some(Tags::read(&mut seg).context("tags")?), - EL_ATTACHMENTS => { - let attachments = Attachments::read(&mut seg).context("attachments")?; - for f in attachments.files { - match f.name.as_str() { - "info.json" => { - infojson = Some(f.data); - } - "cover.webp" | "cover.png" | "cover.jpg" | "cover.jpeg" - | "cover.avif" => { - cover = Some( - 
AssetInner::Cache(cache_file( - &["att-cover", path.to_string_lossy().as_ref()], - move |mut file| { - file.write_all(&f.data)?; - Ok(()) - }, - )?) - .ser(), - ) - } - _ => (), - } - } - } - EL_VOID | EL_CRC32 | EL_CUES | EL_SEEKHEAD => { - seg.consume()?; - } - EL_CLUSTER => { - break; - } - id => { - warn!("unknown top-level element {id:x}"); - seg.consume()?; - } - } - } - Ok(Some(MatroskaMetadata { - chapters, - cover, - info, - infojson, - tags, - tracks, - })) - }) -} diff --git a/remuxer/Cargo.toml b/remuxer/Cargo.toml index 2313dcc..16713df 100644 --- a/remuxer/Cargo.toml +++ b/remuxer/Cargo.toml @@ -13,3 +13,7 @@ log = { workspace = true } serde = { version = "1.0.217", features = ["derive"] } bincode = { version = "2.0.0-rc.3", features = ["serde"] } + +ebml-struct = { git = "https://codeberg.org/metamuffin/ebml-struct", features = [ + "bincode", +] } diff --git a/remuxer/src/lib.rs b/remuxer/src/lib.rs index a98ffad..cc4b39b 100644 --- a/remuxer/src/lib.rs +++ b/remuxer/src/lib.rs @@ -9,6 +9,7 @@ pub mod remux; pub mod seek_index; pub mod segment_extractor; pub mod trim_writer; +pub mod metadata; pub use fragment::write_fragment_into; pub use remux::remux_stream_into; diff --git a/remuxer/src/metadata.rs b/remuxer/src/metadata.rs new file mode 100644 index 0000000..4ddad20 --- /dev/null +++ b/remuxer/src/metadata.rs @@ -0,0 +1,116 @@ +/* + This file is part of jellything (https://codeberg.org/metamuffin/jellything) + which is licensed under the GNU Affero General Public License (version 3); see /COPYING. + Copyright (C) 2025 metamuffin +*/ +use anyhow::{Context, Result}; +use bincode::{Decode, Encode}; +use ebml_struct::{ + ids::*, + matroska::*, + read::{EbmlReadExt, TagRead}, +}; +use jellybase::{ + assetfed::AssetInner, + cache::{cache_file, cache_memory}, + common::Asset, +}; +use log::{info, warn}; +use std::{ + fs::File, + io::{BufReader, ErrorKind, Read, Write}, + path::Path, + sync::Arc, +}; + +#[derive(Debug, Encode, Decode, Clone)] +pub struct MatroskaMetadata { + pub info: Option, + pub tracks: Option, + pub cover: Option, + pub chapters: Option, + pub tags: Option, + pub infojson: Option>, +} +pub fn matroska_metadata(path: &Path) -> Result>> { + cache_memory(&["mkmeta-v2", path.to_string_lossy().as_ref()], || { + let mut magic = [0; 4]; + File::open(path)?.read_exact(&mut magic).ok(); + if !matches!(magic, [0x1A, 0x45, 0xDF, 0xA3]) { + return Ok(None); + } + + info!("reading {path:?}"); + let mut file = BufReader::new(File::open(path)?); + let mut file = file.by_ref().take(u64::MAX); + + let (x, mut ebml) = file.read_tag()?; + assert_eq!(x, EL_EBML); + let ebml = Ebml::read(&mut ebml).unwrap(); + assert!(ebml.doc_type == "matroska" || ebml.doc_type == "webm"); + let (x, mut segment) = file.read_tag()?; + assert_eq!(x, EL_SEGMENT); + + let mut info = None; + let mut infojson = None; + let mut tracks = None; + let mut cover = None; + let mut chapters = None; + let mut tags = None; + loop { + let (x, mut seg) = match segment.read_tag() { + Ok(o) => o, + Err(e) if e.kind() == ErrorKind::UnexpectedEof => break, + Err(e) => return Err(e.into()), + }; + match x { + EL_INFO => info = Some(Info::read(&mut seg).context("info")?), + EL_TRACKS => tracks = Some(Tracks::read(&mut seg).context("tracks")?), + EL_CHAPTERS => chapters = Some(Chapters::read(&mut seg).context("chapters")?), + EL_TAGS => tags = Some(Tags::read(&mut seg).context("tags")?), + EL_ATTACHMENTS => { + let attachments = Attachments::read(&mut seg).context("attachments")?; + for f in attachments.files { + 
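+                    // Only two attachment names are recognized: an `info.json`
+                    // sidecar (kept raw for the importer to parse later) and a
+                    // cover image, which is written into the asset cache and
+                    // exposed as an `AssetInner::Cache` asset.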
match f.name.as_str() { + "info.json" => { + infojson = Some(f.data); + } + "cover.webp" | "cover.png" | "cover.jpg" | "cover.jpeg" + | "cover.avif" => { + cover = Some( + AssetInner::Cache(cache_file( + &["att-cover", path.to_string_lossy().as_ref()], + move |mut file| { + file.write_all(&f.data)?; + Ok(()) + }, + )?) + .ser(), + ) + } + _ => (), + } + } + } + EL_VOID | EL_CRC32 | EL_CUES | EL_SEEKHEAD => { + seg.consume()?; + } + EL_CLUSTER => { + break; + } + id => { + warn!("unknown top-level element {id:x}"); + seg.consume()?; + } + } + } + Ok(Some(MatroskaMetadata { + chapters, + cover, + info, + infojson, + tags, + tracks, + })) + }) +} diff --git a/server/src/routes/stream.rs b/server/src/routes/stream.rs index 4b3d02e..8f97aec 100644 --- a/server/src/routes/stream.rs +++ b/server/src/routes/stream.rs @@ -6,13 +6,9 @@ use super::ui::{account::session::Session, error::MyError}; use crate::database::Database; use anyhow::{anyhow, Result}; -use jellybase::{federation::Federation, permission::PermissionSetExt, SECRETS}; -use jellycommon::{ - config::FederationAccount, - stream::StreamSpec, - user::{CreateSessionParams, UserPermission}, - TrackSource, -}; +use jellybase::{assetfed::AssetInner, federation::Federation}; +use jellycommon::{stream::StreamSpec, TrackSource}; +use jellystream::SMediaInfo; use log::{info, warn}; use rocket::{ get, head, @@ -22,7 +18,7 @@ use rocket::{ Either, Request, Response, State, }; use std::{ - collections::{BTreeMap, HashSet}, + collections::{BTreeMap, BTreeSet}, ops::Range, sync::Arc, }; @@ -131,6 +127,19 @@ pub async fn r_stream( let head = jellystream::stream_head(&spec); + let mut sources = BTreeSet::new(); + for t in &media.tracks { + if let TrackSource::Local(x) = &t.source { + if let AssetInner::LocalTrack(m) = AssetInner::deser(&x.0)? 
{ + sources.insert(m.path); + } + } + } + let media = Arc::new(SMediaInfo { + files: sources, + info: node, + }); + match jellystream::stream(media, spec, urange).await { Ok(stream) => Ok(Either::Left(StreamResponse { stream, diff --git a/stream/src/hls.rs b/stream/src/hls.rs index 56edd2d..27630b2 100644 --- a/stream/src/hls.rs +++ b/stream/src/hls.rs @@ -34,8 +34,8 @@ pub async fn hls_master_stream( let uri = format!( "stream?{}", StreamSpec::HlsVariant { - track: i, segment, + track: i, container, format: 0 } diff --git a/stream/src/lib.rs b/stream/src/lib.rs index 1f32239..751ecfa 100644 --- a/stream/src/lib.rs +++ b/stream/src/lib.rs @@ -12,17 +12,28 @@ pub mod webvtt; use anyhow::Result; use ebml_struct::matroska::{Info, Tracks}; use jellybase::common::{ - stream::{StreamContainer, StreamSpec}, - LocalTrack, MediaInfo, Node, + stream::{ + StreamContainer, StreamFormatInfo, StreamInfo, StreamSegmentInfo, StreamSpec, + StreamTrackInfo, TrackKind, + }, + LocalTrack, Node, }; use jellymatroska::block::LacingType; -use std::{ops::Range, sync::Arc}; +use jellyremuxer::metadata::{matroska_metadata, MatroskaMetadata}; +use std::{collections::BTreeSet, ops::Range, path::PathBuf, sync::Arc}; use tokio::{ fs::File, io::{duplex, AsyncReadExt, AsyncWriteExt, DuplexStream}, + task::spawn_blocking, }; use tokio_util::io::SyncIoBridge; +#[derive(Debug)] +pub struct SMediaInfo { + pub info: Arc, + pub files: BTreeSet, +} + pub struct StreamHead { pub content_type: &'static str, pub range_supported: bool, @@ -54,7 +65,7 @@ pub fn stream_head(spec: &StreamSpec) -> StreamHead { } pub async fn stream( - info: Arc, + info: Arc, spec: StreamSpec, range: Range, ) -> Result { @@ -73,7 +84,7 @@ pub async fn stream( container, format, } => todo!(), - StreamSpec::Info { segment } => todo!(), + StreamSpec::Info { segment } => write_stream_info(info, b).await?, StreamSpec::FragmentIndex { segment, track } => todo!(), StreamSpec::Fragment { segment, @@ -87,6 +98,64 @@ pub async fn stream( Ok(a) } +async fn async_matroska_metadata(path: PathBuf) -> Result>> { + Ok(spawn_blocking(move || matroska_metadata(&path)).await??) 
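+    // (the double `?` first propagates a JoinError from spawn_blocking, then
+    // the Result produced by matroska_metadata itself)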
+} + +async fn stream_info(info: Arc) -> Result { + let mut metadata = Vec::new(); + for path in &info.files { + metadata.extend((*async_matroska_metadata(path.clone()).await?).clone()); + } + + let mut tracks = Vec::new(); + + for m in &metadata { + if let Some(t) = &m.tracks { + for t in &t.entries { + let mut formats = Vec::new(); + formats.push(StreamFormatInfo { + codec: t.codec_id.to_string(), + remux: true, + byterate: 10., // TODO + containers: [StreamContainer::Matroska].to_vec(), + bit_depth: t.audio.as_ref().and_then(|a| a.bit_depth.map(|e| e as u8)), + samplerate: t.audio.as_ref().map(|a| a.sampling_frequency), + channels: t.audio.as_ref().map(|a| a.channels as usize), + pixel_count: t.video.as_ref().map(|v| v.pixel_width * v.pixel_height), + ..Default::default() + }); + tracks.push(StreamTrackInfo { + name: None, + kind: match t.track_type { + 1 => TrackKind::Video, + 2 => TrackKind::Audio, + 17 => TrackKind::Subtitle, + _ => todo!(), + }, + formats, + }) + } + } + } + + let segment = StreamSegmentInfo { + name: None, + duration: 0, + tracks, + }; + Ok(StreamInfo { + name: info.info.title.clone(), + segments: vec![segment], + }) +} + +async fn write_stream_info(info: Arc, mut b: DuplexStream) -> Result<()> { + let info = stream_info(info).await?; + b.write_all(&serde_json::to_vec(&info)?).await?; + Ok(()) +} + async fn remux_stream( node: Arc, local_tracks: Vec, @@ -146,20 +215,20 @@ async fn copy_stream(mut inp: File, mut out: DuplexStream, mut amount: usize) -> } } -// TODO functions to test seekability, get live status and enumate segments -trait MediaSource { - fn loaded_ranges(&self) -> Result>>; - /// Seeks to some position close to, but before, `time` ticks. - fn seek(&mut self, segment: u64, time: u64) -> Result<()>; - /// Retrieve headers (info and tracks) for some segment. - fn segment_headers(&mut self, seg: u64) -> Result<(Info, Tracks)>; - /// Returns the next block and the current segment index - fn next(&mut self) -> Result>; -} -pub struct AbsBlock { - track: u64, - pts: u64, - keyframe: bool, - lacing: Option, - data: Vec, -} +// // TODO functions to test seekability, get live status and enumate segments +// trait MediaSource { +// fn loaded_range(&self) -> Result>; +// /// Seeks to some position close to, but before, `time` ticks. +// fn seek(&mut self, segment: u64, time: u64) -> Result<()>; +// /// Retrieve headers (info and tracks) for some segment. 
+// fn segment_headers(&mut self, seg: u64) -> Result<(Info, Tracks)>; +// /// Returns the next block and the current segment index +// fn next(&mut self) -> Result>; +// } +// pub struct AbsBlock { +// track: u64, +// pts: u64, +// keyframe: bool, +// lacing: Option, +// data: Vec, +// } -- cgit v1.2.3-70-g09d2 From a3afc2756a52f7d6fedc928b97c8ff3eb1ade338 Mon Sep 17 00:00:00 2001 From: metamuffin Date: Mon, 14 Apr 2025 13:41:42 +0200 Subject: lots of rewriting and removing dumb code --- base/src/assetfed.rs | 7 - base/src/database.rs | 27 +- common/src/lib.rs | 1 - common/src/stream.rs | 53 ++-- import/src/lib.rs | 10 +- remuxer/src/extract.rs | 17 +- remuxer/src/fragment.rs | 101 ++++---- remuxer/src/lib.rs | 63 ++--- remuxer/src/remux.rs | 572 ++++++++++++++++++++--------------------- server/src/routes/ui/player.rs | 22 +- stream/src/fragment.rs | 45 ++-- stream/src/fragment_index.rs | 32 +++ stream/src/hls.rs | 72 +++--- stream/src/jhls.rs | 47 ---- stream/src/lib.rs | 36 ++- stream/src/webvtt.rs | 3 +- 16 files changed, 542 insertions(+), 566 deletions(-) create mode 100644 stream/src/fragment_index.rs delete mode 100644 stream/src/jhls.rs (limited to 'server/src') diff --git a/base/src/assetfed.rs b/base/src/assetfed.rs index 575188d..697cacb 100644 --- a/base/src/assetfed.rs +++ b/base/src/assetfed.rs @@ -78,11 +78,4 @@ impl AssetInner { pub fn is_federated(&self) -> bool { matches!(self, Self::Federated { .. }) } - - pub fn as_local_track(self) -> Option { - match self { - AssetInner::LocalTrack(x) => Some(x), - _ => None, - } - } } diff --git a/base/src/database.rs b/base/src/database.rs index 407db29..32f1464 100644 --- a/base/src/database.rs +++ b/base/src/database.rs @@ -14,7 +14,8 @@ use redb::{Durability, ReadableTable, StorageError, TableDefinition}; use std::{ fs::create_dir_all, hash::{DefaultHasher, Hasher}, - path::Path, + path::{Path, PathBuf}, + str::FromStr, sync::{Arc, RwLock}, time::SystemTime, }; @@ -38,6 +39,8 @@ const T_NODE_EXTERNAL_ID: TableDefinition<(&str, &str), [u8; 32]> = TableDefinition::new("node_external_id"); const T_IMPORT_FILE_MTIME: TableDefinition<&[u8], u64> = TableDefinition::new("import_file_mtime"); const T_NODE_MTIME: TableDefinition<[u8; 32], u64> = TableDefinition::new("node_mtime"); +const T_NODE_MEDIA_PATHS: TableDefinition<([u8; 32], &str), ()> = + TableDefinition::new("node_media_paths"); #[derive(Clone)] pub struct Database { @@ -67,6 +70,7 @@ impl Database { txn.open_table(T_NODE_MTIME)?; txn.open_table(T_NODE_CHILDREN)?; txn.open_table(T_NODE_EXTERNAL_ID)?; + txn.open_table(T_NODE_MEDIA_PATHS)?; txn.open_table(T_IMPORT_FILE_MTIME)?; txn.commit()?; } @@ -123,17 +127,20 @@ impl Database { let mut t_node_children = txn.open_table(T_NODE_CHILDREN)?; let mut t_node_external_id = txn.open_table(T_NODE_EXTERNAL_ID)?; let mut t_import_file_mtime = txn.open_table(T_IMPORT_FILE_MTIME)?; + let mut t_node_media_paths = txn.open_table(T_NODE_MEDIA_PATHS)?; t_node.retain(|_, _| false)?; t_node_mtime.retain(|_, _| false)?; t_node_children.retain(|_, _| false)?; t_node_external_id.retain(|_, _| false)?; t_import_file_mtime.retain(|_, _| false)?; + t_node_media_paths.retain(|_, _| false)?; drop(( t_node, t_node_mtime, t_node_children, t_node_external_id, t_import_file_mtime, + t_node_media_paths, )); txn.set_durability(Durability::Eventual); txn.commit()?; @@ -189,6 +196,24 @@ impl Database { txn.commit()?; Ok(()) } + pub fn get_node_media_paths(&self, id: NodeID) -> Result> { + let txn = self.inner.begin_read()?; + let table = 
txn.open_table(T_NODE_MEDIA_PATHS)?; + let mut paths = Vec::new(); + // TODO fix this + for p in table.range((id.0, "\0")..(id.0, "\x7f"))? { + paths.push(PathBuf::from_str(p?.0.value().1)?); + } + Ok(paths) + } + pub fn insert_node_media_path(&self, id: NodeID, path: &Path) -> Result<()> { + let txn = self.inner.begin_write()?; + let mut table = txn.open_table(T_NODE_MEDIA_PATHS)?; + table.insert((id.0, path.to_str().unwrap()), ())?; + drop(table); + txn.commit()?; + Ok(()) + } pub fn update_node_udata( &self, diff --git a/common/src/lib.rs b/common/src/lib.rs index ce333eb..00f07b6 100644 --- a/common/src/lib.rs +++ b/common/src/lib.rs @@ -171,7 +171,6 @@ pub type TrackID = usize; pub struct LocalTrack { pub path: PathBuf, pub track: TrackID, - pub codec_private: Option>, } #[derive(Debug, Clone, Deserialize, Serialize, Encode, Decode)] diff --git a/common/src/stream.rs b/common/src/stream.rs index a06dad5..75349cc 100644 --- a/common/src/stream.rs +++ b/common/src/stream.rs @@ -6,10 +6,15 @@ use serde::{Deserialize, Serialize}; use std::{collections::BTreeMap, fmt::Display, str::FromStr}; +pub type SegmentNum = usize; +pub type TrackNum = usize; +pub type FormatNum = usize; +pub type IndexNum = usize; + #[derive(Debug, Clone, Deserialize, Serialize)] pub enum StreamSpec { Whep { - track: usize, + track: TrackNum, seek: u64, }, WhepControl { @@ -20,34 +25,34 @@ pub enum StreamSpec { container: StreamContainer, }, Original { - track: usize, + track: TrackNum, }, HlsSuperMultiVariant { container: StreamContainer, }, HlsMultiVariant { - segment: u64, + segment: SegmentNum, container: StreamContainer, }, HlsVariant { - segment: u64, - track: usize, + segment: SegmentNum, + track: TrackNum, container: StreamContainer, - format: usize, + format: FormatNum, }, Info { segment: Option, }, FragmentIndex { - segment: u64, - track: usize, + segment: SegmentNum, + track: TrackNum, }, Fragment { - segment: u64, - track: usize, - index: u64, + segment: SegmentNum, + track: TrackNum, + index: IndexNum, container: StreamContainer, - format: usize, + format: FormatNum, }, } @@ -60,7 +65,7 @@ pub struct StreamInfo { #[derive(Debug, Clone, Deserialize, Serialize)] pub struct StreamSegmentInfo { pub name: Option, - pub duration: u64, + pub duration: f64, pub tracks: Vec, } @@ -92,7 +97,7 @@ pub struct StreamFormatInfo { pub bit_depth: Option, } -#[derive(Debug, Clone, Copy, Deserialize, Serialize)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize)] #[serde(rename_all = "lowercase")] pub enum StreamContainer { WebM, @@ -164,13 +169,25 @@ impl StreamSpec { Ok(Self::Info { segment: get_num("segment").ok(), }) + } else if query.contains_key("hlsmultivariant") { + Ok(Self::HlsMultiVariant { + segment: get_num("segment")? as SegmentNum, + container: get_container()?, + }) + } else if query.contains_key("hlsvariant") { + Ok(Self::HlsVariant { + segment: get_num("segment")? as SegmentNum, + track: get_num("track")? as TrackNum, + format: get_num("format")? as FormatNum, + container: get_container()?, + }) } else if query.contains_key("fragment") { Ok(Self::Fragment { - segment: get_num("segment")?, - track: get_num("track")? as usize, - index: get_num("index")?, + segment: get_num("segment")? as SegmentNum, + track: get_num("track")? as TrackNum, + format: get_num("format")? as FormatNum, + index: get_num("index")? as IndexNum, container: get_container()?, - format: get_num("format")? 
as usize, }) } else { Err("invalid stream spec") diff --git a/import/src/lib.rs b/import/src/lib.rs index 5607450..3ea42f1 100644 --- a/import/src/lib.rs +++ b/import/src/lib.rs @@ -7,14 +7,13 @@ use anyhow::{anyhow, bail, Context, Result}; use infojson::YVideo; use jellybase::{ assetfed::AssetInner, - common::{ - Chapter, LocalTrack, MediaInfo, Node, NodeID, NodeKind, Rating, SourceTrack, - SourceTrackKind, TrackSource, - }, + common::{Chapter, MediaInfo, Node, NodeID, NodeKind, Rating, SourceTrack, SourceTrackKind}, database::Database, CONF, SECRETS, }; -use jellyclient::{Appearance, PeopleGroup, TmdbKind, TraktKind, Visibility}; +use jellyclient::{ + Appearance, LocalTrack, PeopleGroup, TmdbKind, TrackSource, TraktKind, Visibility, +}; use jellyremuxer::metadata::checked_matroska_metadata; use log::info; use rayon::iter::{ParallelBridge, ParallelIterator}; @@ -397,7 +396,6 @@ fn import_media_file( }, source: TrackSource::Local( AssetInner::LocalTrack(LocalTrack { - codec_private: track.codec_private, path: path.to_owned(), track: track.track_number as usize, }) diff --git a/remuxer/src/extract.rs b/remuxer/src/extract.rs index 12e4003..15c1e9d 100644 --- a/remuxer/src/extract.rs +++ b/remuxer/src/extract.rs @@ -5,29 +5,22 @@ */ use crate::seek_index::get_seek_index; use anyhow::{anyhow, bail}; -use jellybase::common::LocalTrack; use jellymatroska::{block::Block, read::EbmlReader, Master, MatroskaTag}; use log::debug; use std::{fs::File, io::BufReader, path::PathBuf}; pub type TrackExtract = Vec<(u64, Option, Vec)>; -pub fn extract_track(path_base: PathBuf, track_info: LocalTrack) -> anyhow::Result { - let source_path = path_base.join(track_info.path); - let file = File::open(&source_path)?; +pub fn extract_track(path: PathBuf, track: u64) -> anyhow::Result { + let file = File::open(&path)?; let mut reader = EbmlReader::new(BufReader::new(file)); - let index = get_seek_index(&source_path)?; - let index = index - .get(&(track_info.track as u64)) - .ok_or(anyhow!("track missing"))?; + let index = get_seek_index(&path)?; + let index = index.get(&track).ok_or(anyhow!("track missing"))?; let mut out = Vec::new(); for b in &index.blocks { reader.seek(b.source_off, MatroskaTag::BlockGroup(Master::Start))?; let (duration, block) = read_group(&mut reader)?; - assert_eq!( - track_info.track, block.track as usize, - "seek index is wrong" - ); + assert_eq!(track, block.track, "seek index is wrong"); out.push((b.pts, duration, block.data)) } Ok(out) diff --git a/remuxer/src/fragment.rs b/remuxer/src/fragment.rs index 9fa68f3..73fe046 100644 --- a/remuxer/src/fragment.rs +++ b/remuxer/src/fragment.rs @@ -5,11 +5,10 @@ */ use crate::{ - ebml_header, ebml_segment_info, ebml_track_entry, seek_index::get_seek_index, - segment_extractor::SegmentExtractIter, + ebml_header, ebml_segment_info, ebml_track_entry, metadata::matroska_metadata, + seek_index::get_seek_index, segment_extractor::SegmentExtractIter, }; use anyhow::{anyhow, Context, Result}; -use jellybase::common::{LocalTrack, Node, SourceTrackKind}; use jellymatroska::{read::EbmlReader, write::EbmlWriter, Master, MatroskaTag}; use log::{debug, info}; use std::{ @@ -21,32 +20,33 @@ use std::{ const FRAGMENT_LENGTH: f64 = 2.; -pub fn fragment_index( - path_base: &Path, - item: &Node, - local_track: &LocalTrack, - track_index: usize, -) -> Result>> { - let media_info = item.media.as_ref().unwrap(); - let source_path = path_base.join(&local_track.path); - let index = get_seek_index(&source_path)?; +pub fn fragment_index(path: &Path, track: u64) -> 
Result>> { + let meta = matroska_metadata(path)?; + let duration = meta.info.as_ref().unwrap().duration.unwrap(); + let force_kf = meta + .as_ref() + .tracks + .as_ref() + .unwrap() + .entries + .iter() + .find(|t| t.track_number == track) + .unwrap() + .track_type + == 17; + + let index = get_seek_index(&path)?; let index = index - .get(&(local_track.track as u64)) + .get(&track) .ok_or(anyhow!("seek index track missing"))?; - // everything is a keyframe (even though nothing is...) - let force_kf = matches!( - media_info.tracks[track_index].kind, - SourceTrackKind::Subtitles { .. } - ); - let n_kf = if force_kf { index.blocks.len() } else { index.keyframes.len() }; - let average_kf_interval = media_info.duration / n_kf as f64; + let average_kf_interval = duration / n_kf as f64; let kf_per_frag = (FRAGMENT_LENGTH / average_kf_interval).ceil() as usize; debug!("average keyframe interval: {average_kf_interval}"); debug!(" => keyframes per frag {kf_per_frag}"); @@ -72,7 +72,7 @@ pub fn fragment_index( index.keyframes.get((i + 1) * kf_per_frag).copied() } .map(|i| index.blocks[i].pts as f64 / 1000.) - .unwrap_or(media_info.duration); + .unwrap_or(duration); start..end }) .collect()) @@ -80,45 +80,45 @@ pub fn fragment_index( pub fn write_fragment_into( writer: impl Write, - path_base: &Path, - item: &Node, - local_track: &LocalTrack, - track: usize, + path: &Path, + track: u64, webm: bool, + title: &str, n: usize, ) -> anyhow::Result<()> { - info!("writing fragment {n} of {:?} (track {track})", item.title); - let mut output = EbmlWriter::new(BufWriter::new(writer), 0); - let media_info = item.media.as_ref().unwrap(); - let info = media_info + let meta = matroska_metadata(path)?; + let duration = meta.info.as_ref().unwrap().duration.unwrap(); + let track_meta = meta + .as_ref() .tracks - .get(track) - .ok_or(anyhow!("track not available"))? - .to_owned(); - let source_path = path_base.join(&local_track.path); + .as_ref() + .unwrap() + .entries + .iter() + .find(|t| t.track_number == track) + .unwrap(); + let force_kf = track_meta.track_type == 17; + + info!("writing fragment {n} of {:?} (track {track})", title); + let mut output = EbmlWriter::new(BufWriter::new(writer), 0); let mapped = 1; - info!( - "\t- {track} {source_path:?} ({} => {mapped})", - local_track.track - ); - info!("\t {}", info); - let file = File::open(&source_path).context("opening source file")?; - let index = get_seek_index(&source_path)?; + info!("\t- {track} {path:?} ({} => {mapped})", track); + // info!("\t {}", info); + let file = File::open(&path).context("opening source file")?; + let index = get_seek_index(&path)?; let index = index - .get(&(local_track.track as u64)) + .get(&track) .ok_or(anyhow!("track missing 2"))? .to_owned(); debug!("\t seek index: {} blocks loaded", index.blocks.len()); let mut reader = EbmlReader::new(BufReader::new(file)); - let force_kf = matches!(info.kind, SourceTrackKind::Subtitles { .. }); let n_kf = if force_kf { index.blocks.len() } else { index.keyframes.len() }; - - let average_kf_interval = media_info.duration / n_kf as f64; + let average_kf_interval = duration / n_kf as f64; let kf_per_frag = (FRAGMENT_LENGTH / average_kf_interval).ceil() as usize; debug!("average keyframe interval: {average_kf_interval}"); debug!(" => keyframes per frag {kf_per_frag}"); @@ -144,25 +144,20 @@ pub fn write_fragment_into( .blocks .get(end_block_index) .map(|b| b.pts) - .unwrap_or((media_info.duration * 1000.) as u64); + .unwrap_or((duration * 1000.) 
as u64); output.write_tag(&ebml_header(webm))?; output.write_tag(&MatroskaTag::Segment(Master::Start))?; output.write_tag(&ebml_segment_info( - format!("{}: {info}", item.title.clone().unwrap_or_default()), + title.to_string(), (last_block_pts - start_block.pts) as f64 / 1000., ))?; output.write_tag(&MatroskaTag::Tracks(Master::Collected(vec![ - ebml_track_entry( - mapped, - local_track.track as u64 * 100, // TODO something else that is unique to the track - &info, - local_track.codec_private.clone(), - ), + ebml_track_entry(mapped, track_meta), ])))?; reader.seek(start_block.source_off, MatroskaTag::Cluster(Master::Start))?; - let mut reader = SegmentExtractIter::new(&mut reader, local_track.track as u64); + let mut reader = SegmentExtractIter::new(&mut reader, track); { // TODO this one caused fragments to get dropped by MSE for no reason diff --git a/remuxer/src/lib.rs b/remuxer/src/lib.rs index cc4b39b..9ddf7c1 100644 --- a/remuxer/src/lib.rs +++ b/remuxer/src/lib.rs @@ -5,17 +5,16 @@ */ pub mod extract; pub mod fragment; +pub mod metadata; pub mod remux; pub mod seek_index; pub mod segment_extractor; pub mod trim_writer; -pub mod metadata; +use ebml_struct::matroska::TrackEntry; pub use fragment::write_fragment_into; -pub use remux::remux_stream_into; - -use jellybase::common::{SourceTrack, SourceTrackKind}; use jellymatroska::{Master, MatroskaTag}; +pub use remux::remux_stream_into; pub fn ebml_header(webm: bool) -> MatroskaTag { MatroskaTag::Ebml(Master::Collected(vec![ @@ -42,66 +41,56 @@ pub fn ebml_segment_info(title: String, duration: f64) -> MatroskaTag { ])) } -pub fn ebml_track_entry( - number: u64, - uid: u64, - track: &SourceTrack, - codec_private: Option>, -) -> MatroskaTag { +pub fn ebml_track_entry(number: u64, track: &TrackEntry) -> MatroskaTag { let mut els = vec![ MatroskaTag::TrackNumber(number), - MatroskaTag::TrackUID(uid), MatroskaTag::FlagLacing(track.flag_lacing), MatroskaTag::Language(track.language.clone()), - MatroskaTag::CodecID(track.codec.clone()), + MatroskaTag::CodecID(track.codec_id.clone()), MatroskaTag::CodecDelay(track.codec_delay), MatroskaTag::SeekPreRoll(track.seek_pre_roll), ]; if let Some(d) = &track.default_duration { els.push(MatroskaTag::DefaultDuration(*d)); } - match track.kind { - SourceTrackKind::Video { - width, - height, - display_height, - display_width, - display_unit, - fps, - } => { + match track.track_type { + 1 => { + let video = track.video.as_ref().unwrap(); els.push(MatroskaTag::TrackType(1)); let mut props = vec![ - MatroskaTag::PixelWidth(width), - MatroskaTag::PixelHeight(height), + MatroskaTag::PixelWidth(video.pixel_width), + MatroskaTag::PixelHeight(video.pixel_height), ]; - props.push(MatroskaTag::DisplayWidth(display_width.unwrap_or(width))); - props.push(MatroskaTag::DisplayHeight(display_height.unwrap_or(height))); - props.push(MatroskaTag::DisplayUnit(display_unit)); - if let Some(fps) = fps { + props.push(MatroskaTag::DisplayWidth( + video.display_width.unwrap_or(video.pixel_width), + )); + props.push(MatroskaTag::DisplayHeight( + video.display_height.unwrap_or(video.pixel_height), + )); + props.push(MatroskaTag::DisplayUnit(video.display_unit)); + if let Some(fps) = video.frame_rate { props.push(MatroskaTag::FrameRate(fps)) } els.push(MatroskaTag::Video(Master::Collected(props))) } - SourceTrackKind::Audio { - channels, - sample_rate, - bit_depth, - } => { + 2 => { + let audio = track.audio.as_ref().unwrap(); els.push(MatroskaTag::TrackType(2)); let mut props = vec![ - MatroskaTag::SamplingFrequency(sample_rate), - 
MatroskaTag::Channels(channels.try_into().unwrap()), + MatroskaTag::SamplingFrequency(audio.sampling_frequency), + MatroskaTag::Channels(audio.channels), ]; - if let Some(bit_depth) = bit_depth { + if let Some(bit_depth) = audio.bit_depth { props.push(MatroskaTag::BitDepth(bit_depth.try_into().unwrap())); } els.push(MatroskaTag::Audio(Master::Collected(props))); } - SourceTrackKind::Subtitles => { + 17 => { els.push(MatroskaTag::TrackType(17)); } + _ => unreachable!(), } - if let Some(d) = &codec_private { + if let Some(d) = &track.codec_private { els.push(MatroskaTag::CodecPrivate(d.clone())); } MatroskaTag::TrackEntry(Master::Collected(els)) diff --git a/remuxer/src/remux.rs b/remuxer/src/remux.rs index 0507f1e..a44c58b 100644 --- a/remuxer/src/remux.rs +++ b/remuxer/src/remux.rs @@ -3,333 +3,311 @@ which is licensed under the GNU Affero General Public License (version 3); see /COPYING. Copyright (C) 2025 metamuffin */ -use crate::{ - ebml_header, ebml_track_entry, seek_index::get_seek_index, - segment_extractor::SegmentExtractIter, trim_writer::TrimWriter, -}; -use anyhow::{anyhow, Context}; -use jellybase::common::{ - seek_index::{BlockIndex, SeekIndex}, - LocalTrack, Node, SourceTrack, -}; -use jellymatroska::{ - read::EbmlReader, - write::{bad_vint_length, vint_length, EbmlWriter}, - Master, MatroskaTag, -}; -use log::{debug, info, trace, warn}; -use std::{ - fs::File, - io::{BufReader, BufWriter, Seek, SeekFrom, Write}, - ops::Range, - path::PathBuf, - sync::Arc, - time::Instant, -}; +use jellybase::common::Node; +use std::{io::Write, ops::Range, path::PathBuf}; -struct ClusterLayout { - position: usize, - timestamp: u64, - source_offsets: Vec>, - blocks: Vec<(usize, BlockIndex)>, -} +// struct ClusterLayout { +// position: usize, +// timestamp: u64, +// source_offsets: Vec>, +// blocks: Vec<(usize, BlockIndex)>, +// } pub fn remux_stream_into( - writer: impl Write, - range: Range, - path_base: PathBuf, - item: &Node, - track_sources: Vec, - selection: Vec, - webm: bool, + _writer: impl Write, + _range: Range, + _path_base: PathBuf, + _item: &Node, + _selection: Vec, + _webm: bool, ) -> anyhow::Result<()> { - info!("remuxing {:?} to have tracks {selection:?}", item.title); - let writer = TrimWriter::new(BufWriter::new(writer), range.clone()); - let mut output = EbmlWriter::new(writer, 0); + // info!("remuxing {:?} to have tracks {selection:?}", item.title); + // let writer = TrimWriter::new(BufWriter::new(writer), range.clone()); + // let mut output = EbmlWriter::new(writer, 0); - struct ReaderC { - info: SourceTrack, - reader: EbmlReader, - mapped: u64, - index: Arc, - source_track_index: usize, - codec_private: Option>, - layouting_progress_index: usize, - } + // struct ReaderC { + // info: SourceTrack, + // reader: EbmlReader, + // mapped: u64, + // index: Arc, + // source_track_index: usize, + // codec_private: Option>, + // layouting_progress_index: usize, + // } - let timing_cp = Instant::now(); + // let timing_cp = Instant::now(); - let mut inputs = selection - .iter() - .enumerate() - .map(|(index, sel)| { - let info = item - .media - .as_ref() - .unwrap() - .tracks - .get(*sel) - .ok_or(anyhow!("track not available"))? 
- .to_owned(); - let private = &track_sources[index]; - let source_path = path_base.join(&private.path); - let mapped = index as u64 + 1; - info!("\t- {sel} {source_path:?} ({} => {mapped})", private.track); - info!("\t {}", info); - let file = File::open(&source_path).context("opening source file")?; - let index = get_seek_index(&source_path)?; - let index = index - .get(&(private.track as u64)) - .ok_or(anyhow!("track missing 3"))? - .to_owned(); - debug!("\t seek index: {} blocks loaded", index.blocks.len()); - let reader = EbmlReader::new(BufReader::new(file)); - Ok(ReaderC { - index, - reader, - info, - mapped, - source_track_index: private.track, - codec_private: private.codec_private.clone(), - layouting_progress_index: 0, - }) - }) - .collect::>>()?; + // let mut inputs = selection + // .iter() + // .enumerate() + // .map(|(index, sel)| { + // let info = item + // .media + // .as_ref() + // .unwrap() + // .tracks + // .get(*sel) + // .ok_or(anyhow!("track not available"))? + // .to_owned(); + // let source_path = path_base.join(&private.path); + // let mapped = index as u64 + 1; + // info!("\t- {sel} {source_path:?} ({} => {mapped})", private.track); + // info!("\t {}", info); + // let file = File::open(&source_path).context("opening source file")?; + // let index = get_seek_index(&source_path)?; + // let index = index + // .get(&(private.track as u64)) + // .ok_or(anyhow!("track missing 3"))? + // .to_owned(); + // debug!("\t seek index: {} blocks loaded", index.blocks.len()); + // let reader = EbmlReader::new(BufReader::new(file)); + // Ok(ReaderC { + // index, + // reader, + // info, + // mapped, + // source_track_index: private.track, + // codec_private: private.codec_private.clone(), + // layouting_progress_index: 0, + // }) + // }) + // .collect::>>()?; - info!("(perf) prepare inputs: {:?}", Instant::now() - timing_cp); - let timing_cp = Instant::now(); + // info!("(perf) prepare inputs: {:?}", Instant::now() - timing_cp); + // let timing_cp = Instant::now(); - output.write_tag(&ebml_header(webm))?; + // output.write_tag(&ebml_header(webm))?; - output.write_tag(&MatroskaTag::Segment(Master::Start))?; - let segment_offset = output.position(); + // output.write_tag(&MatroskaTag::Segment(Master::Start))?; + // let segment_offset = output.position(); - output.write_tag(&MatroskaTag::Info(Master::Collected(vec![ - MatroskaTag::TimestampScale(1_000_000), - MatroskaTag::Duration(item.media.as_ref().unwrap().duration * 1000.0), - MatroskaTag::Title(item.title.clone().unwrap_or_default()), - MatroskaTag::MuxingApp("jellyremux".to_string()), - MatroskaTag::WritingApp("jellything".to_string()), - ])))?; + // output.write_tag(&MatroskaTag::Info(Master::Collected(vec![ + // MatroskaTag::TimestampScale(1_000_000), + // MatroskaTag::Duration(item.media.as_ref().unwrap().duration * 1000.0), + // MatroskaTag::Title(item.title.clone().unwrap_or_default()), + // MatroskaTag::MuxingApp("jellyremux".to_string()), + // MatroskaTag::WritingApp("jellything".to_string()), + // ])))?; - let tracks_header = inputs - .iter_mut() - .map(|rc| ebml_track_entry(rc.mapped, rc.mapped, &rc.info, rc.codec_private.take())) - .collect(); - output.write_tag(&MatroskaTag::Tracks(Master::Collected(tracks_header)))?; + // let tracks_header = inputs + // .iter_mut() + // .map(|rc| ebml_track_entry(rc.mapped, rc.mapped, &rc.info, rc.codec_private.take())) + // .collect(); + // output.write_tag(&MatroskaTag::Tracks(Master::Collected(tracks_header)))?; - let mut segment_layout: Vec = { - let mut cluster_pts = 0; - let 
mut clusters = vec![]; - let mut cluster = vec![]; - let mut source_offsets = vec![None; inputs.len()]; - let mut gp = 0usize; // cluster position (in the segment) - let mut p = 0usize; // block position (in the cluster) - loop { - let (track, block) = { - let mut best_block = BlockIndex { - pts: u64::MAX, - size: 0, - source_off: 0, - }; - let mut best_track = 0; - for (i, r) in inputs.iter().enumerate() { - if let Some(v) = r.index.blocks.get(r.layouting_progress_index) { - if v.pts < best_block.pts { - best_block = v.to_owned(); - best_track = i; - } - }; - } - (best_track, best_block) - }; - inputs[track].layouting_progress_index += 1; - source_offsets[track].get_or_insert(block.source_off); - if block.pts > cluster_pts + 1_000 { - let cluster_content_size = 1 + 1 // timestamp {tag, size} - + bad_vint_length(cluster_pts) // timestamp tag value - + p; - let cluster_size = 4 // tag length - + vint_length(cluster_content_size as u64) // size varint - + cluster_content_size; - clusters.push(ClusterLayout { - position: gp, // relative to the first cluster - timestamp: cluster_pts, - source_offsets, - blocks: std::mem::take(&mut cluster), - }); + // let mut segment_layout: Vec = { + // let mut cluster_pts = 0; + // let mut clusters = vec![]; + // let mut cluster = vec![]; + // let mut source_offsets = vec![None; inputs.len()]; + // let mut gp = 0usize; // cluster position (in the segment) + // let mut p = 0usize; // block position (in the cluster) + // loop { + // let (track, block) = { + // let mut best_block = BlockIndex { + // pts: u64::MAX, + // size: 0, + // source_off: 0, + // }; + // let mut best_track = 0; + // for (i, r) in inputs.iter().enumerate() { + // if let Some(v) = r.index.blocks.get(r.layouting_progress_index) { + // if v.pts < best_block.pts { + // best_block = v.to_owned(); + // best_track = i; + // } + // }; + // } + // (best_track, best_block) + // }; + // inputs[track].layouting_progress_index += 1; + // source_offsets[track].get_or_insert(block.source_off); + // if block.pts > cluster_pts + 1_000 { + // let cluster_content_size = 1 + 1 // timestamp {tag, size} + // + bad_vint_length(cluster_pts) // timestamp tag value + // + p; + // let cluster_size = 4 // tag length + // + vint_length(cluster_content_size as u64) // size varint + // + cluster_content_size; + // clusters.push(ClusterLayout { + // position: gp, // relative to the first cluster + // timestamp: cluster_pts, + // source_offsets, + // blocks: std::mem::take(&mut cluster), + // }); - cluster_pts = block.pts; - source_offsets = vec![None; inputs.len()]; - gp += cluster_size; - p = 0; - } - if block.pts == u64::MAX { - break; - } + // cluster_pts = block.pts; + // source_offsets = vec![None; inputs.len()]; + // gp += cluster_size; + // p = 0; + // } + // if block.pts == u64::MAX { + // break; + // } - let simpleblock_size = 1 + 2 + 1 // block {tracknum, pts_off, flags} - // TODO does not work, if more than 127 tracks are present - + block.size; // block payload - p += 1; // simpleblock tag - p += vint_length(simpleblock_size as u64); // simpleblock size vint - p += simpleblock_size; + // let simpleblock_size = 1 + 2 + 1 // block {tracknum, pts_off, flags} + // // TODO does not work, if more than 127 tracks are present + // + block.size; // block payload + // p += 1; // simpleblock tag + // p += vint_length(simpleblock_size as u64); // simpleblock size vint + // p += simpleblock_size; - cluster.push((track, block)) - } - info!("segment layout computed ({} clusters)", clusters.len()); - clusters - }; - info!( - 
"(perf) compute segment layout: {:?}", - Instant::now() - timing_cp - ); - let timing_cp = Instant::now(); + // cluster.push((track, block)) + // } + // info!("segment layout computed ({} clusters)", clusters.len()); + // clusters + // }; + // info!( + // "(perf) compute segment layout: {:?}", + // Instant::now() - timing_cp + // ); + // let timing_cp = Instant::now(); - let max_cue_size = 4 // cues id - + 8 // cues len - + ( // cues content - 1 // cp id - + 1 // cp len - + ( // cp content - 1 // ctime id, - + 1 // ctime len - + 8 // ctime content uint - + ( // ctps - 1 // ctp id - + 8 // ctp len - + (// ctp content - 1 // ctrack id - + 1 // ctrack size - + 1 // ctrack content int - // TODO this breaks if inputs.len() >= 127 - + 1 // ccp id - + 1 // ccp len - + 8 // ccp content offset - ) - ) - ) * inputs.len() - ) * segment_layout.len() - + 1 // void id - + 8; // void len + // let max_cue_size = 4 // cues id + // + 8 // cues len + // + ( // cues content + // 1 // cp id + // + 1 // cp len + // + ( // cp content + // 1 // ctime id, + // + 1 // ctime len + // + 8 // ctime content uint + // + ( // ctps + // 1 // ctp id + // + 8 // ctp len + // + (// ctp content + // 1 // ctrack id + // + 1 // ctrack size + // + 1 // ctrack content int + // // TODO this breaks if inputs.len() >= 127 + // + 1 // ccp id + // + 1 // ccp len + // + 8 // ccp content offset + // ) + // ) + // ) * inputs.len() + // ) * segment_layout.len() + // + 1 // void id + // + 8; // void len - let first_cluster_offset_predict = max_cue_size + output.position(); + // let first_cluster_offset_predict = max_cue_size + output.position(); - // make the cluster position relative to the segment start as they should - segment_layout - .iter_mut() - .for_each(|e| e.position += first_cluster_offset_predict - segment_offset); + // // make the cluster position relative to the segment start as they should + // segment_layout + // .iter_mut() + // .for_each(|e| e.position += first_cluster_offset_predict - segment_offset); - output.write_tag(&MatroskaTag::Cues(Master::Collected( - segment_layout - .iter() - .map(|cluster| { - MatroskaTag::CuePoint(Master::Collected( - Some(MatroskaTag::CueTime(cluster.timestamp)) - .into_iter() - // TODO: Subtitles should not have cues for every cluster - .chain(inputs.iter().map(|i| { - MatroskaTag::CueTrackPositions(Master::Collected(vec![ - MatroskaTag::CueTrack(i.mapped), - MatroskaTag::CueClusterPosition(cluster.position as u64), - ])) - })) - .collect(), - )) - }) - .collect(), - )))?; - output.write_padding(first_cluster_offset_predict)?; - let first_cluster_offset = output.position(); - assert_eq!(first_cluster_offset, first_cluster_offset_predict); + // output.write_tag(&MatroskaTag::Cues(Master::Collected( + // segment_layout + // .iter() + // .map(|cluster| { + // MatroskaTag::CuePoint(Master::Collected( + // Some(MatroskaTag::CueTime(cluster.timestamp)) + // .into_iter() + // // TODO: Subtitles should not have cues for every cluster + // .chain(inputs.iter().map(|i| { + // MatroskaTag::CueTrackPositions(Master::Collected(vec![ + // MatroskaTag::CueTrack(i.mapped), + // MatroskaTag::CueClusterPosition(cluster.position as u64), + // ])) + // })) + // .collect(), + // )) + // }) + // .collect(), + // )))?; + // output.write_padding(first_cluster_offset_predict)?; + // let first_cluster_offset = output.position(); + // assert_eq!(first_cluster_offset, first_cluster_offset_predict); - let mut skip = 0; - // TODO binary search - for (i, cluster) in segment_layout.iter().enumerate() { - if 
(cluster.position + segment_offset) >= range.start { - break; - } - skip = i; - } + // let mut skip = 0; + // // TODO binary search + // for (i, cluster) in segment_layout.iter().enumerate() { + // if (cluster.position + segment_offset) >= range.start { + // break; + // } + // skip = i; + // } - if skip != 0 { - info!("skipping {skip} clusters"); - output.seek(SeekFrom::Start( - (segment_layout[skip].position + segment_offset) as u64, - ))?; - } + // if skip != 0 { + // info!("skipping {skip} clusters"); + // output.seek(SeekFrom::Start( + // (segment_layout[skip].position + segment_offset) as u64, + // ))?; + // } - struct ReaderD<'a> { - stream: SegmentExtractIter<'a>, - mapped: u64, - } + // struct ReaderD<'a> { + // stream: SegmentExtractIter<'a>, + // mapped: u64, + // } - let mut track_readers = inputs - .iter_mut() - .enumerate() - .map(|(i, inp)| { - inp.reader - .seek( - // the seek target might be a hole; we continue until the next cluster of that track. - // this should be fine since tracks are only read according to segment_layout - find_first_cluster_with_off(&segment_layout, skip, i) - .ok_or(anyhow!("cluster hole at eof"))?, - MatroskaTag::Cluster(Master::Start), // TODO shouldn't this be a child of cluster? - ) - .context("seeking in input")?; - let stream = SegmentExtractIter::new(&mut inp.reader, inp.source_track_index as u64); + // let mut track_readers = inputs + // .iter_mut() + // .enumerate() + // .map(|(i, inp)| { + // inp.reader + // .seek( + // // the seek target might be a hole; we continue until the next cluster of that track. + // // this should be fine since tracks are only read according to segment_layout + // find_first_cluster_with_off(&segment_layout, skip, i) + // .ok_or(anyhow!("cluster hole at eof"))?, + // MatroskaTag::Cluster(Master::Start), // TODO shouldn't this be a child of cluster? 
+ // ) + // .context("seeking in input")?; + // let stream = SegmentExtractIter::new(&mut inp.reader, inp.source_track_index as u64); - Ok(ReaderD { - mapped: inp.mapped, - stream, - }) - }) - .collect::>>()?; + // Ok(ReaderD { + // mapped: inp.mapped, + // stream, + // }) + // }) + // .collect::>>()?; - info!("(perf) seek inputs: {:?}", Instant::now() - timing_cp); + // info!("(perf) seek inputs: {:?}", Instant::now() - timing_cp); - for (cluster_index, cluster) in segment_layout.into_iter().enumerate().skip(skip) { - debug!( - "writing cluster {cluster_index} (pts_base={}) with {} blocks", - cluster.timestamp, - cluster.blocks.len() - ); - { - let cue_error = cluster.position as i64 - (output.position() - segment_offset) as i64; - if cue_error != 0 { - warn!("calculation was {} bytes off", cue_error); - } - } + // for (cluster_index, cluster) in segment_layout.into_iter().enumerate().skip(skip) { + // debug!( + // "writing cluster {cluster_index} (pts_base={}) with {} blocks", + // cluster.timestamp, + // cluster.blocks.len() + // ); + // { + // let cue_error = cluster.position as i64 - (output.position() - segment_offset) as i64; + // if cue_error != 0 { + // warn!("calculation was {} bytes off", cue_error); + // } + // } - let mut cluster_blocks = vec![MatroskaTag::Timestamp(cluster.timestamp)]; - for (block_track, index_block) in cluster.blocks { - let track_reader = &mut track_readers[block_track]; - // TODO handle duration - let mut block = track_reader.stream.next_block()?.0; + // let mut cluster_blocks = vec![MatroskaTag::Timestamp(cluster.timestamp)]; + // for (block_track, index_block) in cluster.blocks { + // let track_reader = &mut track_readers[block_track]; + // // TODO handle duration + // let mut block = track_reader.stream.next_block()?.0; - assert_eq!(index_block.size, block.data.len(), "seek index is wrong"); + // assert_eq!(index_block.size, block.data.len(), "seek index is wrong"); - block.track = track_reader.mapped; - block.timestamp_off = (index_block.pts - cluster.timestamp).try_into().unwrap(); - trace!("n={} tso={}", block.track, block.timestamp_off); + // block.track = track_reader.mapped; + // block.timestamp_off = (index_block.pts - cluster.timestamp).try_into().unwrap(); + // trace!("n={} tso={}", block.track, block.timestamp_off); - cluster_blocks.push(MatroskaTag::SimpleBlock(block)) - } - output.write_tag(&MatroskaTag::Cluster(Master::Collected(cluster_blocks)))?; - } - // output.write_tag(&MatroskaTag::Segment(Master::End))?; - Ok(()) + // cluster_blocks.push(MatroskaTag::SimpleBlock(block)) + // } + // output.write_tag(&MatroskaTag::Cluster(Master::Collected(cluster_blocks)))?; + // } + // // output.write_tag(&MatroskaTag::Segment(Master::End))?; + // Ok(()) + todo!() } -fn find_first_cluster_with_off( - segment_layout: &[ClusterLayout], - skip: usize, - track: usize, -) -> Option { - for cluster in segment_layout.iter().skip(skip) { - if let Some(off) = cluster.source_offsets[track] { - return Some(off); - } - } - None -} +// fn find_first_cluster_with_off( +// segment_layout: &[ClusterLayout], +// skip: usize, +// track: usize, +// ) -> Option { +// for cluster in segment_layout.iter().skip(skip) { +// if let Some(off) = cluster.source_offsets[track] { +// return Some(off); +// } +// } +// None +// } diff --git a/server/src/routes/ui/player.rs b/server/src/routes/ui/player.rs index aa567ab..2cc2dd4 100644 --- a/server/src/routes/ui/player.rs +++ b/server/src/routes/ui/player.rs @@ -15,9 +15,10 @@ use crate::{ uri, }; use anyhow::anyhow; -use 
jellybase::{permission::PermissionSetExt, CONF}; +use jellybase::CONF; use jellycommon::{ - user::{PermissionSet, PlayerKind, UserPermission}, + stream::{StreamContainer, StreamSpec}, + user::{PermissionSet, PlayerKind}, Node, NodeID, SourceTrackKind, TrackID, }; use markup::DynRender; @@ -45,15 +46,14 @@ impl PlayerConfig { fn jellynative_url(action: &str, seek: f64, secret: &str, node: &str, session: &str) -> String { let protocol = if CONF.tls { "https" } else { "http" }; let host = &CONF.hostname; - let stream_url = ""; - // TODO - // uri!(r_stream( - // node, - // StreamSpec { - // format: StreamFormat::HlsMaster, - // ..Default::default() - // } - // )); + let stream_url = format!( + "/n/{node}/stream{}", + StreamSpec::HlsMultiVariant { + segment: 0, + container: StreamContainer::Matroska + } + .to_query() + ); format!("jellynative://{action}/{secret}/{session}/{seek}/{protocol}://{host}{stream_url}",) } diff --git a/stream/src/fragment.rs b/stream/src/fragment.rs index a34bb8d..52d32f4 100644 --- a/stream/src/fragment.rs +++ b/stream/src/fragment.rs @@ -3,37 +3,29 @@ which is licensed under the GNU Affero General Public License (version 3); see /COPYING. Copyright (C) 2025 metamuffin */ +use crate::{stream_info, SMediaInfo}; use anyhow::{anyhow, Result}; -use jellybase::{ - common::{ - stream::StreamSpec, - user::{PermissionSet, UserPermission}, - LocalTrack, Node, - }, - permission::PermissionSetExt, - CONF, -}; -use jellytranscoder::fragment::transcode; +use jellybase::common::stream::StreamContainer; use log::warn; use std::sync::Arc; -use tokio::{fs::File, io::DuplexStream}; +use tokio::io::DuplexStream; use tokio_util::io::SyncIoBridge; pub async fn fragment_stream( - node: Arc, - local_tracks: Vec, - spec: StreamSpec, mut b: DuplexStream, - perms: &PermissionSet, - webm: bool, - track: u64, - segment: u64, + info: Arc, + track: usize, + segment: usize, index: usize, + format: usize, + container: StreamContainer, ) -> Result<()> { - let local_track = local_tracks - .first() - .ok_or(anyhow!("track missing"))? - .to_owned(); + let (iinfo, info) = stream_info(info).await?; + let (file_index, track_num) = *iinfo + .track_to_file + .get(track) + .ok_or(anyhow!("track not found"))?; + let path = iinfo.paths[file_index].clone(); // if let Some(profile) = None { // perms.assert(&UserPermission::Transcode)?; @@ -70,11 +62,10 @@ pub async fn fragment_stream( tokio::task::spawn_blocking(move || { if let Err(err) = jellyremuxer::write_fragment_into( b, - &CONF.media_path, - &node, - &local_track, - track as usize, - webm, + &path, + track_num, + container == StreamContainer::WebM, + &info.name.unwrap_or_default(), index, ) { warn!("segment stream error: {err}"); diff --git a/stream/src/fragment_index.rs b/stream/src/fragment_index.rs new file mode 100644 index 0000000..6fbddc6 --- /dev/null +++ b/stream/src/fragment_index.rs @@ -0,0 +1,32 @@ +/* + This file is part of jellything (https://codeberg.org/metamuffin/jellything) + which is licensed under the GNU Affero General Public License (version 3); see /COPYING. 
+ Copyright (C) 2025 metamuffin +*/ +use crate::{stream_info, SMediaInfo}; +use anyhow::{anyhow, Result}; +use jellybase::common::stream::{SegmentNum, TrackNum}; +use std::sync::Arc; +use tokio::io::{AsyncWriteExt, DuplexStream}; + +pub async fn fragment_index_stream( + mut b: DuplexStream, + info: Arc, + _segment: SegmentNum, + track: TrackNum, +) -> Result<()> { + let (iinfo, _info) = stream_info(info).await?; + let (file_index, track_num) = *iinfo + .track_to_file + .get(track) + .ok_or(anyhow!("track not found"))?; + + let fragments = tokio::task::spawn_blocking(move || { + jellyremuxer::fragment::fragment_index(&iinfo.paths[file_index], track_num) + }) + .await??; + + let out = serde_json::to_string(&fragments)?; + tokio::spawn(async move { b.write_all(out.as_bytes()).await }); + Ok(()) +} diff --git a/stream/src/hls.rs b/stream/src/hls.rs index 27630b2..f06ac72 100644 --- a/stream/src/hls.rs +++ b/stream/src/hls.rs @@ -4,13 +4,10 @@ Copyright (C) 2025 metamuffin */ +use crate::{stream_info, SMediaInfo}; use anyhow::{anyhow, Result}; -use jellybase::{ - common::{ - stream::{StreamContainer, StreamSpec}, - LocalTrack, Node, SourceTrackKind, - }, - CONF, +use jellybase::common::stream::{ + FormatNum, SegmentNum, StreamContainer, StreamSpec, TrackKind, TrackNum, }; use std::{fmt::Write, ops::Range, sync::Arc}; use tokio::{ @@ -19,20 +16,24 @@ use tokio::{ }; pub async fn hls_master_stream( - node: Arc, - _local_tracks: Vec, - segment: u64, - container: StreamContainer, mut b: DuplexStream, + info: Arc, + segment: SegmentNum, + container: StreamContainer, ) -> Result<()> { - let media = node.media.as_ref().ok_or(anyhow!("no media"))?; + let (_iinfo, info) = stream_info(info).await?; + let seg = info + .segments + .get(segment) + .ok_or(anyhow!("segment not found"))?; + let mut out = String::new(); writeln!(out, "#EXTM3U")?; writeln!(out, "#EXT-X-VERSION:4")?; // writeln!(out, "#EXT-X-INDEPENDENT-SEGMENTS")?; - for (i, t) in media.tracks.iter().enumerate() { + for (i, t) in seg.tracks.iter().enumerate() { let uri = format!( - "stream?{}", + "stream{}", StreamSpec::HlsVariant { segment, track: i, @@ -42,10 +43,11 @@ pub async fn hls_master_stream( .to_query() ); let r#type = match t.kind { - SourceTrackKind::Video { .. } => "VIDEO", - SourceTrackKind::Audio { .. 
} => "AUDIO", - SourceTrackKind::Subtitles => "SUBTITLES", + TrackKind::Video => "VIDEO", + TrackKind::Audio => "AUDIO", + TrackKind::Subtitle => "SUBTITLES", }; + // TODO bw writeln!(out, "#EXT-X-STREAM-INF:BANDWIDTH=5000000,TYPE={type}")?; writeln!(out, "{uri}")?; } @@ -54,42 +56,44 @@ pub async fn hls_master_stream( } pub async fn hls_variant_stream( - node: Arc, - local_tracks: Vec, - segment: u64, - track: usize, - format: usize, - container: StreamContainer, mut b: DuplexStream, + info: Arc, + segment: SegmentNum, + track: TrackNum, + format: FormatNum, + container: StreamContainer, ) -> Result<()> { - let local_track = local_tracks.first().ok_or(anyhow!("no track"))?.to_owned(); - let media_info = node.media.to_owned().ok_or(anyhow!("no media?"))?; + let (iinfo, info) = stream_info(info).await?; + let (file_index, track_num) = *iinfo + .track_to_file + .get(track) + .ok_or(anyhow!("track not found"))?; + let seg = info + .segments + .get(segment) + .ok_or(anyhow!("segment not found"))?; + let frags = spawn_blocking(move || { - jellyremuxer::fragment::fragment_index( - &CONF.media_path, - &node, - &local_track, - track as usize, - ) + jellyremuxer::fragment::fragment_index(&iinfo.paths[file_index], track_num) }) .await??; let mut out = String::new(); writeln!(out, "#EXTM3U")?; writeln!(out, "#EXT-X-PLAYLIST-TYPE:VOD")?; - writeln!(out, "#EXT-X-TARGETDURATION:{}", media_info.duration)?; + writeln!(out, "#EXT-X-TARGETDURATION:{}", seg.duration)?; writeln!(out, "#EXT-X-VERSION:4")?; writeln!(out, "#EXT-X-MEDIA-SEQUENCE:0")?; - for (i, Range { start, end }) in frags.iter().enumerate() { + for (index, Range { start, end }) in frags.iter().enumerate() { writeln!(out, "#EXTINF:{:},", end - start)?; writeln!( out, - "stream?{}", + "stream{}", StreamSpec::Fragment { segment, track, - index: i as u64, + index, container, format, } diff --git a/stream/src/jhls.rs b/stream/src/jhls.rs deleted file mode 100644 index 2a2faec..0000000 --- a/stream/src/jhls.rs +++ /dev/null @@ -1,47 +0,0 @@ -/* - This file is part of jellything (https://codeberg.org/metamuffin/jellything) - which is licensed under the GNU Affero General Public License (version 3); see /COPYING. - Copyright (C) 2025 metamuffin -*/ -use anyhow::{anyhow, Result}; -use jellybase::{ - common::{ - jhls::JhlsTrackIndex, - stream::StreamSpec, - user::{PermissionSet, UserPermission}, - LocalTrack, Node, - }, - permission::PermissionSetExt, - CONF, -}; -use std::sync::Arc; -use tokio::io::{AsyncWriteExt, DuplexStream}; - -pub async fn jhls_index( - node: Arc, - local_tracks: &[LocalTrack], - spec: StreamSpec, - mut b: DuplexStream, - perms: &PermissionSet, -) -> Result<()> { - // let local_track = local_tracks - // .first() - // .ok_or(anyhow!("track missing"))? 
- // .to_owned(); - - // let fragments = tokio::task::spawn_blocking(move || { - // jellyremuxer::fragment::fragment_index(&CONF.media_path, &node, &local_track, spec.track[0]) - // }) - // .await??; - - // let out = serde_json::to_string(&JhlsTrackIndex { - // extra_profiles: if perms.check(&UserPermission::Transcode) { - // CONF.transcoding_profiles.clone() - // } else { - // vec![] - // }, - // fragments, - // })?; - // tokio::spawn(async move { b.write_all(out.as_bytes()).await }); - Ok(()) -} diff --git a/stream/src/lib.rs b/stream/src/lib.rs index d09759f..a6faf54 100644 --- a/stream/src/lib.rs +++ b/stream/src/lib.rs @@ -5,17 +5,20 @@ */ #![feature(iterator_try_collect)] pub mod fragment; +pub mod fragment_index; pub mod hls; -pub mod jhls; pub mod webvtt; use anyhow::{anyhow, Context, Result}; +use fragment::fragment_stream; +use fragment_index::fragment_index_stream; +use hls::{hls_master_stream, hls_variant_stream}; use jellybase::common::{ stream::{ StreamContainer, StreamFormatInfo, StreamInfo, StreamSegmentInfo, StreamSpec, StreamTrackInfo, TrackKind, }, - LocalTrack, Node, + Node, }; use jellyremuxer::metadata::{matroska_metadata, MatroskaMetadata}; use std::{collections::BTreeSet, io::SeekFrom, ops::Range, path::PathBuf, sync::Arc}; @@ -75,22 +78,26 @@ pub async fn stream( StreamSpec::Remux { tracks, container } => todo!(), StreamSpec::Original { track } => original_stream(info, track, range, b).await?, StreamSpec::HlsSuperMultiVariant { container } => todo!(), - StreamSpec::HlsMultiVariant { segment, container } => todo!(), + StreamSpec::HlsMultiVariant { segment, container } => { + hls_master_stream(b, info, segment, container).await? + } StreamSpec::HlsVariant { segment, track, container, format, - } => todo!(), - StreamSpec::Info { segment } => write_stream_info(info, b).await?, - StreamSpec::FragmentIndex { segment, track } => todo!(), + } => hls_variant_stream(b, info, segment, track, format, container).await?, + StreamSpec::Info { segment: _ } => write_stream_info(info, b).await?, + StreamSpec::FragmentIndex { segment, track } => { + fragment_index_stream(b, info, segment, track).await? 
+ } StreamSpec::Fragment { segment, track, index, container, format, - } => todo!(), + } => fragment_stream(b, info, track, segment, index, format, container).await?, } Ok(a) @@ -103,7 +110,7 @@ async fn async_matroska_metadata(path: PathBuf) -> Result> pub(crate) struct InternalStreamInfo { pub paths: Vec, pub metadata: Vec>, - pub track_to_file: Vec, + pub track_to_file: Vec<(usize, u64)>, } async fn stream_info(info: Arc) -> Result<(InternalStreamInfo, StreamInfo)> { @@ -142,14 +149,19 @@ async fn stream_info(info: Arc) -> Result<(InternalStreamInfo, Strea }, formats, }); - track_to_file.push(i); + track_to_file.push((i, t.track_number)); } } } let segment = StreamSegmentInfo { name: None, - duration: 0, + duration: metadata[0] + .info + .as_ref() + .unwrap() + .duration + .unwrap_or_default(), tracks, }; Ok(( @@ -173,7 +185,6 @@ async fn write_stream_info(info: Arc, mut b: DuplexStream) -> Result async fn remux_stream( node: Arc, - local_tracks: Vec, spec: StreamSpec, range: Range, b: DuplexStream, @@ -202,8 +213,7 @@ async fn original_stream( b: DuplexStream, ) -> Result<()> { let (iinfo, _info) = stream_info(info).await?; - - let file_index = *iinfo + let (file_index, _) = *iinfo .track_to_file .get(track) .ok_or(anyhow!("unknown track"))?; diff --git a/stream/src/webvtt.rs b/stream/src/webvtt.rs index fbd6382..960849c 100644 --- a/stream/src/webvtt.rs +++ b/stream/src/webvtt.rs @@ -6,7 +6,7 @@ use anyhow::{anyhow, Context, Result}; use jellybase::{ cache::async_cache_memory, - common::{stream::StreamSpec, LocalTrack, Node}, + common::{stream::StreamSpec, Node}, CONF, }; use jellyremuxer::extract::extract_track; @@ -17,7 +17,6 @@ use tokio::io::{AsyncWriteExt, DuplexStream}; pub async fn vtt_stream( json: bool, node: Arc, - local_tracks: Vec, spec: StreamSpec, mut b: DuplexStream, ) -> Result<()> { -- cgit v1.2.3-70-g09d2 From 42e08750a5a9a112d458a5db1d6b169278e953c5 Mon Sep 17 00:00:00 2001 From: metamuffin Date: Mon, 14 Apr 2025 16:02:42 +0200 Subject: stream info for transcoding --- common/src/config.rs | 71 ++++++------- common/src/stream.rs | 5 +- import/src/infojson.rs | 2 +- server/src/routes/compat/jellyfin/mod.rs | 18 ++-- stream/src/fragment.rs | 1 + stream/src/lib.rs | 91 +---------------- stream/src/stream_info.rs | 164 +++++++++++++++++++++++++++++++ stream/src/webvtt.rs | 18 ++-- transcoder/src/fragment.rs | 74 ++++---------- 9 files changed, 239 insertions(+), 205 deletions(-) create mode 100644 stream/src/stream_info.rs (limited to 'server/src') diff --git a/common/src/config.rs b/common/src/config.rs index a0dc459..3a48fea 100644 --- a/common/src/config.rs +++ b/common/src/config.rs @@ -8,36 +8,42 @@ use crate::user::PermissionSet; use serde::{Deserialize, Serialize}; use std::{collections::HashMap, path::PathBuf}; -#[rustfmt::skip] #[derive(Debug, Deserialize, Serialize, Default)] pub struct GlobalConfig { pub hostname: String, pub brand: String, pub slogan: String, - #[serde(default = "return_true" )] pub tls: bool, - #[serde(default = "default::asset_path")] pub asset_path: PathBuf, - #[serde(default = "default::database_path")] pub database_path: PathBuf, - #[serde(default = "default::cache_path")] pub cache_path: PathBuf, - #[serde(default = "default::media_path")] pub media_path: PathBuf, - #[serde(default = "default::secrets_path")] pub secrets_path: PathBuf, - #[serde(default = "default::max_in_memory_cache_size")] pub max_in_memory_cache_size: usize, - #[serde(default)] pub admin_username: Option, - #[serde(default = "default::login_expire")] pub 
login_expire: i64, - #[serde(default)] pub default_permission_set: PermissionSet, - #[serde(default)] encoders: EncoderPreferences, + #[serde(default = "return_true")] + pub tls: bool, + pub asset_path: PathBuf, + pub database_path: PathBuf, + pub cache_path: PathBuf, + pub media_path: PathBuf, + pub secrets_path: PathBuf, + #[serde(default = "max_in_memory_cache_size")] + pub max_in_memory_cache_size: usize, + #[serde(default)] + pub admin_username: Option, + #[serde(default = "login_expire")] + pub login_expire: i64, + #[serde(default)] + pub default_permission_set: PermissionSet, + #[serde(default)] + pub encoders: EncoderPreferences, } #[derive(Debug, Deserialize, Serialize, Default)] pub struct EncoderPreferences { - avc: Option, - hevc: Option, - vp8: Option, - vp9: Option, - av1: Option, + pub avc: Option, + pub hevc: Option, + pub vp8: Option, + pub vp9: Option, + pub av1: Option, } #[derive(Debug, Deserialize, Serialize)] -enum EncoderClass { +#[serde(rename_all = "snake_case")] +pub enum EncoderClass { Aom, Svt, X26n, @@ -77,30 +83,11 @@ pub struct ApiSecrets { pub trakt: Option, } -mod default { - use std::path::PathBuf; - - pub fn login_expire() -> i64 { - 60 * 60 * 24 - } - pub fn asset_path() -> PathBuf { - "data/assets".into() - } - pub fn database_path() -> PathBuf { - "data/database".into() - } - pub fn cache_path() -> PathBuf { - "data/cache".into() - } - pub fn media_path() -> PathBuf { - "data/media".into() - } - pub fn secrets_path() -> PathBuf { - "data/secrets.yaml".into() - } - pub fn max_in_memory_cache_size() -> usize { - 50_000_000 - } +fn login_expire() -> i64 { + 60 * 60 * 24 +} +fn max_in_memory_cache_size() -> usize { + 50_000_000 } fn return_true() -> bool { diff --git a/common/src/stream.rs b/common/src/stream.rs index a14fd57..555a5d0 100644 --- a/common/src/stream.rs +++ b/common/src/stream.rs @@ -87,7 +87,7 @@ pub enum TrackKind { #[derive(Debug, Clone, Deserialize, Serialize, Default)] pub struct StreamFormatInfo { pub codec: String, - pub byterate: f64, + pub bitrate: f64, pub remux: bool, pub containers: Vec, @@ -104,6 +104,7 @@ pub enum StreamContainer { WebM, Matroska, WebVTT, + MPEG4, JVTT, } @@ -203,6 +204,7 @@ impl Display for StreamContainer { StreamContainer::Matroska => "matroska", StreamContainer::WebVTT => "webvtt", StreamContainer::JVTT => "jvtt", + StreamContainer::MPEG4 => "mp4", }) } } @@ -214,6 +216,7 @@ impl FromStr for StreamContainer { "matroska" => StreamContainer::Matroska, "webvtt" => StreamContainer::WebVTT, "jvtt" => StreamContainer::JVTT, + "mp4" => StreamContainer::MPEG4, _ => return Err(()), }) } diff --git a/import/src/infojson.rs b/import/src/infojson.rs index 3a8d76e..1efbae9 100644 --- a/import/src/infojson.rs +++ b/import/src/infojson.rs @@ -86,7 +86,7 @@ pub struct YFormat { pub fps: Option, pub columns: Option, pub fragments: Option>, - pub resolution: String, + pub resolution: Option, pub dynamic_range: Option, pub aspect_ratio: Option, pub http_headers: HashMap, diff --git a/server/src/routes/compat/jellyfin/mod.rs b/server/src/routes/compat/jellyfin/mod.rs index 6066760..7393c5f 100644 --- a/server/src/routes/compat/jellyfin/mod.rs +++ b/server/src/routes/compat/jellyfin/mod.rs @@ -5,18 +5,14 @@ */ pub mod models; -use crate::routes::{ - stream::rocket_uri_macro_r_stream, - ui::{ - account::{login_logic, session::Session}, - assets::{ - rocket_uri_macro_r_asset, rocket_uri_macro_r_item_backdrop, - rocket_uri_macro_r_item_poster, - }, - error::MyResult, - node::{aspect_class, DatabaseNodeUserDataExt}, - 
sort::{filter_and_sort_nodes, FilterProperty, NodeFilterSort, SortOrder, SortProperty}, +use crate::routes::ui::{ + account::{login_logic, session::Session}, + assets::{ + rocket_uri_macro_r_asset, rocket_uri_macro_r_item_backdrop, rocket_uri_macro_r_item_poster, }, + error::MyResult, + node::{aspect_class, DatabaseNodeUserDataExt}, + sort::{filter_and_sort_nodes, FilterProperty, NodeFilterSort, SortOrder, SortProperty}, }; use anyhow::{anyhow, Context}; use jellybase::{database::Database, CONF}; diff --git a/stream/src/fragment.rs b/stream/src/fragment.rs index b2e254b..e0644aa 100644 --- a/stream/src/fragment.rs +++ b/stream/src/fragment.rs @@ -54,6 +54,7 @@ pub async fn fragment_stream( let location = transcode( &format!("{path:?} {track_num} {index} {format_num} {container}"), // TODO maybe not use the entire source format, + container, move |b| { tokio::task::spawn_blocking(move || { if let Err(err) = jellyremuxer::write_fragment_into( diff --git a/stream/src/lib.rs b/stream/src/lib.rs index eb56529..18ad2a7 100644 --- a/stream/src/lib.rs +++ b/stream/src/lib.rs @@ -7,6 +7,7 @@ pub mod fragment; pub mod fragment_index; pub mod hls; +pub mod stream_info; pub mod webvtt; use anyhow::{anyhow, Context, Result}; @@ -14,18 +15,14 @@ use fragment::fragment_stream; use fragment_index::fragment_index_stream; use hls::{hls_master_stream, hls_variant_stream}; use jellybase::common::{ - stream::{ - StreamContainer, StreamFormatInfo, StreamInfo, StreamSegmentInfo, StreamSpec, - StreamTrackInfo, TrackKind, - }, + stream::{StreamContainer, StreamSpec}, Node, }; -use jellyremuxer::metadata::{matroska_metadata, MatroskaMetadata}; use std::{collections::BTreeSet, io::SeekFrom, ops::Range, path::PathBuf, sync::Arc}; +use stream_info::{stream_info, write_stream_info}; use tokio::{ fs::File, io::{duplex, AsyncReadExt, AsyncSeekExt, AsyncWriteExt, DuplexStream}, - task::spawn_blocking, }; use tokio_util::io::SyncIoBridge; @@ -50,6 +47,7 @@ pub fn stream_head(spec: &StreamSpec) -> StreamHead { StreamContainer::Matroska => "video/x-matroska", StreamContainer::WebVTT => "text/vtt", StreamContainer::JVTT => "application/jellything-vtt+json", + StreamContainer::MPEG4 => "video/mp4", }; match spec { StreamSpec::Whep { .. } => cons("application/x-todo", false), @@ -103,87 +101,6 @@ pub async fn stream( Ok(a) } -async fn async_matroska_metadata(path: PathBuf) -> Result> { - Ok(spawn_blocking(move || matroska_metadata(&path)).await??) 
-} - -pub(crate) struct InternalStreamInfo { - pub paths: Vec, - pub metadata: Vec>, - pub track_to_file: Vec<(usize, u64)>, -} - -async fn stream_info(info: Arc) -> Result<(InternalStreamInfo, StreamInfo)> { - let mut metadata = Vec::new(); - let mut paths = Vec::new(); - for path in &info.files { - metadata.push(async_matroska_metadata(path.clone()).await?); - paths.push(path.clone()); - } - - let mut tracks = Vec::new(); - let mut track_to_file = Vec::new(); - - for (i, m) in metadata.iter().enumerate() { - if let Some(t) = &m.tracks { - for t in &t.entries { - let mut formats = Vec::new(); - formats.push(StreamFormatInfo { - codec: t.codec_id.to_string(), - remux: true, - byterate: 10., // TODO - containers: [StreamContainer::Matroska].to_vec(), - bit_depth: t.audio.as_ref().and_then(|a| a.bit_depth.map(|e| e as u8)), - samplerate: t.audio.as_ref().map(|a| a.sampling_frequency), - channels: t.audio.as_ref().map(|a| a.channels as usize), - width: t.video.as_ref().map(|v| v.pixel_width), - height: t.video.as_ref().map(|v| v.pixel_height), - ..Default::default() - }); - tracks.push(StreamTrackInfo { - name: None, - kind: match t.track_type { - 1 => TrackKind::Video, - 2 => TrackKind::Audio, - 17 => TrackKind::Subtitle, - _ => todo!(), - }, - formats, - }); - track_to_file.push((i, t.track_number)); - } - } - } - - let segment = StreamSegmentInfo { - name: None, - duration: metadata[0] - .info - .as_ref() - .unwrap() - .duration - .unwrap_or_default(), - tracks, - }; - Ok(( - InternalStreamInfo { - metadata, - paths, - track_to_file, - }, - StreamInfo { - name: info.info.title.clone(), - segments: vec![segment], - }, - )) -} - -async fn write_stream_info(info: Arc, mut b: DuplexStream) -> Result<()> { - let (_, info) = stream_info(info).await?; - b.write_all(&serde_json::to_vec(&info)?).await?; - Ok(()) -} - async fn remux_stream( node: Arc, spec: StreamSpec, diff --git a/stream/src/stream_info.rs b/stream/src/stream_info.rs new file mode 100644 index 0000000..9d3d741 --- /dev/null +++ b/stream/src/stream_info.rs @@ -0,0 +1,164 @@ +use anyhow::Result; +use ebml_struct::matroska::TrackEntry; +use jellybase::{ + common::stream::{ + StreamContainer, StreamFormatInfo, StreamInfo, StreamSegmentInfo, StreamTrackInfo, + TrackKind, + }, + CONF, +}; +use jellyremuxer::metadata::{matroska_metadata, MatroskaMetadata}; +use std::{path::PathBuf, sync::Arc}; +use tokio::{ + io::{AsyncWriteExt, DuplexStream}, + spawn, + task::spawn_blocking, +}; + +use crate::SMediaInfo; + +async fn async_matroska_metadata(path: PathBuf) -> Result> { + Ok(spawn_blocking(move || matroska_metadata(&path)).await??) 
+} + +pub(crate) struct InternalStreamInfo { + pub paths: Vec, + pub metadata: Vec>, + pub track_to_file: Vec<(usize, u64)>, +} + +pub(crate) async fn stream_info(info: Arc) -> Result<(InternalStreamInfo, StreamInfo)> { + let mut metadata = Vec::new(); + let mut paths = Vec::new(); + for path in &info.files { + metadata.push(async_matroska_metadata(path.clone()).await?); + paths.push(path.clone()); + } + let mut tracks = Vec::new(); + let mut track_to_file = Vec::new(); + + for (i, m) in metadata.iter().enumerate() { + if let Some(t) = &m.tracks { + for t in &t.entries { + tracks.push(StreamTrackInfo { + name: None, + kind: match t.track_type { + 1 => TrackKind::Video, + 2 => TrackKind::Audio, + 17 => TrackKind::Subtitle, + _ => todo!(), + }, + formats: stream_formats(t), + }); + track_to_file.push((i, t.track_number)); + } + } + } + + let segment = StreamSegmentInfo { + name: None, + duration: metadata[0] + .info + .as_ref() + .unwrap() + .duration + .unwrap_or_default(), + tracks, + }; + Ok(( + InternalStreamInfo { + metadata, + paths, + track_to_file, + }, + StreamInfo { + name: info.info.title.clone(), + segments: vec![segment], + }, + )) +} + +fn stream_formats(t: &TrackEntry) -> Vec { + let mut formats = Vec::new(); + formats.push(StreamFormatInfo { + codec: t.codec_id.to_string(), + remux: true, + bitrate: 2_000_000., // TODO + containers: containers_by_codec(&t.codec_id), + bit_depth: t.audio.as_ref().and_then(|a| a.bit_depth.map(|e| e as u8)), + samplerate: t.audio.as_ref().map(|a| a.sampling_frequency), + channels: t.audio.as_ref().map(|a| a.channels as usize), + width: t.video.as_ref().map(|v| v.pixel_width), + height: t.video.as_ref().map(|v| v.pixel_height), + ..Default::default() + }); + + match t.track_type { + 1 => { + let sw = t.video.as_ref().unwrap().pixel_width; + let sh = t.video.as_ref().unwrap().pixel_height; + for (w, br) in [(3840, 8e6), (1920, 5e6), (1280, 3e6), (640, 1e6)] { + if w > sw { + continue; + } + let h = (w * sh) / sw; + for (cid, enable) in [ + ("V_AV1", CONF.encoders.av1.is_some()), + ("V_VP8", CONF.encoders.vp8.is_some()), + ("V_VP9", CONF.encoders.vp9.is_some()), + ("V_AVC", CONF.encoders.avc.is_some()), + ("V_HEVC", CONF.encoders.hevc.is_some()), + ] { + if enable { + formats.push(StreamFormatInfo { + codec: cid.to_string(), + bitrate: br, + remux: false, + containers: containers_by_codec(cid), + width: Some(w), + height: Some(h), + samplerate: None, + channels: None, + bit_depth: None, + }); + } + } + } + } + 2 => { + for br in [256e3, 128e3, 64e3] { + formats.push(StreamFormatInfo { + codec: "A_OPUS".to_string(), + bitrate: br, + remux: false, + containers: containers_by_codec("A_OPUS"), + width: None, + height: None, + samplerate: Some(48e3), + channels: Some(2), + bit_depth: Some(32), + }); + } + } + 17 => {} + _ => {} + } + + formats +} + +fn containers_by_codec(codec: &str) -> Vec { + use StreamContainer::*; + match codec { + "V_VP8" | "V_VP9" | "V_AV1" | "A_OPUS" | "A_VORBIS" => vec![Matroska, WebM], + "V_AVC" | "A_AAC" => vec![Matroska, MPEG4], + "S_TEXT/UTF8" | "S_TEXT/WEBVTT" => vec![Matroska, WebVTT, WebM, JVTT], + _ => vec![Matroska], + } +} + +pub(crate) async fn write_stream_info(info: Arc, mut b: DuplexStream) -> Result<()> { + let (_, info) = stream_info(info).await?; + spawn(async move { b.write_all(&serde_json::to_vec(&info)?).await }); + Ok(()) +} diff --git a/stream/src/webvtt.rs b/stream/src/webvtt.rs index 960849c..e9f0181 100644 --- a/stream/src/webvtt.rs +++ b/stream/src/webvtt.rs @@ -3,23 +3,21 @@ which is licensed under 
the GNU Affero General Public License (version 3); see /COPYING. Copyright (C) 2025 metamuffin */ -use anyhow::{anyhow, Context, Result}; -use jellybase::{ - cache::async_cache_memory, - common::{stream::StreamSpec, Node}, - CONF, -}; -use jellyremuxer::extract::extract_track; -use jellytranscoder::subtitles::{parse_subtitles, write_webvtt}; +use anyhow::Result; +use jellybase::common::{stream::StreamSpec, Node}; use std::sync::Arc; -use tokio::io::{AsyncWriteExt, DuplexStream}; +use tokio::io::DuplexStream; pub async fn vtt_stream( json: bool, node: Arc, spec: StreamSpec, - mut b: DuplexStream, + b: DuplexStream, ) -> Result<()> { + let _ = b; + let _ = spec; + let _ = node; + let _ = json; // TODO cache // TODO should use fragments too? big films take too long... diff --git a/transcoder/src/fragment.rs b/transcoder/src/fragment.rs index b88339c..3cb4c40 100644 --- a/transcoder/src/fragment.rs +++ b/transcoder/src/fragment.rs @@ -7,7 +7,7 @@ use crate::LOCAL_VIDEO_TRANSCODING_TASKS; use jellybase::{ cache::{async_cache_file, CachePath}, - common::stream::StreamFormatInfo, + common::stream::{StreamContainer, StreamFormatInfo}, }; use log::{debug, info}; use std::process::Stdio; @@ -21,73 +21,41 @@ use tokio::{ pub async fn transcode( key: &str, - enc: &StreamFormatInfo, + format: &StreamFormatInfo, + container: StreamContainer, input: impl FnOnce(ChildStdin), ) -> anyhow::Result { async_cache_file( - &["frag-tc", key, &format!("{enc:?}")], + &["frag-tc", key, &format!("{format:?}")], move |mut output| async move { let _permit = LOCAL_VIDEO_TRANSCODING_TASKS.acquire().await?; - debug!("transcoding fragment with {enc:?}"); + debug!("transcoding fragment with {format:?}"); let mut args = Vec::::new(); - // match enc { - // EncodingProfile::Video { - // codec, - // preset, - // bitrate, - // width, - // } => { - // if let Some(width) = width { - // args.push("-vf".to_string()); - // args.push(format!("scale={width}:-1")); - // } - // args.push("-c:v".to_string()); - // args.push(codec.to_string()); - // if let Some(preset) = preset { - // args.push("-preset".to_string()); - // args.push(format!("{preset}")); - // } - // args.push("-b:v".to_string()); - // args.push(format!("{bitrate}")); - // } - // EncodingProfile::Audio { - // codec, - // bitrate, - // sample_rate, - // channels, - // } => { - // if let Some(channels) = channels { - // args.push("-ac".to_string()); - // args.push(format!("{channels}")) - // } - // if let Some(sample_rate) = sample_rate { - // args.push("-ar".to_string()); - // args.push(format!("{sample_rate}")) - // } - // args.push("-c:a".to_string()); - // args.push(codec.to_string()); - // args.push("-b:a".to_string()); - // args.push(format!("{bitrate}")); - // } - // EncodingProfile::Subtitles { codec } => { - // args.push("-c:s".to_string()); - // args.push(codec.to_string()); - // } - // }; + + match format.codec.as_str() { + "V_AVC" => {} + + _ => unreachable!(), + } + info!("encoding with {:?}", args.join(" ")); + let container = match container { + StreamContainer::WebM => "webm", + StreamContainer::Matroska => "matroska", + StreamContainer::WebVTT => "vtt", + StreamContainer::MPEG4 => "mp4", + StreamContainer::JVTT => unreachable!(), + }; + let mut proc = Command::new("ffmpeg") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .args(["-f", "matroska", "-i", "pipe:0"]) .args(args) - .args(["-f", "webm", "pipe:1"]) + .args(["-f", container, "pipe:1"]) .spawn()?; - // let mut proc = Command::new("cat") - // .stdin(Stdio::piped()) - // .stdout(Stdio::piped()) - // 
.spawn()?; let stdin = proc.stdin.take().unwrap(); let mut stdout = proc.stdout.take().unwrap(); -- cgit v1.2.3-70-g09d2 From 3b147cb1dfcbd5c7218e0accd5784d992d5ae21c Mon Sep 17 00:00:00 2001 From: metamuffin Date: Mon, 14 Apr 2025 18:42:16 +0200 Subject: things --- common/src/config.rs | 17 ++++++++-------- common/src/stream.rs | 2 +- server/src/routes/stream.rs | 4 ++-- stream/src/fragment.rs | 1 + stream/src/hls.rs | 26 ++++++++++++++++++++++++- stream/src/lib.rs | 38 +++++++----------------------------- stream/src/stream_info.rs | 6 +++--- transcoder/src/fragment.rs | 47 +++++++++++++++++++++++++++++++++++++-------- 8 files changed, 87 insertions(+), 54 deletions(-) (limited to 'server/src') diff --git a/common/src/config.rs b/common/src/config.rs index 3a48fea..df16ef0 100644 --- a/common/src/config.rs +++ b/common/src/config.rs @@ -29,19 +29,20 @@ pub struct GlobalConfig { #[serde(default)] pub default_permission_set: PermissionSet, #[serde(default)] - pub encoders: EncoderPreferences, + pub encoders: EncoderArgs, } #[derive(Debug, Deserialize, Serialize, Default)] -pub struct EncoderPreferences { - pub avc: Option, - pub hevc: Option, - pub vp8: Option, - pub vp9: Option, - pub av1: Option, +pub struct EncoderArgs { + pub avc: Option, + pub hevc: Option, + pub vp8: Option, + pub vp9: Option, + pub av1: Option, + pub generic: Option, } -#[derive(Debug, Deserialize, Serialize)] +#[derive(Debug, Deserialize, Serialize, Clone, Copy)] #[serde(rename_all = "snake_case")] pub enum EncoderClass { Aom, diff --git a/common/src/stream.rs b/common/src/stream.rs index 555a5d0..9fd7daf 100644 --- a/common/src/stream.rs +++ b/common/src/stream.rs @@ -76,7 +76,7 @@ pub struct StreamTrackInfo { pub formats: Vec, } -#[derive(Debug, Clone, Deserialize, Serialize)] +#[derive(Debug, Copy, Clone, Deserialize, Serialize)] #[serde(rename_all = "snake_case")] pub enum TrackKind { Video, diff --git a/server/src/routes/stream.rs b/server/src/routes/stream.rs index 8f97aec..0fbeb3a 100644 --- a/server/src/routes/stream.rs +++ b/server/src/routes/stream.rs @@ -42,8 +42,8 @@ pub async fn r_stream_head( #[get("/n//stream?")] pub async fn r_stream( - session: Session, - federation: &State, + _session: Session, + _federation: &State, db: &State, id: &str, range: Option, diff --git a/stream/src/fragment.rs b/stream/src/fragment.rs index e0644aa..26746fc 100644 --- a/stream/src/fragment.rs +++ b/stream/src/fragment.rs @@ -53,6 +53,7 @@ pub async fn fragment_stream( } else { let location = transcode( &format!("{path:?} {track_num} {index} {format_num} {container}"), // TODO maybe not use the entire source + track.kind, format, container, move |b| { diff --git a/stream/src/hls.rs b/stream/src/hls.rs index f06ac72..3dfbf01 100644 --- a/stream/src/hls.rs +++ b/stream/src/hls.rs @@ -15,7 +15,31 @@ use tokio::{ task::spawn_blocking, }; -pub async fn hls_master_stream( +pub async fn hls_supermultivariant_stream( + mut b: DuplexStream, + info: Arc, + container: StreamContainer, +) -> Result<()> { + let (_iinfo, info) = stream_info(info).await?; + let mut out = String::new(); + writeln!(out, "#EXTM3U")?; + writeln!(out, "#EXT-X-VERSION:4")?; + for (i, _seg) in info.segments.iter().enumerate() { + let uri = format!( + "stream{}", + StreamSpec::HlsMultiVariant { + segment: i, + container, + } + .to_query() + ); + writeln!(out, "{uri}")?; + } + tokio::spawn(async move { b.write_all(out.as_bytes()).await }); + Ok(()) +} + +pub async fn hls_multivariant_stream( mut b: DuplexStream, info: Arc, segment: SegmentNum, diff --git 
a/stream/src/lib.rs b/stream/src/lib.rs index 18ad2a7..4df87ae 100644 --- a/stream/src/lib.rs +++ b/stream/src/lib.rs @@ -10,10 +10,10 @@ pub mod hls; pub mod stream_info; pub mod webvtt; -use anyhow::{anyhow, Context, Result}; +use anyhow::{anyhow, bail, Context, Result}; use fragment::fragment_stream; use fragment_index::fragment_index_stream; -use hls::{hls_master_stream, hls_variant_stream}; +use hls::{hls_multivariant_stream, hls_supermultivariant_stream, hls_variant_stream}; use jellybase::common::{ stream::{StreamContainer, StreamSpec}, Node, @@ -24,7 +24,6 @@ use tokio::{ fs::File, io::{duplex, AsyncReadExt, AsyncSeekExt, AsyncWriteExt, DuplexStream}, }; -use tokio_util::io::SyncIoBridge; #[derive(Debug)] pub struct SMediaInfo { @@ -71,13 +70,12 @@ pub async fn stream( let (a, b) = duplex(4096); match spec { - StreamSpec::Whep { track, seek } => todo!(), - StreamSpec::WhepControl { token } => todo!(), - StreamSpec::Remux { tracks, container } => todo!(), StreamSpec::Original { track } => original_stream(info, track, range, b).await?, - StreamSpec::HlsSuperMultiVariant { container } => todo!(), + StreamSpec::HlsSuperMultiVariant { container } => { + hls_supermultivariant_stream(b, info, container).await?; + } StreamSpec::HlsMultiVariant { segment, container } => { - hls_master_stream(b, info, segment, container).await? + hls_multivariant_stream(b, info, segment, container).await? } StreamSpec::HlsVariant { segment, @@ -96,34 +94,12 @@ pub async fn stream( container, format, } => fragment_stream(b, info, track, segment, index, format, container).await?, + _ => bail!("todo"), } Ok(a) } -async fn remux_stream( - node: Arc, - spec: StreamSpec, - range: Range, - b: DuplexStream, -) -> Result<()> { - let b = SyncIoBridge::new(b); - - // tokio::task::spawn_blocking(move || { - // jellyremuxer::remux_stream_into( - // b, - // range, - // CONF.media_path.to_owned(), - // &node, - // local_tracks, - // spec.track, - // spec.webm.unwrap_or(false), - // ) - // }); - - Ok(()) -} - async fn original_stream( info: Arc, track: usize, diff --git a/stream/src/stream_info.rs b/stream/src/stream_info.rs index 9d3d741..a8b6989 100644 --- a/stream/src/stream_info.rs +++ b/stream/src/stream_info.rs @@ -23,7 +23,7 @@ async fn async_matroska_metadata(path: PathBuf) -> Result> pub(crate) struct InternalStreamInfo { pub paths: Vec, - pub metadata: Vec>, + pub _metadata: Vec>, pub track_to_file: Vec<(usize, u64)>, } @@ -67,7 +67,7 @@ pub(crate) async fn stream_info(info: Arc) -> Result<(InternalStream }; Ok(( InternalStreamInfo { - metadata, + _metadata: metadata, paths, track_to_file, }, @@ -83,7 +83,7 @@ fn stream_formats(t: &TrackEntry) -> Vec { formats.push(StreamFormatInfo { codec: t.codec_id.to_string(), remux: true, - bitrate: 2_000_000., // TODO + bitrate: 10_000_000., // TODO containers: containers_by_codec(&t.codec_id), bit_depth: t.audio.as_ref().and_then(|a| a.bit_depth.map(|e| e as u8)), samplerate: t.audio.as_ref().map(|a| a.sampling_frequency), diff --git a/transcoder/src/fragment.rs b/transcoder/src/fragment.rs index 3cb4c40..1d06e9a 100644 --- a/transcoder/src/fragment.rs +++ b/transcoder/src/fragment.rs @@ -7,7 +7,8 @@ use crate::LOCAL_VIDEO_TRANSCODING_TASKS; use jellybase::{ cache::{async_cache_file, CachePath}, - common::stream::{StreamContainer, StreamFormatInfo}, + common::stream::{StreamContainer, StreamFormatInfo, TrackKind}, + CONF, }; use log::{debug, info}; use std::process::Stdio; @@ -21,6 +22,7 @@ use tokio::{ pub async fn transcode( key: &str, + kind: TrackKind, format: 
&StreamFormatInfo, container: StreamContainer, input: impl FnOnce(ChildStdin), @@ -31,15 +33,44 @@ pub async fn transcode( let _permit = LOCAL_VIDEO_TRANSCODING_TASKS.acquire().await?; debug!("transcoding fragment with {format:?}"); - let mut args = Vec::::new(); - - match format.codec.as_str() { - "V_AVC" => {} + let template = match format.codec.as_str() { + "V_AVC" => CONF.encoders.avc.as_ref(), + "V_HEVC" => CONF.encoders.hevc.as_ref(), + "V_VP8" => CONF.encoders.vp8.as_ref(), + "V_VP9" => CONF.encoders.vp9.as_ref(), + "V_AV1" => CONF.encoders.av1.as_ref(), + _ => None, + } + .or(CONF.encoders.generic.as_ref()) + .cloned() + .unwrap_or("ffmpeg %i %f %e %o".to_owned()); + let filter = match kind { + TrackKind::Video => format!("-vf scale={}:-1", format.width.unwrap()), + TrackKind::Audio => format!(""), + TrackKind::Subtitle => String::new(), + }; + let typechar = match kind { + TrackKind::Video => "v", + TrackKind::Audio => "a", + TrackKind::Subtitle => "s", + }; + let fallback_encoder = match format.codec.as_str() { + "A_OPUS" => "libopus", _ => unreachable!(), - } + }; + + let args = template + .replace("%i", "-f matroska -i pipe:0") + .replace("%o", "-f %C pipe:1") + .replace("%f", &filter) + .replace("%e", "-c:%t %c -b:%t %r") + .replace("%t", typechar) + .replace("%c", fallback_encoder) + .replace("%r", &(format.bitrate as i64).to_string()) + .replace("%C", &container.to_string()); - info!("encoding with {:?}", args.join(" ")); + info!("encoding with {:?}", args); let container = match container { StreamContainer::WebM => "webm", @@ -53,7 +84,7 @@ pub async fn transcode( .stdin(Stdio::piped()) .stdout(Stdio::piped()) .args(["-f", "matroska", "-i", "pipe:0"]) - .args(args) + .args(args.split(" ")) .args(["-f", container, "pipe:1"]) .spawn()?; -- cgit v1.2.3-70-g09d2
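
The "stream info for transcoding" patch builds a fixed resolution/bitrate ladder in stream_formats() (stream/src/stream_info.rs). A minimal standalone sketch of that ladder logic follows; the rung constants and the integer height computation match the patch, while the function name and the 1920x800 example source are invented here for illustration:

    // Sketch of the ladder loop in stream_formats(): the rung constants and
    // the (w * sh) / sw height computation are from the patch; everything
    // else (names, the example source size) is hypothetical.
    fn ladder(sw: u64, sh: u64) -> Vec<(u64, u64, f64)> {
        [(3840u64, 8e6), (1920, 5e6), (1280, 3e6), (640, 1e6)]
            .into_iter()
            .filter(|&(w, _)| w <= sw) // `if w > sw { continue; }`: never upscale
            .map(|(w, br)| (w, (w * sh) / sw, br)) // height keeps the aspect ratio
            .collect()
    }

    fn main() {
        // A 1920x800 source yields three downscaled-or-equal variants.
        assert_eq!(
            ladder(1920, 800),
            vec![(1920, 800, 5e6), (1280, 533, 3e6), (640, 266, 1e6)]
        );
    }

Since rungs wider than the source are skipped, a low-resolution source simply gets a shorter ladder; each surviving rung is then offered once per encoder enabled in CONF.encoders.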
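
The final patch expands %-placeholders in the configured encoder template (default "ffmpeg %i %f %e %o") before spawning ffmpeg. Below is a self-contained sketch of that expansion under assumed example values; the encoder name, scale width, bitrate and container are invented for illustration, and only the placeholder meanings and substitution order come from the patch:

    // Placeholder expansion as in jellytranscoder::fragment::transcode().
    // All concrete values here are examples, not taken from the patches.
    fn expand(template: &str) -> String {
        template
            .replace("%i", "-f matroska -i pipe:0") // input: matroska on stdin
            .replace("%o", "-f %C pipe:1")          // output; %C filled last
            .replace("%f", "-vf scale=1280:-1")     // filter for a video track
            .replace("%e", "-c:%t %c -b:%t %r")     // codec/bitrate flags
            .replace("%t", "v")                     // track type: v, a or s
            .replace("%c", "libsvtav1")             // assumed encoder name
            .replace("%r", "3000000")               // bitrate as integer string
            .replace("%C", "webm")                  // output container
    }

    fn main() {
        assert_eq!(
            expand("ffmpeg %i %f %e %o"),
            "ffmpeg -f matroska -i pipe:0 -vf scale=1280:-1 -c:v libsvtav1 -b:v 3000000 -f webm pipe:1"
        );
    }

The order is significant: %o introduces %C, which is only substituted in the last step, and the case-sensitive replace keeps %c (encoder) and %C (container) apart.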