aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--Cargo.lock5
-rw-r--r--base/src/assetfed.rs7
-rw-r--r--base/src/database.rs27
-rw-r--r--client/src/lib.rs2
-rw-r--r--common/src/config.rs119
-rw-r--r--common/src/jhls.rs27
-rw-r--r--common/src/lib.rs2
-rw-r--r--common/src/seek_index.rs33
-rw-r--r--common/src/stream.rs262
-rw-r--r--common/src/user.rs46
-rw-r--r--doc/api.md13
-rw-r--r--import/Cargo.toml5
-rw-r--r--import/src/infojson.rs2
-rw-r--r--import/src/lib.rs15
-rw-r--r--remuxer/Cargo.toml4
-rw-r--r--remuxer/src/extract.rs17
-rw-r--r--remuxer/src/fragment.rs109
-rw-r--r--remuxer/src/lib.rs66
-rw-r--r--remuxer/src/matroska_to_mpeg4.rs36
-rw-r--r--remuxer/src/matroska_to_webm.rs84
-rw-r--r--remuxer/src/metadata.rs (renamed from import/src/matroska.rs)31
-rw-r--r--remuxer/src/remux.rs572
-rw-r--r--remuxer/src/seek_index.rs33
-rw-r--r--server/src/routes/compat/jellyfin/mod.rs45
-rw-r--r--server/src/routes/stream.rs136
-rw-r--r--server/src/routes/ui/player.rs72
-rw-r--r--stream/Cargo.toml2
-rw-r--r--stream/src/fragment.rs126
-rw-r--r--stream/src/fragment_index.rs32
-rw-r--r--stream/src/hls.rs110
-rw-r--r--stream/src/jhls.rs47
-rw-r--r--stream/src/lib.rs167
-rw-r--r--stream/src/stream_info.rs169
-rw-r--r--stream/src/webvtt.rs116
-rw-r--r--transcoder/src/fragment.rs108
m---------web/script/jshelper0
-rw-r--r--web/script/player/download.ts2
-rw-r--r--web/script/player/mediacaps.ts101
-rw-r--r--web/script/player/mod.ts99
-rw-r--r--web/script/player/player.ts62
-rw-r--r--web/script/player/profiles.ts_ (renamed from web/script/player/profiles.ts)5
-rw-r--r--web/script/player/track/create.ts10
-rw-r--r--web/script/player/track/mod.ts4
-rw-r--r--web/script/player/track/mse.ts83
-rw-r--r--web/script/player/track/vtt.ts6
-rw-r--r--web/script/player/types_node.ts (renamed from web/script/player/jhls.d.ts)27
-rw-r--r--web/script/player/types_stream.ts39
-rw-r--r--web/style/js-player.css2
48 files changed, 1735 insertions, 1352 deletions
diff --git a/Cargo.lock b/Cargo.lock
index fbfec3f..94bead7 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1761,10 +1761,10 @@ dependencies = [
"base64",
"bincode",
"crossbeam-channel",
- "ebml-struct",
"futures",
"jellybase",
"jellyclient",
+ "jellyremuxer",
"log",
"rayon",
"regex",
@@ -1792,6 +1792,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"bincode",
+ "ebml-struct",
"jellybase",
"jellymatroska",
"log",
@@ -1804,7 +1805,9 @@ name = "jellystream"
version = "0.1.0"
dependencies = [
"anyhow",
+ "ebml-struct",
"jellybase",
+ "jellymatroska",
"jellyremuxer",
"jellytranscoder",
"log",
diff --git a/base/src/assetfed.rs b/base/src/assetfed.rs
index 575188d..697cacb 100644
--- a/base/src/assetfed.rs
+++ b/base/src/assetfed.rs
@@ -78,11 +78,4 @@ impl AssetInner {
pub fn is_federated(&self) -> bool {
matches!(self, Self::Federated { .. })
}
-
- pub fn as_local_track(self) -> Option<LocalTrack> {
- match self {
- AssetInner::LocalTrack(x) => Some(x),
- _ => None,
- }
- }
}
diff --git a/base/src/database.rs b/base/src/database.rs
index 2909498..c3ca5d4 100644
--- a/base/src/database.rs
+++ b/base/src/database.rs
@@ -14,7 +14,8 @@ use redb::{Durability, ReadableTable, StorageError, TableDefinition};
use std::{
fs::create_dir_all,
hash::{DefaultHasher, Hasher},
- path::Path,
+ path::{Path, PathBuf},
+ str::FromStr,
sync::{Arc, RwLock},
time::SystemTime,
};
@@ -39,6 +40,8 @@ const T_NODE_EXTERNAL_ID: TableDefinition<(&str, &str), [u8; 32]> =
TableDefinition::new("node_external_id");
const T_IMPORT_FILE_MTIME: TableDefinition<&[u8], u64> = TableDefinition::new("import_file_mtime");
const T_NODE_MTIME: TableDefinition<[u8; 32], u64> = TableDefinition::new("node_mtime");
+const T_NODE_MEDIA_PATHS: TableDefinition<([u8; 32], &str), ()> =
+ TableDefinition::new("node_media_paths");
#[derive(Clone)]
pub struct Database {
@@ -68,6 +71,7 @@ impl Database {
txn.open_table(T_NODE_MTIME)?;
txn.open_table(T_NODE_CHILDREN)?;
txn.open_table(T_NODE_EXTERNAL_ID)?;
+ txn.open_table(T_NODE_MEDIA_PATHS)?;
txn.open_table(T_IMPORT_FILE_MTIME)?;
txn.commit()?;
}
@@ -132,17 +136,20 @@ impl Database {
let mut t_node_children = txn.open_table(T_NODE_CHILDREN)?;
let mut t_node_external_id = txn.open_table(T_NODE_EXTERNAL_ID)?;
let mut t_import_file_mtime = txn.open_table(T_IMPORT_FILE_MTIME)?;
+ let mut t_node_media_paths = txn.open_table(T_NODE_MEDIA_PATHS)?;
t_node.retain(|_, _| false)?;
t_node_mtime.retain(|_, _| false)?;
t_node_children.retain(|_, _| false)?;
t_node_external_id.retain(|_, _| false)?;
t_import_file_mtime.retain(|_, _| false)?;
+ t_node_media_paths.retain(|_, _| false)?;
drop((
t_node,
t_node_mtime,
t_node_children,
t_node_external_id,
t_import_file_mtime,
+ t_node_media_paths,
));
txn.set_durability(Durability::Eventual);
txn.commit()?;
@@ -208,6 +215,24 @@ impl Database {
txn.commit()?;
Ok(())
}
+ pub fn get_node_media_paths(&self, id: NodeID) -> Result<Vec<PathBuf>> {
+ let txn = self.inner.begin_read()?;
+ let table = txn.open_table(T_NODE_MEDIA_PATHS)?;
+ let mut paths = Vec::new();
+ // TODO fix this
+ for p in table.range((id.0, "\0")..(id.0, "\x7f"))? {
+ paths.push(PathBuf::from_str(p?.0.value().1)?);
+ }
+ Ok(paths)
+ }
+ pub fn insert_node_media_path(&self, id: NodeID, path: &Path) -> Result<()> {
+ let txn = self.inner.begin_write()?;
+ let mut table = txn.open_table(T_NODE_MEDIA_PATHS)?;
+ table.insert((id.0, path.to_str().unwrap()), ())?;
+ drop(table);
+ txn.commit()?;
+ Ok(())
+ }
pub fn update_node_udata(
&self,
diff --git a/client/src/lib.rs b/client/src/lib.rs
index 96c39b6..d3172fd 100644
--- a/client/src/lib.rs
+++ b/client/src/lib.rs
@@ -165,7 +165,7 @@ impl Session {
pub fn stream_url(&self, id: NodeIDOrSlug, stream_spec: &StreamSpec) -> String {
format!(
- "{}/n/{}/stream?{}&{}",
+ "{}/n/{}/stream{}&{}",
self.instance.base(),
id,
stream_spec.to_query(),
diff --git a/common/src/config.rs b/common/src/config.rs
index d7682df..df16ef0 100644
--- a/common/src/config.rs
+++ b/common/src/config.rs
@@ -4,27 +4,53 @@
Copyright (C) 2025 metamuffin <metamuffin.org>
*/
-use crate::{jhls::EncodingProfile, user::PermissionSet};
+use crate::user::PermissionSet;
use serde::{Deserialize, Serialize};
use std::{collections::HashMap, path::PathBuf};
-#[rustfmt::skip]
#[derive(Debug, Deserialize, Serialize, Default)]
pub struct GlobalConfig {
pub hostname: String,
pub brand: String,
pub slogan: String,
- #[serde(default = "return_true" )] pub tls: bool,
- #[serde(default = "default::asset_path")] pub asset_path: PathBuf,
- #[serde(default = "default::database_path")] pub database_path: PathBuf,
- #[serde(default = "default::cache_path")] pub cache_path: PathBuf,
- #[serde(default = "default::media_path")] pub media_path: PathBuf,
- #[serde(default = "default::secrets_path")] pub secrets_path: PathBuf,
- #[serde(default = "default::transcoding_profiles")] pub transcoding_profiles: Vec<EncodingProfile>,
- #[serde(default = "default::max_in_memory_cache_size")] pub max_in_memory_cache_size: usize,
- #[serde(default)] pub admin_username: Option<String>,
- #[serde(default = "default::login_expire")] pub login_expire: i64,
- #[serde(default)] pub default_permission_set: PermissionSet,
+ #[serde(default = "return_true")]
+ pub tls: bool,
+ pub asset_path: PathBuf,
+ pub database_path: PathBuf,
+ pub cache_path: PathBuf,
+ pub media_path: PathBuf,
+ pub secrets_path: PathBuf,
+ #[serde(default = "max_in_memory_cache_size")]
+ pub max_in_memory_cache_size: usize,
+ #[serde(default)]
+ pub admin_username: Option<String>,
+ #[serde(default = "login_expire")]
+ pub login_expire: i64,
+ #[serde(default)]
+ pub default_permission_set: PermissionSet,
+ #[serde(default)]
+ pub encoders: EncoderArgs,
+}
+
+#[derive(Debug, Deserialize, Serialize, Default)]
+pub struct EncoderArgs {
+ pub avc: Option<String>,
+ pub hevc: Option<String>,
+ pub vp8: Option<String>,
+ pub vp9: Option<String>,
+ pub av1: Option<String>,
+ pub generic: Option<String>,
+}
+
+#[derive(Debug, Deserialize, Serialize, Clone, Copy)]
+#[serde(rename_all = "snake_case")]
+pub enum EncoderClass {
+ Aom,
+ Svt,
+ X26n,
+ Vpx,
+ Vaapi,
+ Rkmpp,
}
#[derive(Serialize, Deserialize, Debug, Default)]
@@ -58,68 +84,11 @@ pub struct ApiSecrets {
pub trakt: Option<String>,
}
-mod default {
- use crate::jhls::EncodingProfile;
- use std::path::PathBuf;
-
- pub fn login_expire() -> i64 {
- 60 * 60 * 24
- }
- pub fn asset_path() -> PathBuf {
- "data/assets".into()
- }
- pub fn database_path() -> PathBuf {
- "data/database".into()
- }
- pub fn cache_path() -> PathBuf {
- "data/cache".into()
- }
- pub fn media_path() -> PathBuf {
- "data/media".into()
- }
- pub fn secrets_path() -> PathBuf {
- "data/secrets.yaml".into()
- }
- pub fn max_in_memory_cache_size() -> usize {
- 50_000_000
- }
- pub fn transcoding_profiles() -> Vec<EncodingProfile> {
- vec![
- EncodingProfile::Video {
- codec: "libsvtav1".to_string(),
- preset: Some(8),
- bitrate: 2_000_000,
- width: Some(1920),
- },
- EncodingProfile::Video {
- codec: "libsvtav1".to_string(),
- preset: Some(8),
- bitrate: 1_200_000,
- width: Some(1280),
- },
- EncodingProfile::Video {
- codec: "libsvtav1".to_string(),
- preset: Some(8),
- bitrate: 300_000,
- width: Some(640),
- },
- EncodingProfile::Audio {
- codec: "libopus".to_string(),
- bitrate: 128_000,
- sample_rate: None,
- channels: Some(2),
- },
- EncodingProfile::Audio {
- codec: "libopus".to_string(),
- bitrate: 64_000,
- sample_rate: None,
- channels: Some(2),
- },
- EncodingProfile::Subtitles {
- codec: "webvtt".to_string(),
- },
- ]
- }
+fn login_expire() -> i64 {
+ 60 * 60 * 24
+}
+fn max_in_memory_cache_size() -> usize {
+ 50_000_000
}
fn return_true() -> bool {
diff --git a/common/src/jhls.rs b/common/src/jhls.rs
index 6dc976b..90f48f5 100644
--- a/common/src/jhls.rs
+++ b/common/src/jhls.rs
@@ -5,33 +5,6 @@
*/
use bincode::{Decode, Encode};
use serde::{Deserialize, Serialize};
-use std::ops::Range;
-
-#[derive(Debug, Clone, Deserialize, Serialize)]
-pub struct JhlsTrackIndex {
- pub extra_profiles: Vec<EncodingProfile>,
- pub fragments: Vec<Range<f64>>,
-}
-
-#[derive(Debug, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "snake_case")]
-pub enum EncodingProfile {
- Video {
- codec: String,
- preset: Option<u8>,
- bitrate: usize,
- width: Option<usize>,
- },
- Audio {
- codec: String,
- bitrate: usize,
- channels: Option<usize>,
- sample_rate: Option<f64>,
- },
- Subtitles {
- codec: String,
- },
-}
#[derive(Debug, Serialize, Deserialize, Encode, Decode)]
pub struct SubtitleCue {
diff --git a/common/src/lib.rs b/common/src/lib.rs
index 7685027..003a798 100644
--- a/common/src/lib.rs
+++ b/common/src/lib.rs
@@ -9,7 +9,6 @@ pub mod config;
pub mod helpers;
pub mod r#impl;
pub mod jhls;
-pub mod seek_index;
pub mod stream;
pub mod user;
@@ -173,7 +172,6 @@ pub type TrackID = usize;
pub struct LocalTrack {
pub path: PathBuf,
pub track: TrackID,
- pub codec_private: Option<Vec<u8>>,
}
#[derive(Debug, Clone, Deserialize, Serialize, Encode, Decode)]
diff --git a/common/src/seek_index.rs b/common/src/seek_index.rs
deleted file mode 100644
index 20cf394..0000000
--- a/common/src/seek_index.rs
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- This file is part of jellything (https://codeberg.org/metamuffin/jellything)
- which is licensed under the GNU Affero General Public License (version 3); see /COPYING.
- Copyright (C) 2025 metamuffin <metamuffin.org>
-*/
-use bincode::{Decode, Encode};
-
-pub const SEEK_INDEX_VERSION: u32 = 0x5eef1de4;
-
-#[derive(Debug, Clone, Decode, Encode)]
-pub struct SeekIndex {
- pub version: u32,
- pub blocks: Vec<BlockIndex>,
- pub keyframes: Vec<usize>,
-}
-
-#[derive(Debug, Clone, Decode, Encode)]
-pub struct BlockIndex {
- pub pts: u64,
- // pub duration: Option<u64>,
- pub source_off: u64, // points to start of SimpleBlock or BlockGroup (not the Block inside it)
- pub size: usize,
-}
-
-impl Default for SeekIndex {
- fn default() -> Self {
- Self {
- version: SEEK_INDEX_VERSION,
- blocks: Vec::new(),
- keyframes: Vec::new(),
- }
- }
-}
diff --git a/common/src/stream.rs b/common/src/stream.rs
index 1c285b3..55f2f49 100644
--- a/common/src/stream.rs
+++ b/common/src/stream.rs
@@ -3,80 +3,226 @@
which is licensed under the GNU Affero General Public License (version 3); see /COPYING.
Copyright (C) 2025 metamuffin <metamuffin.org>
*/
-use bincode::{Decode, Encode};
-#[cfg(feature = "rocket")]
-use rocket::{FromForm, FromFormField, UriDisplayQuery};
use serde::{Deserialize, Serialize};
+use std::{collections::BTreeMap, fmt::Display, str::FromStr};
+
+pub type SegmentNum = usize;
+pub type TrackNum = usize;
+pub type FormatNum = usize;
+pub type IndexNum = usize;
+
+#[derive(Debug, Clone, Deserialize, Serialize)]
+pub enum StreamSpec {
+ Whep {
+ track: TrackNum,
+ seek: u64,
+ },
+ WhepControl {
+ token: String,
+ },
+ Remux {
+ tracks: Vec<usize>,
+ container: StreamContainer,
+ },
+ Original {
+ track: TrackNum,
+ },
+ HlsSuperMultiVariant {
+ container: StreamContainer,
+ },
+ HlsMultiVariant {
+ segment: SegmentNum,
+ container: StreamContainer,
+ },
+ HlsVariant {
+ segment: SegmentNum,
+ track: TrackNum,
+ container: StreamContainer,
+ format: FormatNum,
+ },
+ Info {
+ segment: Option<u64>,
+ },
+ FragmentIndex {
+ segment: SegmentNum,
+ track: TrackNum,
+ },
+ Fragment {
+ segment: SegmentNum,
+ track: TrackNum,
+ index: IndexNum,
+ container: StreamContainer,
+ format: FormatNum,
+ },
+}
+
+#[derive(Debug, Clone, Deserialize, Serialize)]
+pub struct StreamInfo {
+ pub name: Option<String>,
+ pub segments: Vec<StreamSegmentInfo>,
+}
+
+#[derive(Debug, Clone, Deserialize, Serialize)]
+pub struct StreamSegmentInfo {
+ pub name: Option<String>,
+ pub duration: f64,
+ pub tracks: Vec<StreamTrackInfo>,
+}
#[derive(Debug, Clone, Deserialize, Serialize)]
-#[cfg_attr(feature = "rocket", derive(FromForm, UriDisplayQuery))]
-pub struct StreamSpec {
- pub track: Vec<usize>,
- pub format: StreamFormat,
- pub webm: Option<bool>,
- pub profile: Option<usize>,
- pub index: Option<usize>,
+pub struct StreamTrackInfo {
+ pub name: Option<String>,
+ pub kind: TrackKind,
+ pub formats: Vec<StreamFormatInfo>,
}
-#[rustfmt::skip]
-#[derive(Debug, Clone, Copy, Deserialize, Serialize, PartialEq, Eq, Hash, Encode, Decode)]
+#[derive(Debug, Copy, Clone, Deserialize, Serialize)]
#[serde(rename_all = "snake_case")]
-#[cfg_attr(feature = "rocket", derive(FromFormField, UriDisplayQuery))]
-pub enum StreamFormat {
- #[cfg_attr(feature = "rocket", field(value = "original"))] Original,
- #[cfg_attr(feature = "rocket", field(value = "matroska"))] Matroska,
- #[cfg_attr(feature = "rocket", field(value = "hlsmaster"))] HlsMaster,
- #[cfg_attr(feature = "rocket", field(value = "hlsvariant"))] HlsVariant,
- #[cfg_attr(feature = "rocket", field(value = "jhlsi"))] JhlsIndex,
- #[cfg_attr(feature = "rocket", field(value = "frag"))] Fragment,
- #[cfg_attr(feature = "rocket", field(value = "webvtt"))] Webvtt,
- #[cfg_attr(feature = "rocket", field(value = "jvtt"))] Jvtt,
+pub enum TrackKind {
+ Video,
+ Audio,
+ Subtitle,
}
-impl Default for StreamSpec {
- fn default() -> Self {
- Self {
- track: Vec::new(),
- format: StreamFormat::Matroska,
- webm: Some(true),
- profile: None,
- index: None,
- }
- }
+#[derive(Debug, Clone, Deserialize, Serialize, Default)]
+pub struct StreamFormatInfo {
+ pub codec: String,
+ pub bitrate: f64,
+ pub remux: bool,
+ pub containers: Vec<StreamContainer>,
+
+ pub width: Option<u64>,
+ pub height: Option<u64>,
+ pub samplerate: Option<f64>,
+ pub channels: Option<usize>,
+ pub bit_depth: Option<u8>,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize)]
+#[serde(rename_all = "lowercase")]
+pub enum StreamContainer {
+ WebM,
+ Matroska,
+ WebVTT,
+ MPEG4,
+ JVTT,
}
impl StreamSpec {
pub fn to_query(&self) -> String {
- use std::fmt::Write;
- let mut u = String::new();
- write!(u, "format={}", self.format.ident()).unwrap();
- for t in &self.track {
- write!(u, "&track={}", t).unwrap();
- }
- if let Some(profile) = self.profile {
- write!(u, "&profile={profile}").unwrap();
- }
- if let Some(index) = self.index {
- write!(u, "&index={index}").unwrap();
+ match self {
+ StreamSpec::Whep { track, seek } => format!("?whep&track={track}&seek={seek}"),
+ StreamSpec::WhepControl { token } => format!("?whepcontrol&token={token}"),
+ StreamSpec::Remux { tracks, container } => {
+ format!(
+ "?remux&tracks={}&container={container}",
+ tracks
+ .iter()
+ .map(|t| t.to_string())
+ .collect::<Vec<String>>()
+ .join(",")
+ )
+ }
+ StreamSpec::Original { track } => format!("?original&track={track}"),
+ StreamSpec::HlsSuperMultiVariant { container } => {
+ format!("?hlssupermultivariant&container={container}")
+ }
+ StreamSpec::HlsMultiVariant { segment, container } => {
+ format!("?hlsmultivariant&segment={segment}&container={container}")
+ }
+ StreamSpec::HlsVariant {
+ segment,
+ track,
+ container,
+ format,
+ } => format!(
+ "?hlsvariant&segment={segment}&track={track}&container={container}&format={format}"
+ ),
+ StreamSpec::Info {
+ segment: Some(segment),
+ } => format!("?info&segment={segment}"),
+ StreamSpec::Info { segment: None } => format!("?info"),
+ StreamSpec::FragmentIndex { segment, track } => {
+ format!("?fragmentindex&segment={segment}&track={track}")
+ }
+ StreamSpec::Fragment {
+ segment,
+ track,
+ index,
+ container,
+ format,
+ } => format!("?fragment&segment={segment}&track={track}&index={index}&container={container}&format={format}"),
}
- if let Some(webm) = self.webm {
- write!(u, "&webm={webm}").unwrap();
+ }
+ pub fn from_query_kv(query: &BTreeMap<String, String>) -> Result<Self, &'static str> {
+ let get_num = |k: &'static str| {
+ query
+ .get(k)
+ .ok_or(k)
+ .and_then(|a| a.parse().map_err(|_| "invalid number"))
+ };
+ let get_container = || {
+ query
+ .get("container")
+ .ok_or("container")
+ .and_then(|s| s.parse().map_err(|()| "unknown container"))
+ };
+ if query.contains_key("info") {
+ Ok(Self::Info {
+ segment: get_num("segment").ok(),
+ })
+ } else if query.contains_key("hlsmultivariant") {
+ Ok(Self::HlsMultiVariant {
+ segment: get_num("segment")? as SegmentNum,
+ container: get_container()?,
+ })
+ } else if query.contains_key("hlsvariant") {
+ Ok(Self::HlsVariant {
+ segment: get_num("segment")? as SegmentNum,
+ track: get_num("track")? as TrackNum,
+ format: get_num("format")? as FormatNum,
+ container: get_container()?,
+ })
+ } else if query.contains_key("fragment") {
+ Ok(Self::Fragment {
+ segment: get_num("segment")? as SegmentNum,
+ track: get_num("track")? as TrackNum,
+ format: get_num("format")? as FormatNum,
+ index: get_num("index")? as IndexNum,
+ container: get_container()?,
+ })
+ } else if query.contains_key("fragmentindex") {
+ Ok(Self::FragmentIndex {
+ segment: get_num("segment")? as SegmentNum,
+ track: get_num("track")? as TrackNum,
+ })
+ } else {
+ Err("invalid stream spec")
}
- u
}
}
-impl StreamFormat {
- pub fn ident(&self) -> &'static str {
- match self {
- StreamFormat::Jvtt => "jvtt",
- StreamFormat::Original => "original",
- StreamFormat::Matroska => "matroska",
- StreamFormat::HlsMaster => "hlsmaster",
- StreamFormat::HlsVariant => "hlsvariant",
- StreamFormat::JhlsIndex => "jhlsi",
- StreamFormat::Fragment => "frag",
- StreamFormat::Webvtt => "webvtt",
- }
+impl Display for StreamContainer {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.write_str(match self {
+ StreamContainer::WebM => "webm",
+ StreamContainer::Matroska => "matroska",
+ StreamContainer::WebVTT => "webvtt",
+ StreamContainer::JVTT => "jvtt",
+ StreamContainer::MPEG4 => "mpeg4",
+ })
+ }
+}
+impl FromStr for StreamContainer {
+ type Err = ();
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ Ok(match s {
+ "webm" => StreamContainer::WebM,
+ "matroska" => StreamContainer::Matroska,
+ "webvtt" => StreamContainer::WebVTT,
+ "jvtt" => StreamContainer::JVTT,
+ "mpeg4" => StreamContainer::MPEG4,
+ _ => return Err(()),
+ })
}
}
diff --git a/common/src/user.rs b/common/src/user.rs
index ef78eca..e0e7a0d 100644
--- a/common/src/user.rs
+++ b/common/src/user.rs
@@ -3,7 +3,6 @@
which is licensed under the GNU Affero General Public License (version 3); see /COPYING.
Copyright (C) 2025 metamuffin <metamuffin.org>
*/
-use crate::{stream::StreamFormat, user};
use bincode::{Decode, Encode};
#[cfg(feature = "rocket")]
use rocket::{FromFormField, UriDisplayQuery};
@@ -99,7 +98,6 @@ pub enum UserPermission {
ManageSelf,
AccessNode(String),
- StreamFormat(StreamFormat),
Transcode,
FederatedContent,
}
@@ -107,33 +105,11 @@ pub enum UserPermission {
impl UserPermission {
pub const ALL_ENUMERABLE: &'static [UserPermission] = {
use UserPermission::*;
- &[
- Admin,
- Transcode,
- ManageSelf,
- FederatedContent,
- StreamFormat(user::StreamFormat::Original),
- ]
+ &[Admin, Transcode, ManageSelf, FederatedContent]
};
pub fn default_value(&self) -> bool {
- use user::StreamFormat::*;
use UserPermission::*;
- matches!(
- self,
- Transcode
- | ManageSelf
- | FederatedContent
- | StreamFormat(
- JhlsIndex
- | Jvtt
- | HlsMaster
- | HlsVariant
- | Matroska
- | Fragment
- | Webvtt
- | Original // TODO remove later
- )
- )
+ matches!(self, Transcode | ManageSelf | FederatedContent)
}
}
@@ -143,15 +119,15 @@ impl Display for UserPermission {
UserPermission::ManageSelf => "manage self (password, display name, etc.)".to_string(),
UserPermission::FederatedContent => "access to federated content".to_string(),
UserPermission::Admin => "administrative rights".to_string(),
- UserPermission::StreamFormat(StreamFormat::Original) => {
- "downloading the original media".to_string()
- }
- UserPermission::StreamFormat(StreamFormat::Matroska) => {
- "downloading a remuxed WebM/Matroska version of the media ".to_string()
- }
- UserPermission::StreamFormat(x) => {
- format!("downloading media via {x:?}")
- }
+ // UserPermission::StreamFormat(StreamFormat::Original) => {
+ // "downloading the original media".to_string()
+ // }
+ // UserPermission::StreamFormat(StreamFormat::Matroska) => {
+ // "downloading a remuxed WebM/Matroska version of the media ".to_string()
+ // }
+ // UserPermission::StreamFormat(x) => {
+ // format!("downloading media via {x:?}")
+ // }
UserPermission::Transcode => "transcoding".to_string(),
// UserPermission::ManageUsers => format!("management of all users"),
// UserPermission::GenerateInvite => format!("inviting new users"),
diff --git a/doc/api.md b/doc/api.md
index 5713df7..b24fd46 100644
--- a/doc/api.md
+++ b/doc/api.md
@@ -111,19 +111,19 @@ federation.
by the server.
- `?whepcontrol&<token>`
- WebSocket endpoint for controlling WHEP playback. TODO schema
-- `?remux&<track...>&<webm>`
-- `?hlssupermultivariant&<webm>`
+- `?remux&<track...>&<container>`
+- `?hlssupermultivariant&<container>`
- Returns m3u8/HLS playlist of all known multi-variant playlists, one for each
segment. The plylist is updated for live media.
-- `?hlsmultivariant&<segment>&<webm>`
+- `?hlsmultivariant&<segment>&<container>`
- Returns m3u8/HLS playlist of all track formats' variant playlists.
-- `?hlsvariant&<segment>&<track>&<webm>&<format>`
+- `?hlsvariant&<segment>&<track>&<container>&<format>`
- Returns m3u8/HLS playlist of all known fragments of this track format. The
playlist is updated for live media.
- `?info&<segment?>`
- Returns JSON `SegmentInfo` if a segment index is provided, else `MediaInfo`
- `?fragmentindex&<segment>&<track>`
-- `?fragment&<segment>&<track>&<index>&<webm>&<format>`
+- `?fragment&<segment>&<track>&<index>&<container>&<format>`
```ts
interface MediaInfo {
@@ -147,10 +147,13 @@ interface TrackFormat {
bandwidth: number;
remux: bool;
title?: string;
+ containers: StreamContainer[];
a_sampling_frequency?: number;
a_channels?: number;
v_resolution_width?: number;
av_bit_depth?: number;
}
+type FragmentIndex = number[];
+type StreamContainer = "webm" | "matroska" | "webvtt" | "jvtt";
```
diff --git a/import/Cargo.toml b/import/Cargo.toml
index 645326d..37b5a77 100644
--- a/import/Cargo.toml
+++ b/import/Cargo.toml
@@ -6,10 +6,7 @@ edition = "2021"
[dependencies]
jellybase = { path = "../base" }
jellyclient = { path = "../client" }
-
-ebml-struct = { git = "https://codeberg.org/metamuffin/ebml-struct", features = [
- "bincode",
-] }
+jellyremuxer = { path = "../remuxer" }
rayon = "1.10.0"
crossbeam-channel = "0.5.14"
diff --git a/import/src/infojson.rs b/import/src/infojson.rs
index 3a8d76e..1efbae9 100644
--- a/import/src/infojson.rs
+++ b/import/src/infojson.rs
@@ -86,7 +86,7 @@ pub struct YFormat {
pub fps: Option<f64>,
pub columns: Option<u32>,
pub fragments: Option<Vec<YFragment>>,
- pub resolution: String,
+ pub resolution: Option<String>,
pub dynamic_range: Option<String>,
pub aspect_ratio: Option<f64>,
pub http_headers: HashMap<String, String>,
diff --git a/import/src/lib.rs b/import/src/lib.rs
index f7c047e..78a99c3 100644
--- a/import/src/lib.rs
+++ b/import/src/lib.rs
@@ -7,16 +7,15 @@ use anyhow::{anyhow, bail, Context, Result};
use infojson::YVideo;
use jellybase::{
assetfed::AssetInner,
- common::{
- Chapter, LocalTrack, MediaInfo, Node, NodeID, NodeKind, Rating, SourceTrack,
- SourceTrackKind, TrackSource,
- },
+ common::{Chapter, MediaInfo, Node, NodeID, NodeKind, Rating, SourceTrack, SourceTrackKind},
database::Database,
CONF, SECRETS,
};
-use jellyclient::{Appearance, PeopleGroup, TmdbKind, TraktKind, Visibility};
+use jellyclient::{
+ Appearance, LocalTrack, PeopleGroup, TmdbKind, TrackSource, TraktKind, Visibility,
+};
+use jellyremuxer::metadata::checked_matroska_metadata;
use log::info;
-use matroska::matroska_metadata;
use rayon::iter::{ParallelBridge, ParallelIterator};
use regex::Regex;
use std::{
@@ -36,7 +35,6 @@ use tokio::{
use trakt::Trakt;
pub mod infojson;
-pub mod matroska;
pub mod tmdb;
pub mod trakt;
@@ -279,7 +277,7 @@ fn import_media_file(
visibility: Visibility,
) -> Result<()> {
info!("media file {path:?}");
- let Some(m) = (*matroska_metadata(path)?).to_owned() else {
+ let Some(m) = (*checked_matroska_metadata(path)?).to_owned() else {
return Ok(());
};
let infojson = m
@@ -398,7 +396,6 @@ fn import_media_file(
},
source: TrackSource::Local(
AssetInner::LocalTrack(LocalTrack {
- codec_private: track.codec_private,
path: path.to_owned(),
track: track.track_number as usize,
})
diff --git a/remuxer/Cargo.toml b/remuxer/Cargo.toml
index 2313dcc..16713df 100644
--- a/remuxer/Cargo.toml
+++ b/remuxer/Cargo.toml
@@ -13,3 +13,7 @@ log = { workspace = true }
serde = { version = "1.0.217", features = ["derive"] }
bincode = { version = "2.0.0-rc.3", features = ["serde"] }
+
+ebml-struct = { git = "https://codeberg.org/metamuffin/ebml-struct", features = [
+ "bincode",
+] }
diff --git a/remuxer/src/extract.rs b/remuxer/src/extract.rs
index 12e4003..15c1e9d 100644
--- a/remuxer/src/extract.rs
+++ b/remuxer/src/extract.rs
@@ -5,29 +5,22 @@
*/
use crate::seek_index::get_seek_index;
use anyhow::{anyhow, bail};
-use jellybase::common::LocalTrack;
use jellymatroska::{block::Block, read::EbmlReader, Master, MatroskaTag};
use log::debug;
use std::{fs::File, io::BufReader, path::PathBuf};
pub type TrackExtract = Vec<(u64, Option<u64>, Vec<u8>)>;
-pub fn extract_track(path_base: PathBuf, track_info: LocalTrack) -> anyhow::Result<TrackExtract> {
- let source_path = path_base.join(track_info.path);
- let file = File::open(&source_path)?;
+pub fn extract_track(path: PathBuf, track: u64) -> anyhow::Result<TrackExtract> {
+ let file = File::open(&path)?;
let mut reader = EbmlReader::new(BufReader::new(file));
- let index = get_seek_index(&source_path)?;
- let index = index
- .get(&(track_info.track as u64))
- .ok_or(anyhow!("track missing"))?;
+ let index = get_seek_index(&path)?;
+ let index = index.get(&track).ok_or(anyhow!("track missing"))?;
let mut out = Vec::new();
for b in &index.blocks {
reader.seek(b.source_off, MatroskaTag::BlockGroup(Master::Start))?;
let (duration, block) = read_group(&mut reader)?;
- assert_eq!(
- track_info.track, block.track as usize,
- "seek index is wrong"
- );
+ assert_eq!(track, block.track, "seek index is wrong");
out.push((b.pts, duration, block.data))
}
Ok(out)
diff --git a/remuxer/src/fragment.rs b/remuxer/src/fragment.rs
index 9fa68f3..0da1ed5 100644
--- a/remuxer/src/fragment.rs
+++ b/remuxer/src/fragment.rs
@@ -5,11 +5,12 @@
*/
use crate::{
- ebml_header, ebml_segment_info, ebml_track_entry, seek_index::get_seek_index,
+ ebml_header, ebml_segment_info, ebml_track_entry,
+ metadata::{matroska_metadata, MatroskaMetadata},
+ seek_index::get_seek_index,
segment_extractor::SegmentExtractIter,
};
use anyhow::{anyhow, Context, Result};
-use jellybase::common::{LocalTrack, Node, SourceTrackKind};
use jellymatroska::{read::EbmlReader, write::EbmlWriter, Master, MatroskaTag};
use log::{debug, info};
use std::{
@@ -19,34 +20,35 @@ use std::{
path::Path,
};
-const FRAGMENT_LENGTH: f64 = 2.;
+const FRAGMENT_LENGTH: f64 = 5.;
-pub fn fragment_index(
- path_base: &Path,
- item: &Node,
- local_track: &LocalTrack,
- track_index: usize,
-) -> Result<Vec<Range<f64>>> {
- let media_info = item.media.as_ref().unwrap();
- let source_path = path_base.join(&local_track.path);
- let index = get_seek_index(&source_path)?;
+pub fn fragment_index(path: &Path, track: u64) -> Result<Vec<Range<f64>>> {
+ let meta = matroska_metadata(path)?;
+ let duration = media_duration(&meta);
+ let force_kf = meta
+ .as_ref()
+ .tracks
+ .as_ref()
+ .unwrap()
+ .entries
+ .iter()
+ .find(|t| t.track_number == track)
+ .unwrap()
+ .track_type
+ == 17;
+
+ let index = get_seek_index(&path)?;
let index = index
- .get(&(local_track.track as u64))
+ .get(&track)
.ok_or(anyhow!("seek index track missing"))?;
- // everything is a keyframe (even though nothing is...)
- let force_kf = matches!(
- media_info.tracks[track_index].kind,
- SourceTrackKind::Subtitles { .. }
- );
-
let n_kf = if force_kf {
index.blocks.len()
} else {
index.keyframes.len()
};
- let average_kf_interval = media_info.duration / n_kf as f64;
+ let average_kf_interval = duration / n_kf as f64;
let kf_per_frag = (FRAGMENT_LENGTH / average_kf_interval).ceil() as usize;
debug!("average keyframe interval: {average_kf_interval}");
debug!(" => keyframes per frag {kf_per_frag}");
@@ -72,7 +74,7 @@ pub fn fragment_index(
index.keyframes.get((i + 1) * kf_per_frag).copied()
}
.map(|i| index.blocks[i].pts as f64 / 1000.)
- .unwrap_or(media_info.duration);
+ .unwrap_or(duration);
start..end
})
.collect())
@@ -80,45 +82,46 @@ pub fn fragment_index(
pub fn write_fragment_into(
writer: impl Write,
- path_base: &Path,
- item: &Node,
- local_track: &LocalTrack,
- track: usize,
+ path: &Path,
+ track: u64,
webm: bool,
+ title: &str,
n: usize,
) -> anyhow::Result<()> {
- info!("writing fragment {n} of {:?} (track {track})", item.title);
- let mut output = EbmlWriter::new(BufWriter::new(writer), 0);
- let media_info = item.media.as_ref().unwrap();
- let info = media_info
+ let meta = matroska_metadata(path)?;
+ let duration = media_duration(&meta);
+ let track_meta = meta
+ .as_ref()
.tracks
- .get(track)
- .ok_or(anyhow!("track not available"))?
- .to_owned();
- let source_path = path_base.join(&local_track.path);
+ .as_ref()
+ .unwrap()
+ .entries
+ .iter()
+ .find(|t| t.track_number == track)
+ .unwrap();
+ let force_kf = track_meta.track_type == 17;
+
+ info!("writing fragment {n} of {:?} (track {track})", title);
+ let mut output = EbmlWriter::new(BufWriter::new(writer), 0);
let mapped = 1;
- info!(
- "\t- {track} {source_path:?} ({} => {mapped})",
- local_track.track
- );
- info!("\t {}", info);
- let file = File::open(&source_path).context("opening source file")?;
- let index = get_seek_index(&source_path)?;
+ info!("\t- {track} {path:?} ({} => {mapped})", track);
+ // info!("\t {}", info);
+ let file = File::open(&path).context("opening source file")?;
+ let index = get_seek_index(&path)?;
let index = index
- .get(&(local_track.track as u64))
+ .get(&track)
.ok_or(anyhow!("track missing 2"))?
.to_owned();
debug!("\t seek index: {} blocks loaded", index.blocks.len());
let mut reader = EbmlReader::new(BufReader::new(file));
- let force_kf = matches!(info.kind, SourceTrackKind::Subtitles { .. });
let n_kf = if force_kf {
index.blocks.len()
} else {
index.keyframes.len()
};
-
- let average_kf_interval = media_info.duration / n_kf as f64;
+ debug!("{duration} {n_kf}");
+ let average_kf_interval = duration / n_kf as f64;
let kf_per_frag = (FRAGMENT_LENGTH / average_kf_interval).ceil() as usize;
debug!("average keyframe interval: {average_kf_interval}");
debug!(" => keyframes per frag {kf_per_frag}");
@@ -144,25 +147,20 @@ pub fn write_fragment_into(
.blocks
.get(end_block_index)
.map(|b| b.pts)
- .unwrap_or((media_info.duration * 1000.) as u64);
+ .unwrap_or((duration * 1000.) as u64);
output.write_tag(&ebml_header(webm))?;
output.write_tag(&MatroskaTag::Segment(Master::Start))?;
output.write_tag(&ebml_segment_info(
- format!("{}: {info}", item.title.clone().unwrap_or_default()),
+ title.to_string(),
(last_block_pts - start_block.pts) as f64 / 1000.,
))?;
output.write_tag(&MatroskaTag::Tracks(Master::Collected(vec![
- ebml_track_entry(
- mapped,
- local_track.track as u64 * 100, // TODO something else that is unique to the track
- &info,
- local_track.codec_private.clone(),
- ),
+ ebml_track_entry(mapped, track_meta),
])))?;
reader.seek(start_block.source_off, MatroskaTag::Cluster(Master::Start))?;
- let mut reader = SegmentExtractIter::new(&mut reader, local_track.track as u64);
+ let mut reader = SegmentExtractIter::new(&mut reader, track);
{
// TODO this one caused fragments to get dropped by MSE for no reason
@@ -214,3 +212,8 @@ pub fn write_fragment_into(
debug!("wrote {} bytes", output.position());
Ok(())
}
+
+fn media_duration(m: &MatroskaMetadata) -> f64 {
+ let info = m.info.as_ref().unwrap();
+ (info.duration.unwrap_or_default() * info.timestamp_scale as f64) / 1_000_000_000.
+}
diff --git a/remuxer/src/lib.rs b/remuxer/src/lib.rs
index a98ffad..931d5e6 100644
--- a/remuxer/src/lib.rs
+++ b/remuxer/src/lib.rs
@@ -3,18 +3,22 @@
which is licensed under the GNU Affero General Public License (version 3); see /COPYING.
Copyright (C) 2025 metamuffin <metamuffin.org>
*/
+#![feature(random, exit_status_error)]
pub mod extract;
pub mod fragment;
+pub mod metadata;
+pub mod matroska_to_mpeg4;
pub mod remux;
pub mod seek_index;
pub mod segment_extractor;
pub mod trim_writer;
+pub mod matroska_to_webm;
+use ebml_struct::matroska::TrackEntry;
pub use fragment::write_fragment_into;
-pub use remux::remux_stream_into;
-
-use jellybase::common::{SourceTrack, SourceTrackKind};
use jellymatroska::{Master, MatroskaTag};
+pub use matroska_to_mpeg4::matroska_to_mpeg4;
+pub use remux::remux_stream_into;
pub fn ebml_header(webm: bool) -> MatroskaTag {
MatroskaTag::Ebml(Master::Collected(vec![
@@ -41,66 +45,56 @@ pub fn ebml_segment_info(title: String, duration: f64) -> MatroskaTag {
]))
}
-pub fn ebml_track_entry(
- number: u64,
- uid: u64,
- track: &SourceTrack,
- codec_private: Option<Vec<u8>>,
-) -> MatroskaTag {
+pub fn ebml_track_entry(number: u64, track: &TrackEntry) -> MatroskaTag {
let mut els = vec![
MatroskaTag::TrackNumber(number),
- MatroskaTag::TrackUID(uid),
MatroskaTag::FlagLacing(track.flag_lacing),
MatroskaTag::Language(track.language.clone()),
- MatroskaTag::CodecID(track.codec.clone()),
+ MatroskaTag::CodecID(track.codec_id.clone()),
MatroskaTag::CodecDelay(track.codec_delay),
MatroskaTag::SeekPreRoll(track.seek_pre_roll),
];
if let Some(d) = &track.default_duration {
els.push(MatroskaTag::DefaultDuration(*d));
}
- match track.kind {
- SourceTrackKind::Video {
- width,
- height,
- display_height,
- display_width,
- display_unit,
- fps,
- } => {
+ match track.track_type {
+ 1 => {
+ let video = track.video.as_ref().unwrap();
els.push(MatroskaTag::TrackType(1));
let mut props = vec![
- MatroskaTag::PixelWidth(width),
- MatroskaTag::PixelHeight(height),
+ MatroskaTag::PixelWidth(video.pixel_width),
+ MatroskaTag::PixelHeight(video.pixel_height),
];
- props.push(MatroskaTag::DisplayWidth(display_width.unwrap_or(width)));
- props.push(MatroskaTag::DisplayHeight(display_height.unwrap_or(height)));
- props.push(MatroskaTag::DisplayUnit(display_unit));
- if let Some(fps) = fps {
+ props.push(MatroskaTag::DisplayWidth(
+ video.display_width.unwrap_or(video.pixel_width),
+ ));
+ props.push(MatroskaTag::DisplayHeight(
+ video.display_height.unwrap_or(video.pixel_height),
+ ));
+ props.push(MatroskaTag::DisplayUnit(video.display_unit));
+ if let Some(fps) = video.frame_rate {
props.push(MatroskaTag::FrameRate(fps))
}
els.push(MatroskaTag::Video(Master::Collected(props)))
}
- SourceTrackKind::Audio {
- channels,
- sample_rate,
- bit_depth,
- } => {
+ 2 => {
+ let audio = track.audio.as_ref().unwrap();
els.push(MatroskaTag::TrackType(2));
let mut props = vec![
- MatroskaTag::SamplingFrequency(sample_rate),
- MatroskaTag::Channels(channels.try_into().unwrap()),
+ MatroskaTag::SamplingFrequency(audio.sampling_frequency),
+ MatroskaTag::Channels(audio.channels),
];
- if let Some(bit_depth) = bit_depth {
+ if let Some(bit_depth) = audio.bit_depth {
props.push(MatroskaTag::BitDepth(bit_depth.try_into().unwrap()));
}
els.push(MatroskaTag::Audio(Master::Collected(props)));
}
- SourceTrackKind::Subtitles => {
+ 17 => {
els.push(MatroskaTag::TrackType(17));
}
+ _ => unreachable!(),
}
- if let Some(d) = &codec_private {
+ if let Some(d) = &track.codec_private {
els.push(MatroskaTag::CodecPrivate(d.clone()));
}
MatroskaTag::TrackEntry(Master::Collected(els))
diff --git a/remuxer/src/matroska_to_mpeg4.rs b/remuxer/src/matroska_to_mpeg4.rs
new file mode 100644
index 0000000..e8268e7
--- /dev/null
+++ b/remuxer/src/matroska_to_mpeg4.rs
@@ -0,0 +1,36 @@
+/*
+ This file is part of jellything (https://codeberg.org/metamuffin/jellything)
+ which is licensed under the GNU Affero General Public License (version 3); see /COPYING.
+ Copyright (C) 2025 metamuffin <metamuffin.org>
+*/
+use anyhow::Result;
+use std::{
+ fs::{remove_file, File},
+ io::{copy, Read, Write},
+ process::{Command, Stdio},
+ random::random,
+};
+
+pub fn matroska_to_mpeg4(
+ mut input: impl Read + Send + 'static,
+ mut output: impl Write,
+) -> Result<()> {
+ let path = format!("/tmp/jellything-tc-hack-{:016x}", random::<u64>());
+ let args = format!(
+ "-hide_banner -loglevel warning -f matroska -i pipe:0 -copyts -c copy -f mp4 -movflags frag_keyframe+empty_moov {path}"
+ );
+ let mut child = Command::new("ffmpeg")
+ .args(args.split(" "))
+ .stdin(Stdio::piped())
+ .stderr(Stdio::inherit())
+ .spawn()?;
+
+ let mut stdin = child.stdin.take().unwrap();
+ copy(&mut input, &mut stdin)?;
+ drop(stdin);
+ child.wait()?.exit_ok()?;
+ copy(&mut File::open(&path)?, &mut output)?;
+ remove_file(path)?;
+
+ Ok(())
+}
diff --git a/remuxer/src/matroska_to_webm.rs b/remuxer/src/matroska_to_webm.rs
new file mode 100644
index 0000000..b9a1819
--- /dev/null
+++ b/remuxer/src/matroska_to_webm.rs
@@ -0,0 +1,84 @@
+use crate::ebml_track_entry;
+use anyhow::Context;
+use ebml_struct::{
+ ids::*,
+ matroska::{Cluster, Ebml, Info, Tracks},
+ read::{EbmlReadExt, TagRead},
+ write::TagWrite,
+};
+use jellymatroska::{read::EbmlReader, write::EbmlWriter, Master, MatroskaTag};
+use log::warn;
+use std::io::{BufReader, BufWriter, ErrorKind, Read, Seek, Write};
+
+pub fn matroska_to_webm(
+ input: impl Read + Seek + 'static,
+ output: impl Write,
+) -> anyhow::Result<()> {
+ let mut output = EbmlWriter::new(BufWriter::new(output), 0);
+ let mut input = EbmlReader::new(BufReader::new(input));
+
+ Ebml {
+ ebml_version: 1,
+ ebml_read_version: 1,
+ ebml_max_id_length: 4,
+ ebml_max_size_length: 8,
+ doc_type: "webm".to_string(),
+ doc_type_version: 4,
+ doc_type_read_version: 2,
+ doc_type_extensions: vec![],
+ }
+ .write(&mut output)?;
+ output.write_tag(&MatroskaTag::Segment(Master::Start))?;
+
+ let (x, mut ebml) = input.read_tag()?;
+ assert_eq!(x, EL_EBML);
+ let ebml = Ebml::read(&mut ebml).unwrap();
+ assert!(ebml.doc_type == "matroska" || ebml.doc_type == "webm");
+ let (x, mut segment) = input.read_tag()?;
+ assert_eq!(x, EL_SEGMENT);
+
+ loop {
+ let (x, mut seg) = match segment.read_tag() {
+ Ok(o) => o,
+ Err(e) if e.kind() == ErrorKind::UnexpectedEof => break,
+ Err(e) => return Err(e.into()),
+ };
+ match x {
+ EL_INFO => {
+ let info = Info::read(&mut seg).context("info")?;
+ output.write_tag(&{
+ MatroskaTag::Info(Master::Collected(vec![
+ MatroskaTag::TimestampScale(info.timestamp_scale),
+ MatroskaTag::Duration(info.duration.unwrap_or_default()),
+ MatroskaTag::Title(info.title.unwrap_or_default()),
+ MatroskaTag::MuxingApp("jellyremux".to_string()),
+ MatroskaTag::WritingApp("jellything".to_string()),
+ ]))
+ })?;
+ }
+ EL_TRACKS => {
+ let tracks = Tracks::read(&mut seg).context("tracks")?;
+ output.write_tag(&MatroskaTag::Tracks(Master::Collected(
+ tracks
+ .entries
+ .into_iter()
+ .map(|t| ebml_track_entry(t.track_number, &t))
+ .collect(),
+ )))?;
+ }
+ EL_VOID | EL_CRC32 | EL_CUES | EL_SEEKHEAD | EL_ATTACHMENTS | EL_TAGS => {
+ seg.consume()?;
+ }
+ EL_CLUSTER => {
+ let cluster = Cluster::read(&mut seg).context("cluster")?;
+ // TODO mixing both ebml libraries :)))
+ cluster.write(&mut output)?;
+ }
+ id => {
+ warn!("unknown top-level element {id:x}");
+ seg.consume()?;
+ }
+ }
+ }
+ Ok(())
+}
diff --git a/import/src/matroska.rs b/remuxer/src/metadata.rs
index 1593463..c8a5f8f 100644
--- a/import/src/matroska.rs
+++ b/remuxer/src/metadata.rs
@@ -23,8 +23,8 @@ use std::{
sync::Arc,
};
-#[derive(Encode, Decode, Clone)]
-pub(crate) struct MatroskaMetadata {
+#[derive(Debug, Encode, Decode, Clone)]
+pub struct MatroskaMetadata {
pub info: Option<Info>,
pub tracks: Option<Tracks>,
pub cover: Option<Asset>,
@@ -32,14 +32,21 @@ pub(crate) struct MatroskaMetadata {
pub tags: Option<Tags>,
pub infojson: Option<Vec<u8>>,
}
-pub(crate) fn matroska_metadata(path: &Path) -> Result<Arc<Option<MatroskaMetadata>>> {
- cache_memory(&["mkmeta-v2", path.to_string_lossy().as_ref()], || {
- let mut magic = [0; 4];
- File::open(path)?.read_exact(&mut magic).ok();
- if !matches!(magic, [0x1A, 0x45, 0xDF, 0xA3]) {
- return Ok(None);
- }
-
+pub fn checked_matroska_metadata(path: &Path) -> Result<Arc<Option<MatroskaMetadata>>> {
+ cache_memory(
+ &["mkmeta-check-v1", path.to_string_lossy().as_ref()],
+ || {
+ let mut magic = [0; 4];
+ File::open(path)?.read_exact(&mut magic).ok();
+ if !matches!(magic, [0x1A, 0x45, 0xDF, 0xA3]) {
+ return Ok(None);
+ }
+ Ok(Some((*matroska_metadata(path)?).clone()))
+ },
+ )
+}
+pub fn matroska_metadata(path: &Path) -> Result<Arc<MatroskaMetadata>> {
+ cache_memory(&["mkmeta-v3", path.to_string_lossy().as_ref()], || {
info!("reading {path:?}");
let mut file = BufReader::new(File::open(path)?);
let mut file = file.by_ref().take(u64::MAX);
@@ -104,13 +111,13 @@ pub(crate) fn matroska_metadata(path: &Path) -> Result<Arc<Option<MatroskaMetada
}
}
}
- Ok(Some(MatroskaMetadata {
+ Ok(MatroskaMetadata {
chapters,
cover,
info,
infojson,
tags,
tracks,
- }))
+ })
})
}
diff --git a/remuxer/src/remux.rs b/remuxer/src/remux.rs
index 0507f1e..a44c58b 100644
--- a/remuxer/src/remux.rs
+++ b/remuxer/src/remux.rs
@@ -3,333 +3,311 @@
which is licensed under the GNU Affero General Public License (version 3); see /COPYING.
Copyright (C) 2025 metamuffin <metamuffin.org>
*/
-use crate::{
- ebml_header, ebml_track_entry, seek_index::get_seek_index,
- segment_extractor::SegmentExtractIter, trim_writer::TrimWriter,
-};
-use anyhow::{anyhow, Context};
-use jellybase::common::{
- seek_index::{BlockIndex, SeekIndex},
- LocalTrack, Node, SourceTrack,
-};
-use jellymatroska::{
- read::EbmlReader,
- write::{bad_vint_length, vint_length, EbmlWriter},
- Master, MatroskaTag,
-};
-use log::{debug, info, trace, warn};
-use std::{
- fs::File,
- io::{BufReader, BufWriter, Seek, SeekFrom, Write},
- ops::Range,
- path::PathBuf,
- sync::Arc,
- time::Instant,
-};
+use jellybase::common::Node;
+use std::{io::Write, ops::Range, path::PathBuf};
-struct ClusterLayout {
- position: usize,
- timestamp: u64,
- source_offsets: Vec<Option<u64>>,
- blocks: Vec<(usize, BlockIndex)>,
-}
+// struct ClusterLayout {
+// position: usize,
+// timestamp: u64,
+// source_offsets: Vec<Option<u64>>,
+// blocks: Vec<(usize, BlockIndex)>,
+// }
pub fn remux_stream_into(
- writer: impl Write,
- range: Range<usize>,
- path_base: PathBuf,
- item: &Node,
- track_sources: Vec<LocalTrack>,
- selection: Vec<usize>,
- webm: bool,
+ _writer: impl Write,
+ _range: Range<usize>,
+ _path_base: PathBuf,
+ _item: &Node,
+ _selection: Vec<usize>,
+ _webm: bool,
) -> anyhow::Result<()> {
- info!("remuxing {:?} to have tracks {selection:?}", item.title);
- let writer = TrimWriter::new(BufWriter::new(writer), range.clone());
- let mut output = EbmlWriter::new(writer, 0);
+ // info!("remuxing {:?} to have tracks {selection:?}", item.title);
+ // let writer = TrimWriter::new(BufWriter::new(writer), range.clone());
+ // let mut output = EbmlWriter::new(writer, 0);
- struct ReaderC {
- info: SourceTrack,
- reader: EbmlReader,
- mapped: u64,
- index: Arc<SeekIndex>,
- source_track_index: usize,
- codec_private: Option<Vec<u8>>,
- layouting_progress_index: usize,
- }
+ // struct ReaderC {
+ // info: SourceTrack,
+ // reader: EbmlReader,
+ // mapped: u64,
+ // index: Arc<SeekIndex>,
+ // source_track_index: usize,
+ // codec_private: Option<Vec<u8>>,
+ // layouting_progress_index: usize,
+ // }
- let timing_cp = Instant::now();
+ // let timing_cp = Instant::now();
- let mut inputs = selection
- .iter()
- .enumerate()
- .map(|(index, sel)| {
- let info = item
- .media
- .as_ref()
- .unwrap()
- .tracks
- .get(*sel)
- .ok_or(anyhow!("track not available"))?
- .to_owned();
- let private = &track_sources[index];
- let source_path = path_base.join(&private.path);
- let mapped = index as u64 + 1;
- info!("\t- {sel} {source_path:?} ({} => {mapped})", private.track);
- info!("\t {}", info);
- let file = File::open(&source_path).context("opening source file")?;
- let index = get_seek_index(&source_path)?;
- let index = index
- .get(&(private.track as u64))
- .ok_or(anyhow!("track missing 3"))?
- .to_owned();
- debug!("\t seek index: {} blocks loaded", index.blocks.len());
- let reader = EbmlReader::new(BufReader::new(file));
- Ok(ReaderC {
- index,
- reader,
- info,
- mapped,
- source_track_index: private.track,
- codec_private: private.codec_private.clone(),
- layouting_progress_index: 0,
- })
- })
- .collect::<anyhow::Result<Vec<_>>>()?;
+ // let mut inputs = selection
+ // .iter()
+ // .enumerate()
+ // .map(|(index, sel)| {
+ // let info = item
+ // .media
+ // .as_ref()
+ // .unwrap()
+ // .tracks
+ // .get(*sel)
+ // .ok_or(anyhow!("track not available"))?
+ // .to_owned();
+ // let source_path = path_base.join(&private.path);
+ // let mapped = index as u64 + 1;
+ // info!("\t- {sel} {source_path:?} ({} => {mapped})", private.track);
+ // info!("\t {}", info);
+ // let file = File::open(&source_path).context("opening source file")?;
+ // let index = get_seek_index(&source_path)?;
+ // let index = index
+ // .get(&(private.track as u64))
+ // .ok_or(anyhow!("track missing 3"))?
+ // .to_owned();
+ // debug!("\t seek index: {} blocks loaded", index.blocks.len());
+ // let reader = EbmlReader::new(BufReader::new(file));
+ // Ok(ReaderC {
+ // index,
+ // reader,
+ // info,
+ // mapped,
+ // source_track_index: private.track,
+ // codec_private: private.codec_private.clone(),
+ // layouting_progress_index: 0,
+ // })
+ // })
+ // .collect::<anyhow::Result<Vec<_>>>()?;
- info!("(perf) prepare inputs: {:?}", Instant::now() - timing_cp);
- let timing_cp = Instant::now();
+ // info!("(perf) prepare inputs: {:?}", Instant::now() - timing_cp);
+ // let timing_cp = Instant::now();
- output.write_tag(&ebml_header(webm))?;
+ // output.write_tag(&ebml_header(webm))?;
- output.write_tag(&MatroskaTag::Segment(Master::Start))?;
- let segment_offset = output.position();
+ // output.write_tag(&MatroskaTag::Segment(Master::Start))?;
+ // let segment_offset = output.position();
- output.write_tag(&MatroskaTag::Info(Master::Collected(vec![
- MatroskaTag::TimestampScale(1_000_000),
- MatroskaTag::Duration(item.media.as_ref().unwrap().duration * 1000.0),
- MatroskaTag::Title(item.title.clone().unwrap_or_default()),
- MatroskaTag::MuxingApp("jellyremux".to_string()),
- MatroskaTag::WritingApp("jellything".to_string()),
- ])))?;
+ // output.write_tag(&MatroskaTag::Info(Master::Collected(vec![
+ // MatroskaTag::TimestampScale(1_000_000),
+ // MatroskaTag::Duration(item.media.as_ref().unwrap().duration * 1000.0),
+ // MatroskaTag::Title(item.title.clone().unwrap_or_default()),
+ // MatroskaTag::MuxingApp("jellyremux".to_string()),
+ // MatroskaTag::WritingApp("jellything".to_string()),
+ // ])))?;
- let tracks_header = inputs
- .iter_mut()
- .map(|rc| ebml_track_entry(rc.mapped, rc.mapped, &rc.info, rc.codec_private.take()))
- .collect();
- output.write_tag(&MatroskaTag::Tracks(Master::Collected(tracks_header)))?;
+ // let tracks_header = inputs
+ // .iter_mut()
+ // .map(|rc| ebml_track_entry(rc.mapped, rc.mapped, &rc.info, rc.codec_private.take()))
+ // .collect();
+ // output.write_tag(&MatroskaTag::Tracks(Master::Collected(tracks_header)))?;
- let mut segment_layout: Vec<ClusterLayout> = {
- let mut cluster_pts = 0;
- let mut clusters = vec![];
- let mut cluster = vec![];
- let mut source_offsets = vec![None; inputs.len()];
- let mut gp = 0usize; // cluster position (in the segment)
- let mut p = 0usize; // block position (in the cluster)
- loop {
- let (track, block) = {
- let mut best_block = BlockIndex {
- pts: u64::MAX,
- size: 0,
- source_off: 0,
- };
- let mut best_track = 0;
- for (i, r) in inputs.iter().enumerate() {
- if let Some(v) = r.index.blocks.get(r.layouting_progress_index) {
- if v.pts < best_block.pts {
- best_block = v.to_owned();
- best_track = i;
- }
- };
- }
- (best_track, best_block)
- };
- inputs[track].layouting_progress_index += 1;
- source_offsets[track].get_or_insert(block.source_off);
- if block.pts > cluster_pts + 1_000 {
- let cluster_content_size = 1 + 1 // timestamp {tag, size}
- + bad_vint_length(cluster_pts) // timestamp tag value
- + p;
- let cluster_size = 4 // tag length
- + vint_length(cluster_content_size as u64) // size varint
- + cluster_content_size;
- clusters.push(ClusterLayout {
- position: gp, // relative to the first cluster
- timestamp: cluster_pts,
- source_offsets,
- blocks: std::mem::take(&mut cluster),
- });
+ // let mut segment_layout: Vec<ClusterLayout> = {
+ // let mut cluster_pts = 0;
+ // let mut clusters = vec![];
+ // let mut cluster = vec![];
+ // let mut source_offsets = vec![None; inputs.len()];
+ // let mut gp = 0usize; // cluster position (in the segment)
+ // let mut p = 0usize; // block position (in the cluster)
+ // loop {
+ // let (track, block) = {
+ // let mut best_block = BlockIndex {
+ // pts: u64::MAX,
+ // size: 0,
+ // source_off: 0,
+ // };
+ // let mut best_track = 0;
+ // for (i, r) in inputs.iter().enumerate() {
+ // if let Some(v) = r.index.blocks.get(r.layouting_progress_index) {
+ // if v.pts < best_block.pts {
+ // best_block = v.to_owned();
+ // best_track = i;
+ // }
+ // };
+ // }
+ // (best_track, best_block)
+ // };
+ // inputs[track].layouting_progress_index += 1;
+ // source_offsets[track].get_or_insert(block.source_off);
+ // if block.pts > cluster_pts + 1_000 {
+ // let cluster_content_size = 1 + 1 // timestamp {tag, size}
+ // + bad_vint_length(cluster_pts) // timestamp tag value
+ // + p;
+ // let cluster_size = 4 // tag length
+ // + vint_length(cluster_content_size as u64) // size varint
+ // + cluster_content_size;
+ // clusters.push(ClusterLayout {
+ // position: gp, // relative to the first cluster
+ // timestamp: cluster_pts,
+ // source_offsets,
+ // blocks: std::mem::take(&mut cluster),
+ // });
- cluster_pts = block.pts;
- source_offsets = vec![None; inputs.len()];
- gp += cluster_size;
- p = 0;
- }
- if block.pts == u64::MAX {
- break;
- }
+ // cluster_pts = block.pts;
+ // source_offsets = vec![None; inputs.len()];
+ // gp += cluster_size;
+ // p = 0;
+ // }
+ // if block.pts == u64::MAX {
+ // break;
+ // }
- let simpleblock_size = 1 + 2 + 1 // block {tracknum, pts_off, flags}
- // TODO does not work, if more than 127 tracks are present
- + block.size; // block payload
- p += 1; // simpleblock tag
- p += vint_length(simpleblock_size as u64); // simpleblock size vint
- p += simpleblock_size;
+ // let simpleblock_size = 1 + 2 + 1 // block {tracknum, pts_off, flags}
+ // // TODO does not work, if more than 127 tracks are present
+ // + block.size; // block payload
+ // p += 1; // simpleblock tag
+ // p += vint_length(simpleblock_size as u64); // simpleblock size vint
+ // p += simpleblock_size;
- cluster.push((track, block))
- }
- info!("segment layout computed ({} clusters)", clusters.len());
- clusters
- };
- info!(
- "(perf) compute segment layout: {:?}",
- Instant::now() - timing_cp
- );
- let timing_cp = Instant::now();
+ // cluster.push((track, block))
+ // }
+ // info!("segment layout computed ({} clusters)", clusters.len());
+ // clusters
+ // };
+ // info!(
+ // "(perf) compute segment layout: {:?}",
+ // Instant::now() - timing_cp
+ // );
+ // let timing_cp = Instant::now();
- let max_cue_size = 4 // cues id
- + 8 // cues len
- + ( // cues content
- 1 // cp id
- + 1 // cp len
- + ( // cp content
- 1 // ctime id,
- + 1 // ctime len
- + 8 // ctime content uint
- + ( // ctps
- 1 // ctp id
- + 8 // ctp len
- + (// ctp content
- 1 // ctrack id
- + 1 // ctrack size
- + 1 // ctrack content int
- // TODO this breaks if inputs.len() >= 127
- + 1 // ccp id
- + 1 // ccp len
- + 8 // ccp content offset
- )
- )
- ) * inputs.len()
- ) * segment_layout.len()
- + 1 // void id
- + 8; // void len
+ // let max_cue_size = 4 // cues id
+ // + 8 // cues len
+ // + ( // cues content
+ // 1 // cp id
+ // + 1 // cp len
+ // + ( // cp content
+ // 1 // ctime id,
+ // + 1 // ctime len
+ // + 8 // ctime content uint
+ // + ( // ctps
+ // 1 // ctp id
+ // + 8 // ctp len
+ // + (// ctp content
+ // 1 // ctrack id
+ // + 1 // ctrack size
+ // + 1 // ctrack content int
+ // // TODO this breaks if inputs.len() >= 127
+ // + 1 // ccp id
+ // + 1 // ccp len
+ // + 8 // ccp content offset
+ // )
+ // )
+ // ) * inputs.len()
+ // ) * segment_layout.len()
+ // + 1 // void id
+ // + 8; // void len
- let first_cluster_offset_predict = max_cue_size + output.position();
+ // let first_cluster_offset_predict = max_cue_size + output.position();
- // make the cluster position relative to the segment start as they should
- segment_layout
- .iter_mut()
- .for_each(|e| e.position += first_cluster_offset_predict - segment_offset);
+ // // make the cluster position relative to the segment start as they should
+ // segment_layout
+ // .iter_mut()
+ // .for_each(|e| e.position += first_cluster_offset_predict - segment_offset);
- output.write_tag(&MatroskaTag::Cues(Master::Collected(
- segment_layout
- .iter()
- .map(|cluster| {
- MatroskaTag::CuePoint(Master::Collected(
- Some(MatroskaTag::CueTime(cluster.timestamp))
- .into_iter()
- // TODO: Subtitles should not have cues for every cluster
- .chain(inputs.iter().map(|i| {
- MatroskaTag::CueTrackPositions(Master::Collected(vec![
- MatroskaTag::CueTrack(i.mapped),
- MatroskaTag::CueClusterPosition(cluster.position as u64),
- ]))
- }))
- .collect(),
- ))
- })
- .collect(),
- )))?;
- output.write_padding(first_cluster_offset_predict)?;
- let first_cluster_offset = output.position();
- assert_eq!(first_cluster_offset, first_cluster_offset_predict);
+ // output.write_tag(&MatroskaTag::Cues(Master::Collected(
+ // segment_layout
+ // .iter()
+ // .map(|cluster| {
+ // MatroskaTag::CuePoint(Master::Collected(
+ // Some(MatroskaTag::CueTime(cluster.timestamp))
+ // .into_iter()
+ // // TODO: Subtitles should not have cues for every cluster
+ // .chain(inputs.iter().map(|i| {
+ // MatroskaTag::CueTrackPositions(Master::Collected(vec![
+ // MatroskaTag::CueTrack(i.mapped),
+ // MatroskaTag::CueClusterPosition(cluster.position as u64),
+ // ]))
+ // }))
+ // .collect(),
+ // ))
+ // })
+ // .collect(),
+ // )))?;
+ // output.write_padding(first_cluster_offset_predict)?;
+ // let first_cluster_offset = output.position();
+ // assert_eq!(first_cluster_offset, first_cluster_offset_predict);
- let mut skip = 0;
- // TODO binary search
- for (i, cluster) in segment_layout.iter().enumerate() {
- if (cluster.position + segment_offset) >= range.start {
- break;
- }
- skip = i;
- }
+ // let mut skip = 0;
+ // // TODO binary search
+ // for (i, cluster) in segment_layout.iter().enumerate() {
+ // if (cluster.position + segment_offset) >= range.start {
+ // break;
+ // }
+ // skip = i;
+ // }
- if skip != 0 {
- info!("skipping {skip} clusters");
- output.seek(SeekFrom::Start(
- (segment_layout[skip].position + segment_offset) as u64,
- ))?;
- }
+ // if skip != 0 {
+ // info!("skipping {skip} clusters");
+ // output.seek(SeekFrom::Start(
+ // (segment_layout[skip].position + segment_offset) as u64,
+ // ))?;
+ // }
- struct ReaderD<'a> {
- stream: SegmentExtractIter<'a>,
- mapped: u64,
- }
+ // struct ReaderD<'a> {
+ // stream: SegmentExtractIter<'a>,
+ // mapped: u64,
+ // }
- let mut track_readers = inputs
- .iter_mut()
- .enumerate()
- .map(|(i, inp)| {
- inp.reader
- .seek(
- // the seek target might be a hole; we continue until the next cluster of that track.
- // this should be fine since tracks are only read according to segment_layout
- find_first_cluster_with_off(&segment_layout, skip, i)
- .ok_or(anyhow!("cluster hole at eof"))?,
- MatroskaTag::Cluster(Master::Start), // TODO shouldn't this be a child of cluster?
- )
- .context("seeking in input")?;
- let stream = SegmentExtractIter::new(&mut inp.reader, inp.source_track_index as u64);
+ // let mut track_readers = inputs
+ // .iter_mut()
+ // .enumerate()
+ // .map(|(i, inp)| {
+ // inp.reader
+ // .seek(
+ // // the seek target might be a hole; we continue until the next cluster of that track.
+ // // this should be fine since tracks are only read according to segment_layout
+ // find_first_cluster_with_off(&segment_layout, skip, i)
+ // .ok_or(anyhow!("cluster hole at eof"))?,
+ // MatroskaTag::Cluster(Master::Start), // TODO shouldn't this be a child of cluster?
+ // )
+ // .context("seeking in input")?;
+ // let stream = SegmentExtractIter::new(&mut inp.reader, inp.source_track_index as u64);
- Ok(ReaderD {
- mapped: inp.mapped,
- stream,
- })
- })
- .collect::<anyhow::Result<Vec<_>>>()?;
+ // Ok(ReaderD {
+ // mapped: inp.mapped,
+ // stream,
+ // })
+ // })
+ // .collect::<anyhow::Result<Vec<_>>>()?;
- info!("(perf) seek inputs: {:?}", Instant::now() - timing_cp);
+ // info!("(perf) seek inputs: {:?}", Instant::now() - timing_cp);
- for (cluster_index, cluster) in segment_layout.into_iter().enumerate().skip(skip) {
- debug!(
- "writing cluster {cluster_index} (pts_base={}) with {} blocks",
- cluster.timestamp,
- cluster.blocks.len()
- );
- {
- let cue_error = cluster.position as i64 - (output.position() - segment_offset) as i64;
- if cue_error != 0 {
- warn!("calculation was {} bytes off", cue_error);
- }
- }
+ // for (cluster_index, cluster) in segment_layout.into_iter().enumerate().skip(skip) {
+ // debug!(
+ // "writing cluster {cluster_index} (pts_base={}) with {} blocks",
+ // cluster.timestamp,
+ // cluster.blocks.len()
+ // );
+ // {
+ // let cue_error = cluster.position as i64 - (output.position() - segment_offset) as i64;
+ // if cue_error != 0 {
+ // warn!("calculation was {} bytes off", cue_error);
+ // }
+ // }
- let mut cluster_blocks = vec![MatroskaTag::Timestamp(cluster.timestamp)];
- for (block_track, index_block) in cluster.blocks {
- let track_reader = &mut track_readers[block_track];
- // TODO handle duration
- let mut block = track_reader.stream.next_block()?.0;
+ // let mut cluster_blocks = vec![MatroskaTag::Timestamp(cluster.timestamp)];
+ // for (block_track, index_block) in cluster.blocks {
+ // let track_reader = &mut track_readers[block_track];
+ // // TODO handle duration
+ // let mut block = track_reader.stream.next_block()?.0;
- assert_eq!(index_block.size, block.data.len(), "seek index is wrong");
+ // assert_eq!(index_block.size, block.data.len(), "seek index is wrong");
- block.track = track_reader.mapped;
- block.timestamp_off = (index_block.pts - cluster.timestamp).try_into().unwrap();
- trace!("n={} tso={}", block.track, block.timestamp_off);
+ // block.track = track_reader.mapped;
+ // block.timestamp_off = (index_block.pts - cluster.timestamp).try_into().unwrap();
+ // trace!("n={} tso={}", block.track, block.timestamp_off);
- cluster_blocks.push(MatroskaTag::SimpleBlock(block))
- }
- output.write_tag(&MatroskaTag::Cluster(Master::Collected(cluster_blocks)))?;
- }
- // output.write_tag(&MatroskaTag::Segment(Master::End))?;
- Ok(())
+ // cluster_blocks.push(MatroskaTag::SimpleBlock(block))
+ // }
+ // output.write_tag(&MatroskaTag::Cluster(Master::Collected(cluster_blocks)))?;
+ // }
+ // // output.write_tag(&MatroskaTag::Segment(Master::End))?;
+ // Ok(())
+ todo!()
}
-fn find_first_cluster_with_off(
- segment_layout: &[ClusterLayout],
- skip: usize,
- track: usize,
-) -> Option<u64> {
- for cluster in segment_layout.iter().skip(skip) {
- if let Some(off) = cluster.source_offsets[track] {
- return Some(off);
- }
- }
- None
-}
+// fn find_first_cluster_with_off(
+// segment_layout: &[ClusterLayout],
+// skip: usize,
+// track: usize,
+// ) -> Option<u64> {
+// for cluster in segment_layout.iter().skip(skip) {
+// if let Some(off) = cluster.source_offsets[track] {
+// return Some(off);
+// }
+// }
+// None
+// }
diff --git a/remuxer/src/seek_index.rs b/remuxer/src/seek_index.rs
index bd351d9..7296d93 100644
--- a/remuxer/src/seek_index.rs
+++ b/remuxer/src/seek_index.rs
@@ -4,10 +4,8 @@
Copyright (C) 2025 metamuffin <metamuffin.org>
*/
use anyhow::{Context, Result};
-use jellybase::{
- cache::cache_memory,
- common::seek_index::{BlockIndex, SeekIndex},
-};
+use bincode::{Decode, Encode};
+use jellybase::cache::cache_memory;
use jellymatroska::{
block::Block,
read::EbmlReader,
@@ -17,6 +15,33 @@ use jellymatroska::{
use log::{debug, info, trace, warn};
use std::{collections::BTreeMap, fs::File, io::BufReader, path::Path, sync::Arc};
+pub const SEEK_INDEX_VERSION: u32 = 0x5eef1de4;
+
+#[derive(Debug, Clone, Decode, Encode)]
+pub struct SeekIndex {
+ pub version: u32,
+ pub blocks: Vec<BlockIndex>,
+ pub keyframes: Vec<usize>,
+}
+
+#[derive(Debug, Clone, Decode, Encode)]
+pub struct BlockIndex {
+ pub pts: u64,
+ // pub duration: Option<u64>,
+ pub source_off: u64, // points to start of SimpleBlock or BlockGroup (not the Block inside it)
+ pub size: usize,
+}
+
+impl Default for SeekIndex {
+ fn default() -> Self {
+ Self {
+ version: SEEK_INDEX_VERSION,
+ blocks: Vec::new(),
+ keyframes: Vec::new(),
+ }
+ }
+}
+
pub fn get_seek_index(path: &Path) -> anyhow::Result<Arc<BTreeMap<u64, Arc<SeekIndex>>>> {
cache_memory(&["seekindex", path.to_str().unwrap()], move || {
info!("generating seek index for {path:?}");
diff --git a/server/src/routes/compat/jellyfin/mod.rs b/server/src/routes/compat/jellyfin/mod.rs
index ab36a8c..7393c5f 100644
--- a/server/src/routes/compat/jellyfin/mod.rs
+++ b/server/src/routes/compat/jellyfin/mod.rs
@@ -5,23 +5,19 @@
*/
pub mod models;
-use crate::routes::{
- stream::rocket_uri_macro_r_stream,
- ui::{
- account::{login_logic, session::Session},
- assets::{
- rocket_uri_macro_r_asset, rocket_uri_macro_r_item_backdrop,
- rocket_uri_macro_r_item_poster,
- },
- error::MyResult,
- node::{aspect_class, DatabaseNodeUserDataExt},
- sort::{filter_and_sort_nodes, FilterProperty, NodeFilterSort, SortOrder, SortProperty},
+use crate::routes::ui::{
+ account::{login_logic, session::Session},
+ assets::{
+ rocket_uri_macro_r_asset, rocket_uri_macro_r_item_backdrop, rocket_uri_macro_r_item_poster,
},
+ error::MyResult,
+ node::{aspect_class, DatabaseNodeUserDataExt},
+ sort::{filter_and_sort_nodes, FilterProperty, NodeFilterSort, SortOrder, SortProperty},
};
use anyhow::{anyhow, Context};
use jellybase::{database::Database, CONF};
use jellycommon::{
- stream::{StreamFormat, StreamSpec},
+ stream::{StreamContainer, StreamSpec},
user::{NodeUserData, WatchedState},
MediaInfo, Node, NodeID, NodeKind, SourceTrack, SourceTrackKind, Visibility,
};
@@ -446,16 +442,12 @@ pub fn r_jellyfin_video_stream(
.get_node_slug(id)?
.ok_or(anyhow!("node does not exist"))?;
let media = node.media.as_ref().ok_or(anyhow!("node has no media"))?;
- Ok(Redirect::temporary(rocket::uri!(r_stream(
- id,
- StreamSpec {
- format: StreamFormat::Matroska,
- webm: Some(true),
- track: (0..media.tracks.len()).collect(),
- index: None,
- profile: None,
- }
- ))))
+ let params = StreamSpec::Remux {
+ tracks: (0..media.tracks.len()).collect(),
+ container: StreamContainer::WebM,
+ }
+ .to_query();
+ Ok(Redirect::temporary(format!("/n/{id}/stream{params}")))
}
#[derive(Deserialize)]
@@ -498,9 +490,7 @@ pub fn r_jellyfin_playback_bitratetest(_session: Session, Size: usize) -> Vec<u8
}
#[post("/Sessions/Capabilities/Full")]
-pub fn r_jellyfin_sessions_capabilities_full(_session: Session) {
-
-}
+pub fn r_jellyfin_sessions_capabilities_full(_session: Session) {}
#[derive(Deserialize)]
#[serde(rename_all = "PascalCase")]
@@ -796,7 +786,10 @@ fn item_object(node: &Node, userdata: &NodeUserData) -> JellyfinItem {
location_type: node.media.as_ref().map(|_| "FileSystem".to_owned()),
play_access: node.media.as_ref().map(|_| "Full".to_owned()),
container: node.media.as_ref().map(|_| "webm".to_owned()),
- run_time_ticks: node.media.as_ref().map(|m| (m.duration * 10_000_000.) as i64),
+ run_time_ticks: node
+ .media
+ .as_ref()
+ .map(|m| (m.duration * 10_000_000.) as i64),
media_sources: media_source.as_ref().map(|s| vec![s.clone()]),
media_streams: media_source.as_ref().map(|s| s.media_streams.clone()),
path: node
diff --git a/server/src/routes/stream.rs b/server/src/routes/stream.rs
index 1fb136c..0fbeb3a 100644
--- a/server/src/routes/stream.rs
+++ b/server/src/routes/stream.rs
@@ -6,13 +6,9 @@
use super::ui::{account::session::Session, error::MyError};
use crate::database::Database;
use anyhow::{anyhow, Result};
-use jellybase::{federation::Federation, permission::PermissionSetExt, SECRETS};
-use jellycommon::{
- config::FederationAccount,
- stream::StreamSpec,
- user::{CreateSessionParams, UserPermission},
- TrackSource,
-};
+use jellybase::{assetfed::AssetInner, federation::Federation};
+use jellycommon::{stream::StreamSpec, TrackSource};
+use jellystream::SMediaInfo;
use log::{info, warn};
use rocket::{
get, head,
@@ -21,15 +17,20 @@ use rocket::{
response::{self, Redirect, Responder},
Either, Request, Response, State,
};
-use std::{collections::HashSet, ops::Range};
+use std::{
+ collections::{BTreeMap, BTreeSet},
+ ops::Range,
+ sync::Arc,
+};
use tokio::io::{duplex, DuplexStream};
-#[head("/n/<_id>/stream?<spec>")]
+#[head("/n/<_id>/stream?<spec..>")]
pub async fn r_stream_head(
_sess: Session,
_id: &str,
- spec: StreamSpec,
+ spec: BTreeMap<String, String>,
) -> Result<Either<StreamResponse, Redirect>, MyError> {
+ let spec = StreamSpec::from_query_kv(&spec).map_err(|x| anyhow!("spec invalid: {x}"))?;
let head = jellystream::stream_head(&spec);
Ok(Either::Left(StreamResponse {
stream: duplex(0).0,
@@ -41,70 +42,72 @@ pub async fn r_stream_head(
#[get("/n/<id>/stream?<spec..>")]
pub async fn r_stream(
- session: Session,
- federation: &State<Federation>,
+ _session: Session,
+ _federation: &State<Federation>,
db: &State<Database>,
id: &str,
range: Option<RequestRange>,
- spec: StreamSpec,
+ spec: BTreeMap<String, String>,
) -> Result<Either<StreamResponse, RedirectResponse>, MyError> {
+ let spec = StreamSpec::from_query_kv(&spec).map_err(|x| anyhow!("spec invalid: {x}"))?;
// TODO perm
let node = db
.get_node_slug(id)?
.ok_or(anyhow!("node does not exist"))?;
- let media = node
- .media
- .as_ref()
- .ok_or(anyhow!("item does not contain media"))?;
+ let media = Arc::new(
+ node.media
+ .clone()
+ .ok_or(anyhow!("item does not contain media"))?,
+ );
// TODO its unclear how requests with multiple tracks should be handled.
- if spec.track.len() == 1 {
- let ti = spec.track[0];
- if let TrackSource::Remote(remote_index) = media.tracks[ti].source {
- session
- .user
- .permissions
- .assert(&UserPermission::FederatedContent)?;
+ // if spec.track.len() == 1 {
+ // let ti = spec.track[0];
+ // if let TrackSource::Remote(remote_index) = media.tracks[ti].source {
+ // session
+ // .user
+ // .permissions
+ // .assert(&UserPermission::FederatedContent)?;
- let track = &node.media.as_ref().ok_or(anyhow!("no media"))?.tracks[ti];
- let host = track
- .federated
- .last()
- .ok_or(anyhow!("federation inconsistent"))?;
+ // let track = &node.media.as_ref().ok_or(anyhow!("no media"))?.tracks[ti];
+ // let host = track
+ // .federated
+ // .last()
+ // .ok_or(anyhow!("federation inconsistent"))?;
- let FederationAccount {
- password, username, ..
- } = SECRETS
- .federation
- .get(host)
- .ok_or(anyhow!("no credentials on the server-side"))?;
+ // let FederationAccount {
+ // password, username, ..
+ // } = SECRETS
+ // .federation
+ // .get(host)
+ // .ok_or(anyhow!("no credentials on the server-side"))?;
- info!("creating session on {host}");
- let instance = federation.get_instance(host)?.to_owned();
- let session = instance
- .login(CreateSessionParams {
- username: username.to_owned(),
- password: password.to_owned(),
- expire: Some(60),
- drop_permissions: Some(HashSet::from_iter([
- UserPermission::ManageSelf,
- UserPermission::Admin, // in case somebody federated the admin :)))
- ])),
- })
- .await?;
+ // info!("creating session on {host}");
+ // let instance = federation.get_instance(host)?.to_owned();
+ // let session = instance
+ // .login(CreateSessionParams {
+ // username: username.to_owned(),
+ // password: password.to_owned(),
+ // expire: Some(60),
+ // drop_permissions: Some(HashSet::from_iter([
+ // UserPermission::ManageSelf,
+ // UserPermission::Admin, // in case somebody federated the admin :)))
+ // ])),
+ // })
+ // .await?;
- let uri = session.stream_url(
- node.slug.clone().into(),
- &StreamSpec {
- track: vec![remote_index],
- ..spec
- },
- );
- info!("federation redirect");
- return Ok(Either::Right(RedirectResponse(uri)));
- }
- }
+ // let uri = session.stream_url(
+ // node.slug.clone().into(),
+ // &StreamSpec {
+ // track: vec![remote_index],
+ // ..spec
+ // },
+ // );
+ // info!("federation redirect");
+ // return Ok(Either::Right(RedirectResponse(uri)));
+ // }
+ // }
info!(
"stream request (range={})",
@@ -124,7 +127,20 @@ pub async fn r_stream(
let head = jellystream::stream_head(&spec);
- match jellystream::stream(node, spec, urange, &session.user.permissions).await {
+ let mut sources = BTreeSet::new();
+ for t in &media.tracks {
+ if let TrackSource::Local(x) = &t.source {
+ if let AssetInner::LocalTrack(m) = AssetInner::deser(&x.0)? {
+ sources.insert(m.path);
+ }
+ }
+ }
+ let media = Arc::new(SMediaInfo {
+ files: sources,
+ info: node,
+ });
+
+ match jellystream::stream(media, spec, urange).await {
Ok(stream) => Ok(Either::Left(StreamResponse {
stream,
range,
diff --git a/server/src/routes/ui/player.rs b/server/src/routes/ui/player.rs
index c2188a8..d2a8236 100644
--- a/server/src/routes/ui/player.rs
+++ b/server/src/routes/ui/player.rs
@@ -6,22 +6,20 @@
use super::{
account::session::{token, Session},
layout::LayoutPage,
- node::{get_similar_media, DatabaseNodeUserDataExt, NodePage},
- sort::NodeFilterSort,
+ node::{get_similar_media, DatabaseNodeUserDataExt},
};
use crate::{
database::Database,
- routes::{
- stream::rocket_uri_macro_r_stream,
- ui::{error::MyResult, layout::DynLayoutPage},
+ routes::ui::{
+ assets::rocket_uri_macro_r_item_backdrop, error::MyResult, layout::DynLayoutPage,
},
uri,
};
use anyhow::anyhow;
-use jellybase::{permission::PermissionSetExt, CONF};
+use jellybase::CONF;
use jellycommon::{
- stream::{StreamFormat, StreamSpec},
- user::{PermissionSet, PlayerKind, UserPermission},
+ stream::{StreamContainer, StreamSpec},
+ user::{PermissionSet, PlayerKind},
Node, NodeID, SourceTrackKind, TrackID, Visibility,
};
use markup::DynRender;
@@ -49,13 +47,14 @@ impl PlayerConfig {
fn jellynative_url(action: &str, seek: f64, secret: &str, node: &str, session: &str) -> String {
let protocol = if CONF.tls { "https" } else { "http" };
let host = &CONF.hostname;
- let stream_url = uri!(r_stream(
- node,
- StreamSpec {
- format: StreamFormat::HlsMaster,
- ..Default::default()
+ let stream_url = format!(
+ "/n/{node}/stream{}",
+ StreamSpec::HlsMultiVariant {
+ segment: 0,
+ container: StreamContainer::Matroska
}
- ));
+ .to_query()
+ );
format!("jellynative://{action}/{secret}/{session}/{seek}/{protocol}://{host}{stream_url}",)
}
@@ -66,7 +65,7 @@ pub fn r_player(
id: NodeID,
conf: PlayerConfig,
) -> MyResult<Either<DynLayoutPage<'_>, Redirect>> {
- let (node, udata) = db.get_node_with_userdata(id, &session)?;
+ let (node, _udata) = db.get_node_with_userdata(id, &session)?;
let mut parents = node
.parents
@@ -80,14 +79,6 @@ pub fn r_player(
parents.retain(|(n, _)| n.visibility >= Visibility::Reduced);
let native_session = |action: &str| {
- let perm = [
- UserPermission::StreamFormat(StreamFormat::HlsMaster),
- UserPermission::StreamFormat(StreamFormat::HlsVariant),
- UserPermission::StreamFormat(StreamFormat::Fragment),
- ];
- for perm in &perm {
- session.user.permissions.assert(perm)?;
- }
Ok(Either::Right(Redirect::temporary(jellynative_url(
action,
conf.t.unwrap_or(0.),
@@ -95,7 +86,7 @@ pub fn r_player(
&id.to_string(),
&token::create(
session.user.name,
- PermissionSet(perm.map(|e| (e, true)).into()),
+ PermissionSet::default(), // TODO
chrono::Duration::hours(24),
),
))))
@@ -111,27 +102,32 @@ pub fn r_player(
}
}
- let spec = StreamSpec {
- track: None
- .into_iter()
- .chain(conf.v)
- .chain(conf.a)
- .chain(conf.s)
- .collect::<Vec<_>>(),
- format: StreamFormat::Matroska,
- webm: Some(true),
- ..Default::default()
- };
+ // TODO
+ // let spec = StreamSpec {
+ // track: None
+ // .into_iter()
+ // .chain(conf.v)
+ // .chain(conf.a)
+ // .chain(conf.s)
+ // .collect::<Vec<_>>(),
+ // format: StreamFormat::Matroska,
+ // webm: Some(true),
+ // ..Default::default()
+ // };
- let playing = !spec.track.is_empty();
+ let playing = false; // !spec.track.is_empty();
let conf = player_conf(node.clone(), playing)?;
Ok(Either::Left(LayoutPage {
title: node.title.to_owned().unwrap_or_default(),
class: Some("player"),
content: markup::new! {
- video[id="player", src=uri!(r_stream(&node.slug, &spec)), controls, preload="auto"]{}
- @NodePage { children: &[], parents: &parents, filter: &NodeFilterSort::default(), node: &node, udata: &udata, player: true, similar: &similar }
+ @if playing {
+ // TODO
+ // video[src=uri!(r_stream(&node.slug, &spec)), controls, preload="auto"]{}
+ } else {
+ img.backdrop[src=uri!(r_item_backdrop(&node.slug, Some(2048))).to_string()];
+ }
@conf
},
}))
diff --git a/stream/Cargo.toml b/stream/Cargo.toml
index 36979c9..21d1650 100644
--- a/stream/Cargo.toml
+++ b/stream/Cargo.toml
@@ -7,6 +7,8 @@ edition = "2021"
jellybase = { path = "../base", features = ["rocket"] }
jellytranscoder = { path = "../transcoder" }
jellyremuxer = { path = "../remuxer" }
+jellymatroska = { path = "../matroska" }
+ebml-struct = { git = "https://codeberg.org/metamuffin/ebml-struct" }
log = { workspace = true }
anyhow = { workspace = true }
diff --git a/stream/src/fragment.rs b/stream/src/fragment.rs
index e276d29..dfe101e 100644
--- a/stream/src/fragment.rs
+++ b/stream/src/fragment.rs
@@ -3,16 +3,10 @@
which is licensed under the GNU Affero General Public License (version 3); see /COPYING.
Copyright (C) 2025 metamuffin <metamuffin.org>
*/
+use crate::{stream_info, SMediaInfo};
use anyhow::{anyhow, bail, Result};
-use jellybase::{
- common::{
- stream::StreamSpec,
- user::{PermissionSet, UserPermission},
- LocalTrack, Node,
- },
- permission::PermissionSetExt,
- CONF,
-};
+use jellybase::common::stream::StreamContainer;
+use jellyremuxer::{matroska_to_mpeg4, matroska_to_webm::matroska_to_webm};
use jellytranscoder::fragment::transcode;
use log::warn;
use std::sync::Arc;
@@ -20,40 +14,57 @@ use tokio::{fs::File, io::DuplexStream};
use tokio_util::io::SyncIoBridge;
pub async fn fragment_stream(
- node: Arc<Node>,
- local_tracks: Vec<LocalTrack>,
- spec: StreamSpec,
mut b: DuplexStream,
- perms: &PermissionSet,
+ info: Arc<SMediaInfo>,
+ track: usize,
+ segment: usize,
+ index: usize,
+ format_num: usize,
+ container: StreamContainer,
) -> Result<()> {
- if spec.track.len() != 1 {
- bail!("unsupported number of tracks for segment, must be exactly one");
- }
- let track = spec.track[0];
- let n = spec.index.ok_or(anyhow!("segment index missing"))?;
-
- let local_track = local_tracks
- .first()
- .ok_or(anyhow!("track missing"))?
- .to_owned();
+ let (iinfo, info) = stream_info(info).await?;
+ let (file_index, track_num) = *iinfo
+ .track_to_file
+ .get(track)
+ .ok_or(anyhow!("track not found"))?;
+ let path = iinfo.paths[file_index].clone();
+ let seg = info
+ .segments
+ .get(segment)
+ .ok_or(anyhow!("segment not found"))?;
+ let track = seg.tracks.get(track).ok_or(anyhow!("track not found"))?;
+ let format = track
+ .formats
+ .get(format_num)
+ .ok_or(anyhow!("format not found"))?;
- if let Some(profile) = spec.profile {
- perms.assert(&UserPermission::Transcode)?;
+ if format.remux {
+ tokio::task::spawn_blocking(move || {
+ if let Err(err) = jellyremuxer::write_fragment_into(
+ SyncIoBridge::new(b),
+ &path,
+ track_num,
+ container == StreamContainer::WebM,
+ &info.name.unwrap_or_default(),
+ index,
+ ) {
+ warn!("segment stream error: {err}");
+ }
+ });
+ } else {
let location = transcode(
- &format!("{track} {n} {:?}", node), // TODO maybe not use the entire source
- CONF.transcoding_profiles
- .get(profile)
- .ok_or(anyhow!("profile out of range"))?,
+ &format!("{path:?} {track_num} {index} {format_num} {container}"), // TODO maybe not use the entire source
+ track.kind,
+ format,
move |b| {
tokio::task::spawn_blocking(move || {
if let Err(err) = jellyremuxer::write_fragment_into(
SyncIoBridge::new(b),
- &CONF.media_path,
- &node,
- &local_track,
- track,
+ &path,
+ track_num,
false,
- n,
+ &info.name.unwrap_or_default(),
+ index,
) {
warn!("segment stream error: {err}");
}
@@ -61,27 +72,36 @@ pub async fn fragment_stream(
},
)
.await?;
- let mut output = File::open(location.abs()).await?;
- tokio::task::spawn(async move {
- if let Err(err) = tokio::io::copy(&mut output, &mut b).await {
- warn!("cannot write stream: {err}")
+
+ let mut frag = File::open(location.abs()).await?;
+ match container {
+ StreamContainer::WebM => {
+ tokio::task::spawn_blocking(move || {
+ if let Err(err) =
+ matroska_to_webm(SyncIoBridge::new(frag), SyncIoBridge::new(b))
+ {
+ warn!("webm transmux failed: {err}");
+ }
+ });
}
- });
- } else {
- let b = SyncIoBridge::new(b);
- tokio::task::spawn_blocking(move || {
- if let Err(err) = jellyremuxer::write_fragment_into(
- b,
- &CONF.media_path,
- &node,
- &local_track,
- track,
- spec.webm.unwrap_or(false),
- n,
- ) {
- warn!("segment stream error: {err}");
+ StreamContainer::Matroska => {
+ tokio::task::spawn(async move {
+ if let Err(err) = tokio::io::copy(&mut frag, &mut b).await {
+ warn!("cannot write stream: {err}")
+ }
+ });
}
- });
+ StreamContainer::MPEG4 => {
+ tokio::task::spawn_blocking(move || {
+ if let Err(err) =
+ matroska_to_mpeg4(SyncIoBridge::new(frag), SyncIoBridge::new(b))
+ {
+ warn!("mpeg4 transmux failed: {err}");
+ }
+ });
+ }
+ _ => bail!("unsupported"),
+ }
}
Ok(())
diff --git a/stream/src/fragment_index.rs b/stream/src/fragment_index.rs
new file mode 100644
index 0000000..6fbddc6
--- /dev/null
+++ b/stream/src/fragment_index.rs
@@ -0,0 +1,32 @@
+/*
+ This file is part of jellything (https://codeberg.org/metamuffin/jellything)
+ which is licensed under the GNU Affero General Public License (version 3); see /COPYING.
+ Copyright (C) 2025 metamuffin <metamuffin.org>
+*/
+use crate::{stream_info, SMediaInfo};
+use anyhow::{anyhow, Result};
+use jellybase::common::stream::{SegmentNum, TrackNum};
+use std::sync::Arc;
+use tokio::io::{AsyncWriteExt, DuplexStream};
+
+pub async fn fragment_index_stream(
+ mut b: DuplexStream,
+ info: Arc<SMediaInfo>,
+ _segment: SegmentNum,
+ track: TrackNum,
+) -> Result<()> {
+ let (iinfo, _info) = stream_info(info).await?;
+ let (file_index, track_num) = *iinfo
+ .track_to_file
+ .get(track)
+ .ok_or(anyhow!("track not found"))?;
+
+ let fragments = tokio::task::spawn_blocking(move || {
+ jellyremuxer::fragment::fragment_index(&iinfo.paths[file_index], track_num)
+ })
+ .await??;
+
+ let out = serde_json::to_string(&fragments)?;
+ tokio::spawn(async move { b.write_all(out.as_bytes()).await });
+ Ok(())
+}
diff --git a/stream/src/hls.rs b/stream/src/hls.rs
index dca1036..3dfbf01 100644
--- a/stream/src/hls.rs
+++ b/stream/src/hls.rs
@@ -4,13 +4,10 @@
Copyright (C) 2025 metamuffin <metamuffin.org>
*/
+use crate::{stream_info, SMediaInfo};
use anyhow::{anyhow, Result};
-use jellybase::{
- common::{
- stream::{StreamFormat, StreamSpec},
- LocalTrack, Node, SourceTrackKind,
- },
- CONF,
+use jellybase::common::stream::{
+ FormatNum, SegmentNum, StreamContainer, StreamSpec, TrackKind, TrackNum,
};
use std::{fmt::Write, ops::Range, sync::Arc};
use tokio::{
@@ -18,32 +15,63 @@ use tokio::{
task::spawn_blocking,
};
-pub async fn hls_master_stream(
- node: Arc<Node>,
- _local_tracks: Vec<LocalTrack>,
- _spec: StreamSpec,
+pub async fn hls_supermultivariant_stream(
mut b: DuplexStream,
+ info: Arc<SMediaInfo>,
+ container: StreamContainer,
) -> Result<()> {
- let media = node.media.as_ref().ok_or(anyhow!("no media"))?;
+ let (_iinfo, info) = stream_info(info).await?;
+ let mut out = String::new();
+ writeln!(out, "#EXTM3U")?;
+ writeln!(out, "#EXT-X-VERSION:4")?;
+ for (i, _seg) in info.segments.iter().enumerate() {
+ let uri = format!(
+ "stream{}",
+ StreamSpec::HlsMultiVariant {
+ segment: i,
+ container,
+ }
+ .to_query()
+ );
+ writeln!(out, "{uri}")?;
+ }
+ tokio::spawn(async move { b.write_all(out.as_bytes()).await });
+ Ok(())
+}
+
+pub async fn hls_multivariant_stream(
+ mut b: DuplexStream,
+ info: Arc<SMediaInfo>,
+ segment: SegmentNum,
+ container: StreamContainer,
+) -> Result<()> {
+ let (_iinfo, info) = stream_info(info).await?;
+ let seg = info
+ .segments
+ .get(segment)
+ .ok_or(anyhow!("segment not found"))?;
+
let mut out = String::new();
writeln!(out, "#EXTM3U")?;
writeln!(out, "#EXT-X-VERSION:4")?;
// writeln!(out, "#EXT-X-INDEPENDENT-SEGMENTS")?;
- for (i, t) in media.tracks.iter().enumerate() {
+ for (i, t) in seg.tracks.iter().enumerate() {
let uri = format!(
- "stream?{}",
- StreamSpec {
- track: vec![i],
- format: StreamFormat::HlsVariant,
- ..Default::default()
+ "stream{}",
+ StreamSpec::HlsVariant {
+ segment,
+ track: i,
+ container,
+ format: 0
}
.to_query()
);
let r#type = match t.kind {
- SourceTrackKind::Video { .. } => "VIDEO",
- SourceTrackKind::Audio { .. } => "AUDIO",
- SourceTrackKind::Subtitles => "SUBTITLES",
+ TrackKind::Video => "VIDEO",
+ TrackKind::Audio => "AUDIO",
+ TrackKind::Subtitle => "SUBTITLES",
};
+ // TODO bw
writeln!(out, "#EXT-X-STREAM-INF:BANDWIDTH=5000000,TYPE={type}")?;
writeln!(out, "{uri}")?;
}
@@ -52,31 +80,49 @@ pub async fn hls_master_stream(
}
pub async fn hls_variant_stream(
- node: Arc<Node>,
- local_tracks: Vec<LocalTrack>,
- mut spec: StreamSpec,
mut b: DuplexStream,
+ info: Arc<SMediaInfo>,
+ segment: SegmentNum,
+ track: TrackNum,
+ format: FormatNum,
+ container: StreamContainer,
) -> Result<()> {
- let local_track = local_tracks.first().ok_or(anyhow!("no track"))?.to_owned();
- let track_index = spec.track[0];
- let media_info = node.media.to_owned().ok_or(anyhow!("no media?"))?;
+ let (iinfo, info) = stream_info(info).await?;
+ let (file_index, track_num) = *iinfo
+ .track_to_file
+ .get(track)
+ .ok_or(anyhow!("track not found"))?;
+ let seg = info
+ .segments
+ .get(segment)
+ .ok_or(anyhow!("segment not found"))?;
+
let frags = spawn_blocking(move || {
- jellyremuxer::fragment::fragment_index(&CONF.media_path, &node, &local_track, track_index)
+ jellyremuxer::fragment::fragment_index(&iinfo.paths[file_index], track_num)
})
.await??;
let mut out = String::new();
writeln!(out, "#EXTM3U")?;
writeln!(out, "#EXT-X-PLAYLIST-TYPE:VOD")?;
- writeln!(out, "#EXT-X-TARGETDURATION:{}", media_info.duration)?;
+ writeln!(out, "#EXT-X-TARGETDURATION:{}", seg.duration)?;
writeln!(out, "#EXT-X-VERSION:4")?;
writeln!(out, "#EXT-X-MEDIA-SEQUENCE:0")?;
- spec.format = StreamFormat::Fragment;
- for (i, Range { start, end }) in frags.iter().enumerate() {
+ for (index, Range { start, end }) in frags.iter().enumerate() {
writeln!(out, "#EXTINF:{:},", end - start)?;
- spec.index = Some(i);
- writeln!(out, "stream?{}", spec.to_query())?;
+ writeln!(
+ out,
+ "stream{}",
+ StreamSpec::Fragment {
+ segment,
+ track,
+ index,
+ container,
+ format,
+ }
+ .to_query()
+ )?;
}
writeln!(out, "#EXT-X-ENDLIST")?;
diff --git a/stream/src/jhls.rs b/stream/src/jhls.rs
deleted file mode 100644
index b222e39..0000000
--- a/stream/src/jhls.rs
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- This file is part of jellything (https://codeberg.org/metamuffin/jellything)
- which is licensed under the GNU Affero General Public License (version 3); see /COPYING.
- Copyright (C) 2025 metamuffin <metamuffin.org>
-*/
-use anyhow::{anyhow, Result};
-use jellybase::{
- common::{
- jhls::JhlsTrackIndex,
- stream::StreamSpec,
- user::{PermissionSet, UserPermission},
- LocalTrack, Node,
- },
- permission::PermissionSetExt,
- CONF,
-};
-use std::sync::Arc;
-use tokio::io::{AsyncWriteExt, DuplexStream};
-
-pub async fn jhls_index(
- node: Arc<Node>,
- local_tracks: &[LocalTrack],
- spec: StreamSpec,
- mut b: DuplexStream,
- perms: &PermissionSet,
-) -> Result<()> {
- let local_track = local_tracks
- .first()
- .ok_or(anyhow!("track missing"))?
- .to_owned();
-
- let fragments = tokio::task::spawn_blocking(move || {
- jellyremuxer::fragment::fragment_index(&CONF.media_path, &node, &local_track, spec.track[0])
- })
- .await??;
-
- let out = serde_json::to_string(&JhlsTrackIndex {
- extra_profiles: if perms.check(&UserPermission::Transcode) {
- CONF.transcoding_profiles.clone()
- } else {
- vec![]
- },
- fragments,
- })?;
- tokio::spawn(async move { b.write_all(out.as_bytes()).await });
- Ok(())
-}
diff --git a/stream/src/lib.rs b/stream/src/lib.rs
index 00338c1..4df87ae 100644
--- a/stream/src/lib.rs
+++ b/stream/src/lib.rs
@@ -5,136 +5,113 @@
*/
#![feature(iterator_try_collect)]
pub mod fragment;
+pub mod fragment_index;
pub mod hls;
-pub mod jhls;
+pub mod stream_info;
pub mod webvtt;
use anyhow::{anyhow, bail, Context, Result};
use fragment::fragment_stream;
-use hls::{hls_master_stream, hls_variant_stream};
-use jellybase::{
- assetfed::AssetInner,
- common::{
- stream::{StreamFormat, StreamSpec},
- user::{PermissionSet, UserPermission},
- LocalTrack, Node, TrackSource,
- },
- permission::PermissionSetExt,
- CONF,
+use fragment_index::fragment_index_stream;
+use hls::{hls_multivariant_stream, hls_supermultivariant_stream, hls_variant_stream};
+use jellybase::common::{
+ stream::{StreamContainer, StreamSpec},
+ Node,
};
-use jhls::jhls_index;
-use std::{io::SeekFrom, ops::Range, sync::Arc};
+use std::{collections::BTreeSet, io::SeekFrom, ops::Range, path::PathBuf, sync::Arc};
+use stream_info::{stream_info, write_stream_info};
use tokio::{
fs::File,
io::{duplex, AsyncReadExt, AsyncSeekExt, AsyncWriteExt, DuplexStream},
};
-use tokio_util::io::SyncIoBridge;
-use webvtt::vtt_stream;
+
+#[derive(Debug)]
+pub struct SMediaInfo {
+ pub info: Arc<Node>,
+ pub files: BTreeSet<PathBuf>,
+}
pub struct StreamHead {
pub content_type: &'static str,
pub range_supported: bool,
}
-#[rustfmt::skip]
pub fn stream_head(spec: &StreamSpec) -> StreamHead {
- let webm_or_mkv = if spec.webm.unwrap_or(false) { "video/webm" } else { "video/x-matroska" };
- match spec.format {
- StreamFormat::Original => StreamHead { content_type: "video/x-matroska", range_supported: true },
- StreamFormat::Matroska => StreamHead { content_type: webm_or_mkv, range_supported: true },
- StreamFormat::HlsMaster | StreamFormat::HlsVariant => StreamHead { content_type: "application/vnd.apple.mpegurl", range_supported: false },
- StreamFormat::JhlsIndex => StreamHead { content_type: "application/jellything-seekindex+json", range_supported: false },
- StreamFormat::Webvtt => StreamHead { content_type: "text/vtt", range_supported: false },
- StreamFormat::Fragment => StreamHead { content_type: webm_or_mkv, range_supported: false },
- StreamFormat::Jvtt => StreamHead { content_type: "application/jellything-vtt+json", range_supported: false },
+ let cons = |ct: &'static str, rs: bool| StreamHead {
+ content_type: ct,
+ range_supported: rs,
+ };
+ let container_ct = |x: StreamContainer| match x {
+ StreamContainer::WebM => "video/webm",
+ StreamContainer::Matroska => "video/x-matroska",
+ StreamContainer::WebVTT => "text/vtt",
+ StreamContainer::JVTT => "application/jellything-vtt+json",
+ StreamContainer::MPEG4 => "video/mp4",
+ };
+ match spec {
+ StreamSpec::Whep { .. } => cons("application/x-todo", false),
+ StreamSpec::WhepControl { .. } => cons("application/x-todo", false),
+ StreamSpec::Remux { container, .. } => cons(container_ct(*container), true),
+ StreamSpec::Original { .. } => cons("video/x-matroska", true),
+ StreamSpec::HlsSuperMultiVariant { .. } => cons("application/vnd.apple.mpegurl", false),
+ StreamSpec::HlsMultiVariant { .. } => cons("application/vnd.apple.mpegurl", false),
+ StreamSpec::HlsVariant { .. } => cons("application/vnd.apple.mpegurl", false),
+ StreamSpec::Info { .. } => cons("application/jellything-stream-info+json", false),
+ StreamSpec::FragmentIndex { .. } => cons("application/jellything-frag-index+json", false),
+ StreamSpec::Fragment { container, .. } => cons(container_ct(*container), false),
}
}
pub async fn stream(
- node: Arc<Node>,
+ info: Arc<SMediaInfo>,
spec: StreamSpec,
range: Range<usize>,
- perms: &PermissionSet,
) -> Result<DuplexStream> {
- perms.assert(&UserPermission::StreamFormat(spec.format))?;
-
let (a, b) = duplex(4096);
- // TODO remux of mixed remote and local tracks?!
- let track_sources = node.media.to_owned().ok_or(anyhow!("node has no media"))?;
-
- let local_tracks = spec
- .track
- .iter()
- .map(|i| {
- anyhow::Ok(
- match &track_sources
- .tracks
- .get(*i)
- .ok_or(anyhow!("track does not exist"))?
- .source
- {
- TrackSource::Local(t) => AssetInner::deser(&t.0)?
- .as_local_track()
- .ok_or(anyhow!("asset not a track"))?,
- TrackSource::Remote(_) => bail!("track is not local"),
- },
- )
- })
- .collect::<anyhow::Result<Vec<_>>>()?
- .into_iter()
- .collect::<Vec<_>>();
-
- match spec.format {
- StreamFormat::Original => original_stream(local_tracks, spec, range, b).await?,
- StreamFormat::Matroska => remux_stream(node, local_tracks, spec, range, b).await?,
- StreamFormat::HlsMaster => hls_master_stream(node, local_tracks, spec, b).await?,
- StreamFormat::HlsVariant => hls_variant_stream(node, local_tracks, spec, b).await?,
- StreamFormat::JhlsIndex => jhls_index(node, &local_tracks, spec, b, perms).await?,
- StreamFormat::Fragment => fragment_stream(node, local_tracks, spec, b, perms).await?,
- StreamFormat::Webvtt => vtt_stream(false, node, local_tracks, spec, b).await?,
- StreamFormat::Jvtt => vtt_stream(true, node, local_tracks, spec, b).await?,
+ match spec {
+ StreamSpec::Original { track } => original_stream(info, track, range, b).await?,
+ StreamSpec::HlsSuperMultiVariant { container } => {
+ hls_supermultivariant_stream(b, info, container).await?;
+ }
+ StreamSpec::HlsMultiVariant { segment, container } => {
+ hls_multivariant_stream(b, info, segment, container).await?
+ }
+ StreamSpec::HlsVariant {
+ segment,
+ track,
+ container,
+ format,
+ } => hls_variant_stream(b, info, segment, track, format, container).await?,
+ StreamSpec::Info { segment: _ } => write_stream_info(info, b).await?,
+ StreamSpec::FragmentIndex { segment, track } => {
+ fragment_index_stream(b, info, segment, track).await?
+ }
+ StreamSpec::Fragment {
+ segment,
+ track,
+ index,
+ container,
+ format,
+ } => fragment_stream(b, info, track, segment, index, format, container).await?,
+ _ => bail!("todo"),
}
Ok(a)
}
-async fn remux_stream(
- node: Arc<Node>,
- local_tracks: Vec<LocalTrack>,
- spec: StreamSpec,
- range: Range<usize>,
- b: DuplexStream,
-) -> Result<()> {
- let b = SyncIoBridge::new(b);
-
- tokio::task::spawn_blocking(move || {
- jellyremuxer::remux_stream_into(
- b,
- range,
- CONF.media_path.to_owned(),
- &node,
- local_tracks,
- spec.track,
- spec.webm.unwrap_or(false),
- )
- });
-
- Ok(())
-}
-
async fn original_stream(
- local_tracks: Vec<LocalTrack>,
- spec: StreamSpec,
+ info: Arc<SMediaInfo>,
+ track: usize,
range: Range<usize>,
b: DuplexStream,
) -> Result<()> {
- if spec.track.len() != 1 {
- bail!("invalid amout of source \"tracks\". original only allows for exactly one.")
- }
-
- let source = local_tracks[spec.track[0]].clone();
- let mut file = File::open(CONF.media_path.join(source.path))
+ let (iinfo, _info) = stream_info(info).await?;
+ let (file_index, _) = *iinfo
+ .track_to_file
+ .get(track)
+ .ok_or(anyhow!("unknown track"))?;
+ let mut file = File::open(&iinfo.paths[file_index])
.await
.context("opening source")?;
file.seek(SeekFrom::Start(range.start as u64))
diff --git a/stream/src/stream_info.rs b/stream/src/stream_info.rs
new file mode 100644
index 0000000..c3746c6
--- /dev/null
+++ b/stream/src/stream_info.rs
@@ -0,0 +1,169 @@
+use anyhow::Result;
+use ebml_struct::matroska::TrackEntry;
+use jellybase::{
+ common::stream::{
+ StreamContainer, StreamFormatInfo, StreamInfo, StreamSegmentInfo, StreamTrackInfo,
+ TrackKind,
+ },
+ CONF,
+};
+use jellyremuxer::metadata::{matroska_metadata, MatroskaMetadata};
+use std::{path::PathBuf, sync::Arc};
+use tokio::{
+ io::{AsyncWriteExt, DuplexStream},
+ spawn,
+ task::spawn_blocking,
+};
+
+use crate::SMediaInfo;
+
+async fn async_matroska_metadata(path: PathBuf) -> Result<Arc<MatroskaMetadata>> {
+ Ok(spawn_blocking(move || matroska_metadata(&path)).await??)
+}
+
+pub(crate) struct InternalStreamInfo {
+ pub paths: Vec<PathBuf>,
+ pub _metadata: Vec<Arc<MatroskaMetadata>>,
+ pub track_to_file: Vec<(usize, u64)>,
+}
+
+pub(crate) async fn stream_info(info: Arc<SMediaInfo>) -> Result<(InternalStreamInfo, StreamInfo)> {
+ let mut metadata = Vec::new();
+ let mut paths = Vec::new();
+ for path in &info.files {
+ metadata.push(async_matroska_metadata(path.clone()).await?);
+ paths.push(path.clone());
+ }
+ let mut tracks = Vec::new();
+ let mut track_to_file = Vec::new();
+
+ for (i, m) in metadata.iter().enumerate() {
+ if let Some(t) = &m.tracks {
+ for t in &t.entries {
+ tracks.push(StreamTrackInfo {
+ name: None,
+ kind: match t.track_type {
+ 1 => TrackKind::Video,
+ 2 => TrackKind::Audio,
+ 17 => TrackKind::Subtitle,
+ _ => todo!(),
+ },
+ formats: stream_formats(t),
+ });
+ track_to_file.push((i, t.track_number));
+ }
+ }
+ }
+
+ let segment = StreamSegmentInfo {
+ name: None,
+ duration: media_duration(&metadata[0]),
+ tracks,
+ };
+ Ok((
+ InternalStreamInfo {
+ _metadata: metadata,
+ paths,
+ track_to_file,
+ },
+ StreamInfo {
+ name: info.info.title.clone(),
+ segments: vec![segment],
+ },
+ ))
+}
+
+fn stream_formats(t: &TrackEntry) -> Vec<StreamFormatInfo> {
+ let mut formats = Vec::new();
+ formats.push(StreamFormatInfo {
+ codec: t.codec_id.to_string(),
+ remux: true,
+ bitrate: 10_000_000., // TODO
+ containers: {
+ let mut x = containers_by_codec(&t.codec_id);
+ // TODO remove this
+ x.retain_mut(|x| *x != StreamContainer::MPEG4);
+ x
+ },
+ bit_depth: t.audio.as_ref().and_then(|a| a.bit_depth.map(|e| e as u8)),
+ samplerate: t.audio.as_ref().map(|a| a.sampling_frequency),
+ channels: t.audio.as_ref().map(|a| a.channels as usize),
+ width: t.video.as_ref().map(|v| v.pixel_width),
+ height: t.video.as_ref().map(|v| v.pixel_height),
+ ..Default::default()
+ });
+
+ match t.track_type {
+ 1 => {
+ let sw = t.video.as_ref().unwrap().pixel_width;
+ let sh = t.video.as_ref().unwrap().pixel_height;
+ for (w, br) in [(3840, 8e6), (1920, 5e6), (1280, 3e6), (640, 1e6)] {
+ if w > sw {
+ continue;
+ }
+ let h = (w * sh) / sw;
+ for (cid, enable) in [
+ ("V_AV1", CONF.encoders.av1.is_some()),
+ ("V_VP8", CONF.encoders.vp8.is_some()),
+ ("V_VP9", CONF.encoders.vp9.is_some()),
+ ("V_MPEG4/ISO/AVC", CONF.encoders.avc.is_some()),
+ ("V_MPEGH/ISO/HEVC", CONF.encoders.hevc.is_some()),
+ ] {
+ if enable {
+ formats.push(StreamFormatInfo {
+ codec: cid.to_string(),
+ bitrate: br,
+ remux: false,
+ containers: containers_by_codec(cid),
+ width: Some(w),
+ height: Some(h),
+ samplerate: None,
+ channels: None,
+ bit_depth: None,
+ });
+ }
+ }
+ }
+ }
+ 2 => {
+ for br in [256e3, 128e3, 64e3] {
+ formats.push(StreamFormatInfo {
+ codec: "A_OPUS".to_string(),
+ bitrate: br,
+ remux: false,
+ containers: containers_by_codec("A_OPUS"),
+ width: None,
+ height: None,
+ samplerate: Some(48e3),
+ channels: Some(2),
+ bit_depth: Some(32),
+ });
+ }
+ }
+ 17 => {}
+ _ => {}
+ }
+
+ formats
+}
+
+fn containers_by_codec(codec: &str) -> Vec<StreamContainer> {
+ use StreamContainer::*;
+ match codec {
+ "V_VP8" | "V_VP9" | "V_AV1" | "A_OPUS" | "A_VORBIS" => vec![Matroska, WebM],
+ "V_MPEG4/ISO/AVC" | "A_AAC" => vec![Matroska, MPEG4],
+ "S_TEXT/UTF8" | "S_TEXT/WEBVTT" => vec![Matroska, WebVTT, WebM, JVTT],
+ _ => vec![Matroska],
+ }
+}
+
+pub(crate) async fn write_stream_info(info: Arc<SMediaInfo>, mut b: DuplexStream) -> Result<()> {
+ let (_, info) = stream_info(info).await?;
+ spawn(async move { b.write_all(&serde_json::to_vec(&info)?).await });
+ Ok(())
+}
+
+fn media_duration(m: &MatroskaMetadata) -> f64 {
+ let info = m.info.as_ref().unwrap();
+ (info.duration.unwrap_or_default() * info.timestamp_scale as f64) / 1_000_000_000.
+}
diff --git a/stream/src/webvtt.rs b/stream/src/webvtt.rs
index f78ac2f..e9f0181 100644
--- a/stream/src/webvtt.rs
+++ b/stream/src/webvtt.rs
@@ -3,61 +3,91 @@
which is licensed under the GNU Affero General Public License (version 3); see /COPYING.
Copyright (C) 2025 metamuffin <metamuffin.org>
*/
-use anyhow::{anyhow, Context, Result};
-use jellybase::{
- cache::async_cache_memory,
- common::{stream::StreamSpec, LocalTrack, Node},
- CONF,
-};
-use jellyremuxer::extract::extract_track;
-use jellytranscoder::subtitles::{parse_subtitles, write_webvtt};
+use anyhow::Result;
+use jellybase::common::{stream::StreamSpec, Node};
use std::sync::Arc;
-use tokio::io::{AsyncWriteExt, DuplexStream};
+use tokio::io::DuplexStream;
pub async fn vtt_stream(
json: bool,
node: Arc<Node>,
- local_tracks: Vec<LocalTrack>,
spec: StreamSpec,
- mut b: DuplexStream,
+ b: DuplexStream,
) -> Result<()> {
+ let _ = b;
+ let _ = spec;
+ let _ = node;
+ let _ = json;
// TODO cache
// TODO should use fragments too? big films take too long...
- let tracki = *spec.track.first().ok_or(anyhow!("no track selected"))?;
- let local_track = local_tracks.first().ok_or(anyhow!("no tracks"))?.clone();
- let track = &node.media.as_ref().unwrap().tracks[tracki];
- let cp = local_track.codec_private.clone();
+ // let tracki = *spec.track.first().ok_or(anyhow!("no track selected"))?;
+ // let local_track = local_tracks.first().ok_or(anyhow!("no tracks"))?.clone();
+ // let track = &node.media.as_ref().unwrap().tracks[tracki];
+ // let cp = local_track.codec_private.clone();
- let subtitles = async_cache_memory(
- &[
- "vtt",
- &format!(
- "{} {}",
- local_track.path.to_str().unwrap(),
- local_track.track
- ),
- ],
- move || async move {
- let blocks = tokio::task::spawn_blocking(move || {
- extract_track(CONF.media_path.clone(), local_track)
- })
- .await??;
- let subtitles = parse_subtitles(&track.codec, cp, blocks)?;
- Ok(subtitles)
- },
- )
- .await?;
+ // let subtitles = async_cache_memory(
+ // &[
+ // "vtt",
+ // &format!(
+ // "{} {}",
+ // local_track.path.to_str().unwrap(),
+ // local_track.track
+ // ),
+ // ],
+ // move || async move {
+ // let blocks = tokio::task::spawn_blocking(move || {
+ // extract_track(CONF.media_path.clone(), local_track)
+ // })
+ // .await??;
+ // let subtitles = parse_subtitles(&track.codec, cp, blocks)?;
+ // Ok(subtitles)
+ // },
+ // )spec.track.first().ok_or(anyhow!("no track selected"))?;
+ // let local_track = local_tracks.first().ok_or(anyhow!("no tracks"))?.clone();
+ // let track = &node.media.as_ref().unwrap().tracks[tracki];
+ // let cp = local_track.codec_private.clone();
- let output = if json {
- serde_json::to_string(subtitles.as_ref())?
- } else {
- write_webvtt(node.title.clone().unwrap_or_default(), subtitles.as_ref())
- .context("writing webvtt")?
- };
- tokio::task::spawn(async move {
- let _ = b.write_all(output.as_bytes()).await;
- });
+ // let subtitles = async_cache_memory(
+ // &[
+ // "vtt",
+ // &format!(
+ // "{} {}",
+ // local_track.path.to_str().unwrap(),
+ // local_track.track
+ // ),
+ // ],
+ // move || async move {
+ // let blocks = tokio::task::spawn_blocking(move || {
+ // extract_track(CONF.media_path.clone(), local_track)
+ // })
+ // .await??;
+ // let subtitles = parse_subtitles(&track.codec, cp, blocks)?;
+ // Ok(subtitles)
+ // },
+ // )
+ // .await?;
+
+ // let output = if json {
+ // serde_json::to_string(subtitles.as_ref())?
+ // } else {
+ // write_webvtt(node.title.clone().unwrap_or_default(), subtitles.as_ref())
+ // .context("writing webvtt")?
+ // };
+ // tokio::task::spawn(async move {
+ // let _ = b.write_all(output.as_bytes()).await;
+ // });
+ // .await?;
+
+ // let output = if json {
+ // serde_json::to_string(subtitles.as_ref())?
+ // } else {
+ // write_webvtt(node.title.clone().unwrap_or_default(), subtitles.as_ref())
+ // .context("writing webvtt")?
+ // };
+ // tokio::task::spawn(async move {
+ // let _ = b.write_all(output.as_bytes()).await;
+ // });
Ok(())
}
diff --git a/transcoder/src/fragment.rs b/transcoder/src/fragment.rs
index 8822fa2..88a311e 100644
--- a/transcoder/src/fragment.rs
+++ b/transcoder/src/fragment.rs
@@ -7,7 +7,8 @@
use crate::LOCAL_VIDEO_TRANSCODING_TASKS;
use jellybase::{
cache::{async_cache_file, CachePath},
- common::jhls::EncodingProfile,
+ common::stream::{StreamFormatInfo, TrackKind},
+ CONF,
};
use log::{debug, info};
use std::process::Stdio;
@@ -17,77 +18,70 @@ use tokio::{
};
// TODO odd video resolutions can cause errors when transcoding to YUV42{0,2}
-// TODO with an implementation that cant handle it (SVT-AV1 such an impl).
+// TODO with an implementation that cant handle it (SVT-AV1 is such an impl).
pub async fn transcode(
key: &str,
- enc: &EncodingProfile,
+ kind: TrackKind,
+ format: &StreamFormatInfo,
input: impl FnOnce(ChildStdin),
) -> anyhow::Result<CachePath> {
async_cache_file(
- &["frag-tc", key, &format!("{enc:?}")],
+ &["frag-tc", key, &format!("{format:?}")],
move |mut output| async move {
let _permit = LOCAL_VIDEO_TRANSCODING_TASKS.acquire().await?;
- debug!("transcoding fragment with {enc:?}");
+ debug!("transcoding fragment with {format:?}");
- let mut args = Vec::new();
- match enc {
- EncodingProfile::Video {
- codec,
- preset,
- bitrate,
- width,
- } => {
- if let Some(width) = width {
- args.push("-vf".to_string());
- args.push(format!("scale={width}:-1"));
- }
- args.push("-c:v".to_string());
- args.push(codec.to_string());
- if let Some(preset) = preset {
- args.push("-preset".to_string());
- args.push(format!("{preset}"));
- }
- args.push("-b:v".to_string());
- args.push(format!("{bitrate}"));
- }
- EncodingProfile::Audio {
- codec,
- bitrate,
- sample_rate,
- channels,
- } => {
- if let Some(channels) = channels {
- args.push("-ac".to_string());
- args.push(format!("{channels}"))
- }
- if let Some(sample_rate) = sample_rate {
- args.push("-ar".to_string());
- args.push(format!("{sample_rate}"))
- }
- args.push("-c:a".to_string());
- args.push(codec.to_string());
- args.push("-b:a".to_string());
- args.push(format!("{bitrate}"));
- }
- EncodingProfile::Subtitles { codec } => {
- args.push("-c:s".to_string());
- args.push(codec.to_string());
- }
+ let template = match format.codec.as_str() {
+ "V_MPEG4/ISO/AVC" => CONF.encoders.avc.as_ref(),
+ "V_MPEGH/ISO/HEVC" => CONF.encoders.hevc.as_ref(),
+ "V_VP8" => CONF.encoders.vp8.as_ref(),
+ "V_VP9" => CONF.encoders.vp9.as_ref(),
+ "V_AV1" => CONF.encoders.av1.as_ref(),
+ _ => None,
+ }
+ .or(CONF.encoders.generic.as_ref())
+ .cloned()
+ .unwrap_or("ffmpeg %i %f %e %o".to_owned());
+
+ let filter = match kind {
+ TrackKind::Video => format!("-vf scale={}:-1", format.width.unwrap()),
+ TrackKind::Audio => format!(""),
+ TrackKind::Subtitle => String::new(),
+ };
+ let typechar = match kind {
+ TrackKind::Video => "v",
+ TrackKind::Audio => "a",
+ TrackKind::Subtitle => "s",
+ };
+ let fallback_encoder = match format.codec.as_str() {
+ "A_OPUS" => "libopus",
+ "V_VP8" => "libvpx",
+ "V_VP9" => "libvpx-vp9",
+ "V_AV1" => "libaom", // svtav1 is x86 only :(
+ "V_MPEG4/ISO/AVC" => "libx264",
+ "V_MPEGH/ISO/HEVC" => "libx265",
+ _ => "",
};
- info!("encoding with {:?}", args.join(" "));
- let mut proc = Command::new("ffmpeg")
+ let args = template
+ .replace("%i", "-f matroska -i pipe:0 -copyts")
+ .replace("%o", "-f matroska pipe:1")
+ .replace("%f", &filter)
+ .replace("%e", "-c:%t %c -b:%t %r")
+ .replace("%t", typechar)
+ .replace("%c", fallback_encoder)
+ .replace("%r", &(format.bitrate as i64).to_string())
+ .replace(" ", " ");
+
+ info!("encoding with {:?}", args);
+
+ let mut args = args.split(" ");
+ let mut proc = Command::new(args.next().unwrap())
.stdin(Stdio::piped())
.stdout(Stdio::piped())
- .args(["-f", "matroska", "-i", "pipe:0"])
.args(args)
- .args(["-f", "webm", "pipe:1"])
.spawn()?;
- // let mut proc = Command::new("cat")
- // .stdin(Stdio::piped())
- // .stdout(Stdio::piped())
- // .spawn()?;
let stdin = proc.stdin.take().unwrap();
let mut stdout = proc.stdout.take().unwrap();
diff --git a/web/script/jshelper b/web/script/jshelper
-Subproject b2bcdcc99e42015085b4d0d63e7c94b2d4f84e2
+Subproject ef36d50d7858a56cbc08bfb4f272bab9476bb97
diff --git a/web/script/player/download.ts b/web/script/player/download.ts
index 18f1e8d..8294d2a 100644
--- a/web/script/player/download.ts
+++ b/web/script/player/download.ts
@@ -20,7 +20,7 @@ export class SegmentDownloader {
const dl_start = performance.now();
const res = await fetch(url)
const dl_header = performance.now();
- if (!res.ok) throw new Error("aaaaa");
+ if (!res.ok) throw new Error("aaaaaa");
const buf = await res.arrayBuffer()
const dl_body = performance.now();
diff --git a/web/script/player/mediacaps.ts b/web/script/player/mediacaps.ts
index e44b92b..3c55aa9 100644
--- a/web/script/player/mediacaps.ts
+++ b/web/script/player/mediacaps.ts
@@ -4,113 +4,76 @@
Copyright (C) 2025 metamuffin <metamuffin.org>
*/
/// <reference lib="dom" />
-import { EncodingProfile, SourceTrack, SourceTrackKind } from "./jhls.d.ts";
+
+import { FormatInfo, StreamContainer } from "./types_stream.ts";
const cache = new Map<string, boolean>()
// TODO this testing method makes the assumption, that if the codec is supported on its own, it can be
// TODO arbitrarly combined with others that are supported. in reality this is true but the spec does not gurantee it.
-export async function test_media_capability(track: SourceTrack): Promise<boolean> {
- const cache_key = `${get_track_kind(track.kind)};${track.codec}`
+export async function test_media_capability(format: FormatInfo, container: StreamContainer): Promise<boolean> {
+ const cache_key = JSON.stringify(format) + container
const cached = cache.get(cache_key);
if (cached !== undefined) return cached
- const r = await test_media_capability_inner(track)
- console.log(`${r ? "positive" : "negative"} media capability test finished for codec=${track.codec}`);
+ const r = await test_media_capability_inner(format, container)
+ console.log(`${r ? "positive" : "negative"} media capability test finished for codec=${format.codec}`);
cache.set(cache_key, r)
return r
}
-async function test_media_capability_inner(track: SourceTrack) {
- if (track.kind == "subtitles") {
+async function test_media_capability_inner(format: FormatInfo, container: StreamContainer) {
+ if (format.codec.startsWith("S_") || format.codec.startsWith("D_")) {
// TODO do we need to check this?
- return track.codec == "V_TEXT/WEBVTT" || track.codec == "D_WEBVTT/SUBTITLES"
+ return format.codec == "S_TEXT/WEBVTT" || format.codec == "S_TEXT/UTF8" || format.codec == "D_WEBVTT/SUBTITLES"
}
let res;
- const codec = MASTROSKA_CODEC_MAP[track.codec]
- if (!codec) return console.warn(`unknown codec: ${track.codec}`), false
- if ("audio" in track.kind) {
+ if (format.codec.startsWith("A_")) {
res = await navigator.mediaCapabilities.decodingInfo({
type: "media-source",
audio: {
- contentType: `audio/webm; codecs=${codec}`,
- samplerate: track.kind.audio.sample_rate,
- channels: "" + track.kind.audio.channels,
- bitrate: 128 * 1000,
+ contentType: track_to_content_type(format, container),
+ samplerate: format.samplerate,
+ channels: "" + format.channels,
+ bitrate: format.bitrate,
}
})
}
- if ("video" in track.kind) {
+ if (format.codec.startsWith("V_")) {
res = await navigator.mediaCapabilities.decodingInfo({
type: "media-source",
video: {
- contentType: `video/webm; codecs=${codec}`,
- framerate: track.kind.video.fps || 30,
- width: track.kind.video.width,
- height: track.kind.video.height,
- bitrate: 5 * 1000 * 1000 // TODO we dont know this but we should in the future
+ contentType: track_to_content_type(format, container),
+ framerate: 30, // TODO get average framerate from server
+ width: format.width ?? 1920,
+ height: format.height ?? 1080,
+ bitrate: format.bitrate
}
})
}
return res?.supported ?? false
}
-export function track_to_content_type(track: SourceTrack): string | undefined {
- if (track.kind == "subtitles") return "video/webm"
- const codec = MASTROSKA_CODEC_MAP[track.codec]
- if (!codec) return
- return `${get_track_kind(track.kind)}/webm; codecs="${codec}"`
-}
-export function profile_to_partial_track(profile: EncodingProfile): SourceTrack {
- if (profile.audio) {
- return {
- codec: FFMPEG_ENCODER_CODEC_MAP[profile.audio.codec],
- kind: { audio: { bit_depth: 16, channels: 2, sample_rate: 48000 } },
- name: "test audio",
- language: "en"
- }
- } else if (profile.video) {
- return {
- codec: FFMPEG_ENCODER_CODEC_MAP[profile.video.codec],
- kind: { video: { fps: 30, height: 1080, width: 1090 } },
- language: "en",
- name: "test video"
- }
- } else if (profile.subtitles) {
- return {
- codec: FFMPEG_ENCODER_CODEC_MAP[profile.subtitles.codec],
- kind: "subtitles",
- language: "en",
- name: "test subtitle"
- }
- } else throw new Error("unreachable");
+export function track_to_content_type(format: FormatInfo, container: StreamContainer): string {
+ let c = CONTAINER_TO_MIME_TYPE[container];
+ if (format.codec.startsWith("A_")) c = c.replace("video/", "audio/")
+ return `${c}; codecs="${MASTROSKA_CODEC_MAP[format.codec]}"`
}
const MASTROSKA_CODEC_MAP: { [key: string]: string } = {
"V_VP9": "vp9",
"V_VP8": "vp8",
"V_AV1": "av1",
- "V_MPEG4/ISO/AVC": "h264",
- "V_MPEGH/ISO/HEVC": "h265",
+ "V_MPEG4/ISO/AVC": "avc1.42C01F",
+ "V_MPEGH/ISO/HEVC": "hev1.1.6.L93.90",
"A_OPUS": "opus",
"A_VORBIS": "vorbis",
"S_TEXT/WEBVTT": "webvtt",
"D_WEBVTT/SUBTITLES": "webvtt",
}
-
-const FFMPEG_ENCODER_CODEC_MAP: { [key: string]: string } = {
- "libsvtav1": "V_AV1",
- "libvpx": "V_VP8",
- "libvpx-vp9": "V_VP9",
- "opus": "A_OPUS",
- "libopus": "A_OPUS",
-}
-
-export type TrackKind = "audio" | "video" | "subtitles"
-export function get_track_kind(track: SourceTrackKind): TrackKind {
- // TODO why different encodings for "subtitles"?
- if (track == "subtitles") return "subtitles"
- if ("subtitles" in track) return "subtitles"
- if ("audio" in track) return "audio"
- if ("video" in track) return "video"
- throw new Error("invalid track");
+const CONTAINER_TO_MIME_TYPE: { [key in StreamContainer]: string } = {
+ webvtt: "text/webvtt",
+ webm: "video/webm",
+ matroska: "video/x-matroska",
+ mpeg4: "video/mp4",
+ jvtt: "application/jellything-vtt+json"
}
diff --git a/web/script/player/mod.ts b/web/script/player/mod.ts
index 53f13bd..e8cde94 100644
--- a/web/script/player/mod.ts
+++ b/web/script/player/mod.ts
@@ -7,11 +7,11 @@
import { OVar, show } from "../jshelper/mod.ts";
import { e } from "../jshelper/mod.ts";
import { Logger } from "../jshelper/src/log.ts";
-import { EncodingProfile } from "./jhls.d.ts";
-import { TrackKind, get_track_kind } from "./mediacaps.ts";
import { Player } from "./player.ts";
import { Popup } from "./popup.ts";
import { Playersync, playersync_controls } from "./sync.ts"
+import { WatchedState } from "./types_node.ts";
+import { FormatInfo, TrackKind } from "./types_stream.ts";
globalThis.addEventListener("DOMContentLoaded", () => {
if (document.body.classList.contains("player")) {
@@ -36,10 +36,24 @@ function toggle_fullscreen() {
else document.documentElement.requestFullscreen()
}
+function get_continue_time(w: WatchedState): number {
+ if (typeof w == "string") return 0
+ else return w.progress
+}
+
+function get_query_start_time() {
+ const u = new URL(globalThis.location.href)
+ const p = u.searchParams.get("t")
+ if (!p) return
+ const x = parseFloat(p)
+ if (Number.isNaN(x)) return
+ return x
+}
function initialize_player(node_id: string): HTMLElement {
const logger = new Logger<string>(s => e("p", s))
- const player = new Player(node_id, logger)
+ const start_time = get_query_start_time() ?? 0 // TODO get_continue_time(ndata.userdata.watched);
+ const player = new Player(`/n/${encodeURIComponent(node_id)}/stream`, `/n/${encodeURIComponent(node_id)}/poster`, start_time, logger)
const show_stats = new OVar(false);
const idle_inhibit = new OVar(false)
const sync_state = new OVar<Playersync | undefined>(undefined)
@@ -50,11 +64,11 @@ function initialize_player(node_id: string): HTMLElement {
let mute_saved_volume = 1;
const toggle_mute = () => {
if (player.volume.value == 0) {
- logger.log("Unmuted.");
+ logger.log("Unmuted.", "volume");
player.volume.value = mute_saved_volume
}
else {
- logger.log("Muted.");
+ logger.log("Muted.", "volume");
mute_saved_volume = player.volume.value
player.volume.value = 0.
}
@@ -70,13 +84,13 @@ function initialize_player(node_id: string): HTMLElement {
const step_track_kind = (kind: TrackKind) => {
// TODO cycle through all of them
const active = player.active_tracks.value.filter(
- ts => get_track_kind(player.tracks![ts.track_index].kind) == kind)
+ ts => player.tracks![ts.track_index].kind == kind)
if (active.length > 0) {
for (const t of active) player.set_track_enabled(t.track_index, false)
} else {
const all_kind = (player.tracks ?? [])
.map((track, index) => ({ index, track }))
- .filter(({ track }) => get_track_kind(track.kind) == kind)
+ .filter(({ track }) => track.kind == kind)
if (all_kind.length < 1) return logger.log(`No ${kind} tracks available`)
player.set_track_enabled(all_kind[0].index, true)
}
@@ -90,7 +104,7 @@ function initialize_player(node_id: string): HTMLElement {
const track_select = (kind: TrackKind) => {
const button = e("div", player.active_tracks.map(_ => {
const active = player.active_tracks.value.filter(
- ts => get_track_kind(player.tracks![ts.track_index].kind) == kind)
+ ts => player.tracks![ts.track_index].kind == kind)
const enabled = active.length > 0
return e("button", MEDIA_KIND_ICONS[kind][+enabled], {
class: "icon",
@@ -103,7 +117,7 @@ function initialize_player(node_id: string): HTMLElement {
} else {
const all_kind = (player.tracks ?? [])
.map((track, index) => ({ index, track }))
- .filter(({ track }) => get_track_kind(track.kind) == kind)
+ .filter(({ track }) => track.kind == kind)
if (all_kind.length < 1) return
player.set_track_enabled(all_kind[0].index, true)
}
@@ -136,7 +150,7 @@ function initialize_player(node_id: string): HTMLElement {
player.active_tracks.map(_ => {
const tracks_avail = (player.tracks ?? [])
.map((track, index) => ({ index, track }))
- .filter(({ track }) => get_track_kind(track.kind) == kind);
+ .filter(({ track }) => track.kind == kind);
if (!tracks_avail.length) return e("p", `No ${kind} tracks available.`) as HTMLElement;
return e("ul", { class: "jsp-track-list" }, ...tracks_avail
.map(({ track, index }): HTMLElement => {
@@ -182,15 +196,16 @@ function initialize_player(node_id: string): HTMLElement {
),
pri = e("div", { class: "jsp-pri" },
pri_current = e("div", { class: "jsp-pri-current" }),
- player.chapters.map(
- chapters => e("div", ...chapters.map(chap => e("div", {
- class: "jsp-chapter",
- style: {
- left: pri_map(chap.time_start ?? 0),
- width: pri_map((chap.time_end ?? player.duration.value) - (chap.time_start ?? 0))
- }
- }, e("p", chap.labels[0][1]))))
- ),
+ // TODO
+ // player.chapters.map(
+ // chapters => e("div", ...chapters.map(chap => e("div", {
+ // class: "jsp-chapter",
+ // style: {
+ // left: pri_map(chap.time_start ?? 0),
+ // width: pri_map((chap.time_end ?? player.duration.value) - (chap.time_start ?? 0))
+ // }
+ // }, e("p", chap.labels[0][1]))))
+ // ),
player.active_tracks.map(
tracks => e("div", ...tracks.map((t, i) => t.buffered.map(
ranges => e("div", ...ranges.map(
@@ -263,8 +278,8 @@ function initialize_player(node_id: string): HTMLElement {
else if (k.code == "KeyS") screenshot_video(player.video)
else if (k.code == "KeyJ") step_track_kind("subtitles")
else if (k.code == "KeyM") toggle_mute()
- else if (k.code == "Digit9") (player.volume.value /= 1.2), logger.log(`Volume decreased to ${show_volume(player.volume.value)}`)
- else if (k.code == "Digit0") (player.volume.value *= 1.2), logger.log(`Volume increased to ${show_volume(player.volume.value)}`)
+ else if (k.code == "Digit9") (player.volume.value /= 1.2), logger.log(`Volume decreased to ${show_volume(player.volume.value)}`, "volume")
+ else if (k.code == "Digit0") (player.volume.value *= 1.2), logger.log(`Volume increased to ${show_volume(player.volume.value)}`, "volume")
else if (k.key == "#") step_track_kind("audio")
else if (k.key == "_") step_track_kind("video")
else if (k.code == "KeyV") show_stats.value = !show_stats.value
@@ -272,8 +287,8 @@ function initialize_player(node_id: string): HTMLElement {
else if (k.code == "ArrowRight") player.seek(player.position.value + 5)
else if (k.code == "ArrowUp") player.seek(player.position.value - 60)
else if (k.code == "ArrowDown") player.seek(player.position.value + 60)
- else if (k.code == "PageUp") player.seek(find_closest_chaps(player).prev?.time_start ?? 0)
- else if (k.code == "PageDown") player.seek(find_closest_chaps(player).next?.time_start ?? player.duration.value)
+ // else if (k.code == "PageUp") player.seek(find_closest_chaps(player).prev?.time_start ?? 0)
+ // else if (k.code == "PageDown") player.seek(find_closest_chaps(player).next?.time_start ?? player.duration.value)
else return;
k.preventDefault()
})
@@ -338,25 +353,29 @@ function mouse_idle(e: HTMLElement, timeout: number): OVar<boolean> {
return idle
}
-export function show_profile(profile: EncodingProfile): string {
- if (profile.audio) return `codec=${profile.audio.codec} br=${show.metric(profile.audio.bitrate, "b/s")}${profile.audio.sample_rate ? ` sr=${show.metric(profile.audio.sample_rate, "Hz")}` : ""}`
- if (profile.video) return `codec=${profile.video.codec} br=${show.metric(profile.video.bitrate, "b/s")} w=${profile.video.width} preset=${profile.video.preset}`
- if (profile.subtitles) return `codec=${profile.subtitles.codec}`
- return `???`
+export function show_format(format: FormatInfo): string {
+ let o = `${format.codec} br=${show.metric(format.bitrate, "b/s")} ac=${format.containers.join(",")}`
+ if (format.width) o += ` w=${format.width}`
+ if (format.height) o += ` h=${format.height}`
+ if (format.samplerate) o += ` ar=${show.metric(format.samplerate, "Hz")}`
+ if (format.channels) o += ` ac=${format.channels}`
+ if (format.bit_depth) o += ` bits=${format.bit_depth}`
+ return o
}
export function show_volume(v: number): string {
return `${v == 0 ? "-∞" : (Math.log10(v) * 10).toFixed(2)}dB | ${(v * 100).toFixed(2)}%`
}
-function find_closest_chaps(player: Player) {
- const now = player.position.value
- const chaps = player.chapters.value
- let prev, next;
- for (const c of chaps) {
- const t_start = (c.time_start ?? 0)
- next = c;
- if (t_start > now) break
- prev = c;
- }
- return { next, prev }
-}
+// TODO
+// function find_closest_chaps(player: Player) {
+// const now = player.position.value
+// const chaps = player.chapters.value
+// let prev, next;
+// for (const c of chaps) {
+// const t_start = (c.time_start ?? 0)
+// next = c;
+// if (t_start > now) break
+// prev = c;
+// }
+// return { next, prev }
+// }
diff --git a/web/script/player/player.ts b/web/script/player/player.ts
index e0a6ddf..f44c14f 100644
--- a/web/script/player/player.ts
+++ b/web/script/player/player.ts
@@ -5,20 +5,18 @@
*/
/// <reference lib="dom" />
import { OVar, e } from "../jshelper/mod.ts";
-import { NodePublic, NodeUserData, SourceTrack, TimeRange } from "./jhls.d.ts";
import { SegmentDownloader } from "./download.ts";
import { PlayerTrack } from "./track/mod.ts";
import { Logger } from "../jshelper/src/log.ts";
-import { WatchedState, Chapter } from "./jhls.d.ts";
-import { get_track_kind } from "./mediacaps.ts";
import { create_track } from "./track/create.ts";
+import { StreamInfo, TimeRange, TrackInfo } from "./types_stream.ts";
export interface BufferRange extends TimeRange { status: "buffered" | "loading" | "queued" }
export class Player {
public video = e("video")
public media_source = new MediaSource();
- public tracks?: SourceTrack[];
- public chapters = new OVar<Chapter[]>([]);
+ public streaminfo?: StreamInfo;
+ public tracks?: TrackInfo[];
public active_tracks = new OVar<PlayerTrack[]>([]);
public downloader: SegmentDownloader = new SegmentDownloader();
@@ -35,8 +33,8 @@ export class Player {
if (s) this.cancel_buffering_pers = this.logger?.log_persistent(s)
}
- constructor(public node_id: string, public logger?: Logger<string>) {
- this.video.poster = `/n/${encodeURIComponent(node_id)}/poster`
+ constructor(public base_url: string, poster: string, private start_time: number, public logger?: Logger<string>) {
+ this.video.poster = poster
this.volume.value = this.video.volume
let skip_change = false;
this.volume.onchange(v => {
@@ -100,40 +98,38 @@ export class Player {
}
async fetch_meta() {
- this.set_pers("Loading metadata...")
- const res = await fetch(`/n/${encodeURIComponent(this.node_id)}`, { headers: { "Accept": "application/json" } })
- if (!res.ok) return this.error.value = "Cannot download node."
+ this.set_pers("Loading stream metadata...")
+ const res = await fetch(`${this.base_url}?info`, { headers: { "Accept": "application/json" } })
+ if (!res.ok) return this.error.value = "Cannot download stream info."
- let ndata!: { node: NodePublic, userdata: NodeUserData } & { error: string }
- try { ndata = await res.json() }
+ let streaminfo!: StreamInfo & { error: string }
+ try { streaminfo = await res.json() }
catch (_) { this.set_pers("Error: Node data invalid") }
- if (ndata.error) return this.set_pers("server error: " + ndata.error)
+ if (streaminfo.error) return this.set_pers("server error: " + streaminfo.error)
this.set_pers()
//! bad code: assignment order is important because chapter callbacks use duration
- this.duration.value = ndata.node.media!.duration
- this.chapters.value = ndata.node.media!.chapters
- this.tracks = ndata.node.media!.tracks
+ this.duration.value = streaminfo.segments[0].duration
+ this.streaminfo = streaminfo
+ this.tracks = streaminfo!.segments[0].tracks;
this.video.src = URL.createObjectURL(this.media_source)
this.media_source.addEventListener("sourceopen", async () => {
let video = false, audio = false, subtitles = false;
for (let i = 0; i < this.tracks!.length; i++) {
const t = this.tracks![i];
- const kind = get_track_kind(t.kind)
- if (kind == "video" && !video)
+ if (t.kind == "video" && !video)
video = true, await this.set_track_enabled(i, true, false)
- if (kind == "audio" && !audio)
+ if (t.kind == "audio" && !audio)
audio = true, await this.set_track_enabled(i, true, false)
- if (kind == "subtitles" && !subtitles)
+ if (t.kind == "subtitles" && !subtitles)
subtitles = true, await this.set_track_enabled(i, true, false)
}
this.set_pers("Buffering initial stream fragments...")
- const start_time = get_query_start_time() ?? get_continue_time(ndata.userdata.watched);
- this.update(start_time)
- this.video.currentTime = start_time
+ this.update(this.start_time)
+ this.video.currentTime = this.start_time
await this.canplay.wait_for(true)
this.set_pers()
@@ -153,7 +149,7 @@ export class Player {
track.abort.abort()
} else if (state && active_index == -1) {
this.logger?.log(`Enabled track ${index}: ${display_track(this.tracks![index])}`)
- this.active_tracks.value.push(create_track(this, this.node_id, index, this.tracks![index])!)
+ this.active_tracks.value.push(create_track(this, this.base_url, 0, index, this.tracks![index])!)
if (update) await this.update()
}
this.active_tracks.change()
@@ -172,20 +168,6 @@ export class Player {
}
}
-function get_continue_time(w: WatchedState): number {
- if (typeof w == "string") return 0
- else return w.progress
-}
-
-function get_query_start_time() {
- const u = new URL(globalThis.location.href)
- const p = u.searchParams.get("t")
- if (!p) return
- const x = parseFloat(p)
- if (Number.isNaN(x)) return
- return x
-}
-
-function display_track(t: SourceTrack): string {
- return `"${t.name}" (${t.language})`
+function display_track(t: TrackInfo): string {
+ return `${t.name}`
}
diff --git a/web/script/player/profiles.ts b/web/script/player/profiles.ts_
index 5ebdeb4..943639c 100644
--- a/web/script/player/profiles.ts
+++ b/web/script/player/profiles.ts_
@@ -5,9 +5,6 @@
*/
/// <reference lib="dom" />
import { OVar } from "../jshelper/mod.ts";
-import { EncodingProfile, SourceTrackKind } from "./jhls.d.ts";
-import { get_track_kind } from "./mediacaps.ts";
-import { profile_to_partial_track, test_media_capability } from "./mediacaps.ts";
import { Player } from "./player.ts";
import { MSEPlayerTrack } from "./track/mse.ts";
@@ -29,7 +26,7 @@ export class ProfileSelector {
for (let id = 0; id < this.track.index!.extra_profiles.length; id++) {
const p = this.track.index!.extra_profiles[id];
// TODO hacky type casting solution
- if (get_track_kind(this.track.metadata.kind) != get_track_kind(p as unknown as SourceTrackKind)) continue
+ if (get_track_kind(this.track.trackinfo.kind) != get_track_kind(p as unknown as SourceTrackKind)) continue
if (!await test_media_capability(profile_to_partial_track(p))) continue
this.profiles.push({ id, order: 0, ...p })
}
diff --git a/web/script/player/track/create.ts b/web/script/player/track/create.ts
index 1aaf12c..95bccca 100644
--- a/web/script/player/track/create.ts
+++ b/web/script/player/track/create.ts
@@ -3,15 +3,13 @@
which is licensed under the GNU Affero General Public License (version 3); see /COPYING.
Copyright (C) 2025 metamuffin <metamuffin.org>
*/
-import { get_track_kind } from "../mediacaps.ts";
import { VttPlayerTrack } from "./vtt.ts";
import { MSEPlayerTrack } from "./mse.ts";
import { Player } from "../player.ts";
-import { SourceTrack } from "../jhls.d.ts";
import { PlayerTrack } from "./mod.ts";
+import { TrackInfo } from "../types_stream.ts";
-export function create_track(player: Player, node_id: string, track_index: number, metadata: SourceTrack): PlayerTrack | undefined {
- const kind = get_track_kind(metadata.kind)
- if (kind == "subtitles") return new VttPlayerTrack(player, node_id, track_index, metadata)
- else return new MSEPlayerTrack(player, node_id, track_index, metadata)
+export function create_track(player: Player, base_url: string, segment_index: number, track_index: number, track_info: TrackInfo): PlayerTrack | undefined {
+ if (track_info.kind == "subtitles") return new VttPlayerTrack(player, base_url, track_index, track_info)
+ else return new MSEPlayerTrack(player, base_url, segment_index, track_index, track_info)
}
diff --git a/web/script/player/track/mod.ts b/web/script/player/track/mod.ts
index 0c7c1c0..99b348c 100644
--- a/web/script/player/track/mod.ts
+++ b/web/script/player/track/mod.ts
@@ -4,11 +4,11 @@
Copyright (C) 2025 metamuffin <metamuffin.org>
*/
/// <reference lib="dom" />
-import { TimeRange } from "../jhls.d.ts";
+import { TimeRange } from "../types_stream.ts";
import { OVar } from "../../jshelper/mod.ts";
import { BufferRange } from "../player.ts";
-export const TARGET_BUFFER_DURATION = 10
+export const TARGET_BUFFER_DURATION = 15
export const MIN_BUFFER_DURATION = 1
export interface AppendRange extends TimeRange { buf: ArrayBuffer, index: number, cb: () => void }
diff --git a/web/script/player/track/mse.ts b/web/script/player/track/mse.ts
index d1a8c12..237b6f6 100644
--- a/web/script/player/track/mse.ts
+++ b/web/script/player/track/mse.ts
@@ -3,41 +3,42 @@
which is licensed under the GNU Affero General Public License (version 3); see /COPYING.
Copyright (C) 2025 metamuffin <metamuffin.org>
*/
-import { JhlsTrackIndex, SourceTrack } from "../jhls.d.ts";
import { OVar } from "../../jshelper/mod.ts";
-import { profile_to_partial_track, track_to_content_type } from "../mediacaps.ts";
+import { test_media_capability, track_to_content_type } from "../mediacaps.ts";
import { BufferRange, Player } from "../player.ts";
-import { EncodingProfileExt, ProfileSelector } from "../profiles.ts";
import { PlayerTrack, AppendRange, TARGET_BUFFER_DURATION, MIN_BUFFER_DURATION } from "./mod.ts";
-import { show_profile } from "../mod.ts";
import { e } from "../../jshelper/src/element.ts";
+import { FormatInfo, FragmentIndex, StreamContainer, TrackInfo } from "../types_stream.ts";
+import { show_format } from "../mod.ts";
+
+interface UsableFormat { format_index: number, usable_index: number, format: FormatInfo, container: StreamContainer }
export class MSEPlayerTrack extends PlayerTrack {
public source_buffer!: SourceBuffer;
private current_load?: AppendRange;
private loading = new Set<number>();
private append_queue: AppendRange[] = [];
- public profile_selector: ProfileSelector;
- public profile = new OVar<EncodingProfileExt | undefined>(undefined);
- public index?: JhlsTrackIndex
+ public index?: FragmentIndex
+ public active_format = new OVar<UsableFormat | undefined>(undefined);
+ public usable_formats: UsableFormat[] = []
constructor(
private player: Player,
- private node_id: string,
+ private base_url: string,
+ private segment_index: number,
track_index: number,
- public metadata: SourceTrack,
+ public trackinfo: TrackInfo,
) {
super(track_index);
- this.profile_selector = new ProfileSelector(player, this, player.downloader.bandwidth_avail);
this.init()
}
async init() {
this.buffered.value = [{ start: 0, end: this.player.duration.value, status: "loading" }]
try {
- const res = await fetch(`/n/${encodeURIComponent(this.node_id)}/stream?format=jhlsi&track=${this.track_index}`, { headers: { "Accept": "application/json" } });
+ const res = await fetch(`${this.base_url}?fragmentindex&segment=${this.segment_index}&track=${this.track_index}`, { headers: { "Accept": "application/json" } });
if (!res.ok) return this.player.error.value = "Cannot download index.", undefined;
- let index!: JhlsTrackIndex & { error: string; };
+ let index!: FragmentIndex & { error: string; };
try { index = await res.json(); }
catch (_) { this.player.set_pers("Error: Failed to fetch node"); }
if (index.error) return this.player.set_pers("server error: " + index.error), undefined;
@@ -49,10 +50,21 @@ export class MSEPlayerTrack extends PlayerTrack {
}
this.buffered.value = []
- const canplay = await this.profile_selector.select_optimal_profile(this.track_index, this.profile);
- if (!canplay) return this.player.set_track_enabled(this.track_index, false)
- const ct = track_to_content_type(this.track_from_profile())!;
- console.log(`track ${this.track_index} source buffer content-type: ${ct}`);
+ console.log(this.trackinfo);
+
+ for (let i = 0; i < this.trackinfo.formats.length; i++) {
+ const format = this.trackinfo.formats[i];
+ for (const container of format.containers) {
+ if (container != "webm" && container != "mpeg4") continue;
+ if (await test_media_capability(format, container))
+ this.usable_formats.push({ container, format, format_index: i, usable_index: this.usable_formats.length })
+ }
+ }
+ if (!this.usable_formats.length)
+ return this.player.logger?.log("No available format is supported by this device. The track can't be played back.")
+ this.active_format.value = this.usable_formats[0]
+
+ const ct = track_to_content_type(this.active_format.value!.format, this.active_format.value!.container);
this.source_buffer = this.player.media_source.addSourceBuffer(ct);
this.abort.signal.addEventListener("abort", () => {
console.log(`destroy source buffer for track ${this.track_index}`);
@@ -81,10 +93,6 @@ export class MSEPlayerTrack extends PlayerTrack {
this.update(this.player.video.currentTime)
}
- track_from_profile(): SourceTrack {
- if (this.profile.value) return profile_to_partial_track(this.profile.value);
- else return this.metadata;
- }
update_buf_ranges() {
if (!this.index) return;
@@ -97,7 +105,7 @@ export class MSEPlayerTrack extends PlayerTrack {
});
}
for (const r of this.loading) {
- ranges.push({ ...this.index.fragments[r], status: "loading" });
+ ranges.push({ ...this.index[r], status: "loading" });
}
this.buffered.value = ranges;
}
@@ -107,8 +115,8 @@ export class MSEPlayerTrack extends PlayerTrack {
this.update_buf_ranges(); // TODO required?
const blocking = [];
- for (let i = 0; i < this.index.fragments.length; i++) {
- const frag = this.index.fragments[i];
+ for (let i = 0; i < this.index.length; i++) {
+ const frag = this.index[i];
if (frag.end < target) continue;
if (frag.start >= target + TARGET_BUFFER_DURATION) break;
if (!this.check_buf_collision(frag.start, frag.end)) continue;
@@ -129,13 +137,13 @@ export class MSEPlayerTrack extends PlayerTrack {
async load(index: number) {
this.loading.add(index);
- await this.profile_selector.select_optimal_profile(this.track_index, this.profile);
- const url = `/n/${encodeURIComponent(this.node_id)}/stream?format=frag&webm=true&track=${this.track_index}&index=${index}${this.profile.value ? `&profile=${this.profile.value.id}` : ""}`;
+ // TODO update format selection
+ const url = `${this.base_url}?fragment&segment=${this.segment_index}&track=${this.track_index}&format=${this.active_format.value!.format_index}&index=${index}&container=${this.active_format.value!.container}`;
const buf = await this.player.downloader.download(url);
await new Promise<void>(cb => {
if (!this.index) return;
if (this.abort.signal.aborted) return;
- this.append_queue.push({ buf, ...this.index.fragments[index], index, cb });
+ this.append_queue.push({ buf, ...this.index[index], index, cb });
this.tick_append();
});
}
@@ -146,9 +154,10 @@ export class MSEPlayerTrack extends PlayerTrack {
this.append_queue.splice(0, 1);
this.current_load = frag;
// TODO why is appending so unreliable?! sometimes it does not add it
- this.source_buffer.changeType(track_to_content_type(this.track_from_profile())!);
- this.source_buffer.timestampOffset = this.profile.value !== undefined ? frag.start : 0
- console.log(`append track ${this.track_index}`);
+ this.source_buffer.changeType(track_to_content_type(this.active_format.value!.format, this.active_format.value!.container));
+ this.source_buffer.timestampOffset = this.active_format.value?.container == "mpeg4" ? frag.start : 0
+ // this.source_buffer.timestampOffset = this.active_format.value?.format.remux ? 0 : frag.start
+ // this.source_buffer.timestampOffset = 0
this.source_buffer.appendBuffer(frag.buf);
}
}
@@ -156,15 +165,17 @@ export class MSEPlayerTrack extends PlayerTrack {
public debug(): OVar<HTMLElement> {
const rtype = (t: string, b: BufferRange[]) => {
const c = b.filter(r => r.status == t);
- return `${c.length} range${c.length != 1 ? "s" : ""}, ${c.reduce((a, v) => a + v.end - v.start, 0).toFixed(2)}s`
+ // ${c.length} range${c.length != 1 ? "s" : ""}
+ return `${c.reduce((a, v) => a + v.end - v.start, 0).toFixed(2)}s`
}
- return this.profile.liftA2(this.buffered, (p, b) =>
+ return this.active_format.liftA2(this.buffered, (p, b) =>
e("pre",
- `mse track ${this.track_index}: ${(p ? `profile ${p.id} (${show_profile(p)})` : `remux`)}`
- + `\n\ttype: ${track_to_content_type(this.track_from_profile())}`
- + `\n\tbuffered: ${rtype("buffered", b)}`
- + `\n\tqueued: ${rtype("queued", b)}`
- + `\n\tloading: ${rtype("loading", b)}`
+ p ?
+ `mse track ${this.track_index}: format ${p.format_index} (${p.format.remux ? "remux" : "transcode"})`
+ + `\n\tformat: ${show_format(p.format)}`
+ + `\n\tbuffer type: ${track_to_content_type(p.format, p.container)}`
+ + `\n\tbuffered: ${rtype("buffered", b)} / queued: ${rtype("queued", b)} / loading: ${rtype("loading", b)}`
+ : ""
) as HTMLElement
)
}
diff --git a/web/script/player/track/vtt.ts b/web/script/player/track/vtt.ts
index ea4951c..3dd7670 100644
--- a/web/script/player/track/vtt.ts
+++ b/web/script/player/track/vtt.ts
@@ -4,8 +4,8 @@
Copyright (C) 2025 metamuffin <metamuffin.org>
*/
import { e } from "../../jshelper/src/element.ts";
-import { SourceTrack, JvttCue } from "../jhls.d.ts";
import { Player } from "../player.ts";
+import { JvttCue, TrackInfo } from "../types_stream.ts";
import { PlayerTrack } from "./mod.ts";
export class VttPlayerTrack extends PlayerTrack {
@@ -16,10 +16,10 @@ export class VttPlayerTrack extends PlayerTrack {
private player: Player,
private node_id: string,
track_index: number,
- private metadata: SourceTrack,
+ private track_info: TrackInfo,
) {
super(track_index);
- this.track = this.player.video.addTextTrack("subtitles", this.metadata.name, this.metadata.language);
+ this.track = this.player.video.addTextTrack("subtitles", this.track_info.name, this.track_info.language);
this.buffered.value = [{ start: 0, end: this.player.duration.value, status: "loading" }]
this.init()
}
diff --git a/web/script/player/jhls.d.ts b/web/script/player/types_node.ts
index c7325e4..6946313 100644
--- a/web/script/player/jhls.d.ts
+++ b/web/script/player/types_node.ts
@@ -4,13 +4,6 @@
Copyright (C) 2025 metamuffin <metamuffin.org>
*/
-export interface JhlsTrackIndex {
- fragments: TimeRange[],
- extra_profiles: EncodingProfile[],
-}
-
-export interface TimeRange { start: number, end: number }
-
export interface NodePublic {
kind: NodeKind,
title?: string,
@@ -76,28 +69,8 @@ export type SourceTrackKind = {
}
} | "subtitles";
-export interface EncodingProfile {
- video?: {
- codec: string,
- preset: number,
- bitrate: number,
- width: number,
- },
- audio?: {
- codec: string,
- bitrate: number,
- sample_rate?: number,
- },
- subtitles?: {
- codec: string,
- },
-}
-
export interface NodeUserData {
watched: WatchedState
}
export type WatchedState = "none" | "watched" | "pending" | { progress: number }
-export interface JvttCue extends TimeRange {
- content: string
-} \ No newline at end of file
diff --git a/web/script/player/types_stream.ts b/web/script/player/types_stream.ts
new file mode 100644
index 0000000..290a778
--- /dev/null
+++ b/web/script/player/types_stream.ts
@@ -0,0 +1,39 @@
+/*
+ This file is part of jellything (https://codeberg.org/metamuffin/jellything)
+ which is licensed under the GNU Affero General Public License (version 3); see /COPYING.
+ Copyright (C) 2025 metamuffin <metamuffin.org>
+*/
+export type FragmentIndex = TimeRange[]
+export interface TimeRange { start: number, end: number }
+export interface JvttCue extends TimeRange {
+ content: string
+}
+export interface StreamInfo {
+ name?: string,
+ segments: SegmentInfo[],
+}
+export interface SegmentInfo {
+ name?: string,
+ duration: number,
+ tracks: TrackInfo[],
+}
+export type TrackKind = "video" | "audio" | "subtitles"
+export interface TrackInfo {
+ name?: string,
+ language?: string,
+ kind: TrackKind,
+ formats: FormatInfo[]
+}
+export type StreamContainer = "webm" | "matroska" | "mpeg4" | "jvtt" | "webvtt"
+export interface FormatInfo {
+ codec: string,
+ bitrate: number,
+ remux: boolean,
+ containers: StreamContainer[]
+
+ width?: number,
+ height?: number,
+ channels?: number,
+ samplerate?: number,
+ bit_depth?: number,
+}
diff --git a/web/style/js-player.css b/web/style/js-player.css
index c9a48e9..33669a5 100644
--- a/web/style/js-player.css
+++ b/web/style/js-player.css
@@ -112,6 +112,8 @@
padding: 0.15em;
margin: 0px;
font-size: large;
+}
+.jsp .jsh-log-line-appear {
animation-name: appear;
animation-timing-function: linear;
animation-duration: 0.5s;