-rw-r--r--  Cargo.lock                       |  47
-rw-r--r--  Cargo.toml                       |   2
-rw-r--r--  base/Cargo.toml                  |   1
-rw-r--r--  base/src/assetfed.rs             |   8
-rw-r--r--  base/src/cache.rs                | 306
-rw-r--r--  base/src/lib.rs                  |   1
-rw-r--r--  common/src/config.rs             |  20
-rw-r--r--  import/Cargo.toml                |   1
-rw-r--r--  import/src/acoustid.rs           |   2
-rw-r--r--  import/src/lib.rs                |   7
-rw-r--r--  import/src/musicbrainz.rs        |   2
-rw-r--r--  import/src/tmdb.rs               |  10
-rw-r--r--  import/src/trakt.rs              |   6
-rw-r--r--  import/src/vgmdb.rs              |   2
-rw-r--r--  import/src/wikidata.rs           |   2
-rw-r--r--  import/src/wikimedia_commons.rs  |   2
-rw-r--r--  remuxer/Cargo.toml               |   2
-rw-r--r--  remuxer/src/metadata.rs          |  23
-rw-r--r--  remuxer/src/remux.rs             |   2
-rw-r--r--  remuxer/src/seek_index.rs        |   2
-rw-r--r--  server/Cargo.toml                |   1
-rw-r--r--  server/src/ui/assets.rs          |   5
-rw-r--r--  stream/Cargo.toml                |   4
-rw-r--r--  stream/src/fragment.rs           |   2
-rw-r--r--  stream/src/fragment_index.rs     |   2
-rw-r--r--  stream/src/hls.rs                |   4
-rw-r--r--  stream/src/lib.rs                |  30
-rw-r--r--  stream/src/stream_info.rs        |  20
-rw-r--r--  stream/src/webvtt.rs             |   2
-rw-r--r--  transcoder/Cargo.toml            |   4
-rw-r--r--  transcoder/src/fragment.rs       |  16
-rw-r--r--  transcoder/src/image.rs          |   2
-rw-r--r--  transcoder/src/lib.rs            |  26
-rw-r--r--  transcoder/src/subtitles.rs      |   2
-rw-r--r--  transcoder/src/thumbnail.rs      |   2
35 files changed, 154 insertions, 416 deletions
diff --git a/Cargo.lock b/Cargo.lock
index dfd9174..563b2c2 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1756,10 +1756,11 @@ dependencies = [
"base64",
"bincode",
"humansize",
+ "jellycache",
"jellyclient",
"jellycommon",
"log",
- "rand 0.9.0",
+ "rand 0.9.1",
"redb",
"serde",
"serde_json",
@@ -1770,6 +1771,21 @@ dependencies = [
]
[[package]]
+name = "jellycache"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "base64",
+ "bincode",
+ "humansize",
+ "log",
+ "rand 0.9.1",
+ "serde",
+ "sha2",
+ "tokio",
+]
+
+[[package]]
name = "jellyclient"
version = "0.1.0"
dependencies = [
@@ -1806,6 +1822,7 @@ dependencies = [
"crossbeam-channel",
"futures",
"jellybase",
+ "jellycache",
"jellyclient",
"jellyimport-fallback-generator",
"jellyremuxer",
@@ -1847,7 +1864,7 @@ dependencies = [
"anyhow",
"bincode",
"ebml-struct",
- "jellybase",
+ "jellycache",
"jellymatroska",
"log",
"serde",
@@ -1860,11 +1877,13 @@ version = "0.1.0"
dependencies = [
"anyhow",
"ebml-struct",
- "jellybase",
+ "jellycache",
+ "jellycommon",
"jellymatroska",
"jellyremuxer",
"jellytranscoder",
"log",
+ "serde",
"serde_json",
"tokio",
"tokio-util",
@@ -1887,13 +1906,14 @@ dependencies = [
"glob",
"humansize",
"jellybase",
+ "jellycache",
"jellycommon",
"jellyimport",
"jellystream",
"jellytranscoder",
"log",
"markup",
- "rand 0.9.0",
+ "rand 0.9.1",
"rocket",
"rocket_ws",
"serde",
@@ -1920,7 +1940,7 @@ dependencies = [
"jellycommon",
"jellyimport",
"log",
- "rand 0.9.0",
+ "rand 0.9.1",
"reqwest",
"serde",
"serde_json",
@@ -1935,7 +1955,8 @@ dependencies = [
"anyhow",
"image",
"imgref",
- "jellybase",
+ "jellycache",
+ "jellycommon",
"jellyremuxer",
"libavif-image",
"log",
@@ -1943,6 +1964,7 @@ dependencies = [
"ravif",
"rayon",
"rgb",
+ "serde",
"tokio",
]
@@ -2079,6 +2101,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f"
[[package]]
+name = "logic"
+version = "0.1.0"
+
+[[package]]
name = "loom"
version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2776,13 +2802,12 @@ dependencies = [
[[package]]
name = "rand"
-version = "0.9.0"
+version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94"
+checksum = "9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97"
dependencies = [
"rand_chacha 0.9.0",
"rand_core 0.9.0",
- "zerocopy 0.8.14",
]
[[package]]
@@ -4092,6 +4117,10 @@ dependencies = [
]
[[package]]
+name = "ui"
+version = "0.1.0"
+
+[[package]]
name = "uncased"
version = "0.9.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/Cargo.toml b/Cargo.toml
index e351db0..6329846 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -10,7 +10,7 @@ members = [
"transcoder",
"base",
"import",
- "import/fallback_generator",
+ "import/fallback_generator", "ui", "logic", "cache",
]
resolver = "2"
diff --git a/base/Cargo.toml b/base/Cargo.toml
index 6474e25..0ab4a4b 100644
--- a/base/Cargo.toml
+++ b/base/Cargo.toml
@@ -6,6 +6,7 @@ edition = "2021"
[dependencies]
jellycommon = { path = "../common" }
jellyclient = { path = "../client" }
+jellycache = { path = "../cache" }
serde = { version = "1.0.217", features = ["derive"] }
serde_yaml = "0.9.34"
log = { workspace = true }
diff --git a/base/src/assetfed.rs b/base/src/assetfed.rs
index 697cacb..621169f 100644
--- a/base/src/assetfed.rs
+++ b/base/src/assetfed.rs
@@ -1,4 +1,9 @@
-use crate::{cache::CachePath, SECRETS};
+/*
+ This file is part of jellything (https://codeberg.org/metamuffin/jellything)
+ which is licensed under the GNU Affero General Public License (version 3); see /COPYING.
+ Copyright (C) 2025 metamuffin <metamuffin.org>
+*/
+use crate::SECRETS;
use aes_gcm_siv::{
aead::{generic_array::GenericArray, Aead},
Aes256GcmSiv, KeyInit,
@@ -6,6 +11,7 @@ use aes_gcm_siv::{
use anyhow::{anyhow, bail, Context};
use base64::Engine;
use bincode::{Decode, Encode};
+use jellycache::CachePath;
use jellycommon::{Asset, LocalTrack};
use log::warn;
use serde::Serialize;
diff --git a/base/src/cache.rs b/base/src/cache.rs
deleted file mode 100644
index 02c42c8..0000000
--- a/base/src/cache.rs
+++ /dev/null
@@ -1,306 +0,0 @@
-/*
- This file is part of jellything (https://codeberg.org/metamuffin/jellything)
- which is licensed under the GNU Affero General Public License (version 3); see /COPYING.
- Copyright (C) 2025 metamuffin <metamuffin.org>
-*/
-use crate::CONF;
-use anyhow::{anyhow, Context};
-use base64::Engine;
-use bincode::{Decode, Encode};
-use log::{info, warn};
-use rand::random;
-use serde::Serialize;
-use sha2::Sha512;
-use std::{
- any::Any,
- collections::{BTreeMap, HashMap},
- fs::rename,
- future::Future,
- hash::{Hash, Hasher},
- io::{Seek, Write},
- path::PathBuf,
- sync::{
- atomic::{AtomicBool, AtomicUsize, Ordering},
- Arc, LazyLock, RwLock,
- },
- time::Instant,
-};
-use tokio::{
- io::{AsyncReadExt, AsyncSeekExt, AsyncWriteExt},
- sync::Mutex,
-};
-
-#[derive(Debug, Encode, Decode, Serialize)]
-pub struct CachePath(pub PathBuf);
-impl CachePath {
- pub fn abs(&self) -> PathBuf {
- CONF.cache_path.join(&self.0)
- }
-}
-
-pub fn cache_location(kind: &str, key: impl Hash) -> (usize, CachePath) {
- use sha2::Digest;
- struct ShaHasher(Sha512);
- impl Hasher for ShaHasher {
- fn finish(&self) -> u64 {
- unreachable!()
- }
- fn write(&mut self, bytes: &[u8]) {
- self.0.update(bytes);
- }
- }
- let mut d = ShaHasher(sha2::Sha512::new());
- d.0.update(kind);
- d.0.update(b"\0");
- key.hash(&mut d);
-
- let d = d.0.finalize();
- let n =
- d[0] as usize | ((d[1] as usize) << 8) | ((d[2] as usize) << 16) | ((d[3] as usize) << 24);
- let fname = base64::engine::general_purpose::URL_SAFE.encode(d);
- let fname = &fname[..30]; // 180 bits
- let fname = format!("{}/{}", kind, fname);
- (n, CachePath(fname.into()))
-}
-
-const CACHE_GENERATION_BUCKET_COUNT: usize = 1024;
-pub static CACHE_GENERATION_LOCKS: LazyLock<[Mutex<()>; CACHE_GENERATION_BUCKET_COUNT]> =
- LazyLock::new(|| [(); CACHE_GENERATION_BUCKET_COUNT].map(|_| Mutex::new(())));
-
-pub async fn async_cache_file<Fun, Fut>(
- kind: &str,
- key: impl Hash,
- generate: Fun,
-) -> Result<CachePath, anyhow::Error>
-where
- Fun: FnOnce(tokio::fs::File) -> Fut,
- Fut: Future<Output = Result<(), anyhow::Error>>,
-{
- let (bucket, location) = cache_location(kind, key);
- let loc_abs = location.abs();
- // we need a lock even if it exists since somebody might be still in the process of writing.
- let _guard = CACHE_GENERATION_LOCKS[bucket % CACHE_GENERATION_BUCKET_COUNT]
- .lock()
- .await;
- let exists = tokio::fs::try_exists(&loc_abs)
- .await
- .context("unable to test for cache file existance")?;
- if !exists {
- let temp_path = CONF.cache_path.join(format!("temp-{:x}", random::<u128>()));
- let f = tokio::fs::File::create(&temp_path)
- .await
- .context("creating new cache file")?;
- match generate(f).await {
- Ok(()) => (),
- Err(e) => {
- warn!("cache generation failed, unlinking entry");
- tokio::fs::remove_file(temp_path).await?;
- return Err(e);
- }
- }
- tokio::fs::create_dir_all(loc_abs.parent().unwrap())
- .await
- .context("create kind dir")?;
- tokio::fs::rename(temp_path, &loc_abs)
- .await
- .context("rename cache")?;
- }
- drop(_guard);
- Ok(location)
-}
-
-thread_local! { pub static WITHIN_CACHE_FILE: AtomicBool = const { AtomicBool::new(false) }; }
-
-pub fn cache_file<Fun>(
- kind: &str,
- key: impl Hash,
- mut generate: Fun,
-) -> Result<CachePath, anyhow::Error>
-where
- Fun: FnMut(std::fs::File) -> Result<(), anyhow::Error>,
-{
- let (bucket, location) = cache_location(kind, key);
- let loc_abs = location.abs();
- // we need a lock even if it exists since somebody might be still in the process of writing.
- let already_within = WITHIN_CACHE_FILE.with(|a| a.swap(true, Ordering::Relaxed));
- let _guard = if already_within {
- // TODO stupid hack to avoid deadlock for nested cache_file. proper solution needed
- CACHE_GENERATION_LOCKS[bucket % CACHE_GENERATION_BUCKET_COUNT]
- .try_lock()
- .ok()
- } else {
- Some(CACHE_GENERATION_LOCKS[bucket % CACHE_GENERATION_BUCKET_COUNT].blocking_lock())
- };
- if !loc_abs.exists() {
- let temp_path = CONF.cache_path.join(format!("temp-{:x}", random::<u128>()));
- let f = std::fs::File::create(&temp_path).context("creating new cache file")?;
- match generate(f) {
- Ok(()) => (),
- Err(e) => {
- warn!("cache generation failed, unlinking entry");
- std::fs::remove_file(temp_path)?;
- return Err(e);
- }
- }
- std::fs::create_dir_all(loc_abs.parent().unwrap()).context("create kind dir")?;
- rename(temp_path, loc_abs).context("rename cache")?;
- }
- if !already_within {
- WITHIN_CACHE_FILE.with(|a| a.swap(false, Ordering::Relaxed));
- }
- drop(_guard);
- Ok(location)
-}
-
-pub struct InMemoryCacheEntry {
- size: usize,
- last_access: Instant,
- object: Arc<dyn Any + Send + Sync + 'static>,
-}
-pub static CACHE_IN_MEMORY_OBJECTS: LazyLock<RwLock<HashMap<PathBuf, InMemoryCacheEntry>>> =
- LazyLock::new(|| RwLock::new(HashMap::new()));
-pub static CACHE_IN_MEMORY_SIZE: AtomicUsize = AtomicUsize::new(0);
-
-pub fn cache_memory<Fun, T>(
- kind: &str,
- key: impl Hash,
- mut generate: Fun,
-) -> Result<Arc<T>, anyhow::Error>
-where
- Fun: FnMut() -> Result<T, anyhow::Error>,
- T: Encode + Decode + Send + Sync + 'static,
-{
- let (_, location) = cache_location(kind, &key);
- {
- let mut g = CACHE_IN_MEMORY_OBJECTS.write().unwrap();
- if let Some(entry) = g.get_mut(&location.abs()) {
- entry.last_access = Instant::now();
- let object = entry
- .object
- .clone()
- .downcast::<T>()
- .map_err(|_| anyhow!("inconsistent types for in-memory cache"))?;
- return Ok(object);
- }
- }
-
- let location = cache_file(kind, &key, move |file| {
- let object = generate()?;
- let mut file = std::io::BufWriter::new(file);
- bincode::encode_into_std_write(&object, &mut file, bincode::config::standard())
- .context("encoding cache object")?;
- file.flush()?;
- Ok(())
- })?;
- let mut file = std::io::BufReader::new(std::fs::File::open(location.abs())?);
- let object = bincode::decode_from_std_read::<T, _, _>(&mut file, bincode::config::standard())
- .context("decoding cache object")?;
- let object = Arc::new(object);
- let size = file.stream_position()? as usize; // this is an approximation mainly since varint is used in bincode
-
- {
- let mut g = CACHE_IN_MEMORY_OBJECTS.write().unwrap();
- g.insert(
- location.abs(),
- InMemoryCacheEntry {
- size,
- last_access: Instant::now(),
- object: object.clone(),
- },
- );
- CACHE_IN_MEMORY_SIZE.fetch_add(size, Ordering::Relaxed);
- }
-
- cleanup_cache();
-
- Ok(object)
-}
-
-pub async fn async_cache_memory<Fun, Fut, T>(
- kind: &str,
- key: impl Hash,
- generate: Fun,
-) -> Result<Arc<T>, anyhow::Error>
-where
- Fun: FnOnce() -> Fut,
- Fut: Future<Output = Result<T, anyhow::Error>>,
- T: Encode + Decode + Send + Sync + 'static,
-{
- let (_, location) = cache_location(kind, &key);
- {
- let mut g = CACHE_IN_MEMORY_OBJECTS.write().unwrap();
- if let Some(entry) = g.get_mut(&location.abs()) {
- entry.last_access = Instant::now();
- let object = entry
- .object
- .clone()
- .downcast::<T>()
- .map_err(|_| anyhow!("inconsistent types for in-memory cache"))?;
- return Ok(object);
- }
- }
-
- let location = async_cache_file(kind, &key, move |mut file| async move {
- let object = generate().await?;
- let data = bincode::encode_to_vec(&object, bincode::config::standard())
- .context("encoding cache object")?;
-
- file.write_all(&data).await?;
-
- Ok(())
- })
- .await?;
- let mut file = tokio::fs::File::open(&location.abs()).await?;
- let mut data = Vec::new();
- file.read_to_end(&mut data)
- .await
- .context("reading cache object")?;
- let (object, _) = bincode::decode_from_slice::<T, _>(&data, bincode::config::standard())
- .context("decoding cache object")?;
- let object = Arc::new(object);
- let size = file.stream_position().await? as usize; // this is an approximation mainly since varint is used in bincode
-
- {
- let mut g = CACHE_IN_MEMORY_OBJECTS.write().unwrap();
- g.insert(
- location.abs(),
- InMemoryCacheEntry {
- size,
- last_access: Instant::now(),
- object: object.clone(),
- },
- );
- CACHE_IN_MEMORY_SIZE.fetch_add(size, Ordering::Relaxed);
- }
-
- cleanup_cache();
-
- Ok(object)
-}
-
-pub fn cleanup_cache() {
- let current_size = CACHE_IN_MEMORY_SIZE.load(Ordering::Relaxed);
- if current_size < CONF.max_in_memory_cache_size {
- return;
- }
- info!("running cache eviction");
- let mut g = CACHE_IN_MEMORY_OBJECTS.write().unwrap();
-
- // TODO: if two entries have *exactly* the same size, only one of the will be remove; this is fine for now
- let mut k = BTreeMap::new();
- for (loc, entry) in g.iter() {
- k.insert(entry.last_access.elapsed(), (loc.to_owned(), entry.size));
- }
- let mut reduction = 0;
- for (loc, size) in k.values().rev().take(k.len().div_ceil(2)) {
- g.remove(loc);
- reduction += size;
- }
- CACHE_IN_MEMORY_SIZE.fetch_sub(reduction, Ordering::Relaxed);
- drop(g);
-
- info!(
- "done, {} freed",
- humansize::format_size(reduction, humansize::DECIMAL)
- );
-}
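
The cache helpers deleted above move into the new jellycache crate; every caller touched later in this diff simply swaps use jellybase::cache::... for use jellycache::... A minimal caller sketch follows, assuming the moved functions keep the signatures shown in the deleted file; the "example" kind and the cached_blob, key, and bytes names are illustrative only, not taken from the diff.

    // Sketch only: assumes jellycache::cache_file keeps the signature from the
    // deleted base/src/cache.rs, i.e. (kind: &str, key: impl Hash,
    // generate: FnMut(std::fs::File) -> anyhow::Result<()>) -> anyhow::Result<CachePath>.
    use anyhow::Result;
    use jellycache::{cache_file, CachePath};
    use std::io::Write;

    // The closure runs only on a cache miss; it writes into a temp file which is
    // then renamed into <cache_path>/<kind>/<hash>, and the CachePath is returned.
    fn cached_blob(key: &str, bytes: &[u8]) -> Result<CachePath> {
        let data = bytes.to_vec();
        cache_file("example", key, move |mut file| {
            file.write_all(&data)?;
            Ok(())
        })
    }

This mirrors the att-cover call site in remuxer/src/metadata.rs further down, which now stores the CachePath directly instead of wrapping it in an AssetInner.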
diff --git a/base/src/lib.rs b/base/src/lib.rs
index 436aa9a..c897754 100644
--- a/base/src/lib.rs
+++ b/base/src/lib.rs
@@ -4,7 +4,6 @@
Copyright (C) 2025 metamuffin <metamuffin.org>
*/
pub mod assetfed;
-pub mod cache;
pub mod database;
pub mod federation;
pub mod locale;
diff --git a/common/src/config.rs b/common/src/config.rs
index e2f4e62..5dc2d14 100644
--- a/common/src/config.rs
+++ b/common/src/config.rs
@@ -28,26 +28,6 @@ pub struct GlobalConfig {
pub login_expire: i64,
#[serde(default)]
pub default_permission_set: PermissionSet,
- #[serde(default)]
- pub transcoder: TranscoderConfig,
-}
-
-#[rustfmt::skip]
-#[derive(Debug, Deserialize, Serialize, Default)]
-pub struct TranscoderConfig {
- #[serde(default)] pub offer_avc: bool,
- #[serde(default)] pub offer_hevc: bool,
- #[serde(default)] pub offer_vp8: bool,
- #[serde(default)] pub offer_vp9: bool,
- #[serde(default)] pub offer_av1: bool,
- #[serde(default)] pub enable_rkmpp: bool,
- #[serde(default)] pub enable_rkrga: bool,
- #[serde(default)] pub use_svtav1: bool,
- #[serde(default)] pub use_rav1e: bool,
- pub svtav1_preset: Option<u8>, // 0..=13, high is fast
- pub rav1e_preset: Option<u8>, // 0..=10
- pub aom_preset: Option<u8>, // 0..=8, high is fast
- pub x264_preset: Option<String>,
}
#[derive(Serialize, Deserialize, Debug, Default)]
diff --git a/import/Cargo.toml b/import/Cargo.toml
index ad9d529..506ed24 100644
--- a/import/Cargo.toml
+++ b/import/Cargo.toml
@@ -7,6 +7,7 @@ edition = "2021"
jellybase = { path = "../base" }
jellyclient = { path = "../client" }
jellyremuxer = { path = "../remuxer" }
+jellycache = { path = "../cache" }
jellyimport-fallback-generator = { path = "fallback_generator" }
rayon = "1.10.0"
diff --git a/import/src/acoustid.rs b/import/src/acoustid.rs
index 19ebc3d..741d491 100644
--- a/import/src/acoustid.rs
+++ b/import/src/acoustid.rs
@@ -6,7 +6,7 @@
use crate::USER_AGENT;
use anyhow::{Context, Result};
use bincode::{Decode, Encode};
-use jellybase::cache::async_cache_memory;
+use jellycache::async_cache_memory;
use log::info;
use reqwest::{
header::{HeaderMap, HeaderName, HeaderValue},
diff --git a/import/src/lib.rs b/import/src/lib.rs
index 0d72da2..2f7383a 100644
--- a/import/src/lib.rs
+++ b/import/src/lib.rs
@@ -9,11 +9,11 @@ use anyhow::{anyhow, bail, Context, Result};
use infojson::YVideo;
use jellybase::{
assetfed::AssetInner,
- cache::cache_file,
common::{Chapter, MediaInfo, Node, NodeID, NodeKind, Rating, SourceTrack, SourceTrackKind},
database::Database,
CONF, SECRETS,
};
+use jellycache::cache_file;
use jellyclient::{
Appearance, LocalTrack, ObjectIds, PeopleGroup, Person, TmdbKind, TrackSource, TraktKind,
Visibility,
@@ -425,7 +425,10 @@ fn import_media_file(
node.slug = slug;
node.title = info.title.or(node.title.clone());
node.visibility = iflags.visibility;
- node.poster = m.cover.or(node.poster.clone());
+ node.poster = m
+ .cover
+ .map(|a| AssetInner::Cache(a).ser())
+ .or(node.poster.clone());
node.description = tags
.remove("DESCRIPTION")
.or(tags.remove("SYNOPSIS"))
diff --git a/import/src/musicbrainz.rs b/import/src/musicbrainz.rs
index d02de69..612c4ba 100644
--- a/import/src/musicbrainz.rs
+++ b/import/src/musicbrainz.rs
@@ -7,7 +7,7 @@
use crate::USER_AGENT;
use anyhow::{Context, Result};
use bincode::{Decode, Encode};
-use jellybase::cache::async_cache_memory;
+use jellycache::async_cache_memory;
use log::info;
use reqwest::{
header::{HeaderMap, HeaderName, HeaderValue},
diff --git a/import/src/tmdb.rs b/import/src/tmdb.rs
index 45d7650..dff0e95 100644
--- a/import/src/tmdb.rs
+++ b/import/src/tmdb.rs
@@ -6,13 +6,11 @@
use crate::USER_AGENT;
use anyhow::{anyhow, bail, Context};
use bincode::{Decode, Encode};
-use jellybase::{
- cache::{async_cache_file, async_cache_memory, CachePath},
- common::{
- chrono::{format::Parsed, Utc},
- TmdbKind,
- },
+use jellybase::common::{
+ chrono::{format::Parsed, Utc},
+ TmdbKind,
};
+use jellycache::{async_cache_file, async_cache_memory, CachePath};
use log::info;
use reqwest::{
header::{HeaderMap, HeaderName, HeaderValue},
diff --git a/import/src/trakt.rs b/import/src/trakt.rs
index 86f2f42..434a3a0 100644
--- a/import/src/trakt.rs
+++ b/import/src/trakt.rs
@@ -6,10 +6,8 @@
use crate::USER_AGENT;
use anyhow::Context;
use bincode::{Decode, Encode};
-use jellybase::{
- cache::async_cache_memory,
- common::{Appearance, ObjectIds, PeopleGroup, Person, TraktKind},
-};
+use jellybase::common::{Appearance, ObjectIds, PeopleGroup, Person, TraktKind};
+use jellycache::async_cache_memory;
use log::info;
use reqwest::{
header::{HeaderMap, HeaderName, HeaderValue},
diff --git a/import/src/vgmdb.rs b/import/src/vgmdb.rs
index fb94f5f..6278aaa 100644
--- a/import/src/vgmdb.rs
+++ b/import/src/vgmdb.rs
@@ -6,7 +6,7 @@
use crate::USER_AGENT;
use anyhow::{Context, Result};
-use jellybase::cache::{async_cache_file, async_cache_memory, CachePath};
+use jellycache::{async_cache_file, async_cache_memory, CachePath};
use log::info;
use regex::Regex;
use reqwest::{
diff --git a/import/src/wikidata.rs b/import/src/wikidata.rs
index 9683950..1b7f06e 100644
--- a/import/src/wikidata.rs
+++ b/import/src/wikidata.rs
@@ -6,7 +6,7 @@
use crate::USER_AGENT;
use anyhow::{bail, Context, Result};
-use jellybase::cache::async_cache_memory;
+use jellycache::async_cache_memory;
use log::info;
use reqwest::{
header::{HeaderMap, HeaderName, HeaderValue},
diff --git a/import/src/wikimedia_commons.rs b/import/src/wikimedia_commons.rs
index faaef44..a5889fb 100644
--- a/import/src/wikimedia_commons.rs
+++ b/import/src/wikimedia_commons.rs
@@ -6,7 +6,7 @@
use crate::USER_AGENT;
use anyhow::{Context, Result};
-use jellybase::cache::{async_cache_file, CachePath};
+use jellycache::{async_cache_file, CachePath};
use reqwest::{
header::{HeaderMap, HeaderName, HeaderValue},
redirect::Policy,
diff --git a/remuxer/Cargo.toml b/remuxer/Cargo.toml
index 16713df..98dd86c 100644
--- a/remuxer/Cargo.toml
+++ b/remuxer/Cargo.toml
@@ -5,7 +5,7 @@ edition = "2021"
[dependencies]
jellymatroska = { path = "../matroska" }
-jellybase = { path = "../base" }
+jellycache = { path = "../cache" }
tokio = { version = "1.43.0", features = ["io-util"] }
anyhow = "1.0.95"
diff --git a/remuxer/src/metadata.rs b/remuxer/src/metadata.rs
index c2931f4..ddcf4c0 100644
--- a/remuxer/src/metadata.rs
+++ b/remuxer/src/metadata.rs
@@ -10,11 +10,7 @@ use ebml_struct::{
matroska::*,
read::{EbmlReadExt, TagRead},
};
-use jellybase::{
- assetfed::AssetInner,
- cache::{cache_file, cache_memory},
- common::Asset,
-};
+use jellycache::{cache_file, cache_memory, CachePath};
use log::{info, warn};
use std::{
fs::File,
@@ -29,7 +25,7 @@ pub use ebml_struct::matroska::TrackEntry as MatroskaTrackEntry;
pub struct MatroskaMetadata {
pub info: Option<Info>,
pub tracks: Option<Tracks>,
- pub cover: Option<Asset>,
+ pub cover: Option<CachePath>,
pub chapters: Option<Chapters>,
pub tags: Option<Tags>,
pub infojson: Option<Vec<u8>>,
@@ -83,17 +79,10 @@ pub fn matroska_metadata(path: &Path) -> Result<Arc<MatroskaMetadata>> {
}
"cover.webp" | "cover.png" | "cover.jpg" | "cover.jpeg"
| "cover.avif" => {
- cover = Some(
- AssetInner::Cache(cache_file(
- "att-cover",
- path,
- move |mut file| {
- file.write_all(&f.data)?;
- Ok(())
- },
- )?)
- .ser(),
- )
+ cover = Some(cache_file("att-cover", path, move |mut file| {
+ file.write_all(&f.data)?;
+ Ok(())
+ })?)
}
_ => (),
}
diff --git a/remuxer/src/remux.rs b/remuxer/src/remux.rs
index a44c58b..9e6d4b5 100644
--- a/remuxer/src/remux.rs
+++ b/remuxer/src/remux.rs
@@ -3,7 +3,6 @@
which is licensed under the GNU Affero General Public License (version 3); see /COPYING.
Copyright (C) 2025 metamuffin <metamuffin.org>
*/
-use jellybase::common::Node;
use std::{io::Write, ops::Range, path::PathBuf};
// struct ClusterLayout {
@@ -17,7 +16,6 @@ pub fn remux_stream_into(
_writer: impl Write,
_range: Range<usize>,
_path_base: PathBuf,
- _item: &Node,
_selection: Vec<usize>,
_webm: bool,
) -> anyhow::Result<()> {
diff --git a/remuxer/src/seek_index.rs b/remuxer/src/seek_index.rs
index c09fdf9..a1a97ef 100644
--- a/remuxer/src/seek_index.rs
+++ b/remuxer/src/seek_index.rs
@@ -5,7 +5,7 @@
*/
use anyhow::{Context, Result};
use bincode::{Decode, Encode};
-use jellybase::cache::cache_memory;
+use jellycache::cache_memory;
use jellymatroska::{
block::Block,
read::EbmlReader,
diff --git a/server/Cargo.toml b/server/Cargo.toml
index a709a98..17aeeb4 100644
--- a/server/Cargo.toml
+++ b/server/Cargo.toml
@@ -9,6 +9,7 @@ jellybase = { path = "../base" }
jellystream = { path = "../stream" }
jellytranscoder = { path = "../transcoder" }
jellyimport = { path = "../import" }
+jellycache = { path = "../cache" }
serde = { version = "1.0.217", features = ["derive", "rc"] }
bincode = { version = "2.0.0-rc.3", features = ["serde", "derive"] }
diff --git a/server/src/ui/assets.rs b/server/src/ui/assets.rs
index ce2a8e2..69f6bbc 100644
--- a/server/src/ui/assets.rs
+++ b/server/src/ui/assets.rs
@@ -7,9 +7,8 @@ use super::{error::MyResult, CacheControlFile};
use crate::logic::session::Session;
use anyhow::{anyhow, bail, Context};
use base64::Engine;
-use jellybase::{
- assetfed::AssetInner, cache::async_cache_file, database::Database, federation::Federation, CONF,
-};
+use jellybase::{assetfed::AssetInner, database::Database, federation::Federation, CONF};
+use jellycache::async_cache_file;
use jellycommon::{LocalTrack, NodeID, PeopleGroup, SourceTrackKind, TrackSource};
use log::info;
use rocket::{get, http::ContentType, response::Redirect, State};
diff --git a/stream/Cargo.toml b/stream/Cargo.toml
index 21d1650..ad6f098 100644
--- a/stream/Cargo.toml
+++ b/stream/Cargo.toml
@@ -4,7 +4,8 @@ version = "0.1.0"
edition = "2021"
[dependencies]
-jellybase = { path = "../base", features = ["rocket"] }
+jellycommon = { path = "../common" }
+jellycache = { path = "../cache" }
jellytranscoder = { path = "../transcoder" }
jellyremuxer = { path = "../remuxer" }
jellymatroska = { path = "../matroska" }
@@ -15,3 +16,4 @@ anyhow = { workspace = true }
tokio = { version = "1.43.0", features = ["io-util"] }
tokio-util = { version = "0.7.13", features = ["io", "io-util"] }
serde_json = "1.0.138"
+serde = { version = "1.0.217", features = ["derive"] }
diff --git a/stream/src/fragment.rs b/stream/src/fragment.rs
index 9e8c3bd..205c302 100644
--- a/stream/src/fragment.rs
+++ b/stream/src/fragment.rs
@@ -5,7 +5,7 @@
*/
use crate::{stream_info, SMediaInfo};
use anyhow::{anyhow, bail, Result};
-use jellybase::common::stream::StreamContainer;
+use jellycommon::stream::StreamContainer;
use jellyremuxer::{matroska_to_mpeg4, matroska_to_webm::matroska_to_webm};
use jellytranscoder::fragment::transcode;
use log::warn;
diff --git a/stream/src/fragment_index.rs b/stream/src/fragment_index.rs
index 6fbddc6..0632f0a 100644
--- a/stream/src/fragment_index.rs
+++ b/stream/src/fragment_index.rs
@@ -5,7 +5,7 @@
*/
use crate::{stream_info, SMediaInfo};
use anyhow::{anyhow, Result};
-use jellybase::common::stream::{SegmentNum, TrackNum};
+use jellycommon::stream::{SegmentNum, TrackNum};
use std::sync::Arc;
use tokio::io::{AsyncWriteExt, DuplexStream};
diff --git a/stream/src/hls.rs b/stream/src/hls.rs
index 3dfbf01..0ca7545 100644
--- a/stream/src/hls.rs
+++ b/stream/src/hls.rs
@@ -6,9 +6,7 @@
use crate::{stream_info, SMediaInfo};
use anyhow::{anyhow, Result};
-use jellybase::common::stream::{
- FormatNum, SegmentNum, StreamContainer, StreamSpec, TrackKind, TrackNum,
-};
+use jellycommon::stream::{FormatNum, SegmentNum, StreamContainer, StreamSpec, TrackKind, TrackNum};
use std::{fmt::Write, ops::Range, sync::Arc};
use tokio::{
io::{AsyncWriteExt, DuplexStream},
diff --git a/stream/src/lib.rs b/stream/src/lib.rs
index 4df87ae..ccc5cb9 100644
--- a/stream/src/lib.rs
+++ b/stream/src/lib.rs
@@ -14,17 +14,43 @@ use anyhow::{anyhow, bail, Context, Result};
use fragment::fragment_stream;
use fragment_index::fragment_index_stream;
use hls::{hls_multivariant_stream, hls_supermultivariant_stream, hls_variant_stream};
-use jellybase::common::{
+use jellycommon::{
stream::{StreamContainer, StreamSpec},
Node,
};
-use std::{collections::BTreeSet, io::SeekFrom, ops::Range, path::PathBuf, sync::Arc};
+use serde::{Deserialize, Serialize};
+use std::{
+ collections::BTreeSet,
+ io::SeekFrom,
+ ops::Range,
+ path::PathBuf,
+ sync::{Arc, LazyLock},
+};
use stream_info::{stream_info, write_stream_info};
use tokio::{
fs::File,
io::{duplex, AsyncReadExt, AsyncSeekExt, AsyncWriteExt, DuplexStream},
+ sync::Mutex,
};
+#[rustfmt::skip]
+#[derive(Debug, Deserialize, Serialize, Default)]
+pub struct Config {
+ #[serde(default)] pub offer_avc: bool,
+ #[serde(default)] pub offer_hevc: bool,
+ #[serde(default)] pub offer_vp8: bool,
+ #[serde(default)] pub offer_vp9: bool,
+ #[serde(default)] pub offer_av1: bool,
+}
+
+static CONF: LazyLock<Config> = LazyLock::new(|| {
+ CONF_PRELOAD
+ .blocking_lock()
+ .take()
+ .expect("cache config not preloaded. logic error")
+});
+static CONF_PRELOAD: Mutex<Option<Config>> = Mutex::const_new(None);
+
#[derive(Debug)]
pub struct SMediaInfo {
pub info: Arc<Node>,
diff --git a/stream/src/stream_info.rs b/stream/src/stream_info.rs
index ba6cc98..6f7824e 100644
--- a/stream/src/stream_info.rs
+++ b/stream/src/stream_info.rs
@@ -3,15 +3,11 @@
which is licensed under the GNU Affero General Public License (version 3); see /COPYING.
Copyright (C) 2025 metamuffin <metamuffin.org>
*/
-use crate::SMediaInfo;
+use crate::{SMediaInfo, CONF};
use anyhow::Result;
use ebml_struct::matroska::TrackEntry;
-use jellybase::{
- common::stream::{
- StreamContainer, StreamFormatInfo, StreamInfo, StreamSegmentInfo, StreamTrackInfo,
- TrackKind,
- },
- CONF,
+use jellycommon::stream::{
+ StreamContainer, StreamFormatInfo, StreamInfo, StreamSegmentInfo, StreamTrackInfo, TrackKind,
};
use jellyremuxer::{
metadata::{matroska_metadata, MatroskaMetadata},
@@ -119,11 +115,11 @@ fn stream_formats(t: &TrackEntry, remux_bitrate: f64) -> Vec<StreamFormatInfo> {
// most codecs use chroma subsampling that requires even dims
let h = ((w * sh) / sw) & !1; // clear last bit to ensure even height.
for (cid, enable) in [
- ("V_AV1", CONF.transcoder.offer_av1),
- ("V_VP8", CONF.transcoder.offer_vp8),
- ("V_VP9", CONF.transcoder.offer_vp9),
- ("V_MPEG4/ISO/AVC", CONF.transcoder.offer_avc),
- ("V_MPEGH/ISO/HEVC", CONF.transcoder.offer_hevc),
+ ("V_AV1", CONF.offer_av1),
+ ("V_VP8", CONF.offer_vp8),
+ ("V_VP9", CONF.offer_vp9),
+ ("V_MPEG4/ISO/AVC", CONF.offer_avc),
+ ("V_MPEGH/ISO/HEVC", CONF.offer_hevc),
] {
if enable {
formats.push(StreamFormatInfo {
diff --git a/stream/src/webvtt.rs b/stream/src/webvtt.rs
index e9f0181..c0bc466 100644
--- a/stream/src/webvtt.rs
+++ b/stream/src/webvtt.rs
@@ -4,7 +4,7 @@
Copyright (C) 2025 metamuffin <metamuffin.org>
*/
use anyhow::Result;
-use jellybase::common::{stream::StreamSpec, Node};
+use jellycommon::{stream::StreamSpec, Node};
use std::sync::Arc;
use tokio::io::DuplexStream;
diff --git a/transcoder/Cargo.toml b/transcoder/Cargo.toml
index d0ddf9c..f35b101 100644
--- a/transcoder/Cargo.toml
+++ b/transcoder/Cargo.toml
@@ -4,8 +4,9 @@ version = "0.1.0"
edition = "2021"
[dependencies]
-jellybase = { path = "../base" }
jellyremuxer = { path = "../remuxer" }
+jellycache = { path = "../cache" }
+jellycommon = { path = "../common" }
log = { workspace = true }
# TODO: change this back to crates.io when pr is merged
image = "0.25.5"
@@ -24,3 +25,4 @@ rayon = "1.10.0"
imgref = "1.11.0"
ravif = "0.11.11"
tokio = { workspace = true }
+serde = { version = "1.0.217", features = ["derive"] }
diff --git a/transcoder/src/fragment.rs b/transcoder/src/fragment.rs
index 027e80f..dbb8f6d 100644
--- a/transcoder/src/fragment.rs
+++ b/transcoder/src/fragment.rs
@@ -3,16 +3,10 @@
which is licensed under the GNU Affero General Public License (version 3); see /COPYING.
Copyright (C) 2025 metamuffin <metamuffin.org>
*/
-use crate::LOCAL_VIDEO_TRANSCODING_TASKS;
+use crate::{Config, CONF, LOCAL_VIDEO_TRANSCODING_TASKS};
use anyhow::Result;
-use jellybase::{
- cache::{async_cache_file, CachePath},
- common::{
- config::TranscoderConfig,
- stream::{StreamFormatInfo, TrackKind},
- },
- CONF,
-};
+use jellycache::{async_cache_file, CachePath};
+use jellycommon::stream::{StreamFormatInfo, TrackKind};
use jellyremuxer::metadata::MatroskaTrackEntry;
use log::info;
use std::fmt::Write;
@@ -32,7 +26,7 @@ pub async fn transcode(
input_key: &str,
input: impl FnOnce(ChildStdin),
) -> anyhow::Result<CachePath> {
- let command = transcode_command(kind, orig_metadata, format, &CONF.transcoder).unwrap();
+ let command = transcode_command(kind, orig_metadata, format, &*CONF).unwrap();
async_cache_file("frag-tc", (input_key, &command), async |mut output| {
let _permit = LOCAL_VIDEO_TRANSCODING_TASKS.acquire().await?;
info!("encoding with {command:?}");
@@ -60,7 +54,7 @@ fn transcode_command(
kind: TrackKind,
orig_metadata: &MatroskaTrackEntry,
format: &StreamFormatInfo,
- config: &TranscoderConfig,
+ config: &Config,
) -> Result<String> {
let br = format.bitrate as u64;
let w = format.width.unwrap_or(0);
diff --git a/transcoder/src/image.rs b/transcoder/src/image.rs
index c6e1367..6a7f693 100644
--- a/transcoder/src/image.rs
+++ b/transcoder/src/image.rs
@@ -6,7 +6,7 @@
use crate::LOCAL_IMAGE_TRANSCODING_TASKS;
use anyhow::Context;
use image::imageops::FilterType;
-use jellybase::cache::{async_cache_file, CachePath};
+use jellycache::{async_cache_file, CachePath};
use log::{debug, info};
use rgb::FromSlice;
use std::{
diff --git a/transcoder/src/lib.rs b/transcoder/src/lib.rs
index 1cf5d0d..c49f52c 100644
--- a/transcoder/src/lib.rs
+++ b/transcoder/src/lib.rs
@@ -5,11 +5,35 @@
*/
#![feature(exit_status_error)]
-use tokio::sync::Semaphore;
+use serde::{Deserialize, Serialize};
+use std::sync::LazyLock;
+use tokio::sync::{Mutex, Semaphore};
+
pub mod fragment;
pub mod image;
pub mod subtitles;
pub mod thumbnail;
+#[rustfmt::skip]
+#[derive(Debug, Deserialize, Serialize, Default)]
+pub struct Config {
+ #[serde(default)] pub enable_rkmpp: bool,
+ #[serde(default)] pub enable_rkrga: bool,
+ #[serde(default)] pub use_svtav1: bool,
+ #[serde(default)] pub use_rav1e: bool,
+ pub svtav1_preset: Option<u8>, // 0..=13, high is fast
+ pub rav1e_preset: Option<u8>, // 0..=10
+ pub aom_preset: Option<u8>, // 0..=8, high is fast
+ pub x264_preset: Option<String>,
+}
+
+static CONF: LazyLock<Config> = LazyLock::new(|| {
+ CONF_PRELOAD
+ .blocking_lock()
+ .take()
+ .expect("cache config not preloaded. logic error")
+});
+static CONF_PRELOAD: Mutex<Option<Config>> = Mutex::const_new(None);
+
static LOCAL_IMAGE_TRANSCODING_TASKS: Semaphore = Semaphore::const_new(8);
static LOCAL_VIDEO_TRANSCODING_TASKS: Semaphore = Semaphore::const_new(2);
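
Both jellytranscoder (above) and jellystream now carry their own Config behind the same CONF / CONF_PRELOAD pair: a tokio Mutex parks the deserialized config until the LazyLock is first dereferenced, which takes it or panics with "cache config not preloaded". The code that fills CONF_PRELOAD is not part of this diff, so the standalone sketch below only illustrates the pattern; Settings, SETTINGS, SETTINGS_PRELOAD, and preload are made-up names, not jellything APIs.

    // Standalone illustration of the preload-then-LazyLock pattern used above.
    use std::sync::LazyLock;
    use tokio::sync::Mutex;

    #[derive(Debug, Default)]
    struct Settings {
        offer_av1: bool,
    }

    static SETTINGS_PRELOAD: Mutex<Option<Settings>> = Mutex::const_new(None);
    static SETTINGS: LazyLock<Settings> = LazyLock::new(|| {
        // First deref takes the parked value; panics if preload() was never called.
        SETTINGS_PRELOAD
            .blocking_lock()
            .take()
            .expect("settings not preloaded")
    });

    // Must run before SETTINGS is first dereferenced, and from outside the async
    // runtime (or via spawn_blocking), since blocking_lock panics inside a runtime worker.
    fn preload(s: Settings) {
        *SETTINGS_PRELOAD.blocking_lock() = Some(s);
    }

    fn main() {
        preload(Settings { offer_av1: true });
        assert!(SETTINGS.offer_av1);
    }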
diff --git a/transcoder/src/subtitles.rs b/transcoder/src/subtitles.rs
index 77b423d..d7e7b29 100644
--- a/transcoder/src/subtitles.rs
+++ b/transcoder/src/subtitles.rs
@@ -4,7 +4,7 @@
Copyright (C) 2025 metamuffin <metamuffin.org>
*/
use anyhow::{anyhow, bail, Context};
-use jellybase::common::jhls::SubtitleCue;
+use jellycommon::jhls::SubtitleCue;
use std::fmt::Write;
pub fn parse_subtitles(
diff --git a/transcoder/src/thumbnail.rs b/transcoder/src/thumbnail.rs
index caef397..8cefac3 100644
--- a/transcoder/src/thumbnail.rs
+++ b/transcoder/src/thumbnail.rs
@@ -1,5 +1,5 @@
use crate::LOCAL_IMAGE_TRANSCODING_TASKS;
-use jellybase::cache::{async_cache_file, CachePath};
+use jellycache::{async_cache_file, CachePath};
use log::info;
use std::{path::Path, process::Stdio};
use tokio::{io::copy, process::Command};