aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--common/object/src/lib.rs10
-rw-r--r--common/src/lib.rs3
-rw-r--r--import/src/lib.rs218
-rw-r--r--import/src/plugins/infojson.rs4
-rw-r--r--import/src/plugins/media_info.rs6
-rw-r--r--import/src/plugins/misc.rs72
-rw-r--r--import/src/plugins/mod.rs9
-rw-r--r--import/src/plugins/tags.rs63
-rw-r--r--import/src/plugins/tmdb.rs170
-rw-r--r--import/src/plugins/trakt.rs167
10 files changed, 429 insertions, 293 deletions
diff --git a/common/object/src/lib.rs b/common/object/src/lib.rs
index 290cf91..5637dd6 100644
--- a/common/object/src/lib.rs
+++ b/common/object/src/lib.rs
@@ -17,7 +17,11 @@ pub use path::*;
pub use registry::*;
pub use value::*;
-use std::{collections::HashSet, hash::Hash, marker::PhantomData};
+use std::{
+ collections::{BTreeSet, HashSet},
+ hash::Hash,
+ marker::PhantomData,
+};
#[repr(transparent)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
@@ -150,7 +154,7 @@ impl<'a> Object<'a> {
}
}
#[must_use]
- pub fn extend<T: Value<'a> + Hash + Eq + PartialEq>(
+ pub fn extend<T: Value<'a> + Eq + Ord>(
&self,
tag: TypedTag<T>,
values: impl IntoIterator<Item = T>,
@@ -160,7 +164,7 @@ impl<'a> Object<'a> {
&self
.iter(tag)
.chain(values)
- .collect::<HashSet<_>>()
+ .collect::<BTreeSet<_>>()
.into_iter()
.collect::<Vec<_>>(),
)
diff --git a/common/src/lib.rs b/common/src/lib.rs
index cc11c58..e5531f4 100644
--- a/common/src/lib.rs
+++ b/common/src/lib.rs
@@ -19,7 +19,7 @@ pub static TAGREG: LazyLock<Registry> = LazyLock::new(|| {
});
fields! {
- // Tag counter: 38
+ // Tag counter: 39
NO_KIND: Tag = 1 "kind";
NO_TITLE: &str = 2 "title";
@@ -40,6 +40,7 @@ fields! {
NO_VISIBILITY: Tag = 14 "visibility";
NO_STORAGE_SIZE: u64 = 15 "storage_size";
NO_CREDIT: Object = 33 "credit"; // multi
+ NO_SLUG: &str = 37 "slug";
CR_NODE: u64 = 34 "node";
CR_KIND: Tag = 35 "kind";
diff --git a/import/src/lib.rs b/import/src/lib.rs
index 2d8d987..56da625 100644
--- a/import/src/lib.rs
+++ b/import/src/lib.rs
@@ -14,7 +14,10 @@ use crate::{
};
use anyhow::{Context, Result, anyhow};
use jellycache::{HashKey, cache_memory, cache_store};
-use jellycommon::jellyobject::{self, ObjectBuffer, Tag, TypedTag};
+use jellycommon::{
+ jellyobject::{self, ObjectBuffer, Path as TagPath, fields},
+ *,
+};
use jellydb::{
backends::Database,
query::{Filter, Query, Sort},
@@ -33,14 +36,11 @@ use serde::{Deserialize, Serialize};
use std::{
collections::HashSet,
fs::{File, read_to_string},
- hash::{DefaultHasher, Hash},
- marker::PhantomData,
mem::swap,
path::{Path, PathBuf},
sync::{Arc, LazyLock, Mutex},
time::UNIX_EPOCH,
};
-use std::{fmt::Display, hash::Hasher};
use tokio::{runtime::Handle, sync::Semaphore, task::spawn_blocking};
#[rustfmt::skip]
@@ -83,28 +83,21 @@ pub fn is_importing() -> bool {
IMPORT_SEM.available_permits() == 0
}
-#[derive(Debug, Clone)]
-pub struct NodeID(pub String);
-impl Display for NodeID {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- f.write_str(&self.0)
- }
+#[derive(Clone)]
+pub struct DatabaseTables {
+ pub db: Arc<dyn Database>,
+ pub nodes: Arc<Table>,
+ pub import_meta: Arc<Table>,
}
-const NODE_ID: TypedTag<&str> = TypedTag(Tag(0x8123), PhantomData);
-
-#[derive(Clone)]
-struct DatabaseTables {
- db: Arc<dyn Database>,
- nodes: Arc<Table>,
+fields! {
+ IM_PATH: &str = 0x11001 "path";
+ IM_MTIME: u64 = 0x11002 "mtime";
}
-fn node_id_query(node: &NodeID) -> Query {
+fn node_slug_query(slug: &str) -> Query {
Query {
- filter: Filter::Match(
- jellyobject::Path(vec![NODE_ID.0]),
- node.0.as_bytes().to_vec(),
- ),
+ filter: Filter::Match(jellyobject::Path(vec![NO_SLUG.0]), slug.as_bytes().to_vec()),
sort: Sort::None,
}
}
@@ -123,33 +116,35 @@ impl DatabaseTables {
})?;
Ok(())
}
- pub fn update_node_by_nodeid(
+ pub fn update_node_slug(
&self,
- node: NodeID,
+ slug: &str,
mut update: impl FnMut(ObjectBuffer) -> ObjectBuffer,
- ) -> Result<()> {
+ ) -> Result<RowNum> {
+ let mut row = 0;
self.db.write_transaction(&mut |txn| {
- let node = match self.nodes.query_single(txn, node_id_query(&node)) {
+ row = match self.nodes.query_single(txn, node_slug_query(slug)) {
Some(r) => r,
None => self
.nodes
- .insert(txn, ObjectBuffer::new(&mut [(NODE_ID.0, &node.0.as_str())]))?,
+ .insert(txn, ObjectBuffer::new(&mut [(NO_SLUG.0, &slug)]))?,
};
- let ob_before = self.nodes.get(txn, node)?.unwrap();
- let mut hash_before = DefaultHasher::new();
- ob_before.hash(&mut hash_before);
-
- let ob_after = update(ob_before);
-
- let mut hash_after = DefaultHasher::new();
- ob_after.hash(&mut hash_after);
- if hash_before.finish() != hash_after.finish() {
- self.nodes.update(txn, node, ob_after)?;
- }
+ let node = self.nodes.get(txn, row)?.unwrap();
+ let node = update(node);
+ let node = node.as_object().insert(NO_SLUG, slug);
+ self.nodes.update(txn, row, node)?;
Ok(())
})?;
- Ok(())
+ Ok(row)
+ }
+ pub fn get_node(&self, node: RowNum) -> Result<Option<ObjectBuffer>> {
+ let mut buf = None;
+ self.db.read_transaction(&mut |txn| {
+ buf = self.nodes.get(txn, node)?;
+ Ok(())
+ })?;
+ Ok(buf)
}
}
@@ -237,15 +232,14 @@ fn import_traverse(
path: &Path,
dba: &DatabaseTables,
incremental: bool,
- parent: Option<NodeID>,
+ parent: Option<RowNum>,
mut iflags: InheritedFlags,
- out: &Mutex<Vec<(PathBuf, NodeID, InheritedFlags)>>,
+ out: &Mutex<Vec<(PathBuf, RowNum, InheritedFlags)>>,
) -> Result<()> {
if path.is_dir() {
reporting::set_task(format!("indexing {path:?}"));
let slug = get_node_slug(path).unwrap();
- let node = NodeID::from_slug(&slug);
// Some flags need to be applied immediately because they are inherited
if let Ok(content) = read_to_string(path.join("flags")) {
@@ -259,18 +253,23 @@ fn import_traverse(
}
}
- dba.update_node_by_nodeid(node, |n| {
- if parent != NodeID::MIN {
- n.parents.insert(parent);
+ let row = dba.update_node_slug(&slug, |mut node| {
+ if let Some(parent) = parent {
+ node = node.as_object().extend(NO_PARENT, [parent]);
}
- n.slug = slug;
- n.visibility = iflags.visibility;
+ if iflags.hidden {
+ node = node.as_object().insert(NO_VISIBILITY, VISI_HIDDEN);
+ }
+ if iflags.reduced {
+ node = node.as_object().insert(NO_VISIBILITY, VISI_REDUCED);
+ }
+ node
})?;
path.read_dir()?.par_bridge().try_for_each(|e| {
let path = e?.path();
reporting::catch(
- import_traverse(&path, dba, incremental, node, iflags, out)
+ import_traverse(&path, dba, incremental, Some(row), iflags, out)
.context(anyhow!("index {:?}", path.file_name().unwrap())),
);
anyhow::Ok(())
@@ -278,15 +277,12 @@ fn import_traverse(
return Ok(());
}
- if path.is_file() {
- let meta = path.metadata()?;
- let mtime = meta.modified()?.duration_since(UNIX_EPOCH)?.as_secs();
-
+ if path.is_file()
+ && let Some(parent) = parent
+ {
if incremental {
- if let Some(last_mtime) = dba.get_import_file_mtime(path)? {
- if last_mtime >= mtime {
- return Ok(());
- }
+ if compare_mtime(dba, path)? {
+ return Ok(());
}
}
@@ -303,10 +299,10 @@ fn import_traverse(
fn import_file(
dba: &DatabaseTables,
rt: &Handle,
- pending_nodes: &Mutex<HashSet<NodeID>>,
+ pending_nodes: &Mutex<HashSet<RowNum>>,
plugins: &[Box<dyn ImportPlugin>],
path: &Path,
- parent: NodeID,
+ parent: RowNum,
iflags: InheritedFlags,
) {
let mut all_ok = true;
@@ -341,17 +337,20 @@ fn import_file(
if filename.ends_with("mkv") || filename.ends_with("mka") || filename.ends_with("mks") {
let slug = get_node_slug(path).unwrap();
- let node = NodeID::from_slug(&slug);
- pending_nodes.lock().unwrap().insert(node);
- all_ok &= reporting::catch(db.update_node_init(node, |node| {
- node.slug = slug;
- if parent != NodeID::MIN {
- node.parents.insert(parent);
+ let Some(row) = reporting::catch(dba.update_node_slug(&slug, |mut node| {
+ node = node.as_object().extend(NO_PARENT, [parent]);
+ if iflags.hidden {
+ node = node.as_object().insert(NO_VISIBILITY, VISI_HIDDEN);
}
- node.visibility = iflags.visibility;
- }))
- .is_some();
+ if iflags.reduced {
+ node = node.as_object().insert(NO_VISIBILITY, VISI_REDUCED);
+ }
+ node
+ })) else {
+ return;
+ };
+ pending_nodes.lock().unwrap().insert(row);
let flags = filename
.split(".")
@@ -368,7 +367,7 @@ fn import_file(
if inf.handle_instruction {
reporting::set_task(format!("{}(inst): {path:?}", inf.name));
all_ok &= reporting::catch(
- p.instruction(&ct, node, line)
+ p.instruction(&ct, row, line)
.context(anyhow!("{}(inst) {path:?}", inf.name)),
)
.is_some();
@@ -387,7 +386,7 @@ fn import_file(
if inf.handle_media {
reporting::set_task(format!("{}(media): {path:?}", inf.name));
all_ok &= reporting::catch(
- p.media(&ct, node, path, &seg)
+ p.media(&ct, row, path, &seg)
.context(anyhow!("{}(media) {path:?}", inf.name)),
)
.is_some();
@@ -408,7 +407,7 @@ fn import_file(
}
if all_ok {
- reporting::catch(update_mtime(db, path).context("updating mtime"));
+ reporting::catch(update_mtime(dba, path).context("updating mtime"));
}
}
@@ -416,26 +415,22 @@ fn process_node(
dba: &DatabaseTables,
rt: &Handle,
plugins: &[Box<dyn ImportPlugin>],
- pending_nodes: &Mutex<HashSet<NodeID>>,
- node: NodeID,
+ pending_nodes: &Mutex<HashSet<RowNum>>,
+ node: RowNum,
) {
- let Some(data) = reporting::catch(
- dba.get_node(node)
- .and_then(|e| e.ok_or(anyhow!("node missing"))),
- ) else {
- return;
- };
- let slug = &data.slug;
+ let mut slug = String::new();
+ reporting::catch(dba.db.read_transaction(&mut |txn| {
+ let no = dba.nodes.get(txn, node)?.unwrap();
+ if let Some(s) = no.as_object().get(NO_SLUG) {
+ slug = s.to_owned();
+ }
+ Ok(())
+ }));
for p in plugins {
let inf = p.info();
if inf.handle_process {
reporting::set_task(format!("{}(proc): {slug}", inf.name));
- let Some(data) = reporting::catch(
- dba.get_node(node)
- .and_then(|e| e.ok_or(anyhow!("node missing"))),
- ) else {
- return;
- };
+
reporting::catch(
p.process(
&ImportContext {
@@ -445,7 +440,6 @@ fn process_node(
pending_nodes,
},
node,
- &data,
)
.context(anyhow!("{}(proc) {slug}", inf.name)),
);
@@ -453,10 +447,60 @@ fn process_node(
}
}
-fn update_mtime(db: DatabaseTables, path: &Path) -> Result<()> {
+fn compare_mtime(dba: &DatabaseTables, path: &Path) -> Result<bool> {
let meta = path.metadata()?;
let mtime = meta.modified()?.duration_since(UNIX_EPOCH)?.as_secs();
- db.set_import_file_mtime(path, mtime)?;
+ let mut was_changed = false;
+ dba.db.read_transaction(&mut |txn| {
+ match dba.import_meta.query_single(
+ txn,
+ Query {
+ filter: Filter::Match(
+ TagPath(vec![IM_PATH.0]),
+ path.as_os_str().as_encoded_bytes().to_vec(),
+ ),
+ sort: Sort::None,
+ },
+ ) {
+ None => was_changed = true,
+ Some(row) => {
+ let meta = dba.import_meta.get(txn, row)?.unwrap();
+ let prev_mtime = meta.as_object().get(IM_MTIME).unwrap_or_default();
+ was_changed = mtime > prev_mtime
+ }
+ }
+ Ok(())
+ })?;
+ Ok(was_changed)
+}
+
+fn update_mtime(dba: &DatabaseTables, path: &Path) -> Result<()> {
+ let meta = path.metadata()?;
+ let mtime = meta.modified()?.duration_since(UNIX_EPOCH)?.as_secs();
+ dba.db.write_transaction(&mut |txn| {
+ let row = match dba.import_meta.query_single(
+ txn,
+ Query {
+ filter: Filter::Match(
+ TagPath(vec![IM_PATH.0]),
+ path.as_os_str().as_encoded_bytes().to_vec(),
+ ),
+ sort: Sort::None,
+ },
+ ) {
+ Some(row) => row,
+ None => dba.import_meta.insert(
+ txn,
+ ObjectBuffer::new(&mut [(IM_PATH.0, &path.as_os_str().as_encoded_bytes())]),
+ )?,
+ };
+
+ let mut ob = dba.import_meta.get(txn, row)?.unwrap();
+ ob = ob.as_object().insert(IM_MTIME, mtime);
+ dba.import_meta.update(txn, row, ob)?;
+
+ Ok(())
+ })?;
Ok(())
}
diff --git a/import/src/plugins/infojson.rs b/import/src/plugins/infojson.rs
index 72dd1ab..3e6bdb8 100644
--- a/import/src/plugins/infojson.rs
+++ b/import/src/plugins/infojson.rs
@@ -205,9 +205,7 @@ impl ImportPlugin for Infojson {
}
ct.dba.nodes.update(txn, parent, node)
- });
-
- Ok(())
+ })
}
fn media(&self, ct: &ImportContext, row: RowNum, _path: &Path, seg: &Segment) -> Result<()> {
diff --git a/import/src/plugins/media_info.rs b/import/src/plugins/media_info.rs
index 250e417..1730897 100644
--- a/import/src/plugins/media_info.rs
+++ b/import/src/plugins/media_info.rs
@@ -121,12 +121,10 @@ impl ImportPlugin for MediaInfo {
);
node = node.as_object().insert(NO_STORAGE_SIZE, size);
- ct.dba.nodes.update(txn, row, node);
+ ct.dba.nodes.update(txn, row, node)?;
Ok(())
- });
-
- Ok(())
+ })
}
}
diff --git a/import/src/plugins/misc.rs b/import/src/plugins/misc.rs
index 97bb6a5..ff08d87 100644
--- a/import/src/plugins/misc.rs
+++ b/import/src/plugins/misc.rs
@@ -11,12 +11,7 @@ use jellydb::table::RowNum;
use jellyremuxer::matroska::{AttachedFile, Segment};
use log::info;
use regex::Regex;
-use std::{
- fs::{File, read_to_string},
- io::Read,
- path::Path,
- sync::LazyLock,
-};
+use std::{fs::File, io::Read, path::Path, sync::LazyLock};
pub struct ImageFiles;
impl ImportPlugin for ImageFiles {
@@ -52,7 +47,7 @@ impl ImportPlugin for ImageFiles {
node = node
.as_object()
.update(NO_PICTURES, |picts| picts.insert(slot, &asset));
- ct.dba.nodes.update(txn, row, node);
+ ct.dba.nodes.update(txn, row, node)?;
Ok(())
})?;
Ok(())
@@ -102,12 +97,12 @@ impl ImportPlugin for General {
fn instruction(&self, ct: &ImportContext, node: RowNum, line: &str) -> Result<()> {
if line == "hidden" {
ct.dba.update_node(node, |node| {
- node.visibility = node.visibility.min(Visibility::Hidden);
+ node.as_object().insert(NO_VISIBILITY, VISI_HIDDEN)
})?;
}
if line == "reduced" {
- ct.db.update_node_init(node, |node| {
- node.visibility = node.visibility.min(Visibility::Reduced);
+ ct.dba.update_node(node, |node| {
+ node.as_object().insert(NO_VISIBILITY, VISI_REDUCED)
})?;
}
if let Some(kind) = line.strip_prefix("kind-").or(line.strip_prefix("kind=")) {
@@ -124,20 +119,17 @@ impl ImportPlugin for General {
"episode" => KIND_EPISODE,
_ => bail!("unknown node kind"),
};
- ct.db.update_node_init(node, |node| {
- node.kind = kind;
- })?;
+ ct.dba
+ .update_node(node, |node| node.as_object().insert(NO_KIND, kind))?;
}
if let Some(title) = line.strip_prefix("title=") {
- ct.db.update_node_init(node, |node| {
- node.title = Some(title.to_owned());
- })?;
+ ct.dba
+ .update_node(node, |node| node.as_object().insert(NO_TITLE, title))?;
}
if let Some(index) = line.strip_prefix("index=") {
let index = index.parse().context("parse index")?;
- ct.db.update_node_init(node, |node| {
- node.index = Some(index);
- })?;
+ ct.dba
+ .update_node(node, |node| node.as_object().insert(NO_INDEX, index))?;
}
Ok(())
}
@@ -153,20 +145,21 @@ impl ImportPlugin for Children {
}
}
fn file(&self, ct: &ImportContext, parent: RowNum, path: &Path) -> Result<()> {
- let filename = path.file_name().unwrap().to_string_lossy();
- if filename.as_ref() == "children" {
- info!("import children at {path:?}");
- for line in read_to_string(path)?.lines() {
- let line = line.trim();
- if line.starts_with("#") || line.is_empty() {
- continue;
- }
- ct.db.update_node_init(NodeID::from_slug(line), |n| {
- n.slug = line.to_owned();
- n.parents.insert(parent);
- })?;
- }
- }
+ // TODO use idents
+ // let filename = path.file_name().unwrap().to_string_lossy();
+ // if filename.as_ref() == "children" {
+ // info!("import children at {path:?}");
+ // for line in read_to_string(path)?.lines() {
+ // let line = line.trim();
+ // if line.starts_with("#") || line.is_empty() {
+ // continue;
+ // }
+ // ct.db.update_node_init(NodeID::from_slug(line), |n| {
+ // n.slug = line.to_owned();
+ // n.parents.insert(parent);
+ // })?;
+ // }
+ // }
Ok(())
}
}
@@ -188,16 +181,17 @@ impl ImportPlugin for EpisodeIndex {
if let Some(cap) = RE_EPISODE_FILENAME.captures(&filename) {
if let Some(episode) = cap.name("episode").map(|m| m.as_str()) {
let season = cap.name("season").map(|m| m.as_str());
- let episode = episode.parse::<usize>().context("parse episode num")?;
+ let episode = episode.parse::<u64>().context("parse episode num")?;
let season = season
.unwrap_or("1")
- .parse::<usize>()
+ .parse::<u64>()
.context("parse season num")?;
- ct.db.update_node_init(node, |node| {
- node.kind = NodeKind::Episode;
- node.index = Some(episode);
- node.season_index = Some(season);
+ ct.dba.update_node(node, |mut node| {
+ node = node.as_object().insert(NO_SEASON_INDEX, season);
+ node = node.as_object().insert(NO_INDEX, episode);
+ node = node.as_object().insert(NO_KIND, KIND_EPISODE);
+ node
})?;
}
}
diff --git a/import/src/plugins/mod.rs b/import/src/plugins/mod.rs
index 1402cf8..91be437 100644
--- a/import/src/plugins/mod.rs
+++ b/import/src/plugins/mod.rs
@@ -15,9 +15,8 @@ pub mod vgmdb;
pub mod wikidata;
pub mod wikimedia_commons;
-use crate::{ApiSecrets, DatabaseTables, InheritedFlags, NodeID};
+use crate::{ApiSecrets, DatabaseTables, InheritedFlags};
use anyhow::Result;
-use jellycommon::jellyobject::Object;
use jellydb::table::RowNum;
use jellyremuxer::matroska::Segment;
use std::{collections::HashSet, path::Path, sync::Mutex};
@@ -27,7 +26,7 @@ pub struct ImportContext<'a> {
pub dba: &'a DatabaseTables,
pub rt: &'a Handle,
pub iflags: InheritedFlags,
- pub pending_nodes: &'a Mutex<HashSet<NodeID>>,
+ pub pending_nodes: &'a Mutex<HashSet<RowNum>>,
}
#[derive(Default, Clone, Copy)]
@@ -53,8 +52,8 @@ pub trait ImportPlugin: Send + Sync {
let _ = (ct, node, line);
Ok(())
}
- fn process(&self, ct: &ImportContext, node: RowNum, data: Object<'_>) -> Result<()> {
- let _ = (ct, node, data);
+ fn process(&self, ct: &ImportContext, node: RowNum) -> Result<()> {
+ let _ = (ct, node);
Ok(())
}
}
diff --git a/import/src/plugins/tags.rs b/import/src/plugins/tags.rs
index 2257760..bfafbe8 100644
--- a/import/src/plugins/tags.rs
+++ b/import/src/plugins/tags.rs
@@ -4,8 +4,10 @@
Copyright (C) 2026 metamuffin <metamuffin.org>
*/
-use crate::{NodeID, plugins::{ImportContext, ImportPlugin, PluginInfo}};
+use crate::plugins::{ImportContext, ImportPlugin, PluginInfo};
use anyhow::Result;
+use jellycommon::*;
+use jellydb::table::RowNum;
use jellyremuxer::matroska::Segment;
use std::{collections::HashMap, path::Path};
@@ -18,7 +20,7 @@ impl ImportPlugin for Tags {
..Default::default()
}
}
- fn media(&self, ct: &ImportContext, node: NodeID, _path: &Path, seg: &Segment) -> Result<()> {
+ fn media(&self, ct: &ImportContext, node: RowNum, _path: &Path, seg: &Segment) -> Result<()> {
let tags = seg
.tags
.first()
@@ -31,35 +33,44 @@ impl ImportPlugin for Tags {
})
.unwrap_or_default();
- ct.dba.update_node_by_nodeid(node, |node| {
- node.title = seg.info.title.clone();
- for (key, value) in tags {
+ ct.dba.update_node(node, |mut node| {
+ if let Some(title) = &seg.info.title {
+ node = node.as_object().insert(NO_TITLE, title);
+ }
+ for (key, value) in &tags {
match key.as_str() {
- "DESCRIPTION" => node.description = Some(value),
- "SYNOPSIS" => node.description = Some(value),
- "COMMENT" => node.tagline = Some(value),
+ "DESCRIPTION" | "SYNOPSIS" => {
+ node = node.as_object().insert(NO_DESCRIPTION, &value)
+ }
+ "COMMENT" => node = node.as_object().insert(NO_TAGLINE, &value),
"CONTENT_TYPE" => {
- node.kind = match value.to_lowercase().trim() {
- "movie" | "documentary" | "film" => NodeKind::Movie,
- "music" | "recording" => NodeKind::Music,
+ node = node.as_object().insert(
+ NO_KIND,
+ match value.to_lowercase().trim() {
+ "movie" | "documentary" | "film" => KIND_MOVIE,
+ "music" | "recording" => KIND_MUSIC,
+ _ => continue,
+ },
+ );
+ }
+ _ => {
+ let idty = match key.as_str() {
+ "MUSICBRAINZ_TRACKID" => IDENT_MUSICBRAINZ_RECORDING,
+ "MUSICBRAINZ_ARTISTID" => IDENT_MUSICBRAINZ_ARTIST,
+ "MUSICBRAINZ_ALBUMID" => IDENT_MUSICBRAINZ_RELEASE,
+ "MUSICBRAINZ_ALBUMARTISTID" => continue,
+ "MUSICBRAINZ_RELEASEGROUPID" => IDENT_MUSICBRAINZ_RELEASE_GROUP,
+ "ISRC" => IDENT_ISRC,
+ "BARCODE" => IDENT_BARCODE,
_ => continue,
- }
+ };
+ node = node
+ .as_object()
+ .update(NO_IDENTIFIERS, |idents| idents.insert(idty, &value));
}
- _ => node.identifiers.extend(Some(match key.as_str() {
- "MUSICBRAINZ_TRACKID" => (IdentifierType::MusicbrainzRecording, value),
- "MUSICBRAINZ_ARTISTID" => (IdentifierType::MusicbrainzArtist, value),
- "MUSICBRAINZ_ALBUMID" => (IdentifierType::MusicbrainzRelease, value),
- "MUSICBRAINZ_ALBUMARTISTID" => continue,
- "MUSICBRAINZ_RELEASEGROUPID" => {
- (IdentifierType::MusicbrainzReleaseGroup, value)
- }
- "ISRC" => (IdentifierType::Isrc, value),
- "BARCODE" => (IdentifierType::Barcode, value),
- _ => continue,
- })),
}
}
- })?;
- Ok(())
+ node
+ })
}
}
diff --git a/import/src/plugins/tmdb.rs b/import/src/plugins/tmdb.rs
index 206781b..6b70d46 100644
--- a/import/src/plugins/tmdb.rs
+++ b/import/src/plugins/tmdb.rs
@@ -4,12 +4,14 @@
Copyright (C) 2026 metamuffin <metamuffin.org>
*/
use crate::{
- NodeID, USER_AGENT,
+ USER_AGENT,
plugins::{ImportContext, ImportPlugin, PluginInfo},
};
use anyhow::{Context, Result, anyhow, bail};
+use chrono::{Utc, format::Parsed};
use jellycache::{EscapeKey, HashKey, cache_memory, cache_store};
-use jellycommon::jellyobject::Object;
+use jellycommon::*;
+use jellydb::table::RowNum;
use log::info;
use reqwest::{
Client, ClientBuilder,
@@ -107,7 +109,7 @@ impl Tmdb {
})
.context("tmdb person images")
}
- pub fn image(&self, path: &str, rt: &Handle) -> Result<Asset> {
+ pub fn image(&self, path: &str, rt: &Handle) -> Result<String> {
cache_store(
format!("ext/tmdb/image/{}.image", EscapeKey(path)),
move || {
@@ -126,14 +128,13 @@ impl Tmdb {
},
)
.context("tmdb image download")
- .map(Asset)
}
pub fn episode_details(
&self,
series_id: u64,
- season: usize,
- episode: usize,
+ season: u64,
+ episode: u64,
rt: &Handle,
) -> Result<Arc<TmdbEpisode>> {
cache_memory(&format!("ext/tmdb/episode-details/{series_id}-S{season}-E{episode}.json"), move || {
@@ -161,37 +162,46 @@ impl ImportPlugin for Tmdb {
..Default::default()
}
}
- fn process(&self, ct: &ImportContext, node: NodeID, data: Object) -> Result<()> {
- self.process_primary(ct, node, data)?;
- self.process_episode(ct, node, data)?;
+ fn process(&self, ct: &ImportContext, node: RowNum) -> Result<()> {
+ self.process_primary(ct, node)?;
+ self.process_episode(ct, node)?;
Ok(())
}
}
impl Tmdb {
- fn process_primary(&self, ct: &ImportContext, node: NodeID, data: Object) -> Result<()> {
- let (tmdb_kind, tmdb_id): (_, u64) =
- if let Some(id) = data.identifiers.get(&IdentifierType::TmdbSeries) {
- (TmdbKind::Tv, id.parse()?)
- } else if let Some(id) = data.identifiers.get(&IdentifierType::TmdbMovie) {
- (TmdbKind::Movie, id.parse()?)
- } else {
- return Ok(());
- };
+ fn process_primary(&self, ct: &ImportContext, node: RowNum) -> Result<()> {
+ let data = ct.dba.get_node(node)?.unwrap();
+ let data = data.as_object();
+
+ let (tmdb_kind, tmdb_id): (_, u64) = if let Some(id) = data
+ .get(NO_IDENTIFIERS)
+ .unwrap_or_default()
+ .get(IDENT_TMDB_SERIES)
+ {
+ (TmdbKind::Tv, id.parse()?)
+ } else if let Some(id) = data
+ .get(NO_IDENTIFIERS)
+ .unwrap_or_default()
+ .get(IDENT_TMDB_MOVIE)
+ {
+ (TmdbKind::Movie, id.parse()?)
+ } else {
+ return Ok(());
+ };
let details = self.details(tmdb_kind, tmdb_id, ct.rt)?;
- let mut images = Vec::new();
- if let Some(path) = &details.backdrop_path {
- images.push((
- PictureSlot::Backdrop,
- self.image(path, ct.rt).context("backdrop image")?,
- ));
- }
- if let Some(path) = &details.poster_path {
- images.push((
- PictureSlot::Cover,
- self.image(path, ct.rt).context("poster image")?,
- ));
- }
+ let backdrop = details
+ .backdrop_path
+ .as_ref()
+ .map(|path| self.image(&path, ct.rt))
+ .transpose()
+ .context("backdrop image")?;
+ let poster = details
+ .poster_path
+ .as_ref()
+ .map(|path| self.image(&path, ct.rt))
+ .transpose()
+ .context("poster image")?;
let release_date = details
.release_date
@@ -200,46 +210,88 @@ impl Tmdb {
.transpose()?
.flatten();
- ct.dba.update_node_by_nodeid(node, |node| {
- node.title = details.title.clone().or(node.title.clone());
- node.tagline = details.tagline.clone().or(node.tagline.clone());
- node.description = Some(details.overview.clone());
- node.ratings.insert(RatingType::Tmdb, details.vote_average);
- node.pictures.extend(images);
- node.release_date = release_date.or(node.release_date);
+ ct.dba.update_node(node, |mut node| {
+ if let Some(title) = &details.title {
+ node = node.as_object().insert(NO_TITLE, &title);
+ }
+ if let Some(tagline) = &details.tagline {
+ node = node.as_object().insert(NO_TAGLINE, &tagline);
+ }
+ node = node.as_object().insert(NO_DESCRIPTION, &details.overview);
+ node = node.as_object().update(NO_RATINGS, |rat| {
+ rat.insert(RTYP_TMDB, details.vote_average)
+ });
+ if let Some(poster) = &poster {
+ node = node
+ .as_object()
+ .update(NO_PICTURES, |rat| rat.insert(PICT_COVER, &poster));
+ }
+ if let Some(backdrop) = &backdrop {
+ node = node
+ .as_object()
+ .update(NO_PICTURES, |rat| rat.insert(PICT_BACKDROP, &backdrop));
+ }
+ if let Some(releasedate) = release_date {
+ node = node.as_object().insert(NO_RELEASEDATE, releasedate);
+ }
+ node
})?;
Ok(())
}
- fn process_episode(&self, ct: &ImportContext, node: NodeID, data: &Node) -> Result<()> {
- let (Some(episode), Some(season)) = (data.index, data.season_index) else {
+ fn process_episode(&self, ct: &ImportContext, node: RowNum) -> Result<()> {
+ let data = ct.dba.get_node(node)?.unwrap();
+ let data = data.as_object();
+
+ let (Some(episode), Some(season)) = (data.get(NO_INDEX), data.get(NO_SEASON_INDEX)) else {
return Ok(());
};
let mut series_id = None;
- for &parent in &data.parents {
- let parent_data = ct.db.get_node(parent)?.ok_or(anyhow!("parent missing"))?;
- if let Some(id) = parent_data.identifiers.get(&IdentifierType::TmdbSeries) {
- series_id = Some(id.parse::<u64>()?);
- break;
+ ct.dba.db.read_transaction(&mut |txn| {
+ for parent in data.iter(NO_PARENT) {
+ let parent_data = ct
+ .dba
+ .nodes
+ .get(txn, parent)?
+ .ok_or(anyhow!("parent missing"))?;
+ if let Some(id) = parent_data
+ .as_object()
+ .get(NO_IDENTIFIERS)
+ .unwrap_or_default()
+ .get(IDENT_TMDB_SERIES)
+ {
+ series_id = Some(id.parse::<u64>()?);
+ break;
+ }
}
- }
+ Ok(())
+ })?;
let Some(series_id) = series_id else {
return Ok(());
};
let details = self.episode_details(series_id, season, episode, ct.rt)?;
- let mut images = Vec::new();
- if let Some(path) = &details.still_path {
- images.push((PictureSlot::Cover, self.image(path, ct.rt)?))
- }
-
+ let cover = details
+ .still_path
+ .as_ref()
+ .map(|path| self.image(&path, ct.rt))
+ .transpose()
+ .context("still image download")?;
let release_date = parse_release_date(&details.air_date)?;
- ct.db.update_node_init(node, |node| {
- node.title = Some(details.name.clone());
- node.description = Some(details.overview.clone());
- node.release_date = release_date.or(node.release_date);
- node.ratings.insert(RatingType::Tmdb, details.vote_average);
- node.pictures.extend(images);
- })?;
- Ok(())
+ ct.dba.update_node(node, |mut node| {
+ node = node.as_object().insert(NO_TITLE, &details.name);
+ node = node.as_object().insert(NO_DESCRIPTION, &details.overview);
+ if let Some(release_date) = release_date {
+ node = node.as_object().insert(NO_RELEASEDATE, release_date)
+ }
+ node = node.as_object().update(NO_RATINGS, |rat| {
+ rat.insert(RTYP_TMDB, details.vote_average)
+ });
+ if let Some(cover) = &cover {
+ node = node
+ .as_object()
+ .update(NO_PICTURES, |picts| picts.insert(PICT_COVER, &cover));
+ }
+ node
+ })
}
}
diff --git a/import/src/plugins/trakt.rs b/import/src/plugins/trakt.rs
index 5aee881..7530449 100644
--- a/import/src/plugins/trakt.rs
+++ b/import/src/plugins/trakt.rs
@@ -9,7 +9,7 @@ use crate::{
};
use anyhow::{Context, Result, anyhow, bail};
use jellycache::{HashKey, cache_memory};
-use jellycommon::jellyobject::{Object, Tag};
+use jellycommon::{jellyobject::Tag, *};
use jellydb::table::RowNum;
use log::info;
use reqwest::{
@@ -119,7 +119,7 @@ impl Trakt {
pub fn show_season_episodes(
&self,
id: u64,
- season: usize,
+ season: u64,
rt: &Handle,
) -> Result<Arc<Vec<TraktEpisode>>> {
cache_memory(
@@ -140,7 +140,7 @@ impl Trakt {
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
pub struct TraktSeason {
- pub number: usize,
+ pub number: u64,
pub ids: TraktIds,
pub rating: f64,
pub votes: usize,
@@ -154,7 +154,7 @@ pub struct TraktSeason {
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
pub struct TraktEpisode {
pub season: Option<usize>,
- pub number: usize,
+ pub number: u64,
pub number_abs: Option<usize>,
pub ids: TraktIds,
pub rating: Option<f64>,
@@ -251,7 +251,6 @@ pub enum TraktPeopleGroup {
}
impl TraktPeopleGroup {
pub fn as_credit_category(self) -> Tag {
- use jellycommon::*;
match self {
TraktPeopleGroup::Production => CRCAT_PRODUCTION,
TraktPeopleGroup::Art => CRCAT_ART,
@@ -399,42 +398,51 @@ impl ImportPlugin for Trakt {
}
Ok(())
}
- fn process(&self, ct: &ImportContext, node: RowNum, data: Object) -> Result<()> {
- self.process_primary(ct, node.clone(), data)?;
- self.process_episode(ct, node.clone(), data)?;
+ fn process(&self, ct: &ImportContext, node: RowNum) -> Result<()> {
+ self.process_primary(ct, node)?;
+ self.process_episode(ct, node)?;
Ok(())
}
}
impl Trakt {
- fn process_primary(&self, ct: &ImportContext, node: RowNum, data: Object) -> Result<()> {
- let (trakt_kind, trakt_id): (_, u64) =
- if let Some(id) = data.identifiers.get(&IdentifierType::TraktShow) {
- (TraktKind::Show, id.parse()?)
- } else if let Some(id) = data.identifiers.get(&IdentifierType::TraktMovie) {
- (TraktKind::Movie, id.parse()?)
- } else {
- return Ok(());
- };
+ fn process_primary(&self, ct: &ImportContext, node: RowNum) -> Result<()> {
+ let data = ct.dba.get_node(node)?.unwrap();
+ let data = data.as_object();
+ let (trakt_kind, trakt_id): (_, u64) = if let Some(id) = data
+ .get(NO_IDENTIFIERS)
+ .unwrap_or_default()
+ .get(IDENT_TRAKT_SHOW)
+ {
+ (TraktKind::Show, id.parse()?)
+ } else if let Some(id) = data
+ .get(NO_IDENTIFIERS)
+ .unwrap_or_default()
+ .get(IDENT_TRAKT_MOVIE)
+ {
+ (TraktKind::Movie, id.parse()?)
+ } else {
+ return Ok(());
+ };
let details = self.lookup(trakt_kind, trakt_id, ct.rt)?;
- let people = self.people(trakt_kind, trakt_id, ct.rt)?;
+ // let people = self.people(trakt_kind, trakt_id, ct.rt)?;
- let mut people_map = BTreeMap::<CreditCategory, Vec<Appearance>>::new();
- for p in people.cast.iter() {
- people_map
- .entry(CreditCategory::Cast)
- .or_default()
- .push(p.a())
- }
- for (group, people) in people.crew.iter() {
- for p in people {
- people_map
- .entry(group.as_credit_category())
- .or_default()
- .push(p.a())
- }
- }
+ // let mut people_map = BTreeMap::<CreditCategory, Vec<Appearance>>::new();
+ // for p in people.cast.iter() {
+ // people_map
+ // .entry(CreditCategory::Cast)
+ // .or_default()
+ // .push(p.a())
+ // }
+ // for (group, people) in people.crew.iter() {
+ // for p in people {
+ // people_map
+ // .entry(group.as_credit_category())
+ // .or_default()
+ // .push(p.a())
+ // }
+ // }
// for p in people_map.values_mut().flatten() {
// if let Some(id) = p.person.ids.tmdb {
@@ -446,51 +454,73 @@ impl Trakt {
// }
// }
- ct.db.update_node_init(node, |node| {
- node.kind = trakt_kind.as_node_kind();
- node.title = Some(details.title.clone());
+ ct.dba.update_node(node, |mut node| {
+ node = node.as_object().insert(NO_KIND, trakt_kind.as_node_kind());
+ node = node.as_object().insert(NO_TITLE, &details.title);
if let Some(overview) = &details.overview {
- node.description = Some(overview.clone())
+ node = node.as_object().insert(NO_DESCRIPTION, &overview);
}
if let Some(tagline) = &details.tagline {
- node.tagline = Some(tagline.clone())
+ node = node.as_object().insert(NO_TAGLINE, &tagline);
}
- node.credits.extend(people_map);
if let Some(x) = details.ids.imdb.clone() {
- node.identifiers.insert(IdentifierType::Imdb, x);
+ node = node
+ .as_object()
+ .update(NO_IDENTIFIERS, |idents| idents.insert(IDENT_IMDB, &x));
}
if let Some(x) = details.ids.tvdb.clone() {
- node.identifiers.insert(IdentifierType::Tvdb, x.to_string());
+ node = node.as_object().update(NO_IDENTIFIERS, |idents| {
+ idents.insert(IDENT_TVDB, &x.to_string())
+ });
}
if let Some(x) = details.ids.tmdb.clone() {
- match trakt_kind {
- TraktKind::Movie => node
- .identifiers
- .insert(IdentifierType::TmdbMovie, x.to_string()),
- TraktKind::Show => node
- .identifiers
- .insert(IdentifierType::TmdbSeries, x.to_string()),
- _ => None,
+ let key = match trakt_kind {
+ TraktKind::Movie => IDENT_TRAKT_MOVIE,
+ TraktKind::Show => IDENT_TRAKT_SHOW,
+ _ => return node,
};
+ node = node
+ .as_object()
+ .update(NO_IDENTIFIERS, |idents| idents.insert(key, &x.to_string()));
}
- if let Some(rating) = &details.rating {
- node.ratings.insert(RatingType::Trakt, *rating);
+ if let Some(rating) = details.rating {
+ node = node
+ .as_object()
+ .update(NO_RATINGS, |idents| idents.insert(RTYP_TRAKT, rating));
}
+ node
})?;
Ok(())
}
- fn process_episode(&self, ct: &ImportContext, node: RowNum, node_data: Object) -> Result<()> {
- let (Some(episode), Some(season)) = (node_data.index, node_data.season_index) else {
+ fn process_episode(&self, ct: &ImportContext, node: RowNum) -> Result<()> {
+ let node_data = ct.dba.get_node(node)?.unwrap();
+ let node_data = node_data.as_object();
+
+ let (Some(episode), Some(season)) =
+ (node_data.get(NO_INDEX), node_data.get(NO_SEASON_INDEX))
+ else {
return Ok(());
};
let mut show_id = None;
- for &parent in &node_data.parents {
- let parent_data = ct.db.get_node(parent)?.ok_or(anyhow!("parent missing"))?;
- if let Some(id) = parent_data.identifiers.get(&IdentifierType::TraktShow) {
- show_id = Some(id.parse::<u64>()?);
- break;
+ ct.dba.db.read_transaction(&mut |txn| {
+ for parent in node_data.iter(NO_PARENT) {
+ let parent_data = ct
+ .dba
+ .nodes
+ .get(txn, parent)?
+ .ok_or(anyhow!("parent missing"))?;
+ if let Some(id) = parent_data
+ .as_object()
+ .get(NO_IDENTIFIERS)
+ .unwrap_or_default()
+ .get(IDENT_TRAKT_SHOW)
+ {
+ show_id = Some(id.parse::<u64>()?);
+ break;
+ }
}
- }
+ Ok(())
+ })?;
let Some(show_id) = show_id else {
return Ok(());
};
@@ -498,15 +528,20 @@ impl Trakt {
let seasons = self.show_seasons(show_id, ct.rt)?;
if seasons.iter().any(|x| x.number == season) {
let episodes = self.show_season_episodes(show_id, season, ct.rt)?;
- if let Some(episode) = episodes.get(episode.saturating_sub(1)) {
- ct.db.update_node_init(node, |node| {
- node.kind = NodeKind::Episode;
- node.index = Some(episode.number);
- node.title = Some(episode.title.clone());
- node.description = episode.overview.clone().or(node.description.clone());
+ if let Some(episode) = episodes.get(episode.saturating_sub(1) as usize) {
+ ct.dba.update_node(node, |mut node| {
+ node = node.as_object().insert(NO_KIND, KIND_EPISODE);
+ node = node.as_object().insert(NO_INDEX, episode.number);
+ node = node.as_object().insert(NO_TITLE, &episode.title);
+ if let Some(overview) = &episode.overview {
+ node = node.as_object().insert(NO_DESCRIPTION, &overview);
+ }
if let Some(r) = episode.rating {
- node.ratings.insert(RatingType::Trakt, r);
+ node = node
+ .as_object()
+ .update(NO_RATINGS, |rats| rats.insert(RTYP_TRAKT, r));
}
+ node
})?;
}
}