Diffstat (limited to 'import')
-rw-r--r--  import/Cargo.toml        24
-rw-r--r--  import/src/infojson.rs  143
-rw-r--r--  import/src/lib.rs       376
-rw-r--r--  import/src/mod.rs       319
-rw-r--r--  import/src/tmdb.rs      116
5 files changed, 978 insertions(+), 0 deletions(-)
diff --git a/import/Cargo.toml b/import/Cargo.toml
new file mode 100644
index 0000000..f2ba7af
--- /dev/null
+++ b/import/Cargo.toml
@@ -0,0 +1,24 @@
+[package]
+name = "jellyimport"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+jellycommon = { path = "../common" }
+jellybase = { path = "../base" }
+jellyclient = { path = "../client" }
+# jellymatroska = { path = "../matroska" }
+jellyremuxer = { path = "../remuxer" }
+
+log = { workspace = true }
+anyhow = "1.0.75"
+reqwest = { version = "0.11.22", features = ["blocking", "json"] }
+
+# serde = { version = "1.0.193", features = ["derive"] }
+serde_json = "1.0.108"
+serde_yaml = "0.9.27"
+# bincode = { version = "2.0.0-rc.3", features = ["serde"] }
+
+async-recursion = "1.0.5"
+futures = "0.3.29"
+tokio = { workspace = true }
diff --git a/import/src/infojson.rs b/import/src/infojson.rs
new file mode 100644
index 0000000..3f0edc9
--- /dev/null
+++ b/import/src/infojson.rs
@@ -0,0 +1,143 @@
+/*
+ This file is part of jellything (https://codeberg.org/metamuffin/jellything)
+ which is licensed under the GNU Affero General Public License (version 3); see /COPYING.
+ Copyright (C) 2023 metamuffin <metamuffin.org>
+*/
+
+use anyhow::Context;
+use jellycommon::chrono::{format::Parsed, DateTime, Utc};
+use serde::{Deserialize, Serialize};
+use std::collections::HashMap;
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct YVideo {
+ pub id: String,
+ pub title: String,
+ pub formats: Vec<YFormat>,
+ pub thumbnails: Vec<YThumbnail>,
+ pub thumbnail: String,
+ pub description: String,
+ pub channel_id: String,
+ pub duration: Option<f64>,
+ pub view_count: usize,
+ pub average_rating: Option<String>,
+ pub age_limit: usize,
+ pub webpage_url: String,
+ pub categories: Vec<String>,
+ pub tags: Vec<String>,
+ pub playable_in_embed: bool,
+ pub automatic_captions: HashMap<String, Vec<YCaption>>,
+ pub comment_count: Option<usize>,
+ pub chapters: Option<Vec<YChapter>>,
+ pub heatmap: Option<Vec<YHeatmapSample>>,
+ pub like_count: Option<usize>,
+ pub channel: Option<String>,
+ pub channel_follower_count: usize,
+ pub channel_is_verified: Option<bool>,
+ pub uploader: String,
+ pub uploader_id: String,
+ pub uploader_url: String,
+ pub upload_date: String,
+ pub availability: String, // "public" | "private" | "unlisted",
+ pub original_url: Option<String>,
+ pub webpage_url_basename: String,
+ pub webpage_url_domain: String,
+ pub extractor: String,
+ pub extractor_key: String,
+ pub playlist_count: Option<usize>,
+ pub playlist: Option<String>,
+ pub playlist_id: Option<String>,
+ pub playlist_title: Option<String>,
+ pub playlist_uploader: Option<String>,
+ pub playlist_uploader_id: Option<String>,
+ pub n_entries: Option<usize>,
+ pub playlist_index: Option<usize>,
+ pub display_id: String,
+ pub fulltitle: String,
+ pub duration_string: String,
+ pub is_live: bool,
+ pub was_live: bool,
+ pub epoch: usize,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct YCaption {
+ pub url: Option<String>,
+ pub ext: String, //"vtt" | "json3" | "srv1" | "srv2" | "srv3" | "ttml",
+ pub protocol: Option<String>,
+ pub name: Option<String>,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct YFormat {
+ pub format_id: String,
+ pub format_note: Option<String>,
+ pub ext: String,
+ pub protocol: String,
+ pub acodec: Option<String>,
+ pub vcodec: Option<String>,
+ pub url: Option<String>,
+ pub width: Option<u32>,
+ pub height: Option<u32>,
+ pub fps: Option<f64>,
+ pub columns: Option<u32>,
+ pub fragments: Option<Vec<YFragment>>,
+ pub resolution: String,
+ pub dynamic_range: Option<String>,
+ pub aspect_ratio: Option<f64>,
+ pub http_headers: HashMap<String, String>,
+ pub audio_ext: String,
+ pub video_ext: String,
+ pub vbr: Option<f64>,
+ pub abr: Option<f64>,
+ pub format: String,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct YFragment {
+ pub url: Option<String>,
+ pub duration: Option<f64>,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct YThumbnail {
+ pub url: String,
+ pub preference: i32,
+ pub id: String,
+ pub height: Option<u32>,
+ pub width: Option<u32>,
+ pub resolution: Option<String>,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct YChapter {
+ pub start_time: f64,
+ pub end_time: f64,
+ pub title: String,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct YHeatmapSample {
+ pub start_time: f64,
+ pub end_time: f64,
+ pub value: f64,
+}
+
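+/// Parses a yt-dlp `upload_date` of the form `YYYYMMDD` into a UTC timestamp at midnight.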
+pub fn parse_upload_date(d: &str) -> anyhow::Result<DateTime<Utc>> {
+ let (year, month, day) = (&d[0..4], &d[4..6], &d[6..8]);
+ let (year, month, day) = (
+ year.parse().context("parsing year")?,
+ month.parse().context("parsing month")?,
+ day.parse().context("parsing day")?,
+ );
+
+ let mut p = Parsed::new();
+ p.year = Some(year);
+ p.month = Some(month);
+ p.day = Some(day);
+ p.hour_div_12 = Some(0);
+ p.hour_mod_12 = Some(0);
+ p.minute = Some(0);
+ p.second = Some(0);
+ Ok(p.to_datetime_with_timezone(&Utc)?)
+}
diff --git a/import/src/lib.rs b/import/src/lib.rs
new file mode 100644
index 0000000..3698f79
--- /dev/null
+++ b/import/src/lib.rs
@@ -0,0 +1,376 @@
+/*
+ This file is part of jellything (https://codeberg.org/metamuffin/jellything)
+ which is licensed under the GNU Affero General Public License (version 3); see /COPYING.
+ Copyright (C) 2023 metamuffin <metamuffin.org>
+*/
+#![feature(lazy_cell)]
+use anyhow::{Context, Ok};
+use async_recursion::async_recursion;
+use futures::{stream::FuturesUnordered, StreamExt};
+use jellybase::{
+ cache::async_cache_file, database::Database, federation::Federation, AssetLocationExt, CONF,
+};
+use jellyclient::Session;
+use jellycommon::{
+ AssetLocation, AssetRole, ImportOptions, ImportSource, Node, NodePrivate, NodePublic,
+};
+use log::{debug, info};
+use std::{
+ cmp::Ordering,
+ ffi::OsStr,
+ fs::File,
+ os::unix::prelude::OsStrExt,
+ path::{Path, PathBuf},
+ sync::{Arc, LazyLock},
+};
+use tokio::sync::Semaphore;
+
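+// Only one library import may run at a time; a second call fails immediately via `try_acquire`
+// instead of waiting.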
+static IMPORT_SEM: LazyLock<Semaphore> = LazyLock::new(|| Semaphore::new(1));
+
+pub async fn import(db: &Database, fed: &Federation) -> anyhow::Result<()> {
+ let permit = IMPORT_SEM.try_acquire()?;
+ info!("loading sources...");
+ import_path(CONF.library_path.clone(), vec![], db, fed)
+ .await
+ .context("indexing")?;
+ info!("merging nodes...");
+ merge_nodes(db).context("merging nodes")?;
+ info!("clearing temporary node tree");
+ db.node_import.clear()?;
+ info!("import completed");
+ drop(permit);
+ Ok(())
+}
+
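+// Collapses the node variants gathered in the temporary import tree into the final node table;
+// variants are merged in index-path order so that earlier (deeper-path) entries take priority.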
+pub fn merge_nodes(db: &Database) -> anyhow::Result<()> {
+ for r in db.node_import.iter() {
+ let (id, mut nodes) = r?;
+
+ nodes.sort_by(|(x, _), (y, _)| compare_index_path(x, y));
+
+ let node = nodes
+ .into_iter()
+ .map(|(_, x)| x)
+ .reduce(|x, y| merge_node(x, y))
+ .unwrap();
+
+ db.node.insert(&id, &node)?;
+ }
+ Ok(())
+}
+
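+// Lexicographic comparison of index paths, except that a prefix sorts after its extensions,
+// so deeper paths come first.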
+fn compare_index_path(x: &[usize], y: &[usize]) -> Ordering {
+ if x.is_empty() {
+ Ordering::Greater
+ } else if y.is_empty() {
+ Ordering::Less
+ } else {
+ match x[0].cmp(&y[0]) {
+ o @ (Ordering::Less | Ordering::Greater) => o,
+ Ordering::Equal => compare_index_path(&x[1..], &y[1..]),
+ }
+ }
+}
+
+#[async_recursion]
+pub async fn import_path(
+ path: PathBuf,
+ index_path: Vec<usize>,
+ db: &Database,
+ fed: &Federation,
+) -> anyhow::Result<()> {
+ if path.is_dir() {
+ let mut children_paths = path
+ .read_dir()?
+ .map(Result::unwrap)
+ .filter_map(|e| {
+ if e.path().extension() == Some(&OsStr::from_bytes(b"yaml"))
+ || e.metadata().unwrap().is_dir()
+ {
+ Some(e.path())
+ } else {
+ None
+ }
+ })
+ .collect::<Vec<_>>();
+
+ children_paths.sort();
+
+ let mut children: FuturesUnordered<_> = children_paths
+ .into_iter()
+ .enumerate()
+ .map(|(i, p)| {
+ import_path(
+ p.clone(),
+ {
+ let mut path = index_path.clone();
+ path.push(i);
+ path
+ },
+ db,
+ fed,
+ )
+ })
+ .collect();
+
+ while let Some(k) = children.next().await {
+ k?
+ }
+ } else {
+ let opts: ImportOptions = serde_yaml::from_reader(File::open(&path)?)?;
+
+ for s in opts.sources {
+ process_source(opts.id.clone(), s, &path, &index_path, db, fed).await?;
+ }
+ }
+ Ok(())
+}
+
+async fn process_source(
+ id: String,
+ s: ImportSource,
+ path: &Path,
+ index_path: &[usize],
+ db: &Database,
+ fed: &Federation,
+) -> anyhow::Result<()> {
+ let insert_node = move |id: &String, n: Node| -> anyhow::Result<()> {
+ db.node_import.fetch_and_update(id, |l| {
+ let mut l = l.unwrap_or_default();
+ l.push((index_path.to_vec(), n.clone()));
+ Some(l)
+ })?;
+ Ok(())
+ };
+ match s {
+ ImportSource::Override(n) => insert_node(&id, n)?,
+ ImportSource::Tmdb(_) => todo!(),
+ ImportSource::Media { location } => {
+            let _path = location.path();
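+            // TODO: importing local media files is not implemented here yet.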
+ }
+ ImportSource::Federated { host } => {
+ let session = fed.get_session(&host).await.context("creating session")?;
+
+ import_remote(id, &host, db, &session, index_path)
+ .await
+ .context("federated import")?
+ }
+ ImportSource::AutoChildren => {
+            // TODO: don't forget to update the children's paths
+ }
+ }
+ Ok(())
+}
+
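+// Field-wise merge of two variants of the same node; fields from `x` (the earlier,
+// deeper-path entry) take priority and `y` fills in the gaps.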
+fn merge_node(x: Node, y: Node) -> Node {
+ Node {
+ public: NodePublic {
+ kind: x.public.kind.or(y.public.kind),
+ title: x.public.title.or(y.public.title),
+ id: x.public.id.or(y.public.id),
+ path: vec![],
+            children: if x.public.children.is_empty() {
+                y.public.children
+            } else {
+                x.public.children
+            },
+ tagline: x.public.tagline.or(y.public.tagline),
+ description: x.public.description.or(y.public.description),
+ release_date: x.public.release_date.or(y.public.release_date),
+ index: x.public.index.or(y.public.index),
+ media: x.public.media.or(y.public.media), // TODO proper media merging
+ ratings: x
+ .public
+ .ratings
+ .into_iter()
+ .chain(y.public.ratings)
+ .collect(),
+ federated: x.public.federated.or(y.public.federated),
+ },
+ private: NodePrivate {
+ id: x.private.id.or(y.private.id),
+ poster: x.private.poster.or(y.private.poster),
+ backdrop: x.private.backdrop.or(y.private.backdrop),
+ source: x.private.source.or(y.private.source), // TODO here too
+ },
+ }
+}
+
+// #[async_recursion]
+// pub async fn import_path(
+// path: PathBuf,
+// db: &Database,
+// fed: &Federation,
+// mut node_path: Vec<String>,
+// ) -> anyhow::Result<(Vec<String>, usize)> {
+// if path.is_dir() {
+// let mpath = path.join("directory.json");
+// let children_paths = path.read_dir()?.map(Result::unwrap).filter_map(|e| {
+// if e.path().extension() == Some(&OsStr::from_bytes(b"jelly"))
+// || e.metadata().unwrap().is_dir()
+// {
+// Some(e.path())
+// } else {
+// None
+// }
+// });
+// let identifier = if mpath.exists() {
+// path.file_name().unwrap().to_str().unwrap().to_string()
+// } else {
+// node_path
+// .last()
+// .cloned()
+// .ok_or(anyhow!("non-root node requires parent"))?
+// };
+
+// node_path.push(identifier.clone());
+// let mut all: FuturesUnordered<_> = children_paths
+// .into_iter()
+// .map(|p| import_path(p.clone(), db, fed, node_path.clone()).map_err(|e| (p, e)))
+// .collect();
+// node_path.pop(); // we will set the dirs path later and need it to not be included
+
+// let mut children_ids = Vec::new();
+// let mut errs = 0;
+// while let Some(k) = all.next().await {
+// match k {
+// core::result::Result::Ok((els, errs2)) => {
+// errs += errs2;
+// children_ids.extend(els)
+// }
+// Err((p, e)) => {
+// errs += 1;
+// error!("import of {p:?} failed: {e:?}")
+// }
+// }
+// }
+// if mpath.exists() {
+// let mut node: Node =
+// serde_json::from_reader(File::open(mpath).context("metadata missing")?)?;
+
+// node.public.children = children_ids;
+// node.public.path = node_path;
+// node.public.id = Some(identifier.to_owned());
+// info!("adding {identifier}");
+// db.node.insert(&identifier, &node)?;
+// Ok((vec![identifier], errs))
+// } else {
+// Ok((children_ids, errs))
+// }
+// } else if path.is_file() {
+// info!("loading {path:?}");
+// let datafile = File::open(path.clone()).context("cant load metadata")?;
+// let mut node: Node = serde_json::from_reader(datafile).context("invalid metadata")?;
+// let identifier = node.private.id.clone().unwrap_or_else(|| {
+// path.file_name()
+// .unwrap()
+// .to_str()
+// .unwrap()
+// .strip_suffix(".json")
+// .unwrap()
+// .to_string()
+// });
+
+// let idents = if let Some(io) = node.private.import.take() {
+// let session = fed
+// .get_session(&io.host)
+// .await
+// .context("creating session")?;
+
+// import_remote(io, db, &session, identifier.clone(), node_path)
+// .await
+// .context("federated import")?
+// } else {
+// debug!("adding {identifier}");
+// node.public.path = node_path;
+// node.public.id = Some(identifier.to_owned());
+// let did_insert = db.node.insert(&identifier, &node)?.is_none();
+// if did_insert {
+// vec![identifier]
+// } else {
+// vec![]
+// }
+// };
+// Ok((idents, 0))
+// } else {
+// bail!("did somebody really put a fifo or socket in the library?!")
+// }
+// }
+
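+// Limits the number of concurrent federated node and asset fetches.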
+static SEM_REMOTE_IMPORT: LazyLock<Semaphore> = LazyLock::new(|| Semaphore::new(16));
+
+#[async_recursion]
+async fn import_remote(
+ id: String,
+ host: &str,
+ db: &Database,
+ session: &Arc<Session>,
+ index_path: &[usize],
+) -> anyhow::Result<()> {
+ let insert_node = move |id: &String, n: Node| -> anyhow::Result<()> {
+ db.node_import.fetch_and_update(id, |l| {
+ let mut l = l.unwrap_or_default();
+ l.push((index_path.to_vec(), n.clone()));
+ Some(l)
+ })?;
+ Ok(())
+ };
+ let _permit = SEM_REMOTE_IMPORT.acquire().await.unwrap();
+ info!("loading federated node {id:?}");
+
+ let node = session.node(&id).await.context("fetching remote node")?;
+
+ if node.federated.as_ref() == Some(&CONF.hostname) {
+ return Ok(());
+ }
+
+ // TODO maybe use lazy download
+ let poster = cache_federation_asset(session.to_owned(), id.clone(), AssetRole::Poster).await?;
+ let backdrop =
+ cache_federation_asset(session.to_owned(), id.clone(), AssetRole::Backdrop).await?;
+
+ drop(_permit);
+
+ let node = Node {
+ public: node.clone(),
+ private: NodePrivate {
+ backdrop: Some(backdrop),
+ poster: Some(poster),
+ id: None,
+ source: None, // TODO
+ },
+ };
+
+ debug!("adding {id}");
+ insert_node(&id, node.clone())?;
+
+ let mut children: FuturesUnordered<_> = node
+ .public
+ .children
+ .iter()
+ .map(|c| import_remote(c.to_owned(), host, db, session, index_path))
+ .collect();
+
+ while let Some(r) = children.next().await {
+ r?;
+ }
+
+ Ok(())
+}
+
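+// Downloads a remote node's poster or backdrop through the federation session and stores it
+// in the local asset cache, keyed by role and node identifier.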
+async fn cache_federation_asset(
+ session: Arc<Session>,
+ identifier: String,
+ role: AssetRole,
+) -> anyhow::Result<AssetLocation> {
+ async_cache_file(
+ &["fed-asset", role.as_str(), &identifier.clone()],
+ move |out| async move {
+ let session = session;
+ session
+ .node_asset(identifier.as_str(), role, 1024, out)
+ .await
+ },
+ )
+ .await
+}
diff --git a/import/src/mod.rs b/import/src/mod.rs
new file mode 100644
index 0000000..0c43cde
--- /dev/null
+++ b/import/src/mod.rs
@@ -0,0 +1,319 @@
+/*
+ This file is part of jellything (https://codeberg.org/metamuffin/jellything)
+ which is licensed under the GNU Affero General Public License (version 3); see /COPYING.
+ Copyright (C) 2023 metamuffin <metamuffin.org>
+*/
+pub mod infojson;
+pub mod tmdb;
+
+use crate::{make_ident, ok_or_warn, Action};
+use anyhow::Context;
+use infojson::YVideo;
+use jellycommon::{
+    AssetLocation, LocalTrack, MediaInfo, MediaSource, Node, NodeKind, NodePrivate, NodePublic,
+    Rating, TrackSource,
+};
+use jellymatroska::read::EbmlReader;
+use jellyremuxer::import::import_metadata;
+use log::{debug, info, warn};
+use std::{
+ collections::BTreeMap,
+ fs::{remove_file, File},
+ io::{stdin, BufReader, Write},
+};
+use tmdb::{tmdb_details, tmdb_image};
+
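+// CLI handler for the `new` library action: gathers metadata from TMDB and/or the input
+// Matroska file, materialises poster and backdrop assets, moves, copies, or symlinks the
+// source media, and writes the node description into the library.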
+pub(crate) fn import(action: Action, dry: bool) -> anyhow::Result<()> {
+ match action {
+ Action::New {
+ path,
+ tmdb_id,
+ tmdb_search,
+ input,
+ series,
+ ident_prefix,
+ ignore_attachments,
+ copy,
+ video,
+ ignore_metadata,
+ r#move,
+ title,
+ skip_existing,
+ } => {
+ if std::env::current_dir().unwrap().file_name().unwrap() != "library" {
+ warn!("new command can only be used in the library directory; what you are doing right now probably wont work.")
+ }
+
+ if skip_existing {
+ if let Some(input) = &input {
+ let guessed_path = path.join(input.file_stem().unwrap_or(input.as_os_str()));
+ if guessed_path.exists() {
+ info!("guessed output ({guessed_path:?}) exists, skipping import");
+ return Ok(());
+ } else {
+ debug!("guessed output ({guessed_path:?}) missing");
+ }
+ }
+ }
+
+ let tmdb_kind = if series { "tv" } else { "movie" };
+ let tmdb_id = if let Some(id) = tmdb_id {
+ Some(id.parse().unwrap())
+ } else if let Some(title) = tmdb_search {
+ let tmdb_key = std::env::var("TMDB_API_KEY").context("tmdb api key required")?;
+ let results = tmdb::tmdb_search(tmdb_kind, &title, &tmdb_key)?;
+ info!("results:");
+ for (i, r) in results.results.iter().enumerate() {
+ info!(
+ "\t[{i}] {}: {} ({})",
+ r.id,
+ r.title.as_ref().or(r.name.as_ref()).unwrap(),
+ r.overview.chars().take(100).collect::<String>()
+ );
+ }
+ let res_index = if results.results.len() > 1 {
+ stdin()
+ .lines()
+ .next()
+ .unwrap()
+ .unwrap()
+ .parse::<usize>()
+ .unwrap()
+ } else {
+ 0
+ };
+ Some(results.results[res_index].id)
+ } else {
+ None
+ };
+
+ let tmdb_details = tmdb_id
+ .map(|id| {
+ let tmdb_key =
+ std::env::var("TMDB_API_KEY").context("tmdb api key required")?;
+ let td = tmdb_details(tmdb_kind, id, &tmdb_key)
+ .context("fetching details")
+ .unwrap();
+ Ok::<_, anyhow::Error>(td)
+ })
+ .transpose()?;
+
+ let mut kind = NodeKind::Series;
+ let mut file_meta = None;
+ let mut infojson = None;
+
+ if let Some(input_path) = &input {
+ file_meta = Some({
+ let input = BufReader::new(File::open(&input_path).unwrap());
+ let mut input = EbmlReader::new(input);
+ import_metadata(&mut input)?
+ });
+ if ignore_attachments {
+ let file_meta = file_meta.as_mut().unwrap();
+ file_meta.cover = None;
+ file_meta.infojson = None;
+ }
+ if ignore_metadata {
+ let file_meta = file_meta.as_mut().unwrap();
+ file_meta.description = None;
+ file_meta.tagline = None;
+ file_meta.title = None;
+ }
+
+ if let Some(ij) = &file_meta.as_ref().unwrap().infojson {
+ infojson =
+ Some(serde_json::from_str::<YVideo>(ij).context("parsing info.json")?);
+ }
+
+ kind = if video {
+ NodeKind::Video
+ } else {
+ NodeKind::Movie
+ };
+ }
+
+ let title = title
+ .or(tmdb_details
+ .as_ref()
+ .map(|d| d.title.clone().or(d.name.clone()))
+ .flatten())
+ .or(file_meta.as_ref().map(|m| m.title.clone()).flatten())
+ .expect("no title detected");
+
+ let ident = format!(
+ "{}{}",
+ ident_prefix.unwrap_or(String::new()),
+ make_ident(
+ &infojson
+ .as_ref()
+ .map(|i| i.id.clone())
+ .unwrap_or(title.clone())
+ ),
+ );
+ let path = path.join(&ident);
+            let source_path = input.as_ref().map(|_| path.join("source.mkv"));
+
+ let (mut poster, mut backdrop) = (None, None);
+ if !dry {
+ std::fs::create_dir_all(&path)?;
+
+ poster = file_meta
+ .as_ref()
+ .map(|m| {
+ m.cover
+ .as_ref()
+ .map(|(mime, data)| {
+ let pu = path.join(format!(
+ "cover.{}",
+ match mime.as_str() {
+ "image/webp" => "webp",
+ "image/jpeg" => "jpeg",
+ "image/png" => "png",
+ _ => {
+ warn!("unknown mime, just using webp");
+ "webp"
+ }
+ }
+ ));
+ if !pu.exists() {
+ let mut f = File::create(&pu)?;
+ f.write_all(&data)?;
+ }
+ Ok::<_, anyhow::Error>(pu)
+ })
+ .transpose()
+ })
+ .transpose()?
+ .flatten()
+ .or(tmdb_details
+ .as_ref()
+ .map(|d| {
+ d.poster_path
+ .as_ref()
+ .map(|p| {
+ let pu = path.join("poster.jpeg");
+ let mut f = File::create(&pu)?;
+ tmdb_image(&p, &mut f)?;
+ Ok::<_, anyhow::Error>(pu)
+ })
+ .transpose()
+ })
+ .transpose()?
+ .flatten());
+
+ backdrop = tmdb_details
+ .as_ref()
+ .map(|d| {
+ d.backdrop_path
+ .as_ref()
+ .map(|p| {
+ let pu = path.join("backdrop.jpeg");
+ let mut f = File::create(&pu)?;
+ tmdb_image(&p, &mut f)?;
+ Ok::<_, anyhow::Error>(pu)
+ })
+ .transpose()
+ })
+ .transpose()?
+ .flatten();
+ }
+
+ let mut ratings = BTreeMap::new();
+
+ ratings.extend(
+ infojson
+ .as_ref()
+ .map(|i| (Rating::YoutubeViews, i.view_count as f64)),
+ );
+ ratings.extend(
+ infojson
+ .as_ref()
+ .map(|i| i.like_count.map(|l| (Rating::YoutubeLikes, l as f64)))
+ .flatten(),
+ );
+ ratings.extend(
+ tmdb_details
+ .as_ref()
+ .map(|d| (Rating::Tmdb, d.vote_average)),
+ );
+
+ let node = Node {
+ private: NodePrivate {
+ id: Some(ident.clone()),
+ backdrop: backdrop.clone().map(AssetLocation::Library),
+ poster: poster.clone().map(AssetLocation::Library),
+ source: file_meta.as_ref().map(|m| MediaSource::Local {
+ tracks: m
+ .track_sources
+ .clone()
+ .into_iter()
+ .map(|t| LocalTrack {
+ path: source_path.clone().unwrap(),
+ ..t
+ })
+ .collect(),
+ }),
+ },
+ public: NodePublic {
+ federated: None,
+ ratings,
+ description: file_meta
+ .as_ref()
+ .map(|m| m.description.clone())
+ .flatten()
+ .or(tmdb_details.as_ref().map(|d| d.overview.to_owned())),
+ tagline: file_meta.as_ref().map(|m| m.tagline.clone()).flatten().or(
+ tmdb_details
+ .as_ref()
+ .map(|d| d.tagline.to_owned())
+ .flatten(),
+ ),
+ title: Some(title),
+ index: None,
+ kind: Some(kind),
+ children: Vec::new(),
+ media: file_meta.as_ref().map(|m| MediaInfo {
+ chapters: m.chapters.clone(),
+ duration: m.duration,
+ tracks: m.tracks.clone(),
+ }),
+ release_date: tmdb_details
+ .as_ref()
+ .map(|d| tmdb::parse_release_date(&d.release_date.clone()?).ok())
+ .flatten()
+ .or(infojson
+ .as_ref()
+ .and_then(|j| ok_or_warn(infojson::parse_upload_date(&j.upload_date)))),
+ ..Default::default()
+ },
+ };
+
+ if dry {
+ println!("{}", serde_json::to_string_pretty(&node)?);
+ } else {
+ if let Some(source_path) = source_path {
+ let input = input.clone().unwrap();
+ if r#move {
+ std::fs::rename(&input, &source_path)?;
+ } else if copy {
+ std::fs::copy(&input, &source_path)?;
+ } else {
+ if source_path.is_symlink() {
+ remove_file(&source_path)?;
+ }
+ std::os::unix::fs::symlink(&input, &source_path)?;
+ }
+ }
+ let f = File::create(path.join(if series {
+ "directory.json"
+ } else {
+ "item.jelly"
+ }))?;
+ serde_json::to_writer_pretty(f, &node)?;
+ }
+
+ Ok(())
+ }
+ _ => unreachable!(),
+ }
+}
diff --git a/import/src/tmdb.rs b/import/src/tmdb.rs
new file mode 100644
index 0000000..c38d50e
--- /dev/null
+++ b/import/src/tmdb.rs
@@ -0,0 +1,116 @@
+/*
+ This file is part of jellything (https://codeberg.org/metamuffin/jellything)
+ which is licensed under the GNU Affero General Public License (version 3); see /COPYING.
+ Copyright (C) 2023 metamuffin <metamuffin.org>
+*/
+use anyhow::Context;
+use jellycommon::chrono::{format::Parsed, DateTime, Utc};
+use log::info;
+use serde::Deserialize;
+use std::io::Write;
+
+#[derive(Debug, Clone, Deserialize)]
+pub struct TmdbQuery {
+ pub page: usize,
+ pub results: Vec<TmdbQueryResult>,
+ pub total_pages: usize,
+ pub total_results: usize,
+}
+
+#[derive(Debug, Clone, Deserialize)]
+pub struct TmdbQueryResult {
+ pub adult: bool,
+ pub backdrop_path: Option<String>,
+ pub genre_ids: Vec<u64>,
+ pub id: u64,
+ pub original_language: Option<String>,
+ pub original_title: Option<String>,
+ pub overview: String,
+ pub popularity: f64,
+ pub poster_path: Option<String>,
+ pub release_date: Option<String>,
+ pub title: Option<String>,
+ pub name: Option<String>,
+ pub vote_average: f64,
+ pub vote_count: usize,
+}
+
+#[derive(Debug, Clone, Deserialize)]
+pub struct TmdbDetails {
+ pub adult: bool,
+ pub backdrop_path: Option<String>,
+ pub genres: Vec<TmdbGenre>,
+ pub id: u64,
+ pub original_language: Option<String>,
+ pub original_title: Option<String>,
+ pub overview: String,
+ pub popularity: f64,
+ pub poster_path: Option<String>,
+ pub release_date: Option<String>,
+ pub title: Option<String>,
+ pub name: Option<String>,
+ pub vote_average: f64,
+ pub vote_count: usize,
+ pub budget: Option<usize>,
+ pub homepage: Option<String>,
+ pub imdb_id: Option<String>,
+ pub production_companies: Vec<TmdbProductionCompany>,
+ pub revenue: Option<usize>,
+ pub tagline: Option<String>,
+}
+
+#[derive(Debug, Clone, Deserialize)]
+pub struct TmdbGenre {
+ pub id: u64,
+ pub name: String,
+}
+
+#[derive(Debug, Clone, Deserialize)]
+pub struct TmdbProductionCompany {
+ pub id: u64,
+ pub name: String,
+ pub logo_path: Option<String>,
+}
+
+pub fn tmdb_search(kind: &str, query: &str, key: &str) -> anyhow::Result<TmdbQuery> {
+ info!("searching tmdb: {query:?}");
+ Ok(reqwest::blocking::get(&format!(
+ "https://api.themoviedb.org/3/search/{kind}?query={}&api_key={key}",
+ query.replace(" ", "+")
+ ))?
+ .json::<TmdbQuery>()?)
+}
+
+pub fn tmdb_details(kind: &str, id: u64, key: &str) -> anyhow::Result<TmdbDetails> {
+ info!("fetching details: {id:?}");
+ Ok(reqwest::blocking::get(&format!(
+ "https://api.themoviedb.org/3/{kind}/{id}?api_key={key}"
+ ))?
+ .json()?)
+}
+
+pub fn tmdb_image(path: &str, out: &mut impl Write) -> anyhow::Result<()> {
+ info!("downloading image {path:?}");
+ let mut res = reqwest::blocking::get(&format!("https://image.tmdb.org/t/p/original{path}"))?;
+ res.copy_to(out)?;
+ Ok(())
+}
+
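+/// Parses a TMDB release date of the form `YYYY-MM-DD` into a UTC timestamp at midnight.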
+pub fn parse_release_date(d: &str) -> anyhow::Result<DateTime<Utc>> {
+ let (year, month, day) = (&d[0..4], &d[5..7], &d[8..10]);
+ let (year, month, day) = (
+ year.parse().context("parsing year")?,
+ month.parse().context("parsing month")?,
+ day.parse().context("parsing day")?,
+ );
+
+ let mut p = Parsed::new();
+ p.year = Some(year);
+ p.month = Some(month);
+ p.day = Some(day);
+ p.hour_div_12 = Some(0);
+ p.hour_mod_12 = Some(0);
+ p.minute = Some(0);
+ p.second = Some(0);
+ Ok(p.to_datetime_with_timezone(&Utc)?)
+}