Diffstat (limited to 'import/src')
-rw-r--r--  import/src/lib.rs  186
1 file changed, 119 insertions(+), 67 deletions(-)
diff --git a/import/src/lib.rs b/import/src/lib.rs
index 4da1707..116a15d 100644
--- a/import/src/lib.rs
+++ b/import/src/lib.rs
@@ -7,6 +7,7 @@
 pub mod infojson;
 pub mod tmdb;
+use crate::tmdb::TmdbKind;
 use anyhow::{anyhow, Context, Ok};
 use async_recursion::async_recursion;
 use futures::{executor::block_on, stream::FuturesUnordered, StreamExt};
@@ -23,7 +24,8 @@ use jellycommon::{
 };
 use jellymatroska::read::EbmlReader;
 use jellyremuxer::import::import_metadata;
-use log::{debug, info};
+use log::{debug, info, warn};
+use regex::Regex;
 use std::{
     cmp::Ordering,
     ffi::OsStr,
@@ -36,8 +38,6 @@ use std::{
 use tmdb::tmdb_image;
 use tokio::{io::AsyncWriteExt, sync::Semaphore, task::spawn_blocking};
-use crate::tmdb::TmdbKind;
-
 static IMPORT_SEM: LazyLock<Semaphore> = LazyLock::new(|| Semaphore::new(1));
 
 pub async fn import(db: &Database, fed: &Federation) -> anyhow::Result<()> {
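Note on the semaphore above: with a single permit, at most one import can run at a time. A minimal sketch of the acquire pattern this implies, assuming import() takes the permit at its entry point (its body is not shown in this diff):

use std::sync::LazyLock;
use tokio::sync::Semaphore;

static IMPORT_SEM: LazyLock<Semaphore> = LazyLock::new(|| Semaphore::new(1));

async fn serialized_import() -> anyhow::Result<()> {
    // A second caller parks here until the first drops its permit.
    let _permit = IMPORT_SEM.acquire().await?;
    // ... do the actual import work while holding the permit ...
    Ok(())
}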
@@ -174,6 +174,7 @@
     Ok(())
 }
 
+#[async_recursion]
 async fn process_source(
     id: String,
     s: ImportSource,
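The #[async_recursion] attribute added above is needed because process_source now calls itself for directory imports (see the next hunk): a recursive async fn would have an infinitely sized future, which rustc rejects, so the macro boxes the returned future. A minimal sketch of the pattern:

use async_recursion::async_recursion;

// Without the attribute, rustc rejects this with
// "recursion in an async fn requires boxing".
#[async_recursion]
async fn countdown(n: u32) {
    if n > 0 {
        countdown(n - 1).await;
    }
}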
@@ -241,78 +242,103 @@ async fn process_source(
             ignore_chapters,
             ignore_metadata,
         } => {
-            // TODO use ignore options
             let media_path = location.path();
+            if media_path.is_dir() {
+                let mut node = Node::default();
+                for f in media_path.read_dir()? {
+                    let child_path = f?.path();
+                    let inf_id = infer_id_from_path(&child_path).context("inferring child id")?;
+                    process_source(
+                        id.clone(),
+                        ImportSource::Media {
+                            location: AssetLocation::Media(child_path),
+                            ignore_attachments,
+                            ignore_chapters,
+                            ignore_metadata,
+                        },
+                        path,
+                        index_path,
+                        db,
+                        fed,
+                    )
+                    .await
+                    .context("recursive media import")?;
+                    node.public.children.push(inf_id);
+                }
+                insert_node(&id, node)?;
+            } else if media_path.is_file() {
+                let metadata = spawn_blocking(move || {
+                    let input =
+                        BufReader::new(File::open(&location.path()).context("opening media file")?);
+                    let mut input = EbmlReader::new(input);
+                    import_metadata(&mut input)
+                })
+                .await??;
-            let metadata = spawn_blocking(move || {
-                let input =
-                    BufReader::new(File::open(&location.path()).context("opening media file")?);
-                let mut input = EbmlReader::new(input);
-                import_metadata(&mut input)
-            })
-            .await??;
-
-            let mut node = Node::default();
+                let mut node = Node::default();
-            if !ignore_metadata {
-                node.public.title = metadata.title;
-                node.public.description = metadata.description;
-                node.public.tagline = metadata.tagline;
-            }
-            node.public.media = Some(MediaInfo {
-                duration: metadata.duration,
-                tracks: metadata.tracks,
-                chapters: if ignore_chapters {
-                    vec![]
-                } else {
-                    metadata.chapters
-                },
-            });
-            node.private.source = Some(
-                metadata
-                    .track_sources
-                    .into_iter()
-                    .map(|mut ts| {
-                        ts.path = media_path.to_owned();
-                        TrackSource::Local(ts)
-                    })
-                    .collect(),
-            );
+                if !ignore_metadata {
+                    node.public.title = metadata.title;
+                    node.public.description = metadata.description;
+                    node.public.tagline = metadata.tagline;
+                }
+                node.public.media = Some(MediaInfo {
+                    duration: metadata.duration,
+                    tracks: metadata.tracks,
+                    chapters: if ignore_chapters {
+                        vec![]
+                    } else {
+                        metadata.chapters
+                    },
+                });
+                node.private.source = Some(
+                    metadata
+                        .track_sources
+                        .into_iter()
+                        .map(|mut ts| {
+                            ts.path = media_path.to_owned();
+                            TrackSource::Local(ts)
+                        })
+                        .collect(),
+                );
-            if !ignore_attachments {
-                if let Some((filename, data)) = metadata.cover {
-                    node.private.poster = Some(
-                        async_cache_file(
-                            &["att-cover", media_path.to_str().unwrap(), &filename],
-                            |mut f| async move {
-                                f.write_all(&data).await?;
-                                Ok(())
-                            },
-                        )
-                        .await?,
-                    );
-                };
+                if !ignore_attachments {
+                    if let Some((filename, data)) = metadata.cover {
+                        node.private.poster = Some(
+                            async_cache_file(
+                                &["att-cover", media_path.to_str().unwrap(), &filename],
+                                |mut f| async move {
+                                    f.write_all(&data).await?;
+                                    Ok(())
+                                },
+                            )
+                            .await?,
+                        );
+                    };
-                if let Some(infojson) = metadata.infojson {
-                    let infojson: infojson::YVideo =
-                        serde_json::from_str(&infojson).context("parsing infojson")?;
+                    if let Some(infojson) = metadata.infojson {
+                        let infojson: infojson::YVideo =
+                            serde_json::from_str(&infojson).context("parsing infojson")?;
-                    node.public.kind = Some(NodeKind::Video);
-                    node.public.title = Some(infojson.title);
-                    node.public.description = Some(infojson.description);
-                    node.public.tagline = Some(infojson.webpage_url);
-                    node.public
-                        .ratings
-                        .insert(Rating::YoutubeViews, infojson.view_count as f64);
-                    node.public.ratings.extend(
-                        infojson
-                            .like_count
-                            .map(|l| (Rating::YoutubeLikes, l as f64)),
-                    );
+                        node.public.kind = Some(NodeKind::Video);
+                        node.public.title = Some(infojson.title);
+                        node.public.description = Some(infojson.description);
+                        node.public.tagline = Some(infojson.webpage_url);
+                        node.public
+                            .ratings
+                            .insert(Rating::YoutubeViews, infojson.view_count as f64);
+                        node.public.ratings.extend(
+                            infojson
+                                .like_count
+                                .map(|l| (Rating::YoutubeLikes, l as f64)),
+                        );
+                    }
                 }
-            }
-            insert_node(&id, node)?;
+                insert_node(&id, node)?;
+            } else {
+                warn!("non file/dir import ignored")
+            }
         }
         ImportSource::Federated { host } => {
             let session = fed.get_session(&host).await.context("creating session")?;
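The new directory branch above imports each entry recursively and records the inferred child ids on a parent node. A standalone sketch of that traversal shape, with a plain Vec<String> standing in for the crate's Node type:

use std::io;
use std::path::Path;

// One id per entry; subdirectories recurse, and anything that is neither
// a file nor a directory is skipped, like the warn-and-ignore arm above.
fn collect_children(dir: &Path, children: &mut Vec<String>) -> io::Result<()> {
    for entry in dir.read_dir()? {
        let path = entry?.path();
        if path.is_dir() {
            collect_children(&path, children)?;
        } else if path.is_file() {
            children.push(path.display().to_string());
        }
    }
    Ok(())
}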
@@ -351,6 +377,32 @@ async fn process_source(
     Ok(())
 }
 
+static RE_YOUTUBE_ID: LazyLock<Regex> =
+    LazyLock::new(|| Regex::new(r#"\[([A-Za-z0-9_-]{11})\]"#).unwrap());
+
+pub fn infer_id_from_path(path: &Path) -> anyhow::Result<String> {
+    let f = path
+        .file_stem()
+        .ok_or(anyhow!("no filename"))?
+        .to_str()
+        .ok_or(anyhow!("non utf8 filename"))?;
+
+    if let Some(mat) = RE_YOUTUBE_ID.captures(f) {
+        let id = mat.get(1).unwrap().as_str();
+        return Ok(format!("youtube-{id}"));
+    }
+
+    let mut fsan = String::with_capacity(f.len());
+    for c in f.chars() {
+        fsan.extend(match c {
+            'A'..='Z' | 'a'..='z' | '0'..='9' | '_' | '-' => Some(c),
+            ' ' => Some('-'),
+            _ => None,
+        });
+    }
+    Ok(fsan)
+}
+
 fn merge_node(x: Node, y: Node) -> Node {
     Node {
         public: NodePublic {
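For reference, the intended behaviour of infer_id_from_path added above: a yt-dlp-style bracketed 11-character id in the file stem maps to a youtube- id, and any other stem is sanitised to [A-Za-z0-9_-] with spaces turned into dashes. Hypothetical example filenames:

use std::path::Path;

fn main() -> anyhow::Result<()> {
    // Bracketed video id wins, regardless of the rest of the stem.
    assert_eq!(
        infer_id_from_path(Path::new("Some Video [dQw4w9WgXcQ].mkv"))?,
        "youtube-dQw4w9WgXcQ"
    );
    // No id tag: spaces become '-', parentheses are dropped.
    assert_eq!(
        infer_id_from_path(Path::new("My Movie (2024).mkv"))?,
        "My-Movie-2024"
    );
    Ok(())
}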