Diffstat (limited to 'tool')
-rw-r--r--  tool/src/migrate.rs  374
1 file changed, 318 insertions, 56 deletions
diff --git a/tool/src/migrate.rs b/tool/src/migrate.rs
index 5037bf4..c774832 100644
--- a/tool/src/migrate.rs
+++ b/tool/src/migrate.rs
@@ -6,15 +6,89 @@
 use crate::{Action, MigrateMode};
 use anyhow::{bail, Context};
 use indicatif::ProgressIterator;
-use jellybase::database::{typed_sled::Tree, Database};
+use jellybase::database::{DataAcid, ReadableTable, Ser, T_INVITE, T_USER, T_USER_NODE};
+use jellycommon::user::{NodeUserData, User};
 use log::{info, warn};
-use serde::Serialize;
+use std::io::{BufRead, BufReader};
 use std::{
     fs::File,
-    io::{BufRead, BufReader, BufWriter, Write},
+    io::{BufWriter, Write},
     path::Path,
 };
 
+// macro_rules! process_tree {
+//     ($mode:ident, $save_location:ident, $da:ident, $name:literal, $table:ident, $dt:tt) => {{
+//         let path = $save_location.join($name);
+//         match $mode {
+//             MigrateMode::Export => {
+//                 let mut o = BufWriter::new(File::create(path)?);
+//                 let txn = $da.begin_read()?;
+//                 let table = txn.open_table($table)?;
+
+//                 let len = table.len()?;
+//                 for r in table.iter()?.progress_count(len.try_into().unwrap()) {
+//                     let (k, v) = r?;
+//                     serde_json::to_writer(&mut o, &(k.value(), v.value().0))?;
+//                     writeln!(&mut o)?;
+//                 }
+//                 drop(table);
+//             }
+//             MigrateMode::Import => {
+//                 {
+//                     let txn = $da.begin_read()?;
+//                     let table = txn.open_table($table)?;
+//                     if !table.is_empty()? {
+//                         bail!("tree not empty, `rm -rf` your db please :)")
+//                     }
+//                 }
+
+//                 let Ok(i) = File::open(&path) else {
+//                     warn!("{path:?} does not exist; the import of that tree will be skipped.");
+//                     return Ok(());
+//                 };
+//                 let i = BufReader::new(i);
+//                 for l in i.lines() {
+//                     let l = l?;
+//                     let (k, v) = serde_json::from_str::<$dt>(&l).context("reading db dump item")?;
+//                     {
+//                         let txn = $da.begin_write()?;
+//                         let mut table = txn.open_table($table)?;
+
+//                         table.insert(&convert(k), Ser(v))?;
+//                         drop(table);
+//                         txn.commit()?
+//                     }
+//                 }
+//             }
+//         }
+//     }};
+// }
+
+// pub(crate) fn migrate(action: Action) -> anyhow::Result<()> {
+//     match action {
+//         Action::Migrate {
+//             mode,
+//             save_location,
+//             database,
+//         } => {
+//             std::fs::create_dir_all(&save_location)?;
+
+//             let da = DataAcid::open(&database)?;
+
+//             info!("processing 'user'");
+//             process_tree(mode, &save_location.join("user"), &da, T_USER);
+//             info!("processing 'user_node'");
+//             process_tree(mode, &save_location.join("user_node"), &da, T_USER_NODE);
+//             info!("processing 'invite'");
+//             process_tree(mode, &save_location.join("invite"), &da, T_INVITE);
+//             info!("done");
+
+//             Ok(())
+//         }
+//         _ => unreachable!(),
+//     }
+// }
+
 pub(crate) fn migrate(action: Action) -> anyhow::Result<()> {
     match action {
         Action::Migrate {
@@ -23,73 +97,261 @@ pub(crate) fn migrate(action: Action) -> anyhow::Result<()> {
             database,
         } => {
            std::fs::create_dir_all(&save_location)?;
-            let db = Database::open(&database)?;
+
+            let da = DataAcid::open(&database)?;
 
             info!("processing 'user'");
-            process_tree(mode, &save_location.join("user"), &db.user)?;
+            {
+                let path: &Path = &save_location.join("user");
+                let da = &da;
+                match mode {
+                    MigrateMode::Export => {
+                        let mut o = BufWriter::new(File::create(path)?);
+                        let txn = da.begin_read()?;
+                        let table = txn.open_table(T_USER)?;
+
+                        let len = table.len()?;
+                        for r in table.iter()?.progress_count(len.try_into().unwrap()) {
+                            let (k, v) = r?;
+                            serde_json::to_writer(&mut o, &(k.value(), v.value().0))?;
+                            writeln!(&mut o)?;
+                        }
+                        drop(table);
+                    }
+                    MigrateMode::Import => {
+                        {
+                            let txn = da.begin_read()?;
+                            let table = txn.open_table(T_USER)?;
+                            if !table.is_empty()? {
+                                bail!("tree not empty, `rm -rf` your db please :)")
+                            }
+                        }
+
+                        let Ok(i) = File::open(path) else {
+                            warn!(
+                                "{path:?} does not exist; the import of that tree will be skipped."
+                            );
+                            return Ok(());
+                        };
+                        let i = BufReader::new(i);
+                        for l in i.lines() {
+                            let l = l?;
+                            let (k, v) = serde_json::from_str::<(String, User)>(l.as_str())
+                                .context("reading db dump item")?;
+                            {
+                                let txn = da.begin_write()?;
+                                let mut table = txn.open_table(T_USER)?;
+                                table.insert(k.as_str(), Ser(v))?;
+                                drop(table);
+                                txn.commit()?
+                            }
+                        }
+                    }
+                }
+            };
             info!("processing 'user_node'");
-            process_tree(mode, &save_location.join("user_node"), &db.user_node)?;
+            {
+                let path: &Path = &save_location.join("user_node");
+                let da = &da;
+                match mode {
+                    MigrateMode::Export => {
+                        let mut o = BufWriter::new(File::create(path)?);
+                        let txn = da.begin_read()?;
+                        let table = txn.open_table(T_USER_NODE)?;
+
+                        let len = table.len()?;
+                        for r in table.iter()?.progress_count(len.try_into().unwrap()) {
+                            let (k, v) = r?;
+                            serde_json::to_writer(&mut o, &(k.value(), v.value().0))?;
+                            writeln!(&mut o)?;
+                        }
+                        drop(table);
+                    }
+                    MigrateMode::Import => {
+                        {
+                            let txn = da.begin_read()?;
+                            let table = txn.open_table(T_USER_NODE)?;
+                            if !table.is_empty()? {
+                                bail!("tree not empty, `rm -rf` your db please :)")
+                            }
+                        }
+
+                        let Ok(i) = File::open(path) else {
+                            warn!(
+                                "{path:?} does not exist; the import of that tree will be skipped."
+                            );
+                            return Ok(());
+                        };
+                        let i = BufReader::new(i);
+                        for l in i.lines() {
+                            let l = l?;
+                            let (k, v) = serde_json::from_str::<((String, String), NodeUserData)>(
+                                l.as_str(),
+                            )
+                            .context("reading db dump item")?;
+                            {
+                                let txn = da.begin_write()?;
+                                let mut table = txn.open_table(T_USER_NODE)?;
+
+                                table.insert((k.0.as_str(), k.1.as_str()), Ser(v))?;
+                                drop(table);
+                                txn.commit()?
+                            }
+                        }
+                    }
+                }
+            };
             info!("processing 'invite'");
-            process_tree(mode, &save_location.join("invite"), &db.invite)?;
-            info!("processing 'node'");
-            process_tree(mode, &save_location.join("node"), &db.node)?;
+            {
+                let path: &Path = &save_location.join("invite");
+                let da = &da;
+                match mode {
+                    MigrateMode::Export => {
+                        let mut o = BufWriter::new(File::create(path)?);
+                        let txn = da.begin_read()?;
+                        let table = txn.open_table(T_INVITE)?;
+
+                        let len = table.len()?;
+                        for r in table.iter()?.progress_count(len.try_into().unwrap()) {
+                            let (k, v) = r?;
+                            serde_json::to_writer(&mut o, &(k.value(), v.value().0))?;
+                            writeln!(&mut o)?;
+                        }
+                        drop(table);
+                    }
+                    MigrateMode::Import => {
+                        {
+                            let txn = da.begin_read()?;
+                            let table = txn.open_table(T_INVITE)?;
+                            if !table.is_empty()? {
+                                bail!("tree not empty, `rm -rf` your db please :)")
+                            }
+                        }
+
+                        let Ok(i) = File::open(path) else {
+                            warn!(
+                                "{path:?} does not exist; the import of that tree will be skipped."
+                            );
+                            return Ok(());
+                        };
+                        let i = BufReader::new(i);
+                        for l in i.lines() {
+                            let l = l?;
+                            let (k, _v) = serde_json::from_str::<(String, ())>(l.as_str())
+                                .context("reading db dump item")?;
+                            {
+                                let txn = da.begin_write()?;
+                                let mut table = txn.open_table(T_INVITE)?;
+
+                                table.insert(k.as_str(), Ser(()))?;
+                                drop(table);
+                                txn.commit()?
+                            }
+                        }
+                    }
+                }
+            };
             info!("done");
-            Ok(())
         }
         _ => unreachable!(),
     }
+    Ok(())
 }
 
+/*
-fn process_tree<
-    K: Serialize + for<'de> serde::Deserialize<'de>,
-    V: Serialize + for<'de> serde::Deserialize<'de>,
->(
+fn process_tree<'c, 'd, K, V>(
     mode: MigrateMode,
     path: &Path,
-    tree: &Tree<K, V>,
-) -> anyhow::Result<()> {
+    da: &DataAcid,
+    table: TableDefinition<'static, K, Ser<V>>,
+) -> anyhow::Result<()>
+where
+    K: RedbKey + Owny<'c> + Clone,
+    V: Encode + Decode + Debug + Serialize + Owny<'d> + Clone,
+    <K as Owny<'c>>::Owned: for<'a> serde::Deserialize<'a>,
+    <V as Owny<'d>>::Owned: for<'a> serde::Deserialize<'a>,
+{
     match mode {
-        MigrateMode::Export => export_tree(path, tree),
-        MigrateMode::Import => import_tree(path, tree),
-    }
-}
+        MigrateMode::Export => {
+            // let mut o = BufWriter::new(File::create(path)?);
+            // let txn = da.begin_read()?;
+            // let table = txn.open_table(table)?;
 
-fn export_tree<
-    K: Serialize + for<'de> serde::Deserialize<'de>,
-    V: Serialize + for<'de> serde::Deserialize<'de>,
->(
-    path: &Path,
-    tree: &Tree<K, V>,
-) -> anyhow::Result<()> {
-    let mut o = BufWriter::new(File::create(path)?);
-    let len = tree.len();
-    for r in tree.iter().progress_count(len.try_into().unwrap()) {
-        let (k, v) = r?;
-        serde_json::to_writer(&mut o, &(k, v))?;
-        writeln!(&mut o)?;
-    }
-    Ok(())
-}
+            // let len = table.len()?;
+            // for r in table.iter()?.progress_count(len.try_into().unwrap()) {
+            //     let (k, v) = r?;
+            //     serde_json::to_writer(&mut o, &(k, v.value().0))?;
+            //     writeln!(&mut o)?;
+            // }
+            // drop(table);
+        }
+        MigrateMode::Import => {
+            {
+                let txn = da.begin_read()?;
+                let table = txn.open_table(table)?;
+                if !table.is_empty()? {
+                    bail!("tree not empty, `rm -rf` your db please :)")
+                }
+            }
 
-fn import_tree<
-    K: Serialize + for<'de> serde::Deserialize<'de>,
-    V: Serialize + for<'de> serde::Deserialize<'de>,
->(
-    path: &Path,
-    tree: &Tree<K, V>,
-) -> anyhow::Result<()> {
-    if !tree.is_empty() {
-        bail!("tree not empty, `rm -rf` your db please :)")
-    }
-    let Ok(i) = File::open(path) else {
-        warn!("{path:?} does not exist; the import of that tree will be skipped.");
-        return Ok(());
-    };
-    let i = BufReader::new(i);
-    for l in i.lines() {
-        let l = l?;
-        let (k, v) = serde_json::from_str::<(K, V)>(&l).context("reading db dump item")?;
-        tree.insert(&k, &v)?;
+            let Ok(i) = File::open(path) else {
+                warn!("{path:?} does not exist; the import of that tree will be skipped.");
+                return Ok(());
+            };
+            let i = BufReader::new(i);
+            for l in i.lines() {
+                let l = l?;
+                let (k, v) =
+                    serde_json::from_str::<(<K as Owny>::Owned, <V as Owny>::Owned)>(l.as_str())
+                        .context("reading db dump item")?;
+                {
+                    let (k, v) = (k.borrow(), v.borrow());
+
+                    let txn = da.begin_write()?;
+                    let mut table = txn.open_table(table)?;
+
+                    table.insert(k, Ser(v))?;
+                    drop(table);
+                    txn.commit()?
+                }
+            }
+        }
     }
     Ok(())
-}
+} */
+
+// trait Owny<'a> {
+//     type Owned;
+//     fn borrow(x: &'a Self::Owned) -> Self;
+// }
+// impl<'a> Owny<'a> for &'a str {
+//     type Owned = String;
+//     fn borrow(x: &'a Self::Owned) -> Self {
+//         x.as_str()
+//     }
+// }
+// impl<'a> Owny<'a> for (&'a str, &'a str) {
+//     type Owned = (String, String);
+
+//     fn borrow(x: &'a Self::Owned) -> Self {
+//         (x.0.as_str(), x.1.as_str())
+//     }
+// }
+// impl Owny<'_> for User {
+//     type Owned = User;
+//     fn borrow(x: &Self::Owned) -> Self {
+//         x.to_owned()
+//     }
+// }
+// impl Owny<'_> for NodeUserData {
+//     type Owned = NodeUserData;
+//     fn borrow(x: &Self::Owned) -> Self {
+//         x.to_owned()
+//     }
+// }
+// impl Owny<'_> for () {
+//     type Owned = ();
+//     fn borrow(x: &Self::Owned) -> Self {
+//         x.to_owned()
+//     }
+// }
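
For context on the dump format this commit settles on: the export path writes each table to its own file under the save location ("user", "user_node", "invite"), one serde_json-encoded (key, value) tuple per line, and the import path refuses to run unless the target table is empty. Below is a minimal standalone sketch of reading one of these dumps outside the tool; the dump/invite path and the main() wrapper are illustrative assumptions, not part of the commit, and only std plus the serde_json crate are needed.

// Hypothetical reader for an `invite` dump produced by the export path above:
// one JSON-encoded `[key, value]` tuple per line, where invite values are unit.
use std::fs::File;
use std::io::{BufRead, BufReader};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Assumed location: one file per table under the chosen save_location.
    let file = File::open("dump/invite")?;
    for line in BufReader::new(file).lines() {
        let line = line?;
        // Invite entries are keyed by a string and carry no payload, so the
        // value side deserializes from JSON `null` into `()`.
        let (key, _unit): (String, ()) = serde_json::from_str(&line)?;
        println!("invite key: {key}");
    }
    Ok(())
}

The same line-per-record layout applies to the other tables; only the tuple type changes, e.g. (String, User) for "user" and ((String, String), NodeUserData) for "user_node", matching the types used in the import arms of the diff.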