path: root/base/src
author     metamuffin <metamuffin@disroot.org>  2025-04-18 23:33:29 +0200
committer  metamuffin <metamuffin@disroot.org>  2025-04-18 23:33:29 +0200
commit     a123a1997f3ab527ab83b44ca18bec94883f46d0 (patch)
tree       761c4f0e8e9bbb7834e59af3d6904dee39932923 /base/src
parent     5b6fd021cc84ae7f5e1719ff398ff4627493a13c (diff)
use impl Hash for cache key instead of string
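For illustration, a hypothetical call site before and after this change (node_id, width, and write_thumbnail are placeholders, not code from this repository): the old API forced every key component to be flattened into a string slice, while the new one takes any value implementing Hash next to the kind prefix.

// Before: every key part had to be stringified into the seed slice.
let width_str = width.to_string();
let path = cache_file(&["thumbnail", node_id.as_str(), width_str.as_str()], |file| {
    write_thumbnail(file, &node_id, width)
})?;

// After: the kind stays a string, the key is any `impl Hash` (a tuple works).
let path = cache_file("thumbnail", (&node_id, width), |file| {
    write_thumbnail(file, &node_id, width)
})?;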
Diffstat (limited to 'base/src')
-rw-r--r--  base/src/cache.rs | 57
1 file changed, 39 insertions(+), 18 deletions(-)
diff --git a/base/src/cache.rs b/base/src/cache.rs
index 0b28e1b..40c6e7b 100644
--- a/base/src/cache.rs
+++ b/base/src/cache.rs
@@ -10,11 +10,13 @@ use bincode::{Decode, Encode};
use log::{info, warn};
use rand::random;
use serde::Serialize;
+use sha2::Sha512;
use std::{
any::Any,
collections::{BTreeMap, HashMap},
fs::rename,
future::Future,
+ hash::{Hash, Hasher},
io::Seek,
path::PathBuf,
sync::{
@@ -36,19 +38,28 @@ impl CachePath {
}
}
-pub fn cache_location(seed: &[&str]) -> (usize, CachePath) {
+pub fn cache_location(kind: &str, key: impl Hash) -> (usize, CachePath) {
use sha2::Digest;
- let mut d = sha2::Sha512::new();
- for s in seed {
- d.update(s.as_bytes());
- d.update(b"\0");
+ struct ShaHasher(Sha512);
+ impl Hasher for ShaHasher {
+ fn finish(&self) -> u64 {
+ unreachable!()
+ }
+ fn write(&mut self, bytes: &[u8]) {
+ self.0.update(bytes);
+ }
}
- let d = d.finalize();
+ let mut d = ShaHasher(sha2::Sha512::new());
+ d.0.update(kind);
+ d.0.update(b"\0");
+ key.hash(&mut d);
+
+ let d = d.0.finalize();
let n =
d[0] as usize | ((d[1] as usize) << 8) | ((d[2] as usize) << 16) | ((d[3] as usize) << 24);
let fname = base64::engine::general_purpose::URL_SAFE.encode(d);
- let fname = &fname[..22];
- let fname = format!("{}-{}", seed[0], fname); // about 128 bits
+ let fname = &fname[..30]; // 180 bits
+ let fname = format!("{}-{}", kind, fname);
(n, CachePath(fname.into()))
}
@@ -57,14 +68,15 @@ pub static CACHE_GENERATION_LOCKS: LazyLock<[Mutex<()>; CACHE_GENERATION_BUCKET_
LazyLock::new(|| [(); CACHE_GENERATION_BUCKET_COUNT].map(|_| Mutex::new(())));
pub async fn async_cache_file<Fun, Fut>(
- seed: &[&str],
+ kind: &str,
+ key: impl Hash,
generate: Fun,
) -> Result<CachePath, anyhow::Error>
where
Fun: FnOnce(tokio::fs::File) -> Fut,
Fut: Future<Output = Result<(), anyhow::Error>>,
{
- let (bucket, location) = cache_location(seed);
+ let (bucket, location) = cache_location(kind, key);
// we need a lock even if it exists since somebody might be still in the process of writing.
let _guard = CACHE_GENERATION_LOCKS[bucket % CACHE_GENERATION_BUCKET_COUNT]
.lock()
@@ -95,11 +107,15 @@ where
thread_local! { pub static WITHIN_CACHE_FILE: AtomicBool = const { AtomicBool::new(false) }; }
-pub fn cache_file<Fun>(seed: &[&str], mut generate: Fun) -> Result<CachePath, anyhow::Error>
+pub fn cache_file<Fun>(
+ kind: &str,
+ key: impl Hash,
+ mut generate: Fun,
+) -> Result<CachePath, anyhow::Error>
where
Fun: FnMut(std::fs::File) -> Result<(), anyhow::Error>,
{
- let (bucket, location) = cache_location(seed);
+ let (bucket, location) = cache_location(kind, key);
// we need a lock even if it exists since somebody might be still in the process of writing.
let already_within = WITHIN_CACHE_FILE.with(|a| a.swap(true, Ordering::Relaxed));
let _guard = if already_within {
@@ -139,12 +155,16 @@ pub static CACHE_IN_MEMORY_OBJECTS: LazyLock<RwLock<HashMap<PathBuf, InMemoryCac
LazyLock::new(|| RwLock::new(HashMap::new()));
pub static CACHE_IN_MEMORY_SIZE: AtomicUsize = AtomicUsize::new(0);
-pub fn cache_memory<Fun, T>(seed: &[&str], mut generate: Fun) -> Result<Arc<T>, anyhow::Error>
+pub fn cache_memory<Fun, T>(
+ kind: &str,
+ key: impl Hash,
+ mut generate: Fun,
+) -> Result<Arc<T>, anyhow::Error>
where
Fun: FnMut() -> Result<T, anyhow::Error>,
T: Encode + Decode + Send + Sync + 'static,
{
- let (_, location) = cache_location(seed);
+ let (_, location) = cache_location(kind, &key);
{
let mut g = CACHE_IN_MEMORY_OBJECTS.write().unwrap();
if let Some(entry) = g.get_mut(&location.abs()) {
@@ -158,7 +178,7 @@ where
}
}
- let location = cache_file(seed, move |mut file| {
+ let location = cache_file(kind, &key, move |mut file| {
let object = generate()?;
bincode::encode_into_std_write(&object, &mut file, bincode::config::standard())
.context("encoding cache object")?;
@@ -189,7 +209,8 @@ where
}
pub async fn async_cache_memory<Fun, Fut, T>(
- seed: &[&str],
+ kind: &str,
+ key: impl Hash,
generate: Fun,
) -> Result<Arc<T>, anyhow::Error>
where
@@ -197,7 +218,7 @@ where
Fut: Future<Output = Result<T, anyhow::Error>>,
T: Encode + Decode + Send + Sync + 'static,
{
- let (_, location) = cache_location(seed);
+ let (_, location) = cache_location(kind, &key);
{
let mut g = CACHE_IN_MEMORY_OBJECTS.write().unwrap();
if let Some(entry) = g.get_mut(&location.abs()) {
@@ -211,7 +232,7 @@ where
}
}
- let location = async_cache_file(seed, move |mut file| async move {
+ let location = async_cache_file(kind, &key, move |mut file| async move {
let object = generate().await?;
let data = bincode::encode_to_vec(&object, bincode::config::standard())
.context("encoding cache object")?;
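For completeness, a hypothetical async caller under the new signature (NodeMeta, fetch_node_meta, and node_id are placeholders, not part of this commit); the key only needs to implement Hash, and the generator runs only when the object is not already cached:

// node_id: some Copy + Hash identifier, used both as the key and inside the closure.
let meta: Arc<NodeMeta> = async_cache_memory("node-meta", node_id, move || async move {
    fetch_node_meta(node_id).await // Result<NodeMeta, anyhow::Error>
})
.await?;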