aboutsummaryrefslogtreecommitdiff
path: root/common/object/src/value.rs
diff options
context:
space:
mode:
Diffstat (limited to 'common/object/src/value.rs')
-rw-r--r--common/object/src/value.rs62
1 file changed, 49 insertions, 13 deletions
diff --git a/common/object/src/value.rs b/common/object/src/value.rs
index 3a8b7df..d77d53a 100644
--- a/common/object/src/value.rs
+++ b/common/object/src/value.rs
@@ -4,9 +4,9 @@
Copyright (C) 2026 metamuffin <metamuffin.org>
*/
-use crate::Object;
+use crate::{Object, ObjectBuffer};
-pub trait Value<'a>: Sized {
+pub trait Value<'a>: ValueStore + Sized {
const ALIGNED: bool;
fn load_aligned(buf: &'a [u32]) -> Option<Self> {
let _ = buf;
@@ -16,7 +16,11 @@ pub trait Value<'a>: Sized {
let _ = buf;
None
}
- fn store(&self, buf: &mut Vec<u8>);
+}
+pub trait ValueStore {
+ fn is_aligned(&self) -> bool;
+ fn store_aligned(&self, _buf: &mut Vec<u32>) {}
+ fn store_unaligned(&self, _buf: &mut Vec<u8>) {}
fn size(&self) -> usize;
}
impl<'a> Value<'a> for &'a str {
@@ -24,7 +28,12 @@ impl<'a> Value<'a> for &'a str {
fn load_unaligned(buf: &'a [u8]) -> Option<Self> {
str::from_utf8(buf).ok()
}
- fn store(&self, buf: &mut Vec<u8>) {
+}
+impl ValueStore for &str {
+ fn is_aligned(&self) -> bool {
+ false
+ }
+ fn store_unaligned(&self, buf: &mut Vec<u8>) {
buf.extend(self.as_bytes());
}
fn size(&self) -> usize {
@@ -32,12 +41,17 @@ impl<'a> Value<'a> for &'a str {
}
}
impl Value<'_> for u32 {
- const ALIGNED: bool = false;
+ const ALIGNED: bool = true;
fn load_aligned(buf: &[u32]) -> Option<Self> {
buf.get(0).copied()
}
- fn store(&self, buf: &mut Vec<u8>) {
- buf.extend(self.to_ne_bytes());
+}
+impl ValueStore for u32 {
+ fn is_aligned(&self) -> bool {
+ true
+ }
+ fn store_aligned(&self, buf: &mut Vec<u32>) {
+ buf.push(*self);
}
fn size(&self) -> usize {
4
@@ -50,8 +64,14 @@ impl Value<'_> for u64 {
let lo = *buf.get(1)? as u64;
Some(hi << 32 | lo)
}
- fn store(&self, buf: &mut Vec<u8>) {
- buf.extend(self.to_ne_bytes());
+}
+impl ValueStore for u64 {
+ fn is_aligned(&self) -> bool {
+ true
+ }
+ fn store_aligned(&self, buf: &mut Vec<u32>) {
+ buf.push((self >> 32) as u32);
+ buf.push(*self as u32);
}
fn size(&self) -> usize {
8
@@ -62,12 +82,28 @@ impl<'a> Value<'a> for Object<'a> {
fn load_aligned(buf: &'a [u32]) -> Option<Self> {
Self::load(buf)
}
- fn store(&self, buf: &mut Vec<u8>) {
- buf.extend(self.tags.iter().copied().map(u32::to_ne_bytes).flatten());
- buf.extend(self.offsets.iter().copied().map(u32::to_ne_bytes).flatten());
- buf.extend(self.values.iter().copied().map(u32::to_ne_bytes).flatten());
+}
+impl ValueStore for Object<'_> {
+ fn is_aligned(&self) -> bool {
+ true
+ }
+ fn store_aligned(&self, buf: &mut Vec<u32>) {
+ buf.extend(self.tags);
+ buf.extend(self.offsets);
+ buf.extend(self.values);
}
fn size(&self) -> usize {
(self.tags.len() + self.offsets.len() + self.values.len()) * size_of::<u32>()
}
}
+impl ValueStore for ObjectBuffer {
+ fn is_aligned(&self) -> bool {
+ true
+ }
+ fn store_aligned(&self, buf: &mut Vec<u32>) {
+ buf.extend(&self.0);
+ }
+ fn size(&self) -> usize {
+ self.0.len() * 4
+ }
+}