author     metamuffin <metamuffin@disroot.org>  2025-03-22 19:48:53 +0100
committer  metamuffin <metamuffin@disroot.org>  2025-03-22 19:48:53 +0100
commit     205aca90d6c21f3dd73b576c06f5ab8c1010772a (patch)
tree       8a5b58c2b1aca919d94bcdb1fe7d4edb91968ca9 /src
parent     2ee2f1af847dbc9f1292baefc9fd652167b9103a (diff)
horrible pptr external files system
Diffstat (limited to 'src')
-rw-r--r--  src/assetbundle.rs        132
-rw-r--r--  src/classes/mod.rs          2
-rw-r--r--  src/classes/pptr.rs        69
-rw-r--r--  src/classes/renderer.rs    93
-rw-r--r--  src/object/mod.rs           6
-rw-r--r--  src/object/read.rs         15
-rw-r--r--  src/serialized_file.rs     32
7 files changed, 255 insertions, 94 deletions
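
A minimal usage sketch of the AssetBundle API as changed by this commit. The crate import path, bundle filename, and support directory below are assumptions for illustration; per the diff, open() now takes a second argument pointing at the directory that holds the "unity default resources" file, and the toplevel enumerators take &mut self and return an owned Vec<PPtr>.

use std::{fs::File, io::BufReader};
use unity_tools::assetbundle::AssetBundle; // assumed crate/module path

fn main() -> anyhow::Result<()> {
    // Any Read + Seek source works; a buffered file is the common case.
    let reader = BufReader::new(File::open("example.unity3d")?);
    // New second argument: directory containing "unity default resources".
    let mut bundle = AssetBundle::open(reader, "UnitySupport")?;

    // all_toplevel_of_class now needs &mut self (serialized files are opened
    // lazily and cached via get_fs_file) and returns an owned Vec<PPtr>,
    // each PPtr carrying its ExternalsContext instead of a source-file index.
    for pptr in bundle.all_toplevel_of_class("MeshRenderer") {
        println!("path_id={}", pptr.path_id);
    }
    Ok(())
}
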
diff --git a/src/assetbundle.rs b/src/assetbundle.rs
index 8f6af15..0af9895 100644
--- a/src/assetbundle.rs
+++ b/src/assetbundle.rs
@@ -1,81 +1,129 @@
use crate::{
classes::pptr::PPtr,
serialized_file::SerializedFile,
- unityfs::{NodeReader, UnityFS, block_reader::BlockReader, multi_reader::MultiReader},
+ unityfs::{
+ NodeReader, UnityFS, block_reader::BlockReader, header::NodeInfo, multi_reader::MultiReader,
+ },
};
use anyhow::{Context, Result, anyhow};
use log::debug;
use std::{
- io::{Read, Seek},
+ collections::HashMap,
+ fs::File,
+ io::{BufReader, Read, Seek},
marker::PhantomData,
+ path::Path,
+ sync::{Arc, Mutex},
};
/// High-level wrapper around UnityFS, SerializedFile and all the classes.
pub struct AssetBundle<T> {
pub fs: UnityFS<T>,
- pub(crate) main: SerializedFile<NodeReader<BlockReader<MultiReader<T>>>>,
- pub(crate) shared_assets: Option<SerializedFile<NodeReader<BlockReader<MultiReader<T>>>>>,
+ pub ser_files:
+ HashMap<String, Arc<Mutex<SerializedFile<NodeReader<BlockReader<MultiReader<T>>>>>>>,
+ pub main_file: NodeInfo,
+ pub default_resources: SerializedFile<BufReader<File>>,
}
impl<T: Read + Seek> AssetBundle<T> {
- pub fn open(inner: T) -> Result<Self> {
+ pub fn open(inner: T, support_dir: impl AsRef<Path>) -> Result<Self> {
let fs = UnityFS::open(inner).context("opening UnityFS")?;
- let main_ni = fs
+ let main_file = fs
.find_main_file()
- .ok_or(anyhow!("AssetBundle seems to lack main file"))?;
- debug!("detected {:?} as main file", main_ni.name);
- let main = SerializedFile::read(fs.read(main_ni)?, 0)?;
- let shared_assets = if let Some(n) = main.find_fs_shared_assets(&fs) {
- debug!("detect {:?} as shared assets", n.name);
- Some(SerializedFile::read(fs.read(&n)?, 1)?)
- } else {
- None
- };
+ .ok_or(anyhow!("AssetBundle seems to lack main file"))?
+ .clone();
+ debug!("opening default resource file");
+ let default_resources = SerializedFile::read(
+ BufReader::new(File::open(
+ support_dir.as_ref().join("unity default resources"),
+ )?),
+ "Library/unity default resources".to_owned(),
+ )?;
+ debug!("detected {:?} as main file", main_file.name);
Ok(Self {
fs,
- main,
- shared_assets,
+ main_file,
+ default_resources,
+ ser_files: HashMap::new(),
})
}
- pub fn all_toplevel(&self) -> impl Iterator<Item = PPtr> {
- self.main
+ pub fn get_fs_file(
+ &mut self,
+ node: &NodeInfo,
+ ) -> Result<Arc<Mutex<SerializedFile<NodeReader<BlockReader<MultiReader<T>>>>>>> {
+ if !self.ser_files.contains_key(&node.name) {
+ let file =
+ SerializedFile::read(self.fs.read(node)?, format!("archive:/{}", node.name))?;
+ self.ser_files
+ .insert(node.name.clone(), Arc::new(Mutex::new(file)));
+ }
+ Ok(self.ser_files.get(&node.name).unwrap().clone())
+ }
+
+ pub fn all_toplevel(&mut self) -> Vec<PPtr> {
+ let main = self.get_fs_file(&self.main_file.clone()).unwrap();
+ let main = main.lock().unwrap();
+ let shared_assets = main.find_fs_shared_assets(&self.fs);
+ let main_obs = main
.objects
.iter()
- .map(|o| (0, o))
- .chain(
- self.shared_assets
- .as_ref()
- .map(|e| e.objects.iter().map(|o| (1, o)))
- .into_iter()
- .flatten(),
- )
- .map(|(si, o)| PPtr {
+ .map(|o| (main.ecx.clone(), o.to_owned()))
+ .collect::<Vec<_>>();
+ let shared_obs = if let Some(shared_assets) = shared_assets {
+ let shared = self.get_fs_file(&shared_assets).unwrap();
+ let shared = shared.lock().unwrap();
+ shared
+ .objects
+ .iter()
+ .map(|o| (shared.ecx.clone(), o.to_owned()))
+ .collect()
+ } else {
+ Vec::new()
+ };
+
+ main_obs
+ .into_iter()
+ .chain(shared_obs)
+ .map(|(ecx, o)| PPtr {
class: "".to_string(),
- source_file: si,
+ ecx,
file_id: 0,
path_id: o.path_id,
_class: PhantomData,
})
+ .collect()
}
- pub fn all_toplevel_of_class(&self, class_name: &str) -> impl Iterator<Item = PPtr> {
- self.main
+ pub fn all_toplevel_of_class(&mut self, class_name: &str) -> Vec<PPtr> {
+ let main = self.get_fs_file(&self.main_file.clone()).unwrap();
+ let main = main.lock().unwrap();
+ let shared_assets = main.find_fs_shared_assets(&self.fs);
+ let main_obs = main
.all_objects_of_class(class_name)
- .map(|o| (0, o))
- .chain(
- self.shared_assets
- .as_ref()
- .map(|e| e.all_objects_of_class(class_name).map(|o| (1, o)))
- .into_iter()
- .flatten(),
- )
- .map(|(si, o)| PPtr {
- class: class_name.to_owned(),
- source_file: si,
+ .map(|o| (main.ecx.clone(), o.to_owned()))
+ .collect::<Vec<_>>();
+ let shared_obs = if let Some(shared_assets) = shared_assets {
+ let shared = self.get_fs_file(&shared_assets).unwrap();
+ let shared = shared.lock().unwrap();
+ shared
+ .all_objects_of_class(class_name)
+ .map(|o| (shared.ecx.clone(), o.to_owned()))
+ .collect()
+ } else {
+ Vec::new()
+ };
+
+ main_obs
+ .into_iter()
+ .chain(shared_obs)
+ .map(|(ecx, o)| PPtr {
+ class: "".to_string(),
+ ecx,
file_id: 0,
path_id: o.path_id,
_class: PhantomData,
})
+ .collect()
}
}
diff --git a/src/classes/mod.rs b/src/classes/mod.rs
index 165d41e..bff033a 100644
--- a/src/classes/mod.rs
+++ b/src/classes/mod.rs
@@ -7,7 +7,7 @@ pub mod streaminginfo;
pub mod texture2d;
pub mod transform;
pub mod vectors;
-pub mod mesh_renderer;
+pub mod renderer;
pub mod shader;
use crate::object::{Value, parser::FromValue};
diff --git a/src/classes/pptr.rs b/src/classes/pptr.rs
index 0e66b5f..5d57c20 100644
--- a/src/classes/pptr.rs
+++ b/src/classes/pptr.rs
@@ -1,13 +1,15 @@
use crate::{
assetbundle::AssetBundle,
object::{Value, parser::FromValue},
+ serialized_file::ExternalsContext,
};
-use anyhow::{Result, anyhow, bail};
+use anyhow::{Context, Result, anyhow, bail};
use log::debug;
use serde::Serialize;
use std::{
io::{Read, Seek},
marker::PhantomData,
+ sync::Arc,
};
#[derive(Debug, Serialize)]
@@ -15,19 +17,15 @@ pub struct PPtr<T = Value> {
#[serde(skip, default)]
pub(crate) _class: PhantomData<T>,
pub class: String,
- pub source_file: usize,
+ #[serde(skip)]
+ pub ecx: Arc<ExternalsContext>,
pub file_id: i32,
pub path_id: i64,
}
impl<T> FromValue for PPtr<T> {
fn from_value(v: Value) -> Result<Self> {
- let Value::Object {
- class,
- fields,
- file,
- } = v
- else {
+ let Value::Object { class, fields, ecx } = v else {
bail!("PPtr expected but not an object")
};
let inner = class
@@ -38,7 +36,7 @@ impl<T> FromValue for PPtr<T> {
Ok(PPtr {
class: inner.to_owned(),
_class: PhantomData,
- source_file: file,
+ ecx,
file_id: fields["m_FileID"]
.as_i32()
.ok_or(anyhow!("PPtr m_FileID is not i32"))?,
@@ -54,7 +52,7 @@ impl<T: FromValue> PPtr<T> {
PPtr {
_class: PhantomData,
class: self.class,
- source_file: self.source_file,
+ ecx: self.ecx.clone(),
file_id: self.file_id,
path_id: self.path_id,
}
@@ -67,32 +65,45 @@ impl<T: FromValue> PPtr<T> {
"loading PPtr<{}> file_id={} path_id={}",
self.class, self.file_id, self.path_id
);
- let main_file = match (self.source_file, self.file_id) {
- (0, 0) => true,
- (0, 1) => false,
- (1, 0) => false,
- _ => unreachable!(),
+ let path = if self.file_id == 0 {
+ &self.ecx.name
+ } else {
+ &self.ecx.externals[self.file_id as usize - 1].path_name
};
- if main_file {
- let ob = bundle
- .main
- .objects
+ if let Some(path) = path.strip_prefix("archive:") {
+ let path = path.split_once("/").unwrap_or(("", path)).1;
+ let ni = bundle
+ .fs
+ .header
+ .nodes()
.iter()
- .find(|o| o.path_id == self.path_id)
- .ok_or(anyhow!("object with path id {} not found", self.path_id))?
- .to_owned();
- bundle.main.read_object(ob)?.parse()
- } else {
- let file = bundle.shared_assets.as_mut().ok_or(anyhow!(
- "shared assets referenced but not included in bundle"
- ))?;
+ .find(|n| n.name == path)
+ .unwrap()
+ .clone();
+ let file = bundle.get_fs_file(&ni).unwrap();
+ let mut file = file.lock().unwrap();
let ob = file
.objects
.iter()
.find(|o| o.path_id == self.path_id)
- .ok_or(anyhow!("object with path id {} not found", self.path_id))?
- .to_owned();
+ .unwrap()
+ .clone();
file.read_object(ob)?.parse()
+ } else if *path == bundle.default_resources.ecx.name {
+ let ob = bundle
+ .default_resources
+ .objects
+ .iter()
+ .find(|o| o.path_id == self.path_id)
+ .unwrap()
+ .clone();
+ bundle
+ .default_resources
+ .read_object(ob)
+ .context("reading object from default res file")?
+ .parse()
+ } else {
+ unreachable!("{path:?}")
}
}
}
diff --git a/src/classes/renderer.rs b/src/classes/renderer.rs
new file mode 100644
index 0000000..cd50524
--- /dev/null
+++ b/src/classes/renderer.rs
@@ -0,0 +1,93 @@
+use super::{
+ gameobject::GameObject, material::Material, mesh::Mesh, pptr::PPtr, transform::Transform,
+};
+use crate::object::{
+ Value,
+ parser::{Fields, FromValue},
+};
+use anyhow::{Context, Result};
+
+pub struct Renderer {
+ pub enabled: bool,
+ pub cast_shadows: u8,
+ pub game_object: PPtr<GameObject>,
+ pub materials: Vec<PPtr<Material>>,
+}
+pub struct MeshRenderer {
+ pub renderer: Renderer,
+ pub additional_vertex_streams: PPtr<Mesh>,
+}
+
+pub struct SkinnedMeshRenderer {
+ pub renderer: Renderer,
+ pub bones: Vec<PPtr<Transform>>,
+ pub mesh: PPtr<Mesh>,
+ pub root_bone: PPtr<Transform>,
+}
+
+pub struct MeshFilter {
+ pub gameobject: PPtr<GameObject>,
+ pub mesh: PPtr<Mesh>,
+}
+
+impl FromValue for Renderer {
+ fn from_value(v: Value) -> Result<Self> {
+ Self::from_fields(v.as_class("Renderer")?)
+ }
+}
+impl Renderer {
+ pub fn from_fields(mut fields: Fields) -> Result<Self> {
+ Ok(Self {
+ enabled: fields.field("m_Enabled")?,
+ cast_shadows: fields.field("m_CastShadows")?,
+ game_object: fields
+ .field("m_GameObject")
+ .context("gameobject of meshrenderer")?,
+ materials: fields
+ .remove("m_Materials")
+ .unwrap()
+ .as_vector()
+ .unwrap()
+ .into_iter()
+ .map(|e| e.parse().unwrap())
+ .collect(),
+ })
+ }
+}
+
+impl FromValue for MeshRenderer {
+ fn from_value(v: Value) -> Result<Self> {
+ let mut fields = v.as_class("MeshRenderer")?;
+ Ok(Self {
+ additional_vertex_streams: fields.field("m_AdditionalVertexStreams")?,
+ renderer: Renderer::from_fields(fields)?,
+ })
+ }
+}
+impl FromValue for SkinnedMeshRenderer {
+ fn from_value(v: Value) -> Result<Self> {
+ let mut fields = v.as_class("SkinnedMeshRenderer")?;
+ Ok(Self {
+ root_bone: fields.field("m_RootBone")?,
+ mesh: fields.field("m_Mesh")?,
+ bones: fields
+ .remove("m_Bones")
+ .unwrap()
+ .as_vector()
+ .unwrap()
+ .into_iter()
+ .map(|e| e.parse().unwrap())
+ .collect(),
+ renderer: Renderer::from_fields(fields)?,
+ })
+ }
+}
+impl FromValue for MeshFilter {
+ fn from_value(v: Value) -> Result<Self> {
+ let mut fields = v.as_class("MeshFilter")?;
+ Ok(Self {
+ mesh: fields.field("m_Mesh")?,
+ gameobject: fields.field("m_GameObject")?,
+ })
+ }
+}
diff --git a/src/object/mod.rs b/src/object/mod.rs
index 08f76cd..43cb011 100644
--- a/src/object/mod.rs
+++ b/src/object/mod.rs
@@ -1,5 +1,6 @@
+use crate::serialized_file::ExternalsContext;
use serde::Serialize;
-use std::collections::BTreeMap;
+use std::{collections::BTreeMap, sync::Arc};
pub mod helper;
pub mod parser;
@@ -21,7 +22,8 @@ pub enum Value {
Array(Vec<Value>),
Object {
class: String,
- file: usize,
+ #[serde(skip)]
+ ecx: Arc<ExternalsContext>,
fields: BTreeMap<String, Value>,
},
Typeless(Vec<u8>),
diff --git a/src/object/read.rs b/src/object/read.rs
index c41163a..013874c 100644
--- a/src/object/read.rs
+++ b/src/object/read.rs
@@ -1,16 +1,17 @@
use super::Value;
use crate::helper::{AlignExt, Endianness, ReadExt};
-use crate::serialized_file::TypeTreeNode;
+use crate::serialized_file::{ExternalsContext, TypeTreeNode};
use anyhow::{Result, bail};
use log::trace;
use std::io::Seek;
+use std::sync::Arc;
use std::{collections::BTreeMap, io::Read};
impl Value {
pub fn read(
ty: &TypeTreeNode,
e: Endianness,
- file: usize,
+ ecx: &Arc<ExternalsContext>,
data: &mut (impl Read + Seek),
) -> Result<Value> {
let mut align = false;
@@ -41,7 +42,7 @@ impl Value {
Ok(Value::F64(data.read_f64(e)?))
}
"string" => {
- let Value::Array(arr) = Value::read(&ty.children[0], e, file, data)? else {
+ let Value::Array(arr) = Value::read(&ty.children[0], e, ecx, data)? else {
unreachable!()
};
let bytes = arr
@@ -56,13 +57,13 @@ impl Value {
"Array" => {
align |= ty.children[0].post_align();
assert_eq!(ty.byte_size, -1);
- let Value::I32(size) = Value::read(&ty.children[0], e, file, data)? else {
+ let Value::I32(size) = Value::read(&ty.children[0], e, ecx, data)? else {
unreachable!()
};
trace!("array of size {size}");
let mut elems = Vec::new();
for _ in 0..size {
- elems.push(Value::read(&ty.children[1], e, file, data)?);
+ elems.push(Value::read(&ty.children[1], e, ecx, data)?);
}
Ok(Value::Array(elems))
}
@@ -78,11 +79,11 @@ impl Value {
}
let mut fields = BTreeMap::new();
for c in &ty.children {
- fields.insert(c.name_string.clone(), Value::read(&c, e, file, data)?);
+ fields.insert(c.name_string.clone(), Value::read(&c, e, ecx, data)?);
}
Ok(Value::Object {
fields,
- file,
+ ecx: ecx.to_owned(),
class: ty.type_string.clone(),
})
}
diff --git a/src/serialized_file.rs b/src/serialized_file.rs
index a37ff46..3ca32aa 100644
--- a/src/serialized_file.rs
+++ b/src/serialized_file.rs
@@ -6,7 +6,10 @@ use crate::{
};
use anyhow::{Result, anyhow, bail};
use log::{debug, info, trace, warn};
-use std::io::{Cursor, Read, Seek, SeekFrom};
+use std::{
+ io::{Cursor, Read, Seek, SeekFrom},
+ sync::Arc,
+};
#[derive(Debug, Clone)]
pub struct TypeTreeNode {
@@ -58,10 +61,9 @@ pub struct External {
pub struct SerializedFile<T> {
pub file: T,
- pub file_source_id: usize,
pub header: SerializedFileHeader,
pub types: Vec<SeralizedType>,
- pub externals: Vec<External>,
+ pub ecx: Arc<ExternalsContext>,
pub scripts: Vec<Script>,
pub objects: Vec<ObjectInfo>,
pub user_string: String,
@@ -73,7 +75,7 @@ impl<T> std::fmt::Debug for SerializedFile<T> {
f.debug_struct("SerializedFile")
.field("header", &self.header)
.field("types", &self.types)
- .field("externals", &self.externals)
+ .field("externals", &self.ecx)
.field("scripts", &self.scripts)
.field("objects", &self.objects)
.field("user_string", &self.user_string)
@@ -82,6 +84,12 @@ impl<T> std::fmt::Debug for SerializedFile<T> {
}
}
+#[derive(Debug, Default)]
+pub struct ExternalsContext {
+ pub name: String,
+ pub externals: Vec<External>,
+}
+
#[derive(Debug)]
pub struct SerializedFileHeader {
_metadata_size: u32,
@@ -139,7 +147,7 @@ impl SerializedFileHeader {
}
impl<T: Read + Seek> SerializedFile<T> {
- pub fn read(mut file: T, file_source_id: usize) -> Result<SerializedFile<T>> {
+ pub fn read(mut file: T, name: String) -> Result<SerializedFile<T>> {
let h = SerializedFileHeader::read(&mut file)?;
let e = h.endianness;
@@ -291,6 +299,7 @@ impl<T: Read + Seek> SerializedFile<T> {
let guid = file.read_u128_be()?;
let r#type = file.read_i32(e)?;
let path_name = file.read_cstr()?;
+ debug!("external {path_name:?}");
externals.push(External {
guid,
path_name,
@@ -299,6 +308,8 @@ impl<T: Read + Seek> SerializedFile<T> {
})
}
+ let externals = Arc::new(ExternalsContext { name, externals });
+
if h.format >= 20 {
let num_ref_types = file.read_i32(e)?;
debug!("num_ref_types={num_ref_types}");
@@ -314,17 +325,17 @@ impl<T: Read + Seek> SerializedFile<T> {
file,
header: h,
types,
- externals,
+ ecx: externals,
endianness: e,
objects,
scripts,
user_string,
- file_source_id,
})
}
pub fn find_fs_shared_assets(&self, fs: &UnityFS<impl Read + Seek>) -> Option<NodeInfo> {
let s = self
+ .ecx
.externals
.iter()
.find(|e| e.path_name.starts_with("archive:"))?;
@@ -358,12 +369,7 @@ impl<T: Read + Seek> SerializedFile<T> {
.type_tree
.as_ref()
.ok_or(anyhow!("type tree missing"))?;
- Value::read(
- typetree,
- self.endianness,
- self.file_source_id,
- &mut self.file,
- )
+ Value::read(typetree, self.endianness, &self.ecx, &mut self.file)
}
pub fn all_objects_of_class(&self, class_name: &str) -> impl Iterator<Item = &ObjectInfo> {