From d836e24357b81496c61f3cc9195ba36758523578 Mon Sep 17 00:00:00 2001
From: metamuffin
Date: Sat, 15 Mar 2025 15:18:40 +0100
Subject: more abstraction around unityfs to read multiple files from a single
 reader

---
 src/classes/material.rs      |   9 +-
 src/classes/mesh_renderer.rs |   6 +-
 src/classes/mod.rs           |   1 +
 src/classes/pptr.rs          |  35 +++--
 src/classes/shader.rs        |  42 ++++++
 src/classes/streaminginfo.rs |   1 +
 src/serialized_file.rs       |  13 ++
 src/unityfs.rs               | 327 -------------------------------------------
 src/unityfs/block_reader.rs  |  99 +++++++++++++
 src/unityfs/header.rs        | 175 +++++++++++++++++++++++
 src/unityfs/mod.rs           |  95 +++++++++++++
 src/unityfs/multi_reader.rs  |  50 +++++++
 12 files changed, 513 insertions(+), 340 deletions(-)
 create mode 100644 src/classes/shader.rs
 delete mode 100644 src/unityfs.rs
 create mode 100644 src/unityfs/block_reader.rs
 create mode 100644 src/unityfs/header.rs
 create mode 100644 src/unityfs/mod.rs
 create mode 100644 src/unityfs/multi_reader.rs
(limited to 'src')

diff --git a/src/classes/material.rs b/src/classes/material.rs
index 354e319..9124b0b 100644
--- a/src/classes/material.rs
+++ b/src/classes/material.rs
@@ -1,4 +1,4 @@
-use super::{pptr::PPtr, texture2d::Texture2D, vectors::ColorRGBA};
+use super::{pptr::PPtr, shader::Shader, texture2d::Texture2D, vectors::ColorRGBA};
 use crate::object::{Value, parser::FromValue};
 use glam::Vec2;
 use serde::Serialize;
@@ -29,8 +29,11 @@ pub struct UnityTexEnv {
     pub texture: PPtr,
 }

-#[derive(Debug, Serialize)]
-pub struct Shader {}
+impl UnityPropertySheet {
+    pub fn textures(&self) -> impl Iterator {
+        self.textures.iter().filter(|(_, v)| !v.texture.is_null())
+    }
+}

 impl FromValue for Material {
     fn from_value(v: Value) -> anyhow::Result {
diff --git a/src/classes/mesh_renderer.rs b/src/classes/mesh_renderer.rs
index 94d6fd7..ac69483 100644
--- a/src/classes/mesh_renderer.rs
+++ b/src/classes/mesh_renderer.rs
@@ -1,4 +1,6 @@
-use super::{gameobject::GameObject, mesh::Mesh, pptr::PPtr, transform::Transform};
+use super::{
+    gameobject::GameObject, material::Material, mesh::Mesh, pptr::PPtr, transform::Transform,
+};
 use crate::object::{
     Value,
     parser::{Fields, FromValue},
@@ -10,7 +12,7 @@ pub struct MeshRenderer {
     pub mesh: PPtr,
     pub cast_shadows: u8,
     pub game_object: PPtr,
-    pub materials: Vec<PPtr>,
+    pub materials: Vec<PPtr<Material>>,
 }

 pub struct SkinnedMeshRenderer {
diff --git a/src/classes/mod.rs b/src/classes/mod.rs
index b4fbe91..165d41e 100644
--- a/src/classes/mod.rs
+++ b/src/classes/mod.rs
@@ -8,6 +8,7 @@ pub mod texture2d;
 pub mod transform;
 pub mod vectors;
 pub mod mesh_renderer;
+pub mod shader;

 use crate::object::{Value, parser::FromValue};
 use anyhow::Result;
diff --git a/src/classes/pptr.rs b/src/classes/pptr.rs
index 30f37ad..9b54cbb 100644
--- a/src/classes/pptr.rs
+++ b/src/classes/pptr.rs
@@ -54,17 +54,36 @@ impl PPtr {
     pub fn is_null(&self) -> bool {
         self.path_id == 0 && self.file_id == 0
     }
-    pub fn load(&self, file: &mut SerializedFile) -> Result {
+    pub fn load(
+        &self,
+        file: &mut SerializedFile,
+        shared_assets: Option<&mut SerializedFile>,
+    ) -> Result {
         debug!(
             "loading PPtr<{}> file_id={} path_id={}",
             self.class, self.file_id, self.path_id
         );
-        let ob = file
-            .objects
-            .iter()
-            .find(|o| o.path_id == self.path_id)
-            .ok_or(anyhow!("object with path id {} not found", self.path_id))?
-            .to_owned();
-        file.read_object(ob)?.parse()
+        match self.file_id {
+            0 => {
+                let ob = file
+                    .objects
+                    .iter()
+                    .find(|o| o.path_id == self.path_id)
+                    .ok_or(anyhow!("object with path id {} not found", self.path_id))?
+                    .to_owned();
+                file.read_object(ob)?.parse()
+            }
+            1 => {
+                let file = shared_assets.unwrap();
+                let ob = file
+                    .objects
+                    .iter()
+                    .find(|o| o.path_id == self.path_id)
+                    .ok_or(anyhow!("object with path id {} not found", self.path_id))?
+                    .to_owned();
+                file.read_object(ob)?.parse()
+            }
+            _ => unimplemented!(),
+        }
     }
 }
diff --git a/src/classes/shader.rs b/src/classes/shader.rs
new file mode 100644
index 0000000..86f32b8
--- /dev/null
+++ b/src/classes/shader.rs
@@ -0,0 +1,42 @@
+use super::pptr::PPtr;
+use crate::object::{Value, parser::FromValue};
+use anyhow::Result;
+use serde::Serialize;
+
+#[derive(Debug, Serialize)]
+pub struct Shader {
+    pub dependencies: Vec<PPtr<Shader>>,
+    pub name: String,
+    pub parsed: SerializedShader,
+}
+
+#[derive(Debug, Serialize)]
+pub struct SerializedShader {
+    pub name: String,
+}
+
+impl FromValue for Shader {
+    fn from_value(v: Value) -> Result {
+        let mut fields = v.as_class("Shader")?;
+        Ok(Self {
+            name: fields.field("m_Name")?,
+            parsed: fields.field("m_ParsedForm")?,
+            dependencies: fields
+                .remove("m_Dependencies")
+                .unwrap()
+                .as_vector()
+                .unwrap()
+                .into_iter()
+                .map(|e| e.parse().unwrap())
+                .collect(),
+        })
+    }
+}
+impl FromValue for SerializedShader {
+    fn from_value(v: Value) -> Result {
+        let mut fields = v.as_class("SerializedShader")?;
+        Ok(Self {
+            name: fields.field("m_Name")?,
+        })
+    }
+}
diff --git a/src/classes/streaminginfo.rs b/src/classes/streaminginfo.rs
index 83b9f20..21029f4 100644
--- a/src/classes/streaminginfo.rs
+++ b/src/classes/streaminginfo.rs
@@ -31,6 +31,7 @@ impl StreamingInfo {
             bail!("StreamingInfo path does not start on archive:")
         }
         let nodeinfo = fs
+            .header
             .nodes()
             .iter()
             .find(|n| self.path.ends_with(&n.name))
diff --git a/src/serialized_file.rs b/src/serialized_file.rs
index b3c1e3c..a6514ee 100644
--- a/src/serialized_file.rs
+++ b/src/serialized_file.rs
@@ -2,6 +2,7 @@ use crate::{
     common_strings::COMMON_STRINGS,
     helper::{AlignExt, Endianness, ReadExt},
     object::Value,
+    unityfs::{UnityFS, header::NodeInfo},
 };
 use anyhow::{Result, anyhow, bail};
 use log::{debug, info, trace, warn};
@@ -307,6 +308,18 @@ impl SerializedFile {
         })
     }

+    pub fn find_fs_shared_assets(&self, fs: &UnityFS) -> Option<NodeInfo> {
+        let s = self
+            .externals
+            .iter()
+            .find(|e| e.path_name.starts_with("archive:"))?;
+        fs.header
+            .nodes()
+            .iter()
+            .find(|n| n.name.ends_with(&s.path_name))
+            .cloned()
+    }
+
     pub fn get_object_type_tree(&self, ob: &ObjectInfo) -> Result<&'_ TypeTreeNode> {
         let r#type = if ob.type_id < 0 {
             unimplemented!()
diff --git a/src/unityfs.rs b/src/unityfs.rs
deleted file mode 100644
index 05ad922..0000000
--- a/src/unityfs.rs
+++ /dev/null
@@ -1,327 +0,0 @@
-use crate::helper::{AlignExt, ReadExt};
-use anyhow::{Result, anyhow, bail};
-use humansize::DECIMAL;
-use log::{debug, info, trace};
-use std::io::{Cursor, Error, ErrorKind, Read, Seek, SeekFrom};
-
-pub struct UnityFS {
-    nodes: Vec<NodeInfo>,
-    reader: BlocksReader,
-    pub file_version: u32,
-    pub player_version: String,
-    pub unity_version: String,
-}
-
-#[derive(Debug, Clone)]
-pub struct NodeInfo {
-    pub name: String,
-    pub size: u64,
-    offset: u64,
-    _status: u32,
-}
-
-struct BlockInfo {
-    comp_size: u32,
-    decomp_size: u32,
-    comp_scheme: CompressionScheme,
-}
-
-pub struct NodeReader<'a, T> {
-    inner: &'a mut BlocksReader,
-    position: u64,
-    offset: u64,
-    size: u64,
-}
-
-impl UnityFS {
-    pub fn open(mut file: T) -> Result {
-        let signature = file.read_cstr()?;
-        if signature.as_str() != "UnityFS" {
-            bail!("unknown signature {signature:?}")
-        }
-
-        let file_version = file.read_u32_be()?;
-        let player_version = file.read_cstr()?;
-        let unity_version = file.read_cstr()?;
-        let size = file.read_u64_be()?;
-        let blockindex_comp_size = file.read_u32_be()?;
-        let blockindex_decomp_size = file.read_u32_be()?;
-        let flags = file.read_u32_be()?;
-
-        let meta_comp_scheme = CompressionScheme::from_flag_num(flags as u8).ok_or(anyhow!(
-            "unknown block compression 0x{:02x}",
-            (flags & 0x3f) as u8
-        ))?;
-        let blockindex_eof = flags & 0x80 != 0;
-        let blockindex_has_directory = flags & 0x40 != 0;
-        let blockindex_need_padding = flags & 0x200 != 0;
-
-        info!("File Version: {file_version:?}");
-        info!("Player Version: {player_version:?}");
-        info!("Unity Version: {unity_version:?}");
-        debug!("size={size:?}");
-        debug!("meta_comp_size={blockindex_comp_size:?}");
-        debug!("meta_decomp_size={blockindex_decomp_size:?}");
-        debug!("flags={flags:?}");
-        debug!("meta_comp_scheme={meta_comp_scheme:?}");
-        debug!("blockindex_eof={blockindex_eof:?}");
-        debug!("blockindex_has_directory={blockindex_has_directory:?}");
-        debug!("blockindex_need_padding={blockindex_need_padding:?}");
-
-        let mut blockindex = {
-            let restore_position = if blockindex_eof {
-                let pos = file.stream_position()?;
-                file.seek(SeekFrom::End(-(blockindex_comp_size as i64)))?;
-                Some(pos)
-            } else {
-                None
-            };
-
-            let mut blockindex = vec![0u8; blockindex_comp_size as usize];
-            file.read_exact(&mut blockindex)?;
-
-            if let Some(pos) = restore_position {
-                file.seek(SeekFrom::Start(pos))?;
-            }
-            let blockindex =
-                meta_comp_scheme.decompress(blockindex, blockindex_decomp_size as usize)?;
-            Cursor::new(blockindex)
-        };
-
-        file.align(16)?;
-
-        blockindex.read_u128_be()?;
-
-        let num_blocks = blockindex.read_u32_be()?;
-        info!("File has {num_blocks} blocks");
-        let mut blocks = Vec::new();
-        for _ in 0..num_blocks {
-            let decomp_size = blockindex.read_u32_be()?;
-            let comp_size = blockindex.read_u32_be()?;
-            let flags = blockindex.read_u16_be()?;
-            let comp_scheme = CompressionScheme::from_flag_num(flags as u8)
-                .ok_or(anyhow!("unknown block compression 0x{:02x}", flags & 0x3f))?;
-            blocks.push(BlockInfo {
-                comp_size,
-                decomp_size,
-                comp_scheme,
-            })
-        }
-
-        let num_nodes = blockindex.read_u32_be()?;
-        debug!("num_nodes={num_nodes:?}");
-        let mut nodes = Vec::new();
-        for _ in 0..num_nodes {
-            let offset = blockindex.read_u64_be()?;
-            let size = blockindex.read_u64_be()?;
-            let status = blockindex.read_u32_be()?;
-            let name = blockindex.read_cstr()?;
-            info!(
-                "found node {name:?} (size={}, status={status})",
-                humansize::format_size(size, DECIMAL)
-            );
-            nodes.push(NodeInfo {
-                offset,
-                size,
-                _status: status,
-                name,
-            })
-        }
-
-        let position = file.stream_position()?;
-
-        Ok(Self {
-            file_version,
-            player_version,
-            unity_version,
-            nodes,
-            reader: BlocksReader::new(blocks, file, position),
-        })
-    }
-
-    pub fn nodes(&self) -> &[NodeInfo] {
-        &self.nodes
-    }
-
-    pub fn find_main_file(&self) -> Option<&NodeInfo> {
-        self.nodes().iter().find(|n| {
-            !n.name.ends_with(".resource")
-                && !n.name.ends_with(".resS")
-                && !n.name.ends_with(".sharedAssets")
-        })
-    }
-
-    pub fn read<'a>(&'a mut self, node: &NodeInfo) -> std::io::Result<NodeReader<'a, T>> {
-        self.reader.seek(SeekFrom::Start(node.offset))?;
-        Ok(NodeReader {
-            size: node.size,
-            offset: node.offset,
-            position: 0,
-            inner: &mut self.reader,
-        })
-    }
-}
-
-struct BlocksReader {
-    blocks: Vec<BlockInfo>,
-    inner: T,
-    inner_seek_offset: u64,
-    nblock_index: usize,
-    cblock_data: Vec<u8>,
-    cblock_off: usize,
-}
-impl BlocksReader {
-    pub fn new(blocks: Vec<BlockInfo>, inner: T, inner_seek_offset: u64) -> Self {
-        Self {
-            blocks,
-            inner,
-            inner_seek_offset,
-            nblock_index: 0,
-            cblock_data: Vec::new(),
-            cblock_off: 0,
-        }
-    }
-    pub fn load_next_block(&mut self) -> std::io::Result<()> {
-        trace!("loading block {}", self.nblock_index);
-        let block = &self.blocks[self.nblock_index];
-        let mut comp_buf = vec![0; block.comp_size as usize];
-        self.inner.read_exact(&mut comp_buf)?;
-        let decomp_buf = block
-            .comp_scheme
-            .decompress(comp_buf, block.decomp_size as usize)
-            .map_err(|e| {
-                std::io::Error::new(
-                    ErrorKind::InvalidData,
-                    format!("decompression failure: {e}"),
-                )
-            })?;
-        self.nblock_index += 1;
-        self.cblock_data = decomp_buf;
-        self.cblock_off = 0;
-        Ok(())
-    }
-}
-impl Read for BlocksReader {
-    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
-        if self.cblock_off >= self.cblock_data.len() {
-            self.load_next_block()?;
-        }
-        let size = (self.cblock_data.len() - self.cblock_off).min(buf.len());
-        buf[..size].copy_from_slice(&self.cblock_data[self.cblock_off..self.cblock_off + size]);
-        self.cblock_off += size;
-        Ok(size)
-    }
-}
-impl Seek for BlocksReader {
-    fn seek(&mut self, pos: SeekFrom) -> std::io::Result<u64> {
-        let SeekFrom::Start(pos) = pos else {
-            unimplemented!()
-        };
-        debug!("seek decomp to {pos}");
-        let mut comp_off = self.inner_seek_offset;
-        let mut decomp_off = 0;
-        let mut target_block = None;
-        for (i, b) in self.blocks.iter().enumerate() {
-            if pos <= decomp_off + b.decomp_size as u64 {
-                target_block = Some(i);
-                break;
-            }
-            decomp_off += b.decomp_size as u64;
-            comp_off += b.comp_size as u64;
-        }
-
-        let Some(i) = target_block else {
-            return Err(std::io::Error::new(
-                ErrorKind::UnexpectedEof,
-                "seek out of bounds",
-            ));
-        };
-
-        let block_off = pos - decomp_off;
-        debug!("target is block={i} offset={block_off}");
-        if self.nblock_index == i + 1 {
-            debug!("intra-block seek")
-        } else {
-            debug!("seek comp to {comp_off}");
-            self.inner.seek(SeekFrom::Start(comp_off))?;
-            self.nblock_index = i;
-            self.load_next_block()?;
-        }
-        self.cblock_off = block_off as usize;
-
-        Ok(pos)
-    }
-}
-
-impl Read for NodeReader<'_, T> {
-    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
-        let bytes_left = self.size - self.position;
-        let end = buf.len().min(bytes_left as usize);
-        let size = self.inner.read(&mut buf[..end])?;
-        self.position += size as u64;
-        Ok(size)
-    }
-}
-impl Seek for NodeReader<'_, T> {
-    fn seek(&mut self, pos: SeekFrom) -> std::io::Result<u64> {
-        match pos {
-            SeekFrom::Current(n) if n >= 0 => {
-                for _ in 0..n {
-                    self.read_exact(&mut [0u8])?;
-                }
-                Ok(self.stream_position()?)
-            }
-            SeekFrom::Start(n) => {
-                debug!("seek node to {n} (off={})", self.offset);
-                if n > self.size {
-                    return Err(Error::new(ErrorKind::NotSeekable, "seek out of bounds"));
-                }
-                self.position = n;
-                self.inner.seek(SeekFrom::Start(self.offset + n))
-            }
-            _ => unimplemented!(),
-        }
-    }
-    fn stream_position(&mut self) -> std::io::Result<u64> {
-        Ok(self.position)
-    }
-}
-
-#[derive(Debug, Clone, Copy, PartialEq)]
-enum CompressionScheme {
-    None,
-    Lzma,
-    Lz4,
-    Lz4hc,
-    Lzham,
-}
-impl CompressionScheme {
-    pub fn from_flag_num(n: u8) -> Option {
-        Some(match n & 0x3f {
-            0 => CompressionScheme::None,
-            1 => CompressionScheme::Lzma,
-            2 => CompressionScheme::Lz4,
-            3 => CompressionScheme::Lz4hc,
-            4 => CompressionScheme::Lzham,
-            _ => return None,
-        })
-    }
-    pub fn decompress(&self, block: Vec<u8>, decomp_size: usize) -> Result<Vec<u8>> {
-        match self {
-            CompressionScheme::None => Ok(block),
-            CompressionScheme::Lzma => {
-                let mut r = lzma::Reader::from(Cursor::new(block))?;
-                let mut buf = Vec::new();
-                r.read_to_end(&mut buf)?;
-                Ok(buf)
-            }
-            CompressionScheme::Lz4hc | CompressionScheme::Lz4 => {
-                Ok(lz4_flex::block::decompress(&block, decomp_size)?)
-            }
-            // CompressionScheme::LZ4HC | CompressionScheme::LZ4 => {
-            //     Ok(lz4::block::decompress(&block, Some(decomp_size as i32))?)
-            // }
-            CompressionScheme::Lzham => todo!(),
-        }
-    }
-}
diff --git a/src/unityfs/block_reader.rs b/src/unityfs/block_reader.rs
new file mode 100644
index 0000000..8aa18c3
--- /dev/null
+++ b/src/unityfs/block_reader.rs
@@ -0,0 +1,99 @@
+use super::BlockInfo;
+use log::{debug, trace};
+use std::{
+    io::{ErrorKind, Read, Seek, SeekFrom},
+    sync::Arc,
+};
+
+pub struct BlockReader {
+    blocks: Arc<Vec<BlockInfo>>,
+    inner: T,
+    inner_seek_offset: u64,
+    nblock_index: usize,
+    cblock_data: Vec<u8>,
+    cblock_off: usize,
+}
+
+impl BlockReader {
+    pub fn new(blocks: Arc<Vec<BlockInfo>>, inner: T, inner_seek_offset: u64) -> Self {
+        Self {
+            blocks,
+            inner,
+            inner_seek_offset,
+            nblock_index: 0,
+            cblock_data: Vec::new(),
+            cblock_off: 0,
+        }
+    }
+    pub fn load_next_block(&mut self) -> std::io::Result<()> {
+        trace!("loading block {}", self.nblock_index);
+        let block = &self.blocks[self.nblock_index];
+        let mut comp_buf = vec![0; block.comp_size as usize];
+        self.inner.read_exact(&mut comp_buf)?;
+        let decomp_buf = block
+            .comp_scheme
+            .decompress(comp_buf, block.decomp_size as usize)
+            .map_err(|e| {
+                std::io::Error::new(
+                    ErrorKind::InvalidData,
+                    format!("decompression failure: {e}"),
+                )
+            })?;
+        self.nblock_index += 1;
+        self.cblock_data = decomp_buf;
+        self.cblock_off = 0;
+        Ok(())
+    }
+}
+
+impl Read for BlockReader {
+    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
+        if self.cblock_off >= self.cblock_data.len() {
+            self.load_next_block()?;
+        }
+        let size = (self.cblock_data.len() - self.cblock_off).min(buf.len());
+        buf[..size].copy_from_slice(&self.cblock_data[self.cblock_off..self.cblock_off + size]);
+        self.cblock_off += size;
+        Ok(size)
+    }
+}
+impl Seek for BlockReader {
+    fn seek(&mut self, pos: SeekFrom) -> std::io::Result<u64> {
+        let SeekFrom::Start(pos) = pos else {
+            unimplemented!()
+        };
+        debug!("seek decomp to {pos}");
+        let mut comp_off = self.inner_seek_offset;
+        let mut decomp_off = 0;
+        let mut target_block = None;
+        for (i, b) in self.blocks.iter().enumerate() {
+            if pos <= decomp_off + b.decomp_size as u64 {
+                target_block = Some(i);
+                break;
+            }
+            decomp_off += b.decomp_size as u64;
+            comp_off += b.comp_size as u64;
+        }
+
+        let Some(i) = target_block else {
+            return Err(std::io::Error::new(
+                ErrorKind::UnexpectedEof,
out of bounds", + )); + }; + + let block_off = pos - decomp_off; + debug!("target is block={i} offset={block_off}"); + if self.nblock_index == i + 1 { + debug!("intra-block seek") + } else { + debug!("seek comp to {comp_off}"); + self.inner.seek(SeekFrom::Start(comp_off))?; + self.nblock_index = i; + self.load_next_block()?; + } + self.cblock_off = block_off as usize; + + Ok(pos) + } +} diff --git a/src/unityfs/header.rs b/src/unityfs/header.rs new file mode 100644 index 0000000..d4fc89f --- /dev/null +++ b/src/unityfs/header.rs @@ -0,0 +1,175 @@ +use crate::helper::{AlignExt, ReadExt}; +use anyhow::{Result, anyhow, bail}; +use humansize::DECIMAL; +use log::{debug, info}; +use std::io::{Cursor, Read, Seek, SeekFrom}; + +#[derive(Debug, Clone)] +pub struct NodeInfo { + pub name: String, + pub size: u64, + pub(super) offset: u64, + _status: u32, +} + +pub struct BlockInfo { + pub comp_size: u32, + pub decomp_size: u32, + pub comp_scheme: CompressionScheme, +} + +pub struct UnityFSHeader { + pub(crate) nodes: Vec, + pub file_version: u32, + pub player_version: String, + pub unity_version: String, +} + +impl UnityFSHeader { + pub fn read(mut file: impl Read + Seek) -> Result<(Self, Vec)> { + let signature = file.read_cstr()?; + if signature.as_str() != "UnityFS" { + bail!("unknown signature {signature:?}") + } + + let file_version = file.read_u32_be()?; + let player_version = file.read_cstr()?; + let unity_version = file.read_cstr()?; + let size = file.read_u64_be()?; + let blockindex_comp_size = file.read_u32_be()?; + let blockindex_decomp_size = file.read_u32_be()?; + let flags = file.read_u32_be()?; + + let meta_comp_scheme = CompressionScheme::from_flag_num(flags as u8).ok_or(anyhow!( + "unknown block compression 0x{:02x}", + (flags & 0x3f) as u8 + ))?; + let blockindex_eof = flags & 0x80 != 0; + let blockindex_has_directory = flags & 0x40 != 0; + let blockindex_need_padding = flags & 0x200 != 0; + + info!("File Version: {file_version:?}"); + info!("Player Version: {player_version:?}"); + info!("Unity Version: {unity_version:?}"); + debug!("size={size:?}"); + debug!("meta_comp_size={blockindex_comp_size:?}"); + debug!("meta_decomp_size={blockindex_decomp_size:?}"); + debug!("flags={flags:?}"); + debug!("meta_comp_scheme={meta_comp_scheme:?}"); + debug!("blockindex_eof={blockindex_eof:?}"); + debug!("blockindex_has_directory={blockindex_has_directory:?}"); + debug!("blockindex_need_padding={blockindex_need_padding:?}"); + + let mut blockindex = { + let restore_position = if blockindex_eof { + let pos = file.stream_position()?; + file.seek(SeekFrom::End(-(blockindex_comp_size as i64)))?; + Some(pos) + } else { + None + }; + + let mut blockindex = vec![0u8; blockindex_comp_size as usize]; + file.read_exact(&mut blockindex)?; + + if let Some(pos) = restore_position { + file.seek(SeekFrom::Start(pos))?; + } + let blockindex = + meta_comp_scheme.decompress(blockindex, blockindex_decomp_size as usize)?; + Cursor::new(blockindex) + }; + + file.align(16)?; + + blockindex.read_u128_be()?; + + let num_blocks = blockindex.read_u32_be()?; + info!("File has {num_blocks} blocks"); + let mut blocks = Vec::new(); + for _ in 0..num_blocks { + let decomp_size = blockindex.read_u32_be()?; + let comp_size = blockindex.read_u32_be()?; + let flags = blockindex.read_u16_be()?; + let comp_scheme = CompressionScheme::from_flag_num(flags as u8) + .ok_or(anyhow!("unknown block compression 0x{:02x}", flags & 0x3f))?; + blocks.push(BlockInfo { + comp_size, + decomp_size, + comp_scheme, + }) + } + + let num_nodes = 
+        let num_nodes = blockindex.read_u32_be()?;
+        debug!("num_nodes={num_nodes:?}");
+        let mut nodes = Vec::new();
+        for _ in 0..num_nodes {
+            let offset = blockindex.read_u64_be()?;
+            let size = blockindex.read_u64_be()?;
+            let status = blockindex.read_u32_be()?;
+            let name = blockindex.read_cstr()?;
+            info!(
+                "found node {name:?} (size={}, status={status})",
+                humansize::format_size(size, DECIMAL)
+            );
+            nodes.push(NodeInfo {
+                offset,
+                size,
+                _status: status,
+                name,
+            })
+        }
+
+        Ok((
+            Self {
+                file_version,
+                player_version,
+                unity_version,
+                nodes,
+            },
+            blocks,
+        ))
+    }
+    pub fn nodes(&self) -> &[NodeInfo] {
+        &self.nodes
+    }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq)]
+pub enum CompressionScheme {
+    None,
+    Lzma,
+    Lz4,
+    Lz4hc,
+    Lzham,
+}
+impl CompressionScheme {
+    pub fn from_flag_num(n: u8) -> Option {
+        Some(match n & 0x3f {
+            0 => CompressionScheme::None,
+            1 => CompressionScheme::Lzma,
+            2 => CompressionScheme::Lz4,
+            3 => CompressionScheme::Lz4hc,
+            4 => CompressionScheme::Lzham,
+            _ => return None,
+        })
+    }
+    pub fn decompress(&self, block: Vec<u8>, decomp_size: usize) -> Result<Vec<u8>> {
+        match self {
+            CompressionScheme::None => Ok(block),
+            CompressionScheme::Lzma => {
+                let mut r = lzma::Reader::from(Cursor::new(block))?;
+                let mut buf = Vec::new();
+                r.read_to_end(&mut buf)?;
+                Ok(buf)
+            }
+            CompressionScheme::Lz4hc | CompressionScheme::Lz4 => {
+                Ok(lz4_flex::block::decompress(&block, decomp_size)?)
+            }
+            // CompressionScheme::LZ4HC | CompressionScheme::LZ4 => {
+            //     Ok(lz4::block::decompress(&block, Some(decomp_size as i32))?)
+            // }
+            CompressionScheme::Lzham => todo!(),
+        }
+    }
+}
diff --git a/src/unityfs/mod.rs b/src/unityfs/mod.rs
new file mode 100644
index 0000000..bc7e3ec
--- /dev/null
+++ b/src/unityfs/mod.rs
@@ -0,0 +1,95 @@
+pub mod block_reader;
+pub mod header;
+pub mod multi_reader;
+
+use anyhow::Result;
+use block_reader::BlockReader;
+use header::{BlockInfo, NodeInfo, UnityFSHeader};
+use log::debug;
+use multi_reader::MultiReader;
+use std::{
+    io::{Error, ErrorKind, Read, Seek, SeekFrom},
+    sync::Arc,
+};
+
+pub struct UnityFS {
+    reader: MultiReader,
+    blocks: Arc<Vec<BlockInfo>>,
+    inner_seek_offset: u64,
+    pub header: UnityFSHeader,
+}
+
+pub struct NodeReader {
+    inner: T,
+    position: u64,
+    offset: u64,
+    size: u64,
+}
+
+impl UnityFS {
+    pub fn open(mut file: T) -> Result {
+        let (header, blocks) = UnityFSHeader::read(&mut file)?;
+        let inner_seek_offset = file.stream_position()?;
+
+        Ok(Self {
+            blocks: Arc::new(blocks),
+            header,
+            inner_seek_offset,
+            reader: MultiReader::new(file)?,
+        })
+    }
+
+    pub fn find_main_file(&self) -> Option<&NodeInfo> {
+        self.header.nodes().iter().find(|n| {
+            !n.name.ends_with(".resource")
+                && !n.name.ends_with(".resS")
+                && !n.name.ends_with(".sharedAssets")
+        })
+    }
+
+    pub fn read<'a>(&'a self, node: &NodeInfo) -> Result<NodeReader<BlockReader<MultiReader<T>>>> {
+        let mut inner = self.reader.clone();
+        inner.seek(SeekFrom::Start(self.inner_seek_offset))?;
+        let br = BlockReader::new(self.blocks.clone(), inner, self.inner_seek_offset);
+        Ok(NodeReader {
+            size: node.size,
+            offset: node.offset,
+            position: 0,
+            inner: br,
+        })
+    }
+}
+
+impl Read for NodeReader {
+    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
+        let bytes_left = self.size - self.position;
+        let end = buf.len().min(bytes_left as usize);
+        let size = self.inner.read(&mut buf[..end])?;
+        self.position += size as u64;
+        Ok(size)
+    }
+}
+impl Seek for NodeReader {
+    fn seek(&mut self, pos: SeekFrom) -> std::io::Result<u64> {
+        match pos {
+            SeekFrom::Current(n) if n >= 0 => {
+                for _ in 0..n {
+                    self.read_exact(&mut [0u8])?;
+                }
+                Ok(self.stream_position()?)
+            }
+            SeekFrom::Start(n) => {
+                debug!("seek node to {n} (off={})", self.offset);
+                if n > self.size {
+                    return Err(Error::new(ErrorKind::NotSeekable, "seek out of bounds"));
+                }
+                self.position = n;
+                self.inner.seek(SeekFrom::Start(self.offset + n))
+            }
+            _ => unimplemented!(),
+        }
+    }
+    fn stream_position(&mut self) -> std::io::Result<u64> {
+        Ok(self.position)
+    }
+}
diff --git a/src/unityfs/multi_reader.rs b/src/unityfs/multi_reader.rs
new file mode 100644
index 0000000..3de6cd5
--- /dev/null
+++ b/src/unityfs/multi_reader.rs
@@ -0,0 +1,50 @@
+use std::{
+    io::{Read, Seek, SeekFrom},
+    sync::{Arc, Mutex},
+};
+
+use anyhow::Result;
+
+pub struct MultiReader {
+    position: u64,
+    inner: Arc<Mutex<(u64, T)>>,
+}
+impl MultiReader {
+    pub fn new(mut inner: T) -> Result {
+        let position = inner.stream_position()?;
+        Ok(Self {
+            position,
+            inner: Arc::new(Mutex::new((position, inner))),
+        })
+    }
+}
+impl Clone for MultiReader {
+    fn clone(&self) -> Self {
+        Self {
+            position: self.position,
+            inner: self.inner.clone(),
+        }
+    }
+}
+impl Read for MultiReader {
+    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
+        let mut g = self.inner.lock().unwrap();
+        if g.0 != self.position {
+            g.1.seek(SeekFrom::Start(self.position))?;
+        }
+        let size = g.1.read(buf)?;
+        g.0 += size as u64;
+        self.position += size as u64;
+        Ok(size)
+    }
+}
+impl Seek for MultiReader {
+    fn seek(&mut self, pos: SeekFrom) -> std::io::Result<u64> {
+        self.position = match pos {
+            SeekFrom::Start(x) => x,
+            SeekFrom::Current(x) => self.position.saturating_add_signed(x),
+            SeekFrom::End(_) => unimplemented!(),
+        };
+        Ok(self.position)
+    }
+}
--
cgit v1.2.3-70-g09d2
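
Usage sketch of the reader API this patch arrives at. Only UnityFS::open, find_main_file, header.nodes() and read() come from the patch itself; the bundle file name and the main() wiring below are illustrative assumptions (the snippet is presumed to live inside the same crate, so crate::unityfs resolves as in src/serialized_file.rs).

    use std::{fs::File, io::Read};

    use crate::unityfs::UnityFS;

    fn main() -> anyhow::Result<()> {
        // Open a bundle; the path is a placeholder.
        let fs = UnityFS::open(File::open("bundle.unity3d")?)?;

        // read() now takes &self and returns an independent NodeReader backed by a
        // clone of the shared MultiReader, so several nodes can be open at once.
        let main_node = fs.find_main_file().expect("bundle has no main file").clone();
        let mut main_reader = fs.read(&main_node)?;
        let mut main_bytes = Vec::new();
        main_reader.read_to_end(&mut main_bytes)?;

        // A streamed-data node (e.g. *.resS) can be read while main_reader is still
        // alive, which the previous &mut self based read() could not express.
        if let Some(res) = fs.header.nodes().iter().find(|n| n.name.ends_with(".resS")) {
            let mut res_reader = fs.read(res)?;
            let mut first = [0u8; 16];
            res_reader.read_exact(&mut first)?;
        }
        Ok(())
    }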