From 4a39359e892311ac8149618b8a965bf10c81fc90 Mon Sep 17 00:00:00 2001 From: ryan-rsm-mckenzie Date: Wed, 24 Jan 2024 10:47:37 -0800 Subject: [PATCH] use references to strings in the file where possible instead of always allocating copies --- src/containers.rs | 29 +++++++++++++- src/derive.rs | 68 ++++++++++++++++++++++---------- src/fo4/archive.rs | 49 +++++++++++------------ src/io.rs | 32 +++++++-------- src/protocols.rs | 76 +++++++++++++++++------------------- src/tes3/archive.rs | 32 ++++++++------- src/tes4/archive.rs | 91 ++++++++++++++++++++++--------------------- src/tes4/directory.rs | 2 +- 8 files changed, 216 insertions(+), 163 deletions(-) diff --git a/src/containers.rs b/src/containers.rs index 3b76dd1..754dc5b 100644 --- a/src/containers.rs +++ b/src/containers.rs @@ -1,3 +1,4 @@ +use core::ops::Range; use memmap2::Mmap; use std::sync::Arc; @@ -39,6 +40,12 @@ enum BytesInner<'bytes> { use BytesInner::*; +impl From<Mapping> for BytesInner<'static> { + fn from(value: Mapping) -> Self { + Mapped(value) + } +} + #[derive(Clone, Debug)] pub(crate) struct Bytes<'bytes> { inner: BytesInner<'bytes>, @@ -115,6 +122,26 @@ impl<'bytes> Bytes<'bytes> { }, } } + + #[must_use] + pub(crate) fn copy_slice(&self, slice: Range<usize>) -> Self { + match &self.inner { + Owned(x) => Self { + inner: Owned(x[slice].into()), + }, + Borrowed(x) => Self { + inner: Borrowed(&x[slice]), + }, + Mapped(x) => Self { + inner: Mapping { + pos: x.pos + slice.start, + len: slice.len(), + mapping: x.mapping.clone(), + } + .into(), + }, + } + } } impl Bytes<'static> { @@ -128,7 +155,7 @@ impl Bytes<'static> { #[must_use] pub(crate) fn from_mapped(pos: usize, len: usize, mapping: Arc<Mmap>) -> Self { Self { - inner: Mapped(Mapping { pos, len, mapping }), + inner: Mapping { pos, len, mapping }.into(), } } } diff --git a/src/derive.rs b/src/derive.rs index cbe261d..c852d95 100644 --- a/src/derive.rs +++ b/src/derive.rs @@ -225,47 +225,72 @@ pub(crate) use compressable_bytes; macro_rules!
key { ($this:ident: $hash:ident) => { #[derive(::core::clone::Clone, ::core::fmt::Debug, ::core::default::Default)] - pub struct $this { - pub hash: $hash, - pub name: ::bstr::BString, + pub struct $this<'bytes> { + pub(crate) hash: $hash, + pub(crate) name: crate::containers::Bytes<'bytes>, + } + + impl<'bytes> $this<'bytes> { + #[must_use] + pub fn hash(&self) -> &$hash { + &self.hash + } + + #[must_use] + pub fn name(&self) -> &::bstr::BStr { + ::bstr::BStr::new(self.name.as_bytes()) + } } // false positive #[allow(clippy::unconditional_recursion)] - impl ::core::cmp::PartialEq for $this { + impl<'bytes> ::core::cmp::PartialEq for $this<'bytes> { fn eq(&self, other: &Self) -> bool { self.hash.eq(&other.hash) } } - impl ::core::cmp::Eq for $this {} + impl<'bytes> ::core::cmp::Eq for $this<'bytes> {} - impl ::core::cmp::PartialOrd for $this { + impl<'bytes> ::core::cmp::PartialOrd for $this<'bytes> { fn partial_cmp(&self, other: &Self) -> ::core::option::Option<::core::cmp::Ordering> { Some(self.cmp(other)) } } - impl ::core::cmp::Ord for $this { + impl<'bytes> ::core::cmp::Ord for $this<'bytes> { fn cmp(&self, other: &Self) -> ::core::cmp::Ordering { self.hash.cmp(&other.hash) } } - impl ::core::borrow::Borrow<$hash> for $this { + impl<'bytes> ::core::borrow::Borrow<$hash> for $this<'bytes> { fn borrow(&self) -> &$hash { &self.hash } } - impl ::core::convert::From for $this + impl ::core::convert::From<$hash> for $this<'static> { + fn from(value: $hash) -> Self { + Self { + hash: value, + name: crate::containers::Bytes::default(), + } + } + } + + impl ::core::convert::From for $this<'static> where - T: Into<::bstr::BString>, + T: ::core::convert::Into<::bstr::BString>, { fn from(value: T) -> Self { let mut name = value.into(); let hash = Self::hash_in_place(&mut name); - Self { hash, name } + let v: Vec = name.into(); + Self { + hash, + name: crate::containers::Bytes::from_owned(v.into()), + } } } }; @@ -275,7 +300,8 @@ pub(crate) use key; macro_rules! 
mapping { ($this:ident, $mapping:ident: ($key:ident: $hash:ident) => $value:ident) => { - pub(crate) type $mapping<'bytes> = ::std::collections::BTreeMap<$key, $value<'bytes>>; + pub(crate) type $mapping<'bytes> = + ::std::collections::BTreeMap<$key<'bytes>, $value<'bytes>>; impl<'bytes> crate::Sealed for $this<'bytes> {} @@ -301,7 +327,7 @@ macro_rules! mapping { pub fn get_key_value<K>( &self, key: &K, - ) -> ::core::option::Option<(&$key, &$value<'bytes>)> + ) -> ::core::option::Option<(&$key<'bytes>, &$value<'bytes>)> where K: ::core::borrow::Borrow<$hash>, { @@ -322,7 +348,7 @@ macro_rules! mapping { value: $value<'bytes>, ) -> ::core::option::Option<$value<'bytes>> where - K: ::core::convert::Into<$key>, + K: ::core::convert::Into<$key<'bytes>>, { self.map.insert(key.into(), value) } @@ -332,17 +358,19 @@ macro_rules! mapping { self.map.is_empty() } - pub fn iter(&self) -> impl ::core::iter::Iterator<Item = (&$key, &$value<'bytes>)> { + pub fn iter( + &self, + ) -> impl ::core::iter::Iterator<Item = (&$key<'bytes>, &$value<'bytes>)> { self.map.iter() } pub fn iter_mut( &mut self, - ) -> impl ::core::iter::Iterator<Item = (&$key, &mut $value<'bytes>)> { + ) -> impl ::core::iter::Iterator<Item = (&$key<'bytes>, &mut $value<'bytes>)> { self.map.iter_mut() } - pub fn keys(&self) -> impl ::core::iter::Iterator<Item = &$key> { + pub fn keys(&self) -> impl ::core::iter::Iterator<Item = &$key<'bytes>> { self.map.keys() } @@ -366,7 +394,7 @@ macro_rules! mapping { pub fn remove_entry<K>( &mut self, key: &K, - ) -> ::core::option::Option<($key, $value<'bytes>)> + ) -> ::core::option::Option<($key<'bytes>, $value<'bytes>)> where K: ::core::borrow::Borrow<$hash>, { @@ -384,10 +412,10 @@ macro_rules!
mapping { } } - impl<'bytes> ::core::iter::FromIterator<($key, $value<'bytes>)> for $this<'bytes> { + impl<'bytes> ::core::iter::FromIterator<($key<'bytes>, $value<'bytes>)> for $this<'bytes> { fn from_iter<T>(iter: T) -> Self where - T: ::core::iter::IntoIterator<Item = ($key, $value<'bytes>)>, + T: ::core::iter::IntoIterator<Item = ($key<'bytes>, $value<'bytes>)>, { Self { map: iter.into_iter().collect(), diff --git a/src/fo4/archive.rs b/src/fo4/archive.rs index 30bfde0..d3822b8 100644 --- a/src/fo4/archive.rs +++ b/src/fo4/archive.rs @@ -154,7 +154,7 @@ impl Options { derive::key!(Key: FileHash); -impl Key { +impl<'bytes> Key<'bytes> { #[must_use] fn hash_in_place(name: &mut BString) -> FileHash { fo4::hash_file_in_place(name) } @@ -174,7 +174,7 @@ impl<'bytes> Archive<'bytes> { Self::write_header(&mut sink, &header)?; for (key, file) in self { - Self::write_file(&mut sink, &header, &mut offsets, &key.hash, file)?; + Self::write_file(&mut sink, &header, &mut offsets, key.hash(), file)?; } for file in self.values() { @@ -185,7 +185,7 @@ impl<'bytes> Archive<'bytes> { if options.strings { for key in self.keys() { - sink.write_protocol::<WString>(key.name.as_ref(), Endian::Little)?; + sink.write_protocol::<WString>(key.name(), Endian::Little)?; } } @@ -400,14 +400,14 @@ impl<'bytes> Archive<'bytes> { source: &mut In, header: &Header, strings: &mut usize, - ) -> Result<(Key, File<'bytes>)> + ) -> Result<(Key<'bytes>, File<'bytes>)> where In: ?Sized + Source<'bytes>, { let name = if *strings == 0 { - BString::default() + Bytes::default() } else { - source.save_restore_position(|source| -> Result<BString> { + source.save_restore_position(|source| -> Result<Bytes<'bytes>> { source.seek_absolute(*strings)?; let name = source.read_protocol::<WString>(Endian::Little)?; *strings = source.stream_position(); @@ -526,7 +526,7 @@ mod tests { cc, fo4::{ Archive, ArchiveKey, ArchiveOptions, ChunkExtra, CompressionFormat, File, FileHeader, - Format, Hash, Version, + Format, Version, }, prelude::*, Borrowed, @@ -619,16 +619,13 @@ mod tests { #[test] fn write_general_archives() ->
anyhow::Result<()> { let root = Path::new("data/fo4_write_test/data"); - let make_key = - |file: u32, extension: &[u8], directory: u32, path: &'static str| ArchiveKey { - hash: Hash { - file, - extension: cc::make_four(extension), - directory, - } - .into(), - name: path.into(), - }; + let make_key = |file: u32, extension: &[u8], directory: u32, path: &'static str| { + let key: ArchiveKey = path.into(); + assert_eq!(key.hash().file, file); + assert_eq!(key.hash().extension, cc::make_four(extension)); + assert_eq!(key.hash().directory, directory); + key + }; let keys = [ make_key( @@ -643,8 +640,8 @@ mod tests { 0xD9A32978, "Characters/character_0003.png", ), - make_key(0x36F72750, b"png", 0x60648919, "Construct 3/Readme.txt"), - make_key(0xCA042B67, b"png", 0x29246A47, "Share/License.txt"), + make_key(0x36F72750, b"txt", 0x60648919, "Construct 3/Readme.txt"), + make_key(0xCA042B67, b"txt", 0x29246A47, "Share/License.txt"), make_key(0xDA3773A6, b"png", 0x0B0A447E, "Tilemap/tiles.png"), make_key(0x785183FF, b"png", 0xDA3773A6, "Tiles/tile_0003.png"), ]; @@ -652,7 +649,7 @@ mod tests { let mappings: Vec<_> = keys .iter() .map(|key| { - let path: PathBuf = [root.as_os_str(), key.name.to_os_str_lossy().as_ref()] + let path: PathBuf = [root.as_os_str(), key.name().to_os_str_lossy().as_ref()] .into_iter() .collect(); let fd = fs::File::open(&path) @@ -686,19 +683,19 @@ mod tests { assert_eq!(main.len(), child.len()); for (key, mapping) in keys.iter().zip(&mappings) { - let file = child - .get_key_value(key) - .with_context(|| format!("failed to get file: {}", key.name.to_str_lossy()))?; - assert_eq!(file.0.hash, key.hash); + let file = child.get_key_value(key).with_context(|| { + format!("failed to get file: {}", key.name().to_str_lossy()) + })?; + assert_eq!(file.0.hash(), key.hash()); assert_eq!(file.1.len(), 1); if strings { - assert_eq!(file.0.name, key.name); + assert_eq!(file.0.name(), key.name()); } let chunk = &file.1[0]; let decompressed_chunk = if 
chunk.is_compressed() { let result = chunk.decompress(&Default::default()).with_context(|| { - format!("failed to decompress chunk: {}", key.name.to_str_lossy()) + format!("failed to decompress chunk: {}", key.name().to_str_lossy()) })?; Some(result) } else { diff --git a/src/io.rs b/src/io.rs index 5820a82..26138e9 100644 --- a/src/io.rs +++ b/src/io.rs @@ -31,14 +31,14 @@ pub(crate) trait Source<'bytes> { fn read(&mut self, endian: Endian) -> io::Result where - T: BinaryReadable, + T: BinaryReadable<'bytes, Item = T>, { T::from_stream(self, endian) } fn read_protocol(&mut self, endian: Endian) -> io::Result where - T: BinaryReadable, + T: BinaryReadable<'bytes>, { T::from_stream(self, endian) } @@ -183,31 +183,31 @@ impl TryFrom<&File> for MappedSource { make_sourceable!(MappedSource, 'static); -pub(crate) trait BinaryReadable { +pub(crate) trait BinaryReadable<'bytes> { type Item; - fn from_ne_stream<'bytes, In>(stream: &mut In) -> io::Result + fn from_ne_stream(stream: &mut In) -> io::Result where In: ?Sized + Source<'bytes>, { Self::from_stream(stream, Endian::Native) } - fn from_be_stream<'bytes, In>(stream: &mut In) -> io::Result + fn from_be_stream(stream: &mut In) -> io::Result where In: ?Sized + Source<'bytes>, { Self::from_stream(stream, Endian::Big) } - fn from_le_stream<'bytes, In>(stream: &mut In) -> io::Result + fn from_le_stream(stream: &mut In) -> io::Result where In: ?Sized + Source<'bytes>, { Self::from_stream(stream, Endian::Little) } - fn from_stream<'bytes, In>(stream: &mut In, endian: Endian) -> io::Result + fn from_stream(stream: &mut In, endian: Endian) -> io::Result where In: ?Sized + Source<'bytes>, { @@ -257,10 +257,10 @@ pub(crate) trait BinaryWriteable { macro_rules! 
make_binary_streamable { ($t:ty) => { - impl BinaryReadable for $t { + impl<'bytes> BinaryReadable<'bytes> for $t { type Item = $t; - fn from_be_stream<'bytes, In>(stream: &mut In) -> io::Result + fn from_be_stream(stream: &mut In) -> io::Result where In: ?Sized + Source<'bytes>, { @@ -269,7 +269,7 @@ macro_rules! make_binary_streamable { Ok(Self::from_be_bytes(bytes)) } - fn from_le_stream<'bytes, In>(stream: &mut In) -> io::Result + fn from_le_stream(stream: &mut In) -> io::Result where In: ?Sized + Source<'bytes>, { @@ -278,7 +278,7 @@ macro_rules! make_binary_streamable { Ok(Self::from_le_bytes(bytes)) } - fn from_ne_stream<'bytes, In>(stream: &mut In) -> io::Result + fn from_ne_stream(stream: &mut In) -> io::Result where In: ?Sized + Source<'bytes>, { @@ -330,13 +330,13 @@ make_binary_streamable!(i64); macro_rules! make_binary_streamable_tuple { ($($idx:tt $t:ident),+) => { - impl<$($t,)+> BinaryReadable for ($($t,)+) + impl<'bytes, $($t,)+> BinaryReadable<'bytes> for ($($t,)+) where - $($t: BinaryReadable,)+ + $($t: BinaryReadable<'bytes>,)+ { type Item = ($($t::Item,)+); - fn from_be_stream<'bytes, In>(stream: &mut In) -> io::Result + fn from_be_stream(stream: &mut In) -> io::Result where In: ?Sized + Source<'bytes>, { @@ -345,7 +345,7 @@ macro_rules! make_binary_streamable_tuple { )+)) } - fn from_le_stream<'bytes, In>(stream: &mut In) -> io::Result + fn from_le_stream(stream: &mut In) -> io::Result where In: ?Sized + Source<'bytes>, { @@ -354,7 +354,7 @@ macro_rules! 
make_binary_streamable_tuple { )+)) } - fn from_ne_stream<'bytes, In>(stream: &mut In) -> io::Result + fn from_ne_stream(stream: &mut In) -> io::Result where In: ?Sized + Source<'bytes>, { diff --git a/src/protocols.rs b/src/protocols.rs index 2f790a3..b8054c7 100644 --- a/src/protocols.rs +++ b/src/protocols.rs @@ -1,5 +1,9 @@ -use crate::io::{BinaryReadable, BinaryWriteable, Endian, Sink, Source}; -use bstr::{BStr as ByteStr, BString as ByteString}; +use crate::{ + containers::Bytes, + io::{BinaryReadable, BinaryWriteable, Endian, Sink, Source}, +}; +use bstr::BStr as ByteStr; +use core::num::NonZeroU8; use std::io::{self, Write}; #[derive(Debug, thiserror::Error)] @@ -19,19 +23,15 @@ impl From for io::Error { pub(crate) struct BString; -impl BinaryReadable for BString { - type Item = ByteString; +impl<'bytes> BinaryReadable<'bytes> for BString { + type Item = Bytes<'bytes>; - fn from_stream<'bytes, In>(stream: &mut In, endian: Endian) -> io::Result + fn from_stream(stream: &mut In, endian: Endian) -> io::Result where In: ?Sized + Source<'bytes>, { let len: u8 = stream.read(endian)?; - let mut result = Vec::new(); - result.resize_with(len.into(), Default::default); - stream.read_into(&mut result[..])?; - result.shrink_to_fit(); - Ok(result.into()) + stream.read_bytes(len.into()) } } @@ -56,24 +56,27 @@ impl BinaryWriteable for BString { pub(crate) struct ZString; -impl BinaryReadable for ZString { - type Item = ByteString; +impl<'bytes> BinaryReadable<'bytes> for ZString { + type Item = Bytes<'bytes>; - fn from_stream<'bytes, In>(stream: &mut In, endian: Endian) -> io::Result + fn from_stream(stream: &mut In, endian: Endian) -> io::Result where In: ?Sized + Source<'bytes>, { - let mut result = Vec::new(); + let start = stream.stream_position(); + let mut len = 0; loop { let byte: u8 = stream.read(endian)?; match byte { 0 => break, - byte => result.push(byte), + _ => len += 1, }; } - result.shrink_to_fit(); - Ok(result.into()) + stream.seek_absolute(start)?; + let 
result = stream.read_bytes(len)?; + stream.seek_relative(1)?; // skip null terminator + Ok(result) } } @@ -92,27 +95,22 @@ impl BinaryWriteable for ZString { pub(crate) struct BZString; -impl BinaryReadable for BZString { - type Item = ByteString; +impl<'bytes> BinaryReadable<'bytes> for BZString { + type Item = Bytes<'bytes>; - fn from_stream<'bytes, In>(stream: &mut In, endian: Endian) -> io::Result + fn from_stream(stream: &mut In, endian: Endian) -> io::Result where In: ?Sized + Source<'bytes>, { let len: u8 = stream.read(endian)?; - if len > 0 { - let mut result = Vec::new(); - result.resize_with(len.into(), Default::default); - stream.read_into(&mut result[..])?; - match result.pop() { - Some(b'\0') => { - result.shrink_to_fit(); - Ok(result.into()) - } - _ => Err(Error::MissingNullTerminator.into()), - } - } else { - Ok(Self::Item::default()) + let Some(len) = NonZeroU8::new(len) else { + return Err(Error::MissingNullTerminator.into()); + }; + + let result = stream.read_bytes((len.get() - 1).into())?; + match stream.read(endian)? 
{ + b'\0' => Ok(result), + _ => Err(Error::MissingNullTerminator.into()), } } } @@ -139,19 +137,15 @@ impl BinaryWriteable for BZString { pub(crate) struct WString; -impl BinaryReadable for WString { - type Item = ByteString; +impl<'bytes> BinaryReadable<'bytes> for WString { + type Item = Bytes<'bytes>; - fn from_stream<'bytes, In>(stream: &mut In, endian: Endian) -> io::Result + fn from_stream(stream: &mut In, endian: Endian) -> io::Result where In: ?Sized + Source<'bytes>, { let len: u16 = stream.read(endian)?; - let mut result = Vec::new(); - result.resize_with(len.into(), Default::default); - stream.read_into(&mut result[..])?; - result.shrink_to_fit(); - Ok(result.into()) + stream.read_bytes(len.into()) } } diff --git a/src/tes3/archive.rs b/src/tes3/archive.rs index ab53f7a..eae66f7 100644 --- a/src/tes3/archive.rs +++ b/src/tes3/archive.rs @@ -46,7 +46,7 @@ impl Header { derive::key!(Key: FileHash); -impl Key { +impl<'bytes> Key<'bytes> { #[must_use] fn hash_in_place(name: &mut BString) -> FileHash { tes3::hash_file_in_place(name) @@ -78,7 +78,7 @@ impl<'bytes> Archive<'bytes> { file_count: self.map.len().try_into()?, hash_offset: { let names_offset = 0xC * self.map.len(); - let names_len: usize = self.map.keys().map(|x| x.name.len() + 1).sum(); + let names_len: usize = self.map.keys().map(|x| x.name().len() + 1).sum(); (names_offset + names_len).try_into()? 
}, }) @@ -114,7 +114,7 @@ impl<'bytes> Archive<'bytes> { Out: Write, { for key in self.map.keys() { - let hash = &key.hash; + let hash = &key.hash(); sink.write(&(hash.lo, hash.hi), Endian::Little)?; } @@ -143,7 +143,7 @@ impl<'bytes> Archive<'bytes> { let mut offset: u32 = 0; for key in self.map.keys() { sink.write(&offset, Endian::Little)?; - offset += u32::try_from(key.name.len() + 1)?; + offset += u32::try_from(key.name().len() + 1)?; } Ok(()) @@ -154,7 +154,7 @@ impl<'bytes> Archive<'bytes> { Out: Write, { for key in self.map.keys() { - sink.write_protocol::(key.name.as_ref(), Endian::Little)?; + sink.write_protocol::(key.name(), Endian::Little)?; } Ok(()) @@ -176,7 +176,11 @@ impl<'bytes> Archive<'bytes> { Ok(Self { map }) } - fn read_file(source: &mut In, idx: usize, offsets: &Offsets) -> Result<(Key, File<'bytes>)> + fn read_file( + source: &mut In, + idx: usize, + offsets: &Offsets, + ) -> Result<(Key<'bytes>, File<'bytes>)> where In: ?Sized + Source<'bytes>, { @@ -185,7 +189,7 @@ impl<'bytes> Archive<'bytes> { Self::read_hash(source) })??; - let name = source.save_restore_position(|source| -> Result { + let name = source.save_restore_position(|source| -> Result> { source.seek_absolute(offsets.name_offsets + 0x4 * idx)?; let offset: u32 = source.read(Endian::Little)?; source.seek_absolute(offsets.names + offset as usize)?; @@ -325,16 +329,16 @@ mod tests { #[test] fn writing() -> anyhow::Result<()> { - struct Info<'a> { - key: ArchiveKey, - path: &'a Path, + struct Info { + key: ArchiveKey<'static>, + path: &'static Path, } - impl<'a> Info<'a> { - fn new(lo: u32, hi: u32, path: &'a str) -> Self { + impl Info { + fn new(lo: u32, hi: u32, path: &'static str) -> Self { let hash = Hash { lo, hi }; let key = ArchiveKey::from(path); - assert_eq!(hash, key.hash); + assert_eq!(&hash, key.hash()); Self { key, path: Path::new(path), @@ -382,7 +386,7 @@ mod tests { let archive = Archive::read(Borrowed(&stream)).context("failed to read from archive in memory")?; for 
(data, info) in mmapped.iter().zip(&infos) { - let file = archive.get(&info.key.hash).with_context(|| { + let file = archive.get(info.key.hash()).with_context(|| { format!("failed to get value from archive with key: {:?}", info.path) })?; assert_eq!(file.as_bytes(), &data[..]); diff --git a/src/tes4/archive.rs b/src/tes4/archive.rs index fe6d1d0..e62fa22 100644 --- a/src/tes4/archive.rs +++ b/src/tes4/archive.rs @@ -1,5 +1,5 @@ use crate::{ - containers::CompressableBytes, + containers::{Bytes, CompressableBytes}, derive, io::{Endian, Sink, Source}, protocols::{self, BZString, ZString}, @@ -232,20 +232,20 @@ impl Header { } struct SortedFile<'this, 'bytes> { - key: &'this DirectoryKey, + key: &'this DirectoryKey<'bytes>, this: &'this File<'bytes>, embedded_name: Option>, } struct SortedDirectory<'this, 'bytes> { - key: &'this Key, + key: &'this Key<'bytes>, this: &'this Directory<'bytes>, files: Vec>, } derive::key!(Key: DirectoryHash); -impl Key { +impl<'bytes> Key<'bytes> { #[must_use] fn hash_in_place(name: &mut BString) -> DirectoryHash { tes4::hash_directory_in_place(name) @@ -347,7 +347,7 @@ impl<'bytes> Archive<'bytes> { let mut file_data_offset = u32::try_from(offsets.file_data)?; for directory in &directories { if options.flags.directory_strings() { - sink.write_protocol::(directory.key.name.as_ref(), Endian::Little)?; + sink.write_protocol::(directory.key.name(), Endian::Little)?; } for file in &directory.files { Self::write_file_entry( @@ -364,7 +364,7 @@ impl<'bytes> Archive<'bytes> { if options.flags.file_strings() { for directory in &directories { for file in &directory.files { - sink.write_protocol::(file.key.name.as_ref(), Endian::Little)?; + sink.write_protocol::(file.key.name(), Endian::Little)?; } } } @@ -396,14 +396,14 @@ impl<'bytes> Archive<'bytes> { directories.count += 1; if options.flags.directory_strings() { // zstring -> include null terminator - directories.names_len += directory.0.name.len() + 1; + directories.names_len += 
directory.0.name().len() + 1; } for file in directory.1 { files.count += 1; if options.flags.file_strings() { // zstring -> include null terminator - files.names_len += file.0.name.len() + 1; + files.names_len += file.0.name().len() + 1; } } } @@ -420,27 +420,27 @@ impl<'bytes> Archive<'bytes> { } fn concat_directory_and_file_name<'string>( - directory: &'string Key, - file: &'string DirectoryKey, + directory: &'string Key<'bytes>, + file: &'string DirectoryKey<'bytes>, ) -> Cow<'string, BStr> { - let directory = &directory.name; - let file = &file.name; + let directory = directory.name(); + let file = file.name(); let directory = match directory.len() { 0 => b"".as_bstr(), 1 => match directory[0] { b'/' | b'\\' | b'.' => b"".as_bstr(), - _ => directory.as_ref(), + _ => directory, }, - _ => directory.as_ref(), + _ => directory, }; match (directory.is_empty(), file.is_empty()) { (true, true) => Cow::default(), - (true, false) => Cow::from(file.as_ref()), + (true, false) => Cow::from(file), (false, true) => Cow::from(directory), (false, false) => { - let string: BString = [directory, b"\\".as_bstr(), file.as_ref()] + let string: BString = [directory, b"\\".as_bstr(), file] .into_iter() .flat_map(|x| x.as_bytes()) .copied() @@ -477,7 +477,7 @@ impl<'bytes> Archive<'bytes> { }) .collect(); if options.flags.xbox_archive() { - files.sort_by_key(|x| x.key.hash.numeric().swap_bytes()); + files.sort_by_key(|x| x.key.hash().numeric().swap_bytes()); } SortedDirectory { key: directory_key, @@ -487,7 +487,7 @@ impl<'bytes> Archive<'bytes> { }) .collect(); if options.flags.xbox_archive() { - directories.sort_by_key(|x| x.key.hash.numeric().swap_bytes()); + directories.sort_by_key(|x| x.key.hash().numeric().swap_bytes()); } directories } @@ -495,14 +495,14 @@ impl<'bytes> Archive<'bytes> { fn write_directory_entry( sink: &mut Sink, options: Options, - key: &Key, + key: &Key<'bytes>, directory: &Directory<'bytes>, file_entries_offset: &mut u32, ) -> Result<()> where Out: Write, { 
- Self::write_hash(sink, options, key.hash.into())?; + Self::write_hash(sink, options, (*key.hash()).into())?; let file_count: u32 = directory.len().try_into()?; sink.write(&file_count, Endian::Little)?; @@ -519,9 +519,9 @@ impl<'bytes> Archive<'bytes> { if options.flags.directory_strings() { // bzstring -> include prefix byte and null terminator - // file_entries_offset += key.name.len() + 2; + // file_entries_offset += key.name().len() + 2; *file_entries_offset = file_entries_offset - .checked_add((key.name.len() + 2).try_into()?) + .checked_add((key.name().len() + 2).try_into()?) .ok_or(Error::IntegralOverflow)?; } @@ -563,7 +563,7 @@ impl<'bytes> Archive<'bytes> { fn write_file_entry( sink: &mut Sink, options: Options, - key: &DirectoryKey, + key: &DirectoryKey<'bytes>, file: &File<'bytes>, file_data_offset: &mut u32, embedded_file_name: Option<&BStr>, @@ -571,7 +571,7 @@ impl<'bytes> Archive<'bytes> { where Out: Write, { - Self::write_hash(sink, options, key.hash.into())?; + Self::write_hash(sink, options, (*key.hash()).into())?; let (size_with_info, size) = { let mut size = file.len(); @@ -673,7 +673,7 @@ impl<'bytes> Archive<'bytes> { source: &mut In, header: &Header, offsets: &mut Offsets, - ) -> Result<(Key, Directory<'bytes>)> + ) -> Result<(Key<'bytes>, Directory<'bytes>)> where In: ?Sized + Source<'bytes>, { @@ -686,8 +686,8 @@ impl<'bytes> Archive<'bytes> { } let mut map = DirectoryMap::default(); - let (name, directory) = - source.save_restore_position(|source| -> Result<(BString, Directory<'bytes>)> { + let (name, directory) = source.save_restore_position( + |source| -> Result<(Bytes<'bytes>, Directory<'bytes>)> { source.seek_absolute(offsets.file_entries)?; let mut name = if header.archive_flags.directory_strings() { Some(source.read_protocol::(Endian::Little)?) 
@@ -700,7 +700,8 @@ impl<'bytes> Archive<'bytes> { } offsets.file_entries = source.stream_position(); Ok((name.unwrap_or_default(), Directory { map })) - })??; + }, + )??; Ok(( Key { @@ -715,8 +716,8 @@ impl<'bytes> Archive<'bytes> { source: &mut In, header: &Header, offsets: &mut Offsets, - directory_name: &mut Option, - ) -> Result<(DirectoryKey, File<'bytes>)> + directory_name: &mut Option>, + ) -> Result<(DirectoryKey<'bytes>, File<'bytes>)> where In: ?Sized + Source<'bytes>, { @@ -732,7 +733,7 @@ impl<'bytes> Archive<'bytes> { }; let mut name = if header.archive_flags.file_strings() { - source.save_restore_position(|source| -> Result> { + source.save_restore_position(|source| -> Result>> { source.seek_absolute(offsets.file_names)?; let result = source.read_protocol::(Endian::Little)?; offsets.file_names = source.stream_position(); @@ -750,11 +751,13 @@ impl<'bytes> Archive<'bytes> { Version::v104 | Version::v105 if header.archive_flags.embedded_file_names() => { let mut s = source.read_protocol::(Endian::Little)?; data_size -= s.len() + 1; // include prefix byte - if let Some(pos) = s.iter().rposition(|&x| x == b'\\' || x == b'/') { + if let Some(pos) = + s.as_bytes().iter().rposition(|&x| x == b'\\' || x == b'/') + { if directory_name.is_none() { - *directory_name = Some(s[..pos].into()); + *directory_name = Some(s.copy_slice(0..pos)); } - s.drain(..=pos); + s = s.copy_slice(pos + 1..s.len()); } if name.is_none() { name = Some(s); @@ -951,13 +954,13 @@ mod tests { assert_eq!(normal.len(), xbox.len()); for (directory_normal, directory_xbox) in normal.iter().zip(xbox) { - assert_eq!(directory_normal.0.hash, directory_xbox.0.hash); - assert_eq!(directory_normal.0.name, directory_xbox.0.name); + assert_eq!(directory_normal.0.hash(), directory_xbox.0.hash()); + assert_eq!(directory_normal.0.name(), directory_xbox.0.name()); assert_eq!(directory_normal.1.len(), directory_xbox.1.len()); for (file_normal, file_xbox) in directory_normal.1.iter().zip(directory_xbox.1) 
{ - assert_eq!(file_normal.0.hash, file_xbox.0.hash); - assert_eq!(file_normal.0.name, file_xbox.0.name); + assert_eq!(file_normal.0.hash(), file_xbox.0.hash()); + assert_eq!(file_normal.0.name(), file_xbox.0.name()); assert!(!file_normal.1.is_compressed()); assert!(!file_xbox.1.is_compressed()); assert_eq!(file_normal.1.len(), file_xbox.1.len()); @@ -1150,10 +1153,10 @@ mod tests { let directory = child .get_key_value(&archive_key) .with_context(|| format!("failed to get directory: {}", info.directory.name))?; - assert_eq!(directory.0.hash.numeric(), info.directory.hash); + assert_eq!(directory.0.hash().numeric(), info.directory.hash); assert_eq!(directory.1.len(), 1); if flags.directory_strings() || embedded_file_names { - assert_eq!(directory.0.name, archive_key.name); + assert_eq!(directory.0.name(), archive_key.name()); } let directory_key: DirectoryKey = info.file.name.into(); @@ -1161,9 +1164,9 @@ mod tests { .1 .get_key_value(&directory_key) .with_context(|| format!("failed to get file: {}", info.file.name))?; - assert_eq!(file.0.hash.numeric(), info.file.hash); + assert_eq!(file.0.hash().numeric(), info.file.hash); if flags.file_strings() || embedded_file_names { - assert_eq!(file.0.name, directory_key.name); + assert_eq!(file.0.name(), directory_key.name()); } let decompressed_file = if file.1.is_compressed() { @@ -1263,13 +1266,13 @@ mod tests { let directory = archive .get_key_value(&ArchiveKey::from(directory_name)) .with_context(|| format!("failed to get directory: {directory_name}"))?; - assert_eq!(directory.0.name, directory_name); + assert_eq!(directory.0.name(), directory_name); let file = directory .1 .get_key_value(&DirectoryKey::from(file_name)) .with_context(|| format!("failed to get file: {file_name}"))?; - assert_eq!(file.0.name, file_name); + assert_eq!(file.0.name(), file_name); Ok(()) }; diff --git a/src/tes4/directory.rs b/src/tes4/directory.rs index cf5c897..2718787 100644 --- a/src/tes4/directory.rs +++ b/src/tes4/directory.rs @@ -6,7 
+6,7 @@ use bstr::BString; derive::key!(Key: FileHash); -impl Key { +impl<'bytes> Key<'bytes> { #[must_use] fn hash_in_place(name: &mut BString) -> FileHash { tes4::hash_file_in_place(name)