Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

CasObject validation with updated tests #21

Merged
merged 2 commits into from
Sep 24, 2024
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 6 additions & 6 deletions cas_object/src/cas_chunk_format.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ use anyhow::anyhow;
use crate::CompressionScheme;
use lz4_flex::frame::{FrameDecoder, FrameEncoder};

pub const CAS_CHUNK_HEADER_LENGTH: u8 = 8;
pub const CAS_CHUNK_HEADER_LENGTH: usize = size_of::<CASChunkHeader>();
const CURRENT_VERSION: u8 = 0;

#[repr(C, packed)]
Expand Down Expand Up @@ -131,10 +131,10 @@ pub fn deserialize_chunk_header<R: Read>(reader: &mut R) -> Result<CASChunkHeade
Ok(result)
}

pub fn deserialize_chunk<R: Read>(reader: &mut R) -> Result<Vec<u8>, CasObjectError> {
/// Deserialize a single chunk (header + payload) from `reader`.
///
/// Returns the uncompressed chunk data together with the total number of
/// bytes consumed from the reader (chunk header + compressed payload), as
/// reported by `deserialize_chunk_to_writer`.
///
/// # Errors
/// Propagates any `CasObjectError` raised while reading or decoding the chunk.
pub fn deserialize_chunk<R: Read>(reader: &mut R) -> Result<(Vec<u8>, usize), CasObjectError> {
    let mut buf = Vec::new();
    let bytes_read = deserialize_chunk_to_writer(reader, &mut buf)?;
    Ok((buf, bytes_read))
}

pub fn deserialize_chunk_to_writer<R: Read, W: Write>(
Expand All @@ -153,7 +153,7 @@ pub fn deserialize_chunk_to_writer<R: Read, W: Write>(
}
};

Ok(header.get_uncompressed_length() as usize)
Ok(header.get_compressed_length() as usize + CAS_CHUNK_HEADER_LENGTH)
}

pub fn deserialize_chunks<R: Read>(reader: &mut R) -> Result<Vec<u8>, CasObjectError> {
Expand Down Expand Up @@ -240,7 +240,7 @@ mod tests {
write_chunk_header(&mut buf, &header).unwrap();
buf.extend_from_slice(data);

let data_copy = deserialize_chunk(&mut Cursor::new(buf)).unwrap();
let (data_copy, _) = deserialize_chunk(&mut Cursor::new(buf)).unwrap();
assert_eq!(data_copy.as_slice(), data);
}

Expand Down
128 changes: 102 additions & 26 deletions cas_object/src/cas_object_format.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
use bytes::Buf;
use merkledb::{prelude::MerkleDBHighLevelMethodsV1, Chunk, MerkleMemDB};
use merklehash::{DataHash, MerkleHash};
use tracing::warn;
use std::{
cmp::min,
io::{Cursor, Error, Read, Seek, Write},
Expand Down Expand Up @@ -403,7 +404,7 @@ impl CasObject {
let mut res = Vec::<u8>::new();

while reader.has_remaining() {
let data = deserialize_chunk(&mut reader)?;
let (data, _) = deserialize_chunk(&mut reader)?;
res.extend_from_slice(&data);
}
Ok(res)
Expand Down Expand Up @@ -538,6 +539,78 @@ impl CasObject {
*ret.hash() == *hash
}

/// Validate CasObject.
/// Verifies each chunk is valid and correctly represented in CasObjectInfo, along with
/// recomputing the hash and validating it matches CasObjectInfo.
///
/// Returns Ok(true) if recomputed hash matches what is passed in.
pub fn validate_cas_object<R: Read + Seek>(reader: &mut R, hash: &MerkleHash) -> Result<bool, CasObjectError> {

// 1. deserialize to get Info
let cas = CasObject::deserialize(reader)?;

// 2. walk chunks from Info (skip the final dummy chunk)
let mut hash_chunks: Vec<Chunk> = Vec::new();
let mut cumulative_uncompressed_length: u32 = 0;
let mut cumulative_compressed_length: u32 = 0;
for (idx, c) in cas.info.chunk_size_info[..cas.info.chunk_size_info.len() - 1].iter().enumerate() {

// fence post conditions - starting chunk and final chunk
if idx == 0 && c.start_byte_index != 0 {
// for 1st chunk verify that its start_byte_index is 0
warn!("XORB Validation: Byte 0 does not contain 1st chunk.");
return Ok(false);
}
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

can we move this outside the loop to check the 1 value?


// 3. verify on each chunk:
reader.seek(std::io::SeekFrom::Start(c.start_byte_index as u64))?;
let (data, compressed_chunk_length) = deserialize_chunk(reader)?;
let chunk_uncompressed_length = data.len();

// 3a. compute hash
hash_chunks.push(Chunk {hash: merklehash::compute_data_hash(&data), length: chunk_uncompressed_length});

cumulative_uncompressed_length += data.len() as u32;
cumulative_compressed_length += compressed_chunk_length as u32;

// 3b. verify deserialized chunk is expected size from Info object
if cumulative_uncompressed_length != c.cumulative_uncompressed_len {
warn!("XORB Validation: Chunk length does not match Info object.");
return Ok(false);
}

// 3c. verify start byte index of next chunk matches current byte index + compressed length
if cas.info.chunk_size_info[idx+1].start_byte_index != (c.start_byte_index + compressed_chunk_length as u32) {
warn!("XORB Validation: Chunk start byte index does not match Info object.");
return Ok(false);
}
}

// validate that Info/footer begins immediately after final content xorb.
// end of for loop completes the content chunks, now should be able to deserialize an Info directly
let cur_position = reader.stream_position()? as u32;
let expected_position = cumulative_compressed_length;
let expected_from_end_position = reader.seek(std::io::SeekFrom::End(0))? as u32 - cas.info_length - size_of::<u32>() as u32;
if cur_position != expected_position || cur_position != expected_from_end_position {
warn!("XORB Validation: Content bytes after known chunks in Info object.");
return Ok(false);
}

// 4. combine hashes to get full xorb hash, compare to provided
let mut db = MerkleMemDB::default();
let mut staging = db.start_insertion_staging();
db.add_file(&mut staging, &hash_chunks);
let ret = db.finalize(staging);

if *ret.hash() != *hash || *ret.hash() != cas.info.cashash {
warn!("XORB Validation: Computed hash does not match provided hash or Info hash.");
return Ok(false);
}

Ok(true)

}

}

#[cfg(test)]
Expand Down Expand Up @@ -767,43 +840,38 @@ mod tests {
/// Serialize a small 3-chunk object into an in-memory buffer and confirm the
/// result passes full CasObject validation against its own hash.
fn test_basic_serialization_mem() {
    // Arrange
    let (c, _cas_data, raw_data) = build_cas_object(3, 100, false, false);
    let mut buf: Cursor<Vec<u8>> = Cursor::new(Vec::new());
    // Act & Assert
    assert!(CasObject::serialize(
        &mut buf,
        &c.info.cashash,
        &raw_data,
        &c.get_chunk_boundaries(),
        CompressionScheme::None
    )
    .is_ok());

    assert!(CasObject::validate_cas_object(&mut buf, &c.info.cashash).unwrap());
}

#[test]
fn test_serialization_deserialization_mem_medium() {
// Arrange
let (c, _cas_data, raw_data) = build_cas_object(32, 16384, false, false);
let mut writer: Cursor<Vec<u8>> = Cursor::new(Vec::new());
let mut buf: Cursor<Vec<u8>> = Cursor::new(Vec::new());
// Act & Assert
assert!(CasObject::serialize(
&mut writer,
&mut buf,
&c.info.cashash,
&raw_data,
&c.get_chunk_boundaries(),
CompressionScheme::None
)
.is_ok());

let mut reader = writer.clone();
assert!(CasObject::validate_cas_object(&mut buf, &c.info.cashash).unwrap());

let mut reader = buf.clone();
reader.set_position(0);
let res = CasObject::deserialize(&mut reader);
assert!(res.is_ok());
Expand All @@ -820,18 +888,20 @@ mod tests {
fn test_serialization_deserialization_mem_large_random() {
// Arrange
let (c, _cas_data, raw_data) = build_cas_object(32, 65536, true, false);
let mut writer: Cursor<Vec<u8>> = Cursor::new(Vec::new());
let mut buf: Cursor<Vec<u8>> = Cursor::new(Vec::new());
// Act & Assert
assert!(CasObject::serialize(
&mut writer,
&mut buf,
&c.info.cashash,
&raw_data,
&c.get_chunk_boundaries(),
CompressionScheme::None
)
.is_ok());

let mut reader = writer.clone();
assert!(CasObject::validate_cas_object(&mut buf, &c.info.cashash).unwrap());

let mut reader = buf.clone();
reader.set_position(0);
let res = CasObject::deserialize(&mut reader);
assert!(res.is_ok());
Expand All @@ -847,18 +917,20 @@ mod tests {
fn test_serialization_deserialization_file_large_random() {
// Arrange
let (c, _cas_data, raw_data) = build_cas_object(256, 65536, true, false);
let mut writer: Cursor<Vec<u8>> = Cursor::new(Vec::new());
let mut buf: Cursor<Vec<u8>> = Cursor::new(Vec::new());
// Act & Assert
assert!(CasObject::serialize(
&mut writer,
&mut buf,
&c.info.cashash,
&raw_data,
&c.get_chunk_boundaries(),
CompressionScheme::None
)
.is_ok());

let mut reader = writer.clone();
assert!(CasObject::validate_cas_object(&mut buf, &c.info.cashash).unwrap());

let mut reader = buf.clone();
reader.set_position(0);
let res = CasObject::deserialize(&mut reader);
assert!(res.is_ok());
Expand Down Expand Up @@ -902,18 +974,20 @@ mod tests {
fn test_serialization_deserialization_mem_medium_lz4() {
// Arrange
let (c, _cas_data, raw_data) = build_cas_object(32, 16384, false, true);
let mut writer: Cursor<Vec<u8>> = Cursor::new(Vec::new());
let mut buf: Cursor<Vec<u8>> = Cursor::new(Vec::new());
// Act & Assert
assert!(CasObject::serialize(
&mut writer,
&mut buf,
&c.info.cashash,
&raw_data,
&c.get_chunk_boundaries(),
CompressionScheme::LZ4
)
.is_ok());

let mut reader = writer.clone();
assert!(CasObject::validate_cas_object(&mut buf, &c.info.cashash).unwrap());

let mut reader = buf.clone();
reader.set_position(0);
let res = CasObject::deserialize(&mut reader);
assert!(res.is_ok());
Expand All @@ -930,18 +1004,20 @@ mod tests {
fn test_serialization_deserialization_mem_large_random_lz4() {
// Arrange
let (c, _cas_data, raw_data) = build_cas_object(32, 65536, true, true);
let mut writer: Cursor<Vec<u8>> = Cursor::new(Vec::new());
let mut buf: Cursor<Vec<u8>> = Cursor::new(Vec::new());
// Act & Assert
assert!(CasObject::serialize(
&mut writer,
&mut buf,
&c.info.cashash,
&raw_data,
&c.get_chunk_boundaries(),
CompressionScheme::LZ4
)
.is_ok());

let mut reader = writer.clone();
assert!(CasObject::validate_cas_object(&mut buf, &c.info.cashash).unwrap());

let mut reader = buf.clone();
reader.set_position(0);
let res = CasObject::deserialize(&mut reader);
assert!(res.is_ok());
Expand Down
2 changes: 1 addition & 1 deletion cas_types/src/key.rs
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ mod hex {
}
}

#[derive(Debug, Clone, Serialize, Deserialize, Default)]
#[derive(Debug, Clone, Copy, Serialize, Deserialize, Default)]
pub struct HexMerkleHash(#[serde(with = "hex::serde")] pub MerkleHash);

impl From<MerkleHash> for HexMerkleHash {
Expand Down