fix issue with single chunk sized block
also add a little brute force test
rklaehn committed Aug 1, 2023
1 parent c85017d commit ac6fb22
Showing 2 changed files with 39 additions and 20 deletions.
34 changes: 34 additions & 0 deletions src/guts.rs
@@ -137,5 +137,39 @@ mod test {

        assert_eq!(is_subtree(4, 1024 * 4 - 1), true);
        assert_eq!(is_subtree(1, 1024 * 4), false);

        fn recursive_hash_block(start_chunk: u64, data: &[u8], is_root: bool) -> crate::Hash {
            if data.len() <= CHUNK_LEN {
                let mut hasher = ChunkState::new(start_chunk);
                hasher.update(data);
                hasher.finalize(is_root)
            } else {
                let chunks = data.len() / CHUNK_LEN + (data.len() % CHUNK_LEN != 0) as usize;
                let chunks = chunks.next_power_of_two();
                let mid = chunks / 2;
                let mid_bytes = mid * CHUNK_LEN;
                let left = recursive_hash_block(start_chunk, &data[..mid_bytes], false);
                let right =
                    recursive_hash_block(start_chunk + mid as u64, &data[mid_bytes..], false);
                parent_cv(&left, &right, is_root)
            }
        }

        let data = (0..1024 << 4).map(|i| i as u8).collect::<Vec<_>>();
        for block_log in 0..4 {
            let block_size = 1usize << block_log;
            let block_size_u64 = block_size as u64;
            for i in 0..100 {
                let start_chunk = i * block_size_u64;
                assert_eq!(
                    recursive_hash_block(start_chunk, &data[..CHUNK_LEN], false),
                    hash_block(start_chunk, &data[..CHUNK_LEN], false)
                );
                assert_eq!(
                    recursive_hash_block(start_chunk, &data[..block_size * CHUNK_LEN], false),
                    hash_block(start_chunk, &data[..block_size * CHUNK_LEN], false)
                );
            }
        }
    }
}
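
The chunks.next_power_of_two() / 2 step in the recursive reference above encodes BLAKE3's split rule: the left subtree always covers the largest power-of-two number of chunks that is strictly smaller than the total. A minimal standalone sketch of just that rule (the helper name left_subtree_chunks is mine, not part of the commit):

// Sketch, not from the commit: the left-subtree size rule that
// recursive_hash_block above relies on.
fn left_subtree_chunks(total_chunks: usize) -> usize {
    debug_assert!(total_chunks > 1);
    // next_power_of_two() rounds up to the nearest power of two, so halving
    // it gives the largest power of two strictly smaller than total_chunks.
    total_chunks.next_power_of_two() / 2
}

fn main() {
    assert_eq!(left_subtree_chunks(2), 1);
    assert_eq!(left_subtree_chunks(3), 2);
    assert_eq!(left_subtree_chunks(4), 2);
    assert_eq!(left_subtree_chunks(5), 4);
    assert_eq!(left_subtree_chunks(9), 8);
}

Because recursive_hash_block follows this rule, it should reproduce exactly what hash_block computes for any block starting at start_chunk, which is what the brute-force loop asserts.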
25 changes: 5 additions & 20 deletions src/lib.rs
@@ -70,7 +70,6 @@
#[cfg(feature = "zeroize")]
extern crate zeroize_crate as zeroize; // Needed because `zeroize::Zeroize` assumes the crate is named `zeroize`.


#[cfg(test)]
mod test;

@@ -409,18 +408,6 @@ impl Output {
        Hash(platform::le_bytes_from_words_32(&cv))
    }

    fn hash(&self, is_root: bool) -> Hash {
        // debug_assert_eq!(self.counter, 0);
        let mut cv = self.input_chaining_value;
        let mut flags = self.flags;
        if is_root {
            flags |= ROOT;
        }
        self.platform
            .compress_in_place(&mut cv, &self.block, self.block_len, 0, flags);
        Hash(platform::le_bytes_from_words_32(&cv))
    }

    fn root_output_block(&self) -> [u8; 2 * OUT_LEN] {
        self.platform.compress_xof(
            &self.input_chaining_value,
@@ -1267,7 +1254,6 @@ impl Hasher {
        // also. Convert it directly into an Output. Otherwise, we need to
        // merge subtrees below.
        if self.cv_stack.is_empty() {
            // debug_assert_eq!(self.chunk_state.chunk_counter, 0);
            return self.chunk_state.output();
        }

@@ -1286,11 +1272,6 @@
        let mut output: Output;
        let mut num_cvs_remaining = self.cv_stack.len();
        if self.chunk_state.len() > 0 {
            // debug_assert_eq!(
            //     self.cv_stack.len(),
            //     self.chunk_state.chunk_counter.count_ones() as usize,
            //     "cv stack does not need a merge"
            // );
            output = self.chunk_state.output();
        } else {
            debug_assert!(self.cv_stack.len() >= 2);
@@ -1327,7 +1308,11 @@

    fn finalize_node(&self, is_root: bool) -> Hash {
        let output = self.final_output();
        output.hash(is_root)
        if is_root {
            output.root_hash()
        } else {
            output.chaining_value().into()
        }
    }

    /// Finalize the hash state and return an [`OutputReader`], which can
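
For context on the finalize_node change above: only the true root of the tree is finalized with the ROOT flag; an interior node has to yield its plain chaining value instead. The removed Output::hash also compressed with a hard-coded counter of 0, while chaining_value() (at least in upstream blake3) uses the node's own counter, which looks like the source of the single-chunk-sized-block issue named in the commit title. Below is a minimal sketch of the root/non-root distinction, assuming the upstream blake3::guts API (ChunkState, CHUNK_LEN, finalize(is_root)) is available:

use blake3::guts::{ChunkState, CHUNK_LEN};

fn main() {
    let data = [0xab_u8; CHUNK_LEN];

    // The same single chunk finalized two ways: as the root of the whole
    // tree (ROOT flag set) and as a non-root subtree (plain chaining value).
    let as_root = ChunkState::new(0).update(&data).finalize(true);
    let as_subtree = ChunkState::new(0).update(&data).finalize(false);

    // The two results differ, which is why a block that happens to be a
    // single chunk must not be finalized as if it were the root.
    assert_ne!(as_root, as_subtree);
}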
