feat: use merge_many
Al-Kindi-0 committed Oct 21, 2024
1 parent 3314255 commit 3b6ccd9
Showing 2 changed files with 20 additions and 11 deletions.
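
In both files the commit replaces a left fold of 2-to-1 `H::merge` calls over a row's per-partition digests with a single `H::merge_many` call over a reusable digest buffer. Below is a minimal, self-contained sketch of the two combining strategies, using a toy XOR-based hasher whose `Digest`, `hash_elements`, `merge`, and `merge_many` only mirror the shape of the real `Hasher` trait; all names and numbers are illustrative, not the crate's actual types.

```rust
// Toy stand-ins for the hasher interface assumed by the diff; the real trait
// lives in the crypto crate and its digests are cryptographic, not XORs.
#[derive(Clone, Copy, Default, Debug, PartialEq)]
struct Digest(u64);

struct ToyHasher;

impl ToyHasher {
    fn hash_elements(chunk: &[u64]) -> Digest {
        // Placeholder "hash": rotate-and-XOR the elements together.
        Digest(chunk.iter().fold(0u64, |acc, x| acc.rotate_left(7) ^ x))
    }

    fn merge(pair: &[Digest; 2]) -> Digest {
        Digest(pair[0].0.rotate_left(13) ^ pair[1].0)
    }

    fn merge_many(digests: &[Digest]) -> Digest {
        // One compression over the whole slice instead of a chain of 2-to-1 merges.
        Digest(digests.iter().fold(0u64, |acc, d| acc.rotate_left(13) ^ d.0))
    }
}

// Old strategy: fold the per-partition digests with repeated 2-to-1 merges.
fn row_hash_fold(row: &[u64], chunk_len: usize) -> Digest {
    row.chunks(chunk_len)
        .map(ToyHasher::hash_elements)
        .fold(Digest::default(), |acc, cur| ToyHasher::merge(&[acc, cur]))
}

// New strategy: write the per-partition digests into a buffer, then merge once.
fn row_hash_merge_many(row: &[u64], chunk_len: usize, buffer: &mut [Digest]) -> Digest {
    row.chunks(chunk_len)
        .zip(buffer.iter_mut())
        .for_each(|(chunk, buf)| *buf = ToyHasher::hash_elements(chunk));
    ToyHasher::merge_many(buffer)
}

fn main() {
    let row: Vec<u64> = (0..16).collect();
    let num_partitions = 4;
    let chunk_len = row.len().div_ceil(num_partitions);
    let mut buffer = vec![Digest::default(); num_partitions];

    // The two strategies combine the partition digests differently, so the
    // resulting leaf digests differ; prover and verifier must use the same one.
    println!("fold:       {:?}", row_hash_fold(&row, chunk_len));
    println!("merge_many: {:?}", row_hash_merge_many(&row, chunk_len, &mut buffer));
}
```

The diff updates both the prover's row hashing and the verifier's leaf recomputation in the same way, so the two sides keep producing matching digests. Reusing `buffer` across rows (per batch in the prover, per query set in the verifier) also avoids allocating a fresh digest vector for every row.
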
9 changes: 5 additions & 4 deletions prover/src/matrix/row_matrix.rs
@@ -208,12 +208,13 @@ impl<E: FieldElement> RowMatrix<E> {
             &mut row_hashes,
             128, // min batch size
             |batch: &mut [H::Digest], batch_offset: usize| {
+                let mut buffer = vec![H::Digest::default(); num_partitions];
                 for (i, row_hash) in batch.iter_mut().enumerate() {
-                    *row_hash = self
-                        .row(batch_offset + i)
+                    self.row(batch_offset + i)
                         .chunks(num_elements_per_partition)
-                        .map(|chunk| H::hash_elements(chunk))
-                        .fold(H::Digest::default(), |acc, cur| H::merge(&[acc, cur]));
+                        .zip(buffer.iter_mut())
+                        .for_each(|(chunk, buf)| *buf = H::hash_elements(chunk));
+                    *row_hash = H::merge_many(&buffer);
                 }
             }
         );
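
The chunk width fed to `chunks` comes from `div_ceil`, so each row is cut into at most `num_partitions` slices of `ceil(n / num_partitions)` elements, one per entry of `buffer`, with the last slice possibly shorter. A standalone illustration with made-up sizes (the 10-element row and 4 partitions below are not values from the codebase):

```rust
fn main() {
    // Hypothetical row of 10 base-field elements split across 4 partitions.
    let row: Vec<u64> = (0..10).collect();
    let num_partitions = 4usize;

    // Same sizing rule as the diff: ceil(10 / 4) = 3 elements per partition.
    let num_elements_per_partition = row.len().div_ceil(num_partitions);

    // chunks(3) over 10 elements -> [0, 1, 2], [3, 4, 5], [6, 7, 8], [9]
    for (i, chunk) in row.chunks(num_elements_per_partition).enumerate() {
        println!("partition {i}: {chunk:?}");
    }
}
```
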
22 changes: 15 additions & 7 deletions verifier/src/channel.rs
@@ -200,13 +200,16 @@ where
             let number_of_base_field_elements = queries.main_states.num_columns();
             let num_elements_per_partition =
                 number_of_base_field_elements.div_ceil(self.num_partitions);
+            let mut buffer = vec![H::Digest::default(); self.num_partitions];
+
             queries
                 .main_states
                 .rows()
                 .map(|row| {
                     row.chunks(num_elements_per_partition)
-                        .map(|chunk| H::hash_elements(chunk))
-                        .fold(H::Digest::default(), |acc, cur| H::merge(&[acc, cur]))
+                        .zip(buffer.iter_mut())
+                        .for_each(|(chunk, buf)| *buf = H::hash_elements(chunk));
+                    H::merge_many(&buffer)
                 })
                 .collect()
         };
@@ -226,13 +229,15 @@ where
             let number_of_base_field_elements = aux_states.num_columns() * E::EXTENSION_DEGREE;
             let num_elements_per_partition =
                 number_of_base_field_elements.div_ceil(self.num_partitions);
+            let mut buffer = vec![H::Digest::default(); self.num_partitions];

             aux_states
                 .rows()
                 .map(|row| {
                     row.chunks(num_elements_per_partition)
-                        .map(|chunk| H::hash_elements(chunk))
-                        .fold(H::Digest::default(), |acc, cur| H::merge(&[acc, cur]))
+                        .zip(buffer.iter_mut())
+                        .for_each(|(chunk, buf)| *buf = H::hash_elements(chunk));
+                    H::merge_many(&buffer)
                 })
                 .collect()
         };
@@ -261,17 +266,20 @@ where
         let items: Vec<H::Digest> = if self.num_partitions == 1 {
             queries.evaluations.rows().map(|row| H::hash_elements(row)).collect()
         } else {
-            let number_of_base_field_elements = queries.evaluations.num_columns();
+            let number_of_base_field_elements =
+                queries.evaluations.num_columns() * E::EXTENSION_DEGREE;
             let num_elements_per_partition =
                 number_of_base_field_elements.div_ceil(self.num_partitions);
+            let mut buffer = vec![H::Digest::default(); self.num_partitions];

             queries
                 .evaluations
                 .rows()
                 .map(|row| {
                     row.chunks(num_elements_per_partition)
-                        .map(|chunk| H::hash_elements(chunk))
-                        .fold(H::Digest::default(), |acc, cur| H::merge(&[acc, cur]))
+                        .zip(buffer.iter_mut())
+                        .for_each(|(chunk, buf)| *buf = H::hash_elements(chunk));
+                    H::merge_many(&buffer)
                 })
                 .collect()
         };
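
This last hunk also changes how the element count for the constraint-evaluation queries is computed: as in the aux-trace branch above, it now counts base-field elements by multiplying the column count by `E::EXTENSION_DEGREE`, since each extension-field element consists of `E::EXTENSION_DEGREE` base-field coordinates. A rough sketch of that bookkeeping with a hypothetical quadratic extension and made-up sizes:

```rust
fn main() {
    // Stand-in for E::EXTENSION_DEGREE; a quadratic extension packs 2 base-field
    // coordinates into every extension-field element.
    const EXTENSION_DEGREE: usize = 2;

    // Hypothetical query table: 8 extension-field columns, 4 partitions.
    let num_columns = 8usize;
    let num_partitions = 4usize;

    // Partition sizing now counts base-field elements, matching the aux-trace branch.
    let number_of_base_field_elements = num_columns * EXTENSION_DEGREE; // 16
    let num_elements_per_partition =
        number_of_base_field_elements.div_ceil(num_partitions); // ceil(16 / 4) = 4

    println!(
        "{num_columns} extension columns = {number_of_base_field_elements} base elements \
         -> {num_partitions} partitions of {num_elements_per_partition}"
    );
}
```
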