From 39bca59d45428bb2c13e8959bd6dab2dbad95ba2 Mon Sep 17 00:00:00 2001 From: Paul-Henry Kajfasz <42912740+phklive@users.noreply.github.com> Date: Tue, 11 Jun 2024 10:42:40 +0200 Subject: [PATCH 01/11] Swapped `maybe-async` for `maybe-async-await` (#283) --- air/src/air/assertions/mod.rs | 24 ++++----- air/src/air/context.rs | 23 ++++---- prover/Cargo.toml | 5 +- prover/src/lib.rs | 71 +++++++++++++------------ utils/maybe_async/Cargo.toml | 18 +++---- utils/maybe_async/README.md | 90 ++++++++----------------------- utils/maybe_async/src/lib.rs | 96 +++++++++++++++------------------- utils/maybe_async/src/parse.rs | 50 ------------------ utils/maybe_async/src/visit.rs | 50 ------------------ 9 files changed, 134 insertions(+), 293 deletions(-) delete mode 100644 utils/maybe_async/src/parse.rs delete mode 100644 utils/maybe_async/src/visit.rs diff --git a/air/src/air/assertions/mod.rs b/air/src/air/assertions/mod.rs index b898d3f56..0234d6613 100644 --- a/air/src/air/assertions/mod.rs +++ b/air/src/air/assertions/mod.rs @@ -30,18 +30,18 @@ const NO_STRIDE: usize = 0; /// An assertion is always placed against a single column of an execution trace, but can cover /// multiple steps and multiple values. Specifically, there are three kinds of assertions: /// -/// 1. **Single** assertion - which requires that a value in a single cell of an execution trace -/// is equal to the specified value. -/// 2. **Periodic** assertion - which requires that values in multiple cells of a single column -/// are equal to the specified value. The cells must be evenly spaced at intervals with lengths -/// equal to powers of two. For example, we can specify that values in a column must be equal -/// to 0 at steps 0, 8, 16, 24, 32 etc. Steps can also start at some offset - e.g., 1, 9, 17, -/// 25, 33 is also a valid sequence of steps. -/// 3. **Sequence** assertion - which requires that multiple cells in a single column are equal -/// to the values from the provided list. The cells must be evenly spaced at intervals with -/// lengths equal to powers of two. For example, we can specify that values in a column must -/// be equal to a sequence 1, 2, 3, 4 at steps 0, 8, 16, 24. That is, value at step 0 should be -/// equal to 1, value at step 8 should be equal to 2 etc. +/// 1. **Single** assertion - which requires that a value in a single cell of an execution trace +/// is equal to the specified value. +/// 2. **Periodic** assertion - which requires that values in multiple cells of a single column +/// are equal to the specified value. The cells must be evenly spaced at intervals with lengths +/// equal to powers of two. For example, we can specify that values in a column must be equal +/// to 0 at steps 0, 8, 16, 24, 32 etc. Steps can also start at some offset - e.g., 1, 9, 17, +/// 25, 33 is also a valid sequence of steps. +/// 3. **Sequence** assertion - which requires that multiple cells in a single column are equal +/// to the values from the provided list. The cells must be evenly spaced at intervals with +/// lengths equal to powers of two. For example, we can specify that values in a column must +/// be equal to a sequence 1, 2, 3, 4 at steps 0, 8, 16, 24. That is, value at step 0 should be +/// equal to 1, value at step 8 should be equal to 2 etc. /// /// Note that single and periodic assertions are succinct. That is, a verifier can evaluate them /// very efficiently. 
However, sequence assertions have linear complexity in the number of asserted values.

diff --git a/air/src/air/context.rs index a4a3756d9..f97e231da 100644 --- a/air/src/air/context.rs +++ b/air/src/air/context.rs @@ -263,20 +263,19 @@ impl<B: StarkField> AirContext<B> { /// /// This is the maximum of: /// 1. The maximum evaluation degree over all transition constraints minus the degree of the - /// transition constraint divisor divided by trace length. + /// transition constraint divisor divided by trace length. /// 2. `1`, because the constraint composition polynomial requires at least one column. /// - /// Since the degree of a constraint `C(x)` can be computed as `[constraint.base + - /// constraint.cycles.len()] * [trace_length - 1]` the degree of the constraint composition - /// polynomial can be computed as: `([constraint.base + constraint.cycles.len()] * [trace_length - /// - 1] - [trace_length - n])` where `constraint` is the constraint attaining the maximum and - /// `n` is the number of exemption points. In the case `n = 1`, the expression simplifies to: - /// `[constraint.base + constraint.cycles.len() - 1] * [trace_length - 1]` Thus, if each column - /// is of length `trace_length`, we would need `[constraint.base + constraint.cycles.len() - 1]` - /// columns to store the coefficients of the constraint composition polynomial. This means that - /// if the highest constraint degree is equal to `5`, the constraint composition polynomial will - /// require four columns and if the highest constraint degree is equal to `7`, it will require - /// six columns to store. + /// Since the degree of a constraint `C(x)` can be computed as `[constraint.base + + /// constraint.cycles.len()] * [trace_length - 1]` the degree of the constraint composition + /// polynomial can be computed as: `([constraint.base + constraint.cycles.len()] * [trace_length + /// - 1] - [trace_length - n])` where `constraint` is the constraint attaining the maximum and + /// `n` is the number of exemption points. In the case `n = 1`, the expression simplifies to: + /// `[constraint.base + constraint.cycles.len() - 1] * [trace_length - 1]` Thus, if each column + /// is of length `trace_length`, we would need `[constraint.base + constraint.cycles.len() - 1]` + /// columns to store the coefficients of the constraint composition polynomial. This means that + /// if the highest constraint degree is equal to `5`, the constraint composition polynomial will + /// require four columns and if the highest constraint degree is equal to `7`, it will require six columns to store. /// /// Note that the Lagrange kernel constraints require only 1 column, since the degree of the /// numerator is `trace_len - 1` for all transition constraints (i.e. the base degree is 1).
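To make the column-count arithmetic in the doc comment above concrete, here is a minimal standalone sketch (not part of the patch; `num_composition_columns` and its parameters are illustrative names, with `base_degree_multiplier` standing in for `constraint.base + constraint.cycles.len()` and `num_exemptions` for `n`):

```rust
/// Computes how many columns of length `trace_length` are needed to store the
/// constraint composition polynomial, per the formula in the doc comment above.
fn num_composition_columns(
    base_degree_multiplier: usize,
    trace_length: usize,
    num_exemptions: usize,
) -> usize {
    // degree of C(x) after dividing by the transition constraint divisor:
    // multiplier * (trace_length - 1) - (trace_length - n)
    let quotient_degree =
        base_degree_multiplier * (trace_length - 1) - (trace_length - num_exemptions);
    // each column holds one polynomial of degree less than `trace_length`,
    // and at least one column is always required
    quotient_degree.div_ceil(trace_length).max(1)
}

fn main() {
    // reproduces the examples from the doc comment: with n = 1, a highest
    // constraint degree of 5 needs four columns, and a degree of 7 needs six
    assert_eq!(num_composition_columns(5, 64, 1), 4);
    assert_eq!(num_composition_columns(7, 64, 1), 6);
}
```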
diff --git a/prover/Cargo.toml b/prover/Cargo.toml index 42857726f..36272766f 100644 --- a/prover/Cargo.toml +++ b/prover/Cargo.toml @@ -24,18 +24,17 @@ name = "lagrange_kernel" harness = false [features] -async = ["async-trait", "maybe_async/async"] +async = ["maybe_async/async"] concurrent = ["crypto/concurrent", "math/concurrent", "fri/concurrent", "utils/concurrent", "std"] default = ["std"] std = ["air/std", "crypto/std", "fri/std", "math/std", "utils/std"] [dependencies] air = { version = "0.9", path = "../air", package = "winter-air", default-features = false } -async-trait = { version = "0.1.80", optional = true } crypto = { version = "0.9", path = "../crypto", package = "winter-crypto", default-features = false } fri = { version = "0.9", path = '../fri', package = "winter-fri", default-features = false } math = { version = "0.9", path = "../math", package = "winter-math", default-features = false } -maybe_async = { version = "0.9", path = "../utils/maybe_async", package = "winter-maybe-async"} +maybe_async = { path = "../utils/maybe_async" , package = "winter-maybe-async" } tracing = { version = "0.1", default-features = false, features = ["attributes"]} utils = { version = "0.9", path = "../utils/core", package = "winter-utils", default-features = false } diff --git a/prover/src/lib.rs b/prover/src/lib.rs index edac1fdad..7347a6c69 100644 --- a/prover/src/lib.rs +++ b/prover/src/lib.rs @@ -42,9 +42,6 @@ #[macro_use] extern crate alloc; -#[cfg(feature = "async")] -use alloc::boxed::Box; - use air::AuxRandElements; pub use air::{ proof, proof::Proof, Air, AirContext, Assertion, BoundaryConstraint, BoundaryConstraintGroup, @@ -61,7 +58,7 @@ use math::{ fields::{CubeExtension, QuadExtension}, ExtensibleField, FieldElement, StarkField, ToElements, }; -use maybe_async::maybe_async; +use maybe_async::{maybe_async, maybe_await}; use tracing::{event, info_span, instrument, Level}; pub use utils::{ iterators, ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable, @@ -129,7 +126,6 @@ pub type ProverGkrProof
<P> = <<P as Prover>
::Air as Air>::GkrProof; /// of these types are provided with the prover). For example, providing custom implementations /// of [TraceLde] and/or [ConstraintEvaluator] can be beneficial when some steps of proof /// generation can be delegated to non-CPU hardware (e.g., GPUs). -#[maybe_async] pub trait Prover { /// Base field for the computation described by this prover. type BaseField: StarkField + ExtensibleField<2> + ExtensibleField<3>; @@ -177,7 +173,8 @@ pub trait Prover { /// /// Returns a tuple containing a [TracePolyTable] with the trace polynomials for the main trace /// and a new [TraceLde] instance from which the LDE and trace commitments can be obtained. - async fn new_trace_lde( + #[maybe_async] + fn new_trace_lde( &self, trace_info: &TraceInfo, main_trace: &ColMatrix, @@ -188,7 +185,8 @@ pub trait Prover { /// Returns a new constraint evaluator which can be used to evaluate transition and boundary /// constraints over the extended execution trace. - async fn new_evaluator<'a, E>( + #[maybe_async] + fn new_evaluator<'a, E>( &self, air: &'a Self::Air, aux_rand_elements: Option>, @@ -202,7 +200,8 @@ pub trait Prover { /// Builds the GKR proof. If the [`Air`] doesn't use a GKR proof, leave unimplemented. #[allow(unused_variables)] - async fn generate_gkr_proof( + #[maybe_async] + fn generate_gkr_proof( &self, main_trace: &Self::Trace, public_coin: &mut Self::RandomCoin, @@ -215,7 +214,8 @@ pub trait Prover { /// Builds and returns the auxiliary trace. #[allow(unused_variables)] - async fn build_aux_trace( + #[maybe_async] + fn build_aux_trace( &self, main_trace: &Self::Trace, aux_rand_elements: &AuxRandElements, @@ -234,7 +234,8 @@ pub trait Prover { /// public inputs. It may also contain a GKR proof, further documented in [`Proof`]. /// Public inputs must match the value returned from /// [Self::get_pub_inputs()](Prover::get_pub_inputs) for the provided trace. - async fn prove(&self, trace: Self::Trace) -> Result + #[maybe_async] + fn prove(&self, trace: Self::Trace) -> Result where ::PublicInputs: Send, ::GkrProof: Send, @@ -243,18 +244,18 @@ pub trait Prover { // of static dispatch for selecting two generic parameter: extension field and hash // function. match self.options().field_extension() { - FieldExtension::None => self.generate_proof::(trace).await, + FieldExtension::None => maybe_await!(self.generate_proof::(trace)), FieldExtension::Quadratic => { if !>::is_supported() { return Err(ProverError::UnsupportedFieldExtension(2)); } - self.generate_proof::>(trace).await + maybe_await!(self.generate_proof::>(trace)) }, FieldExtension::Cubic => { if !>::is_supported() { return Err(ProverError::UnsupportedFieldExtension(3)); } - self.generate_proof::>(trace).await + maybe_await!(self.generate_proof::>(trace)) }, } } @@ -266,7 +267,8 @@ pub trait Prover { /// execution `trace` is valid against this prover's AIR. /// TODO: make this function un-callable externally? 
#[doc(hidden)] - async fn generate_proof(&self, trace: Self::Trace) -> Result + #[maybe_async] + fn generate_proof(&self, trace: Self::Trace) -> Result where E: FieldElement, ::PublicInputs: Send, @@ -303,7 +305,7 @@ pub trait Prover { // commit to the main trace segment let (mut trace_lde, mut trace_polys) = - self.commit_to_main_trace_segment(&trace, &domain, &mut channel).await; + maybe_await!(self.commit_to_main_trace_segment(&trace, &domain, &mut channel)); // build the auxiliary trace segment, and append the resulting segments to trace commitment // and trace polynomial table structs @@ -311,7 +313,7 @@ pub trait Prover { let (gkr_proof, lagrange_rand_elements) = if air.context().has_lagrange_kernel_aux_column() { let (gkr_proof, lagrange_rand_elements) = - self.generate_gkr_proof(&trace, channel.public_coin()).await; + maybe_await!(self.generate_gkr_proof(&trace, channel.public_coin())); (Some(gkr_proof), Some(lagrange_rand_elements)) } else { @@ -326,7 +328,7 @@ pub trait Prover { AuxRandElements::new_with_lagrange(rand_elements, lagrange_rand_elements) }; - let aux_trace = self.build_aux_trace(&trace, &aux_rand_elements).await; + let aux_trace = maybe_await!(self.build_aux_trace(&trace, &aux_rand_elements)); // commit to the auxiliary trace segment let aux_segment_polys = { @@ -373,16 +375,17 @@ pub trait Prover { // compute random linear combinations of these evaluations using coefficients drawn from // the channel let ce_domain_size = air.ce_domain_size(); - let composition_poly_trace = self - .new_evaluator(&air, aux_rand_elements, channel.get_constraint_composition_coeffs()) - .await - .evaluate(&trace_lde, &domain); + let composition_poly_trace = maybe_await!(self.new_evaluator( + &air, + aux_rand_elements, + channel.get_constraint_composition_coeffs() + )) + .evaluate(&trace_lde, &domain); assert_eq!(composition_poly_trace.num_rows(), ce_domain_size); // 3 ----- commit to constraint evaluations ----------------------------------------------- - let (constraint_commitment, composition_poly) = self - .commit_to_constraint_evaluations(&air, composition_poly_trace, &domain, &mut channel) - .await; + let (constraint_commitment, composition_poly) = maybe_await!(self + .commit_to_constraint_evaluations(&air, composition_poly_trace, &domain, &mut channel)); // 4 ----- build DEEP composition polynomial ---------------------------------------------- let deep_composition_poly = { @@ -509,7 +512,8 @@ pub trait Prover { /// /// The commitment is computed by hashing each row in the evaluation matrix, and then building /// a Merkle tree from the resulting hashes. 
- async fn build_constraint_commitment( + #[maybe_async] + fn build_constraint_commitment( &self, composition_poly_trace: CompositionPolyTrace, num_constraint_composition_columns: usize, @@ -556,18 +560,19 @@ pub trait Prover { #[doc(hidden)] #[instrument(skip_all)] - async fn commit_to_main_trace_segment( + #[maybe_async] + fn commit_to_main_trace_segment( &self, trace: &Self::Trace, domain: &StarkDomain, - channel: &mut ProverChannel, + channel: &mut ProverChannel<'_, Self::Air, E, Self::HashFn, Self::RandomCoin>, ) -> (Self::TraceLde, TracePolyTable) where E: FieldElement, { // extend the main execution trace and build a Merkle tree from the extended trace let (trace_lde, trace_polys) = - self.new_trace_lde(trace.info(), trace.main_segment(), domain).await; + maybe_await!(self.new_trace_lde(trace.info(), trace.main_segment(), domain)); // get the commitment to the main trace segment LDE let main_trace_root = trace_lde.get_main_trace_commitment(); @@ -581,25 +586,25 @@ pub trait Prover { #[doc(hidden)] #[instrument(skip_all)] - async fn commit_to_constraint_evaluations( + #[maybe_async] + fn commit_to_constraint_evaluations( &self, air: &Self::Air, composition_poly_trace: CompositionPolyTrace, domain: &StarkDomain, - channel: &mut ProverChannel, + channel: &mut ProverChannel<'_, Self::Air, E, Self::HashFn, Self::RandomCoin>, ) -> (ConstraintCommitment, CompositionPoly) where E: FieldElement, { // first, build a commitment to the evaluations of the constraint composition polynomial // columns - let (constraint_commitment, composition_poly) = self + let (constraint_commitment, composition_poly) = maybe_await!(self .build_constraint_commitment::( composition_poly_trace, air.context().num_constraint_composition_columns(), domain, - ) - .await; + )); // then, commit to the evaluations of constraints by writing the root of the constraint // Merkle tree into the channel diff --git a/utils/maybe_async/Cargo.toml b/utils/maybe_async/Cargo.toml index 355dba2da..5eb4f83ae 100644 --- a/utils/maybe_async/Cargo.toml +++ b/utils/maybe_async/Cargo.toml @@ -1,26 +1,22 @@ [package] name = "winter-maybe-async" -version = "0.9.0" +version = "0.10.0" description = "sync/async macro for winterfell" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/facebook/winterfell" -documentation = "https://docs.rs/winter-maybe-async/0.9.0" +documentation = "https://docs.rs/winter-maybe-async/0.10.0" keywords = ["async"] edition = "2021" -rust-version = "1.75" +rust-version = "1.78" [lib] proc-macro = true -[dependencies] -proc-macro2 = "1.0" -quote = "1.0" -syn = { version = "2.0", features = ["full", "visit-mut"] } - -[dev-dependencies] -async-trait = "0.1" - [features] async = [] + +[dependencies] +quote = "1" +syn = { version = "2", features = ["full"] } diff --git a/utils/maybe_async/README.md b/utils/maybe_async/README.md index 87c14ac1d..8be404ca5 100644 --- a/utils/maybe_async/README.md +++ b/utils/maybe_async/README.md @@ -1,91 +1,47 @@ # Winter maybe-async -This crate contains a `maybe_async` proc macro that abstracts away sync/async. It is heavily based on [`maybe-async`](https://github.com/fMeow/maybe-async-rs). -The `maybe_async` macro will generate a synchronous or asynchronous version of the trait it is marking. To generate the asynchronous version, enable the `async` feature on the crate. If the `async` feature is off, the synchronous version will be generated. 
For example, -```rs -#[maybe_async] -trait ExampleTrait { - async fn say_hello(&self) { - let hello = self.get_hello().await; +## maybe_async - println!("{}", hello); - } +The `maybe_async` macro will conditionally add the `async` keyword to a function it marks depending on the `async` feature being enabled. To generate the asynchronous version, enable the `async` feature on the crate. If the `async` feature is off, the synchronous version will be generated. For example, - async fn get_hello(&self) -> String { - "hello".into() - } -} - -// Generate code when `async` feature is turned ON -#[async_trait] +```rust +// Adding `maybe_async` to trait functions trait ExampleTrait { - async fn say_hello(&self) { - let hello = self.get_hello().await; - - println!("{}", hello); - } + #[maybe_async] + fn say_hello(&self); - async fn get_hello(&self) -> String { - "hello".into() - } + #[maybe_async] + fn get_hello(&self) -> String; } -// Generate code when `async` feature is turned OFF -trait ExampleTrait { - fn say_hello(&self) { - let hello = self.get_hello(); - - println!("{}", hello); - } - - fn get_hello(&self) -> String { - "hello".into() - } +// Adding `maybe_async` to regular functions +#[maybe_async] +fn hello_world() { + // ... } ``` -where `#[async_trait]` is the proc macro provided by the [`async-trait`](https://crates.io/crates/async-trait) crate. Notice how `#[maybe_async]` took care of removing the `.await` in the synchronous version of `say_hello()`. - -`#[maybe_async]` can also mark `impl` blocks in a similar manner. For example, +## maybe_await -```rs -struct ExampleStruct; +To complement `maybe_async` we also have the `maybe_await` procedural macro that conditionally adds the `.await` keyword to the end of an expression depending on the `async` feature flag. +```rust #[maybe_async] -impl ExampleTrait for ExampleStruct { - async fn say_hello(&self) { - println!("hello!"); - } -} - -// Generate code when `async` feature is turned ON -#[async_trait] -impl ExampleTrait for ExampleStruct { - async fn say_hello(&self) { - println!("hello!"); - } -} +fn hello_world() { + // Adding `maybe_await` to an expression + let w = maybe_await!(world()); -// Generate code when `async` feature is turned OFF -impl ExampleTrait for ExampleStruct { - fn say_hello(&self) { - println!("hello!"); - } + println!("hello {}", w); } -``` - -Finally, `#[maybe_async]` can be used on `fn` items, which works in an analogous way to the previous examples. - -```rs #[maybe_async] -async fn say_hello() { - // ... +fn world() -> String { + "world".to_string() } ``` -License -------- +## License This project is [MIT licensed](../../LICENSE). diff --git a/utils/maybe_async/src/lib.rs index d60708c80..11505327a 100644 --- a/utils/maybe_async/src/lib.rs +++ b/utils/maybe_async/src/lib.rs @@ -4,67 +4,53 @@ // LICENSE file in the root directory of this source tree.
use proc_macro::TokenStream; -use proc_macro2::TokenStream as TokenStream2; use quote::quote; -use syn::{parse_macro_input, ImplItem, TraitItem}; +use syn::{parse_macro_input, Expr, ItemFn, TraitItemFn}; -mod parse; -use parse::Item; - -mod visit; -use visit::AsyncAwaitRemoval; - -/// maybe_async attribute macro +/// maybe_async procedural attribute macro +/// +/// Parses a function (regular or trait) and conditionally adds the `async` keyword +/// depending on the `async` feature flag being enabled. #[proc_macro_attribute] -pub fn maybe_async(_args: TokenStream, input: TokenStream) -> TokenStream { - let mut item = parse_macro_input!(input as Item); - - let token = if cfg!(feature = "async") { - convert_async(&mut item) - } else { - convert_sync(&mut item) - }; - - token.into() -} - -fn convert_sync(input: &mut Item) -> TokenStream2 { - match input { - Item::Impl(item) => { - for inner in &mut item.items { - if let ImplItem::Fn(ref mut method) = inner { - if method.sig.asyncness.is_some() { - method.sig.asyncness = None; - } - } +pub fn maybe_async(_attr: TokenStream, input: TokenStream) -> TokenStream { + if let Ok(func) = syn::parse::(input.clone()) { + if cfg!(feature = "async") { + let ItemFn { attrs, vis, sig, block } = func; + quote! { + #(#attrs)* #vis async #sig { #block } } - AsyncAwaitRemoval.remove_async_await(quote!(#item)) - }, - Item::Trait(item) => { - for inner in &mut item.items { - if let TraitItem::Fn(ref mut method) = inner { - if method.sig.asyncness.is_some() { - method.sig.asyncness = None; - } - } + .into() + } else { + quote!(#func).into() + } + } else if let Ok(func) = syn::parse::(input.clone()) { + if cfg!(feature = "async") { + let TraitItemFn { attrs, sig, default, semi_token } = func; + quote! { + #(#attrs)* async #sig #default #semi_token } - AsyncAwaitRemoval.remove_async_await(quote!(#item)) - }, - Item::Fn(item) => { - if item.sig.asyncness.is_some() { - item.sig.asyncness = None; - } - AsyncAwaitRemoval.remove_async_await(quote!(#item)) - }, - Item::Static(item) => AsyncAwaitRemoval.remove_async_await(quote!(#item)), + .into() + } else { + quote!(#func).into() + } + } else { + input } } -fn convert_async(input: &mut Item) -> TokenStream2 { - match input { - Item::Trait(item) => quote!(#[async_trait::async_trait]#item), - Item::Impl(item) => quote!(#[async_trait::async_trait]#item), - Item::Fn(item) => quote!(#item), - Item::Static(item) => quote!(#item), - } +/// maybe_await procedural macro +/// +/// Parses an expression and conditionally adds the `.await` keyword at the end of it +/// depending on the `async` feature flag being enabled. +#[proc_macro] +pub fn maybe_await(input: TokenStream) -> TokenStream { + let item = parse_macro_input!(input as Expr); + + let quote = if cfg!(feature = "async") { + quote!(#item.await) + } else { + quote!(#item) + }; + + quote.into() } diff --git a/utils/maybe_async/src/parse.rs b/utils/maybe_async/src/parse.rs deleted file mode 100644 index 960eafcd3..000000000 --- a/utils/maybe_async/src/parse.rs +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright (c) Facebook, Inc. and its affiliates. -// -// This source code is licensed under the MIT license found in the -// LICENSE file in the root directory of this source tree. - -//! This file was heavily inspired by [`parse.rs`](https://github.com/fMeow/maybe-async-rs/blob/b32a81704f6d84576d77e06882a06570e7f38de9/src/parse.rs). 
- -use proc_macro2::Span; -use syn::{ - parse::{discouraged::Speculative, Parse, ParseStream, Result}, - Attribute, Error, ItemFn, ItemImpl, ItemStatic, ItemTrait, -}; - -pub enum Item { - Trait(ItemTrait), - Impl(ItemImpl), - Fn(ItemFn), - Static(ItemStatic), -} - -macro_rules! fork { - ($fork:ident = $input:ident) => {{ - $fork = $input.fork(); - &$fork - }}; -} - -impl Parse for Item { - fn parse(input: ParseStream) -> Result { - let attrs = input.call(Attribute::parse_outer)?; - let mut fork; - let item = if let Ok(mut item) = fork!(fork = input).parse::() { - item.attrs = attrs; - Item::Impl(item) - } else if let Ok(mut item) = fork!(fork = input).parse::() { - item.attrs = attrs; - Item::Trait(item) - } else if let Ok(mut item) = fork!(fork = input).parse::() { - item.attrs = attrs; - Item::Fn(item) - } else if let Ok(mut item) = fork!(fork = input).parse::() { - item.attrs = attrs; - Item::Static(item) - } else { - return Err(Error::new(Span::call_site(), "expected impl, trait or fn")); - }; - input.advance_to(&fork); - Ok(item) - } -} diff --git a/utils/maybe_async/src/visit.rs b/utils/maybe_async/src/visit.rs deleted file mode 100644 index cdb8f8898..000000000 --- a/utils/maybe_async/src/visit.rs +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright (c) Facebook, Inc. and its affiliates. -// -// This source code is licensed under the MIT license found in the -// LICENSE file in the root directory of this source tree. - -//! This file was heavily inspired by [`visit.rs`](https://github.com/fMeow/maybe-async-rs/blob/b32a81704f6d84576d77e06882a06570e7f38de9/src/visit.rs). - -use proc_macro2::TokenStream; -use quote::quote; -use syn::{ - visit_mut::{self, VisitMut}, - Expr, ExprBlock, File, Stmt, -}; - -pub struct AsyncAwaitRemoval; - -impl AsyncAwaitRemoval { - pub fn remove_async_await(&mut self, item: TokenStream) -> TokenStream { - let mut syntax_tree: File = syn::parse(item.into()).unwrap(); - self.visit_file_mut(&mut syntax_tree); - quote!(#syntax_tree) - } -} - -impl VisitMut for AsyncAwaitRemoval { - fn visit_expr_mut(&mut self, node: &mut Expr) { - // Delegate to the default impl to visit nested expressions. - visit_mut::visit_expr_mut(self, node); - - match node { - Expr::Await(expr) => *node = (*expr.base).clone(), - - Expr::Async(expr) => { - let inner = &expr.block; - let sync_expr = if let [Stmt::Expr(expr, None)] = inner.stmts.as_slice() { - // remove useless braces when there is only one statement - expr.clone() - } else { - Expr::Block(ExprBlock { - attrs: expr.attrs.clone(), - block: inner.clone(), - label: None, - }) - }; - *node = sync_expr; - }, - _ => {}, - } - } -} From 445816f27e09213ecd81d9d963665ad9dcc020f9 Mon Sep 17 00:00:00 2001 From: Irakliy Khaburzaniya Date: Tue, 11 Jun 2024 01:52:19 -0700 Subject: [PATCH 02/11] updated changelog and improved maybe-async docs --- CHANGELOG.md | 3 ++ utils/maybe_async/README.md | 26 ++++++++++++++ utils/maybe_async/src/lib.rs | 69 ++++++++++++++++++++++++++++++++---- 3 files changed, 91 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9cef5f459..c3057c630 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,8 @@ # Changelog +## 0.10.0 (2024-06-11) - `utils/maybe-async` crate only +* [BREAKING] Refactored `maybe-async` macro into simpler `maybe-async` and `maybe-await` macros. + ## 0.9.0 (2024-05-09) - [BREAKING] Merged `TraceLayout` into `TraceInfo` (#245). 
- Implemented Lagrange Kernel constraints (#247, ) diff --git a/utils/maybe_async/README.md index 8be404ca5..e1827dfe1 100644 --- a/utils/maybe_async/README.md +++ b/utils/maybe_async/README.md @@ -23,6 +23,19 @@ fn hello_world() { } ``` +When the `async` feature is enabled, the above code will be transformed into: +```Rust +trait ExampleTrait { + async fn say_hello(&self); + + async fn get_hello(&self) -> String; +} + +async fn hello_world() { + // ... +} +``` + ## maybe_await To complement `maybe_async` we also have the `maybe_await` procedural macro that conditionally adds the `.await` keyword to the end of an expression depending on the `async` feature flag. @@ -42,6 +55,19 @@ fn world() -> String { } ``` +When the `async` feature is enabled, the above code will be transformed into: +```Rust +async fn hello_world() { + let w = world().await; + + println!("hello {}", w); +} + +async fn world() -> String { + "world".to_string() +} +``` + ## License This project is [MIT licensed](../../LICENSE). diff --git a/utils/maybe_async/src/lib.rs index 11505327a..44c5cde12 100644 --- a/utils/maybe_async/src/lib.rs +++ b/utils/maybe_async/src/lib.rs @@ -7,10 +7,39 @@ use proc_macro::TokenStream; use quote::quote; use syn::{parse_macro_input, Expr, ItemFn, TraitItemFn}; -/// maybe_async procedural attribute macro +/// Parses a function (regular or trait) and conditionally adds the `async` keyword depending on +/// the `async` feature flag being enabled. /// -/// Parses a function (regular or trait) and conditionally adds the `async` keyword -/// depending on the `async` feature flag being enabled. +/// For example: +/// ```ignore +/// trait ExampleTrait { +/// #[maybe_async] +/// fn say_hello(&self); +/// +/// #[maybe_async] +/// fn get_hello(&self) -> String; +/// } +/// +/// +/// #[maybe_async] +/// fn hello_world() { +/// // ... +/// } +/// ``` +/// +/// When the `async` feature is enabled, will be transformed into: +/// ```ignore +/// trait ExampleTrait { +/// async fn say_hello(&self); +/// +/// async fn get_hello(&self) -> String; +/// } +/// +/// +/// async fn hello_world() { +/// // ... +/// } +/// ``` #[proc_macro_attribute] pub fn maybe_async(_attr: TokenStream, input: TokenStream) -> TokenStream { if let Ok(func) = syn::parse::(input.clone()) { @@ -38,10 +67,36 @@ pub fn maybe_async(_attr: TokenStream, input: TokenStream) -> TokenStream { } } -/// maybe_await procedural macro -/// -/// Parses an expression and conditionally adds the `.await` keyword at the end of it -/// depending on the `async` feature flag being enabled. +/// Parses an expression and conditionally adds the `.await` keyword at the end of it depending on +/// the `async` feature flag being enabled.
+/// +/// ```ignore +/// #[maybe_async] +/// fn hello_world() { +/// // Adding `maybe_await` to an expression +/// let w = maybe_await!(world()); +/// +/// println!("hello {}", w); +/// } +/// +/// #[maybe_async] +/// fn world() -> String { +/// "world".to_string() +/// } +/// ``` +/// +/// When the `async` feature is enabled, will be transformed into: +/// ```ignore +/// async fn hello_world() { +/// let w = world().await; +/// +/// println!("hello {}", w); +/// } +/// +/// async fn world() -> String { +/// "world".to_string() +/// } +/// ``` #[proc_macro] pub fn maybe_await(input: TokenStream) -> TokenStream { let item = parse_macro_input!(input as Expr); From aaa94316e6eff002036e8663440602f8c22b5715 Mon Sep 17 00:00:00 2001 From: Irakliy Khaburzaniya Date: Tue, 11 Jun 2024 02:03:30 -0700 Subject: [PATCH 03/11] pacified rustfmt --- utils/maybe_async/src/lib.rs | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/utils/maybe_async/src/lib.rs b/utils/maybe_async/src/lib.rs index 44c5cde12..c9eb3a056 100644 --- a/utils/maybe_async/src/lib.rs +++ b/utils/maybe_async/src/lib.rs @@ -15,27 +15,27 @@ use syn::{parse_macro_input, Expr, ItemFn, TraitItemFn}; /// trait ExampleTrait { /// #[maybe_async] /// fn say_hello(&self); -/// +/// /// #[maybe_async] /// fn get_hello(&self) -> String; /// } /// -/// +/// /// #[maybe_async] /// fn hello_world() { /// // ... /// } /// ``` -/// +/// /// When the `async` feature is enabled, will be transformed into: /// ```ignore /// trait ExampleTrait { /// async fn say_hello(&self); -/// +/// /// async fn get_hello(&self) -> String; /// } /// -/// +/// /// async fn hello_world() { /// // ... /// } @@ -69,30 +69,30 @@ pub fn maybe_async(_attr: TokenStream, input: TokenStream) -> TokenStream { /// Parses an expression and conditionally adds the `.await` keyword at the end of it depending on /// the `async` feature flag being enabled. 
-/// +/// /// ```ignore /// #[maybe_async] /// fn hello_world() { /// // Adding `maybe_await` to an expression /// let w = maybe_await!(world()); -/// +/// /// println!("hello {}", w); /// } -/// +/// /// #[maybe_async] /// fn world() -> String { /// "world".to_string() /// } /// ``` -/// +/// /// When the `async` feature is enabled, will be transformed into: /// ```ignore /// async fn hello_world() { /// let w = world().await; -/// +/// /// println!("hello {}", w); /// } -/// +/// /// async fn world() -> String { /// "world".to_string() /// } From ff5496bcb9863d32279ad62b9f73758abdaa3ab1 Mon Sep 17 00:00:00 2001 From: Irakliy Khaburzaniya Date: Tue, 11 Jun 2024 02:09:49 -0700 Subject: [PATCH 04/11] minor doc fixes --- utils/maybe_async/README.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/utils/maybe_async/README.md b/utils/maybe_async/README.md index e1827dfe1..117c70362 100644 --- a/utils/maybe_async/README.md +++ b/utils/maybe_async/README.md @@ -24,7 +24,8 @@ fn hello_world() { ``` When the `async` feature is enabled, the above code will be transformed into: -```Rust + +```rust trait ExampleTrait { async fn say_hello(&self); @@ -56,7 +57,8 @@ fn world() -> String { ``` When the `async` feature is enabled, the above code will be transformed into: -```Rust + +```rust async fn hello_world() { let w = world().await; From 00f25792bb54ce45065afed12492920f625915fb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Philippe=20Laferri=C3=A8re?= Date: Mon, 24 Jun 2024 17:59:42 -0400 Subject: [PATCH 05/11] Fix GKR-LogUp API (#287) --- air/src/air/aux.rs | 74 ++++++++++++++++---- air/src/air/lagrange/mod.rs | 6 +- air/src/air/mod.rs | 10 +-- air/src/lib.rs | 2 +- examples/src/rescue_raps/air.rs | 8 ++- prover/benches/lagrange_kernel.rs | 16 ++--- prover/src/constraints/evaluator/boundary.rs | 4 +- prover/src/constraints/evaluator/default.rs | 7 +- prover/src/lib.rs | 28 ++++---- prover/src/trace/mod.rs | 4 +- utils/core/src/serde/mod.rs | 4 +- verifier/src/evaluator.rs | 8 +-- verifier/src/lib.rs | 6 +- winterfell/src/tests.rs | 20 +++--- 14 files changed, 122 insertions(+), 75 deletions(-) diff --git a/air/src/air/aux.rs b/air/src/air/aux.rs index 8290a449d..11cbc33db 100644 --- a/air/src/air/aux.rs +++ b/air/src/air/aux.rs @@ -8,39 +8,83 @@ use super::lagrange::LagrangeKernelRandElements; /// Holds the randomly generated elements necessary to build the auxiliary trace. /// -/// Specifically, [`AuxRandElements`] currently supports 2 types of random elements: +/// Specifically, [`AuxRandElements`] currently supports 3 types of random elements: /// - the ones needed to build the Lagrange kernel column (when using GKR to accelerate LogUp), +/// - the ones needed to build the "s" auxiliary column (when using GKR to accelerate LogUp), /// - the ones needed to build all the other auxiliary columns #[derive(Debug, Clone)] pub struct AuxRandElements { rand_elements: Vec, - lagrange: Option>, + gkr: Option>, } impl AuxRandElements { /// Creates a new [`AuxRandElements`], where the auxiliary trace doesn't contain a Lagrange /// kernel column. pub fn new(rand_elements: Vec) -> Self { - Self { rand_elements, lagrange: None } + Self { rand_elements, gkr: None } } - /// Creates a new [`AuxRandElements`], where the auxiliary trace contains a Lagrange kernel - /// column. 
- pub fn new_with_lagrange( - rand_elements: Vec, - lagrange: Option>, - ) -> Self { - Self { rand_elements, lagrange } + /// Creates a new [`AuxRandElements`], where the auxiliary trace contains columns needed when + /// using GKR to accelerate LogUp (i.e. a Lagrange kernel column and the "s" column). + pub fn new_with_gkr(rand_elements: Vec, gkr: GkrRandElements) -> Self { + Self { rand_elements, gkr: Some(gkr) } } - /// Returns the random elements needed to build all columns other than the Lagrange kernel one. + /// Returns the random elements needed to build all columns other than the two GKR-related ones. pub fn rand_elements(&self) -> &[E] { &self.rand_elements } /// Returns the random elements needed to build the Lagrange kernel column. pub fn lagrange(&self) -> Option<&LagrangeKernelRandElements> { - self.lagrange.as_ref() + self.gkr.as_ref().map(|gkr| &gkr.lagrange) + } + + /// Returns the random values used to linearly combine the openings returned from the GKR proof. + /// + /// These correspond to the lambdas in our documentation. + pub fn gkr_openings_combining_randomness(&self) -> Option<&[E]> { + self.gkr.as_ref().map(|gkr| gkr.openings_combining_randomness.as_ref()) + } +} + +/// Holds all the random elements needed when using GKR to accelerate LogUp. +/// +/// This consists of two sets of random values: +/// 1. The Lagrange kernel random elements (expanded on in [`LagrangeKernelRandElements`]), and +/// 2. The "openings combining randomness". +/// +/// After verifying the LogUp-GKR circuit, the verifier is left with unproven claims provided +/// nondeterministically by the prover about the evaluations of the MLE of the main trace columns at +/// the Lagrange kernel random elements. Those claims are (linearly) combined into one using the +/// openings combining randomness. +#[derive(Clone, Debug)] +pub struct GkrRandElements { + lagrange: LagrangeKernelRandElements, + openings_combining_randomness: Vec, +} + +impl GkrRandElements { + /// Constructs a new [`GkrRandElements`] from [`LagrangeKernelRandElements`], and the openings + /// combining randomness. + /// + /// See [`GkrRandElements`] for a more detailed description. + pub fn new( + lagrange: LagrangeKernelRandElements, + openings_combining_randomness: Vec, + ) -> Self { + Self { lagrange, openings_combining_randomness } + } + + /// Returns the random elements needed to build the Lagrange kernel column. + pub fn lagrange_kernel_rand_elements(&self) -> &LagrangeKernelRandElements { + &self.lagrange + } + + /// Returns the random values used to linearly combine the openings returned from the GKR proof.
+ pub fn openings_combining_randomness(&self) -> &[E] { + &self.openings_combining_randomness } } @@ -61,7 +105,7 @@ pub trait GkrVerifier { &self, gkr_proof: Self::GkrProof, public_coin: &mut impl RandomCoin, - ) -> Result, Self::Error> + ) -> Result, Self::Error> where E: FieldElement, Hasher: ElementHasher; @@ -75,11 +119,11 @@ impl GkrVerifier for () { &self, _gkr_proof: Self::GkrProof, _public_coin: &mut impl RandomCoin, - ) -> Result, Self::Error> + ) -> Result, Self::Error> where E: FieldElement, Hasher: ElementHasher, { - Ok(LagrangeKernelRandElements::new(Vec::new())) + Ok(GkrRandElements::new(LagrangeKernelRandElements::default(), Vec::new())) } } diff --git a/air/src/air/lagrange/mod.rs b/air/src/air/lagrange/mod.rs index b4bb0751a..fed5897f3 100644 --- a/air/src/air/lagrange/mod.rs +++ b/air/src/air/lagrange/mod.rs @@ -46,7 +46,11 @@ impl LagrangeKernelConstraints { } /// Holds the randomly generated elements needed to build the Lagrange kernel auxiliary column. -#[derive(Debug, Clone)] +/// +/// The Lagrange kernel consists of evaluating the function $eq(x, r)$, where $x$ is the binary +/// decomposition of the row index, and $r$ is some random point. The "Lagrange kernel random +/// elements" refer to this (multidimensional) point $r$. +#[derive(Debug, Clone, Default)] pub struct LagrangeKernelRandElements { elements: Vec, } diff --git a/air/src/air/mod.rs b/air/src/air/mod.rs index ae3f34d07..53a59fa5a 100644 --- a/air/src/air/mod.rs +++ b/air/src/air/mod.rs @@ -11,7 +11,7 @@ use math::{fft, ExtensibleField, ExtensionOf, FieldElement, StarkField, ToElemen use crate::ProofOptions; mod aux; -pub use aux::{AuxRandElements, GkrVerifier}; +pub use aux::{AuxRandElements, GkrRandElements, GkrVerifier}; mod trace_info; pub use trace_info::TraceInfo; @@ -269,7 +269,7 @@ pub trait Air: Send + Sync { main_frame: &EvaluationFrame, aux_frame: &EvaluationFrame, periodic_values: &[F], - aux_rand_elements: &[E], + aux_rand_elements: &AuxRandElements, result: &mut [E], ) where F: FieldElement, @@ -298,7 +298,7 @@ pub trait Air: Send + Sync { #[allow(unused_variables)] fn get_aux_assertions>( &self, - aux_rand_elements: &[E], + aux_rand_elements: &AuxRandElements, ) -> Vec> { Vec::new() } @@ -309,7 +309,7 @@ pub trait Air: Send + Sync { /// Returns the [`GkrVerifier`] to be used to verify the GKR proof. /// /// Leave unimplemented if the `Air` doesn't use a GKR proof. - fn get_auxiliary_proof_verifier>( + fn get_gkr_proof_verifier>( &self, ) -> Self::GkrVerifier { unimplemented!("`get_auxiliary_proof_verifier()` must be implemented when the proof contains a GKR proof"); @@ -422,7 +422,7 @@ pub trait Air: Send + Sync { /// combination of boundary constraints during constraint merging. 
fn get_boundary_constraints>( &self, - aux_rand_elements: Option<&[E]>, + aux_rand_elements: Option<&AuxRandElements>, composition_coefficients: &[E], ) -> BoundaryConstraints { BoundaryConstraints::new( diff --git a/air/src/lib.rs b/air/src/lib.rs index 1e3f34318..539a812d9 100644 --- a/air/src/lib.rs +++ b/air/src/lib.rs @@ -44,7 +44,7 @@ mod air; pub use air::{ Air, AirContext, Assertion, AuxRandElements, BoundaryConstraint, BoundaryConstraintGroup, BoundaryConstraints, ConstraintCompositionCoefficients, ConstraintDivisor, - DeepCompositionCoefficients, EvaluationFrame, GkrVerifier, + DeepCompositionCoefficients, EvaluationFrame, GkrRandElements, GkrVerifier, LagrangeConstraintsCompositionCoefficients, LagrangeKernelBoundaryConstraint, LagrangeKernelConstraints, LagrangeKernelEvaluationFrame, LagrangeKernelRandElements, LagrangeKernelTransitionConstraints, TraceInfo, TransitionConstraintDegree, diff --git a/examples/src/rescue_raps/air.rs b/examples/src/rescue_raps/air.rs index e1cc84847..6fb5321b1 100644 --- a/examples/src/rescue_raps/air.rs +++ b/examples/src/rescue_raps/air.rs @@ -5,7 +5,7 @@ use core_utils::flatten_slice_elements; use winterfell::{ - math::ToElements, Air, AirContext, Assertion, EvaluationFrame, TraceInfo, + math::ToElements, Air, AirContext, Assertion, AuxRandElements, EvaluationFrame, TraceInfo, TransitionConstraintDegree, }; @@ -162,7 +162,7 @@ impl Air for RescueRapsAir { main_frame: &EvaluationFrame, aux_frame: &EvaluationFrame, periodic_values: &[F], - aux_rand_elements: &[E], + aux_rand_elements: &AuxRandElements, result: &mut [E], ) where F: FieldElement, @@ -174,6 +174,8 @@ impl Air for RescueRapsAir { let aux_current = aux_frame.current(); let aux_next = aux_frame.next(); + let aux_rand_elements = aux_rand_elements.rand_elements(); + let absorption_flag = periodic_values[1]; // We want to enforce that the absorbed values of the first hash chain are a @@ -233,7 +235,7 @@ impl Air for RescueRapsAir { ] } - fn get_aux_assertions(&self, _aux_rand_elements: &[E]) -> Vec> + fn get_aux_assertions(&self, _aux_rand_elements: &AuxRandElements) -> Vec> where E: FieldElement, { diff --git a/prover/benches/lagrange_kernel.rs b/prover/benches/lagrange_kernel.rs index eaecdb45e..559af93ea 100644 --- a/prover/benches/lagrange_kernel.rs +++ b/prover/benches/lagrange_kernel.rs @@ -7,8 +7,8 @@ use std::time::Duration; use air::{ Air, AirContext, Assertion, AuxRandElements, ConstraintCompositionCoefficients, - EvaluationFrame, FieldExtension, LagrangeKernelRandElements, ProofOptions, TraceInfo, - TransitionConstraintDegree, + EvaluationFrame, FieldExtension, GkrRandElements, LagrangeKernelRandElements, ProofOptions, + TraceInfo, TransitionConstraintDegree, }; use criterion::{criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion}; use crypto::{hashers::Blake3_256, DefaultRandomCoin, RandomCoin}; @@ -144,7 +144,7 @@ impl Air for LagrangeKernelAir { _main_frame: &EvaluationFrame, _aux_frame: &EvaluationFrame, _periodic_values: &[F], - _aux_rand_elements: &[E], + _aux_rand_elements: &AuxRandElements, _result: &mut [E], ) where F: FieldElement, @@ -155,7 +155,7 @@ impl Air for LagrangeKernelAir { fn get_aux_assertions>( &self, - _aux_rand_elements: &[E], + _aux_rand_elements: &AuxRandElements, ) -> Vec> { vec![Assertion::single(1, 0, E::ZERO)] } @@ -223,22 +223,22 @@ impl Prover for LagrangeProver { &self, main_trace: &Self::Trace, public_coin: &mut Self::RandomCoin, - ) -> (ProverGkrProof, LagrangeKernelRandElements) + ) -> (ProverGkrProof, GkrRandElements) where E: 
FieldElement, { let main_trace = main_trace.main_segment(); - let lagrange_kernel_rand_elements: Vec = { + let lagrange_kernel_rand_elements = { let log_trace_len = main_trace.num_rows().ilog2() as usize; let mut rand_elements = Vec::with_capacity(log_trace_len); for _ in 0..log_trace_len { rand_elements.push(public_coin.draw().unwrap()); } - rand_elements + LagrangeKernelRandElements::new(rand_elements) }; - ((), LagrangeKernelRandElements::new(lagrange_kernel_rand_elements)) + ((), GkrRandElements::new(lagrange_kernel_rand_elements, Vec::new())) } fn build_aux_trace( diff --git a/prover/src/constraints/evaluator/boundary.rs b/prover/src/constraints/evaluator/boundary.rs index 67766b5ad..e161e959d 100644 --- a/prover/src/constraints/evaluator/boundary.rs +++ b/prover/src/constraints/evaluator/boundary.rs @@ -5,7 +5,7 @@ use alloc::{collections::BTreeMap, vec::Vec}; -use air::{Air, ConstraintDivisor}; +use air::{Air, AuxRandElements, ConstraintDivisor}; use math::{fft, ExtensionOf, FieldElement}; use super::StarkDomain; @@ -35,7 +35,7 @@ impl BoundaryConstraints { /// by an instance of AIR for a specific computation. pub fn new>( air: &A, - aux_rand_elements: Option<&[E]>, + aux_rand_elements: Option<&AuxRandElements>, composition_coefficients: &[E], ) -> Self { // get constraints from the AIR instance diff --git a/prover/src/constraints/evaluator/default.rs b/prover/src/constraints/evaluator/default.rs index 685af4b6f..8f96c7dcd 100644 --- a/prover/src/constraints/evaluator/default.rs +++ b/prover/src/constraints/evaluator/default.rs @@ -154,9 +154,7 @@ where // constraint evaluations. let boundary_constraints = BoundaryConstraints::new( air, - aux_rand_elements - .as_ref() - .map(|aux_rand_elements| aux_rand_elements.rand_elements()), + aux_rand_elements.as_ref(), &composition_coefficients.boundary, ); @@ -378,8 +376,7 @@ where periodic_values, self.aux_rand_elements .as_ref() - .expect("expected aux rand elements to be present") - .rand_elements(), + .expect("expected aux rand elements to be present"), evaluations, ); diff --git a/prover/src/lib.rs b/prover/src/lib.rs index 7347a6c69..4874973f2 100644 --- a/prover/src/lib.rs +++ b/prover/src/lib.rs @@ -42,13 +42,13 @@ #[macro_use] extern crate alloc; -use air::AuxRandElements; pub use air::{ proof, proof::Proof, Air, AirContext, Assertion, BoundaryConstraint, BoundaryConstraintGroup, ConstraintCompositionCoefficients, ConstraintDivisor, DeepCompositionCoefficients, EvaluationFrame, FieldExtension, LagrangeKernelRandElements, ProofOptions, TraceInfo, TransitionConstraintDegree, }; +use air::{AuxRandElements, GkrRandElements}; pub use crypto; use crypto::{ElementHasher, RandomCoin}; use fri::FriProver; @@ -205,7 +205,7 @@ pub trait Prover { &self, main_trace: &Self::Trace, public_coin: &mut Self::RandomCoin, - ) -> (ProverGkrProof, LagrangeKernelRandElements) + ) -> (ProverGkrProof, GkrRandElements) where E: FieldElement, { @@ -280,7 +280,7 @@ pub trait Prover { let pub_inputs = self.get_pub_inputs(&trace); let pub_inputs_elements = pub_inputs.to_elements(); - // create an instance of AIR for the provided parameters. this takes a generic description + // create an instance of AIR for the provided parameters. This takes a generic description // of the computation (provided via AIR type), and creates a description of a specific // execution of the computation for the provided public inputs. 
let air = Self::Air::new(trace.info().clone(), pub_inputs, self.options().clone()); @@ -310,22 +310,24 @@ pub trait Prover { // build the auxiliary trace segment, and append the resulting segments to trace commitment // and trace polynomial table structs let aux_trace_with_metadata = if air.trace_info().is_multi_segment() { - let (gkr_proof, lagrange_rand_elements) = - if air.context().has_lagrange_kernel_aux_column() { - let (gkr_proof, lagrange_rand_elements) = - maybe_await!(self.generate_gkr_proof(&trace, channel.public_coin())); + let (gkr_proof, aux_rand_elements) = if air.context().has_lagrange_kernel_aux_column() { + let (gkr_proof, gkr_rand_elements) = + maybe_await!(self.generate_gkr_proof(&trace, channel.public_coin())); - (Some(gkr_proof), Some(lagrange_rand_elements)) - } else { - (None, None) - }; + let rand_elements = air + .get_aux_rand_elements(channel.public_coin()) + .expect("failed to draw random elements for the auxiliary trace segment"); + + let aux_rand_elements = + AuxRandElements::new_with_gkr(rand_elements, gkr_rand_elements); - let aux_rand_elements = { + (Some(gkr_proof), aux_rand_elements) + } else { let rand_elements = air .get_aux_rand_elements(channel.public_coin()) .expect("failed to draw random elements for the auxiliary trace segment"); - AuxRandElements::new_with_lagrange(rand_elements, lagrange_rand_elements) + (None, AuxRandElements::new(rand_elements)) }; let aux_trace = maybe_await!(self.build_aux_trace(&trace, &aux_rand_elements)); diff --git a/prover/src/trace/mod.rs b/prover/src/trace/mod.rs index aa0721ec8..26b383a3b 100644 --- a/prover/src/trace/mod.rs +++ b/prover/src/trace/mod.rs @@ -127,7 +127,7 @@ pub trait Trace: Sized { let aux_trace = &aux_trace_with_metadata.aux_trace; let aux_rand_elements = &aux_trace_with_metadata.aux_rand_elements; - for assertion in air.get_aux_assertions(aux_rand_elements.rand_elements()) { + for assertion in air.get_aux_assertions(aux_rand_elements) { // get the matrix and verify the assertion against it assertion.apply(self.length(), |step, value| { assert!( @@ -209,7 +209,7 @@ pub trait Trace: Sized { &main_frame, aux_frame, &periodic_values, - aux_rand_elements.rand_elements(), + aux_rand_elements, &mut aux_evaluations, ); for (i, &evaluation) in aux_evaluations.iter().enumerate() { diff --git a/utils/core/src/serde/mod.rs b/utils/core/src/serde/mod.rs index 200940c6c..5ebfde927 100644 --- a/utils/core/src/serde/mod.rs +++ b/utils/core/src/serde/mod.rs @@ -196,7 +196,7 @@ impl Serializable for [T; C] { } impl Serializable for [T] { - fn write_into(&self, target: &mut W) { + fn write_into(&self, target: &mut W) { target.write_usize(self.len()); for element in self.iter() { element.write_into(target); @@ -226,7 +226,7 @@ impl Serializable for BTreeSet { } impl Serializable for str { - fn write_into(&self, target: &mut W) { + fn write_into(&self, target: &mut W) { target.write_usize(self.len()); target.write_many(self.as_bytes()); } diff --git a/verifier/src/evaluator.rs b/verifier/src/evaluator.rs index 73187b67b..10910a555 100644 --- a/verifier/src/evaluator.rs +++ b/verifier/src/evaluator.rs @@ -54,7 +54,7 @@ pub fn evaluate_constraints>( main_trace_frame, aux_trace_frame, &periodic_values, - aux_rand_elements.rand_elements(), + aux_rand_elements, &mut t_evaluations2, ); } @@ -67,10 +67,8 @@ pub fn evaluate_constraints>( // 2 ----- evaluate boundary constraints ------------------------------------------------------ // get boundary constraints grouped by common divisor from the AIR - let b_constraints = 
air.get_boundary_constraints( - aux_rand_elements.as_ref().map(|eles| eles.rand_elements()), - &composition_coefficients.boundary, - ); + let b_constraints = + air.get_boundary_constraints(aux_rand_elements, &composition_coefficients.boundary); // iterate over boundary constraint groups for the main trace segment (each group has a // distinct divisor), evaluate constraints in each group and add their combination to the diff --git a/verifier/src/lib.rs b/verifier/src/lib.rs index 881a8aa83..a9c5ab7f7 100644 --- a/verifier/src/lib.rs +++ b/verifier/src/lib.rs @@ -178,8 +178,8 @@ where Deserializable::read_from_bytes(gkr_proof_serialized) .map_err(|err| VerifierError::ProofDeserializationError(err.to_string()))? }; - let lagrange_rand_elements = air - .get_auxiliary_proof_verifier::() + let gkr_rand_elements = air + .get_gkr_proof_verifier::() .verify::(gkr_proof, &mut public_coin) .map_err(|err| VerifierError::GkrProofVerificationFailed(err.to_string()))?; @@ -189,7 +189,7 @@ where public_coin.reseed(trace_commitments[AUX_TRACE_IDX]); - Some(AuxRandElements::new_with_lagrange(rand_elements, Some(lagrange_rand_elements))) + Some(AuxRandElements::new_with_gkr(rand_elements, gkr_rand_elements)) } else { let rand_elements = air.get_aux_rand_elements(&mut public_coin).expect( "failed to generate the random elements needed to build the auxiliary trace", diff --git a/winterfell/src/tests.rs b/winterfell/src/tests.rs index e7b944da5..bfc0aa264 100644 --- a/winterfell/src/tests.rs +++ b/winterfell/src/tests.rs @@ -5,7 +5,7 @@ use std::{vec, vec::Vec}; -use air::LagrangeKernelRandElements; +use air::{GkrRandElements, LagrangeKernelRandElements}; use prover::{ crypto::{hashers::Blake3_256, DefaultRandomCoin, RandomCoin}, math::{fields::f64::BaseElement, ExtensionOf, FieldElement}, @@ -96,7 +96,7 @@ impl GkrVerifier for DummyGkrVerifier { &self, gkr_proof: usize, public_coin: &mut impl RandomCoin, - ) -> Result, Self::Error> + ) -> Result, Self::Error> where E: FieldElement, Hasher: crypto::ElementHasher, @@ -111,7 +111,7 @@ impl GkrVerifier for DummyGkrVerifier { LagrangeKernelRandElements::new(rand_elements) }; - Ok(lagrange_kernel_rand_elements) + Ok(GkrRandElements::new(lagrange_kernel_rand_elements, Vec::new())) } } @@ -168,7 +168,7 @@ impl Air for LagrangeKernelComplexAir { _main_frame: &EvaluationFrame, _aux_frame: &EvaluationFrame, _periodic_values: &[F], - _aux_rand_elements: &[E], + _aux_rand_elements: &AuxRandElements, _result: &mut [E], ) where F: FieldElement, @@ -179,12 +179,12 @@ impl Air for LagrangeKernelComplexAir { fn get_aux_assertions>( &self, - _aux_rand_elements: &[E], + _aux_rand_elements: &AuxRandElements, ) -> Vec> { vec![Assertion::single(0, 0, E::ZERO)] } - fn get_auxiliary_proof_verifier>( + fn get_gkr_proof_verifier>( &self, ) -> Self::GkrVerifier { DummyGkrVerifier @@ -253,22 +253,22 @@ impl Prover for LagrangeComplexProver { &self, main_trace: &Self::Trace, public_coin: &mut Self::RandomCoin, - ) -> (ProverGkrProof, LagrangeKernelRandElements) + ) -> (ProverGkrProof, GkrRandElements) where E: FieldElement, { let main_trace = main_trace.main_segment(); let log_trace_len = main_trace.num_rows().ilog2() as usize; - let lagrange_kernel_rand_elements: Vec = { + let lagrange_kernel_rand_elements = { let mut rand_elements = Vec::with_capacity(log_trace_len); for _ in 0..log_trace_len { rand_elements.push(public_coin.draw().unwrap()); } - rand_elements + LagrangeKernelRandElements::new(rand_elements) }; - (log_trace_len, 
LagrangeKernelRandElements::new(lagrange_kernel_rand_elements)) + (log_trace_len, GkrRandElements::new(lagrange_kernel_rand_elements, Vec::new())) } fn build_aux_trace( From 3c163baa1de175a5a8873de9a34d15f88fe6e0fd Mon Sep 17 00:00:00 2001 From: Al-Kindi-0 <82364884+Al-Kindi-0@users.noreply.github.com> Date: Tue, 9 Jul 2024 01:26:15 +0200 Subject: [PATCH 06/11] feat: abstract over the vector commitment scheme (#285) --- air/src/proof/mod.rs | 10 +- air/src/proof/queries.rs | 86 ++++---- crypto/src/commitment.rs | 86 ++++++++ crypto/src/lib.rs | 3 + crypto/src/merkle/mod.rs | 128 ++++++++--- crypto/src/merkle/proofs.rs | 222 ++++++++------------ crypto/src/merkle/tests.rs | 166 ++++++++------- examples/src/fibonacci/fib2/mod.rs | 8 +- examples/src/fibonacci/fib2/prover.rs | 8 +- examples/src/fibonacci/fib8/mod.rs | 8 +- examples/src/fibonacci/fib8/prover.rs | 8 +- examples/src/fibonacci/fib_small/mod.rs | 8 +- examples/src/fibonacci/fib_small/prover.rs | 18 +- examples/src/fibonacci/mulfib2/mod.rs | 8 +- examples/src/fibonacci/mulfib2/prover.rs | 8 +- examples/src/fibonacci/mulfib8/mod.rs | 8 +- examples/src/fibonacci/mulfib8/prover.rs | 8 +- examples/src/lamport/aggregate/mod.rs | 8 +- examples/src/lamport/aggregate/prover.rs | 8 +- examples/src/lamport/signature.rs | 6 +- examples/src/lamport/threshold/mod.rs | 8 +- examples/src/lamport/threshold/prover.rs | 8 +- examples/src/lamport/threshold/signature.rs | 5 +- examples/src/merkle/mod.rs | 13 +- examples/src/merkle/prover.rs | 8 +- examples/src/rescue/mod.rs | 8 +- examples/src/rescue/prover.rs | 8 +- examples/src/rescue_raps/mod.rs | 8 +- examples/src/rescue_raps/prover.rs | 8 +- examples/src/utils/rescue.rs | 2 +- examples/src/vdf/exempt/mod.rs | 8 +- examples/src/vdf/exempt/prover.rs | 8 +- examples/src/vdf/regular/mod.rs | 8 +- examples/src/vdf/regular/prover.rs | 8 +- fri/benches/prover.rs | 5 +- fri/src/lib.rs | 2 +- fri/src/proof.rs | 75 ++++--- fri/src/prover/channel.rs | 20 +- fri/src/prover/mod.rs | 142 +++++++------ fri/src/prover/tests.rs | 8 +- fri/src/utils.rs | 23 +- fri/src/verifier/channel.rs | 72 ++++--- fri/src/verifier/mod.rs | 20 +- prover/benches/lagrange_kernel.rs | 6 +- prover/src/channel.rs | 13 +- prover/src/constraints/commitment.rs | 65 +++--- prover/src/lib.rs | 71 ++++--- prover/src/matrix/col_matrix.rs | 15 +- prover/src/matrix/row_matrix.rs | 17 +- prover/src/trace/trace_lde/default/mod.rs | 108 +++++----- prover/src/trace/trace_lde/default/tests.rs | 16 +- prover/src/trace/trace_lde/mod.rs | 11 +- verifier/src/channel.rs | 155 ++++++++++---- verifier/src/errors.rs | 11 +- verifier/src/lib.rs | 20 +- winterfell/src/lib.rs | 27 +-- winterfell/src/tests.rs | 6 +- 57 files changed, 1073 insertions(+), 756 deletions(-) create mode 100644 crypto/src/commitment.rs diff --git a/air/src/proof/mod.rs b/air/src/proof/mod.rs index 35910819f..7307ba1d3 100644 --- a/air/src/proof/mod.rs +++ b/air/src/proof/mod.rs @@ -8,7 +8,7 @@ use alloc::vec::Vec; use core::cmp; -use crypto::Hasher; +use crypto::{Hasher, MerkleTree}; use fri::FriProof; use math::FieldElement; use utils::{ByteReader, Deserializable, DeserializationError, Serializable, SliceReader}; @@ -154,12 +154,8 @@ impl Proof { num_unique_queries: 0, commitments: Commitments::default(), trace_queries: Vec::new(), - constraint_queries: Queries::new::<_, DummyField>( - BatchMerkleProof::> { - leaves: Vec::new(), - nodes: Vec::new(), - depth: 0, - }, + constraint_queries: Queries::new::, DummyField, MerkleTree<_>>( + BatchMerkleProof::> { nodes: Vec::new(), depth: 0 
            },
            vec![vec![DummyField::ONE]],
        ),
        ood_frame: OodFrame::default(),
diff --git a/air/src/proof/queries.rs b/air/src/proof/queries.rs
index 405545254..3c5250fc0 100644
--- a/air/src/proof/queries.rs
+++ b/air/src/proof/queries.rs
@@ -5,7 +5,7 @@
 use alloc::vec::Vec;
 
-use crypto::{BatchMerkleProof, ElementHasher, Hasher};
+use crypto::{ElementHasher, Hasher, VectorCommitment};
 use math::FieldElement;
 use utils::{
     ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable, SliceReader,
@@ -17,21 +17,21 @@ use super::Table;
 // ================================================================================================
 
 /// Decommitments to evaluations of a set of functions at multiple points.
 ///
-/// Given a set of functions evaluated over a domain *D*, a commitment is assumed to be a Merkle
-/// tree where a leaf at position *i* contains evaluations of all functions at *xi*.
+/// Given a set of functions evaluated over a domain *D*, a commitment is assumed to be a vector
+/// commitment where the *i*-th vector entry contains evaluations of all functions at *xi*.
 /// Thus, a query (i.e. a single decommitment) for position *i* includes evaluations of all
-/// functions at *xi*, accompanied by a Merkle authentication path from the leaf *i* to
-/// the tree root.
+/// functions at *xi*, accompanied by an opening proof of leaf *i* against the vector
+/// commitment string.
 ///
 /// This struct can contain one or more queries. In cases when more than one query is stored,
-/// Merkle authentication paths are compressed to remove redundant nodes.
+/// a batch opening proof is used in order to compress the individual opening proofs.
 ///
-/// Internally, all Merkle paths and query values are stored as a sequence of bytes. Thus, to
-/// retrieve query values and the corresponding Merkle authentication paths,
-/// [parse()](Queries::parse) function should be used.
+/// Internally, all opening proofs and query values are stored as a sequence of bytes. Thus, to
+/// retrieve query values and their corresponding opening proofs, the [parse()](Queries::parse)
+/// function should be used.
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub struct Queries {
-    paths: Vec<u8>,
+    opening_proof: Vec<u8>,
     values: Vec<u8>,
 }
 
@@ -39,26 +39,23 @@ impl Queries {
     // CONSTRUCTOR
     // --------------------------------------------------------------------------------------------
     /// Returns queries constructed from evaluations of a set of functions at some number of points
-    /// in a domain and their corresponding Merkle authentication paths.
+    /// in a domain and their corresponding batch opening proof.
     ///
-    /// For each evaluation point, the same number of values must be provided, and a hash of
-    /// these values must be equal to a leaf node in the corresponding Merkle authentication path.
+    /// For each evaluation point, the same number of values must be provided.
     ///
     /// # Panics
     /// Panics if:
     /// * No queries were provided (`query_values` is an empty vector).
     /// * Any of the queries does not contain any evaluations.
    /// * Not all queries contain the same number of evaluations.
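// Sketch (not part of the patch): how the reworked `Queries` type is expected to be fed by a
// vector commitment -- hash each row of evaluations, commit, open all queried positions in one
// batch, and wrap the batch proof. The `winter_*` crate paths, the `Blake3` alias, and the
// helper name `build_queries` are illustrative assumptions, not code from this PR.
//
// use winter_air::proof::Queries;
// use winter_crypto::{hashers::Blake3_256, ElementHasher, MerkleTree, VectorCommitment};
// use winter_math::fields::f128::BaseElement;
//
// type Blake3 = Blake3_256<BaseElement>;
//
// fn build_queries(rows: Vec<Vec<BaseElement>>, positions: &[usize]) -> Queries {
//     // one committed entry per evaluation point: the hash of that row of values
//     let leaves: Vec<_> = rows.iter().map(|row| Blake3::hash_elements(row)).collect();
//     // the number of rows is assumed to be a power of two (a Merkle tree requirement)
//     let tree = MerkleTree::<Blake3>::new(leaves).expect("power-of-two number of rows");
//     // a single batch opening proof covers all queried positions
//     let (_, multi_proof) = tree.open_many(positions).expect("valid positions");
//     let values = positions.iter().map(|&p| rows[p].clone()).collect();
//     Queries::new::<_, BaseElement, MerkleTree<Blake3>>(multi_proof, values)
// }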
- pub fn new( - merkle_proof: BatchMerkleProof, + pub fn new>( + opening_proof: V::MultiProof, query_values: Vec>, ) -> Self { assert!(!query_values.is_empty(), "query values cannot be empty"); let elements_per_query = query_values[0].len(); assert_ne!(elements_per_query, 0, "a query must contain at least one evaluation"); - // TODO: add debug check that values actually hash into the leaf nodes of the batch proof - // concatenate all elements together into a single vector of bytes let num_queries = query_values.len(); let mut values = Vec::with_capacity(num_queries * elements_per_query * E::ELEMENT_BYTES); @@ -70,33 +67,31 @@ impl Queries { ); values.write_many(elements); } + let opening_proof = opening_proof.to_bytes(); - // serialize internal nodes of the batch Merkle proof; we care about internal nodes only - // because leaf nodes can be reconstructed from hashes of query values - let paths = merkle_proof.serialize_nodes(); - - Queries { paths, values } + Queries { opening_proof, values } } // PARSER // -------------------------------------------------------------------------------------------- - /// Convert internally stored bytes into a set of query values and the corresponding Merkle - /// authentication paths. + /// Convert internally stored bytes into a set of query values and the corresponding batch + /// opening proof. /// /// # Panics /// Panics if: /// * `domain_size` is not a power of two. /// * `num_queries` is zero. /// * `values_per_query` is zero. - pub fn parse( + pub fn parse( self, domain_size: usize, num_queries: usize, values_per_query: usize, - ) -> Result<(BatchMerkleProof, Table), DeserializationError> + ) -> Result<(V::MultiProof, Table), DeserializationError> where E: FieldElement, H: ElementHasher, + V: VectorCommitment, { assert!(domain_size.is_power_of_two(), "domain size must be a power of two"); assert!(num_queries > 0, "there must be at least one query"); @@ -113,20 +108,27 @@ impl Queries { ))); } - // read bytes corresponding to each query, convert them into field elements, - // and also hash them to build leaf nodes of the batch Merkle proof + // read bytes corresponding to each query and convert them into field elements. let query_values = Table::::from_bytes(&self.values, num_queries, values_per_query)?; - let hashed_queries = query_values.rows().map(|row| H::hash_elements(row)).collect(); - // build batch Merkle proof - let mut reader = SliceReader::new(&self.paths); - let tree_depth = domain_size.ilog2() as u8; - let merkle_proof = BatchMerkleProof::deserialize(&mut reader, hashed_queries, tree_depth)?; + // build batch opening proof + let mut reader = SliceReader::new(&self.opening_proof); + let opening_proof = ::read_from(&mut reader)?; + + // check that the opening proof matches the domain length + if >::get_multiproof_domain_len(&opening_proof) != domain_size { + return Err(DeserializationError::InvalidValue(format!( + "expected a domain of size {} but was {}", + domain_size, + >::get_multiproof_domain_len(&opening_proof), + ))); + } + if reader.has_more_bytes() { return Err(DeserializationError::UnconsumedBytes); } - Ok((merkle_proof, query_values)) + Ok((opening_proof, query_values)) } } @@ -137,17 +139,15 @@ impl Serializable for Queries { /// Serializes `self` and writes the resulting bytes into the `target`. 
fn write_into<W: ByteWriter>(&self, target: &mut W) {
         // write value bytes
-        target.write_u32(self.values.len() as u32);
-        target.write_bytes(&self.values);
+        self.values.write_into(target);
 
         // write path bytes
-        target.write_u32(self.paths.len() as u32);
-        target.write_bytes(&self.paths);
+        self.opening_proof.write_into(target);
     }
 
     /// Returns an estimate of how many bytes are needed to represent self.
     fn get_size_hint(&self) -> usize {
-        self.paths.len() + self.values.len() + 8
+        self.opening_proof.len() + self.values.len() + 8
     }
 }
 
@@ -158,13 +158,11 @@ impl Deserializable for Queries {
     /// Returns an error if a valid query struct could not be read from the specified source.
     fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
         // read values
-        let num_value_bytes = source.read_u32()?;
-        let values = source.read_vec(num_value_bytes as usize)?;
+        let values = Vec::<_>::read_from(source)?;
 
         // read paths
-        let num_paths_bytes = source.read_u32()?;
-        let paths = source.read_vec(num_paths_bytes as usize)?;
+        let paths = Vec::<_>::read_from(source)?;
 
-        Ok(Queries { paths, values })
+        Ok(Queries { opening_proof: paths, values })
     }
 }
diff --git a/crypto/src/commitment.rs b/crypto/src/commitment.rs
new file mode 100644
index 000000000..1d2667f7a
--- /dev/null
+++ b/crypto/src/commitment.rs
@@ -0,0 +1,86 @@
+// Copyright (c) Facebook, Inc. and its affiliates.
+//
+// This source code is licensed under the MIT license found in the
+// LICENSE file in the root directory of this source tree.
+
+use alloc::vec::Vec;
+use core::fmt::Debug;
+
+use utils::{Deserializable, Serializable};
+
+use crate::Hasher;
+
+/// A vector commitment (VC) scheme.
+///
+/// This is a cryptographic primitive allowing one to commit, using a commitment string `com`, to
+/// a vector of values (v_0, ..., v_{n-1}) such that one can later reveal the value at the i-th
+/// position.
+///
+/// This is achieved by providing the value `v_i` together with a proof `proof_i` such that anyone
+/// possessing `com` can be convinced, with high confidence, that the claim is true.
+///
+/// Vector commitment schemes usually have some batching properties in the sense that opening
+/// proofs for a number of `(i, v_i)` can be batched together into one batch opening proof in order
+/// to optimize both the proof size as well as the verification time.
+///
+/// The current implementation restricts both the commitment string and the leaf values to be
+/// `H::Digest` where `H` is a type parameter such that `H: Hasher`.
+pub trait VectorCommitment<H: Hasher>: Sized {
+    /// Options defining the VC, i.e., public parameters.
+    type Options: Default;
+    /// Opening proof of some value at some position index.
+    type Proof: Clone + Serializable + Deserializable;
+    /// Batch opening proof of a number of {(i, v_i)}_{i ∈ S} for an index set.
+    type MultiProof: Serializable + Deserializable;
+    /// Error returned by the scheme.
+    type Error: Debug;
+
+    /// Creates a commitment to a vector of values (v_0, ..., v_{n-1}) using the default
+    /// options.
+    fn new(items: Vec<H::Digest>) -> Result<Self, Self::Error> {
+        Self::with_options(items, Self::Options::default())
+    }
+
+    /// Creates a commitment to a vector of values (v_0, ..., v_{n-1}) given a set of
+    /// options.
+    fn with_options(items: Vec<H::Digest>, options: Self::Options) -> Result<Self, Self::Error>;
+
+    /// Returns the commitment string to the committed values.
+    fn commitment(&self) -> H::Digest;
+
+    /// Returns the length of the vector committed to for `Self`.
+    fn domain_len(&self) -> usize;
+
+    /// Returns the length of the vector committed to for `Self::Proof`.
+ fn get_proof_domain_len(proof: &Self::Proof) -> usize; + + /// Returns the length of the vector committed to for `Self::MultiProof`. + fn get_multiproof_domain_len(proof: &Self::MultiProof) -> usize; + + /// Opens the value at a given index and provides a proof for the correctness of claimed value. + fn open(&self, index: usize) -> Result<(H::Digest, Self::Proof), Self::Error>; + + /// Opens the values at a given index set and provides a proof for the correctness of claimed + /// values. + #[allow(clippy::type_complexity)] + fn open_many( + &self, + indexes: &[usize], + ) -> Result<(Vec, Self::MultiProof), Self::Error>; + + /// Verifies that the claimed value is at the given index using a proof. + fn verify( + commitment: H::Digest, + index: usize, + item: H::Digest, + proof: &Self::Proof, + ) -> Result<(), Self::Error>; + + /// Verifies that the claimed values are at the given set of indices using a batch proof. + fn verify_many( + commitment: H::Digest, + indexes: &[usize], + items: &[H::Digest], + proof: &Self::MultiProof, + ) -> Result<(), Self::Error>; +} diff --git a/crypto/src/lib.rs b/crypto/src/lib.rs index 122b5de24..ff29176bb 100644 --- a/crypto/src/lib.rs +++ b/crypto/src/lib.rs @@ -39,3 +39,6 @@ pub use random::{DefaultRandomCoin, RandomCoin}; mod errors; pub use errors::{MerkleTreeError, RandomCoinError}; + +mod commitment; +pub use commitment::VectorCommitment; diff --git a/crypto/src/merkle/mod.rs b/crypto/src/merkle/mod.rs index 5929c79d4..51b4a76dc 100644 --- a/crypto/src/merkle/mod.rs +++ b/crypto/src/merkle/mod.rs @@ -9,11 +9,11 @@ use alloc::{ }; use core::slice; -use crate::{errors::MerkleTreeError, hash::Hasher}; - mod proofs; pub use proofs::BatchMerkleProof; +use crate::{Hasher, MerkleTreeError, VectorCommitment}; + #[cfg(feature = "concurrent")] pub mod concurrent; @@ -79,13 +79,13 @@ mod tests; /// assert_eq!(leaves, tree.leaves()); /// /// // generate a proof -/// let proof = tree.prove(2).unwrap(); -/// assert_eq!(3, proof.len()); -/// assert_eq!(leaves[2], proof[0]); +/// let (leaf, proof) = tree.prove(2).unwrap(); +/// assert_eq!(2, proof.len()); +/// assert_eq!(leaves[2], leaf); /// /// // verify proof -/// assert!(MerkleTree::::verify(*tree.root(), 2, &proof).is_ok()); -/// assert!(MerkleTree::::verify(*tree.root(), 1, &proof).is_err()); +/// assert!(MerkleTree::::verify(*tree.root(), 2, leaf, &proof).is_ok()); +/// assert!(MerkleTree::::verify(*tree.root(), 1, leaf, &proof).is_err()); /// ``` #[derive(Debug)] pub struct MerkleTree { @@ -93,6 +93,10 @@ pub struct MerkleTree { leaves: Vec, } +/// Merkle tree opening consisting of a leaf value and a Merkle path leading from this leaf +/// up to the root (excluding the root itself). +pub type MerkleTreeOpening = (::Digest, Vec<::Digest>); + // MERKLE TREE IMPLEMENTATION // ================================================================================================ @@ -179,19 +183,19 @@ impl MerkleTree { // PROVING METHODS // -------------------------------------------------------------------------------------------- - /// Returns a Merkle path to a leaf at the specified `index`. + /// Returns a Merkle proof to a leaf at the specified `index`. /// - /// The leaf itself will be the first element in the path. + /// The leaf itself will be the first element of the returned tuple. /// /// # Errors /// Returns an error if the specified index is greater than or equal to the number of leaves /// in the tree. 
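// Sketch (not part of the patch): a minimal commit/open/verify round trip through the
// `VectorCommitment` trait defined above, using the `MerkleTree` implementation from this PR.
// The `Blake3_256`/`f128` choices and the `winter_*` crate paths are illustrative assumptions.
//
// use winter_crypto::{hashers::Blake3_256, ElementHasher, MerkleTree, VectorCommitment};
// use winter_math::fields::f128::BaseElement;
//
// type Blake3 = Blake3_256<BaseElement>;
//
// fn vc_roundtrip() {
//     // commit to a power-of-two vector of digests using the default (unit) options
//     let leaves: Vec<_> =
//         (0u128..8).map(|i| Blake3::hash_elements(&[BaseElement::new(i)])).collect();
//     let tree = MerkleTree::<Blake3>::new(leaves).unwrap();
//     let commitment = tree.commitment();
//
//     // open position 2 and check the claimed value against the commitment string;
//     // the proof length pins down the committed domain size (here, 8)
//     let (value, proof) = tree.open(2).unwrap();
//     assert_eq!(8, MerkleTree::<Blake3>::get_proof_domain_len(&proof));
//     MerkleTree::<Blake3>::verify(commitment, 2, value, &proof).unwrap();
// }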
- pub fn prove(&self, index: usize) -> Result, MerkleTreeError> { + pub fn prove(&self, index: usize) -> Result, MerkleTreeError> { if index >= self.leaves.len() { return Err(MerkleTreeError::LeafIndexOutOfBounds(self.leaves.len(), index)); } - - let mut proof = vec![self.leaves[index], self.leaves[index ^ 1]]; + let leaf = self.leaves[index]; + let mut proof = vec![self.leaves[index ^ 1]]; let mut index = (index + self.nodes.len()) >> 1; while index > 1 { @@ -199,25 +203,25 @@ impl MerkleTree { index >>= 1; } - Ok(proof) + Ok((leaf, proof)) } - /// Computes Merkle paths for the provided indexes and compresses the paths into a single proof. + /// Computes Merkle proofs for the provided indexes, compresses the proofs into a single batch + /// and returns the batch proof alongside the leaves at the provided indexes. /// /// # Errors /// Returns an error if: /// * No indexes were provided (i.e., `indexes` is an empty slice). - /// * Number of provided indexes is greater than 255. /// * Any of the provided indexes are greater than or equal to the number of leaves in the /// tree. /// * List of indexes contains duplicates. - pub fn prove_batch(&self, indexes: &[usize]) -> Result, MerkleTreeError> { + pub fn prove_batch( + &self, + indexes: &[usize], + ) -> Result<(Vec, BatchMerkleProof), MerkleTreeError> { if indexes.is_empty() { return Err(MerkleTreeError::TooFewLeafIndexes); } - if indexes.len() > proofs::MAX_PATHS { - return Err(MerkleTreeError::TooManyLeafIndexes(proofs::MAX_PATHS, indexes.len())); - } let index_map = map_indexes(indexes, self.depth())?; let indexes = normalize_indexes(indexes); @@ -265,13 +269,13 @@ impl MerkleTree { } } - Ok(BatchMerkleProof { leaves, nodes, depth: self.depth() as u8 }) + Ok((leaves, BatchMerkleProof { depth: self.depth() as u8, nodes })) } // VERIFICATION METHODS // -------------------------------------------------------------------------------------------- - /// Checks whether the `proof` for the specified `index` is valid. + /// Checks whether the `proof` for the given `leaf` at the specified `index` is valid. /// /// # Errors /// Returns an error if the specified `proof` (which is a Merkle path) does not resolve to the @@ -279,13 +283,18 @@ impl MerkleTree { pub fn verify( root: H::Digest, index: usize, + leaf: H::Digest, proof: &[H::Digest], ) -> Result<(), MerkleTreeError> { let r = index & 1; - let mut v = H::merge(&[proof[r], proof[1 - r]]); + let mut v = if r == 0 { + H::merge(&[leaf, proof[0]]) + } else { + H::merge(&[proof[0], leaf]) + }; - let mut index = (index + 2usize.pow((proof.len() - 1) as u32)) >> 1; - for &p in proof.iter().skip(2) { + let mut index = (index + 2usize.pow((proof.len()) as u32)) >> 1; + for &p in proof.iter().skip(1) { v = if index & 1 == 0 { H::merge(&[v, p]) } else { @@ -300,22 +309,23 @@ impl MerkleTree { Ok(()) } - /// Checks whether the batch proof contains Merkle paths for the of the specified `indexes`. + /// Checks whether the batch `proof` contains Merkle proofs resolving to `root` for + /// the provided `leaves` at the specified `indexes`. /// /// # Errors /// Returns an error if: /// * No indexes were provided (i.e., `indexes` is an empty slice). - /// * Number of provided indexes is greater than 255. /// * Any of the specified `indexes` is greater than or equal to the number of leaves in the /// tree from which the batch proof was generated. /// * List of indexes contains duplicates. - /// * Any of the paths in the batch proof does not resolve to the specified `root`. 
+    /// * Any of the proofs in the batch proof does not resolve to the specified `root`.
     pub fn verify_batch(
         root: &H::Digest,
         indexes: &[usize],
+        leaves: &[H::Digest],
         proof: &BatchMerkleProof<H>,
     ) -> Result<(), MerkleTreeError> {
-        if *root != proof.get_root(indexes)? {
+        if *root != proof.get_root(indexes, leaves)? {
             return Err(MerkleTreeError::InvalidProof);
         }
         Ok(())
     }
@@ -385,3 +395,65 @@ fn normalize_indexes(indexes: &[usize]) -> Vec<usize> {
     }
     set.into_iter().collect()
 }
+
+// VECTOR COMMITMENT IMPLEMENTATION
+// ================================================================================================
+
+impl<H: Hasher> VectorCommitment<H> for MerkleTree<H> {
+    type Options = ();
+
+    type Proof = Vec<H::Digest>;
+
+    type MultiProof = BatchMerkleProof<H>;
+
+    type Error = MerkleTreeError;
+
+    fn with_options(items: Vec<H::Digest>, _options: Self::Options) -> Result<Self, Self::Error> {
+        MerkleTree::new(items)
+    }
+
+    fn commitment(&self) -> H::Digest {
+        *self.root()
+    }
+
+    fn domain_len(&self) -> usize {
+        1 << self.depth()
+    }
+
+    fn get_proof_domain_len(proof: &Self::Proof) -> usize {
+        1 << proof.len()
+    }
+
+    fn get_multiproof_domain_len(proof: &Self::MultiProof) -> usize {
+        1 << proof.depth
+    }
+
+    fn open(&self, index: usize) -> Result<(H::Digest, Self::Proof), Self::Error> {
+        self.prove(index)
+    }
+
+    fn open_many(
+        &self,
+        indexes: &[usize],
+    ) -> Result<(Vec<H::Digest>, Self::MultiProof), Self::Error> {
+        self.prove_batch(indexes)
+    }
+
+    fn verify(
+        commitment: H::Digest,
+        index: usize,
+        item: H::Digest,
+        proof: &Self::Proof,
+    ) -> Result<(), Self::Error> {
+        MerkleTree::<H>::verify(commitment, index, item, proof)
+    }
+
+    fn verify_many(
+        commitment: H::Digest,
+        indexes: &[usize],
+        items: &[H::Digest],
+        proof: &Self::MultiProof,
+    ) -> Result<(), Self::Error> {
+        MerkleTree::<H>::verify_batch(&commitment, indexes, items, proof)
+    }
+}
diff --git a/crypto/src/merkle/proofs.rs b/crypto/src/merkle/proofs.rs
index 95dd82d96..71b70d858 100644
--- a/crypto/src/merkle/proofs.rs
+++ b/crypto/src/merkle/proofs.rs
@@ -3,33 +3,24 @@
 // This source code is licensed under the MIT license found in the
 // LICENSE file in the root directory of this source tree.
 
-use alloc::{collections::BTreeMap, string::ToString, vec::Vec};
+use alloc::{collections::BTreeMap, vec::Vec};
 
-use utils::{ByteReader, DeserializationError, Serializable};
+use utils::{ByteReader, Deserializable, DeserializationError, Serializable};
 
+use super::MerkleTreeOpening;
 use crate::{errors::MerkleTreeError, Hasher};
 
-// CONSTANTS
-// ================================================================================================
-
-pub(super) const MAX_PATHS: usize = 255;
-
 // BATCH MERKLE PROOF
 // ================================================================================================
 
-/// Multiple Merkle paths aggregated into a single proof.
+/// Multiple Merkle proofs aggregated into a single proof.
 ///
 /// The aggregation is done in a way which removes all duplicate internal nodes, and thus,
 /// it is possible to achieve non-negligible compression as compared to naively concatenating
-/// individual Merkle paths. The algorithm is for aggregation is a variation of
+/// individual Merkle proofs. The aggregation algorithm is a variation of
 /// [Octopus](https://eprint.iacr.org/2017/933).
-///
-/// Currently, at most 255 paths can be aggregated into a single proof. This limitation is
-/// imposed primarily for serialization purposes.
#[derive(Debug, Clone, PartialEq, Eq)]
 pub struct BatchMerkleProof<H: Hasher> {
-    /// The leaves being proven
-    pub leaves: Vec<H::Digest>,
     /// Hashes of Merkle Tree proof values above the leaf layer
     pub nodes: Vec<Vec<H::Digest>>,
     /// Depth of the leaves
     pub depth: u8,
 }
 
 impl<H: Hasher> BatchMerkleProof<H> {
-    /// Constructs a batch Merkle proof from individual Merkle authentication paths.
+    /// Constructs a batch Merkle proof from a collection of single Merkle proofs.
     ///
     /// # Panics
     /// Panics if:
-    /// * No paths have been provided (i.e., `paths` is an empty slice).
-    /// * More than 255 paths have been provided.
-    /// * Number of paths is not equal to the number of indexes.
-    /// * Not all paths have the same length.
-    pub fn from_paths(paths: &[Vec<H::Digest>], indexes: &[usize]) -> BatchMerkleProof<H> {
+    /// * No proofs have been provided (i.e., `proofs` is an empty slice).
+    /// * Number of proofs is not equal to the number of indexes.
+    /// * Not all proofs have the same length.
+    pub fn from_single_proofs(
+        proofs: &[MerkleTreeOpening<H>],
+        indexes: &[usize],
+    ) -> BatchMerkleProof<H> {
         // TODO: optimize this to reduce amount of vector cloning.
-        assert!(!paths.is_empty(), "at least one path must be provided");
-        assert!(paths.len() <= MAX_PATHS, "number of paths cannot exceed {MAX_PATHS}");
-        assert_eq!(paths.len(), indexes.len(), "number of paths must equal number of indexes");
+        assert!(!proofs.is_empty(), "at least one proof must be provided");
+        assert_eq!(proofs.len(), indexes.len(), "number of proofs must equal number of indexes");
 
-        let depth = paths[0].len();
+        let depth = proofs[0].1.len();
 
-        // sort indexes in ascending order, and also re-arrange paths accordingly
-        let mut path_map = BTreeMap::new();
-        for (&index, path) in indexes.iter().zip(paths.iter().cloned()) {
-            assert_eq!(depth, path.len(), "not all paths have the same length");
-            path_map.insert(index, path);
+        // sort indexes in ascending order, and also re-arrange proofs accordingly
+        let mut proof_map = BTreeMap::new();
+        for (&index, proof) in indexes.iter().zip(proofs.iter().cloned()) {
+            assert_eq!(depth, proof.1.len(), "not all proofs have the same length");
+            proof_map.insert(index, proof);
         }
-        let indexes = path_map.keys().cloned().collect::<Vec<_>>();
-        let paths = path_map.values().cloned().collect::<Vec<_>>();
-        path_map.clear();
+        let indexes = proof_map.keys().cloned().collect::<Vec<_>>();
+        let proofs = proof_map.values().cloned().collect::<Vec<_>>();
+        proof_map.clear();
 
         let mut leaves = vec![H::Digest::default(); indexes.len()];
         let mut nodes: Vec<Vec<H::Digest>> = Vec::with_capacity(indexes.len());
@@ -69,59 +61,60 @@ impl<H: Hasher> BatchMerkleProof<H> {
         // populate values and the first layer of proof nodes
         let mut i = 0;
         while i < indexes.len() {
-            leaves[i] = paths[i][0];
+            leaves[i] = proofs[i].0;
+
             if indexes.len() > i + 1 && are_siblings(indexes[i], indexes[i + 1]) {
-                leaves[i + 1] = paths[i][1];
+                leaves[i + 1] = proofs[i].1[0];
                 nodes.push(vec![]);
                 i += 1;
             } else {
-                nodes.push(vec![paths[i][1]]);
+                nodes.push(vec![proofs[i].1[0]]);
             }
-            path_map.insert(indexes[i] >> 1, paths[i].clone());
+            proof_map.insert(indexes[i] >> 1, proofs[i].clone());
             i += 1;
         }
 
         // populate all remaining layers of proof nodes
-        for d in 2..depth {
-            let indexes = path_map.keys().cloned().collect::<Vec<_>>();
-            let mut next_path_map = BTreeMap::new();
+        for d in 1..depth {
+            let indexes = proof_map.keys().cloned().collect::<Vec<_>>();
+            let mut next_proof_map = BTreeMap::new();
 
             let mut i = 0;
             while i < indexes.len() {
                 let index = indexes[i];
-                let path = path_map.get(&index).unwrap();
+                let proof =
proof_map.get(&index).unwrap(); if indexes.len() > i + 1 && are_siblings(index, indexes[i + 1]) { i += 1; } else { - nodes[i].push(path[d]); + nodes[i].push(proof.1[d]); } - next_path_map.insert(index >> 1, path.clone()); + next_proof_map.insert(index >> 1, proof.clone()); i += 1; } - core::mem::swap(&mut path_map, &mut next_path_map); + core::mem::swap(&mut proof_map, &mut next_proof_map); } - BatchMerkleProof { leaves, nodes, depth: (depth - 1) as u8 } + BatchMerkleProof { nodes, depth: (depth) as u8 } } - /// Computes a node to which all Merkle paths aggregated in this proof resolve. + /// Computes a node to which all Merkle proofs aggregated in this proof resolve. /// /// # Errors /// Returns an error if: /// * No indexes were provided (i.e., `indexes` is an empty slice). - /// * Number of provided indexes is greater than 255. /// * Any of the specified `indexes` is greater than or equal to the number of leaves in the /// tree for which this batch proof was generated. /// * List of indexes contains duplicates. /// * The proof does not resolve to a single root. - pub fn get_root(&self, indexes: &[usize]) -> Result { + pub fn get_root( + &self, + indexes: &[usize], + leaves: &[H::Digest], + ) -> Result { if indexes.is_empty() { return Err(MerkleTreeError::TooFewLeafIndexes); } - if indexes.len() > MAX_PATHS { - return Err(MerkleTreeError::TooManyLeafIndexes(MAX_PATHS, indexes.len())); - } let mut buf = [H::Digest::default(); 2]; let mut v = BTreeMap::new(); @@ -141,16 +134,16 @@ impl BatchMerkleProof { // copy values of leaf sibling leaf nodes into the buffer match index_map.get(&index) { Some(&index1) => { - if self.leaves.len() <= index1 { + if leaves.len() <= index1 { return Err(MerkleTreeError::InvalidProof); } - buf[0] = self.leaves[index1]; + buf[0] = leaves[index1]; match index_map.get(&(index + 1)) { Some(&index2) => { - if self.leaves.len() <= index2 { + if leaves.len() <= index2 { return Err(MerkleTreeError::InvalidProof); } - buf[1] = self.leaves[index2]; + buf[1] = leaves[index2]; proof_pointers.push(0); }, None => { @@ -169,10 +162,10 @@ impl BatchMerkleProof { buf[0] = self.nodes[i][0]; match index_map.get(&(index + 1)) { Some(&index2) => { - if self.leaves.len() <= index2 { + if leaves.len() <= index2 { return Err(MerkleTreeError::InvalidProof); } - buf[1] = self.leaves[index2]; + buf[1] = leaves[index2]; }, None => return Err(MerkleTreeError::InvalidProof), } @@ -242,27 +235,27 @@ impl BatchMerkleProof { v.remove(&1).ok_or(MerkleTreeError::InvalidProof) } - /// Computes the uncompressed Merkle paths which aggregate to this proof. + /// Computes the uncompressed individual Merkle proofs which aggregate to this batch proof. /// /// # Errors /// Returns an error if: /// * No indexes were provided (i.e., `indexes` is an empty slice). - /// * Number of provided indexes is greater than 255. /// * Number of provided indexes does not match the number of leaf nodes in the proof. 
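// Sketch (not part of the patch): the intended relationship between batch and single openings
// under the new API, assuming a `tree: MerkleTree<Blake3>` as in the sketches above. A batch
// proof plus the opened leaves can be expanded back into per-index (leaf, path) openings.
//
// let (leaves, batch) = tree.prove_batch(&[1, 4, 6]).unwrap();
// MerkleTree::verify_batch(tree.root(), &[1, 4, 6], &leaves, &batch).unwrap();
// let openings = batch.into_openings(&leaves, &[1, 4, 6]).unwrap();
// for ((leaf, path), &pos) in openings.iter().zip([1usize, 4, 6].iter()) {
//     MerkleTree::<Blake3>::verify(*tree.root(), pos, *leaf, path).unwrap();
// }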
- pub fn into_paths(self, indexes: &[usize]) -> Result>, MerkleTreeError> { + pub fn into_openings( + self, + leaves: &[H::Digest], + indexes: &[usize], + ) -> Result>, MerkleTreeError> { if indexes.is_empty() { return Err(MerkleTreeError::TooFewLeafIndexes); } - if indexes.len() > MAX_PATHS { - return Err(MerkleTreeError::TooManyLeafIndexes(MAX_PATHS, indexes.len())); - } - if indexes.len() != self.leaves.len() { + if indexes.len() != leaves.len() { return Err(MerkleTreeError::InvalidProof); } let mut partial_tree_map = BTreeMap::new(); - for (&i, leaf) in indexes.iter().zip(self.leaves.iter()) { + for (&i, leaf) in indexes.iter().zip(leaves.iter()) { partial_tree_map.insert(i + (1 << (self.depth)), *leaf); } @@ -285,16 +278,16 @@ impl BatchMerkleProof { // copy values of leaf sibling leaf nodes into the buffer match index_map.get(&index) { Some(&index1) => { - if self.leaves.len() <= index1 { + if leaves.len() <= index1 { return Err(MerkleTreeError::InvalidProof); } - buf[0] = self.leaves[index1]; + buf[0] = leaves[index1]; match index_map.get(&(index + 1)) { Some(&index2) => { - if self.leaves.len() <= index2 { + if leaves.len() <= index2 { return Err(MerkleTreeError::InvalidProof); } - buf[1] = self.leaves[index2]; + buf[1] = leaves[index2]; proof_pointers.push(0); }, None => { @@ -313,10 +306,10 @@ impl BatchMerkleProof { buf[0] = self.nodes[i][0]; match index_map.get(&(index + 1)) { Some(&index2) => { - if self.leaves.len() <= index2 { + if leaves.len() <= index2 { return Err(MerkleTreeError::InvalidProof); } - buf[1] = self.leaves[index2]; + buf[1] = leaves[index2]; }, None => return Err(MerkleTreeError::InvalidProof), } @@ -386,83 +379,46 @@ impl BatchMerkleProof { original_indexes .iter() - .map(|&i| get_path::(i, &partial_tree_map, self.depth as usize)) + .map(|&i| get_proof::(i, &partial_tree_map, self.depth as usize)) .collect() } +} - // SERIALIZATION / DESERIALIZATION - // -------------------------------------------------------------------------------------------- +// SERIALIZATION / DESERIALIZATION +// -------------------------------------------------------------------------------------------- - /// Converts all internal proof nodes into a vector of bytes. - /// - /// # Panics - /// Panics if: - /// * The proof contains more than 255 Merkle paths. - /// * The Merkle paths consist of more than 255 nodes. - pub fn serialize_nodes(&self) -> Vec { - let mut result = Vec::new(); - - // record total number of node vectors - assert!(self.nodes.len() <= u8::MAX as usize, "too many paths"); - result.push(self.nodes.len() as u8); +impl Serializable for BatchMerkleProof { + /// Writes all internal proof nodes into the provided target. + fn write_into(&self, target: &mut W) { + target.write_u8(self.depth); + target.write_usize(self.nodes.len()); - // record each node vector as individual bytes for nodes in self.nodes.iter() { - assert!(nodes.len() <= u8::MAX as usize, "too many nodes"); - // record the number of nodes, and append all nodes to the paths buffer - result.push(nodes.len() as u8); - for node in nodes.iter() { - result.append(&mut node.to_bytes()); - } + // record the number of nodes, and append all nodes to the proof buffer + nodes.write_into(target); } - - result } +} - /// Parses internal nodes from the provided `node_bytes`, and constructs a batch Merkle proof - /// from these nodes, provided `leaves`, and provided tree `depth`. 
+impl Deserializable for BatchMerkleProof { + /// Parses internal nodes from the provided `source`, and constructs a batch Merkle proof + /// from these nodes. /// /// # Errors /// Returns an error if: - /// * No leaves were provided (i.e., `leaves` is an empty slice). - /// * Number of provided leaves is greater than 255. - /// * Tree `depth` was set to zero. - /// * `node_bytes` could not be deserialized into a valid set of internal nodes. - pub fn deserialize( - node_bytes: &mut R, - leaves: Vec, - depth: u8, - ) -> Result { - if depth == 0 { - return Err(DeserializationError::InvalidValue( - "tree depth must be greater than zero".to_string(), - )); - } - if leaves.is_empty() { - return Err(DeserializationError::InvalidValue( - "at lease one leaf must be provided".to_string(), - )); - } - if leaves.len() > MAX_PATHS { - return Err(DeserializationError::InvalidValue(format!( - "number of leaves cannot exceed {}, but {} were provided", - MAX_PATHS, - leaves.len() - ))); - } + /// * `source` could not be deserialized into a valid set of internal nodes. + fn read_from(source: &mut R) -> Result { + let depth = source.read_u8()?; + let num_node_vectors = source.read_usize()?; - let num_node_vectors = node_bytes.read_u8()? as usize; let mut nodes = Vec::with_capacity(num_node_vectors); for _ in 0..num_node_vectors { - // read the number of digests in the vector - let num_digests = node_bytes.read_u8()? as usize; - // read the digests and add them to the node vector - let digests = node_bytes.read_many(num_digests)?; + let digests = Vec::<_>::read_from(source)?; nodes.push(digests); } - Ok(BatchMerkleProof { leaves, nodes, depth }) + Ok(BatchMerkleProof { nodes, depth }) } } @@ -475,12 +431,12 @@ fn are_siblings(left: usize, right: usize) -> bool { left & 1 == 0 && right - 1 == left } -/// Computes the Merkle path from the computed (partial) tree. -pub fn get_path( +/// Computes the Merkle proof from the computed (partial) tree. 
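// Sketch (not part of the patch): with the Serializable/Deserializable impls above, a batch
// proof is self-describing (depth plus node vectors) and round-trips through bytes without the
// out-of-band `leaves` and `depth` arguments that the removed `deserialize` method required.
// `Blake3` is the illustrative hasher alias used in the earlier sketches.
//
// let bytes = batch.to_bytes();
// let parsed = BatchMerkleProof::<Blake3>::read_from_bytes(&bytes).unwrap();
// assert_eq!(batch, parsed);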
+pub fn get_proof( index: usize, tree: &BTreeMap::Digest>, depth: usize, -) -> Result, MerkleTreeError> { +) -> Result, MerkleTreeError> { let mut index = index + (1 << depth); let leaf = if let Some(leaf) = tree.get(&index) { *leaf @@ -488,7 +444,7 @@ pub fn get_path( return Err(MerkleTreeError::InvalidProof); }; - let mut proof = vec![leaf]; + let mut proof = vec![]; while index > 1 { let leaf = if let Some(leaf) = tree.get(&(index ^ 1)) { *leaf @@ -500,5 +456,5 @@ pub fn get_path( index >>= 1; } - Ok(proof) + Ok((leaf, proof)) } diff --git a/crypto/src/merkle/tests.rs b/crypto/src/merkle/tests.rs index 6610eb908..f66c638a2 100644 --- a/crypto/src/merkle/tests.rs +++ b/crypto/src/merkle/tests.rs @@ -89,31 +89,29 @@ fn prove() { let leaves = Digest256::bytes_as_digests(&LEAVES4).to_vec(); let tree = MerkleTree::::new(leaves.clone()).unwrap(); - let proof = vec![leaves[1], leaves[0], hash_2x1(leaves[2], leaves[3])]; - assert_eq!(proof, tree.prove(1).unwrap()); + let proof = vec![leaves[0], hash_2x1(leaves[2], leaves[3])]; + assert_eq!((leaves[1], proof), tree.prove(1).unwrap()); - let proof = vec![leaves[2], leaves[3], hash_2x1(leaves[0], leaves[1])]; - assert_eq!(proof, tree.prove(2).unwrap()); + let proof = vec![leaves[3], hash_2x1(leaves[0], leaves[1])]; + assert_eq!((leaves[2], proof), tree.prove(2).unwrap()); // depth 5 let leaves = Digest256::bytes_as_digests(&LEAVES8).to_vec(); let tree = MerkleTree::::new(leaves.clone()).unwrap(); let proof = vec![ - leaves[1], leaves[0], hash_2x1(leaves[2], leaves[3]), hash_2x1(hash_2x1(leaves[4], leaves[5]), hash_2x1(leaves[6], leaves[7])), ]; - assert_eq!(proof, tree.prove(1).unwrap()); + assert_eq!((leaves[1], proof), tree.prove(1).unwrap()); let proof = vec![ - leaves[6], leaves[7], hash_2x1(leaves[4], leaves[5]), hash_2x1(hash_2x1(leaves[0], leaves[1]), hash_2x1(leaves[2], leaves[3])), ]; - assert_eq!(proof, tree.prove(6).unwrap()); + assert_eq!((leaves[6], proof), tree.prove(6).unwrap()); } #[test] @@ -121,20 +119,20 @@ fn verify() { // depth 4 let leaves = Digest256::bytes_as_digests(&LEAVES4).to_vec(); let tree = MerkleTree::::new(leaves).unwrap(); - let proof = tree.prove(1).unwrap(); - assert!(MerkleTree::::verify(*tree.root(), 1, &proof).is_ok()); + let (leaf, proof) = tree.prove(1).unwrap(); + assert!(MerkleTree::::verify(*tree.root(), 1, leaf, &proof).is_ok()); - let proof = tree.prove(2).unwrap(); - assert!(MerkleTree::::verify(*tree.root(), 2, &proof).is_ok()); + let (leaf, proof) = tree.prove(2).unwrap(); + assert!(MerkleTree::::verify(*tree.root(), 2, leaf, &proof).is_ok()); // depth 5 - let leaves = Digest256::bytes_as_digests(&LEAVES8).to_vec(); - let tree = MerkleTree::::new(leaves).unwrap(); - let proof = tree.prove(1).unwrap(); - assert!(MerkleTree::::verify(*tree.root(), 1, &proof).is_ok()); + let leaf = Digest256::bytes_as_digests(&LEAVES8).to_vec(); + let tree = MerkleTree::::new(leaf).unwrap(); + let (leaf, proof) = tree.prove(1).unwrap(); + assert!(MerkleTree::::verify(*tree.root(), 1, leaf, &proof).is_ok()); - let proof = tree.prove(6).unwrap(); - assert!(MerkleTree::::verify(*tree.root(), 6, &proof).is_ok()); + let (leaf, proof) = tree.prove(6).unwrap(); + assert!(MerkleTree::::verify(*tree.root(), 6, leaf, &proof).is_ok()); } #[test] @@ -150,9 +148,9 @@ fn prove_batch() { hash_2x1(leaves[2], leaves[3]), hash_2x1(hash_2x1(leaves[4], leaves[5]), hash_2x1(leaves[6], leaves[7])), ]]; - assert_eq!(expected_values, proof.leaves); - assert_eq!(expected_nodes, proof.nodes); - assert_eq!(3, proof.depth); + 
assert_eq!(expected_values, proof.0); + assert_eq!(expected_nodes, proof.1.nodes); + assert_eq!(3, proof.1.depth); // 2 indexes let proof = tree.prove_batch(&[1, 2]).unwrap(); @@ -164,9 +162,9 @@ fn prove_batch() { ], vec![leaves[3]], ]; - assert_eq!(expected_values, proof.leaves); - assert_eq!(expected_nodes, proof.nodes); - assert_eq!(3, proof.depth); + assert_eq!(expected_values, proof.0); + assert_eq!(expected_nodes, proof.1.nodes); + assert_eq!(3, proof.1.depth); // 2 indexes on opposite sides let proof = tree.prove_batch(&[1, 6]).unwrap(); @@ -175,16 +173,16 @@ fn prove_batch() { vec![leaves[0], hash_2x1(leaves[2], leaves[3])], vec![leaves[7], hash_2x1(leaves[4], leaves[5])], ]; - assert_eq!(expected_values, proof.leaves); - assert_eq!(expected_nodes, proof.nodes); - assert_eq!(3, proof.depth); + assert_eq!(expected_values, proof.0); + assert_eq!(expected_nodes, proof.1.nodes); + assert_eq!(3, proof.1.depth); // all indexes let proof = tree.prove_batch(&[0, 1, 2, 3, 4, 5, 6, 7]).unwrap(); let expected_nodes: Vec> = vec![vec![], vec![], vec![], vec![]]; - assert_eq!(leaves, proof.leaves); - assert_eq!(expected_nodes, proof.nodes); - assert_eq!(3, proof.depth); + assert_eq!(leaves, proof.0); + assert_eq!(expected_nodes, proof.1.nodes); + assert_eq!(3, proof.1.depth); } #[test] @@ -192,48 +190,68 @@ fn verify_batch() { let leaves = Digest256::bytes_as_digests(&LEAVES8).to_vec(); let tree = MerkleTree::::new(leaves).unwrap(); - let proof = tree.prove_batch(&[1]).unwrap(); - assert!(MerkleTree::verify_batch(tree.root(), &[1], &proof).is_ok()); - assert!(MerkleTree::verify_batch(tree.root(), &[2], &proof).is_err()); + let (leaves, proof) = tree.prove_batch(&[1]).unwrap(); + assert!(MerkleTree::verify_batch(tree.root(), &[1], &leaves, &proof).is_ok()); + assert!(MerkleTree::verify_batch(tree.root(), &[2], &leaves, &proof).is_err()); - let proof = tree.prove_batch(&[1, 2]).unwrap(); - assert!(MerkleTree::verify_batch(tree.root(), &[1, 2], &proof).is_ok()); - assert!(MerkleTree::verify_batch(tree.root(), &[1], &proof).is_err()); - assert!(MerkleTree::verify_batch(tree.root(), &[1, 3], &proof).is_err()); - assert!(MerkleTree::verify_batch(tree.root(), &[1, 2, 3], &proof).is_err()); + let (leaves, proof) = tree.prove_batch(&[1, 2]).unwrap(); + assert!(MerkleTree::verify_batch(tree.root(), &[1, 2], &leaves, &proof).is_ok()); + assert!(MerkleTree::verify_batch(tree.root(), &[1], &leaves, &proof).is_err()); + assert!(MerkleTree::verify_batch(tree.root(), &[1, 3], &leaves, &proof).is_err()); + assert!(MerkleTree::verify_batch(tree.root(), &[1, 2, 3], &leaves, &proof).is_err()); - let proof = tree.prove_batch(&[1, 6]).unwrap(); - assert!(MerkleTree::verify_batch(tree.root(), &[1, 6], &proof).is_ok()); + let (leaves, proof) = tree.prove_batch(&[1, 6]).unwrap(); + assert!(MerkleTree::verify_batch(tree.root(), &[1, 6], &leaves, &proof).is_ok()); - let proof = tree.prove_batch(&[1, 3, 6]).unwrap(); - assert!(MerkleTree::verify_batch(tree.root(), &[1, 3, 6], &proof).is_ok()); + let (leaves, proof) = tree.prove_batch(&[1, 3, 6]).unwrap(); + assert!(MerkleTree::verify_batch(tree.root(), &[1, 3, 6], &leaves, &proof).is_ok()); - let proof = tree.prove_batch(&[0, 1, 2, 3, 4, 5, 6, 7]).unwrap(); - assert!(MerkleTree::verify_batch(tree.root(), &[0, 1, 2, 3, 4, 5, 6, 7], &proof).is_ok()); + let (leaves, proof) = tree.prove_batch(&[0, 1, 2, 3, 4, 5, 6, 7]).unwrap(); + assert!( + MerkleTree::verify_batch(tree.root(), &[0, 1, 2, 3, 4, 5, 6, 7], &leaves, &proof).is_ok() + ); } #[test] -fn verify_into_paths() { +fn 
verify_into_openings() { let leaves = Digest256::bytes_as_digests(&LEAVES8).to_vec(); let tree = MerkleTree::::new(leaves).unwrap(); - let proof1 = tree.prove(1).unwrap(); - let proof2 = tree.prove(2).unwrap(); - let proof1_2 = tree.prove_batch(&[1, 2]).unwrap(); - let result = proof1_2.into_paths(&[1, 2]).unwrap(); + let (_, proof1) = tree.prove(1).unwrap(); + let (_, proof2) = tree.prove(2).unwrap(); + let (leaves1_2, proof1_2) = tree.prove_batch(&[1, 2]).unwrap(); + let result = proof1_2.into_openings(&leaves1_2, &[1, 2]).unwrap(); + + assert_eq!(proof1, result[0].1); + assert_eq!(proof2, result[1].1); - assert_eq!(proof1, result[0]); - assert_eq!(proof2, result[1]); + let (_, proof3) = tree.prove(3).unwrap(); + let (_, proof4) = tree.prove(4).unwrap(); + let (_, proof6) = tree.prove(5).unwrap(); + let (leaves, proof3_4_6) = tree.prove_batch(&[3, 4, 5]).unwrap(); + let result = proof3_4_6.into_openings(&leaves, &[3, 4, 5]).unwrap(); - let proof3 = tree.prove(3).unwrap(); - let proof4 = tree.prove(4).unwrap(); - let proof6 = tree.prove(5).unwrap(); - let proof3_4_6 = tree.prove_batch(&[3, 4, 5]).unwrap(); - let result = proof3_4_6.into_paths(&[3, 4, 5]).unwrap(); + assert_eq!(proof3, result[0].1); + assert_eq!(proof4, result[1].1); + assert_eq!(proof6, result[2].1); +} + +#[test] +fn from_proofs() { + let leaves = Digest256::bytes_as_digests(&LEAVES8).to_vec(); + let tree = MerkleTree::::new(leaves).unwrap(); + let indices: Vec = vec![1, 2]; + let (_, proof1) = tree.prove_batch(&indices[..]).unwrap(); + + let mut proofs = Vec::new(); + for &idx in indices.iter() { + proofs.push(tree.prove(idx).unwrap()); + } + let proof2: BatchMerkleProof = + BatchMerkleProof::from_single_proofs(&proofs, &indices); - assert_eq!(proof3, result[0]); - assert_eq!(proof4, result[1]); - assert_eq!(proof6, result[2]); + assert!(proof1.nodes == proof2.nodes); + assert_eq!(proof1.depth, proof2.depth); } proptest! { @@ -242,8 +260,8 @@ proptest! { proof_indices in prop::collection::vec(any::(), 10..20) ) { for proof_index in proof_indices{ - let proof = tree.prove(proof_index.index(128)).unwrap(); - prop_assert!(MerkleTree::::verify(*tree.root(), proof_index.index(128), &proof).is_ok()) + let (leaves, proof) = tree.prove(proof_index.index(128)).unwrap(); + prop_assert!(MerkleTree::::verify(*tree.root(), proof_index.index(128), leaves, &proof).is_ok()) } } @@ -253,43 +271,43 @@ proptest! 
{ ) { let mut indices: Vec = proof_indices.iter().map(|idx| idx.index(128)).collect(); indices.sort_unstable(); indices.dedup(); - let proof = tree.prove_batch(&indices[..]).unwrap(); - prop_assert!(MerkleTree::verify_batch(tree.root(), &indices[..], &proof).is_ok()); + let (leaves, proof) = tree.prove_batch(&indices[..]).unwrap(); + prop_assert!(MerkleTree::verify_batch(tree.root(), &indices[..], &leaves, &proof).is_ok()); } #[test] - fn batch_proof_from_paths(tree in random_blake3_merkle_tree(128), + fn batch_proof_from_proofs(tree in random_blake3_merkle_tree(128), proof_indices in prop::collection::vec(any::(), 10..20) ) { let mut indices: Vec = proof_indices.iter().map(|idx| idx.index(128)).collect(); indices.sort_unstable(); indices.dedup(); - let proof1 = tree.prove_batch(&indices[..]).unwrap(); + let (_, proof1) = tree.prove_batch(&indices[..]).unwrap(); - let mut paths = Vec::new(); + let mut proofs = Vec::new(); for &idx in indices.iter() { - paths.push(tree.prove(idx).unwrap()); + proofs.push(tree.prove(idx).unwrap()); } - let proof2 = BatchMerkleProof::from_paths(&paths, &indices); + let proof2 = BatchMerkleProof::from_single_proofs(&proofs, &indices); prop_assert!(proof1 == proof2); } #[test] - fn into_paths(tree in random_blake3_merkle_tree(32), + fn into_openings(tree in random_blake3_merkle_tree(32), proof_indices in prop::collection::vec(any::(), 1..30) ) { let mut indices: Vec = proof_indices.iter().map(|idx| idx.index(32)).collect(); indices.sort_unstable(); indices.dedup(); - let proof1 = tree.prove_batch(&indices[..]).unwrap(); + let (values1, proof1) = tree.prove_batch(&indices[..]).unwrap(); - let mut paths_expected = Vec::new(); + let mut proofs_expected = Vec::new(); for &idx in indices.iter() { - paths_expected.push(tree.prove(idx).unwrap()); + proofs_expected.push(tree.prove(idx).unwrap().1); } - let paths = proof1.into_paths(&indices); + let proofs: Vec<_> = proof1.into_openings(&values1, &indices).unwrap().into_iter().map(|(_, proofs)| proofs).collect(); - prop_assert!(paths_expected == paths.unwrap()); + prop_assert!(proofs_expected == proofs); } } diff --git a/examples/src/fibonacci/fib2/mod.rs b/examples/src/fibonacci/fib2/mod.rs index 49fd8f00d..ddc6cf77e 100644 --- a/examples/src/fibonacci/fib2/mod.rs +++ b/examples/src/fibonacci/fib2/mod.rs @@ -8,7 +8,7 @@ use std::time::Instant; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher}, + crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, math::{fields::f128::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -87,7 +87,7 @@ impl FibExample { impl Example for FibExample where - H: ElementHasher, + H: ElementHasher + Sync, { fn prove(&self) -> Proof { println!( @@ -115,7 +115,7 @@ where let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, self.result, &acceptable_options, @@ -125,7 +125,7 @@ where fn verify_with_wrong_inputs(&self, proof: Proof) -> Result<(), VerifierError> { let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, self.result + BaseElement::ONE, &acceptable_options, diff --git a/examples/src/fibonacci/fib2/prover.rs b/examples/src/fibonacci/fib2/prover.rs index 696bcf93d..9fb3dd500 100644 --- a/examples/src/fibonacci/fib2/prover.rs +++ b/examples/src/fibonacci/fib2/prover.rs @@ -4,7 +4,7 @@ // 
LICENSE file in the root directory of this source tree. use winterfell::{ - matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, }; @@ -50,14 +50,16 @@ impl FibProver { impl Prover for FibProver where - H: ElementHasher, + H: ElementHasher + Sync, { type BaseField = BaseElement; type Air = FibAir; type Trace = TraceTable; type HashFn = H; + type VC = MerkleTree; type RandomCoin = DefaultRandomCoin; - type TraceLde> = DefaultTraceLde; + type TraceLde> = + DefaultTraceLde; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; diff --git a/examples/src/fibonacci/fib8/mod.rs b/examples/src/fibonacci/fib8/mod.rs index 28962df26..322079c21 100644 --- a/examples/src/fibonacci/fib8/mod.rs +++ b/examples/src/fibonacci/fib8/mod.rs @@ -8,7 +8,7 @@ use std::time::Instant; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher}, + crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, math::{fields::f128::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -87,7 +87,7 @@ impl Fib8Example { impl Example for Fib8Example where - H: ElementHasher, + H: ElementHasher + Sync, { fn prove(&self) -> Proof { println!( @@ -114,7 +114,7 @@ where fn verify(&self, proof: Proof) -> Result<(), VerifierError> { let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, self.result, &acceptable_options, @@ -124,7 +124,7 @@ where fn verify_with_wrong_inputs(&self, proof: Proof) -> Result<(), VerifierError> { let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, self.result + BaseElement::ONE, &acceptable_options, diff --git a/examples/src/fibonacci/fib8/prover.rs b/examples/src/fibonacci/fib8/prover.rs index cc995a62d..425bfbd42 100644 --- a/examples/src/fibonacci/fib8/prover.rs +++ b/examples/src/fibonacci/fib8/prover.rs @@ -4,7 +4,7 @@ // LICENSE file in the root directory of this source tree. 
use winterfell::{ - matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, }; @@ -65,14 +65,16 @@ impl Fib8Prover { impl Prover for Fib8Prover where - H: ElementHasher, + H: ElementHasher + Sync, { type BaseField = BaseElement; type Air = Fib8Air; type Trace = TraceTable; type HashFn = H; + type VC = MerkleTree; type RandomCoin = DefaultRandomCoin; - type TraceLde> = DefaultTraceLde; + type TraceLde> = + DefaultTraceLde; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; diff --git a/examples/src/fibonacci/fib_small/mod.rs b/examples/src/fibonacci/fib_small/mod.rs index ce3fc229a..672605ac4 100644 --- a/examples/src/fibonacci/fib_small/mod.rs +++ b/examples/src/fibonacci/fib_small/mod.rs @@ -8,7 +8,7 @@ use std::time::Instant; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher}, + crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, math::{fields::f64::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -98,7 +98,7 @@ impl FibExample { impl Example for FibExample where - H: ElementHasher, + H: ElementHasher + Sync, { fn prove(&self) -> Proof { println!( @@ -126,7 +126,7 @@ where let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, self.result, &acceptable_options, @@ -136,7 +136,7 @@ where fn verify_with_wrong_inputs(&self, proof: Proof) -> Result<(), VerifierError> { let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, self.result + BaseElement::ONE, &acceptable_options, diff --git a/examples/src/fibonacci/fib_small/prover.rs b/examples/src/fibonacci/fib_small/prover.rs index 40285a386..53ba615da 100644 --- a/examples/src/fibonacci/fib_small/prover.rs +++ b/examples/src/fibonacci/fib_small/prover.rs @@ -3,7 +3,7 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. 
use winterfell::{ - matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, }; @@ -16,12 +16,18 @@ use super::{ // FIBONACCI PROVER // ================================================================================================ -pub struct FibSmallProver { +pub struct FibSmallProver +where + H: Sync, +{ options: ProofOptions, _hasher: PhantomData, } -impl FibSmallProver { +impl FibSmallProver +where + H: Sync, +{ pub fn new(options: ProofOptions) -> Self { Self { options, _hasher: PhantomData } } @@ -47,7 +53,7 @@ impl FibSmallProver { } } -impl Prover for FibSmallProver +impl Prover for FibSmallProver where H: ElementHasher, { @@ -55,8 +61,10 @@ where type Air = FibSmall; type Trace = TraceTable; type HashFn = H; + type VC = MerkleTree; type RandomCoin = DefaultRandomCoin; - type TraceLde> = DefaultTraceLde; + type TraceLde> = + DefaultTraceLde; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; diff --git a/examples/src/fibonacci/mulfib2/mod.rs b/examples/src/fibonacci/mulfib2/mod.rs index e8da735e3..d7b3e11d8 100644 --- a/examples/src/fibonacci/mulfib2/mod.rs +++ b/examples/src/fibonacci/mulfib2/mod.rs @@ -8,7 +8,7 @@ use std::time::Instant; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher}, + crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, math::{fields::f128::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -86,7 +86,7 @@ impl MulFib2Example { impl Example for MulFib2Example where - H: ElementHasher, + H: ElementHasher + Sync, { fn prove(&self) -> Proof { let sequence_length = self.sequence_length; @@ -114,7 +114,7 @@ where fn verify(&self, proof: Proof) -> Result<(), VerifierError> { let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, self.result, &acceptable_options, @@ -124,7 +124,7 @@ where fn verify_with_wrong_inputs(&self, proof: Proof) -> Result<(), VerifierError> { let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, self.result + BaseElement::ONE, &acceptable_options, diff --git a/examples/src/fibonacci/mulfib2/prover.rs b/examples/src/fibonacci/mulfib2/prover.rs index 6636b5f79..b1daba2fb 100644 --- a/examples/src/fibonacci/mulfib2/prover.rs +++ b/examples/src/fibonacci/mulfib2/prover.rs @@ -4,7 +4,7 @@ // LICENSE file in the root directory of this source tree. 
use winterfell::{ - matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, }; @@ -46,14 +46,16 @@ impl MulFib2Prover { impl Prover for MulFib2Prover where - H: ElementHasher, + H: ElementHasher + Sync, { type BaseField = BaseElement; type Air = MulFib2Air; type Trace = TraceTable; type HashFn = H; + type VC = MerkleTree; type RandomCoin = DefaultRandomCoin; - type TraceLde> = DefaultTraceLde; + type TraceLde> = + DefaultTraceLde; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; diff --git a/examples/src/fibonacci/mulfib8/mod.rs b/examples/src/fibonacci/mulfib8/mod.rs index 8289831a4..43bd27be0 100644 --- a/examples/src/fibonacci/mulfib8/mod.rs +++ b/examples/src/fibonacci/mulfib8/mod.rs @@ -8,7 +8,7 @@ use std::time::Instant; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher}, + crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, math::{fields::f128::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -87,7 +87,7 @@ impl MulFib8Example { impl Example for MulFib8Example where - H: ElementHasher, + H: ElementHasher + Sync, { fn prove(&self) -> Proof { let sequence_length = self.sequence_length; @@ -115,7 +115,7 @@ where fn verify(&self, proof: Proof) -> Result<(), VerifierError> { let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, self.result, &acceptable_options, @@ -125,7 +125,7 @@ where fn verify_with_wrong_inputs(&self, proof: Proof) -> Result<(), VerifierError> { let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, self.result + BaseElement::ONE, &acceptable_options, diff --git a/examples/src/fibonacci/mulfib8/prover.rs b/examples/src/fibonacci/mulfib8/prover.rs index f1c693e98..20297d0e5 100644 --- a/examples/src/fibonacci/mulfib8/prover.rs +++ b/examples/src/fibonacci/mulfib8/prover.rs @@ -4,7 +4,7 @@ // LICENSE file in the root directory of this source tree. 
use winterfell::{ - matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, }; @@ -58,14 +58,16 @@ impl MulFib8Prover { impl Prover for MulFib8Prover where - H: ElementHasher, + H: ElementHasher + Sync, { type BaseField = BaseElement; type Air = MulFib8Air; type Trace = TraceTable; type HashFn = H; + type VC = MerkleTree; type RandomCoin = DefaultRandomCoin; - type TraceLde> = DefaultTraceLde; + type TraceLde> = + DefaultTraceLde; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; diff --git a/examples/src/lamport/aggregate/mod.rs b/examples/src/lamport/aggregate/mod.rs index be91bd1dd..6dd2a8d02 100644 --- a/examples/src/lamport/aggregate/mod.rs +++ b/examples/src/lamport/aggregate/mod.rs @@ -8,7 +8,7 @@ use std::time::Instant; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher}, + crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, math::{fields::f128::BaseElement, get_power_series, FieldElement, StarkField}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -114,7 +114,7 @@ impl LamportAggregateExample { impl Example for LamportAggregateExample where - H: ElementHasher, + H: ElementHasher + Sync, { fn prove(&self) -> Proof { // generate the execution trace @@ -144,7 +144,7 @@ where }; let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, pub_inputs, &acceptable_options, @@ -160,7 +160,7 @@ where }; let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, pub_inputs, &acceptable_options, diff --git a/examples/src/lamport/aggregate/prover.rs b/examples/src/lamport/aggregate/prover.rs index df27166d3..51d8e9c30 100644 --- a/examples/src/lamport/aggregate/prover.rs +++ b/examples/src/lamport/aggregate/prover.rs @@ -6,7 +6,7 @@ #[cfg(feature = "concurrent")] use winterfell::iterators::*; use winterfell::{ - matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, TraceInfo, TracePolyTable, TraceTable, }; @@ -95,14 +95,16 @@ impl LamportAggregateProver { impl Prover for LamportAggregateProver where - H: ElementHasher, + H: ElementHasher + Sync, { type BaseField = BaseElement; type Air = LamportAggregateAir; type Trace = TraceTable; type HashFn = H; + type VC = MerkleTree; type RandomCoin = DefaultRandomCoin; - type TraceLde> = DefaultTraceLde; + type TraceLde> = + DefaultTraceLde; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; diff --git a/examples/src/lamport/signature.rs b/examples/src/lamport/signature.rs index d4350b3dd..6818adfde 100644 --- a/examples/src/lamport/signature.rs +++ b/examples/src/lamport/signature.rs @@ -5,11 +5,9 @@ use std::cmp::Ordering; +use core_utils::Serializable; use rand_utils::prng_vector; -use winterfell::{ - math::{fields::f128::BaseElement, FieldElement, StarkField}, - Serializable, -}; +use winterfell::math::{fields::f128::BaseElement, FieldElement, StarkField}; use super::rescue::Rescue128; diff --git 
a/examples/src/lamport/threshold/mod.rs b/examples/src/lamport/threshold/mod.rs index 33eaf8cbd..c64fa7755 100644 --- a/examples/src/lamport/threshold/mod.rs +++ b/examples/src/lamport/threshold/mod.rs @@ -8,7 +8,7 @@ use std::time::Instant; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher}, + crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, math::{fields::f128::BaseElement, get_power_series, FieldElement, StarkField}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -112,7 +112,7 @@ impl LamportThresholdExample { impl Example for LamportThresholdExample where - H: ElementHasher, + H: ElementHasher + Sync, { fn prove(&self) -> Proof { // generate the execution trace @@ -152,7 +152,7 @@ where }; let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, pub_inputs, &acceptable_options, @@ -168,7 +168,7 @@ where }; let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, pub_inputs, &acceptable_options, diff --git a/examples/src/lamport/threshold/prover.rs b/examples/src/lamport/threshold/prover.rs index 5b7e76217..f5c9c748b 100644 --- a/examples/src/lamport/threshold/prover.rs +++ b/examples/src/lamport/threshold/prover.rs @@ -8,7 +8,7 @@ use std::collections::HashMap; #[cfg(feature = "concurrent")] use winterfell::iterators::*; use winterfell::{ - matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, TraceInfo, TracePolyTable, TraceTable, }; @@ -137,14 +137,16 @@ impl LamportThresholdProver { impl Prover for LamportThresholdProver where - H: ElementHasher, + H: ElementHasher + Sync, { type BaseField = BaseElement; type Air = LamportThresholdAir; type Trace = TraceTable; type HashFn = H; + type VC = MerkleTree; type RandomCoin = DefaultRandomCoin; - type TraceLde> = DefaultTraceLde; + type TraceLde> = + DefaultTraceLde; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; diff --git a/examples/src/lamport/threshold/signature.rs b/examples/src/lamport/threshold/signature.rs index 6fc7c0894..ec579d420 100644 --- a/examples/src/lamport/threshold/signature.rs +++ b/examples/src/lamport/threshold/signature.rs @@ -78,6 +78,9 @@ impl AggPublicKey { /// Returns a Merkle path to the specified leaf. 
pub fn get_leaf_path(&self, index: usize) -> Vec { - self.tree.prove(index).unwrap() + let (leaf, path) = self.tree.prove(index).unwrap(); + let mut result = vec![leaf]; + result.extend_from_slice(&path); + result } } diff --git a/examples/src/merkle/mod.rs b/examples/src/merkle/mod.rs index 0538716f8..6b8771218 100644 --- a/examples/src/merkle/mod.rs +++ b/examples/src/merkle/mod.rs @@ -82,7 +82,10 @@ impl MerkleExample { // compute Merkle path from the leaf specified by the index let now = Instant::now(); - let path = tree.prove(index).unwrap(); + let (leaf, path) = tree.prove(index).unwrap(); + let mut result = vec![leaf]; + result.extend_from_slice(&path); + println!( "Computed Merkle path from leaf {} to root {} in {} ms", index, @@ -95,7 +98,7 @@ impl MerkleExample { tree_root: *tree.root(), value, index, - path, + path: result, _hasher: PhantomData, } } @@ -106,7 +109,7 @@ impl MerkleExample { impl Example for MerkleExample where - H: ElementHasher, + H: ElementHasher + Sync, { fn prove(&self) -> Proof { // generate the execution trace @@ -134,7 +137,7 @@ where let pub_inputs = PublicInputs { tree_root: self.tree_root.to_elements() }; let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, pub_inputs, &acceptable_options, @@ -146,7 +149,7 @@ where let pub_inputs = PublicInputs { tree_root: [tree_root[1], tree_root[0]] }; let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, pub_inputs, &acceptable_options, diff --git a/examples/src/merkle/prover.rs b/examples/src/merkle/prover.rs index 10070279e..db6d7f407 100644 --- a/examples/src/merkle/prover.rs +++ b/examples/src/merkle/prover.rs @@ -4,7 +4,7 @@ // LICENSE file in the root directory of this source tree.
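Both `prove` call sites above repeat the same leaf-prepending steps, so the intent can be captured in one small helper. This is a sketch only: `full_path` is a hypothetical name, and the `(leaf, path)` return shape of the updated `prove` API is inferred from the diff.

// hypothetical helper mirroring the two hunks above: rebuild the old
// leaf-first path layout from the new (leaf, path) return value
fn full_path<H: Hasher>(tree: &MerkleTree<H>, index: usize) -> Vec<H::Digest> {
    let (leaf, path) = tree.prove(index).expect("failed to build Merkle path");
    let mut result = vec![leaf];
    result.extend_from_slice(&path);
    result
}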
use winterfell::{ - matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, }; @@ -99,14 +99,16 @@ impl MerkleProver { impl Prover for MerkleProver where - H: ElementHasher, + H: ElementHasher + Sync, { type BaseField = BaseElement; type Air = MerkleAir; type Trace = TraceTable; type HashFn = H; + type VC = MerkleTree; type RandomCoin = DefaultRandomCoin; - type TraceLde> = DefaultTraceLde; + type TraceLde> = + DefaultTraceLde; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; diff --git a/examples/src/rescue/mod.rs b/examples/src/rescue/mod.rs index 7f4e3e20b..5534625d5 100644 --- a/examples/src/rescue/mod.rs +++ b/examples/src/rescue/mod.rs @@ -8,7 +8,7 @@ use std::time::Instant; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher}, + crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, math::{fields::f128::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -94,7 +94,7 @@ impl RescueExample { impl Example for RescueExample where - H: ElementHasher, + H: ElementHasher + Sync, { fn prove(&self) -> Proof { // generate the execution trace @@ -120,7 +120,7 @@ where let pub_inputs = PublicInputs { seed: self.seed, result: self.result }; let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, pub_inputs, &acceptable_options, @@ -134,7 +134,7 @@ where }; let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, pub_inputs, &acceptable_options, diff --git a/examples/src/rescue/prover.rs b/examples/src/rescue/prover.rs index 5fc2224b5..050838af6 100644 --- a/examples/src/rescue/prover.rs +++ b/examples/src/rescue/prover.rs @@ -4,7 +4,7 @@ // LICENSE file in the root directory of this source tree. 
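For reference, the mangled `verify` turbofish in the hunks above presumably pins four type parameters: the AIR type, the hasher, the random coin, and the newly added vector commitment scheme. A hedged reconstruction for the Rescue example (generic arguments inferred from context, not quoted from the patch):

winterfell::verify::<RescueAir, H, DefaultRandomCoin<H>, MerkleTree<H>>(
    proof,
    pub_inputs,
    &acceptable_options,
)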
use winterfell::{ - matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, }; @@ -65,14 +65,16 @@ impl RescueProver { impl Prover for RescueProver where - H: ElementHasher, + H: ElementHasher + Sync, { type BaseField = BaseElement; type Air = RescueAir; type Trace = TraceTable; type HashFn = H; + type VC = MerkleTree; type RandomCoin = DefaultRandomCoin; - type TraceLde> = DefaultTraceLde; + type TraceLde> = + DefaultTraceLde; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; diff --git a/examples/src/rescue_raps/mod.rs b/examples/src/rescue_raps/mod.rs index 4ee52b480..533298097 100644 --- a/examples/src/rescue_raps/mod.rs +++ b/examples/src/rescue_raps/mod.rs @@ -9,7 +9,7 @@ use std::time::Instant; use rand_utils::rand_array; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher}, + crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, math::{fields::f128::BaseElement, ExtensionOf, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -107,7 +107,7 @@ impl RescueRapsExample { impl Example for RescueRapsExample where - H: ElementHasher, + H: ElementHasher + Sync, { fn prove(&self) -> Proof { // generate the execution trace @@ -134,7 +134,7 @@ where let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, pub_inputs, &acceptable_options, @@ -146,7 +146,7 @@ where let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, pub_inputs, &acceptable_options, diff --git a/examples/src/rescue_raps/prover.rs b/examples/src/rescue_raps/prover.rs index 2be9afafa..7adee9bbb 100644 --- a/examples/src/rescue_raps/prover.rs +++ b/examples/src/rescue_raps/prover.rs @@ -5,7 +5,7 @@ use core_utils::uninit_vector; use winterfell::{ - matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, }; @@ -94,14 +94,16 @@ impl RescueRapsProver { impl Prover for RescueRapsProver where - H: ElementHasher, + H: ElementHasher + Sync, { type BaseField = BaseElement; type Air = RescueRapsAir; type Trace = RapTraceTable; type HashFn = H; + type VC = MerkleTree; type RandomCoin = DefaultRandomCoin; - type TraceLde> = DefaultTraceLde; + type TraceLde> = + DefaultTraceLde; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; diff --git a/examples/src/utils/rescue.rs b/examples/src/utils/rescue.rs index 323d44bf0..e09cb094e 100644 --- a/examples/src/utils/rescue.rs +++ b/examples/src/utils/rescue.rs @@ -5,10 +5,10 @@ use core::slice; +use core_utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; use winterfell::{ crypto::{Digest, Hasher}, math::{fields::f128::BaseElement, FieldElement}, - ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable, }; use crate::utils::{are_equal, EvaluationResult}; diff --git a/examples/src/vdf/exempt/mod.rs b/examples/src/vdf/exempt/mod.rs index 766adb5e9..cc1dd53e9 100644 --- 
a/examples/src/vdf/exempt/mod.rs +++ b/examples/src/vdf/exempt/mod.rs @@ -8,7 +8,7 @@ use std::time::Instant; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher}, + crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, math::{fields::f128::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -86,7 +86,7 @@ impl VdfExample { impl Example for VdfExample where - H: ElementHasher, + H: ElementHasher + Sync, { fn prove(&self) -> Proof { println!("Generating proof for executing a VDF function for {} steps", self.num_steps); @@ -111,7 +111,7 @@ where let pub_inputs = VdfInputs { seed: self.seed, result: self.result }; let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, pub_inputs, &acceptable_options, @@ -125,7 +125,7 @@ where }; let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, pub_inputs, &acceptable_options, diff --git a/examples/src/vdf/exempt/prover.rs b/examples/src/vdf/exempt/prover.rs index af50e49a3..cc5d3e8e8 100644 --- a/examples/src/vdf/exempt/prover.rs +++ b/examples/src/vdf/exempt/prover.rs @@ -4,7 +4,7 @@ // LICENSE file in the root directory of this source tree. use winterfell::{ - matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, }; @@ -46,14 +46,16 @@ impl VdfProver { impl Prover for VdfProver where - H: ElementHasher, + H: ElementHasher + Sync, { type BaseField = BaseElement; type Air = VdfAir; type Trace = TraceTable; type HashFn = H; + type VC = MerkleTree; type RandomCoin = DefaultRandomCoin; - type TraceLde> = DefaultTraceLde; + type TraceLde> = + DefaultTraceLde; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; diff --git a/examples/src/vdf/regular/mod.rs b/examples/src/vdf/regular/mod.rs index 7d69bc24b..3cdcaba3d 100644 --- a/examples/src/vdf/regular/mod.rs +++ b/examples/src/vdf/regular/mod.rs @@ -8,7 +8,7 @@ use std::time::Instant; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher}, + crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, math::{fields::f128::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -83,7 +83,7 @@ impl VdfExample { impl Example for VdfExample where - H: ElementHasher, + H: ElementHasher + Sync, { fn prove(&self) -> Proof { println!("Generating proof for executing a VDF function for {} steps", self.num_steps); @@ -108,7 +108,7 @@ where let pub_inputs = VdfInputs { seed: self.seed, result: self.result }; let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, pub_inputs, &acceptable_options, @@ -122,7 +122,7 @@ where }; let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, pub_inputs, &acceptable_options, diff --git a/examples/src/vdf/regular/prover.rs b/examples/src/vdf/regular/prover.rs index 12f272bb2..c880611ff 100644 --- a/examples/src/vdf/regular/prover.rs +++ 
b/examples/src/vdf/regular/prover.rs @@ -4,7 +4,7 @@ // LICENSE file in the root directory of this source tree. use winterfell::{ - matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, }; @@ -43,14 +43,16 @@ impl VdfProver { impl Prover for VdfProver where - H: ElementHasher, + H: ElementHasher + Sync, { type BaseField = BaseElement; type Air = VdfAir; type Trace = TraceTable; type HashFn = H; + type VC = MerkleTree; type RandomCoin = DefaultRandomCoin; - type TraceLde> = DefaultTraceLde; + type TraceLde> = + DefaultTraceLde; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; diff --git a/fri/benches/prover.rs b/fri/benches/prover.rs index b7b7c417f..bfc096fc3 100644 --- a/fri/benches/prover.rs +++ b/fri/benches/prover.rs @@ -6,7 +6,7 @@ use std::time::Duration; use criterion::{criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion}; -use crypto::{hashers::Blake3_256, DefaultRandomCoin}; +use crypto::{hashers::Blake3_256, DefaultRandomCoin, MerkleTree}; use math::{fft, fields::f128::BaseElement, FieldElement}; use rand_utils::rand_vector; use winter_fri::{DefaultProverChannel, FriOptions, FriProver}; @@ -28,7 +28,8 @@ pub fn build_layers(c: &mut Criterion) { BenchmarkId::new("build_layers", domain_size), &evaluations, |b, e| { - let mut prover = FriProver::new(options.clone()); + let mut prover = + FriProver::<_, _, _, MerkleTree>>::new(options.clone()); b.iter_batched( || e.clone(), |evaluations| { diff --git a/fri/src/lib.rs b/fri/src/lib.rs index 6f680c428..3d49ded34 100644 --- a/fri/src/lib.rs +++ b/fri/src/lib.rs @@ -51,7 +51,7 @@ //! * Base STARK field, //! * Extension field, //! * Domain blowup factor, -//! * Hash function (used for Merkle tree commitments), +//! * Hash function (used for building vector commitments), //! * Folding factor (used for degree reduction for each FRI layer), //! * Maximum size of the last FRI layer. //! diff --git a/fri/src/proof.rs b/fri/src/proof.rs index 73b05249a..65dd2af92 100644 --- a/fri/src/proof.rs +++ b/fri/src/proof.rs @@ -5,7 +5,7 @@ use alloc::{string::ToString, vec::Vec}; -use crypto::{BatchMerkleProof, ElementHasher, Hasher}; +use crypto::{ElementHasher, Hasher, VectorCommitment}; use math::FieldElement; use utils::{ ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable, SliceReader, @@ -17,14 +17,14 @@ use utils::{ /// A proof generated by a FRI prover. /// /// A FRI proof contains information proving that a function *f* is a polynomial of some bounded -/// degree *d*. FRI proofs cannot be instantiated directly - they must be generated by a instance -/// of a [FriProver](crate::FriProver), and can be verified by a instance of a +/// degree *d*. FRI proofs cannot be instantiated directly - they must be generated by an instance +/// of a [FriProver](crate::FriProver), and can be verified by an instance of a /// [FriVerifier](crate::FriVerifier) via [VerifierChannel](crate::VerifierChannel) interface. /// /// A proof consists of zero or more layers and a remainder polynomial. Each layer contains a set of -/// polynomial evaluations at positions queried by the verifier as well as Merkle authentication -/// paths for these evaluations (the Merkle paths are compressed into a batch Merkle proof). 
The -/// remainder polynomial is given by its list of coefficients i.e. field elements. +/// polynomial evaluations at positions queried by the verifier, a vector commitment to the LDE of +/// each polynomial, as well as opening proofs for the evaluations against the vector commitments. +/// The remainder polynomial is given by its list of coefficients, i.e., field elements. /// /// All values in a proof are stored as vectors of bytes. Thus, the values must be parsed before /// they can be returned to the user. To do this, [parse_layers()](FriProof::parse_layers()) @@ -113,8 +113,8 @@ impl FriProof { // PARSING // -------------------------------------------------------------------------------------------- - /// Decomposes this proof into vectors of query values for each layer and corresponding Merkle - /// authentication paths for each query (grouped into batch Merkle proofs). + /// Decomposes this proof into vectors of query values for each layer and corresponding batch + /// opening proofs. /// /// # Panics /// Panics if: @@ -126,14 +126,15 @@ impl FriProof { /// * This proof is not consistent with the specified `domain_size` and `folding_factor`. /// * Any of the layers could not be parsed successfully. #[allow(clippy::type_complexity)] - pub fn parse_layers( + pub fn parse_layers( self, mut domain_size: usize, folding_factor: usize, - ) -> Result<(Vec>, Vec>), DeserializationError> + ) -> Result<(Vec>, Vec<>::MultiProof>), DeserializationError> where E: FieldElement, H: ElementHasher, + V: VectorCommitment, { assert!(domain_size.is_power_of_two(), "domain size must be a power of two"); assert!(folding_factor.is_power_of_two(), "folding factor must be a power of two"); @@ -145,10 +146,20 @@ impl FriProof { // parse all layers for (i, layer) in self.layers.into_iter().enumerate() { domain_size /= folding_factor; - let (qv, mp) = layer.parse(domain_size, folding_factor).map_err(|err| { + let (qv, op) = layer.parse::<_, H, V>(folding_factor).map_err(|err| { DeserializationError::InvalidValue(format!("failed to parse FRI layer {i}: {err}")) })?; - layer_proofs.push(mp); + + // check that the opening proof matches the domain length + if >::get_multiproof_domain_len(&op) != domain_size { + return Err(DeserializationError::InvalidValue(format!( + "expected a domain of size {} but was {}", + domain_size, + >::get_multiproof_domain_len(&op), + ))); + } + + layer_proofs.push(op); layer_queries.push(qv); } @@ -235,14 +246,14 @@ pub struct FriProofLayer { impl FriProofLayer { // CONSTRUCTOR // -------------------------------------------------------------------------------------------- - /// Creates a new proof layer from the specified query values and the corresponding Merkle - /// paths aggregated into a single batch Merkle proof. + /// Creates a new proof layer from the specified query values and the corresponding batch + /// opening proof. /// /// # Panics /// Panics if `query_values` is an empty slice.
- pub(crate) fn new( + pub(crate) fn new, const N: usize>( query_values: Vec<[E; N]>, - merkle_proof: BatchMerkleProof, + proof: >::MultiProof, ) -> Self { assert!(!query_values.is_empty(), "query values cannot be empty"); @@ -251,13 +262,10 @@ impl FriProofLayer { let mut value_bytes = Vec::with_capacity(E::ELEMENT_BYTES * N * query_values.len()); value_bytes.write_many(&query_values); - // concatenate all query values and all internal Merkle proof nodes into vectors of bytes; - // we care about internal nodes only because leaf nodes can be reconstructed from hashes - // of query values - FriProofLayer { - values: value_bytes, - paths: merkle_proof.serialize_nodes(), - } + let mut proof_bytes = Vec::new(); + proof.write_into(&mut proof_bytes); + + FriProofLayer { values: value_bytes, paths: proof_bytes } } // PUBLIC ACCESSORS @@ -271,22 +279,22 @@ impl FriProofLayer { // PARSING // -------------------------------------------------------------------------------------------- - /// Decomposes this layer into a combination of query values and corresponding Merkle - /// authentication paths (grouped together into a single batch Merkle proof). + /// Decomposes this layer into a combination of query values and corresponding batch opening + /// proof. /// /// # Errors /// Returns an error if: /// * This layer does not contain at least one query. - /// * Parsing of any of the query values or the corresponding Merkle paths fails. + /// * Parsing of any of the query values or the corresponding batch opening proof fails. /// * Not all bytes have been consumed while parsing this layer. - pub fn parse( + pub fn parse( self, - domain_size: usize, folding_factor: usize, - ) -> Result<(Vec, BatchMerkleProof), DeserializationError> + ) -> Result<(Vec, >::MultiProof), DeserializationError> where E: FieldElement, H: ElementHasher, + V: VectorCommitment, { // make sure the number of value bytes can be parsed into a whole number of queries let num_query_bytes = E::ELEMENT_BYTES * folding_factor; @@ -307,7 +315,7 @@ impl FriProofLayer { let mut query_values = Vec::with_capacity(num_queries * folding_factor); // read bytes corresponding to each query, convert them into field elements, - // and also hash them to build leaf nodes of the batch Merkle proof + // and also hash them to build leaf nodes of the batch opening proof let mut reader = SliceReader::new(&self.values); for query_hash in hashed_queries.iter_mut() { let mut qe = reader.read_many(folding_factor)?; @@ -318,15 +326,14 @@ impl FriProofLayer { return Err(DeserializationError::UnconsumedBytes); } - // build batch Merkle proof + // build batch opening proof let mut reader = SliceReader::new(&self.paths); - let tree_depth = domain_size.ilog2() as u8; - let merkle_proof = BatchMerkleProof::deserialize(&mut reader, hashed_queries, tree_depth)?; + let multi_proof = ::read_from(&mut reader)?; if reader.has_more_bytes() { return Err(DeserializationError::UnconsumedBytes); } - Ok((query_values, merkle_proof)) + Ok((query_values, multi_proof)) } } diff --git a/fri/src/prover/channel.rs b/fri/src/prover/channel.rs index 7fa81e3ac..7231e757c 100644 --- a/fri/src/prover/channel.rs +++ b/fri/src/prover/channel.rs @@ -23,20 +23,18 @@ use math::FieldElement; /// commitments the prover has written into the channel up to this point. pub trait ProverChannel { /// Hash function used by the prover to commit to polynomial evaluations. - type Hasher: Hasher; + type Hasher: ElementHasher; /// Sends a layer commitment to the verifier. 
/// - /// A layer commitment is a root of a Merkle tree built from evaluations of a polynomial - /// at a given layer. The Merkle tree is built by first transposing evaluations into a - /// two-dimensional matrix where each row contains values needed to compute a single - /// value of the next FRI layer, and then putting each row of the matrix into a single - /// leaf of the Merkle tree. Thus, the number of elements grouped into a single leaf is - /// equal to the `folding_factor` used for FRI layer construction. - fn commit_fri_layer( - &mut self, - layer_root: <>::Hasher as Hasher>::Digest, - ); + /// A layer commitment is the commitment string of a vector commitment to the vector of + /// evaluations of a polynomial at a given layer. The vector commitment is built by + /// first transposing evaluations into a two-dimensional matrix where each row contains + /// values needed to compute a single value of the next FRI layer, and then computing + /// the hash of each row to get one entry of the vector being committed to. Thus, the number + /// of elements grouped into a single leaf is equal to the `folding_factor` used for FRI layer + /// construction. + fn commit_fri_layer(&mut self, layer_root: ::Digest); /// Returns an α drawn uniformly at random from the entire field. /// diff --git a/fri/src/prover/mod.rs b/fri/src/prover/mod.rs index 5bef65aae..17092ad34 100644 --- a/fri/src/prover/mod.rs +++ b/fri/src/prover/mod.rs @@ -6,14 +6,17 @@ use alloc::vec::Vec; use core::marker::PhantomData; -use crypto::{ElementHasher, Hasher, MerkleTree}; -use math::{fft, FieldElement, StarkField}; -use utils::{flatten_vector_elements, group_slice_elements, transpose_slice}; +use crypto::{ElementHasher, Hasher, VectorCommitment}; +use math::{fft, FieldElement}; +#[cfg(feature = "concurrent")] +use utils::iterators::*; +use utils::{ + flatten_vector_elements, group_slice_elements, iter_mut, transpose_slice, uninit_vector, +}; use crate::{ folding::{apply_drp, fold_positions}, proof::{FriProof, FriProofLayer}, - utils::hash_values, FriOptions, }; @@ -29,19 +32,19 @@ mod tests; /// Implements the prover component of the FRI protocol. /// /// Given evaluations of a function *f* over domain *D* (`evaluations`), a FRI prover generates -/// a proof that *f* is a polynomial of some bounded degree *d*, such that *d* < |*D*| / *blowup_factor*. -/// The proof is succinct: it exponentially smaller than `evaluations` and the verifier can verify it -/// exponentially faster than it would have taken them to read all `evaluations`. +/// a proof that *f* is a polynomial of some bounded degree *d*, such that +/// *d* < |*D*| / *blowup_factor*. +/// The proof is succinct: it is exponentially smaller than `evaluations` and the verifier can verify +/// it exponentially faster than it would have taken them to read all `evaluations`. /// /// The prover is parametrized with the following types: /// -/// * `B` specifies the base field of the STARK protocol. -/// * `E` specifies the field in which the FRI protocol is executed. This can be the same as the -/// base field `B`, but it can also be an extension of the base field in cases when the base -/// field is too small to provide desired security level for the FRI protocol. +/// * `E` specifies the field in which the FRI protocol is executed. /// * `C` specifies the type used to simulate prover-verifier interaction. -/// * `H` specifies the hash function used to build layer Merkle trees.
The same hash function -/// must be used in the prover channel to generate pseudo random values. +/// * `H` specifies the hash function used to build, for each layer, the vector of values committed to +/// using the specified vector commitment scheme. The same hash function must be used in +/// the prover channel to generate pseudo-random values. +/// * `V` specifies the vector commitment scheme used in order to commit to each layer. /// /// Proof generation is performed in two phases: commit phase and query phase. /// @@ -54,12 +57,12 @@ mod tests; /// a number of coefficients less than or equal to `remainder_max_degree_plus_1`. /// /// At each layer of reduction, the prover commits to the current set of evaluations. This is done -/// by building a Merkle tree from the evaluations and sending the root of the tree to the verifier -/// (via [ProverChannel]). The Merkle tree is build in such a way that all evaluations needed to -/// compute a single value in the next FRI layer are grouped into the same leaf (the number of -/// evaluations needed to compute a single element in the next FRI layer is equal to the -/// `folding_factor`). This allows us to decommit all these values using a single Merkle -/// authentication path. +/// by building a vector commitment to hashed evaluations and sending the commitment string +/// to the verifier (via [ProverChannel]). The vector commitment is built in such a way that all +/// evaluations needed to compute a single value in the next FRI layer are grouped into the same +/// leaf (the number of evaluations needed to compute a single element in the next FRI layer is +/// equal to the `folding_factor`). This allows us to decommit all these values using a single +/// individual opening proof. /// /// After committing to the set of evaluations at the current layer, the prover draws a random /// field element α from the channel, and uses it to build the next FRI layer. In the interactive /// version of the protocol, the verifier draws α uniformly at random from the entire field and /// sends it to the prover. In the non-interactive version, α is pseudo-randomly generated based /// on the values the prover has written into the channel up to that point. /// -/// The prover keeps all FRI layers (consisting of evaluations and corresponding Merkle trees) in -/// its internal state. +/// The prover keeps all FRI layers (consisting of evaluations and corresponding vector +/// commitments) in its internal state. /// /// # Query phase /// In the query phase, which is executed via [build_proof()](FriProver::build_proof()) function, @@ -89,23 +92,23 @@ /// /// Calling [build_layers()](FriProver::build_layers()) when the internal state is dirty, or /// calling [build_proof()](FriProver::build_proof()) on a clean state will result in a panic.
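The commit/query split described above maps onto a short driver. The sketch below mirrors the `fri_prove_verify` test that appears later in this patch; channel and options setup is elided, and `Blake3_256<BaseElement>` is an assumed hasher choice.

// commit phase: fold the evaluations layer by layer, committing to each layer
let mut prover =
    FriProver::<_, _, _, MerkleTree<Blake3_256<BaseElement>>>::new(options.clone());
prover.build_layers(&mut channel, evaluations.clone());

// query phase: open every committed layer at the drawn query positions
let positions = channel.draw_query_positions(0);
let proof = prover.build_proof(&positions);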
-pub struct FriProver +pub struct FriProver where - B: StarkField, - E: FieldElement, + E: FieldElement, C: ProverChannel, - H: ElementHasher, + H: ElementHasher, + V: VectorCommitment, { options: FriOptions, - layers: Vec>, + layers: Vec>, remainder_poly: FriRemainder, _channel: PhantomData, } -struct FriLayer, H: Hasher> { - tree: MerkleTree, +struct FriLayer> { + commitment: V, evaluations: Vec, - _base_field: PhantomData, + _h: PhantomData, } struct FriRemainder(Vec); @@ -113,12 +116,12 @@ struct FriRemainder(Vec); // PROVER IMPLEMENTATION // ================================================================================================ -impl FriProver +impl FriProver where - B: StarkField, - E: FieldElement, + E: FieldElement, C: ProverChannel, - H: ElementHasher, + H: ElementHasher, + V: VectorCommitment, { // CONSTRUCTOR // -------------------------------------------------------------------------------------------- @@ -141,7 +144,7 @@ where } /// Returns offset of the domain over which FRI protocol is executed by this prover. - pub fn domain_offset(&self) -> B { + pub fn domain_offset(&self) -> E::BaseField { self.options.domain_offset() } @@ -166,9 +169,10 @@ where /// application of the DRP the degree of the function (and size of the domain) is reduced by /// `folding_factor` until the remaining evaluations can be represented by a remainder polynomial /// with at most `remainder_max_degree_plus_1` number of coefficients. - /// At each layer of reduction the current evaluations are committed to using a Merkle tree, - /// and the root of this tree is written into the channel. After this the prover draws a random - /// field element α from the channel, and uses it in the next application of the DRP. + /// At each layer of reduction the current evaluations are committed to using a vector commitment + /// scheme, and the commitment string of this vector commitment is written into the channel. + /// After this the prover draws a random field element α from the channel, and uses it in + /// the next application of the DRP. /// /// # Panics /// Panics if the prover state is dirty (the vector of layers is not empty). @@ -197,23 +201,23 @@ where /// alpha from the channel and use it to perform degree-respecting projection. fn build_layer(&mut self, channel: &mut C, evaluations: &mut Vec) { // commit to the evaluations at the current layer; we do this by first transposing the - // evaluations into a matrix of N columns, and then building a Merkle tree from the - // rows of this matrix; we do this so that we could de-commit to N values with a single - // Merkle authentication path. + // evaluations into a matrix of N columns, then hashing each row into a digest, and finally + // committing to the vector of these digests; we do this so that we can de-commit to N values + // with a single opening proof.
let transposed_evaluations = transpose_slice(evaluations); - let hashed_evaluations = hash_values::(&transposed_evaluations); - let evaluation_tree = - MerkleTree::::new(hashed_evaluations).expect("failed to construct FRI layer tree"); - channel.commit_fri_layer(*evaluation_tree.root()); + let evaluation_vector_commitment = + build_layer_commitment::<_, _, V, N>(&transposed_evaluations) + .expect("failed to construct FRI layer commitment"); + channel.commit_fri_layer(evaluation_vector_commitment.commitment()); // draw a pseudo-random coefficient from the channel, and use it in degree-respecting // projection to reduce the degree of evaluations by N let alpha = channel.draw_fri_alpha(); *evaluations = apply_drp(&transposed_evaluations, self.domain_offset(), alpha); self.layers.push(FriLayer { - tree: evaluation_tree, + commitment: evaluation_vector_commitment, evaluations: flatten_vector_elements(transposed_evaluations), - _base_field: PhantomData, + _h: PhantomData, }); } @@ -233,9 +237,9 @@ where /// Executes query phase of FRI protocol. /// /// For each of the provided `positions`, corresponding evaluations from each of the layers - /// (excluding the remainder layer) are recorded into the proof together with Merkle - /// authentication paths from the root of layer commitment trees. For the remainder, we send - /// the whole remainder polynomial resulting from interpolating the remainder layer. + /// (excluding the remainder layer) are recorded into the proof together with a batch opening + /// proof against the sent vector commitment. For the remainder, we send the whole remainder + /// polynomial resulting from interpolating the remainder layer evaluations. /// /// # Panics /// Panics if the prover state is clean (no FRI layers have been built yet). @@ -256,10 +260,10 @@ where // sort of a static dispatch for folding_factor parameter let proof_layer = match folding_factor { - 2 => query_layer::(&self.layers[i], &positions), - 4 => query_layer::(&self.layers[i], &positions), - 8 => query_layer::(&self.layers[i], &positions), - 16 => query_layer::(&self.layers[i], &positions), + 2 => query_layer::(&self.layers[i], &positions), + 4 => query_layer::(&self.layers[i], &positions), + 8 => query_layer::(&self.layers[i], &positions), + 16 => query_layer::(&self.layers[i], &positions), _ => unimplemented!("folding factor {} is not supported", folding_factor), }; @@ -283,15 +287,15 @@ where /// Builds a single proof layer by querying the evaluations of the passed-in FRI layer at the /// specified positions.
-fn query_layer, H: Hasher, const N: usize>( - layer: &FriLayer, +fn query_layer, const N: usize>( + layer: &FriLayer, positions: &[usize], ) -> FriProofLayer { - // build Merkle authentication paths for all query positions + // build a batch opening proof for all query positions let proof = layer - .tree - .prove_batch(positions) - .expect("failed to generate a Merkle proof for FRI layer queries"); + .commitment + .open_many(positions) + .expect("failed to generate a batch opening proof for FRI layer queries"); // build a list of polynomial evaluations at each position; since evaluations in FRI layers // are stored in transposed form, a position refers to N evaluations which are committed @@ -301,6 +305,24 @@ fn query_layer, H: Hasher, const N for &position in positions.iter() { queried_values.push(evaluations[position]); } + FriProofLayer::new::<_, _, V, N>(queried_values, proof.1) +} + +/// Hashes each of the arrays in the provided slice and returns a vector commitment to the resulting +/// hashes. +pub fn build_layer_commitment( + values: &[[E; N]], +) -> Result>::Error> +where + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, +{ + let mut hashed_evaluations: Vec = unsafe { uninit_vector(values.len()) }; + iter_mut!(hashed_evaluations, 1024).zip(values).for_each(|(e, v)| { + let digest: H::Digest = H::hash_elements(v); + *e = digest + }); - FriProofLayer::new(queried_values, proof) + V::new(hashed_evaluations) } diff --git a/fri/src/prover/tests.rs b/fri/src/prover/tests.rs index 87cc7c798..e765092c5 100644 --- a/fri/src/prover/tests.rs +++ b/fri/src/prover/tests.rs @@ -5,7 +5,7 @@ use alloc::vec::Vec; -use crypto::{hashers::Blake3_256, DefaultRandomCoin, Hasher, RandomCoin}; +use crypto::{hashers::Blake3_256, DefaultRandomCoin, Hasher, MerkleTree, RandomCoin}; use math::{fft, fields::f128::BaseElement, FieldElement}; use utils::{Deserializable, Serializable, SliceReader}; @@ -76,14 +76,14 @@ pub fn verify_proof( let proof = FriProof::read_from(&mut reader).unwrap(); // verify the proof - let mut channel = DefaultVerifierChannel::::new( + let mut channel = DefaultVerifierChannel::>::new( proof, commitments, domain_size, options.folding_factor(), ) .unwrap(); - let mut coin = DefaultRandomCoin::::new(&[]); + let mut coin = crypto::DefaultRandomCoin::::new(&[]); let verifier = FriVerifier::new(&mut channel, &mut coin, options.clone(), max_degree)?; let queried_evaluations = positions.iter().map(|&p| evaluations[p]).collect::>(); verifier.verify(&mut channel, &queried_evaluations, positions) @@ -104,7 +104,7 @@ fn fri_prove_verify( let evaluations = build_evaluations(trace_length, lde_blowup); // instantiate the prover and generate the proof - let mut prover = FriProver::new(options.clone()); + let mut prover = FriProver::<_, _, _, MerkleTree>::new(options.clone()); prover.build_layers(&mut channel, evaluations.clone()); let positions = channel.draw_query_positions(0); let proof = prover.build_proof(&positions); diff --git a/fri/src/utils.rs b/fri/src/utils.rs index 1138b9608..725e5b4c9 100644 --- a/fri/src/utils.rs +++ b/fri/src/utils.rs @@ -5,20 +5,14 @@ use alloc::vec::Vec; -use crypto::ElementHasher; -use math::FieldElement; -#[cfg(feature = "concurrent")] -use utils::iterators::*; -use utils::{iter_mut, uninit_vector}; - -/// Maps positions in the evaluation domain to indexes of commitment Merkle tree. +/// Maps positions in the evaluation domain to indexes of the vector commitment.
pub fn map_positions_to_indexes( positions: &[usize], source_domain_size: usize, folding_factor: usize, num_partitions: usize, ) -> Vec { - // if there was only 1 partition, order of elements in the commitment tree + // if there was only 1 partition, order of elements in the vector commitment // is the same as the order of elements in the evaluation domain if num_partitions == 1 { return positions.to_vec(); @@ -37,16 +31,3 @@ pub fn map_positions_to_indexes( result } - -/// Hashes each of the arrays in the provided slice and returns a vector of resulting hashes. -pub fn hash_values(values: &[[E; N]]) -> Vec -where - E: FieldElement, - H: ElementHasher, -{ - let mut result: Vec = unsafe { uninit_vector(values.len()) }; - iter_mut!(result, 1024).zip(values).for_each(|(r, v)| { - *r = H::hash_elements(v); - }); - result -} diff --git a/fri/src/verifier/channel.rs b/fri/src/verifier/channel.rs index 0c34f73a1..6f8709858 100644 --- a/fri/src/verifier/channel.rs +++ b/fri/src/verifier/channel.rs @@ -4,8 +4,9 @@ // LICENSE file in the root directory of this source tree. use alloc::vec::Vec; +use core::marker::PhantomData; -use crypto::{BatchMerkleProof, ElementHasher, Hasher, MerkleTree}; +use crypto::{ElementHasher, Hasher, VectorCommitment}; use math::FieldElement; use utils::{group_slice_elements, DeserializationError}; @@ -25,6 +26,8 @@ use crate::{FriProof, VerifierError}; pub trait VerifierChannel { /// Hash function used by the prover to commit to polynomial evaluations. type Hasher: ElementHasher; + /// Vector commitment used to commit to polynomial evaluations. + type VectorCommitment: VectorCommitment; // REQUIRED METHODS // -------------------------------------------------------------------------------------------- @@ -39,9 +42,7 @@ pub trait VerifierChannel { /// from the entire field after each layer commitment is received. In the non-interactive /// version, the verifier can read all layer commitments at once, and then generate α values /// locally. - fn read_fri_layer_commitments( - &mut self, - ) -> Vec<<>::Hasher as Hasher>::Digest>; + fn read_fri_layer_commitments(&mut self) -> Vec<::Digest>; /// Reads and removes from the channel evaluations of the polynomial at the queried positions /// for the next FRI layer. @@ -50,20 +51,21 @@ pub trait VerifierChannel { /// the verifier during the query phase of the FRI protocol. /// /// It is expected that layer queries and layer proofs at the same FRI layer are consistent. - /// That is, query values hash into the leaf nodes of corresponding Merkle authentication - /// paths. + /// That is, query values hash into the leaf nodes of corresponding vector commitment. fn take_next_fri_layer_queries(&mut self) -> Vec; - /// Reads and removes from the channel Merkle authentication paths for queried evaluations for - /// the next FRI layer. + /// Reads and removes from the channel vector commitment opening proofs of queried evaluations + /// for the next FRI layer. /// /// In the interactive version of the protocol, these authentication paths are sent from the /// prover to the verifier during the query phase of the FRI protocol. /// /// It is expected that layer proofs and layer queries at the same FRI layer are consistent. - /// That is, query values hash into the leaf nodes of corresponding Merkle authentication - /// paths. - fn take_next_fri_layer_proof(&mut self) -> BatchMerkleProof; + /// That is, query values hash into the elements of the vector committed to using the specified + /// vector commitment scheme. 
+ fn take_next_fri_layer_proof( + &mut self, + ) -> >::MultiProof; /// Reads and removes the remainder polynomial from the channel. fn take_fri_remainder(&mut self) -> Vec; @@ -81,16 +83,29 @@ pub trait VerifierChannel { fn read_layer_queries( &mut self, positions: &[usize], - commitment: &<>::Hasher as Hasher>::Digest, + commitment: &::Digest, ) -> Result, VerifierError> { let layer_proof = self.take_next_fri_layer_proof(); - MerkleTree::::verify_batch(commitment, positions, &layer_proof) - .map_err(|_| VerifierError::LayerCommitmentMismatch)?; - - // TODO: make sure layer queries hash into leaves of layer proof - let layer_queries = self.take_next_fri_layer_queries(); - Ok(group_slice_elements(&layer_queries).to_vec()) + // build the values (i.e., polynomial evaluations over a coset of a multiplicative subgroup + // of the current evaluation domain) corresponding to each leaf of the layer commitment + let leaf_values = group_slice_elements(&layer_queries); + // hash the aforementioned values to get the leaves to be verified against the previously + // received commitment + let hashed_values: Vec<::Digest> = leaf_values + .iter() + .map(|seg| ::hash_elements(seg)) + .collect(); + + <>::VectorCommitment as VectorCommitment>::verify_many( + *commitment, + positions, + &hashed_values, + &layer_proof, + ) + .map_err(|_| VerifierError::LayerCommitmentMismatch)?; + + Ok(leaf_values.to_vec()) } /// Returns FRI remainder polynomial read from this channel. @@ -110,18 +125,24 @@ pub trait VerifierChannel { /// /// Though this implementation is primarily intended for testing purposes, it can be used in /// production use cases as well. -pub struct DefaultVerifierChannel> { +pub struct DefaultVerifierChannel< + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, +> { layer_commitments: Vec, - layer_proofs: Vec>, + layer_proofs: Vec, layer_queries: Vec>, remainder: Vec, num_partitions: usize, + _h: PhantomData, } -impl DefaultVerifierChannel +impl DefaultVerifierChannel where E: FieldElement, H: ElementHasher, + V: VectorCommitment, { /// Builds a new verifier channel from the specified [FriProof]. 
/// @@ -137,7 +158,7 @@ where let remainder = proof.parse_remainder()?; let (layer_queries, layer_proofs) = - proof.parse_layers::(domain_size, folding_factor)?; + proof.parse_layers::(domain_size, folding_factor)?; Ok(DefaultVerifierChannel { layer_commitments, @@ -145,16 +166,19 @@ where layer_queries, remainder, num_partitions, + _h: PhantomData, }) } } -impl VerifierChannel for DefaultVerifierChannel +impl VerifierChannel for DefaultVerifierChannel where E: FieldElement, H: ElementHasher, + V: VectorCommitment, { type Hasher = H; + type VectorCommitment = V; fn read_fri_num_partitions(&self) -> usize { self.num_partitions @@ -164,7 +188,7 @@ where self.layer_commitments.drain(..).collect() } - fn take_next_fri_layer_proof(&mut self) -> BatchMerkleProof { + fn take_next_fri_layer_proof(&mut self) -> V::MultiProof { self.layer_proofs.remove(0) } diff --git a/fri/src/verifier/mod.rs b/fri/src/verifier/mod.rs index 9067fcb45..ff0582b2c 100644 --- a/fri/src/verifier/mod.rs +++ b/fri/src/verifier/mod.rs @@ -8,7 +8,7 @@ use alloc::vec::Vec; use core::{marker::PhantomData, mem}; -use crypto::{ElementHasher, RandomCoin}; +use crypto::{ElementHasher, RandomCoin, VectorCommitment}; use math::{polynom, FieldElement, StarkField}; use crate::{folding::fold_positions, utils::map_positions_to_indexes, FriOptions, VerifierError}; @@ -48,21 +48,22 @@ pub use channel::{DefaultVerifierChannel, VerifierChannel}; /// # Query phase /// During the query phase, which is executed via [verify()](FriVerifier::verify()) function, /// the verifier sends a set of positions in the domain *D* to the prover, and the prover responds -/// with polynomial evaluations at these positions (together with corresponding Merkle paths) +/// with polynomial evaluations at these positions (together with corresponding opening proofs) /// across all FRI layers. The verifier then checks that: -/// * The Merkle paths are valid against the layer commitments the verifier received during +/// * The opening proofs are valid against the layer commitments the verifier received during /// the commit phase. /// * The evaluations are consistent across FRI layers (i.e., the degree-respecting projection /// was applied correctly). /// * The degree of the polynomial implied by evaluations at the last FRI layer (the remainder) /// is smaller than the degree resulting from reducing degree *d* by `folding_factor` at each /// FRI layer. -pub struct FriVerifier +pub struct FriVerifier where E: FieldElement, C: VerifierChannel, H: ElementHasher, R: RandomCoin, + V: VectorCommitment, { max_poly_degree: usize, domain_size: usize, @@ -73,14 +74,16 @@ where num_partitions: usize, _channel: PhantomData, _public_coin: PhantomData, + _vector_com: PhantomData, } -impl FriVerifier +impl FriVerifier where E: FieldElement, - C: VerifierChannel, + C: VerifierChannel, H: ElementHasher, R: RandomCoin, + V: VectorCommitment, { /// Returns a new instance of FRI verifier created from the specified parameters. 
/// @@ -146,6 +149,7 @@ where num_partitions, _channel: PhantomData, _public_coin: PhantomData, + _vector_com: PhantomData, }) } @@ -251,14 +255,14 @@ where // determine which evaluations were queried in the folded layer let mut folded_positions = fold_positions(&positions, domain_size, self.options.folding_factor()); - // determine where these evaluations are in the commitment Merkle tree + // determine where these evaluations are in the vector commitment let position_indexes = map_positions_to_indexes( &folded_positions, domain_size, self.options.folding_factor(), self.num_partitions, ); - // read query values from the specified indexes in the Merkle tree + // read query values from the specified indexes let layer_commitment = self.layer_commitments[depth]; // TODO: add layer depth to the potential error message let layer_values = channel.read_layer_queries(&position_indexes, &layer_commitment)?; diff --git a/prover/benches/lagrange_kernel.rs b/prover/benches/lagrange_kernel.rs index 559af93ea..7ee8ab3c3 100644 --- a/prover/benches/lagrange_kernel.rs +++ b/prover/benches/lagrange_kernel.rs @@ -11,7 +11,7 @@ use air::{ TraceInfo, TransitionConstraintDegree, }; use criterion::{criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion}; -use crypto::{hashers::Blake3_256, DefaultRandomCoin, RandomCoin}; +use crypto::{hashers::Blake3_256, DefaultRandomCoin, MerkleTree, RandomCoin}; use math::{fields::f64::BaseElement, ExtensionOf, FieldElement}; use winter_prover::{ matrix::ColMatrix, DefaultConstraintEvaluator, DefaultTraceLde, Prover, ProverGkrProof, @@ -183,8 +183,10 @@ impl Prover for LagrangeProver { type Air = LagrangeKernelAir; type Trace = LagrangeTrace; type HashFn = Blake3_256; + type VC = MerkleTree>; type RandomCoin = DefaultRandomCoin; - type TraceLde> = DefaultTraceLde; + type TraceLde> = + DefaultTraceLde; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, LagrangeKernelAir, E>; diff --git a/prover/src/channel.rs b/prover/src/channel.rs index c3d99675a..34a39d3fc 100644 --- a/prover/src/channel.rs +++ b/prover/src/channel.rs @@ -10,7 +10,7 @@ use air::{ proof::{Commitments, Context, OodFrame, Proof, Queries, TraceOodFrame}, Air, ConstraintCompositionCoefficients, DeepCompositionCoefficients, }; -use crypto::{ElementHasher, RandomCoin}; +use crypto::{ElementHasher, RandomCoin, VectorCommitment}; use fri::FriProof; use math::{FieldElement, ToElements}; #[cfg(feature = "concurrent")] @@ -19,12 +19,13 @@ use utils::iterators::*; // TYPES AND INTERFACES // ================================================================================================ -pub struct ProverChannel<'a, A, E, H, R> +pub struct ProverChannel<'a, A, E, H, R, V> where A: Air, E: FieldElement, H: ElementHasher, R: RandomCoin, + V: VectorCommitment, { air: &'a A, public_coin: R, @@ -33,17 +34,19 @@ where ood_frame: OodFrame, pow_nonce: u64, _field_element: PhantomData, + _vector_commitment: PhantomData, } // PROVER CHANNEL IMPLEMENTATION // ================================================================================================ -impl<'a, A, E, H, R> ProverChannel<'a, A, E, H, R> +impl<'a, A, E, H, R, V> ProverChannel<'a, A, E, H, R, V> where A: Air, E: FieldElement, H: ElementHasher, R: RandomCoin, + V: VectorCommitment, { // CONSTRUCTOR // -------------------------------------------------------------------------------------------- @@ -65,6 +68,7 @@ where ood_frame: OodFrame::default(), pow_nonce: 0, _field_element: PhantomData, + _vector_commitment: PhantomData, } 
} @@ -199,12 +203,13 @@ where // FRI PROVER CHANNEL IMPLEMENTATION // ================================================================================================ -impl<'a, A, E, H, R> fri::ProverChannel for ProverChannel<'a, A, E, H, R> +impl<'a, A, E, H, R, V> fri::ProverChannel for ProverChannel<'a, A, E, H, R, V> where A: Air, E: FieldElement, H: ElementHasher, R: RandomCoin, + V: VectorCommitment, { type Hasher = H; diff --git a/prover/src/constraints/commitment.rs b/prover/src/constraints/commitment.rs index a28a2f873..ac71fdc94 100644 --- a/prover/src/constraints/commitment.rs +++ b/prover/src/constraints/commitment.rs @@ -4,9 +4,10 @@ // LICENSE file in the root directory of this source tree. use alloc::vec::Vec; +use core::marker::PhantomData; use air::proof::Queries; -use crypto::{ElementHasher, MerkleTree}; +use crypto::{ElementHasher, VectorCommitment}; use math::FieldElement; use super::RowMatrix; @@ -18,44 +19,54 @@ use super::RowMatrix; /// /// The commitment consists of two components: /// * Evaluations of composition polynomial columns over the LDE domain. -/// * Merkle tree where each leaf in the tree corresponds to a row in the composition polynomial -/// evaluation matrix. -pub struct ConstraintCommitment> { +/// * Vector commitment where each vector element corresponds to the digest of a row in +/// the composition polynomial evaluation matrix. +pub struct ConstraintCommitment< + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, +> { evaluations: RowMatrix, - commitment: MerkleTree, + vector_commitment: V, + _h: PhantomData, } -impl> ConstraintCommitment { +impl ConstraintCommitment +where + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, +{ /// Creates a new constraint evaluation commitment from the provided composition polynomial - /// evaluations and the corresponding Merkle tree commitment. - pub fn new(evaluations: RowMatrix, commitment: MerkleTree) -> ConstraintCommitment { + /// evaluations and the corresponding vector commitment. + pub fn new(evaluations: RowMatrix, commitment: V) -> ConstraintCommitment { assert_eq!( evaluations.num_rows(), - commitment.leaves().len(), - "number of rows in constraint evaluation matrix must be the same as number of leaves in constraint commitment" + commitment.domain_len(), + "number of rows in constraint evaluation matrix must be the same as the size \ + of the vector commitment domain" ); - ConstraintCommitment { evaluations, commitment } - } - /// Returns the root of the commitment Merkle tree. - pub fn root(&self) -> H::Digest { - *self.commitment.root() + ConstraintCommitment { + evaluations, + vector_commitment: commitment, + _h: PhantomData, + } } - /// Returns the depth of the commitment Merkle tree. - #[allow(unused)] - pub fn tree_depth(&self) -> usize { - self.commitment.depth() + /// Returns the commitment. + pub fn commitment(&self) -> H::Digest { + self.vector_commitment.commitment() } - /// Returns constraint evaluations at the specified positions along with Merkle authentication - /// paths from the root of the commitment to these evaluations. + /// Returns constraint evaluations at the specified positions along with a batch opening proof + /// against the vector commitment. 
pub fn query(self, positions: &[usize]) -> Queries { - // build Merkle authentication paths to the leaves specified by positions - let merkle_proof = self - .commitment - .prove_batch(positions) - .expect("failed to generate a Merkle proof for constraint queries"); + // build batch opening proof to the leaves specified by positions + let opening_proof = self + .vector_commitment + .open_many(positions) + .expect("failed to generate a batch opening proof for constraint queries"); // determine a set of evaluations corresponding to each position let mut evaluations = Vec::new(); @@ -64,6 +75,6 @@ impl> ConstraintComm evaluations.push(row); } - Queries::new(merkle_proof, evaluations) + Queries::new::(opening_proof.1, evaluations) } } diff --git a/prover/src/lib.rs b/prover/src/lib.rs index 4874973f2..ac0e82be2 100644 --- a/prover/src/lib.rs +++ b/prover/src/lib.rs @@ -50,7 +50,7 @@ pub use air::{ }; use air::{AuxRandElements, GkrRandElements}; pub use crypto; -use crypto::{ElementHasher, RandomCoin}; +use crypto::{ElementHasher, RandomCoin, VectorCommitment}; use fri::FriProver; pub use math; use math::{ @@ -58,7 +58,6 @@ use math::{ fields::{CubeExtension, QuadExtension}, ExtensibleField, FieldElement, StarkField, ToElements, }; -use maybe_async::{maybe_async, maybe_await}; use tracing::{event, info_span, instrument, Level}; pub use utils::{ iterators, ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable, @@ -81,6 +80,7 @@ mod composer; use composer::DeepCompositionPoly; mod trace; +use maybe_async::{maybe_async, maybe_await}; pub use trace::{ AuxTraceWithMetadata, DefaultTraceLde, Trace, TraceLde, TracePolyTable, TraceTable, TraceTableFragment, @@ -139,11 +139,14 @@ pub trait Prover { /// Hash function to be used. type HashFn: ElementHasher; + /// Vector commitment scheme to be used. + type VC: VectorCommitment; + /// PRNG to be used for generating random field elements. - type RandomCoin: RandomCoin + Send + Sync; + type RandomCoin: RandomCoin; /// Trace low-degree extension for building the LDEs of trace segments and their commitments. - type TraceLde: TraceLde + Send + Sync + type TraceLde: TraceLde where E: FieldElement; @@ -288,10 +291,11 @@ pub trait Prover { // create a channel which is used to simulate interaction between the prover and the // verifier; the channel will be used to commit to values and to draw randomness that // should come from the verifier. 
- let mut channel = ProverChannel::::new( - &air, - pub_inputs_elements, - ); + let mut channel = + ProverChannel::::new( + &air, + pub_inputs_elements, + ); // 1 ----- Commit to the execution trace -------------------------------------------------- @@ -334,15 +338,14 @@ pub trait Prover { // commit to the auxiliary trace segment let aux_segment_polys = { - // extend the auxiliary trace segment and build a Merkle tree from the extended - // trace + // extend the auxiliary trace segment and commit to the extended trace let span = info_span!("commit_to_aux_trace_segment").entered(); - let (aux_segment_polys, aux_segment_root) = + let (aux_segment_polys, aux_segment_commitment) = trace_lde.set_aux_trace(&aux_trace, &domain); - // commit to the LDE of the extended auxiliary trace segment by writing the root of - // its Merkle tree into the channel - channel.commit_trace(aux_segment_root); + // commit to the LDE of the extended auxiliary trace segment by writing its + // commitment into the channel + channel.commit_trace(aux_segment_commitment); drop(span); aux_segment_polys @@ -450,7 +453,7 @@ pub trait Prover { // 6 ----- compute FRI layers for the composition polynomial ------------------------------ let fri_options = air.options().to_fri_options(); let num_layers = fri_options.num_fri_layers(lde_domain_size); - let mut fri_prover = FriProver::new(fri_options); + let mut fri_prover = FriProver::<_, _, _, Self::VC>::new(fri_options); info_span!("compute_fri_layers", num_layers) .in_scope(|| fri_prover.build_layers(&mut channel, deep_evaluations)); @@ -479,13 +482,12 @@ pub trait Prover { let fri_proof = fri_prover.build_proof(&query_positions); // query the execution trace at the selected positions; for each query, we need the - // state of the trace at that position + Merkle authentication path + // state of the trace at that position and a batch opening proof for the specified queries let trace_queries = trace_lde.query(&query_positions); // query the constraint commitment at the selected positions; for each query, we need - // just a Merkle authentication path. this is because constraint evaluations for each - // step are merged into a single value and Merkle authentication paths contain these - // values already + // the constraint evaluations at that position and a batch opening proof for the + // specified queries let constraint_queries = constraint_commitment.query(&query_positions); // build the proof object @@ -512,15 +514,15 @@ pub trait Prover { /// columns each of size equal to trace length, and finally evaluating each composition /// polynomial column over the LDE domain. /// - /// The commitment is computed by hashing each row in the evaluation matrix, and then building - /// a Merkle tree from the resulting hashes. + /// The commitment is computed by building a vector containing the hashes of each row in + /// the evaluation matrix, and then building a vector commitment over the resulting vector.
#[maybe_async] fn build_constraint_commitment( &self, composition_poly_trace: CompositionPolyTrace, num_constraint_composition_columns: usize, domain: &StarkDomain, - ) -> (ConstraintCommitment, CompositionPoly) + ) -> (ConstraintCommitment, CompositionPoly) where E: FieldElement, { @@ -549,13 +551,12 @@ pub trait Prover { // finally, build constraint evaluation commitment let constraint_commitment = info_span!( "compute_constraint_evaluation_commitment", - tree_depth = domain_size.ilog2() + log_domain_size = domain_size.ilog2() ) .in_scope(|| { - let commitment = composed_evaluations.commit_to_rows(); + let commitment = composed_evaluations.commit_to_rows::(); ConstraintCommitment::new(composed_evaluations, commitment) }); - assert_eq!(constraint_commitment.tree_depth(), domain_size.ilog2() as usize); (constraint_commitment, composition_poly) } @@ -567,21 +568,21 @@ pub trait Prover { &self, trace: &Self::Trace, domain: &StarkDomain, - channel: &mut ProverChannel<'_, Self::Air, E, Self::HashFn, Self::RandomCoin>, + channel: &mut ProverChannel<'_, Self::Air, E, Self::HashFn, Self::RandomCoin, Self::VC>, ) -> (Self::TraceLde, TracePolyTable) where E: FieldElement, { - // extend the main execution trace and build a Merkle tree from the extended trace + // extend the main execution trace and commit to the extended trace let (trace_lde, trace_polys) = maybe_await!(self.new_trace_lde(trace.info(), trace.main_segment(), domain)); // get the commitment to the main trace segment LDE - let main_trace_root = trace_lde.get_main_trace_commitment(); + let main_trace_commitment = trace_lde.get_main_trace_commitment(); - // commit to the LDE of the main trace by writing the root of its Merkle tree into + // commit to the LDE of the main trace by writing the commitment string into // the channel - channel.commit_trace(main_trace_root); + channel.commit_trace(main_trace_commitment); (trace_lde, trace_polys) } @@ -594,8 +595,8 @@ pub trait Prover { air: &Self::Air, composition_poly_trace: CompositionPolyTrace, domain: &StarkDomain, - channel: &mut ProverChannel<'_, Self::Air, E, Self::HashFn, Self::RandomCoin>, + channel: &mut ProverChannel<'_, Self::Air, E, Self::HashFn, Self::RandomCoin, Self::VC>, - ) -> (ConstraintCommitment, CompositionPoly) + ) -> (ConstraintCommitment, CompositionPoly) where E: FieldElement, { @@ -608,9 +609,9 @@ pub trait Prover { domain, )); - // then, commit to the evaluations of constraints by writing the root of the constraint - // Merkle tree into the channel - channel.commit_constraints(constraint_commitment.root()); + // then, commit to the evaluations of constraints by writing the commitment string of + // the constraint commitment into the channel + channel.commit_constraints(constraint_commitment.commitment()); (constraint_commitment, composition_poly) } diff --git a/prover/src/matrix/col_matrix.rs b/prover/src/matrix/col_matrix.rs index 57a7f40ee..61f67aca1 100644 --- a/prover/src/matrix/col_matrix.rs +++ b/prover/src/matrix/col_matrix.rs @@ -6,7 +6,7 @@ use alloc::vec::Vec; use core::{iter::FusedIterator, slice}; -use crypto::{ElementHasher, MerkleTree}; +use crypto::{ElementHasher, VectorCommitment}; use math::{fft, polynom, FieldElement}; #[cfg(feature = "concurrent")] use utils::iterators::*; @@ -256,13 +256,13 @@ impl ColMatrix { /// /// The commitment is built as follows: /// * Each row of the matrix is hashed into a single digest of the specified hash function.
- /// * The resulting values are used to built a binary Merkle tree such that each row digest - /// becomes a leaf in the tree. Thus, the number of leaves in the tree is equal to the - /// number of rows in the matrix. - /// * The resulting Merkle tree is return as the commitment to the entire matrix. - pub fn commit_to_rows(&self) -> MerkleTree + /// * The resulting vector of digests is committed to using the specified vector commitment + /// scheme. + /// * The resulting commitment is returned as the commitment to the entire matrix. + pub fn commit_to_rows(&self) -> V where H: ElementHasher, + V: VectorCommitment, { // allocate vector to store row hashes let mut row_hashes = unsafe { uninit_vector::(self.num_rows()) }; @@ -282,8 +282,7 @@ impl ColMatrix { } ); - // build Merkle tree out of hashed rows - MerkleTree::new(row_hashes).expect("failed to construct trace Merkle tree") + V::new(row_hashes).expect("failed to construct trace vector commitment") } // CONVERSIONS diff --git a/prover/src/matrix/row_matrix.rs b/prover/src/matrix/row_matrix.rs index ded689bd6..f42ca0e7a 100644 --- a/prover/src/matrix/row_matrix.rs +++ b/prover/src/matrix/row_matrix.rs @@ -5,7 +5,7 @@ use alloc::vec::Vec; -use crypto::{ElementHasher, MerkleTree}; +use crypto::{ElementHasher, VectorCommitment}; use math::{fft, FieldElement, StarkField}; #[cfg(feature = "concurrent")] use utils::iterators::*; @@ -176,13 +176,14 @@ impl RowMatrix { /// /// The commitment is built as follows: /// * Each row of the matrix is hashed into a single digest of the specified hash function. - /// * The resulting values are used to build a binary Merkle tree such that each row digest - /// becomes a leaf in the tree. Thus, the number of leaves in the tree is equal to the - /// number of rows in the matrix. - /// * The resulting Merkle tree is returned as the commitment to the entire matrix. - pub fn commit_to_rows(&self) -> MerkleTree + /// The result is a vector of digests of length equal to the number of matrix rows. + /// * A vector commitment is computed for the resulting vector using the specified vector + /// commitment scheme. + /// * The resulting vector commitment is returned as the commitment to the entire matrix. + pub fn commit_to_rows(&self) -> V where H: ElementHasher, + V: VectorCommitment, { // allocate vector to store row hashes let mut row_hashes = unsafe { uninit_vector::(self.num_rows()) }; @@ -198,8 +199,8 @@ impl RowMatrix { } ); - // build Merkle tree out of hashed rows - MerkleTree::new(row_hashes).expect("failed to construct trace Merkle tree") + // build the vector commitment to the hashed rows + V::new(row_hashes).expect("failed to construct trace vector commitment") } } diff --git a/prover/src/trace/trace_lde/default/mod.rs b/prover/src/trace/trace_lde/default/mod.rs index b5c7c1cce..e06839d53 100644 --- a/prover/src/trace/trace_lde/default/mod.rs +++ b/prover/src/trace/trace_lde/default/mod.rs @@ -4,14 +4,14 @@ // LICENSE file in the root directory of this source tree. 
use alloc::vec::Vec; +use core::marker::PhantomData; -use air::LagrangeKernelEvaluationFrame; -use crypto::MerkleTree; +use air::{proof::Queries, LagrangeKernelEvaluationFrame, TraceInfo}; +use crypto::VectorCommitment; use tracing::info_span; use super::{ - ColMatrix, ElementHasher, EvaluationFrame, FieldElement, Hasher, Queries, StarkDomain, - TraceInfo, TraceLde, TracePolyTable, + ColMatrix, ElementHasher, EvaluationFrame, FieldElement, StarkDomain, TraceLde, TracePolyTable, }; use crate::{RowMatrix, DEFAULT_SEGMENT_WIDTH}; @@ -28,20 +28,30 @@ mod tests; /// will always be elements in the base field (even when an extension field is used). /// - Auxiliary segments: a list of 0 or more segments for traces generated after the prover /// commits to the first trace segment. Currently, at most 1 auxiliary segment is possible. -pub struct DefaultTraceLde> { +pub struct DefaultTraceLde< + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, +> { // low-degree extension of the main segment of the trace main_segment_lde: RowMatrix, // commitment to the main segment of the trace - main_segment_tree: MerkleTree, + main_segment_oracles: V, // low-degree extensions of the auxiliary segment of the trace aux_segment_lde: Option>, // commitment to the auxiliary segment of the trace - aux_segment_tree: Option>, + aux_segment_oracles: Option, blowup: usize, trace_info: TraceInfo, + _h: PhantomData, } -impl> DefaultTraceLde { +impl DefaultTraceLde +where + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, +{ /// Takes the main trace segment columns as input, interpolates them into polynomials in /// coefficient form, evaluates the polynomials over the LDE domain, commits to the /// polynomial evaluations, and creates a new [DefaultTraceLde] with the LDE of the main trace @@ -54,18 +64,19 @@ impl> DefaultTraceLd main_trace: &ColMatrix, domain: &StarkDomain, ) -> (Self, TracePolyTable) { - // extend the main execution trace and build a Merkle tree from the extended trace - let (main_segment_lde, main_segment_tree, main_segment_polys) = - build_trace_commitment::(main_trace, domain); + // extend the main execution trace and build a commitment to the extended trace + let (main_segment_lde, main_segment_vector_com, main_segment_polys) = + build_trace_commitment::(main_trace, domain); let trace_poly_table = TracePolyTable::new(main_segment_polys); let trace_lde = DefaultTraceLde { main_segment_lde, - main_segment_tree, + main_segment_oracles: main_segment_vector_com, aux_segment_lde: None, - aux_segment_tree: None, + aux_segment_oracles: None, blowup: domain.trace_to_lde_blowup(), trace_info: trace_info.clone(), + _h: PhantomData, }; (trace_lde, trace_poly_table) @@ -95,17 +106,18 @@ impl> DefaultTraceLd } } -impl TraceLde for DefaultTraceLde +impl TraceLde for DefaultTraceLde where E: FieldElement, - H: ElementHasher, + H: ElementHasher + core::marker::Sync, + V: VectorCommitment + core::marker::Sync, { type HashFn = H; + type VC = V; /// Returns the commitment to the low-degree extension of the main trace segment. 
- fn get_main_trace_commitment(&self) -> ::Digest { - let root_hash = self.main_segment_tree.root(); - *root_hash + fn get_main_trace_commitment(&self) -> H::Digest { + self.main_segment_oracles.commitment() } /// Takes auxiliary trace segment columns as input, interpolates them into polynomials in @@ -124,10 +136,10 @@ where &mut self, aux_trace: &ColMatrix, domain: &StarkDomain, - ) -> (ColMatrix, ::Digest) { - // extend the auxiliary trace segment and build a Merkle tree from the extended trace - let (aux_segment_lde, aux_segment_tree, aux_segment_polys) = - build_trace_commitment::(aux_trace, domain); + ) -> (ColMatrix, H::Digest) { + // extend the auxiliary trace segment and build a commitment to the extended trace + let (aux_segment_lde, aux_segment_oracles, aux_segment_polys) = + build_trace_commitment::(aux_trace, domain); // check errors assert!( @@ -142,10 +154,10 @@ where // save the lde and commitment self.aux_segment_lde = Some(aux_segment_lde); - let root_hash = *aux_segment_tree.root(); - self.aux_segment_tree = Some(aux_segment_tree); + let commitment_string = aux_segment_oracles.commitment(); + self.aux_segment_oracles = Some(aux_segment_oracles); - (aux_segment_polys, root_hash) + (aux_segment_polys, commitment_string) } /// Reads current and next rows from the main trace segment into the specified frame. @@ -200,21 +212,21 @@ where } } - /// Returns trace table rows at the specified positions along with Merkle authentication paths - /// from the commitment root to these rows. + /// Returns trace table rows at the specified positions along with an opening proof to these + /// rows against the already computed commitment. fn query(&self, positions: &[usize]) -> Vec { // build queries for the main trace segment - let mut result = vec![build_segment_queries( + let mut result = vec![build_segment_queries::( &self.main_segment_lde, - &self.main_segment_tree, + &self.main_segment_oracles, positions, )]; // build queries for the auxiliary trace segment - if let Some(ref segment_tree) = self.aux_segment_tree { + if let Some(ref segment_oracles) = self.aux_segment_oracles { let segment_lde = self.aux_segment_lde.as_ref().expect("expected aux segment to be present"); - result.push(build_segment_queries(segment_lde, segment_tree, positions)); + result.push(build_segment_queries::(segment_lde, segment_oracles, positions)); } result @@ -246,16 +258,17 @@ where /// polynomial of degree = trace_length - 1, and then evaluating the polynomial over the LDE /// domain. /// -/// The trace commitment is computed by hashing each row of the extended execution trace, then -/// building a Merkle tree from the resulting hashes. +/// The trace commitment is computed by building a vector containing the hashes of each row of +/// the extended execution trace, then building a vector commitment to the resulting vector.
+fn build_trace_commitment( trace: &ColMatrix, domain: &StarkDomain, -) -> (RowMatrix, MerkleTree, ColMatrix) +) -> (RowMatrix, V, ColMatrix) where E: FieldElement, F: FieldElement, H: ElementHasher, + V: VectorCommitment, { // extend the execution trace let (trace_lde, trace_polys) = { @@ -277,32 +290,33 @@ where assert_eq!(trace_lde.num_rows(), domain.lde_domain_size()); // build trace commitment - let tree_depth = trace_lde.num_rows().ilog2() as usize; - let trace_tree = info_span!("compute_execution_trace_commitment", tree_depth) - .in_scope(|| trace_lde.commit_to_rows()); - assert_eq!(trace_tree.depth(), tree_depth); + let commitment_domain_size = trace_lde.num_rows(); + let trace_vector_com = info_span!("compute_execution_trace_commitment", commitment_domain_size) + .in_scope(|| trace_lde.commit_to_rows::()); + assert_eq!(trace_vector_com.domain_len(), commitment_domain_size); - (trace_lde, trace_tree, trace_polys) + (trace_lde, trace_vector_com, trace_polys) } -fn build_segment_queries( +fn build_segment_queries( segment_lde: &RowMatrix, - segment_tree: &MerkleTree, + segment_vector_com: &V, positions: &[usize], ) -> Queries where E: FieldElement, H: ElementHasher, + V: VectorCommitment, { // for each position, get the corresponding row from the trace segment LDE and put all these // rows into a single vector let trace_states = positions.iter().map(|&pos| segment_lde.row(pos).to_vec()).collect::>(); - // build Merkle authentication paths to the leaves specified by positions - let trace_proof = segment_tree - .prove_batch(positions) - .expect("failed to generate a Merkle proof for trace queries"); + // build a batch opening proof to the leaves specified by positions + let trace_proof = segment_vector_com + .open_many(positions) + .expect("failed to generate a batch opening proof for trace queries"); - Queries::new(trace_proof, trace_states) + Queries::new::(trace_proof.1, trace_states) } diff --git a/prover/src/trace/trace_lde/default/tests.rs b/prover/src/trace/trace_lde/default/tests.rs index 11100c03d..c06cc2e60 100644 --- a/prover/src/trace/trace_lde/default/tests.rs +++ b/prover/src/trace/trace_lde/default/tests.rs @@ -27,8 +27,11 @@ fn extend_trace_table() { let domain = StarkDomain::new(&air); // build the trace polynomials, extended trace, and commitment using the default TraceLde impl - let (trace_lde, trace_polys) = - DefaultTraceLde::::new(trace.info(), trace.main_segment(), &domain); + let (trace_lde, trace_polys) = DefaultTraceLde::>::new( + trace.info(), + trace.main_segment(), + &domain, + ); // check the width and length of the extended trace assert_eq!(2, trace_lde.main_segment_width()); @@ -74,10 +77,13 @@ fn commit_trace_table() { let domain = StarkDomain::new(&air); // build the trace polynomials, extended trace, and commitment using the default TraceLde impl - let (trace_lde, _) = - DefaultTraceLde::::new(trace.info(), trace.main_segment(), &domain); + let (trace_lde, _) = DefaultTraceLde::>::new( + trace.info(), + trace.main_segment(), + &domain, + ); - // build Merkle tree from trace rows + // build commitment, using a Merkle tree, to the trace rows let mut hashed_states = Vec::new(); let mut trace_state = vec![BaseElement::ZERO; trace_lde.main_segment_width()]; #[allow(clippy::needless_range_loop)] diff --git a/prover/src/trace/trace_lde/mod.rs b/prover/src/trace/trace_lde/mod.rs index 5429e3f5b..dbce21491 100644 --- a/prover/src/trace/trace_lde/mod.rs +++ b/prover/src/trace/trace_lde/mod.rs @@ -6,7 +6,7 @@ use alloc::vec::Vec; use air::{proof::Queries, 
LagrangeKernelEvaluationFrame, TraceInfo}; -use crypto::{ElementHasher, Hasher}; +use crypto::{ElementHasher, Hasher, VectorCommitment}; use super::{ColMatrix, EvaluationFrame, FieldElement, TracePolyTable}; use crate::StarkDomain; @@ -24,9 +24,12 @@ pub use default::DefaultTraceLde; /// - Auxiliary segments: a list of 0 or more segments for traces generated after the prover /// commits to the first trace segment. Currently, at most 1 auxiliary segment is possible. pub trait TraceLde: Sync { - /// The hash function used for building the Merkle tree commitments to trace segment LDEs. + /// The hash function used for hashing the rows of trace segment LDEs. type HashFn: ElementHasher; + /// The vector commitment scheme used for committing to the trace. + type VC: VectorCommitment; + /// Returns the commitment to the low-degree extension of the main trace segment. fn get_main_trace_commitment(&self) -> ::Digest; @@ -70,8 +73,8 @@ pub trait TraceLde: Sync { frame: &mut LagrangeKernelEvaluationFrame, ); - /// Returns trace table rows at the specified positions along with Merkle authentication paths - /// from the commitment root to these rows. + /// Returns trace table rows at the specified positions along with an opening proof to these + /// rows. fn query(&self, positions: &[usize]) -> Vec; /// Returns the number of rows in the execution trace. diff --git a/verifier/src/channel.rs b/verifier/src/channel.rs index 6b008c700..c84f4ec2a 100644 --- a/verifier/src/channel.rs +++ b/verifier/src/channel.rs @@ -4,12 +4,13 @@ // LICENSE file in the root directory of this source tree. use alloc::{string::ToString, vec::Vec}; +use core::marker::PhantomData; use air::{ proof::{Proof, Queries, Table, TraceOodFrame}, Air, }; -use crypto::{BatchMerkleProof, ElementHasher, MerkleTree}; +use crypto::{ElementHasher, VectorCommitment}; use fri::VerifierChannel as FriVerifierChannel; use math::{FieldElement, StarkField}; @@ -23,16 +24,20 @@ use crate::VerifierError; /// A channel is instantiated for a specific proof, which is parsed into structs over the /// appropriate field (specified by type parameter `E`). This also validates that the proof is /// well-formed in the context of the computation for the specified [Air]. -pub struct VerifierChannel> { +pub struct VerifierChannel< + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, +> { // trace queries - trace_roots: Vec, - trace_queries: Option>, + trace_commitments: Vec, + trace_queries: Option>, // constraint queries - constraint_root: H::Digest, - constraint_queries: Option>, + constraint_commitment: H::Digest, + constraint_queries: Option>, // FRI proof - fri_roots: Option>, - fri_layer_proofs: Vec>, + fri_commitments: Option>, + fri_layer_proofs: Vec, fri_layer_queries: Vec>, fri_remainder: Option>, fri_num_partitions: usize, @@ -44,7 +49,12 @@ pub struct VerifierChannel>, } -impl> VerifierChannel { +impl VerifierChannel +where + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, +{ // CONSTRUCTOR // -------------------------------------------------------------------------------------------- /// Creates and returns a new [VerifierChannel] initialized from the specified `proof`.
@@ -77,14 +87,18 @@ impl> VerifierChanne let fri_options = air.options().to_fri_options(); // --- parse commitments ------------------------------------------------------------------ - let (trace_roots, constraint_root, fri_roots) = commitments + let (trace_commitments, constraint_commitment, fri_commitments) = commitments .parse::(num_trace_segments, fri_options.num_fri_layers(lde_domain_size)) .map_err(|err| VerifierError::ProofDeserializationError(err.to_string()))?; // --- parse trace and constraint queries ------------------------------------------------- - let trace_queries = TraceQueries::new(trace_queries, air, num_unique_queries as usize)?; - let constraint_queries = - ConstraintQueries::new(constraint_queries, air, num_unique_queries as usize)?; + let trace_queries = + TraceQueries::::new(trace_queries, air, num_unique_queries as usize)?; + let constraint_queries = ConstraintQueries::::new( + constraint_queries, + air, + num_unique_queries as usize, + )?; // --- parse FRI proofs ------------------------------------------------------------------- let fri_num_partitions = fri_proof.num_partitions(); @@ -92,7 +106,7 @@ impl> VerifierChanne .parse_remainder() .map_err(|err| VerifierError::ProofDeserializationError(err.to_string()))?; let (fri_layer_queries, fri_layer_proofs) = fri_proof - .parse_layers::(lde_domain_size, fri_options.folding_factor()) + .parse_layers::(lde_domain_size, fri_options.folding_factor()) .map_err(|err| VerifierError::ProofDeserializationError(err.to_string()))?; // --- parse out-of-domain evaluation frame ----------------------------------------------- @@ -102,13 +116,13 @@ impl> VerifierChanne Ok(VerifierChannel { // trace queries - trace_roots, + trace_commitments, trace_queries: Some(trace_queries), // constraint queries - constraint_root, + constraint_commitment, constraint_queries: Some(constraint_queries), // FRI proof - fri_roots: Some(fri_roots), + fri_commitments: Some(fri_commitments), fri_layer_proofs, fri_layer_queries, fri_remainder: Some(fri_remainder), @@ -130,12 +144,12 @@ impl> VerifierChanne /// For computations requiring multiple trace segment, the returned slice will contain a /// commitment for each trace segment. pub fn read_trace_commitments(&self) -> &[H::Digest] { - &self.trace_roots + &self.trace_commitments } /// Returns constraint evaluation commitment sent by the prover. 
pub fn read_constraint_commitment(&self) -> H::Digest { - self.constraint_root + self.constraint_commitment } /// Returns trace polynomial evaluations at out-of-domain points z and z * g, where g is the @@ -177,9 +191,27 @@ impl> VerifierChanne let queries = self.trace_queries.take().expect("already read"); // make sure the states included in the proof correspond to the trace commitment - for (root, proof) in self.trace_roots.iter().zip(queries.query_proofs.iter()) { - MerkleTree::verify_batch(root, positions, proof) - .map_err(|_| VerifierError::TraceQueryDoesNotMatchCommitment)?; + + let items: Vec = + queries.main_states.rows().map(|row| H::hash_elements(row)).collect(); + >::verify_many( + self.trace_commitments[0], + positions, + &items, + &queries.query_proofs[0], + ) + .map_err(|_| VerifierError::TraceQueryDoesNotMatchCommitment)?; + + if let Some(ref aux_states) = queries.aux_states { + let items: Vec = + aux_states.rows().map(|row| H::hash_elements(row)).collect(); + >::verify_many( + self.trace_commitments[1], + positions, + &items, + &queries.query_proofs[1], + ) + .map_err(|_| VerifierError::TraceQueryDoesNotMatchCommitment)?; } Ok((queries.main_states, queries.aux_states)) @@ -193,9 +225,15 @@ impl> VerifierChanne positions: &[usize], ) -> Result, VerifierError> { let queries = self.constraint_queries.take().expect("already read"); - - MerkleTree::verify_batch(&self.constraint_root, positions, &queries.query_proofs) - .map_err(|_| VerifierError::ConstraintQueryDoesNotMatchCommitment)?; + let items: Vec = + queries.evaluations.rows().map(|row| H::hash_elements(row)).collect(); + >::verify_many( + self.constraint_commitment, + positions, + &items, + &queries.query_proofs, + ) + .map_err(|_| VerifierError::ConstraintQueryDoesNotMatchCommitment)?; Ok(queries.evaluations) } @@ -204,22 +242,24 @@ impl> VerifierChanne // FRI VERIFIER CHANNEL IMPLEMENTATION // ================================================================================================ -impl FriVerifierChannel for VerifierChannel +impl FriVerifierChannel for VerifierChannel where E: FieldElement, H: ElementHasher, + V: VectorCommitment, { type Hasher = H; + type VectorCommitment = V; fn read_fri_num_partitions(&self) -> usize { self.fri_num_partitions } fn read_fri_layer_commitments(&mut self) -> Vec { - self.fri_roots.take().expect("already read") + self.fri_commitments.take().expect("already read") } - fn take_next_fri_layer_proof(&mut self) -> BatchMerkleProof { + fn take_next_fri_layer_proof(&mut self) -> V::MultiProof { self.fri_layer_proofs.remove(0) } @@ -237,18 +277,28 @@ where /// Container of trace query data, including: /// * Queried states for all trace segments. -/// * Merkle authentication paths for all queries. +/// * Batch opening proof for all queries. /// /// Trace states for all auxiliary segments are stored in a single table. -struct TraceQueries> { - query_proofs: Vec>, +struct TraceQueries< + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, +> { + query_proofs: Vec, main_states: Table, aux_states: Option>, + _h: PhantomData, } -impl> TraceQueries { +impl TraceQueries +where + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, +{ /// Parses the provided trace queries into trace states in the specified field and - /// corresponding Merkle authentication paths. + /// corresponding batch opening proof. 
pub fn new>( mut queries: Vec, air: &A, @@ -262,12 +312,11 @@ impl> TraceQueries(air.lde_domain_size(), num_queries, main_segment_width) + .parse::(air.lde_domain_size(), num_queries, main_segment_width) .map_err(|err| { VerifierError::ProofDeserializationError(format!( "main trace segment query deserialization failed: {err}" @@ -278,14 +327,13 @@ impl> TraceQueries(air.lde_domain_size(), num_queries, segment_width) + .parse::(air.lde_domain_size(), num_queries, segment_width) .map_err(|err| { VerifierError::ProofDeserializationError(format!( "auxiliary trace segment query deserialization failed: {err}" @@ -305,6 +353,7 @@ impl> TraceQueries> TraceQueries> { - query_proofs: BatchMerkleProof, +/// * Batch opening proof for all queries. +struct ConstraintQueries< + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, +> { + query_proofs: V::MultiProof, evaluations: Table, + _h: PhantomData, } -impl> ConstraintQueries { +impl ConstraintQueries +where + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, +{ /// Parses the provided constraint queries into evaluations in the specified field and - /// corresponding Merkle authentication paths. + /// corresponding batch opening proof. pub fn new>( queries: Queries, air: &A, @@ -331,13 +390,17 @@ impl> ConstraintQuer let constraint_frame_width = air.context().num_constraint_composition_columns(); let (query_proofs, evaluations) = queries - .parse::(air.lde_domain_size(), num_queries, constraint_frame_width) + .parse::(air.lde_domain_size(), num_queries, constraint_frame_width) .map_err(|err| { VerifierError::ProofDeserializationError(format!( "constraint evaluation query deserialization failed: {err}" )) })?; - Ok(Self { query_proofs, evaluations }) + Ok(Self { + query_proofs, + evaluations, + _h: PhantomData, + }) } } diff --git a/verifier/src/errors.rs b/verifier/src/errors.rs index fb2aaa36e..e1b072db5 100644 --- a/verifier/src/errors.rs +++ b/verifier/src/errors.rs @@ -29,11 +29,10 @@ pub enum VerifierError { /// This error occurs when constraints evaluated over out-of-domain trace rows do not match /// evaluations of the constraint composition polynomial at the out-of-domain point. InconsistentOodConstraintEvaluations, - /// This error occurs when Merkle authentication paths of trace queries do not resolve to the - /// execution trace commitment included in the proof. + /// This error occurs when the batch opening proof fails to verify for trace queries. TraceQueryDoesNotMatchCommitment, - /// This error occurs when Merkle authentication paths of constraint evaluation queries do not - /// resolve to the constraint evaluation commitment included in the proof. + /// This error occurs when the batch opening proof fails to verify for constraint evaluation + /// queries. 
ConstraintQueryDoesNotMatchCommitment, /// This error occurs when the proof-of-work nonce hashed with the current state of the public /// coin resolves to a value which does not meet the proof-of-work threshold specified by the @@ -79,10 +78,10 @@ impl fmt::Display for VerifierError { write!(f, "constraint evaluations over the out-of-domain frame are inconsistent") } Self::TraceQueryDoesNotMatchCommitment => { - write!(f, "trace query did not match the commitment") + write!(f, "failed to open trace query against the given commitment") } Self::ConstraintQueryDoesNotMatchCommitment => { - write!(f, "constraint query did not match the commitment") + write!(f, "failed to open constraint query against the given commitment") } Self::QuerySeedProofOfWorkVerificationFailed => { write!(f, "query seed proof-of-work verification failed") diff --git a/verifier/src/lib.rs b/verifier/src/lib.rs index a9c5ab7f7..2c75ecd1d 100644 --- a/verifier/src/lib.rs +++ b/verifier/src/lib.rs @@ -40,7 +40,7 @@ pub use air::{ }; use air::{AuxRandElements, GkrVerifier}; pub use crypto; -use crypto::{ElementHasher, Hasher, RandomCoin}; +use crypto::{ElementHasher, Hasher, RandomCoin, VectorCommitment}; use fri::FriVerifier; pub use math; use math::{ @@ -78,7 +78,7 @@ pub use errors::VerifierError; /// - The specified proof was generated for a different computation. /// - The specified proof was generated for this computation but for different public inputs. /// - The specified proof was generated with parameters not providing an acceptable security level. -pub fn verify( +pub fn verify( proof: Proof, pub_inputs: AIR::PublicInputs, acceptable_options: &AcceptableOptions, @@ -87,6 +87,7 @@ where AIR: Air, HashFn: ElementHasher, RandCoin: RandomCoin, + VC: VectorCommitment, { // check that `proof` was generated with an acceptable set of parameters from the point of view // of the verifier @@ -107,7 +108,11 @@ where FieldExtension::None => { let public_coin = RandCoin::new(&public_coin_seed); let channel = VerifierChannel::new(&air, proof)?; - perform_verification::(air, channel, public_coin) + perform_verification::( + air, + channel, + public_coin, + ) }, FieldExtension::Quadratic => { if !>::is_supported() { @@ -115,7 +120,7 @@ where } let public_coin = RandCoin::new(&public_coin_seed); let channel = VerifierChannel::new(&air, proof)?; - perform_verification::, HashFn, RandCoin>( + perform_verification::, HashFn, RandCoin, VC>( air, channel, public_coin, @@ -127,7 +132,7 @@ where } let public_coin = RandCoin::new(&public_coin_seed); let channel = VerifierChannel::new(&air, proof)?; - perform_verification::, HashFn, RandCoin>( + perform_verification::, HashFn, RandCoin, VC>( air, channel, public_coin, @@ -140,9 +145,9 @@ where // ================================================================================================ /// Performs the actual verification by reading the data from the `channel` and making sure it /// attests to a correct execution of the computation specified by the provided `air`. 
-fn perform_verification( +fn perform_verification( air: A, - mut channel: VerifierChannel, + mut channel: VerifierChannel, mut public_coin: R, ) -> Result<(), VerifierError> where @@ -150,6 +155,7 @@ where A: Air, H: ElementHasher, R: RandomCoin, + V: VectorCommitment, { // 1 ----- trace commitment ------------------------------------------------------------------- // Read the commitments to evaluations of the trace polynomials over the LDE domain sent by the diff --git a/winterfell/src/lib.rs b/winterfell/src/lib.rs index a4fe90125..86c5e0345 100644 --- a/winterfell/src/lib.rs +++ b/winterfell/src/lib.rs @@ -152,7 +152,7 @@ //! math::{fields::f128::BaseElement, FieldElement, ToElements}, //! Air, AirContext, Assertion, GkrVerifier, EvaluationFrame, //! ProofOptions, TraceInfo, TransitionConstraintDegree, -//! crypto::{hashers::Blake3_256, DefaultRandomCoin}, +//! crypto::{hashers::Blake3_256, DefaultRandomCoin, MerkleTree}, //! }; //! //! // Public inputs for our computation will consist of the starting value and the end result. @@ -258,7 +258,7 @@ //! //! ```no_run //! use winterfell::{ -//! crypto::{hashers::Blake3_256, DefaultRandomCoin}, +//! crypto::{hashers::Blake3_256, DefaultRandomCoin, MerkleTree}, //! math::{fields::f128::BaseElement, FieldElement, ToElements}, //! matrix::ColMatrix, //! DefaultTraceLde, ProofOptions, Prover, StarkDomain, Trace, TracePolyTable, TraceTable, @@ -347,8 +347,9 @@ //! type Air = WorkAir; //! type Trace = TraceTable; //! type HashFn = Blake3_256; +//! type VC = MerkleTree; //! type RandomCoin = DefaultRandomCoin; -//! type TraceLde> = DefaultTraceLde; +//! type TraceLde> = DefaultTraceLde; //! type ConstraintEvaluator<'a, E: FieldElement> = //! DefaultConstraintEvaluator<'a, Self::Air, E>; //! @@ -394,7 +395,7 @@ //! //! ``` //! # use winterfell::{ -//! # crypto::{hashers::Blake3_256, DefaultRandomCoin}, +//! # crypto::{hashers::Blake3_256, DefaultRandomCoin, MerkleTree}, //! # math::{fields::f128::BaseElement, FieldElement, ToElements}, //! # matrix::ColMatrix, //! # Air, AirContext, Assertion, AuxRandElements, ByteWriter, DefaultConstraintEvaluator, @@ -490,8 +491,9 @@ //! # type Air = WorkAir; //! # type Trace = TraceTable; //! # type HashFn = Blake3_256; +//! # type VC = MerkleTree; //! # type RandomCoin = DefaultRandomCoin; -//! # type TraceLde> = DefaultTraceLde; +//! # type TraceLde> = DefaultTraceLde; //! # type ConstraintEvaluator<'a, E: FieldElement> = //! # DefaultConstraintEvaluator<'a, Self::Air, E>; //! # @@ -559,7 +561,8 @@ //! let pub_inputs = PublicInputs { start, result }; //! assert!(winterfell::verify::, -//! DefaultRandomCoin> +//! DefaultRandomCoin>, +//! MerkleTree> //! >(proof, pub_inputs, &min_opts).is_ok()); //! ``` //! 
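For orientation, the `VectorCommitment` call sites used throughout this patch (`new`, `commitment`, `domain_len`, `open_many`, `verify_many`) suggest an interface of roughly the following shape. This is a reconstruction for illustration only; the associated type and error names are assumptions, not the crate's authoritative definition:

```rust
use crypto::Hasher;

// Sketch of the interface implied by the call sites in this patch.
pub trait VectorCommitment<H: Hasher>: Sized {
    /// A proof opening a batch of positions at once (e.g., a batch Merkle proof).
    type MultiProof;
    /// Error returned when construction, opening, or verification fails.
    type Error;

    /// Commits to the given vector of digests.
    fn new(items: Vec<H::Digest>) -> Result<Self, Self::Error>;

    /// Returns the number of items committed to.
    fn domain_len(&self) -> usize;

    /// Returns the commitment string (e.g., a Merkle root).
    fn commitment(&self) -> H::Digest;

    /// Opens the items at the given positions; the second element of the
    /// returned pair is the batch opening proof (the patch uses `.1`).
    fn open_many(
        &self,
        positions: &[usize],
    ) -> Result<(Vec<H::Digest>, Self::MultiProof), Self::Error>;

    /// Verifies a batch opening proof against the given commitment.
    fn verify_many(
        commitment: H::Digest,
        positions: &[usize],
        items: &[H::Digest],
        proof: &Self::MultiProof,
    ) -> Result<(), Self::Error>;
}
```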
@@ -594,14 +597,14 @@ extern crate std; pub use air::{AuxRandElements, GkrVerifier}; pub use prover::{ crypto, iterators, math, matrix, Air, AirContext, Assertion, AuxTraceWithMetadata, - BoundaryConstraint, BoundaryConstraintGroup, ByteReader, ByteWriter, CompositionPolyTrace, + BoundaryConstraint, BoundaryConstraintGroup, CompositionPolyTrace, ConstraintCompositionCoefficients, ConstraintDivisor, ConstraintEvaluator, - DeepCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, Deserializable, - DeserializationError, EvaluationFrame, FieldExtension, Proof, ProofOptions, Prover, - ProverError, ProverGkrProof, Serializable, SliceReader, StarkDomain, Trace, TraceInfo, - TraceLde, TracePolyTable, TraceTable, TraceTableFragment, TransitionConstraintDegree, + DeepCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, EvaluationFrame, + FieldExtension, Proof, ProofOptions, Prover, ProverError, ProverGkrProof, StarkDomain, Trace, + TraceInfo, TraceLde, TracePolyTable, TraceTable, TraceTableFragment, + TransitionConstraintDegree, }; -pub use verifier::{verify, AcceptableOptions, VerifierError}; +pub use verifier::{verify, AcceptableOptions, ByteWriter, VerifierError}; #[cfg(test)] mod tests; diff --git a/winterfell/src/tests.rs b/winterfell/src/tests.rs index bfc0aa264..3757e2010 100644 --- a/winterfell/src/tests.rs +++ b/winterfell/src/tests.rs @@ -6,6 +6,7 @@ use std::{vec, vec::Vec}; use air::{GkrRandElements, LagrangeKernelRandElements}; +use crypto::MerkleTree; use prover::{ crypto::{hashers::Blake3_256, DefaultRandomCoin, RandomCoin}, math::{fields::f64::BaseElement, ExtensionOf, FieldElement}, @@ -28,6 +29,7 @@ fn test_complex_lagrange_kernel_air() { LagrangeKernelComplexAir, Blake3_256, DefaultRandomCoin>, + MerkleTree>, >(proof, (), &AcceptableOptions::MinConjecturedSecurity(0)) .unwrap() } @@ -213,8 +215,10 @@ impl Prover for LagrangeComplexProver { type Air = LagrangeKernelComplexAir; type Trace = LagrangeComplexTrace; type HashFn = Blake3_256; + type VC = MerkleTree>; type RandomCoin = DefaultRandomCoin; - type TraceLde> = DefaultTraceLde; + type TraceLde> = + DefaultTraceLde; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, LagrangeKernelComplexAir, E>; From e92e5419ad24988833058276f4e50986d9278106 Mon Sep 17 00:00:00 2001 From: igamigo Date: Fri, 4 Oct 2024 15:37:51 -0300 Subject: [PATCH 07/11] feat: Add `maybe_async_trait` (#334) --- CHANGELOG.md | 1 + air/src/air/context.rs | 3 +- air/src/proof/ood_frame.rs | 2 + air/src/proof/table.rs | 4 +- crypto/src/hash/mds/mds_f64_12x12.rs | 28 ++--- crypto/src/hash/mds/mds_f64_8x8.rs | 26 ++-- crypto/src/merkle/concurrent.rs | 2 + examples/src/utils/rescue.rs | 2 + math/src/field/extensions/cubic.rs | 2 +- math/src/field/extensions/quadratic.rs | 2 +- math/src/field/f128/mod.rs | 2 +- math/src/field/f62/mod.rs | 2 +- math/src/field/f64/mod.rs | 3 +- prover/src/channel.rs | 2 +- prover/src/constraints/evaluation_table.rs | 2 +- prover/src/constraints/evaluator/default.rs | 2 +- prover/src/matrix/col_matrix.rs | 10 +- prover/src/trace/trace_table.rs | 2 +- utils/core/src/serde/byte_reader.rs | 4 +- utils/maybe_async/README.md | 52 ++++++++ utils/maybe_async/src/lib.rs | 125 +++++++++++++++++++- 21 files changed, 230 insertions(+), 48 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ddcfc76b5..34569fa41 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,7 @@ # Changelog ## 0.10.0 (2024-06-11) - `utils/maybe-async` crate only +- Added `maybe-async-trait` procedural 
macro. - [BREAKING] Refactored `maybe-async` macro into simpler `maybe-async` and `maybe-await` macros. ## 0.9.1 (2024-06-24) - `utils/core` crate only diff --git a/air/src/air/context.rs b/air/src/air/context.rs index 183f575fc..09341afe3 100644 --- a/air/src/air/context.rs +++ b/air/src/air/context.rs @@ -309,8 +309,7 @@ impl AirContext { // we use the identity: ceil(a/b) = (a + b - 1)/b let num_constraint_col = - (highest_constraint_degree - transition_divisior_degree + trace_length - 1) - / trace_length; + (highest_constraint_degree - transition_divisior_degree).div_ceil(trace_length); cmp::max(num_constraint_col, 1) } diff --git a/air/src/proof/ood_frame.rs b/air/src/proof/ood_frame.rs index d4b3f14ec..f1ba818bd 100644 --- a/air/src/proof/ood_frame.rs +++ b/air/src/proof/ood_frame.rs @@ -229,6 +229,8 @@ impl Deserializable for OodFrame { // OOD FRAME TRACE STATES // ================================================================================================ +/// Trace evaluation frame at the out-of-domain point. +/// /// Stores the trace evaluations at `z` and `gz`, where `z` is a random Field element in /// `current_row` and `next_row`, respectively. If the Air contains a Lagrange kernel auxiliary /// column, then that column interpolated polynomial will be evaluated at `z`, `gz`, `g^2 z`, ... diff --git a/air/src/proof/table.rs b/air/src/proof/table.rs index 785147925..4c5f0e15e 100644 --- a/air/src/proof/table.rs +++ b/air/src/proof/table.rs @@ -138,10 +138,10 @@ impl<'a, E: FieldElement> Iterator for RowIterator<'a, E> { } } -impl<'a, E: FieldElement> ExactSizeIterator for RowIterator<'a, E> { +impl ExactSizeIterator for RowIterator<'_, E> { fn len(&self) -> usize { self.table.num_rows() } } -impl<'a, E: FieldElement> FusedIterator for RowIterator<'a, E> {} +impl FusedIterator for RowIterator<'_, E> {} diff --git a/crypto/src/hash/mds/mds_f64_12x12.rs b/crypto/src/hash/mds/mds_f64_12x12.rs index 44f5660b9..79a0bc3a3 100644 --- a/crypto/src/hash/mds/mds_f64_12x12.rs +++ b/crypto/src/hash/mds/mds_f64_12x12.rs @@ -3,6 +3,20 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. +//! This module contains helper functions as well as constants used to perform a 12x12 vector-matrix +//! multiplication. The special form of our MDS matrix i.e. being circulant, allows us to reduce +//! the vector-matrix multiplication to a Hadamard product of two vectors in "frequency domain". +//! This follows from the simple fact that every circulant matrix has the columns of the discrete +//! Fourier transform matrix as orthogonal eigenvectors. +//! The implementation also avoids the use of 3-point FFTs, and 3-point iFFTs, and substitutes that +//! with explicit expressions. It also avoids, due to the form of our matrix in the frequency domain, +//! divisions by 2 and repeated modular reductions. This is because of our explicit choice of +//! an MDS matrix that has small powers of 2 entries in frequency domain. +//! The following implementation has benefited greatly from the discussions and insights of +//! Hamish Ivey-Law and Jacqueline Nabaglo of Polygon Zero and is based on Nabaglo's implementation +//! in [Plonky2](https://github.com/mir-protocol/plonky2). +//! The circulant matrix is identified by its first row: [7, 23, 8, 26, 13, 10, 9, 7, 6, 22, 21, 8]. 
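Since the docs above identify the matrix only by its first row, it may help to recall that multiplying by a circulant matrix is a cyclic convolution with that row. The following naive reference sketch is illustrative only; it accumulates in `u128` and omits the modular reductions that the optimized frequency-domain code in this module performs:

```rust
// First row of the 12x12 circulant MDS matrix, as stated in the module docs.
const FIRST_ROW: [u64; 12] = [7, 23, 8, 26, 13, 10, 9, 7, 6, 22, 21, 8];

// Naive O(n^2) circulant matrix-vector product; the FFT-based code in this
// module computes the same products via a Hadamard product in the frequency
// domain.
fn circulant_mul_naive(state: [u64; 12]) -> [u128; 12] {
    let mut result = [0u128; 12];
    for (i, out) in result.iter_mut().enumerate() {
        for (j, &s) in state.iter().enumerate() {
            // row i of the circulant matrix is the first row rotated right by i
            *out += (FIRST_ROW[(12 + j - i) % 12] as u128) * (s as u128);
        }
    }
    result
}
```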
+ // FFT-BASED MDS MULTIPLICATION HELPER FUNCTIONS // ================================================================================================ @@ -12,20 +26,6 @@ use math::{ FieldElement, }; -/// This module contains helper functions as well as constants used to perform a 12x12 vector-matrix -/// multiplication. The special form of our MDS matrix i.e. being circulant, allows us to reduce -/// the vector-matrix multiplication to a Hadamard product of two vectors in "frequency domain". -/// This follows from the simple fact that every circulant matrix has the columns of the discrete -/// Fourier transform matrix as orthogonal eigenvectors. -/// The implementation also avoids the use of 3-point FFTs, and 3-point iFFTs, and substitutes that -/// with explicit expressions. It also avoids, due to the form of our matrix in the frequency domain, -/// divisions by 2 and repeated modular reductions. This is because of our explicit choice of -/// an MDS matrix that has small powers of 2 entries in frequency domain. -/// The following implementation has benefited greatly from the discussions and insights of -/// Hamish Ivey-Law and Jacqueline Nabaglo of Polygon Zero and is based on Nabaglo's implementation -/// in [Plonky2](https://github.com/mir-protocol/plonky2). -/// The circulant matrix is identified by its first row: [7, 23, 8, 26, 13, 10, 9, 7, 6, 22, 21, 8]. - // MDS matrix in frequency domain. // More precisely, this is the output of the three 4-point (real) FFTs of the first column of // the MDS matrix i.e. just before the multiplication with the appropriate twiddle factors diff --git a/crypto/src/hash/mds/mds_f64_8x8.rs b/crypto/src/hash/mds/mds_f64_8x8.rs index 037dee721..2742c6519 100644 --- a/crypto/src/hash/mds/mds_f64_8x8.rs +++ b/crypto/src/hash/mds/mds_f64_8x8.rs @@ -3,6 +3,19 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. +//! This module contains helper functions as well as constants used to perform an 8x8 vector-matrix +//! multiplication. The special form of our MDS matrix i.e. being circulant, allows us to reduce +//! the vector-matrix multiplication to a Hadamard product of two vectors in "frequency domain". +//! This follows from the simple fact that every circulant matrix has the columns of the discrete +//! Fourier transform matrix as orthogonal eigenvectors. +//! The implementation also avoids the use of internal 2-point FFTs, and 2-point iFFTs, and substitutes +//! them with explicit expressions. It also avoids, due to the form of our matrix in the frequency domain, +//! divisions by 2 and repeated modular reductions. This is because of our explicit choice of +//! an MDS matrix that has small powers of 2 entries in frequency domain. +//! The following implementation has benefited greatly from the discussions and insights of +//! Hamish Ivey-Law and Jacqueline Nabaglo of Polygon Zero and is based on Nabaglo's implementation +//! in [Plonky2](https://github.com/mir-protocol/plonky2). + // FFT-BASED MDS MULTIPLICATION HELPER FUNCTIONS // ================================================================================================ @@ -12,20 +25,7 @@ use math::{ FieldElement, }; -/// This module contains helper functions as well as constants used to perform a 8x8 vector-matrix -/// multiplication. The special form of our MDS matrix i.e. being circulant, allows us to reduce -/// the vector-matrix multiplication to a Hadamard product of two vectors in "frequency domain".
-/// This follows from the simple fact that every circulant matrix has the columns of the discrete -/// Fourier transform matrix as orthogonal eigenvectors. -/// The implementation also avoids the use of internal 2-point FFTs, and 2-point iFFTs, and substitutes -/// them with explicit expressions. It also avoids, due to the form of our matrix in the frequency domain, -/// divisions by 2 and repeated modular reductions. This is because of our explicit choice of -/// an MDS matrix that has small powers of 2 entries in frequency domain. -/// The following implementation has benefited greatly from the discussions and insights of -/// Hamish Ivey-Law and Jacqueline Nabaglo of Polygon Zero is based on Nabaglo's implementation -/// in [Plonky2](https://github.com/mir-protocol/plonky2). /// The circulant matrix is identified by its first row: [23, 8, 13, 10, 7, 6, 21, 8]. - // MDS matrix in frequency domain. // More precisely, this is the output of the two 4-point (real) FFTs of the first column of // the MDS matrix i.e. just before the multiplication with the appropriate twiddle factors diff --git a/crypto/src/merkle/concurrent.rs b/crypto/src/merkle/concurrent.rs index 637bd51b5..66696c174 100644 --- a/crypto/src/merkle/concurrent.rs +++ b/crypto/src/merkle/concurrent.rs @@ -18,6 +18,8 @@ pub const MIN_CONCURRENT_LEAVES: usize = 1024; // PUBLIC FUNCTIONS // ================================================================================================ +/// Returns internal nodes of a Merkle tree constructed from the provided leaves. +/// /// Builds all internal nodes of the Merkle using all available threads and stores the /// results in a single vector such that root of the tree is at position 1, nodes immediately /// under the root is at positions 2 and 3 etc. diff --git a/examples/src/utils/rescue.rs b/examples/src/utils/rescue.rs index e09cb094e..ab9e6a79b 100644 --- a/examples/src/utils/rescue.rs +++ b/examples/src/utils/rescue.rs @@ -21,6 +21,8 @@ pub const RATE_WIDTH: usize = 4; /// Two elements (32-bytes) are returned as digest. const DIGEST_SIZE: usize = 2; +/// Number of rounds in a single permutation of the hash function. 
+/// /// The number of rounds is set to 7 to provide 128-bit security level with 40% security margin; /// computed using algorithm 7 from /// security margin here differs from Rescue Prime specification which suggests 50% security diff --git a/math/src/field/extensions/cubic.rs b/math/src/field/extensions/cubic.rs index bc6b58697..ee967a6cf 100644 --- a/math/src/field/extensions/cubic.rs +++ b/math/src/field/extensions/cubic.rs @@ -320,7 +320,7 @@ impl> TryFrom for CubeExtension { } } -impl<'a, B: ExtensibleField<3>> TryFrom<&'a [u8]> for CubeExtension { +impl> TryFrom<&'_ [u8]> for CubeExtension { type Error = DeserializationError; /// Converts a slice of bytes into a field element; returns error if the value encoded in bytes diff --git a/math/src/field/extensions/quadratic.rs b/math/src/field/extensions/quadratic.rs index 15b5500e3..e22a06f74 100644 --- a/math/src/field/extensions/quadratic.rs +++ b/math/src/field/extensions/quadratic.rs @@ -315,7 +315,7 @@ impl> TryFrom for QuadExtension { } } -impl<'a, B: ExtensibleField<2>> TryFrom<&'a [u8]> for QuadExtension { +impl> TryFrom<&'_ [u8]> for QuadExtension { type Error = DeserializationError; /// Converts a slice of bytes into a field element; returns error if the value encoded in bytes diff --git a/math/src/field/f128/mod.rs b/math/src/field/f128/mod.rs index 5bc4bb273..22ecbd67f 100644 --- a/math/src/field/f128/mod.rs +++ b/math/src/field/f128/mod.rs @@ -352,7 +352,7 @@ impl TryFrom for BaseElement { } } -impl<'a> TryFrom<&'a [u8]> for BaseElement { +impl TryFrom<&'_ [u8]> for BaseElement { type Error = String; /// Converts a slice of bytes into a field element; returns error if the value encoded in bytes diff --git a/math/src/field/f62/mod.rs b/math/src/field/f62/mod.rs index 32e0899d6..006f90fe2 100644 --- a/math/src/field/f62/mod.rs +++ b/math/src/field/f62/mod.rs @@ -462,7 +462,7 @@ impl TryFrom<[u8; 8]> for BaseElement { } } -impl<'a> TryFrom<&'a [u8]> for BaseElement { +impl TryFrom<&'_ [u8]> for BaseElement { type Error = DeserializationError; /// Converts a slice of bytes into a field element; returns error if the value encoded in bytes diff --git a/math/src/field/f64/mod.rs b/math/src/field/f64/mod.rs index 119676076..d857e58b8 100644 --- a/math/src/field/f64/mod.rs +++ b/math/src/field/f64/mod.rs @@ -5,6 +5,7 @@ //! An implementation of a 64-bit STARK-friendly prime field with modulus $2^{64} - 2^{32} + 1$ //! using Montgomery representation. +//! //! Our implementation follows and is constant-time. //! //! 
This field supports very fast modular arithmetic and has a number of other attractive @@ -571,7 +572,7 @@ impl TryFrom<[u8; 8]> for BaseElement { } } -impl<'a> TryFrom<&'a [u8]> for BaseElement { +impl TryFrom<&'_ [u8]> for BaseElement { type Error = DeserializationError; /// Converts a slice of bytes into a field element; returns error if the value encoded in bytes diff --git a/prover/src/channel.rs b/prover/src/channel.rs index 34a39d3fc..db82f5095 100644 --- a/prover/src/channel.rs +++ b/prover/src/channel.rs @@ -203,7 +203,7 @@ where // FRI PROVER CHANNEL IMPLEMENTATION // ================================================================================================ -impl<'a, A, E, H, R, V> fri::ProverChannel for ProverChannel<'a, A, E, H, R, V> +impl fri::ProverChannel for ProverChannel<'_, A, E, H, R, V> where A: Air, E: FieldElement, diff --git a/prover/src/constraints/evaluation_table.rs b/prover/src/constraints/evaluation_table.rs index 826c61253..9add913f4 100644 --- a/prover/src/constraints/evaluation_table.rs +++ b/prover/src/constraints/evaluation_table.rs @@ -243,7 +243,7 @@ pub struct EvaluationTableFragment<'a, E: FieldElement> { ta_evaluations: Vec<&'a mut [E]>, } -impl<'a, E: FieldElement> EvaluationTableFragment<'a, E> { +impl EvaluationTableFragment<'_, E> { /// Returns the row at which the fragment starts. pub fn offset(&self) -> usize { self.offset diff --git a/prover/src/constraints/evaluator/default.rs b/prover/src/constraints/evaluator/default.rs index 8f96c7dcd..a8ded6412 100644 --- a/prover/src/constraints/evaluator/default.rs +++ b/prover/src/constraints/evaluator/default.rs @@ -45,7 +45,7 @@ pub struct DefaultConstraintEvaluator<'a, A: Air, E: FieldElement, } -impl<'a, A, E> ConstraintEvaluator for DefaultConstraintEvaluator<'a, A, E> +impl ConstraintEvaluator for DefaultConstraintEvaluator<'_, A, E> where A: Air, E: FieldElement, diff --git a/prover/src/matrix/col_matrix.rs b/prover/src/matrix/col_matrix.rs index 61f67aca1..8872cca71 100644 --- a/prover/src/matrix/col_matrix.rs +++ b/prover/src/matrix/col_matrix.rs @@ -333,15 +333,15 @@ impl<'a, E: FieldElement> Iterator for ColumnIter<'a, E> { } } -impl<'a, E: FieldElement> ExactSizeIterator for ColumnIter<'a, E> { +impl ExactSizeIterator for ColumnIter<'_, E> { fn len(&self) -> usize { self.matrix.map(|matrix| matrix.num_cols()).unwrap_or_default() } } -impl<'a, E: FieldElement> FusedIterator for ColumnIter<'a, E> {} +impl FusedIterator for ColumnIter<'_, E> {} -impl<'a, E: FieldElement> Default for ColumnIter<'a, E> { +impl Default for ColumnIter<'_, E> { fn default() -> Self { Self::empty() } @@ -382,10 +382,10 @@ impl<'a, E: FieldElement> Iterator for ColumnIterMut<'a, E> { } } -impl<'a, E: FieldElement> ExactSizeIterator for ColumnIterMut<'a, E> { +impl ExactSizeIterator for ColumnIterMut<'_, E> { fn len(&self) -> usize { self.matrix.num_cols() } } -impl<'a, E: FieldElement> FusedIterator for ColumnIterMut<'a, E> {} +impl FusedIterator for ColumnIterMut<'_, E> {} diff --git a/prover/src/trace/trace_table.rs b/prover/src/trace/trace_table.rs index dfbd6fe72..a5c10069b 100644 --- a/prover/src/trace/trace_table.rs +++ b/prover/src/trace/trace_table.rs @@ -313,7 +313,7 @@ pub struct TraceTableFragment<'a, B: StarkField> { data: Vec<&'a mut [B]>, } -impl<'a, B: StarkField> TraceTableFragment<'a, B> { +impl TraceTableFragment<'_, B> { // PUBLIC ACCESSORS // -------------------------------------------------------------------------------------------- diff --git a/utils/core/src/serde/byte_reader.rs 
b/utils/core/src/serde/byte_reader.rs index 966edcfc4..e49c39593 100644 --- a/utils/core/src/serde/byte_reader.rs +++ b/utils/core/src/serde/byte_reader.rs @@ -453,7 +453,7 @@ impl<'a> ReadAdapter<'a> { } #[cfg(feature = "std")] -impl<'a> ByteReader for ReadAdapter<'a> { +impl ByteReader for ReadAdapter<'_> { #[inline(always)] fn read_u8(&mut self) -> Result { self.pop() @@ -638,7 +638,7 @@ impl<'a> SliceReader<'a> { } } -impl<'a> ByteReader for SliceReader<'a> { +impl ByteReader for SliceReader<'_> { fn read_u8(&mut self) -> Result { self.check_eor(1)?; let result = self.source[self.pos]; diff --git a/utils/maybe_async/README.md b/utils/maybe_async/README.md index 117c70362..0dd895d60 100644 --- a/utils/maybe_async/README.md +++ b/utils/maybe_async/README.md @@ -70,6 +70,58 @@ async fn world() -> String { } ``` +## maybe_async_trait + +The `maybe_async_trait` macro can be applied to traits, and it will conditionally add the `async` keyword to trait methods annotated with `#[maybe_async]`, depending on the async feature being enabled. It also applies `#[async_trait::async_trait(?Send)]` to the trait or impl block when the async feature is on. + +For example: + +```rust +// Adding `maybe_async_trait` to a trait definition +#[maybe_async_trait] +trait ExampleTrait { + #[maybe_async] + fn hello_world(&self); + + fn get_hello(&self) -> String; +} + +// Adding `maybe_async_trait` to an implementation of the trait +#[maybe_async_trait] +impl ExampleTrait for MyStruct { + #[maybe_async] + fn hello_world(&self) { + // ... + } + + fn get_hello(&self) -> String { + // ... + } +} +``` + +When `async` is set, it gets transformed into: + +```rust +#[async_trait::async_trait(?Send)] +trait ExampleTrait { + async fn hello_world(&self); + + fn get_hello(&self) -> String; +} + +#[async_trait::async_trait(?Send)] +impl ExampleTrait for MyStruct { + async fn hello_world(&self) { + // ... + } + + fn get_hello(&self) -> String { + // ... + } +} +``` + ## License This project is [MIT licensed](../../LICENSE). diff --git a/utils/maybe_async/src/lib.rs b/utils/maybe_async/src/lib.rs index c9eb3a056..7d6ed2c2c 100644 --- a/utils/maybe_async/src/lib.rs +++ b/utils/maybe_async/src/lib.rs @@ -5,7 +5,7 @@ use proc_macro::TokenStream; use quote::quote; -use syn::{parse_macro_input, Expr, ItemFn, TraitItemFn}; +use syn::{parse_macro_input, Expr, ImplItem, ItemFn, ItemImpl, ItemTrait, TraitItem, TraitItemFn}; /// Parses a function (regular or trait) and conditionally adds the `async` keyword depending on /// the `async` feature flag being enabled. @@ -67,6 +67,129 @@ pub fn maybe_async(_attr: TokenStream, input: TokenStream) -> TokenStream { } } +/// Conditionally add `async` keyword to functions. +/// +/// Parses a trait or an `impl` block and conditionally adds the `async` keyword to methods that +/// are annotated with `#[maybe_async]`, depending on the `async` feature flag being enabled. +/// Additionally, if applied to a trait definition or impl block, it will add +/// `#[async_trait::async_trait(?Send)]` to it. +/// +/// For example, given the following trait definition: +/// ```ignore +/// #[maybe_async_trait] +/// trait ExampleTrait { +/// #[maybe_async] +/// fn hello_world(&self); +/// +/// fn get_hello(&self) -> String; +/// } +/// ``` +/// +/// And the following implementation: +/// ```ignore +/// #[maybe_async_trait] +/// impl ExampleTrait for MyStruct { +/// #[maybe_async] +/// fn hello_world(&self) { +/// // ... +/// } +/// +/// fn get_hello(&self) -> String { +/// // ...
+/// } +/// } +/// ``` +/// +/// When the `async` feature is enabled, this will be transformed into: +/// ```ignore +/// #[async_trait::async_trait(?Send)] +/// trait ExampleTrait { +/// async fn hello_world(&self); +/// +/// fn get_hello(&self) -> String; +/// } +/// +/// #[async_trait::async_trait(?Send)] +/// impl ExampleTrait for MyStruct { +/// async fn hello_world(&self) { +/// // ... +/// } +/// +/// fn get_hello(&self) -> String { +/// // ... +/// } +/// } +/// ``` +/// +/// When the `async` feature is disabled, the code remains unchanged, and neither the `async` +/// keyword nor the `#[async_trait::async_trait(?Send)]` attribute is applied. +#[proc_macro_attribute] +pub fn maybe_async_trait(_attr: TokenStream, input: TokenStream) -> TokenStream { + // Try parsing the input as a trait definition + if let Ok(mut trait_item) = syn::parse::(input.clone()) { + let output = if cfg!(feature = "async") { + for item in &mut trait_item.items { + if let TraitItem::Fn(method) = item { + // Remove the #[maybe_async] and make method async + method.attrs.retain(|attr| { + if attr.path().is_ident("maybe_async") { + method.sig.asyncness = Some(syn::token::Async::default()); + false + } else { + true + } + }); + } + } + + quote! { + #[async_trait::async_trait(?Send)] + #trait_item + } + } else { + quote! { + #trait_item + } + }; + + return output.into(); + } + // Check if it is an Impl block + else if let Ok(mut impl_item) = syn::parse::(input.clone()) { + let output = if cfg!(feature = "async") { + for item in &mut impl_item.items { + if let ImplItem::Fn(method) = item { + // Remove #[maybe_async] and make method async + method.attrs.retain(|attr| { + if attr.path().is_ident("maybe_async") { + method.sig.asyncness = Some(syn::token::Async::default()); + false // Remove the attribute + } else { + true // Keep other attributes + } + }); + } + } + quote! { + #[async_trait::async_trait(?Send)] + #impl_item + } + } else { + quote! { + #[cfg(not(feature = "async"))] + #impl_item + } + }; + + return output.into(); + } + + // If input is neither a trait nor an impl block, emit a compile-time error + quote! { + compile_error!("`maybe_async_trait` can only be applied to trait definitions and trait impl blocks"); + }.into() +} + /// Parses an expression and conditionally adds the `.await` keyword at the end of it depending on /// the `async` feature flag being enabled. /// From 9a9377fe1fab12dc0a54f258856e40ac960ddac2 Mon Sep 17 00:00:00 2001 From: Irakliy Khaburzaniya Date: Fri, 4 Oct 2024 12:16:37 -0700 Subject: [PATCH 08/11] updated winter-maybe-async crate version to v0.10.1 --- CHANGELOG.md | 4 +++- utils/maybe_async/Cargo.toml | 4 ++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 34569fa41..2dc0e15e7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,7 +1,9 @@ # Changelog -## 0.10.0 (2024-06-11) - `utils/maybe-async` crate only +## 0.10.1 (2024-10-04) - `utils/maybe-async` crate only - Added `maybe-async-trait` procedural macro. + +## 0.10.0 (2024-06-11) - `utils/maybe-async` crate only - [BREAKING] Refactored `maybe-async` macro into simpler `maybe-async` and `maybe-await` macros. 
## 0.9.1 (2024-06-24) - `utils/core` crate only diff --git a/utils/maybe_async/Cargo.toml b/utils/maybe_async/Cargo.toml index 5eb4f83ae..8bff37682 100644 --- a/utils/maybe_async/Cargo.toml +++ b/utils/maybe_async/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-maybe-async" -version = "0.10.0" +version = "0.10.1" description = "sync/async macro for winterfell" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/facebook/winterfell" -documentation = "https://docs.rs/winter-maybe-async/0.10.0" +documentation = "https://docs.rs/winter-maybe-async/0.10.1" keywords = ["async"] edition = "2021" rust-version = "1.78" From 6204d61a0b6c88602f4ab8a00497553f5eb04dfb Mon Sep 17 00:00:00 2001 From: Al-Kindi-0 <82364884+Al-Kindi-0@users.noreply.github.com> Date: Thu, 24 Oct 2024 03:03:39 +0200 Subject: [PATCH 09/11] Add option for partitioned trace commitment (#336) --- air/src/lib.rs | 2 +- air/src/options.rs | 114 ++++++++++++++++++-- crypto/src/hash/blake/mod.rs | 9 ++ crypto/src/hash/blake/tests.rs | 24 +++++ crypto/src/hash/mod.rs | 3 + crypto/src/hash/rescue/rp62_248/mod.rs | 4 + crypto/src/hash/rescue/rp62_248/tests.rs | 14 +++ crypto/src/hash/rescue/rp64_256/mod.rs | 4 + crypto/src/hash/rescue/rp64_256/tests.rs | 14 +++ crypto/src/hash/rescue/rp64_256_jive/mod.rs | 4 + crypto/src/hash/sha/mod.rs | 4 + examples/src/fibonacci/fib2/prover.rs | 7 +- examples/src/fibonacci/fib8/prover.rs | 7 +- examples/src/fibonacci/fib_small/prover.rs | 7 +- examples/src/fibonacci/mulfib2/prover.rs | 7 +- examples/src/fibonacci/mulfib8/prover.rs | 7 +- examples/src/lamport/aggregate/prover.rs | 7 +- examples/src/lamport/threshold/prover.rs | 7 +- examples/src/merkle/prover.rs | 7 +- examples/src/rescue/prover.rs | 7 +- examples/src/rescue_raps/prover.rs | 6 +- examples/src/utils/rescue.rs | 4 + examples/src/vdf/exempt/prover.rs | 7 +- examples/src/vdf/regular/prover.rs | 7 +- prover/benches/lagrange_kernel.rs | 7 +- prover/src/lib.rs | 17 ++- prover/src/matrix/row_matrix.rs | 38 +++++-- prover/src/trace/trace_lde/default/mod.rs | 24 ++++- prover/src/trace/trace_lde/default/tests.rs | 5 + verifier/src/channel.rs | 61 +++++++++-- winterfell/src/lib.rs | 12 ++- winterfell/src/tests.rs | 3 +- 32 files changed, 369 insertions(+), 81 deletions(-) diff --git a/air/src/lib.rs b/air/src/lib.rs index 539a812d9..0a471a706 100644 --- a/air/src/lib.rs +++ b/air/src/lib.rs @@ -38,7 +38,7 @@ mod errors; pub use errors::AssertionError; mod options; -pub use options::{FieldExtension, ProofOptions}; +pub use options::{FieldExtension, PartitionOptions, ProofOptions}; mod air; pub use air::{ diff --git a/air/src/options.rs b/air/src/options.rs index 7e71450bd..a831bdad7 100644 --- a/air/src/options.rs +++ b/air/src/options.rs @@ -4,9 +4,10 @@ // LICENSE file in the root directory of this source tree. use alloc::vec::Vec; +use core::{cmp, ops::Div}; use fri::FriOptions; -use math::{StarkField, ToElements}; +use math::{FieldElement, StarkField, ToElements}; use utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; // CONSTANTS @@ -74,6 +75,17 @@ pub enum FieldExtension { /// is the hash function used in the protocol. The soundness of a STARK proof is limited by the /// collision resistance of the hash function used by the protocol. For example, if a hash function /// with 128-bit collision resistance is used, soundness of a STARK proof cannot exceed 128 bits. 
+///
+/// In addition to the above, the `num_partitions` parameter specifies the number of partitions
+/// into which each of the traces committed to during proof generation is split, and the
+/// `min_partition_size` parameter gives a lower bound on the size of each such partition.
+/// More precisely, taking the main segment trace as an example, the prover will split the
+/// trace into `num_partitions` parts, each of size at least `min_partition_size`. The prover
+/// will then hash each part row-wise, resulting in `num_partitions` digests per row of the
+/// trace. Finally, the prover combines the `num_partitions` digests (per row) into one digest
+/// (per row), at which point the vector commitment scheme can be called.
+/// When `num_partitions` is equal to `1`, the prover simply hashes each row in one go,
+/// producing one digest per row of the trace.
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub struct ProofOptions {
     num_queries: u8,
@@ -82,6 +94,7 @@ pub struct ProofOptions {
     field_extension: FieldExtension,
     fri_folding_factor: u8,
     fri_remainder_max_degree: u8,
+    partition_options: PartitionOptions,
 }
 
 // PROOF OPTIONS IMPLEMENTATION
@@ -108,7 +121,6 @@ impl ProofOptions {
     /// - `grinding_factor` is greater than 32.
     /// - `fri_folding_factor` is not 2, 4, 8, or 16.
     /// - `fri_remainder_max_degree` is greater than 255 or is not a power of two minus 1.
-    #[rustfmt::skip]
     pub const fn new(
         num_queries: usize,
         blowup_factor: usize,
@@ -125,11 +137,20 @@ impl ProofOptions {
         assert!(blowup_factor >= MIN_BLOWUP_FACTOR, "blowup factor cannot be smaller than 2");
         assert!(blowup_factor <= MAX_BLOWUP_FACTOR, "blowup factor cannot be greater than 128");
 
-        assert!(grinding_factor <= MAX_GRINDING_FACTOR, "grinding factor cannot be greater than 32");
+        assert!(
+            grinding_factor <= MAX_GRINDING_FACTOR,
+            "grinding factor cannot be greater than 32"
+        );
 
         assert!(fri_folding_factor.is_power_of_two(), "FRI folding factor must be a power of 2");
-        assert!(fri_folding_factor >= FRI_MIN_FOLDING_FACTOR, "FRI folding factor cannot be smaller than 2");
-        assert!(fri_folding_factor <= FRI_MAX_FOLDING_FACTOR, "FRI folding factor cannot be greater than 16");
+        assert!(
+            fri_folding_factor >= FRI_MIN_FOLDING_FACTOR,
+            "FRI folding factor cannot be smaller than 2"
+        );
+        assert!(
+            fri_folding_factor <= FRI_MAX_FOLDING_FACTOR,
+            "FRI folding factor cannot be greater than 16"
+        );
 
         assert!(
             (fri_remainder_max_degree + 1).is_power_of_two(),
@@ -140,16 +161,33 @@ impl ProofOptions {
             "FRI polynomial remainder degree cannot be greater than 255"
         );
 
-        ProofOptions {
+        Self {
             num_queries: num_queries as u8,
             blowup_factor: blowup_factor as u8,
             grinding_factor: grinding_factor as u8,
             field_extension,
             fri_folding_factor: fri_folding_factor as u8,
             fri_remainder_max_degree: fri_remainder_max_degree as u8,
+            partition_options: PartitionOptions::new(1, 1),
         }
     }
 
+    /// Updates the provided [ProofOptions] instance with the specified partition parameters.
+    ///
+    /// # Panics
+    /// Panics if:
+    /// - `num_partitions` is zero or greater than 16.
+    /// - `min_partition_size` is zero or greater than 256.
+    pub const fn with_partitions(
+        mut self,
+        num_partitions: usize,
+        min_partition_size: usize,
+    ) -> ProofOptions {
+        self.partition_options = PartitionOptions::new(num_partitions, min_partition_size);
+
+        self
+    }
+
     // PUBLIC ACCESSORS
     // --------------------------------------------------------------------------------------------
 
@@ -206,6 +244,11 @@ impl ProofOptions {
         let remainder_max_degree = self.fri_remainder_max_degree as usize;
         FriOptions::new(self.blowup_factor(), folding_factor, remainder_max_degree)
     }
+
+    /// Returns the [`PartitionOptions`] used in this instance of proof options.
+    pub fn partition_options(&self) -> PartitionOptions {
+        self.partition_options
+    }
 }
 
 impl<E: StarkField> ToElements<E> for ProofOptions {
@@ -233,6 +276,8 @@ impl Serializable for ProofOptions {
         target.write(self.field_extension);
         target.write_u8(self.fri_folding_factor);
         target.write_u8(self.fri_remainder_max_degree);
+        target.write_u8(self.partition_options.num_partitions);
+        target.write_u8(self.partition_options.min_partition_size);
     }
 }
 
@@ -242,14 +287,15 @@ impl Deserializable for ProofOptions {
     /// # Errors
     /// Returns an error if a valid set of proof options could not be read from the specified `source`.
     fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
-        Ok(ProofOptions::new(
+        let result = ProofOptions::new(
             source.read_u8()? as usize,
             source.read_u8()? as usize,
             source.read_u8()? as u32,
             FieldExtension::read_from(source)?,
             source.read_u8()? as usize,
             source.read_u8()? as usize,
-        ))
+        );
+        Ok(result.with_partitions(source.read_u8()? as usize, source.read_u8()? as usize))
     }
 }
 
@@ -272,9 +318,6 @@ impl FieldExtension {
     }
 }
 
-// SERIALIZATION
-// ================================================================================================
-
 impl Serializable for FieldExtension {
     /// Serializes `self` and writes the resulting bytes into the `target`.
     fn write_into<W: ByteWriter>(&self, target: &mut W) {
@@ -301,6 +344,55 @@ impl Deserializable for FieldExtension {
     }
 }
 
+// PARTITION OPTIONS IMPLEMENTATION
+// ================================================================================================
+
+/// Defines the parameters used when committing to the traces generated during the protocol.
+#[derive(Debug, Clone, Copy, Eq, PartialEq)]
+pub struct PartitionOptions {
+    num_partitions: u8,
+    min_partition_size: u8,
+}
+
+impl PartitionOptions {
+    /// Returns a new instance of [`PartitionOptions`].
+    pub const fn new(num_partitions: usize, min_partition_size: usize) -> Self {
+        assert!(num_partitions >= 1, "number of partitions must be greater than or equal to 1");
+        assert!(num_partitions <= 16, "number of partitions must be smaller than or equal to 16");
+
+        assert!(
+            min_partition_size >= 1,
+            "smallest partition size must be greater than or equal to 1"
+        );
+        assert!(
+            min_partition_size <= 256,
+            "smallest partition size must be smaller than or equal to 256"
+        );
+
+        Self {
+            num_partitions: num_partitions as u8,
+            min_partition_size: min_partition_size as u8,
+        }
+    }
+
+    /// Returns the size of each partition used when committing to the main and auxiliary traces as
+    /// well as the constraint evaluation trace.
+    pub fn partition_size<E: FieldElement>(&self, num_columns: usize) -> usize {
+        let base_elements_per_partition = cmp::max(
+            (num_columns * E::EXTENSION_DEGREE).div_ceil(self.num_partitions as usize),
+            self.min_partition_size as usize,
+        );
+
+        base_elements_per_partition.div(E::EXTENSION_DEGREE)
+    }
+}
+
+impl Default for PartitionOptions {
+    fn default() -> Self {
+        Self { num_partitions: 1, min_partition_size: 1 }
+    }
+}
+
 // TESTS
 // ================================================================================================
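To make the sizing rule above concrete before moving on to the hash changes, the `partition_size` arithmetic can be re-derived outside the crate — a standalone sketch, where `extension_degree` stands in for `E::EXTENSION_DEGREE` and the sample numbers are illustrative:

```rust
/// Standalone re-derivation of the `PartitionOptions::partition_size` arithmetic.
fn partition_size(
    num_columns: usize,
    extension_degree: usize,
    num_partitions: usize,
    min_partition_size: usize,
) -> usize {
    let base_elements = num_columns * extension_degree;
    let per_partition = base_elements.div_ceil(num_partitions).max(min_partition_size);
    per_partition / extension_degree
}

fn main() {
    // 20 quadratic-extension columns = 40 base elements; split 8 ways that is
    // 5 base elements per partition, which the minimum of 8 then overrides,
    // i.e., each partition covers 4 extension-field elements.
    assert_eq!(partition_size(20, 2, 8, 8), 4);
    // with a single partition, the "partition" is simply the full row
    assert_eq!(partition_size(20, 2, 1, 1), 20);
}
```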
diff --git a/crypto/src/hash/blake/mod.rs b/crypto/src/hash/blake/mod.rs
index 9b52cefef..6f17d9fdd 100644
--- a/crypto/src/hash/blake/mod.rs
+++ b/crypto/src/hash/blake/mod.rs
@@ -34,6 +34,10 @@ impl<B: StarkField> Hasher for Blake3_256<B> {
         ByteDigest(blake3::hash(ByteDigest::digests_as_bytes(values)).into())
     }
 
+    fn merge_many(values: &[Self::Digest]) -> Self::Digest {
+        ByteDigest(blake3::hash(ByteDigest::digests_as_bytes(values)).into())
+    }
+
     fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
         let mut data = [0; 40];
         data[..32].copy_from_slice(&seed.0);
@@ -84,6 +88,11 @@ impl<B: StarkField> Hasher for Blake3_192<B> {
         ByteDigest(result.as_bytes()[..24].try_into().unwrap())
     }
 
+    fn merge_many(values: &[Self::Digest]) -> Self::Digest {
+        let result = blake3::hash(ByteDigest::digests_as_bytes(values));
+        ByteDigest(result.as_bytes()[..24].try_into().unwrap())
+    }
+
     fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
         let mut data = [0; 32];
         data[..24].copy_from_slice(&seed.0);
diff --git a/crypto/src/hash/blake/tests.rs b/crypto/src/hash/blake/tests.rs
index 773e7410e..7018aee56 100644
--- a/crypto/src/hash/blake/tests.rs
+++ b/crypto/src/hash/blake/tests.rs
@@ -5,8 +5,10 @@
 
 use math::{fields::f62::BaseElement, FieldElement};
 use rand_utils::rand_array;
+use utils::Deserializable;
 
 use super::{Blake3_256, ElementHasher, Hasher};
+use crate::hash::{Blake3_192, ByteDigest};
 
 #[test]
 fn hash_padding() {
@@ -29,3 +31,25 @@ fn hash_elements_padding() {
     let r2 = Blake3_256::hash_elements(&e2);
     assert_ne!(r1, r2);
 }
+
+#[test]
+fn merge_vs_merge_many_256() {
+    let digest_0 = ByteDigest::read_from_bytes(&[1_u8; 32]).unwrap();
+    let digest_1 = ByteDigest::read_from_bytes(&[2_u8; 32]).unwrap();
+
+    let r1 = Blake3_256::<BaseElement>::merge(&[digest_0, digest_1]);
+    let r2 = Blake3_256::<BaseElement>::merge_many(&[digest_0, digest_1]);
+
+    assert_eq!(r1, r2)
+}
+
+#[test]
+fn merge_vs_merge_many_192() {
+    let digest_0 = ByteDigest::read_from_bytes(&[1_u8; 24]).unwrap();
+    let digest_1 = ByteDigest::read_from_bytes(&[2_u8; 24]).unwrap();
+
+    let r1 = Blake3_192::<BaseElement>::merge(&[digest_0, digest_1]);
+    let r2 = Blake3_192::<BaseElement>::merge_many(&[digest_0, digest_1]);
+
+    assert_eq!(r1, r2)
+}
diff --git a/crypto/src/hash/mod.rs b/crypto/src/hash/mod.rs
index 6d68ac8ca..4bede6b8d 100644
--- a/crypto/src/hash/mod.rs
+++ b/crypto/src/hash/mod.rs
@@ -42,6 +42,9 @@ pub trait Hasher {
     /// Merkle trees.
     fn merge(values: &[Self::Digest; 2]) -> Self::Digest;
 
+    /// Returns a hash of many digests.
+    fn merge_many(values: &[Self::Digest]) -> Self::Digest;
+
     /// Returns hash(`seed` || `value`). This method is intended for use in PRNG and PoW contexts.
fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest; } diff --git a/crypto/src/hash/rescue/rp62_248/mod.rs b/crypto/src/hash/rescue/rp62_248/mod.rs index aebcfbc3a..3c39155b6 100644 --- a/crypto/src/hash/rescue/rp62_248/mod.rs +++ b/crypto/src/hash/rescue/rp62_248/mod.rs @@ -165,6 +165,10 @@ impl Hasher for Rp62_248 { ElementDigest::new(state[..DIGEST_SIZE].try_into().unwrap()) } + fn merge_many(values: &[Self::Digest]) -> Self::Digest { + Self::hash_elements(ElementDigest::digests_as_elements(values)) + } + fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest { // initialize the state as follows: // - seed is copied into the first 4 elements of the state. diff --git a/crypto/src/hash/rescue/rp62_248/tests.rs b/crypto/src/hash/rescue/rp62_248/tests.rs index 3f6960b98..ffee98602 100644 --- a/crypto/src/hash/rescue/rp62_248/tests.rs +++ b/crypto/src/hash/rescue/rp62_248/tests.rs @@ -83,6 +83,20 @@ fn hash_elements_vs_merge() { assert_eq!(m_result, h_result); } +#[test] +fn merge_vs_merge_many() { + let elements: [BaseElement; 8] = rand_array(); + + let digests: [ElementDigest; 2] = [ + ElementDigest::new(elements[..4].try_into().unwrap()), + ElementDigest::new(elements[4..].try_into().unwrap()), + ]; + + let m_result = Rp62_248::merge(&digests); + let h_result = Rp62_248::merge_many(&digests); + assert_eq!(m_result, h_result); +} + #[test] fn hash_elements_vs_merge_with_int() { let seed = ElementDigest::new(rand_array()); diff --git a/crypto/src/hash/rescue/rp64_256/mod.rs b/crypto/src/hash/rescue/rp64_256/mod.rs index b4f3e9d29..0d87de3f7 100644 --- a/crypto/src/hash/rescue/rp64_256/mod.rs +++ b/crypto/src/hash/rescue/rp64_256/mod.rs @@ -191,6 +191,10 @@ impl Hasher for Rp64_256 { ElementDigest::new(state[DIGEST_RANGE].try_into().unwrap()) } + fn merge_many(values: &[Self::Digest]) -> Self::Digest { + Self::hash_elements(ElementDigest::digests_as_elements(values)) + } + fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest { // initialize the state as follows: // - seed is copied into the first 4 elements of the rate portion of the state. diff --git a/crypto/src/hash/rescue/rp64_256/tests.rs b/crypto/src/hash/rescue/rp64_256/tests.rs index bbd10adee..8d7d8f89d 100644 --- a/crypto/src/hash/rescue/rp64_256/tests.rs +++ b/crypto/src/hash/rescue/rp64_256/tests.rs @@ -118,6 +118,20 @@ fn hash_elements_vs_merge() { assert_eq!(m_result, h_result); } +#[test] +fn merge_vs_merge_many() { + let elements: [BaseElement; 8] = rand_array(); + + let digests: [ElementDigest; 2] = [ + ElementDigest::new(elements[..4].try_into().unwrap()), + ElementDigest::new(elements[4..].try_into().unwrap()), + ]; + + let m_result = Rp64_256::merge(&digests); + let h_result = Rp64_256::merge_many(&digests); + assert_eq!(m_result, h_result); +} + #[test] fn hash_elements_vs_merge_with_int() { let seed = ElementDigest::new(rand_array()); diff --git a/crypto/src/hash/rescue/rp64_256_jive/mod.rs b/crypto/src/hash/rescue/rp64_256_jive/mod.rs index 6c5c4411f..d369b0164 100644 --- a/crypto/src/hash/rescue/rp64_256_jive/mod.rs +++ b/crypto/src/hash/rescue/rp64_256_jive/mod.rs @@ -195,6 +195,10 @@ impl Hasher for RpJive64_256 { Self::apply_jive_summation(&initial_state, &state) } + fn merge_many(values: &[Self::Digest]) -> Self::Digest { + Self::hash_elements(ElementDigest::digests_as_elements(values)) + } + // We do not rely on the sponge construction to build our compression function. Instead, we use // the Jive compression mode designed in https://eprint.iacr.org/2022/840.pdf. 
fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest { diff --git a/crypto/src/hash/sha/mod.rs b/crypto/src/hash/sha/mod.rs index 28da1ae1d..a0035cd3b 100644 --- a/crypto/src/hash/sha/mod.rs +++ b/crypto/src/hash/sha/mod.rs @@ -31,6 +31,10 @@ impl Hasher for Sha3_256 { ByteDigest(sha3::Sha3_256::digest(ByteDigest::digests_as_bytes(values)).into()) } + fn merge_many(values: &[Self::Digest]) -> Self::Digest { + ByteDigest(sha3::Sha3_256::digest(ByteDigest::digests_as_bytes(values)).into()) + } + fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest { let mut data = [0; 40]; data[..32].copy_from_slice(&seed.0); diff --git a/examples/src/fibonacci/fib2/prover.rs b/examples/src/fibonacci/fib2/prover.rs index 9fb3dd500..99d48f004 100644 --- a/examples/src/fibonacci/fib2/prover.rs +++ b/examples/src/fibonacci/fib2/prover.rs @@ -5,8 +5,8 @@ use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, - TraceTable, + DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, + TracePolyTable, TraceTable, }; use super::{ @@ -77,8 +77,9 @@ where trace_info: &TraceInfo, main_trace: &ColMatrix, domain: &StarkDomain, + partition_option: PartitionOptions, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/fibonacci/fib8/prover.rs b/examples/src/fibonacci/fib8/prover.rs index 425bfbd42..64182978c 100644 --- a/examples/src/fibonacci/fib8/prover.rs +++ b/examples/src/fibonacci/fib8/prover.rs @@ -5,8 +5,8 @@ use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, - TraceTable, + DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, + TracePolyTable, TraceTable, }; use super::{ @@ -92,8 +92,9 @@ where trace_info: &TraceInfo, main_trace: &ColMatrix, domain: &StarkDomain, + partition_option: PartitionOptions, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/fibonacci/fib_small/prover.rs b/examples/src/fibonacci/fib_small/prover.rs index 53ba615da..553988064 100644 --- a/examples/src/fibonacci/fib_small/prover.rs +++ b/examples/src/fibonacci/fib_small/prover.rs @@ -4,8 +4,8 @@ // LICENSE file in the root directory of this source tree. 
use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, - TraceTable, + DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, + TracePolyTable, TraceTable, }; use super::{ @@ -82,8 +82,9 @@ where trace_info: &TraceInfo, main_trace: &ColMatrix, domain: &StarkDomain, + partition_option: PartitionOptions, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/fibonacci/mulfib2/prover.rs b/examples/src/fibonacci/mulfib2/prover.rs index b1daba2fb..4c99187bf 100644 --- a/examples/src/fibonacci/mulfib2/prover.rs +++ b/examples/src/fibonacci/mulfib2/prover.rs @@ -5,8 +5,8 @@ use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, - TraceTable, + DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, + TracePolyTable, TraceTable, }; use super::{ @@ -73,8 +73,9 @@ where trace_info: &TraceInfo, main_trace: &ColMatrix, domain: &StarkDomain, + partition_option: PartitionOptions, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/fibonacci/mulfib8/prover.rs b/examples/src/fibonacci/mulfib8/prover.rs index 20297d0e5..1fb58bd1a 100644 --- a/examples/src/fibonacci/mulfib8/prover.rs +++ b/examples/src/fibonacci/mulfib8/prover.rs @@ -5,8 +5,8 @@ use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, - TraceTable, + DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, + TracePolyTable, TraceTable, }; use super::{ @@ -85,8 +85,9 @@ where trace_info: &TraceInfo, main_trace: &ColMatrix, domain: &StarkDomain, + partition_option: PartitionOptions, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/lamport/aggregate/prover.rs b/examples/src/lamport/aggregate/prover.rs index 51d8e9c30..3927a20e6 100644 --- a/examples/src/lamport/aggregate/prover.rs +++ b/examples/src/lamport/aggregate/prover.rs @@ -7,8 +7,8 @@ use winterfell::iterators::*; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, TraceInfo, TracePolyTable, - TraceTable, + DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, TraceInfo, + TracePolyTable, TraceTable, }; use super::{ @@ -121,8 +121,9 @@ where trace_info: &TraceInfo, main_trace: &ColMatrix, domain: &StarkDomain, + partition_option: PartitionOptions, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) } fn new_evaluator<'a, E: FieldElement>( diff --git 
a/examples/src/lamport/threshold/prover.rs b/examples/src/lamport/threshold/prover.rs index f5c9c748b..87bd09bf6 100644 --- a/examples/src/lamport/threshold/prover.rs +++ b/examples/src/lamport/threshold/prover.rs @@ -9,8 +9,8 @@ use std::collections::HashMap; use winterfell::iterators::*; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, TraceInfo, TracePolyTable, - TraceTable, + DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, TraceInfo, + TracePolyTable, TraceTable, }; use super::{ @@ -163,8 +163,9 @@ where trace_info: &TraceInfo, main_trace: &ColMatrix, domain: &StarkDomain, + partition_option: PartitionOptions, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/merkle/prover.rs b/examples/src/merkle/prover.rs index db6d7f407..b1164ff83 100644 --- a/examples/src/merkle/prover.rs +++ b/examples/src/merkle/prover.rs @@ -5,8 +5,8 @@ use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, - TraceTable, + DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, + TracePolyTable, TraceTable, }; use super::{ @@ -128,8 +128,9 @@ where trace_info: &TraceInfo, main_trace: &ColMatrix, domain: &StarkDomain, + partition_option: PartitionOptions, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/rescue/prover.rs b/examples/src/rescue/prover.rs index 050838af6..e8ca93757 100644 --- a/examples/src/rescue/prover.rs +++ b/examples/src/rescue/prover.rs @@ -5,8 +5,8 @@ use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, - TraceTable, + DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, + TracePolyTable, TraceTable, }; use super::{ @@ -95,8 +95,9 @@ where trace_info: &TraceInfo, main_trace: &ColMatrix, domain: &StarkDomain, + partition_option: PartitionOptions, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/rescue_raps/prover.rs b/examples/src/rescue_raps/prover.rs index 7adee9bbb..b8b21b1f3 100644 --- a/examples/src/rescue_raps/prover.rs +++ b/examples/src/rescue_raps/prover.rs @@ -6,7 +6,8 @@ use core_utils::uninit_vector; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, + DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, + TracePolyTable, }; use super::{ @@ -126,8 +127,9 @@ where trace_info: &TraceInfo, main_trace: &ColMatrix, domain: &StarkDomain, + partition_option: PartitionOptions, ) -> (Self::TraceLde, TracePolyTable) { - 
DefaultTraceLde::new(trace_info, main_trace, domain) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/utils/rescue.rs b/examples/src/utils/rescue.rs index ab9e6a79b..33ca425ca 100644 --- a/examples/src/utils/rescue.rs +++ b/examples/src/utils/rescue.rs @@ -117,6 +117,10 @@ impl Hasher for Rescue128 { Self::digest(Hash::hashes_as_elements(values)) } + fn merge_many(_values: &[Self::Digest]) -> Self::Digest { + unimplemented!("not implemented") + } + fn merge_with_int(_seed: Self::Digest, _value: u64) -> Self::Digest { unimplemented!("not implemented") } diff --git a/examples/src/vdf/exempt/prover.rs b/examples/src/vdf/exempt/prover.rs index cc5d3e8e8..16a7b8169 100644 --- a/examples/src/vdf/exempt/prover.rs +++ b/examples/src/vdf/exempt/prover.rs @@ -5,8 +5,8 @@ use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, - TraceTable, + DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, + TracePolyTable, TraceTable, }; use super::{ @@ -78,8 +78,9 @@ where trace_info: &TraceInfo, main_trace: &ColMatrix, domain: &StarkDomain, + partition_option: PartitionOptions, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/vdf/regular/prover.rs b/examples/src/vdf/regular/prover.rs index c880611ff..20bdf7874 100644 --- a/examples/src/vdf/regular/prover.rs +++ b/examples/src/vdf/regular/prover.rs @@ -5,8 +5,8 @@ use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, - TraceTable, + DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, + TracePolyTable, TraceTable, }; use super::{ @@ -73,8 +73,9 @@ where trace_info: &TraceInfo, main_trace: &ColMatrix, domain: &StarkDomain, + partition_option: PartitionOptions, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) } fn new_evaluator<'a, E: FieldElement>( diff --git a/prover/benches/lagrange_kernel.rs b/prover/benches/lagrange_kernel.rs index 7ee8ab3c3..d6ab6a5bc 100644 --- a/prover/benches/lagrange_kernel.rs +++ b/prover/benches/lagrange_kernel.rs @@ -7,8 +7,8 @@ use std::time::Duration; use air::{ Air, AirContext, Assertion, AuxRandElements, ConstraintCompositionCoefficients, - EvaluationFrame, FieldExtension, GkrRandElements, LagrangeKernelRandElements, ProofOptions, - TraceInfo, TransitionConstraintDegree, + EvaluationFrame, FieldExtension, GkrRandElements, LagrangeKernelRandElements, PartitionOptions, + ProofOptions, TraceInfo, TransitionConstraintDegree, }; use criterion::{criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion}; use crypto::{hashers::Blake3_256, DefaultRandomCoin, MerkleTree, RandomCoin}; @@ -202,11 +202,12 @@ impl Prover for LagrangeProver { trace_info: &TraceInfo, main_trace: &ColMatrix, domain: &StarkDomain, + partition_option: PartitionOptions, ) -> (Self::TraceLde, TracePolyTable) where E: math::FieldElement, { - DefaultTraceLde::new(trace_info, main_trace, domain) + 
DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) } fn new_evaluator<'a, E>( diff --git a/prover/src/lib.rs b/prover/src/lib.rs index ac0e82be2..035d6c655 100644 --- a/prover/src/lib.rs +++ b/prover/src/lib.rs @@ -48,7 +48,7 @@ pub use air::{ EvaluationFrame, FieldExtension, LagrangeKernelRandElements, ProofOptions, TraceInfo, TransitionConstraintDegree, }; -use air::{AuxRandElements, GkrRandElements}; +use air::{AuxRandElements, GkrRandElements, PartitionOptions}; pub use crypto; use crypto::{ElementHasher, RandomCoin, VectorCommitment}; use fri::FriProver; @@ -182,6 +182,7 @@ pub trait Prover { trace_info: &TraceInfo, main_trace: &ColMatrix, domain: &StarkDomain, + partition_option: PartitionOptions, ) -> (Self::TraceLde, TracePolyTable) where E: FieldElement; @@ -554,7 +555,11 @@ pub trait Prover { log_domain_size = domain_size.ilog2() ) .in_scope(|| { - let commitment = composed_evaluations.commit_to_rows::(); + let commitment = composed_evaluations.commit_to_rows::( + self.options() + .partition_options() + .partition_size::(num_constraint_composition_columns), + ); ConstraintCommitment::new(composed_evaluations, commitment) }); @@ -574,8 +579,12 @@ pub trait Prover { E: FieldElement, { // extend the main execution trace and commit to the extended trace - let (trace_lde, trace_polys) = - maybe_await!(self.new_trace_lde(trace.info(), trace.main_segment(), domain)); + let (trace_lde, trace_polys) = maybe_await!(self.new_trace_lde( + trace.info(), + trace.main_segment(), + domain, + self.options().partition_options(), + )); // get the commitment to the main trace segment LDE let main_trace_commitment = trace_lde.get_main_trace_commitment(); diff --git a/prover/src/matrix/row_matrix.rs b/prover/src/matrix/row_matrix.rs index f42ca0e7a..85b43122e 100644 --- a/prover/src/matrix/row_matrix.rs +++ b/prover/src/matrix/row_matrix.rs @@ -180,7 +180,7 @@ impl RowMatrix { /// * A vector commitment is computed for the resulting vector using the specified vector /// commitment scheme. /// * The resulting vector commitment is returned as the commitment to the entire matrix. 
-    pub fn commit_to_rows<H, V>(&self) -> V
+    pub fn commit_to_rows<H, V>(&self, partition_size: usize) -> V
     where
         H: ElementHasher<BaseField = E::BaseField>,
         V: VectorCommitment<H>,
     {
         // allocate vector to store row hashes
         let mut row_hashes = unsafe { uninit_vector::<H::Digest>(self.num_rows()) };
 
-        // iterate though matrix rows, hashing each row
-        batch_iter_mut!(
-            &mut row_hashes,
-            128, // min batch size
-            |batch: &mut [H::Digest], batch_offset: usize| {
-                for (i, row_hash) in batch.iter_mut().enumerate() {
-                    *row_hash = H::hash_elements(self.row(batch_offset + i));
-                }
-            }
-        );
+        if partition_size == self.num_cols() * E::EXTENSION_DEGREE {
+            // iterate through matrix rows, hashing each row in one go
+            batch_iter_mut!(
+                &mut row_hashes,
+                128, // min batch size
+                |batch: &mut [H::Digest], batch_offset: usize| {
+                    for (i, row_hash) in batch.iter_mut().enumerate() {
+                        *row_hash = H::hash_elements(self.row(batch_offset + i));
+                    }
+                }
+            );
+        } else {
+            // iterate through matrix rows, hashing each row in partition-sized chunks
+            batch_iter_mut!(
+                &mut row_hashes,
+                128, // min batch size
+                |batch: &mut [H::Digest], batch_offset: usize| {
+                    let mut buffer = vec![H::Digest::default(); partition_size];
+                    for (i, row_hash) in batch.iter_mut().enumerate() {
+                        self.row(batch_offset + i)
+                            .chunks(partition_size)
+                            .zip(buffer.iter_mut())
+                            .for_each(|(chunk, buf)| *buf = H::hash_elements(chunk));
+                        *row_hash = H::merge_many(&buffer);
+                    }
+                }
+            );
+        }
 
         // build the vector commitment to the hashed rows
         V::new(row_hashes).expect("failed to construct trace vector commitment")
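Distilled from the hunk above: each row is hashed either in one shot or chunk-by-chunk, with the per-chunk digests folded via `merge_many`. A simplified sketch over the base field (so `partition_size` counts elements directly, and the `batch_iter_mut!` batching is omitted):

```rust
use winter_crypto::{hashers::Blake3_256, ElementHasher, Hasher};
use winter_math::fields::f64::BaseElement;

type B3 = Blake3_256<BaseElement>;

/// Hash one row: whole-row hashing when the row forms a single partition,
/// otherwise hash each partition-sized chunk and fold the chunk digests.
fn hash_row(row: &[BaseElement], partition_size: usize) -> <B3 as Hasher>::Digest {
    if partition_size == row.len() {
        B3::hash_elements(row)
    } else {
        let digests: Vec<_> =
            row.chunks(partition_size).map(|chunk| B3::hash_elements(chunk)).collect();
        B3::merge_many(&digests)
    }
}
```

The `min_partition_size` floor in `PartitionOptions` guards against chunks so small that the extra `merge_many` pass would dominate the per-row hashing cost.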
diff --git a/prover/src/trace/trace_lde/default/mod.rs b/prover/src/trace/trace_lde/default/mod.rs
index e06839d53..26b5e3916 100644
--- a/prover/src/trace/trace_lde/default/mod.rs
+++ b/prover/src/trace/trace_lde/default/mod.rs
@@ -6,7 +6,7 @@
 use alloc::vec::Vec;
 use core::marker::PhantomData;
 
-use air::{proof::Queries, LagrangeKernelEvaluationFrame, TraceInfo};
+use air::{proof::Queries, LagrangeKernelEvaluationFrame, PartitionOptions, TraceInfo};
 use crypto::VectorCommitment;
 use tracing::info_span;
 
@@ -43,6 +43,7 @@ pub struct DefaultTraceLde<
     aux_segment_oracles: Option<V>,
     blowup: usize,
     trace_info: TraceInfo,
+    partition_option: PartitionOptions,
     _h: PhantomData<H>,
 }
 
@@ -63,10 +64,15 @@ where
         trace_info: &TraceInfo,
         main_trace: &ColMatrix<E::BaseField>,
         domain: &StarkDomain<E::BaseField>,
+        partition_option: PartitionOptions,
     ) -> (Self, TracePolyTable<E>) {
         // extend the main execution trace and build a commitment to the extended trace
         let (main_segment_lde, main_segment_vector_com, main_segment_polys) =
-            build_trace_commitment::<E, E::BaseField, H, V>(main_trace, domain);
+            build_trace_commitment::<E, E::BaseField, H, V>(
+                main_trace,
+                domain,
+                partition_option.partition_size::<E::BaseField>(main_trace.num_cols()),
+            );
 
         let trace_poly_table = TracePolyTable::new(main_segment_polys);
         let trace_lde = DefaultTraceLde {
@@ -76,6 +82,7 @@ where
             aux_segment_oracles: None,
             blowup: domain.trace_to_lde_blowup(),
             trace_info: trace_info.clone(),
+            partition_option,
             _h: PhantomData,
         };
 
@@ -122,7 +129,9 @@ where
     /// Takes auxiliary trace segment columns as input, interpolates them into polynomials in
     /// coefficient form, evaluates the polynomials over the LDE domain, and commits to the
-    /// polynomial evaluations.
+    /// polynomial evaluations. When `num_partitions` is equal to `1`, each row of evaluations is
+    /// committed to by hashing it in one go; when it is greater than `1`, each row is hashed in
+    /// `num_partitions` chunks whose digests are then combined into a single digest.
     ///
     /// Returns a tuple containing the column polynomials in coefficient form and the commitment
     /// to the polynomial evaluations over the LDE domain.
@@ -139,7 +148,11 @@ where
     ) -> (ColMatrix<E>, H::Digest) {
         // extend the auxiliary trace segment and build a commitment to the extended trace
         let (aux_segment_lde, aux_segment_oracles, aux_segment_polys) =
-            build_trace_commitment::<E, E, H, V>(aux_trace, domain);
+            build_trace_commitment::<E, E, H, V>(
+                aux_trace,
+                domain,
+                self.partition_option.partition_size::<E>(aux_trace.num_cols()),
+            );
 
         // check errors
         assert!(
@@ -263,6 +276,7 @@ where
 fn build_trace_commitment<E, F, H, V>(
     trace: &ColMatrix<F>,
     domain: &StarkDomain<E::BaseField>,
+    partition_size: usize,
 ) -> (RowMatrix<F>, V, ColMatrix<F>)
 where
     E: FieldElement,
@@ -292,7 +306,7 @@ where
     // build trace commitment
     let commitment_domain_size = trace_lde.num_rows();
     let trace_vector_com = info_span!("compute_execution_trace_commitment", commitment_domain_size)
-        .in_scope(|| trace_lde.commit_to_rows::<H, V>());
+        .in_scope(|| trace_lde.commit_to_rows::<H, V>(partition_size));
     assert_eq!(trace_vector_com.domain_len(), commitment_domain_size);
 
     (trace_lde, trace_vector_com, trace_polys)
diff --git a/prover/src/trace/trace_lde/default/tests.rs b/prover/src/trace/trace_lde/default/tests.rs
index c06cc2e60..734accf68 100644
--- a/prover/src/trace/trace_lde/default/tests.rs
+++ b/prover/src/trace/trace_lde/default/tests.rs
@@ -5,6 +5,7 @@
 
 use alloc::vec::Vec;
 
+use air::PartitionOptions;
 use crypto::{hashers::Blake3_256, ElementHasher, MerkleTree};
 use math::{
     fields::f128::BaseElement, get_power_series, get_power_series_with_offset, polynom,
@@ -25,12 +26,14 @@ fn extend_trace_table() {
     let air = MockAir::with_trace_length(trace_length);
     let trace = build_fib_trace(trace_length * 2);
     let domain = StarkDomain::new(&air);
+    let partition_option = PartitionOptions::default();
 
     // build the trace polynomials, extended trace, and commitment using the default TraceLde impl
     let (trace_lde, trace_polys) = DefaultTraceLde::<BaseElement, Blake3_256<BaseElement>, MerkleTree<Blake3_256<BaseElement>>>::new(
         trace.info(),
         trace.main_segment(),
         &domain,
+        partition_option,
     );
 
     // check the width and length of the extended trace
@@ -75,12 +78,14 @@ fn commit_trace_table() {
     let air = MockAir::with_trace_length(trace_length);
     let trace = build_fib_trace(trace_length * 2);
     let domain = StarkDomain::new(&air);
+    let partition_option = PartitionOptions::default();
 
     // build the trace polynomials, extended trace, and commitment using the default TraceLde impl
     let (trace_lde, _) = DefaultTraceLde::<BaseElement, Blake3_256<BaseElement>, MerkleTree<Blake3_256<BaseElement>>>::new(
         trace.info(),
         trace.main_segment(),
         &domain,
+        partition_option,
     );
 
     // build commitment, using a Merkle tree, to the trace rows
diff --git a/verifier/src/channel.rs b/verifier/src/channel.rs
index c84f4ec2a..9d7dbc426 100644
--- a/verifier/src/channel.rs
+++ b/verifier/src/channel.rs
@@ -35,6 +35,10 @@ pub struct VerifierChannel<
     // constraint queries
     constraint_commitment: H::Digest,
     constraint_queries: Option<ConstraintQueries<E, H>>,
+    // partition sizes used for hashing rows of the main, auxiliary, and constraint traces
+    partition_size_main: usize,
+    partition_size_aux: usize,
+    partition_size_constraint: usize,
     // FRI proof
     fri_commitments: Option<Vec<H::Digest>>,
     fri_layer_proofs: Vec<V::MultiProof>,
@@ -85,6 +89,7 @@ where
         let aux_trace_width = air.trace_info().aux_segment_width();
         let lde_domain_size = air.lde_domain_size();
         let fri_options = air.options().to_fri_options();
+        let partition_options = air.options().partition_options();
 
         // --- parse commitments ------------------------------------------------------------------
         let (trace_commitments, constraint_commitment, fri_commitments) = commitments
@@ -114,6 +119,14 @@ where
             .parse(main_trace_width, aux_trace_width, constraint_frame_width)
             .map_err(|err| VerifierError::ProofDeserializationError(err.to_string()))?;
 
+        // --- compute the partition size for each trace ------------------------------------------
+        let partition_size_main = partition_options
+            .partition_size::<E::BaseField>(air.context().trace_info().main_trace_width());
+        let partition_size_aux =
+            partition_options.partition_size::<E>(air.context().trace_info().aux_segment_width());
+        let partition_size_constraint = partition_options
+            .partition_size::<E>(air.context().num_constraint_composition_columns());
+
         Ok(VerifierChannel {
             // trace queries
             trace_commitments,
@@ -121,6 +134,10 @@ where
             // constraint queries
             constraint_commitment,
             constraint_queries: Some(constraint_queries),
+            // partition sizes used in the commitments
+            partition_size_main,
+            partition_size_aux,
+            partition_size_constraint,
             // FRI proof
             fri_commitments: Some(fri_commitments),
             fri_layer_proofs,
@@ -191,9 +208,12 @@ where
         let queries = self.trace_queries.take().expect("already read");
 
         // make sure the states included in the proof correspond to the trace commitment
-        let items: Vec<H::Digest> =
-            queries.main_states.rows().map(|row| H::hash_elements(row)).collect();
+        let items: Vec<H::Digest> = queries
+            .main_states
+            .rows()
+            .map(|row| hash_row::<E::BaseField, H>(row, self.partition_size_main))
+            .collect();
         <V as VectorCommitment<H>>::verify_many(
             self.trace_commitments[0],
             positions,
@@ -203,8 +223,11 @@ where
             .map_err(|_| VerifierError::TraceQueryDoesNotMatchCommitment)?;
 
         if let Some(ref aux_states) = queries.aux_states {
-            let items: Vec<H::Digest> =
-                aux_states.rows().map(|row| H::hash_elements(row)).collect();
+            let items: Vec<H::Digest> = aux_states
+                .rows()
+                .map(|row| hash_row::<E, H>(row, self.partition_size_aux))
+                .collect();
+
             <V as VectorCommitment<H>>::verify_many(
                 self.trace_commitments[1],
                 positions,
@@ -225,8 +248,13 @@ where
         positions: &[usize],
     ) -> Result<Table<E>, VerifierError> {
         let queries = self.constraint_queries.take().expect("already read");
-        let items: Vec<H::Digest> =
-            queries.evaluations.rows().map(|row| H::hash_elements(row)).collect();
+
+        let items: Vec<H::Digest> = queries
+            .evaluations
+            .rows()
+            .map(|row| hash_row::<E, H>(row, self.partition_size_constraint))
+            .collect();
+
         <V as VectorCommitment<H>>::verify_many(
             self.constraint_commitment,
             positions,
@@ -404,3 +432,24 @@ where
         })
     }
 }
+
+// HELPER
+// ================================================================================================
+
+/// Hashes a row of a trace in batches where each batch is of size at most `partition_size`.
+fn hash_row<E, H>(row: &[E], partition_size: usize) -> H::Digest
+where
+    E: FieldElement,
+    H: ElementHasher<BaseField = E::BaseField>,
+{
+    if partition_size == row.len() * E::EXTENSION_DEGREE {
+        H::hash_elements(row)
+    } else {
+        let mut buffer = vec![H::Digest::default(); partition_size];
+
+        row.chunks(partition_size)
+            .zip(buffer.iter_mut())
+            .for_each(|(chunk, buf)| *buf = H::hash_elements(chunk));
+        H::merge_many(&buffer)
+    }
+}
diff --git a/winterfell/src/lib.rs b/winterfell/src/lib.rs
index 86c5e0345..9074f6eef 100644
--- a/winterfell/src/lib.rs
+++ b/winterfell/src/lib.rs
@@ -266,7 +266,7 @@
 //!
 //! # use winterfell::{
 //! #     Air, AirContext, Assertion, AuxRandElements, ByteWriter, DefaultConstraintEvaluator,
-//! #     EvaluationFrame, TraceInfo, TransitionConstraintDegree,
+//! #     EvaluationFrame, PartitionOptions, TraceInfo, TransitionConstraintDegree,
 //! # };
 //! #
 //! # pub struct PublicInputs {
@@ -371,8 +371,9 @@
 //!         trace_info: &TraceInfo,
 //!         main_trace: &ColMatrix<Self::BaseField>,
 //!         domain: &StarkDomain<Self::BaseField>,
+//!         partition_option: PartitionOptions,
 //!     ) -> (Self::TraceLde<E>, TracePolyTable<E>) {
-//!
DefaultTraceLde::new(trace_info, main_trace, domain) +//! DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) //! } //! //! fn new_evaluator<'a, E: FieldElement>( @@ -400,7 +401,7 @@ //! # matrix::ColMatrix, //! # Air, AirContext, Assertion, AuxRandElements, ByteWriter, DefaultConstraintEvaluator, //! # DefaultTraceLde, EvaluationFrame, TraceInfo, -//! # TransitionConstraintDegree, TraceTable, FieldExtension, Prover, +//! # TransitionConstraintDegree, TraceTable, FieldExtension, PartitionOptions, Prover, //! # ProofOptions, StarkDomain, Proof, Trace, TracePolyTable, //! # }; //! # @@ -514,8 +515,9 @@ //! # trace_info: &TraceInfo, //! # main_trace: &ColMatrix, //! # domain: &StarkDomain, +//! # partition_option: PartitionOptions, //! # ) -> (Self::TraceLde, TracePolyTable) { -//! # DefaultTraceLde::new(trace_info, main_trace, domain) +//! # DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) //! # } //! # //! # fn new_evaluator<'a, E: FieldElement>( @@ -594,7 +596,7 @@ #[cfg(test)] extern crate std; -pub use air::{AuxRandElements, GkrVerifier}; +pub use air::{AuxRandElements, GkrVerifier, PartitionOptions}; pub use prover::{ crypto, iterators, math, matrix, Air, AirContext, Assertion, AuxTraceWithMetadata, BoundaryConstraint, BoundaryConstraintGroup, CompositionPolyTrace, diff --git a/winterfell/src/tests.rs b/winterfell/src/tests.rs index 3757e2010..3fb0c5197 100644 --- a/winterfell/src/tests.rs +++ b/winterfell/src/tests.rs @@ -234,11 +234,12 @@ impl Prover for LagrangeComplexProver { trace_info: &TraceInfo, main_trace: &ColMatrix, domain: &StarkDomain, + partition_option: PartitionOptions, ) -> (Self::TraceLde, TracePolyTable) where E: math::FieldElement, { - DefaultTraceLde::new(trace_info, main_trace, domain) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) } fn new_evaluator<'a, E>( From 657e15636e318c1b0d1ded508d34846ea50f55e3 Mon Sep 17 00:00:00 2001 From: Irakliy Khaburzaniya Date: Fri, 25 Oct 2024 19:07:35 -0700 Subject: [PATCH 10/11] update crate version to v0.10.0, update changelog, and increment MSRV to 1.82 --- CHANGELOG.md | 7 ++++--- air/Cargo.toml | 18 +++++++++--------- crypto/Cargo.toml | 12 ++++++------ examples/Cargo.toml | 10 +++++----- fri/Cargo.toml | 14 +++++++------- math/Cargo.toml | 10 +++++----- prover/Cargo.toml | 18 +++++++++--------- utils/core/Cargo.toml | 6 +++--- utils/maybe_async/Cargo.toml | 2 +- utils/rand/Cargo.toml | 8 ++++---- verifier/Cargo.toml | 16 ++++++++-------- winterfell/Cargo.toml | 12 ++++++------ 12 files changed, 67 insertions(+), 66 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e0ef14c3e..96d4a3678 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,10 +1,11 @@ # Changelog -## 0.10.1 (2024-10-04) - `utils/maybe-async` crate only +## 0.10.0 (2024-10-25) - Added `maybe-async-trait` procedural macro. - -## 0.10.0 (2024-06-11) - `utils/maybe-async` crate only - [BREAKING] Refactored `maybe-async` macro into simpler `maybe-async` and `maybe-await` macros. +- Introduce `VectorCommitment` abstraction (#285). +- Add options for partitioned trace commitments (#336). +- Updated minimum supported Rust version to 1.82. ## 0.9.3 (2024-09-25) - `utils/core` and `math` crates only - Implemented `get_size_hint()` for default impls (#332). 
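Before the largely mechanical version bumps that make up the rest of this patch, here is how the options introduced in patch 09 surface in user code — a hedged sketch, with illustrative parameter values:

```rust
use winter_air::{FieldExtension, PartitionOptions, ProofOptions};

fn main() {
    // 28 queries, blowup factor 8, 16 bits of grinding, no field extension,
    // FRI folding factor 4, max FRI remainder degree 31 -- then opt into
    // splitting each committed row into 2 partitions of at least 8 elements.
    let options = ProofOptions::new(28, 8, 16, FieldExtension::None, 4, 31)
        .with_partitions(2, 8);

    // the resulting PartitionOptions travel with the proof options and are
    // what the prover threads through `new_trace_lde` and `commit_to_rows`
    let partitions: PartitionOptions = options.partition_options();
    println!("{partitions:?}");
}
```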
diff --git a/air/Cargo.toml b/air/Cargo.toml index 4d1b0641f..12c56cd72 100644 --- a/air/Cargo.toml +++ b/air/Cargo.toml @@ -1,16 +1,16 @@ [package] name = "winter-air" -version = "0.9.0" +version = "0.10.0" description = "AIR components for the Winterfell STARK prover/verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-air/0.9.0" +documentation = "https://docs.rs/winter-air/0.10.0" categories = ["cryptography", "no-std"] keywords = ["crypto", "arithmetization", "air"] edition = "2021" -rust-version = "1.78" +rust-version = "1.82" [lib] bench = false @@ -20,14 +20,14 @@ default = ["std"] std = ["crypto/std", "fri/std", "math/std", "utils/std"] [dependencies] -crypto = { version = "0.9", path = "../crypto", package = "winter-crypto", default-features = false } -fri = { version = "0.9", path = "../fri", package = "winter-fri", default-features = false } -libm = "0.2.8" -math = { version = "0.9", path = "../math", package = "winter-math", default-features = false } -utils = { version = "0.9", path = "../utils/core", package = "winter-utils", default-features = false } +crypto = { version = "0.10", path = "../crypto", package = "winter-crypto", default-features = false } +fri = { version = "0.10", path = "../fri", package = "winter-fri", default-features = false } +libm = "0.2" +math = { version = "0.10", path = "../math", package = "winter-math", default-features = false } +utils = { version = "0.10", path = "../utils/core", package = "winter-utils", default-features = false } [dev-dependencies] -rand-utils = { version = "0.9", path = "../utils/rand", package = "winter-rand-utils" } +rand-utils = { version = "0.10", path = "../utils/rand", package = "winter-rand-utils" } # Allow math in docs [package.metadata.docs.rs] diff --git a/crypto/Cargo.toml b/crypto/Cargo.toml index 5de0a433e..23f985fee 100644 --- a/crypto/Cargo.toml +++ b/crypto/Cargo.toml @@ -1,16 +1,16 @@ [package] name = "winter-crypto" -version = "0.9.0" +version = "0.10.0" description = "Cryptographic library for the Winterfell STARK prover/verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-crypto/0.9.0" +documentation = "https://docs.rs/winter-crypto/0.10.0" categories = ["cryptography", "no-std"] keywords = ["crypto", "merkle-tree", "hash"] edition = "2021" -rust-version = "1.78" +rust-version = "1.82" [lib] bench = false @@ -31,11 +31,11 @@ std = ["blake3/std", "math/std", "sha3/std", "utils/std"] [dependencies] blake3 = { version = "1.5", default-features = false } -math = { version = "0.9", path = "../math", package = "winter-math", default-features = false } +math = { version = "0.10", path = "../math", package = "winter-math", default-features = false } sha3 = { version = "0.10", default-features = false } -utils = { version = "0.9", path = "../utils/core", package = "winter-utils", default-features = false } +utils = { version = "0.10", path = "../utils/core", package = "winter-utils", default-features = false } [dev-dependencies] criterion = "0.5" proptest = "1.4" -rand-utils = { version = "0.9", path = "../utils/rand", package = "winter-rand-utils" } +rand-utils = { version = "0.10", path = "../utils/rand", package = "winter-rand-utils" } diff --git a/examples/Cargo.toml b/examples/Cargo.toml index 2fda1d952..f86e9ad50 100644 --- a/examples/Cargo.toml +++ 
b/examples/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "examples" -version = "0.9.0" +version = "0.10.0" description = "Examples of using Winterfell STARK prover/verifier" authors = ["winterfell contributors"] readme = "README.md" @@ -8,7 +8,7 @@ license = "MIT" repository = "https://github.com/novifinancial/winterfell" categories = ["cryptography"] edition = "2021" -rust-version = "1.78" +rust-version = "1.82" [lib] bench = false @@ -27,14 +27,14 @@ std = ["core-utils/std", "hex/std", "rand-utils", "winterfell/std"] [dependencies] blake3 = { version = "1.5", default-features = false } -core-utils = { version = "0.9", path = "../utils/core", package = "winter-utils", default-features = false } +core-utils = { version = "0.10", path = "../utils/core", package = "winter-utils", default-features = false } hex = { version = "0.4", optional = true } -rand-utils = { version = "0.9", path = "../utils/rand", package = "winter-rand-utils", optional = true } +rand-utils = { version = "0.10", path = "../utils/rand", package = "winter-rand-utils", optional = true } structopt = { version = "0.3", default-features = false } tracing = { version = "0.1", default-features = false } tracing-forest = { version = "0.1", features = ["ansi", "smallvec"], optional = true } tracing-subscriber = { version = "0.3", features = ["std", "env-filter"] } -winterfell = { version = "0.9", path = "../winterfell", default-features = false } +winterfell = { version = "0.10", path = "../winterfell", default-features = false } [dev-dependencies] criterion = "0.5" diff --git a/fri/Cargo.toml b/fri/Cargo.toml index b3f4ec631..2e3d1b20b 100644 --- a/fri/Cargo.toml +++ b/fri/Cargo.toml @@ -1,16 +1,16 @@ [package] name = "winter-fri" -version = "0.9.0" +version = "0.10.0" description = "Implementation of FRI protocol for the Winterfell STARK prover/verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-fri/0.9.0" +documentation = "https://docs.rs/winter-fri/0.10.0" categories = ["cryptography", "no-std"] keywords = ["crypto", "polynomial", "commitments"] edition = "2021" -rust-version = "1.78" +rust-version = "1.82" [lib] bench = false @@ -29,10 +29,10 @@ default = ["std"] std = ["crypto/std", "math/std", "utils/std"] [dependencies] -crypto = { version = "0.9", path = "../crypto", package = "winter-crypto", default-features = false } -math = { version = "0.9", path = "../math", package = "winter-math", default-features = false } -utils = { version = "0.9", path = "../utils/core", package = "winter-utils", default-features = false } +crypto = { version = "0.10", path = "../crypto", package = "winter-crypto", default-features = false } +math = { version = "0.10", path = "../math", package = "winter-math", default-features = false } +utils = { version = "0.10", path = "../utils/core", package = "winter-utils", default-features = false } [dev-dependencies] criterion = "0.5" -rand-utils = { version = "0.9", path = "../utils/rand", package = "winter-rand-utils" } +rand-utils = { version = "0.10", path = "../utils/rand", package = "winter-rand-utils" } diff --git a/math/Cargo.toml b/math/Cargo.toml index fe0b94b58..061c2d52f 100644 --- a/math/Cargo.toml +++ b/math/Cargo.toml @@ -1,16 +1,16 @@ [package] name = "winter-math" -version = "0.9.3" +version = "0.10.0" description = "Math library for the Winterfell STARK prover/verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" 
repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-math/0.9.3" +documentation = "https://docs.rs/winter-math/0.10.0" categories = ["cryptography", "no-std"] keywords = ["crypto", "finite-fields", "polynomials", "fft"] edition = "2021" -rust-version = "1.78" +rust-version = "1.82" [lib] bench = false @@ -38,13 +38,13 @@ std = ["utils/std"] [dependencies] serde = { version = "1.0", features = [ "derive" ], optional = true, default-features = false } -utils = { version = "0.9", path = "../utils/core", package = "winter-utils", default-features = false } +utils = { version = "0.10", path = "../utils/core", package = "winter-utils", default-features = false } [dev-dependencies] criterion = "0.5" num-bigint = "0.4" proptest = "1.4" -rand-utils = { version = "0.9", path = "../utils/rand", package = "winter-rand-utils" } +rand-utils = { version = "0.10", path = "../utils/rand", package = "winter-rand-utils" } # Allow math in docs [package.metadata.docs.rs] diff --git a/prover/Cargo.toml b/prover/Cargo.toml index 36272766f..85b5dbcd7 100644 --- a/prover/Cargo.toml +++ b/prover/Cargo.toml @@ -1,16 +1,16 @@ [package] name = "winter-prover" -version = "0.9.0" +version = "0.10.0" description = "Winterfell STARK prover" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-prover/0.9.0" +documentation = "https://docs.rs/winter-prover/0.10.0" categories = ["cryptography", "no-std"] keywords = ["crypto", "zkp", "stark", "prover"] edition = "2021" -rust-version = "1.78" +rust-version = "1.82" [lib] bench = false @@ -30,17 +30,17 @@ default = ["std"] std = ["air/std", "crypto/std", "fri/std", "math/std", "utils/std"] [dependencies] -air = { version = "0.9", path = "../air", package = "winter-air", default-features = false } -crypto = { version = "0.9", path = "../crypto", package = "winter-crypto", default-features = false } -fri = { version = "0.9", path = '../fri', package = "winter-fri", default-features = false } -math = { version = "0.9", path = "../math", package = "winter-math", default-features = false } +air = { version = "0.10", path = "../air", package = "winter-air", default-features = false } +crypto = { version = "0.10", path = "../crypto", package = "winter-crypto", default-features = false } +fri = { version = "0.10", path = '../fri', package = "winter-fri", default-features = false } +math = { version = "0.10", path = "../math", package = "winter-math", default-features = false } maybe_async = { path = "../utils/maybe_async" , package = "winter-maybe-async" } tracing = { version = "0.1", default-features = false, features = ["attributes"]} -utils = { version = "0.9", path = "../utils/core", package = "winter-utils", default-features = false } +utils = { version = "0.10", path = "../utils/core", package = "winter-utils", default-features = false } [dev-dependencies] criterion = "0.5" -rand-utils = { version = "0.9", path = "../utils/rand", package = "winter-rand-utils" } +rand-utils = { version = "0.10", path = "../utils/rand", package = "winter-rand-utils" } # Allow math in docs [package.metadata.docs.rs] diff --git a/utils/core/Cargo.toml b/utils/core/Cargo.toml index 96e5c3d30..c606caa08 100644 --- a/utils/core/Cargo.toml +++ b/utils/core/Cargo.toml @@ -1,16 +1,16 @@ [package] name = "winter-utils" -version = "0.9.3" +version = "0.10.0" description = "Utilities for the Winterfell STARK prover/verifier" authors = 
["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-utils/0.9.3" +documentation = "https://docs.rs/winter-utils/0.10.0" categories = ["cryptography", "no-std"] keywords = ["serialization", "transmute"] edition = "2021" -rust-version = "1.78" +rust-version = "1.82" [lib] bench = false diff --git a/utils/maybe_async/Cargo.toml b/utils/maybe_async/Cargo.toml index 8bff37682..825d991b0 100644 --- a/utils/maybe_async/Cargo.toml +++ b/utils/maybe_async/Cargo.toml @@ -9,7 +9,7 @@ repository = "https://github.com/facebook/winterfell" documentation = "https://docs.rs/winter-maybe-async/0.10.1" keywords = ["async"] edition = "2021" -rust-version = "1.78" +rust-version = "1.82" [lib] proc-macro = true diff --git a/utils/rand/Cargo.toml b/utils/rand/Cargo.toml index 77ae755fc..3e05c6437 100644 --- a/utils/rand/Cargo.toml +++ b/utils/rand/Cargo.toml @@ -1,22 +1,22 @@ [package] name = "winter-rand-utils" -version = "0.9.0" +version = "0.10.0" description = "Random value generation utilities for Winterfell crates" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-rand-utils/0.9.0" +documentation = "https://docs.rs/winter-rand-utils/0.10.0" categories = ["cryptography"] keywords = ["rand"] edition = "2021" -rust-version = "1.78" +rust-version = "1.82" [lib] bench = false [dependencies] -utils = { version = "0.9", path = "../core", package = "winter-utils" } +utils = { version = "0.10", path = "../core", package = "winter-utils" } [target.'cfg(not(target_family = "wasm"))'.dependencies] rand = { version = "0.8" } diff --git a/verifier/Cargo.toml b/verifier/Cargo.toml index de8c3f24c..63d4b9c0f 100644 --- a/verifier/Cargo.toml +++ b/verifier/Cargo.toml @@ -1,16 +1,16 @@ [package] name = "winter-verifier" -version = "0.9.0" +version = "0.10.0" description = "Winterfell STARK verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-verifier/0.9.0" +documentation = "https://docs.rs/winter-verifier/0.10.0" categories = ["cryptography", "no-std"] keywords = ["crypto", "zkp", "stark", "verifier"] edition = "2021" -rust-version = "1.78" +rust-version = "1.82" [lib] bench = false @@ -20,11 +20,11 @@ default = ["std"] std = ["air/std", "crypto/std", "fri/std", "math/std", "utils/std"] [dependencies] -air = { version = "0.9", path = "../air", package = "winter-air", default-features = false } -crypto = { version = "0.9", path = "../crypto", package = "winter-crypto", default-features = false } -fri = { version = "0.9", path = "../fri", package = "winter-fri", default-features = false } -math = { version = "0.9", path = "../math", package = "winter-math", default-features = false } -utils = { version = "0.9", path = "../utils/core", package = "winter-utils", default-features = false } +air = { version = "0.10", path = "../air", package = "winter-air", default-features = false } +crypto = { version = "0.10", path = "../crypto", package = "winter-crypto", default-features = false } +fri = { version = "0.10", path = "../fri", package = "winter-fri", default-features = false } +math = { version = "0.10", path = "../math", package = "winter-math", default-features = false } +utils = { version = "0.10", path = "../utils/core", package = "winter-utils", 
default-features = false } # Allow math in docs [package.metadata.docs.rs] diff --git a/winterfell/Cargo.toml b/winterfell/Cargo.toml index 6d3c02cae..cdeeb59ec 100644 --- a/winterfell/Cargo.toml +++ b/winterfell/Cargo.toml @@ -1,16 +1,16 @@ [package] name = "winterfell" -version = "0.9.0" +version = "0.10.0" description = "Winterfell STARK prover and verifier" authors = ["winterfell contributors"] readme = "../README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winterfell/0.9.0" +documentation = "https://docs.rs/winterfell/0.10.0" categories = ["cryptography", "no-std"] keywords = ["crypto", "zkp", "stark", "prover", "verifier"] edition = "2021" -rust-version = "1.78" +rust-version = "1.82" [lib] bench = false @@ -22,9 +22,9 @@ default = ["std"] std = ["prover/std", "verifier/std"] [dependencies] -air = { version = "0.9", path = "../air", package = "winter-air", default-features = false } -prover = { version = "0.9", path = "../prover", package = "winter-prover", default-features = false } -verifier = { version = "0.9", path = "../verifier", package = "winter-verifier", default-features = false } +air = { version = "0.10", path = "../air", package = "winter-air", default-features = false } +prover = { version = "0.10", path = "../prover", package = "winter-prover", default-features = false } +verifier = { version = "0.10", path = "../verifier", package = "winter-verifier", default-features = false } # Allow math in docs [package.metadata.docs.rs] From 0e2291ad45baee263a66a73584e053c331b03290 Mon Sep 17 00:00:00 2001 From: Irakliy Khaburzaniya Date: Sat, 26 Oct 2024 12:35:50 -0700 Subject: [PATCH 11/11] updated changelog --- CHANGELOG.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 96d4a3678..0f783b5e7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,10 +1,10 @@ # Changelog ## 0.10.0 (2024-10-25) -- Added `maybe-async-trait` procedural macro. -- [BREAKING] Refactored `maybe-async` macro into simpler `maybe-async` and `maybe-await` macros. -- Introduce `VectorCommitment` abstraction (#285). -- Add options for partitioned trace commitments (#336). +- [BREAKING] Refactored maybe-async macro into simpler maybe-async and maybe-await macros (#283). +- [BREAKING] Introduce `VectorCommitment` abstraction (#285). +- Added `maybe-async-trait` procedural macro (#334). +- [BREAKING] Add options for partitioned trace commitments (#336). - Updated minimum supported Rust version to 1.82. ## 0.9.3 (2024-09-25) - `utils/core` and `math` crates only
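As a closing illustration for the series: the two trailing bytes that patch 09 appends to the `ProofOptions` encoding round-trip cleanly through serialization — a minimal sketch, with illustrative parameter values:

```rust
use winter_air::{FieldExtension, ProofOptions};
use winter_utils::{Deserializable, Serializable, SliceReader};

fn main() {
    let options = ProofOptions::new(28, 8, 16, FieldExtension::None, 4, 31)
        .with_partitions(2, 8);

    // `write_into` now appends num_partitions and min_partition_size, and
    // `read_from` consumes them again via `with_partitions`.
    let bytes = options.to_bytes();
    let parsed = ProofOptions::read_from(&mut SliceReader::new(&bytes)).unwrap();
    assert_eq!(options, parsed);
}
```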