From 63be641f52e7c16f696d76cb23746ff3064aad6c Mon Sep 17 00:00:00 2001 From: polydez <155382956+polydez@users.noreply.github.com> Date: Sat, 30 Mar 2024 17:02:56 +0500 Subject: [PATCH] feat: writing account details on block applying --- Cargo.lock | 142 +++++++++++++++++ Cargo.toml | 1 + block-producer/src/batch_builder/batch.rs | 17 ++- block-producer/src/block.rs | 4 +- block-producer/src/block_builder/mod.rs | 8 +- .../src/block_builder/prover/block_witness.rs | 6 +- .../src/block_builder/prover/tests.rs | 2 +- block-producer/src/state_view/mod.rs | 15 +- .../src/state_view/tests/apply_block.rs | 6 +- block-producer/src/test_utils/block.rs | 19 +-- block-producer/src/test_utils/store.rs | 2 +- proto/proto/requests.proto | 2 + proto/src/domain/accounts.rs | 7 +- proto/src/generated/requests.rs | 3 + store/Cargo.toml | 1 + store/src/db/mod.rs | 5 +- store/src/db/sql.rs | 80 +++++++++- store/src/db/tests.rs | 144 +++++++++++++++++- store/src/server/api.rs | 5 + store/src/state.rs | 5 +- 20 files changed, 421 insertions(+), 53 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 62d29d6e5..4a189e9ad 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -409,6 +409,15 @@ dependencies = [ "bytemuck", ] +[[package]] +name = "atomic-polyfill" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8cf2bce30dfe09ef0bfaef228b9d414faaf7e563035494d7fe092dba54b300f4" +dependencies = [ + "critical-section", +] + [[package]] name = "autocfg" version = "1.2.0" @@ -598,6 +607,12 @@ version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d6d68c57235a3a081186990eca2867354726650f42f7516ca50c28d6281fd15" +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + [[package]] name = "bytes" version = "1.6.0" @@ -703,6 +718,12 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce" +[[package]] +name = "cobs" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67ba02a97a2bd10f4b59b25c7973101c79642302776489e030cd13cdab09ed15" + [[package]] name = "colorchoice" version = "1.0.0" @@ -768,6 +789,12 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "critical-section" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7059fff8937831a9ae6f0fe4d658ffabf58f2ca96aa9dec1c889f936f705f216" + [[package]] name = "crossbeam-deque" version = "0.8.5" @@ -925,6 +952,12 @@ version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a" +[[package]] +name = "embedded-io" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef1a6892d9eef45c8fa6b9e0086428a2cca8491aca8f787c534a3d6d0bcb3ced" + [[package]] name = "encoding_rs" version = "0.8.33" @@ -934,6 +967,29 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "env_filter" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a009aa4810eb158359dda09d0c87378e4bbb89b5a801f016885a4707ba24f7ea" +dependencies = [ + "log", + "regex", +] + +[[package]] +name = "env_logger" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"38b35839ba51819680ba087cd351788c9a3c476841207e0b8cee0b04722343b9" +dependencies = [ + "anstream", + "anstyle", + "env_filter", + "humantime", + "log", +] + [[package]] name = "equivalent" version = "1.0.1" @@ -1113,6 +1169,15 @@ dependencies = [ "tracing", ] +[[package]] +name = "hash32" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0c35f58762feb77d74ebe43bdbc3210f09be9fe6742234d573bacc26ed92b67" +dependencies = [ + "byteorder", +] + [[package]] name = "hashbrown" version = "0.12.3" @@ -1138,6 +1203,20 @@ dependencies = [ "hashbrown 0.14.3", ] +[[package]] +name = "heapless" +version = "0.7.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdc6457c0eb62c71aac4bc17216026d8410337c4126773b9c5daba343f17964f" +dependencies = [ + "atomic-polyfill", + "hash32", + "rustc_version", + "serde", + "spin", + "stable_deref_trait", +] + [[package]] name = "heck" version = "0.4.1" @@ -1211,6 +1290,12 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + [[package]] name = "hyper" version = "0.14.28" @@ -1613,6 +1698,23 @@ dependencies = [ "miden-stdlib", ] +[[package]] +name = "miden-mock" +version = "0.2.0" +source = "git+https://github.com/0xPolygonMiden/miden-base.git?branch=next#acfb52888ad4d76291186c1cae33d98a193e9921" +dependencies = [ + "env_logger", + "hex", + "miden-lib 0.2.0", + "miden-objects 0.2.0", + "miden-processor", + "miden-prover", + "postcard", + "rand", + "rand_pcg", + "winter-rand-utils", +] + [[package]] name = "miden-node" version = "0.2.0" @@ -1752,6 +1854,7 @@ dependencies = [ "figment", "hex", "miden-lib 0.2.0", + "miden-mock", "miden-node-proto 0.2.0", "miden-node-utils 0.2.0", "miden-objects 0.2.0", @@ -1825,11 +1928,13 @@ name = "miden-objects" version = "0.2.0" source = "git+https://github.com/0xPolygonMiden/miden-base.git?branch=next#3175580f3eae7b2dfa7885e7959bce671afff433" dependencies = [ + "log", "miden-assembly", "miden-core", "miden-crypto", "miden-processor", "miden-verifier", + "serde", "winter-rand-utils", ] @@ -2189,6 +2294,18 @@ version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" +[[package]] +name = "postcard" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a55c51ee6c0db07e68448e336cf8ea4131a620edefebf9893e759b2d793420f8" +dependencies = [ + "cobs", + "embedded-io", + "heapless", + "serde", +] + [[package]] name = "powerfmt" version = "0.2.0" @@ -2401,6 +2518,16 @@ dependencies = [ "getrandom", ] +[[package]] +name = "rand_pcg" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59cad018caf63deb318e5a4586d99a24424a364f40f1e5778c29aca23f4fc73e" +dependencies = [ + "rand_core", + "serde", +] + [[package]] name = "rand_xorshift" version = "0.3.0" @@ -2716,6 +2843,21 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" +dependencies = [ + "lock_api", +] + +[[package]] +name = "stable_deref_trait" 
+version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + [[package]] name = "strsim" version = "0.11.0" diff --git a/Cargo.toml b/Cargo.toml index 6933bce14..333f4a890 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -25,6 +25,7 @@ exclude = [".github/"] [workspace.dependencies] miden-air = { version = "0.8", default-features = false } miden-lib = { git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "next" } +miden-mock = { git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "next" } miden-objects = { git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "next" } miden-processor = { version = "0.8" } miden-stdlib = { version = "0.8", default-features = false } diff --git a/block-producer/src/batch_builder/batch.rs b/block-producer/src/batch_builder/batch.rs index 2f4752f1d..4976a15ab 100644 --- a/block-producer/src/batch_builder/batch.rs +++ b/block-producer/src/batch_builder/batch.rs @@ -7,6 +7,7 @@ use miden_objects::{ merkle::SimpleSmt, }, notes::{NoteEnvelope, Nullifier}, + transaction::AccountDetails, Digest, BATCH_OUTPUT_NOTES_TREE_DEPTH, MAX_NOTES_PER_BATCH, }; use tracing::instrument; @@ -55,6 +56,7 @@ impl TransactionBatch { AccountStates { initial_state: tx.initial_account_hash(), final_state: tx.final_account_hash(), + details: tx.account_details().cloned(), }, ) }) @@ -108,12 +110,14 @@ impl TransactionBatch { .map(|(account_id, account_states)| (*account_id, account_states.initial_state)) } - /// Returns an iterator over (account_id, new_state_hash) tuples for accounts that were + /// Returns an iterator over (account_id, details, new_state_hash) tuples for accounts that were /// modified in this transaction batch. - pub fn updated_accounts(&self) -> impl Iterator + '_ { - self.updated_accounts - .iter() - .map(|(account_id, account_states)| (*account_id, account_states.final_state)) + pub fn updated_accounts( + &self + ) -> impl Iterator, Digest)> + '_ { + self.updated_accounts.iter().map(|(account_id, account_states)| { + (*account_id, account_states.details.clone(), account_states.final_state) + }) } /// Returns an iterator over produced nullifiers for all consumed notes. @@ -147,8 +151,9 @@ impl TransactionBatch { /// account. /// /// TODO: should this be moved into domain objects? 
-#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] +#[derive(Debug, Clone, PartialEq, Eq)] struct AccountStates { initial_state: Digest, final_state: Digest, + details: Option, } diff --git a/block-producer/src/block.rs b/block-producer/src/block.rs index 1f1c677b6..99b21ce60 100644 --- a/block-producer/src/block.rs +++ b/block-producer/src/block.rs @@ -9,6 +9,7 @@ use miden_objects::{ accounts::AccountId, crypto::merkle::{MerklePath, MmrPeaks, SmtProof}, notes::{NoteEnvelope, Nullifier}, + transaction::AccountDetails, BlockHeader, Digest, }; @@ -17,11 +18,10 @@ use crate::store::BlockInputsError; #[derive(Debug, Clone)] pub struct Block { pub header: BlockHeader, - pub updated_accounts: Vec<(AccountId, Digest)>, + pub updated_accounts: Vec<(AccountId, Option, Digest)>, pub created_notes: BTreeMap, pub produced_nullifiers: Vec, // TODO: - // - full states for updated public accounts // - full states for created public notes // - zk proof } diff --git a/block-producer/src/block_builder/mod.rs b/block-producer/src/block_builder/mod.rs index d92bd7943..f3924e81f 100644 --- a/block-producer/src/block_builder/mod.rs +++ b/block-producer/src/block_builder/mod.rs @@ -2,7 +2,9 @@ use std::sync::Arc; use async_trait::async_trait; use miden_node_utils::formatting::{format_array, format_blake3_digest}; -use miden_objects::{accounts::AccountId, notes::Nullifier, Digest, MAX_NOTES_PER_BATCH}; +use miden_objects::{ + accounts::AccountId, notes::Nullifier, transaction::AccountDetails, Digest, MAX_NOTES_PER_BATCH, +}; use tracing::{debug, info, instrument}; use crate::{ @@ -77,7 +79,7 @@ where batches = %format_array(batches.iter().map(|batch| format_blake3_digest(batch.id()))), ); - let updated_accounts: Vec<(AccountId, Digest)> = + let updated_accounts: Vec<(AccountId, Option, Digest)> = batches.iter().flat_map(TransactionBatch::updated_accounts).collect(); let created_notes = batches .iter() @@ -95,7 +97,7 @@ where let block_inputs = self .store .get_block_inputs( - updated_accounts.iter().map(|(account_id, _)| account_id), + updated_accounts.iter().map(|(account_id, _, _)| account_id), produced_nullifiers.iter(), ) .await?; diff --git a/block-producer/src/block_builder/prover/block_witness.rs b/block-producer/src/block_builder/prover/block_witness.rs index 87bcbcd0e..25f18dccb 100644 --- a/block-producer/src/block_builder/prover/block_witness.rs +++ b/block-producer/src/block_builder/prover/block_witness.rs @@ -37,7 +37,7 @@ impl BlockWitness { let updated_accounts = { let mut account_initial_states: BTreeMap = - batches.iter().flat_map(|batch| batch.account_initial_states()).collect(); + batches.iter().flat_map(TransactionBatch::account_initial_states).collect(); let mut account_merkle_proofs: BTreeMap = block_inputs .accounts @@ -47,8 +47,8 @@ impl BlockWitness { batches .iter() - .flat_map(|batch| batch.updated_accounts()) - .map(|(account_id, final_state_hash)| { + .flat_map(TransactionBatch::updated_accounts) + .map(|(account_id, _details, final_state_hash)| { let initial_state_hash = account_initial_states .remove(&account_id) .expect("already validated that key exists"); diff --git a/block-producer/src/block_builder/prover/tests.rs b/block-producer/src/block_builder/prover/tests.rs index 5c698cfa0..a085d436e 100644 --- a/block-producer/src/block_builder/prover/tests.rs +++ b/block-producer/src/block_builder/prover/tests.rs @@ -235,7 +235,7 @@ async fn test_compute_account_root_success() { account_ids .iter() .zip(account_final_states.iter()) - .map(|(&account_id, &account_hash)| 
(account_id, account_hash.into())) + .map(|(&account_id, &account_hash)| (account_id, None, account_hash.into())) .collect(), ) .build(); diff --git a/block-producer/src/state_view/mod.rs b/block-producer/src/state_view/mod.rs index bafbf4e2d..9742a626e 100644 --- a/block-producer/src/state_view/mod.rs +++ b/block-producer/src/state_view/mod.rs @@ -121,24 +121,23 @@ where &self, block: Block, ) -> Result<(), ApplyBlockError> { - self.store.apply_block(block.clone()).await?; + let account_ids_in_block: Vec = + block.updated_accounts.iter().map(|(account_id, _, _)| *account_id).collect(); + let produced_nullifiers = block.produced_nullifiers.clone(); + + self.store.apply_block(block).await?; let mut locked_accounts_in_flight = self.accounts_in_flight.write().await; let mut locked_nullifiers_in_flight = self.nullifiers_in_flight.write().await; // Remove account ids of transactions in block - let account_ids_in_block = block - .updated_accounts - .iter() - .map(|(account_id, _final_account_hash)| account_id); - - for account_id in account_ids_in_block { + for account_id in account_ids_in_block.iter() { let was_in_flight = locked_accounts_in_flight.remove(account_id); debug_assert!(was_in_flight); } // Remove new nullifiers of transactions in block - for nullifier in block.produced_nullifiers.iter() { + for nullifier in produced_nullifiers.iter() { let was_in_flight = locked_nullifiers_in_flight.remove(nullifier); debug_assert!(was_in_flight); } diff --git a/block-producer/src/state_view/tests/apply_block.rs b/block-producer/src/state_view/tests/apply_block.rs index ac4a52348..410d2219d 100644 --- a/block-producer/src/state_view/tests/apply_block.rs +++ b/block-producer/src/state_view/tests/apply_block.rs @@ -32,7 +32,7 @@ async fn test_apply_block_ab1() { .await .account_updates( std::iter::once(account) - .map(|mock_account| (mock_account.id, mock_account.states[1])) + .map(|mock_account| (mock_account.id, None, mock_account.states[1])) .collect(), ) .build(); @@ -75,7 +75,7 @@ async fn test_apply_block_ab2() { .account_updates( accounts_in_block .into_iter() - .map(|mock_account| (mock_account.id, mock_account.states[1])) + .map(|mock_account| (mock_account.id, None, mock_account.states[1])) .collect(), ) .build(); @@ -120,7 +120,7 @@ async fn test_apply_block_ab3() { accounts .clone() .into_iter() - .map(|mock_account| (mock_account.id, mock_account.states[1])) + .map(|mock_account| (mock_account.id, None, mock_account.states[1])) .collect(), ) .build(); diff --git a/block-producer/src/test_utils/block.rs b/block-producer/src/test_utils/block.rs index a158a8d72..9aa12e1c4 100644 --- a/block-producer/src/test_utils/block.rs +++ b/block-producer/src/test_utils/block.rs @@ -4,6 +4,7 @@ use miden_objects::{ accounts::AccountId, crypto::merkle::{Mmr, SimpleSmt}, notes::{NoteEnvelope, Nullifier}, + transaction::AccountDetails, BlockHeader, Digest, ACCOUNT_TREE_DEPTH, BLOCK_OUTPUT_NOTES_TREE_DEPTH, MAX_NOTES_PER_BATCH, ONE, ZERO, }; @@ -25,11 +26,11 @@ pub async fn build_expected_block_header( let last_block_header = *store.last_block_header.read().await; // Compute new account root - let updated_accounts: Vec<(AccountId, Digest)> = + let updated_accounts: Vec<(AccountId, Option, Digest)> = batches.iter().flat_map(TransactionBatch::updated_accounts).collect(); let new_account_root = { let mut store_accounts = store.accounts.read().await.clone(); - for (account_id, new_account_state) in updated_accounts { + for (account_id, _details, new_account_state) in updated_accounts { 
store_accounts.insert(account_id.into(), new_account_state.into()); } @@ -67,14 +68,14 @@ pub async fn build_actual_block_header( store: &MockStoreSuccess, batches: Vec, ) -> BlockHeader { - let updated_accounts: Vec<(AccountId, Digest)> = - batches.iter().flat_map(|batch| batch.updated_accounts()).collect(); + let updated_accounts: Vec<(AccountId, Option, Digest)> = + batches.iter().flat_map(TransactionBatch::updated_accounts).collect(); let produced_nullifiers: Vec = - batches.iter().flat_map(|batch| batch.produced_nullifiers()).collect(); + batches.iter().flat_map(TransactionBatch::produced_nullifiers).collect(); let block_inputs_from_store: BlockInputs = store .get_block_inputs( - updated_accounts.iter().map(|(account_id, _)| account_id), + updated_accounts.iter().map(|(account_id, _, _)| account_id), produced_nullifiers.iter(), ) .await @@ -91,7 +92,7 @@ pub struct MockBlockBuilder { store_chain_mmr: Mmr, last_block_header: BlockHeader, - updated_accounts: Option>, + updated_accounts: Option, Digest)>>, created_notes: Option>, produced_nullifiers: Option>, } @@ -111,9 +112,9 @@ impl MockBlockBuilder { pub fn account_updates( mut self, - updated_accounts: Vec<(AccountId, Digest)>, + updated_accounts: Vec<(AccountId, Option, Digest)>, ) -> Self { - for &(account_id, new_account_state) in updated_accounts.iter() { + for &(account_id, ref _details, new_account_state) in updated_accounts.iter() { self.store_accounts.insert(account_id.into(), new_account_state.into()); } diff --git a/block-producer/src/test_utils/store.rs b/block-producer/src/test_utils/store.rs index 00f26a2a1..9e7844676 100644 --- a/block-producer/src/test_utils/store.rs +++ b/block-producer/src/test_utils/store.rs @@ -180,7 +180,7 @@ impl ApplyBlock for MockStoreSuccess { let mut locked_produced_nullifiers = self.produced_nullifiers.write().await; // update accounts - for &(account_id, account_hash) in block.updated_accounts.iter() { + for &(account_id, ref _details, account_hash) in block.updated_accounts.iter() { locked_accounts.insert(account_id.into(), account_hash.into()); } debug_assert_eq!(locked_accounts.root(), block.header.account_root()); diff --git a/proto/proto/requests.proto b/proto/proto/requests.proto index c40b6e6d3..8c271696d 100644 --- a/proto/proto/requests.proto +++ b/proto/proto/requests.proto @@ -10,6 +10,8 @@ import "note.proto"; message AccountUpdate { account.AccountId account_id = 1; digest.Digest account_hash = 2; + // Details for public (on-chain) account. 
+ account.AccountDetails details = 3; } message ApplyBlockRequest { diff --git a/proto/src/domain/accounts.rs b/proto/src/domain/accounts.rs index 3c4257f00..57c8b46c6 100644 --- a/proto/src/domain/accounts.rs +++ b/proto/src/domain/accounts.rs @@ -70,11 +70,14 @@ impl TryFrom for AccountId { // INTO ACCOUNT UPDATE // ================================================================================================ -impl From<(AccountId, Digest)> for AccountUpdate { - fn from((account_id, account_hash): (AccountId, Digest)) -> Self { +impl From<(AccountId, Option, Digest)> for AccountUpdate { + fn from( + (account_id, details, account_hash): (AccountId, Option, Digest) + ) -> Self { Self { account_id: Some(account_id.into()), account_hash: Some(account_hash.into()), + details: details.as_ref().map(Into::into), } } } diff --git a/proto/src/generated/requests.rs b/proto/src/generated/requests.rs index b9f9fa20b..796ef4bb8 100644 --- a/proto/src/generated/requests.rs +++ b/proto/src/generated/requests.rs @@ -6,6 +6,9 @@ pub struct AccountUpdate { pub account_id: ::core::option::Option, #[prost(message, optional, tag = "2")] pub account_hash: ::core::option::Option, + /// Details for public (on-chain) account. + #[prost(message, optional, tag = "3")] + pub details: ::core::option::Option, } #[derive(Eq, PartialOrd, Ord, Hash)] #[allow(clippy::derive_partial_eq_without_eq)] diff --git a/store/Cargo.toml b/store/Cargo.toml index db5f8b0a3..20b6fb9b2 100644 --- a/store/Cargo.toml +++ b/store/Cargo.toml @@ -42,4 +42,5 @@ tracing-subscriber = { workspace = true } [dev-dependencies] figment = { version = "0.10", features = ["toml", "env", "test"] } +miden-mock = { workspace = true } miden-node-utils = { path = "../utils", version = "0.2", features = ["tracing-forest"] } diff --git a/store/src/db/mod.rs b/store/src/db/mod.rs index 46d2db372..36fddc4c9 100644 --- a/store/src/db/mod.rs +++ b/store/src/db/mod.rs @@ -5,6 +5,7 @@ use miden_objects::{ accounts::Account, crypto::{hash::rpo::RpoDigest, merkle::MerklePath, utils::Deserializable}, notes::Nullifier, + transaction::AccountDetails, BlockHeader, GENESIS_BLOCK, }; use rusqlite::vtab::array; @@ -260,7 +261,7 @@ impl Db { block_header: BlockHeader, notes: Vec, nullifiers: Vec, - accounts: Vec<(AccountId, RpoDigest)>, + accounts: Vec<(AccountId, Option, RpoDigest)>, ) -> Result<()> { self.pool .get() @@ -343,7 +344,7 @@ impl Db { let transaction = conn.transaction()?; let accounts: Vec<_> = account_smt .leaves() - .map(|(account_id, state_hash)| (account_id, state_hash.into())) + .map(|(account_id, state_hash)| (account_id, None, state_hash.into())) .collect(); sql::apply_block( &transaction, diff --git a/store/src/db/sql.rs b/store/src/db/sql.rs index 5707111b5..fd11c9da8 100644 --- a/store/src/db/sql.rs +++ b/store/src/db/sql.rs @@ -6,6 +6,7 @@ use miden_objects::{ assets::AssetVault, crypto::{hash::rpo::RpoDigest, merkle::MerklePath}, notes::Nullifier, + transaction::AccountDetails, utils::serde::{Deserializable, Serializable}, BlockHeader, }; @@ -154,16 +155,83 @@ pub fn select_account_details( /// transaction. 
pub fn upsert_accounts( transaction: &Transaction, - accounts: &[(AccountId, RpoDigest)], + accounts: &[(AccountId, Option, RpoDigest)], block_num: BlockNumber, ) -> Result { - let mut stmt = transaction.prepare("INSERT OR REPLACE INTO accounts (account_id, account_hash, block_num) VALUES (?1, ?2, ?3);")?; + let mut acc_stmt = transaction.prepare( + "INSERT OR REPLACE INTO accounts (account_id, account_hash, block_num) VALUES (?1, ?2, ?3);", + )?; + let mut select_details_stmt = transaction.prepare( + "SELECT nonce, vault, storage, code FROM account_details WHERE account_id = ?1;", + )?; + let mut new_details_stmt = transaction.prepare( + "INSERT INTO account_details (account_id, nonce, vault, storage, code) VALUES (?1, ?2, ?3, ?4, ?5);" + )?; + let mut update_details_stmt = transaction.prepare( + "UPDATE account_details SET nonce = ?2, vault = ?3, storage = ?4 WHERE account_id = ?1;", + )?; let mut count = 0; - for (account_id, account_hash) in accounts.iter() { - count += - stmt.execute(params![u64_to_value(*account_id), account_hash.to_bytes(), block_num])? + for (account_id, details, account_hash) in accounts.iter() { + let account_id = *account_id; + count += acc_stmt.execute(params![ + u64_to_value(account_id), + account_hash.to_bytes(), + block_num + ])?; + if let Some(details) = details { + match details { + AccountDetails::Full(account) => { + debug_assert!(account.is_new()); + debug_assert_eq!(account_id, u64::from(account.id())); + let inserted = new_details_stmt.execute(params![ + u64_to_value(account.id().into()), + u64_to_value(account.nonce().as_int()), + account.vault().to_bytes(), + account.storage().to_bytes(), + account.code().to_bytes(), + ])?; + + debug_assert_eq!(inserted, 1); + }, + AccountDetails::Delta(delta) => { + let mut rows = select_details_stmt.query(params![u64_to_value(account_id)])?; + let Some(row) = rows.next()? else { + return Err(DatabaseError::AccountNotOnChain(account_id)); + }; + + let mut account = Account::new( + account_id.try_into()?, + AssetVault::read_from_bytes(row.get_ref(1)?.as_blob()?)?, + AccountStorage::read_from_bytes(row.get_ref(2)?.as_blob()?)?, + AccountCode::read_from_bytes(row.get_ref(3)?.as_blob()?)?, + column_value_as_u64(row, 0)? 
+ .try_into() + .map_err(DatabaseError::CorruptedData)?, + ); + + account.apply_delta(delta)?; + + if &account.hash() != account_hash { + return Err(DatabaseError::ApplyBlockFailedAccountHashesMismatch { + calculated: account.hash(), + expected: *account_hash, + }); + } + + let updated = update_details_stmt.execute(params![ + u64_to_value(account.id().into()), + u64_to_value(account.nonce().as_int()), + account.vault().to_bytes(), + account.storage().to_bytes(), + ])?; + + debug_assert_eq!(updated, 1); + }, + } + } } + Ok(count) } @@ -567,7 +635,7 @@ pub fn apply_block( block_header: &BlockHeader, notes: &[Note], nullifiers: &[Nullifier], - accounts: &[(AccountId, RpoDigest)], + accounts: &[(AccountId, Option, RpoDigest)], ) -> Result { let mut count = 0; count += insert_block_header(transaction, block_header)?; diff --git a/store/src/db/tests.rs b/store/src/db/tests.rs index 57af54176..406a72b34 100644 --- a/store/src/db/tests.rs +++ b/store/src/db/tests.rs @@ -1,15 +1,26 @@ +use miden_lib::transaction::TransactionKernel; +use miden_mock::mock::account::{ + generate_account_seed, mock_account_code, AccountSeedType, + ACCOUNT_ID_NON_FUNGIBLE_FAUCET_ON_CHAIN, +}; use miden_objects::{ + accounts::{ + Account, AccountDelta, AccountId, AccountStorage, AccountStorageDelta, AccountVaultDelta, + ACCOUNT_ID_FUNGIBLE_FAUCET_ON_CHAIN, + }, + assets::{Asset, AssetVault, FungibleAsset, NonFungibleAsset, NonFungibleAssetDetails}, crypto::{ hash::rpo::RpoDigest, merkle::{LeafIndex, MerklePath, SimpleSmt}, }, notes::{Nullifier, NOTE_LEAF_DEPTH}, - BlockHeader, Felt, FieldElement, + transaction::AccountDetails, + BlockHeader, Felt, FieldElement, Word, ONE, ZERO, }; use rusqlite::{vtab::array, Connection}; use super::{sql, AccountInfo, Note, NoteCreated, NullifierInfo}; -use crate::db::migrations; +use crate::{db::migrations, errors::DatabaseError}; fn create_db() -> Connection { let mut conn = Connection::open_in_memory().unwrap(); @@ -170,7 +181,8 @@ fn test_sql_select_accounts() { }); let transaction = conn.transaction().unwrap(); - let res = sql::upsert_accounts(&transaction, &[(account_id, account_hash)], block_num); + let res = + sql::upsert_accounts(&transaction, &[(account_id, None, account_hash)], block_num); assert_eq!(res.unwrap(), 1, "One element must have been inserted"); transaction.commit().unwrap(); let accounts = sql::select_accounts(&mut conn).unwrap(); @@ -178,6 +190,124 @@ fn test_sql_select_accounts() { } } +#[test] +fn test_sql_public_account_details() { + let mut conn = create_db(); + + let block_num = 1; + create_block(&mut conn, block_num); + + let (account_id, _seed) = + generate_account_seed(AccountSeedType::RegularAccountUpdatableCodeOnChain); + let fungible_faucet_id = AccountId::try_from(ACCOUNT_ID_FUNGIBLE_FAUCET_ON_CHAIN).unwrap(); + let non_fungible_faucet_id = + AccountId::try_from(ACCOUNT_ID_NON_FUNGIBLE_FAUCET_ON_CHAIN).unwrap(); + + let mut storage = AccountStorage::new(vec![]).unwrap(); + storage.set_item(1, num_to_word(1)).unwrap(); + storage.set_item(3, num_to_word(3)).unwrap(); + storage.set_item(5, num_to_word(5)).unwrap(); + + let nft1 = Asset::NonFungible( + NonFungibleAsset::new( + &NonFungibleAssetDetails::new(non_fungible_faucet_id, vec![1, 2, 3]).unwrap(), + ) + .unwrap(), + ); + + let mut account = Account::new( + account_id, + AssetVault::new(&[ + Asset::Fungible(FungibleAsset::new(fungible_faucet_id, 150).unwrap()), + nft1, + ]) + .unwrap(), + storage, + mock_account_code(&TransactionKernel::assembler()), + ZERO, + ); + + // test querying empty table + let 
res = sql::get_account_details(&mut conn, account_id.into()); + assert!(matches!(res, Err(DatabaseError::AccountNotOnChain(_)))); + + let transaction = conn.transaction().unwrap(); + let inserted = sql::upsert_accounts( + &transaction, + &[(account_id.into(), Some(AccountDetails::Full(account.clone())), account.hash())], + block_num, + ) + .unwrap(); + + assert_eq!(inserted, 1, "One element must have been inserted"); + + transaction.commit().unwrap(); + + let account_read = sql::get_account_details(&mut conn, account_id.into()).unwrap(); + + // TODO: substitute by a single check, once code imports deserialization is fixed: + // assert_eq!(account_read, account); + assert_eq!(account_read.id(), account.id()); + assert_eq!(account_read.vault(), account.vault()); + assert_eq!(account_read.storage(), account.storage()); + assert_eq!(account_read.nonce(), account.nonce()); + + let storage_delta = AccountStorageDelta { + cleared_items: vec![3], + updated_items: vec![(4, num_to_word(5)), (5, num_to_word(6))], + }; + + let nft2 = Asset::NonFungible( + NonFungibleAsset::new( + &NonFungibleAssetDetails::new(non_fungible_faucet_id, vec![4, 5, 6]).unwrap(), + ) + .unwrap(), + ); + + let vault_delta = AccountVaultDelta { + added_assets: vec![nft2], + removed_assets: vec![nft1], + }; + + let delta = AccountDelta::new(storage_delta, vault_delta, Some(ONE)).unwrap(); + + account.apply_delta(&delta).unwrap(); + + let transaction = conn.transaction().unwrap(); + let inserted = sql::upsert_accounts( + &transaction, + &[(account_id.into(), Some(AccountDetails::Delta(delta.clone())), account.hash())], + block_num, + ) + .unwrap(); + + assert_eq!(inserted, 1, "One element must have been inserted"); + + transaction.commit().unwrap(); + + let mut account_read = sql::get_account_details(&mut conn, account_id.into()).unwrap(); + + assert_eq!(account_read.id(), account.id()); + assert_eq!(account_read.vault(), account.vault()); + assert_eq!(account_read.nonce(), account.nonce()); + + // Cleared item was not serialized, check it and apply delta only with clear item second time: + assert_eq!(account_read.storage().get_item(3), RpoDigest::default()); + + let storage_delta = AccountStorageDelta { + cleared_items: vec![3], + updated_items: vec![], + }; + account_read + .apply_delta( + &AccountDelta::new(storage_delta, AccountVaultDelta::default(), Some(Felt::new(2))) + .unwrap(), + ) + .unwrap(); + + assert_eq!(account_read.storage(), account.storage()); +} + #[test] fn test_sql_select_nullifiers_by_block_range() { let mut conn = create_db(); @@ -385,7 +515,7 @@ fn test_db_account() { let transaction = conn.transaction().unwrap(); let row_count = - sql::upsert_accounts(&transaction, &[(account_id, account_hash)], block_num).unwrap(); + sql::upsert_accounts(&transaction, &[(account_id, None, account_hash)], block_num).unwrap(); transaction.commit().unwrap(); assert_eq!(row_count, 1); @@ -518,7 +648,11 @@ fn test_notes() { // UTILITIES // ------------------------------------------------------------------------------------------- fn num_to_rpo_digest(n: u64) -> RpoDigest { - RpoDigest::new([Felt::ZERO, Felt::ZERO, Felt::ZERO, Felt::new(n)]) + RpoDigest::new(num_to_word(n)) +} + +fn num_to_word(n: u64) -> Word { + [Felt::ZERO, Felt::ZERO, Felt::ZERO, Felt::new(n)] } fn num_to_nullifier(n: u64) -> Nullifier { diff --git a/store/src/server/api.rs b/store/src/server/api.rs index e5f16c537..001d20555 100644 --- a/store/src/server/api.rs +++ b/store/src/server/api.rs @@ -216,11 +216,16 @@ impl api_server::Api for StoreApi { 
.accounts .into_iter() .map(|account_update| { + let account_details: Option = + account_update.details.as_ref().map(TryInto::try_into).transpose().map_err( + |err: ConversionError| Status::invalid_argument(err.to_string()), + )?; let account_state: AccountState = account_update .try_into() .map_err(|err: ConversionError| Status::invalid_argument(err.to_string()))?; Ok(( account_state.account_id.into(), + account_details, account_state .account_hash .ok_or(invalid_argument("Account update missing account hash"))?, diff --git a/store/src/state.rs b/store/src/state.rs index baddf17ba..0d96f1552 100644 --- a/store/src/state.rs +++ b/store/src/state.rs @@ -13,6 +13,7 @@ use miden_objects::{ merkle::{LeafIndex, Mmr, MmrDelta, MmrPeaks, SimpleSmt, SmtProof, ValuePath}, }, notes::{NoteMetadata, NoteType, Nullifier, NOTE_LEAF_DEPTH}, + transaction::AccountDetails, AccountError, BlockHeader, NoteError, Word, ACCOUNT_TREE_DEPTH, ZERO, }; use tokio::{ @@ -106,7 +107,7 @@ impl State { &self, block_header: BlockHeader, nullifiers: Vec, - accounts: Vec<(AccountId, RpoDigest)>, + accounts: Vec<(AccountId, Option, RpoDigest)>, notes: Vec, ) -> Result<(), ApplyBlockError> { let _ = self.writer.try_lock().map_err(|_| ApplyBlockError::ConcurrentWrite)?; @@ -180,7 +181,7 @@ impl State { // update account tree let mut account_tree = inner.account_tree.clone(); - for (account_id, account_hash) in accounts.iter() { + for (account_id, _, account_hash) in accounts.iter() { account_tree.insert(LeafIndex::new_max_depth(*account_id), account_hash.into()); }
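
The central type change in this patch is that updated accounts are carried as (AccountId, Option<AccountDetails>, Digest) triples instead of (AccountId, Digest) pairs: private accounts pass None, newly created public accounts pass AccountDetails::Full, and later updates to public accounts pass AccountDetails::Delta, which the store's upsert_accounts applies on top of the previously stored state before checking the resulting hash. The short sketch below is not part of the patch; it only illustrates the tuple shape and how consumers that do not need the details (for example the account-tree update in State::apply_block) destructure it. AccountId, AccountDetails, and Digest here are deliberately simplified stand-ins for the miden-objects types, so the example compiles on its own with plain rustc.

// Simplified stand-ins for the miden-objects types; only the tuple layout matters here.
type AccountId = u64;
type Digest = [u8; 32];

enum AccountDetails {
    Full(Vec<u8>),  // stand-in for a full new on-chain account
    Delta(Vec<u8>), // stand-in for a delta against the stored on-chain state
}

// Consumers that only need the account tree ignore the middle element,
// mirroring `updated_accounts.iter().map(|(account_id, _, _)| ...)` in the patch.
fn account_ids(updated: &[(AccountId, Option<AccountDetails>, Digest)]) -> Vec<AccountId> {
    updated.iter().map(|(id, _details, _hash)| *id).collect()
}

fn main() {
    let updated: Vec<(AccountId, Option<AccountDetails>, Digest)> = vec![
        // Private account: hash only, no details.
        (1, None, [0u8; 32]),
        // New public account: full details accompany the new state hash.
        (2, Some(AccountDetails::Full(vec![1, 2, 3])), [0u8; 32]),
        // Existing public account: a delta is applied to the stored state.
        (3, Some(AccountDetails::Delta(vec![9])), [0u8; 32]),
    ];
    assert_eq!(account_ids(&updated), vec![1, 2, 3]);
}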