From 15704e5b412f72bceca4eb35c081e6a0076e2ce5 Mon Sep 17 00:00:00 2001 From: JereSalo Date: Fri, 21 Feb 2025 12:55:51 -0300 Subject: [PATCH 01/16] start implementing Blockchain struct --- .tool-versions => .tool-versions~ | 2 +- Cargo.lock | 1 + cmd/ef_tests/blockchain/test_runner.rs | 4 +- crates/blockchain/blockchain.rs | 61 ++++++++++++++++++++++++++ crates/blockchain/smoke_test.rs | 46 +++++++++++++------ crates/l2/prover/tests/perf_zkvm.rs | 4 +- crates/l2/utils/prover/save_state.rs | 4 +- crates/l2/utils/test_data_io.rs | 4 +- 8 files changed, 103 insertions(+), 23 deletions(-) rename .tool-versions => .tool-versions~ (57%) diff --git a/.tool-versions b/.tool-versions~ similarity index 57% rename from .tool-versions rename to .tool-versions~ index 6374ee5807..0e6030dd6e 100644 --- a/.tool-versions +++ b/.tool-versions~ @@ -1,2 +1,2 @@ -rust 1.82.0 +rust 1.81.0 # golang 1.23.2 diff --git a/Cargo.lock b/Cargo.lock index fc6b495a59..66ff2fb3d6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2330,6 +2330,7 @@ dependencies = [ "ethrex-common", "ethrex-rlp", "ethrex-storage", + "ethrex-vm", "hex", "lazy_static", "serde", diff --git a/cmd/ef_tests/blockchain/test_runner.rs b/cmd/ef_tests/blockchain/test_runner.rs index 5638ba645a..913be77200 100644 --- a/cmd/ef_tests/blockchain/test_runner.rs +++ b/cmd/ef_tests/blockchain/test_runner.rs @@ -1,7 +1,7 @@ use std::{collections::HashMap, path::Path}; use crate::types::{BlockWithRLP, TestUnit}; -use ethrex_blockchain::{add_block, fork_choice::apply_fork_choice}; +use ethrex_blockchain::{fork_choice::apply_fork_choice, Blockchain}; use ethrex_common::types::{ Account as CoreAccount, Block as CoreBlock, BlockHeader as CoreBlockHeader, }; @@ -33,7 +33,7 @@ pub fn run_ef_test(test_key: &str, test: &TestUnit) { let hash = block.hash(); // Attempt to add the block as the head of the chain - let chain_result = add_block(block, &store); + let chain_result = Blockchain::default().add_block(block, &store); match chain_result { Err(error) => { assert!( diff --git a/crates/blockchain/blockchain.rs b/crates/blockchain/blockchain.rs index ceb26499e6..0ed2f72fa3 100644 --- a/crates/blockchain/blockchain.rs +++ b/crates/blockchain/blockchain.rs @@ -18,12 +18,73 @@ use ethrex_common::H256; use ethrex_storage::error::StoreError; use ethrex_storage::Store; +use ethrex_vm::backends::EVM; use ethrex_vm::db::evm_state; use ethrex_vm::{backends::BlockExecutionResult, get_evm_backend_or_default}; //TODO: Implement a struct Chain or BlockChain to encapsulate //functionality and canonical chain state and config +pub struct Blockchain { + pub vm: EVM, +} + +impl Blockchain { + pub fn new(evm: EVM) -> Self { + Self { vm: evm } + } + + pub fn add_block(&self, block: &Block, storage: &Store) -> Result<(), ChainError> { + let block_hash = block.header.compute_block_hash(); + + // Validate if it can be the new head and find the parent + let Ok(parent_header) = find_parent_header(&block.header, storage) else { + // If the parent is not present, we store it as pending. 
+ storage.add_pending_block(block.clone())?; + return Err(ChainError::ParentNotFound); + }; + let mut state = evm_state(storage.clone(), block.header.parent_hash); + let chain_config = state.chain_config().map_err(ChainError::from)?; + + // Validate the block pre-execution + validate_block(block, &parent_header, &chain_config)?; + let BlockExecutionResult { + receipts, + requests, + account_updates, + } = self.vm.execute_block(block, &mut state)?; + + validate_gas_used(&receipts, &block.header)?; + + // Apply the account updates over the last block's state and compute the new state root + let new_state_root = state + .database() + .ok_or(ChainError::StoreError(StoreError::MissingStore))? + .apply_account_updates(block.header.parent_hash, &account_updates)? + .ok_or(ChainError::ParentStateNotFound)?; + + // Check state root matches the one in block header after execution + validate_state_root(&block.header, new_state_root)?; + + // Check receipts root matches the one in block header after execution + validate_receipts_root(&block.header, &receipts)?; + + // Processes requests from receipts, computes the requests_hash and compares it against the header + validate_requests_hash(&block.header, &chain_config, &requests)?; + + store_block(storage, block.clone())?; + store_receipts(storage, receipts, block_hash)?; + + Ok(()) + } +} + +impl Default for Blockchain { + fn default() -> Self { + Self { vm: EVM::default() } + } +} + /// Adds a new block to the store. It may or may not be canonical, as long as its ancestry links /// with the canonical chain and its parent's post-state is calculated. It doesn't modify the /// canonical chain/head. Fork choice needs to be updated for that in a separate step. diff --git a/crates/blockchain/smoke_test.rs b/crates/blockchain/smoke_test.rs index 9417225ed4..730ecfa173 100644 --- a/crates/blockchain/smoke_test.rs +++ b/crates/blockchain/smoke_test.rs @@ -3,11 +3,11 @@ mod blockchain_integration_test { use std::{fs::File, io::BufReader}; use crate::{ - add_block, error::{ChainError, InvalidForkChoice}, fork_choice::apply_fork_choice, is_canonical, latest_canonical_block_hash, payload::{build_payload, create_payload, BuildPayloadArgs}, + Blockchain, }; use ethrex_common::{ @@ -26,7 +26,7 @@ mod blockchain_integration_test { // Add first block. We'll make it canonical. let block_1a = new_block(&store, &genesis_header); let hash_1a = block_1a.hash(); - add_block(&block_1a, &store).unwrap(); + Blockchain::default().add_block(&block_1a, &store).unwrap(); store.set_canonical_block(1, hash_1a).unwrap(); let retrieved_1a = store.get_block_header(1).unwrap().unwrap(); @@ -36,7 +36,9 @@ mod blockchain_integration_test { // Add second block at height 1. Will not be canonical. let block_1b = new_block(&store, &genesis_header); let hash_1b = block_1b.hash(); - add_block(&block_1b, &store).expect("Could not add block 1b."); + Blockchain::default() + .add_block(&block_1b, &store) + .expect("Could not add block 1b."); let retrieved_1b = store.get_block_header_by_hash(hash_1b).unwrap().unwrap(); assert_ne!(retrieved_1a, retrieved_1b); @@ -45,7 +47,9 @@ mod blockchain_integration_test { // Add a third block at height 2, child to the non canonical block. 
let block_2 = new_block(&store, &block_1b.header); let hash_2 = block_2.hash(); - add_block(&block_2, &store).expect("Could not add block 2."); + Blockchain::default() + .add_block(&block_2, &store) + .expect("Could not add block 2."); let retrieved_2 = store.get_block_header_by_hash(hash_2).unwrap(); assert!(retrieved_2.is_some()); @@ -75,14 +79,14 @@ mod blockchain_integration_test { // Build a single valid block. let block_1 = new_block(&store, &genesis_header); let hash_1 = block_1.header.compute_block_hash(); - add_block(&block_1, &store).unwrap(); + Blockchain::default().add_block(&block_1, &store).unwrap(); apply_fork_choice(&store, hash_1, H256::zero(), H256::zero()).unwrap(); // Build a child, then change its parent, making it effectively a pending block. let mut block_2 = new_block(&store, &block_1.header); block_2.header.parent_hash = H256::random(); let hash_2 = block_2.header.compute_block_hash(); - let result = add_block(&block_2, &store); + let result = Blockchain::default().add_block(&block_2, &store); assert!(matches!(result, Err(ChainError::ParentNotFound))); // block 2 should now be pending. @@ -105,7 +109,7 @@ mod blockchain_integration_test { // Add first block. Not canonical. let block_1a = new_block(&store, &genesis_header); let hash_1a = block_1a.hash(); - add_block(&block_1a, &store).unwrap(); + Blockchain::default().add_block(&block_1a, &store).unwrap(); let retrieved_1a = store.get_block_header_by_hash(hash_1a).unwrap().unwrap(); assert!(!is_canonical(&store, 1, hash_1a).unwrap()); @@ -113,7 +117,9 @@ mod blockchain_integration_test { // Add second block at height 1. Canonical. let block_1b = new_block(&store, &genesis_header); let hash_1b = block_1b.hash(); - add_block(&block_1b, &store).expect("Could not add block 1b."); + Blockchain::default() + .add_block(&block_1b, &store) + .expect("Could not add block 1b."); apply_fork_choice(&store, hash_1b, genesis_hash, genesis_hash).unwrap(); let retrieved_1b = store.get_block_header(1).unwrap().unwrap(); @@ -125,7 +131,9 @@ mod blockchain_integration_test { // Add a third block at height 2, child to the canonical one. let block_2 = new_block(&store, &block_1b.header); let hash_2 = block_2.hash(); - add_block(&block_2, &store).expect("Could not add block 2."); + Blockchain::default() + .add_block(&block_2, &store) + .expect("Could not add block 2."); apply_fork_choice(&store, hash_2, genesis_hash, genesis_hash).unwrap(); let retrieved_2 = store.get_block_header_by_hash(hash_2).unwrap(); assert_eq!(latest_canonical_block_hash(&store).unwrap(), hash_2); @@ -160,12 +168,16 @@ mod blockchain_integration_test { // Add block at height 1. let block_1 = new_block(&store, &genesis_header); let hash_1 = block_1.hash(); - add_block(&block_1, &store).expect("Could not add block 1b."); + Blockchain::default() + .add_block(&block_1, &store) + .expect("Could not add block 1b."); // Add child at height 2. let block_2 = new_block(&store, &block_1.header); let hash_2 = block_2.hash(); - add_block(&block_2, &store).expect("Could not add block 2."); + Blockchain::default() + .add_block(&block_2, &store) + .expect("Could not add block 2."); assert!(!is_canonical(&store, 1, hash_1).unwrap()); assert!(!is_canonical(&store, 2, hash_2).unwrap()); @@ -201,12 +213,16 @@ mod blockchain_integration_test { // Add block at height 1. 
let block_1 = new_block(&store, &genesis_header); - add_block(&block_1, &store).expect("Could not add block 1b."); + Blockchain::default() + .add_block(&block_1, &store) + .expect("Could not add block 1b."); // Add child at height 2. let block_2 = new_block(&store, &block_1.header); let hash_2 = block_2.hash(); - add_block(&block_2, &store).expect("Could not add block 2."); + Blockchain::default() + .add_block(&block_2, &store) + .expect("Could not add block 2."); assert_eq!(latest_canonical_block_hash(&store).unwrap(), genesis_hash); @@ -218,7 +234,9 @@ mod blockchain_integration_test { // Add a new, non canonical block, starting from genesis. let block_1b = new_block(&store, &genesis_header); let hash_b = block_1b.hash(); - add_block(&block_1b, &store).expect("Could not add block b."); + Blockchain::default() + .add_block(&block_1b, &store) + .expect("Could not add block b."); // The latest block should be the same. assert_eq!(latest_canonical_block_hash(&store).unwrap(), hash_2); diff --git a/crates/l2/prover/tests/perf_zkvm.rs b/crates/l2/prover/tests/perf_zkvm.rs index 5091f66e56..8ec98e68cd 100644 --- a/crates/l2/prover/tests/perf_zkvm.rs +++ b/crates/l2/prover/tests/perf_zkvm.rs @@ -1,10 +1,10 @@ #![allow(clippy::expect_used)] #![allow(clippy::unwrap_used)] +use ethrex_blockchain::Blockchain; use ethrex_common::types::Block; use std::path::Path; use tracing::info; -use ethrex_blockchain::add_block; use ethrex_prover_lib::prover::{Prover, Risc0Prover, Sp1Prover}; use ethrex_storage::{EngineType, Store}; use ethrex_vm::execution_db::ExecutionDB; @@ -80,7 +80,7 @@ async fn setup() -> (ProgramInput, Block) { block.body.transactions.len(), block.header.number ); - add_block(block, &store).unwrap(); + Blockchain::default().add_block(block, &store).unwrap(); } let block_to_prove = blocks.last().unwrap(); diff --git a/crates/l2/utils/prover/save_state.rs b/crates/l2/utils/prover/save_state.rs index 190a10b8bb..eecb4b5694 100644 --- a/crates/l2/utils/prover/save_state.rs +++ b/crates/l2/utils/prover/save_state.rs @@ -385,7 +385,7 @@ pub fn block_number_has_all_proofs(block_number: u64) -> Result> = Vec::new(); diff --git a/crates/l2/utils/test_data_io.rs b/crates/l2/utils/test_data_io.rs index cde47d5b2d..5bd4780815 100644 --- a/crates/l2/utils/test_data_io.rs +++ b/crates/l2/utils/test_data_io.rs @@ -1,7 +1,7 @@ #![allow(clippy::unwrap_used)] #![allow(clippy::expect_used)] -use ethrex_blockchain::add_block; +use ethrex_blockchain::Blockchain; use ethrex_common::types::{Block, Genesis}; use ethrex_rlp::{decode::RLPDecode, encode::RLPEncode}; use ethrex_storage::{EngineType, Store}; @@ -72,7 +72,7 @@ pub fn generate_program_input( let store = Store::new("memory", EngineType::InMemory)?; store.add_initial_state(genesis)?; for block in chain { - add_block(&block, &store)?; + Blockchain::default().add_block(&block, &store)?; } let parent_block_header = store From 2d54b80e380ef5766a695e9b7dae738f3fc4db1b Mon Sep 17 00:00:00 2001 From: JereSalo Date: Fri, 21 Feb 2025 17:10:17 -0300 Subject: [PATCH 02/16] continue with refactor, replace add_block function for method --- cmd/ethrex/ethrex.rs | 20 ++++++---- cmd/ethrex_l2/src/commands/wallet.rs | 1 - crates/blockchain/blockchain.rs | 52 +------------------------ crates/networking/p2p/sync.rs | 12 ++++-- crates/networking/rpc/engine/payload.rs | 13 ++++++- 5 files changed, 35 insertions(+), 63 deletions(-) diff --git a/cmd/ethrex/ethrex.rs b/cmd/ethrex/ethrex.rs index 62631a2680..e429f051ae 100644 --- a/cmd/ethrex/ethrex.rs +++ b/cmd/ethrex/ethrex.rs 
@@ -1,6 +1,6 @@ use bytes::Bytes; use directories::ProjectDirs; -use ethrex_blockchain::{add_block, fork_choice::apply_fork_choice}; +use ethrex_blockchain::{fork_choice::apply_fork_choice, Blockchain}; use ethrex_common::types::{Block, Genesis}; use ethrex_p2p::{ kademlia::KademliaTable, @@ -155,6 +155,8 @@ async fn main() { let evm = EVM_BACKEND.get_or_init(|| evm.clone()); info!("EVM_BACKEND set to: {:?}", evm); + let blockchain = Blockchain::new(evm.clone()); + let path = path::PathBuf::from(data_dir.clone()); let store: Store = if path.ends_with("memory") { Store::new(&data_dir, EngineType::InMemory).expect("Failed to create Store") @@ -181,7 +183,7 @@ async fn main() { if let Some(chain_rlp_path) = matches.get_one::("import") { info!("Importing blocks from chain file: {}", chain_rlp_path); let blocks = read_chain_file(chain_rlp_path); - import_blocks(&store, &blocks); + import_blocks(&store, &blocks, &blockchain); } if let Some(blocks_path) = matches.get_one::("import_dir") { @@ -200,7 +202,7 @@ async fn main() { blocks.push(read_block_file(s)); } - import_blocks(&store, &blocks); + import_blocks(&store, &blocks, &blockchain); } let jwt_secret = read_jwtsecret_file(authrpc_jwtsecret); @@ -251,7 +253,12 @@ async fn main() { // Create a cancellation_token for long_living tasks let cancel_token = tokio_util::sync::CancellationToken::new(); // Create SyncManager - let syncer = SyncManager::new(peer_table.clone(), sync_mode, cancel_token.clone()); + let syncer = SyncManager::new( + peer_table.clone(), + sync_mode, + cancel_token.clone(), + blockchain, + ); // TODO: Check every module starts properly. let tracker = TaskTracker::new(); @@ -419,7 +426,7 @@ fn set_datadir(datadir: &str) -> String { .to_owned() } -fn import_blocks(store: &Store, blocks: &Vec) { +fn import_blocks(store: &Store, blocks: &Vec, blockchain: &Blockchain) { let size = blocks.len(); for block in blocks { let hash = block.hash(); @@ -427,8 +434,7 @@ fn import_blocks(store: &Store, blocks: &Vec) { "Adding block {} with hash {:#x}.", block.header.number, hash ); - let result = add_block(block, store); - if let Some(error) = result.err() { + if let Err(error) = blockchain.add_block(block, store) { warn!( "Failed to add block {} with hash {:#x}: {}.", block.header.number, hash, error diff --git a/cmd/ethrex_l2/src/commands/wallet.rs b/cmd/ethrex_l2/src/commands/wallet.rs index bfd2430d1c..4dc187ad58 100644 --- a/cmd/ethrex_l2/src/commands/wallet.rs +++ b/cmd/ethrex_l2/src/commands/wallet.rs @@ -2,7 +2,6 @@ use crate::config::EthrexL2Config; use bytes::Bytes; use clap::Subcommand; use ethereum_types::{Address, H256, U256}; -use ethrex_common::types::Transaction; use ethrex_l2_sdk::calldata::{encode_calldata, Value}; use ethrex_l2_sdk::merkle_tree::merkle_proof; use ethrex_l2_sdk::{get_withdrawal_hash, COMMON_BRIDGE_L2_ADDRESS, L2_WITHDRAW_SIGNATURE}; diff --git a/crates/blockchain/blockchain.rs b/crates/blockchain/blockchain.rs index 0ed2f72fa3..937bc98c16 100644 --- a/crates/blockchain/blockchain.rs +++ b/crates/blockchain/blockchain.rs @@ -18,13 +18,14 @@ use ethrex_common::H256; use ethrex_storage::error::StoreError; use ethrex_storage::Store; +use ethrex_vm::backends::BlockExecutionResult; use ethrex_vm::backends::EVM; use ethrex_vm::db::evm_state; -use ethrex_vm::{backends::BlockExecutionResult, get_evm_backend_or_default}; //TODO: Implement a struct Chain or BlockChain to encapsulate //functionality and canonical chain state and config +#[derive(Debug, Clone)] pub struct Blockchain { pub vm: EVM, } @@ -85,55 +86,6 @@ 
impl Default for Blockchain { } } -/// Adds a new block to the store. It may or may not be canonical, as long as its ancestry links -/// with the canonical chain and its parent's post-state is calculated. It doesn't modify the -/// canonical chain/head. Fork choice needs to be updated for that in a separate step. -/// -/// Performs pre and post execution validation, and updates the database with the post state. -pub fn add_block(block: &Block, storage: &Store) -> Result<(), ChainError> { - let block_hash = block.header.compute_block_hash(); - - // Validate if it can be the new head and find the parent - let Ok(parent_header) = find_parent_header(&block.header, storage) else { - // If the parent is not present, we store it as pending. - storage.add_pending_block(block.clone())?; - return Err(ChainError::ParentNotFound); - }; - let mut state = evm_state(storage.clone(), block.header.parent_hash); - let chain_config = state.chain_config().map_err(ChainError::from)?; - - // Validate the block pre-execution - validate_block(block, &parent_header, &chain_config)?; - let BlockExecutionResult { - receipts, - requests, - account_updates, - } = get_evm_backend_or_default().execute_block(block, &mut state)?; - - validate_gas_used(&receipts, &block.header)?; - - // Apply the account updates over the last block's state and compute the new state root - let new_state_root = state - .database() - .ok_or(ChainError::StoreError(StoreError::MissingStore))? - .apply_account_updates(block.header.parent_hash, &account_updates)? - .ok_or(ChainError::ParentStateNotFound)?; - - // Check state root matches the one in block header after execution - validate_state_root(&block.header, new_state_root)?; - - // Check receipts root matches the one in block header after execution - validate_receipts_root(&block.header, &receipts)?; - - // Processes requests from receipts, computes the requests_hash and compares it against the header - validate_requests_hash(&block.header, &chain_config, &requests)?; - - store_block(storage, block.clone())?; - store_receipts(storage, receipts, block_hash)?; - - Ok(()) -} - pub fn validate_requests_hash( header: &BlockHeader, chain_config: &ChainConfig, diff --git a/crates/networking/p2p/sync.rs b/crates/networking/p2p/sync.rs index 93742c0246..8b5d3f5de2 100644 --- a/crates/networking/p2p/sync.rs +++ b/crates/networking/p2p/sync.rs @@ -6,7 +6,7 @@ mod storage_healing; mod trie_rebuild; use bytecode_fetcher::bytecode_fetcher; -use ethrex_blockchain::error::ChainError; +use ethrex_blockchain::{error::ChainError, Blockchain}; use ethrex_common::{ types::{Block, BlockHash}, BigEndianHash, H256, U256, U512, @@ -89,6 +89,7 @@ pub struct SyncManager { trie_rebuilder: Option, // Used for cancelling long-living tasks upon shutdown cancel_token: CancellationToken, + pub blockchain: Blockchain, } impl SyncManager { @@ -96,6 +97,7 @@ impl SyncManager { peer_table: Arc>, sync_mode: SyncMode, cancel_token: CancellationToken, + blockchain: Blockchain, ) -> Self { Self { sync_mode, @@ -104,6 +106,7 @@ impl SyncManager { invalid_ancestors: HashMap::new(), trie_rebuilder: None, cancel_token, + blockchain, } } @@ -119,6 +122,7 @@ impl SyncManager { trie_rebuilder: None, // This won't be used cancel_token: CancellationToken::new(), + blockchain: Blockchain::default(), } } @@ -276,7 +280,7 @@ impl SyncManager { let block = store .get_block_by_hash(*hash)? 
.ok_or(SyncError::CorruptDB)?; - ethrex_blockchain::add_block(&block, &store)?; + self.blockchain.add_block(&block, &store)?; store.set_canonical_block(block.header.number, *hash)?; store.update_latest_block_number(block.header.number)?; } @@ -293,6 +297,7 @@ impl SyncManager { self.peers.clone(), store.clone(), &mut self.invalid_ancestors, + &self.blockchain, ) .await? } @@ -308,6 +313,7 @@ async fn download_and_run_blocks( peers: PeerHandler, store: Store, invalid_ancestors: &mut HashMap, + blockchain: &Blockchain, ) -> Result<(), SyncError> { let mut last_valid_hash = H256::default(); loop { @@ -325,7 +331,7 @@ async fn download_and_run_blocks( .ok_or(SyncError::CorruptDB)?; let number = header.number; let block = Block::new(header, body); - if let Err(error) = ethrex_blockchain::add_block(&block, &store) { + if let Err(error) = blockchain.add_block(&block, &store) { invalid_ancestors.insert(hash, last_valid_hash); return Err(error.into()); } diff --git a/crates/networking/rpc/engine/payload.rs b/crates/networking/rpc/engine/payload.rs index ecef8d35f5..4d5728451a 100644 --- a/crates/networking/rpc/engine/payload.rs +++ b/crates/networking/rpc/engine/payload.rs @@ -1,4 +1,3 @@ -use ethrex_blockchain::add_block; use ethrex_blockchain::error::ChainError; use ethrex_blockchain::payload::build_payload; use ethrex_common::types::{BlobsBundle, Block, BlockBody, BlockHash, BlockNumber, Fork}; @@ -427,7 +426,17 @@ fn execute_payload(block: &Block, context: &RpcApiContext) -> Result Ok(PayloadStatus::syncing()), // Under the current implementation this is not possible: we always calculate the state // transition of any new payload as long as the parent is present. If we received the From bc9265d4922166722cf913481b0844e3d0914ab1 Mon Sep 17 00:00:00 2001 From: JereSalo Date: Fri, 21 Feb 2025 17:45:19 -0300 Subject: [PATCH 03/16] make progress in refactor, removed global variable --- cmd/ethrex/ethrex.rs | 11 +- crates/blockchain/blockchain.rs | 8 +- crates/blockchain/payload.rs | 542 ++++++++++++------------ crates/blockchain/smoke_test.rs | 6 +- crates/networking/rpc/engine/payload.rs | 14 +- crates/vm/vm.rs | 13 - 6 files changed, 297 insertions(+), 297 deletions(-) diff --git a/cmd/ethrex/ethrex.rs b/cmd/ethrex/ethrex.rs index e429f051ae..056f149391 100644 --- a/cmd/ethrex/ethrex.rs +++ b/cmd/ethrex/ethrex.rs @@ -10,7 +10,7 @@ use ethrex_p2p::{ }; use ethrex_rlp::decode::RLPDecode; use ethrex_storage::{EngineType, Store}; -use ethrex_vm::{backends::EVM, EVM_BACKEND}; +use ethrex_vm::backends::EVM; use k256::ecdsa::SigningKey; use local_ip_address::local_ip; use rand::rngs::OsRng; @@ -152,8 +152,6 @@ async fn main() { let sync_mode = sync_mode(&matches); let evm = matches.get_one::("evm").unwrap_or(&EVM::REVM); - let evm = EVM_BACKEND.get_or_init(|| evm.clone()); - info!("EVM_BACKEND set to: {:?}", evm); let blockchain = Blockchain::new(evm.clone()); @@ -460,13 +458,12 @@ fn import_blocks(store: &Store, blocks: &Vec, blockchain: &Blockchain) { } if let Some(last_block) = blocks.last() { let hash = last_block.hash(); - match EVM_BACKEND.get() { - Some(EVM::LEVM) => { + match blockchain.vm { + EVM::LEVM => { // We are allowing this not to unwrap so that tests can run even if block execution results in the wrong root hash with LEVM. 
let _ = apply_fork_choice(store, hash, hash, hash); } - // This means we are using REVM as default - Some(EVM::REVM) | None => { + EVM::REVM => { apply_fork_choice(store, hash, hash, hash).unwrap(); } } diff --git a/crates/blockchain/blockchain.rs b/crates/blockchain/blockchain.rs index 937bc98c16..d9b53a829e 100644 --- a/crates/blockchain/blockchain.rs +++ b/crates/blockchain/blockchain.rs @@ -25,7 +25,7 @@ use ethrex_vm::db::evm_state; //TODO: Implement a struct Chain or BlockChain to encapsulate //functionality and canonical chain state and config -#[derive(Debug, Clone)] +#[derive(Debug, Clone, Default)] pub struct Blockchain { pub vm: EVM, } @@ -80,12 +80,6 @@ impl Blockchain { } } -impl Default for Blockchain { - fn default() -> Self { - Self { vm: EVM::default() } - } -} - pub fn validate_requests_hash( header: &BlockHeader, chain_config: &ChainConfig, diff --git a/crates/blockchain/payload.rs b/crates/blockchain/payload.rs index 46c9fb4561..a1c5a747dc 100644 --- a/crates/blockchain/payload.rs +++ b/crates/blockchain/payload.rs @@ -18,7 +18,7 @@ use ethrex_common::{ use ethrex_vm::{ backends::levm::CacheDB, db::{evm_state, EvmState}, - get_evm_backend_or_default, EvmError, + EvmError, }; use ethrex_rlp::encode::RLPEncode; @@ -35,6 +35,7 @@ use crate::{ constants::{GAS_LIMIT_BOUND_DIVISOR, MIN_GAS_LIMIT, TX_GAS_COST}, error::{ChainError, InvalidBlockError}, mempool::{self, PendingTxFilter}, + Blockchain, }; use tracing::debug; @@ -230,299 +231,310 @@ impl<'a> PayloadBuildContext<'a> { } } -/// Completes the payload building process, return the block value -pub fn build_payload( - payload: &mut Block, - store: &Store, -) -> Result<(BlobsBundle, U256), ChainError> { - debug!("Building payload"); - let mut evm_state = evm_state(store.clone(), payload.header.parent_hash); - let mut context = PayloadBuildContext::new(payload, &mut evm_state)?; - apply_system_operations(&mut context)?; - apply_withdrawals(&mut context)?; - fill_transactions(&mut context)?; - extract_requests(&mut context)?; - finalize_payload(&mut context)?; - Ok((context.blobs_bundle, context.block_value)) -} +impl Blockchain { + /// Completes the payload building process, return the block value + pub fn build_payload( + &self, + payload: &mut Block, + store: &Store, + ) -> Result<(BlobsBundle, U256), ChainError> { + debug!("Building payload"); + let mut evm_state = evm_state(store.clone(), payload.header.parent_hash); + let mut context = PayloadBuildContext::new(payload, &mut evm_state)?; + self.apply_system_operations(&mut context)?; + self.apply_withdrawals(&mut context)?; + self.fill_transactions(&mut context)?; + self.extract_requests(&mut context)?; + self.finalize_payload(&mut context)?; + Ok((context.blobs_bundle, context.block_value)) + } + + pub fn apply_withdrawals(&self, context: &mut PayloadBuildContext) -> Result<(), EvmError> { + let binding = Vec::new(); + let withdrawals = context + .payload + .body + .withdrawals + .as_ref() + .unwrap_or(&binding); + self.vm + .process_withdrawals( + withdrawals, + context.evm_state, + &context.payload.header, + &mut context.block_cache, + ) + .map_err(EvmError::from) + } -pub fn apply_withdrawals(context: &mut PayloadBuildContext) -> Result<(), EvmError> { - let binding = Vec::new(); - let withdrawals = context - .payload - .body - .withdrawals - .as_ref() - .unwrap_or(&binding); - get_evm_backend_or_default() - .process_withdrawals( - withdrawals, + // This function applies system level operations: + // - Call beacon root contract, and obtain the new state root + // - 
Call block hash process contract, and store parent block hash + pub fn apply_system_operations( + &self, + context: &mut PayloadBuildContext, + ) -> Result<(), EvmError> { + let chain_config = context.chain_config()?; + self.vm.apply_system_calls( context.evm_state, &context.payload.header, &mut context.block_cache, + &chain_config, ) - .map_err(EvmError::from) -} - -// This function applies system level operations: -// - Call beacon root contract, and obtain the new state root -// - Call block hash process contract, and store parent block hash -pub fn apply_system_operations(context: &mut PayloadBuildContext) -> Result<(), EvmError> { - let chain_config = context.chain_config()?; - get_evm_backend_or_default().apply_system_calls( - context.evm_state, - &context.payload.header, - &mut context.block_cache, - &chain_config, - ) -} - -/// Fetches suitable transactions from the mempool -/// Returns two transaction queues, one for plain and one for blob txs -fn fetch_mempool_transactions( - context: &mut PayloadBuildContext, -) -> Result<(TransactionQueue, TransactionQueue), ChainError> { - let tx_filter = PendingTxFilter { - /*TODO(https://github.com/lambdaclass/ethrex/issues/680): add tip filter */ - base_fee: context.base_fee_per_gas(), - blob_fee: Some(context.base_fee_per_blob_gas), - ..Default::default() - }; - let plain_tx_filter = PendingTxFilter { - only_plain_txs: true, - ..tx_filter - }; - let blob_tx_filter = PendingTxFilter { - only_blob_txs: true, - ..tx_filter - }; - let store = context.store().ok_or(StoreError::Custom( - "no store in the context (is an ExecutionDB being used?)".to_string(), - ))?; - Ok(( - // Plain txs - TransactionQueue::new( - mempool::filter_transactions(&plain_tx_filter, store)?, - context.base_fee_per_gas(), - )?, - // Blob txs - TransactionQueue::new( - mempool::filter_transactions(&blob_tx_filter, store)?, - context.base_fee_per_gas(), - )?, - )) -} + } -/// Fills the payload with transactions taken from the mempool -/// Returns the block value -pub fn fill_transactions(context: &mut PayloadBuildContext) -> Result<(), ChainError> { - let chain_config = context.chain_config()?; - let max_blob_number_per_block = chain_config - .get_fork_blob_schedule(context.payload.header.timestamp) - .map(|schedule| schedule.max) - .unwrap_or_default() as usize; - - debug!("Fetching transactions from mempool"); - // Fetch mempool transactions - let (mut plain_txs, mut blob_txs) = fetch_mempool_transactions(context)?; - // Execute and add transactions to payload (if suitable) - loop { - // Check if we have enough gas to run more transactions - if context.remaining_gas < TX_GAS_COST { - debug!("No more gas to run transactions"); - break; + /// Fetches suitable transactions from the mempool + /// Returns two transaction queues, one for plain and one for blob txs + fn fetch_mempool_transactions( + &self, + context: &mut PayloadBuildContext, + ) -> Result<(TransactionQueue, TransactionQueue), ChainError> { + let tx_filter = PendingTxFilter { + /*TODO(https://github.com/lambdaclass/ethrex/issues/680): add tip filter */ + base_fee: context.base_fee_per_gas(), + blob_fee: Some(context.base_fee_per_blob_gas), + ..Default::default() }; - if !blob_txs.is_empty() && context.blobs_bundle.blobs.len() >= max_blob_number_per_block { - debug!("No more blob gas to run blob transactions"); - blob_txs.clear(); - } - // Fetch the next transactions - let (head_tx, is_blob) = match (plain_txs.peek(), blob_txs.peek()) { - (None, None) => break, - (None, Some(tx)) => (tx, true), - (Some(tx), 
None) => (tx, false), - (Some(a), Some(b)) if b < a => (b, true), - (Some(tx), _) => (tx, false), + let plain_tx_filter = PendingTxFilter { + only_plain_txs: true, + ..tx_filter }; - - let txs = if is_blob { - &mut blob_txs - } else { - &mut plain_txs + let blob_tx_filter = PendingTxFilter { + only_blob_txs: true, + ..tx_filter }; + let store = context.store().ok_or(StoreError::Custom( + "no store in the context (is an ExecutionDB being used?)".to_string(), + ))?; + Ok(( + // Plain txs + TransactionQueue::new( + mempool::filter_transactions(&plain_tx_filter, store)?, + context.base_fee_per_gas(), + )?, + // Blob txs + TransactionQueue::new( + mempool::filter_transactions(&blob_tx_filter, store)?, + context.base_fee_per_gas(), + )?, + )) + } - // Check if we have enough gas to run the transaction - if context.remaining_gas < head_tx.tx.gas_limit() { - debug!( - "Skipping transaction: {}, no gas left", - head_tx.tx.compute_hash() - ); - // We don't have enough gas left for the transaction, so we skip all txs from this account - txs.pop(); - continue; - } - - // TODO: maybe fetch hash too when filtering mempool so we don't have to compute it here (we can do this in the same refactor as adding timestamp) - let tx_hash = head_tx.tx.compute_hash(); - - // Check wether the tx is replay-protected - if head_tx.tx.protected() && !chain_config.is_eip155_activated(context.block_number()) { - // Ignore replay protected tx & all txs from the sender - // Pull transaction from the mempool - debug!("Ignoring replay-protected transaction: {}", tx_hash); - txs.pop(); - mempool::remove_transaction( - &head_tx.tx.compute_hash(), - context - .store() - .ok_or(ChainError::StoreError(StoreError::MissingStore))?, - )?; - continue; - } + /// Fills the payload with transactions taken from the mempool + /// Returns the block value + pub fn fill_transactions(&self, context: &mut PayloadBuildContext) -> Result<(), ChainError> { + let chain_config = context.chain_config()?; + let max_blob_number_per_block = chain_config + .get_fork_blob_schedule(context.payload.header.timestamp) + .map(|schedule| schedule.max) + .unwrap_or_default() as usize; + + debug!("Fetching transactions from mempool"); + // Fetch mempool transactions + let (mut plain_txs, mut blob_txs) = self.fetch_mempool_transactions(context)?; + // Execute and add transactions to payload (if suitable) + loop { + // Check if we have enough gas to run more transactions + if context.remaining_gas < TX_GAS_COST { + debug!("No more gas to run transactions"); + break; + }; + if !blob_txs.is_empty() && context.blobs_bundle.blobs.len() >= max_blob_number_per_block + { + debug!("No more blob gas to run blob transactions"); + blob_txs.clear(); + } + // Fetch the next transactions + let (head_tx, is_blob) = match (plain_txs.peek(), blob_txs.peek()) { + (None, None) => break, + (None, Some(tx)) => (tx, true), + (Some(tx), None) => (tx, false), + (Some(a), Some(b)) if b < a => (b, true), + (Some(tx), _) => (tx, false), + }; + + let txs = if is_blob { + &mut blob_txs + } else { + &mut plain_txs + }; + + // Check if we have enough gas to run the transaction + if context.remaining_gas < head_tx.tx.gas_limit() { + debug!( + "Skipping transaction: {}, no gas left", + head_tx.tx.compute_hash() + ); + // We don't have enough gas left for the transaction, so we skip all txs from this account + txs.pop(); + continue; + } - // Increment the total transaction counter - // CHECK: do we want it here to count every processed transaction - // or we want it before the return? 
- metrics!(METRICS_TX.inc_tx()); + // TODO: maybe fetch hash too when filtering mempool so we don't have to compute it here (we can do this in the same refactor as adding timestamp) + let tx_hash = head_tx.tx.compute_hash(); - // Execute tx - let receipt = match apply_transaction(&head_tx, context) { - Ok(receipt) => { - txs.shift()?; + // Check wether the tx is replay-protected + if head_tx.tx.protected() && !chain_config.is_eip155_activated(context.block_number()) { + // Ignore replay protected tx & all txs from the sender // Pull transaction from the mempool + debug!("Ignoring replay-protected transaction: {}", tx_hash); + txs.pop(); mempool::remove_transaction( &head_tx.tx.compute_hash(), context .store() .ok_or(ChainError::StoreError(StoreError::MissingStore))?, )?; - - metrics!(METRICS_TX.inc_tx_with_status_and_type( - MetricsTxStatus::Succeeded, - MetricsTxType(head_tx.tx_type()) - )); - receipt - } - // Ignore following txs from sender - Err(e) => { - debug!("Failed to execute transaction: {}, {e}", tx_hash); - metrics!(METRICS_TX.inc_tx_with_status_and_type( - MetricsTxStatus::Failed, - MetricsTxType(head_tx.tx_type()) - )); - txs.pop(); continue; } - }; - // Add transaction to block - debug!("Adding transaction: {} to payload", tx_hash); - context.payload.body.transactions.push(head_tx.into()); - // Save receipt for hash calculation - context.receipts.push(receipt); + + // Increment the total transaction counter + // CHECK: do we want it here to count every processed transaction + // or we want it before the return? + metrics!(METRICS_TX.inc_tx()); + + // Execute tx + let receipt = match self.apply_transaction(&head_tx, context) { + Ok(receipt) => { + txs.shift()?; + // Pull transaction from the mempool + mempool::remove_transaction( + &head_tx.tx.compute_hash(), + context + .store() + .ok_or(ChainError::StoreError(StoreError::MissingStore))?, + )?; + + metrics!(METRICS_TX.inc_tx_with_status_and_type( + MetricsTxStatus::Succeeded, + MetricsTxType(head_tx.tx_type()) + )); + receipt + } + // Ignore following txs from sender + Err(e) => { + debug!("Failed to execute transaction: {}, {e}", tx_hash); + metrics!(METRICS_TX.inc_tx_with_status_and_type( + MetricsTxStatus::Failed, + MetricsTxType(head_tx.tx_type()) + )); + txs.pop(); + continue; + } + }; + // Add transaction to block + debug!("Adding transaction: {} to payload", tx_hash); + context.payload.body.transactions.push(head_tx.into()); + // Save receipt for hash calculation + context.receipts.push(receipt); + } + Ok(()) } - Ok(()) -} -/// Executes the transaction, updates gas-related context values & return the receipt -/// The payload build context should have enough remaining gas to cover the transaction's gas_limit -fn apply_transaction( - head: &HeadTransaction, - context: &mut PayloadBuildContext, -) -> Result { - match **head { - Transaction::EIP4844Transaction(_) => apply_blob_transaction(head, context), - _ => apply_plain_transaction(head, context), + /// Executes the transaction, updates gas-related context values & return the receipt + /// The payload build context should have enough remaining gas to cover the transaction's gas_limit + fn apply_transaction( + &self, + head: &HeadTransaction, + context: &mut PayloadBuildContext, + ) -> Result { + match **head { + Transaction::EIP4844Transaction(_) => self.apply_blob_transaction(head, context), + _ => self.apply_plain_transaction(head, context), + } } -} -/// Runs a blob transaction, updates the gas count & blob data and returns the receipt -fn apply_blob_transaction( - 
head: &HeadTransaction, - context: &mut PayloadBuildContext, -) -> Result { - // Fetch blobs bundle - let tx_hash = head.tx.compute_hash(); - let chain_config = context.chain_config()?; - let max_blob_number_per_block = chain_config - .get_fork_blob_schedule(context.payload.header.timestamp) - .map(|schedule| schedule.max) - .unwrap_or_default() as usize; - - let Some(blobs_bundle) = context - .store() - .ok_or(ChainError::StoreError(StoreError::MissingStore))? - .get_blobs_bundle_from_pool(tx_hash)? - else { - // No blob tx should enter the mempool without its blobs bundle so this is an internal error - return Err( - StoreError::Custom(format!("No blobs bundle found for blob tx {tx_hash}")).into(), - ); - }; - if context.blobs_bundle.blobs.len() + blobs_bundle.blobs.len() > max_blob_number_per_block { - // This error will only be used for debug tracing - return Err(EvmError::Custom("max data blobs reached".to_string()).into()); - }; - // Apply transaction - let receipt = apply_plain_transaction(head, context)?; - // Update context with blob data - let prev_blob_gas = context.payload.header.blob_gas_used.unwrap_or_default(); - context.payload.header.blob_gas_used = - Some(prev_blob_gas + blobs_bundle.blobs.len() as u64 * GAS_PER_BLOB); - context.blobs_bundle += blobs_bundle; - Ok(receipt) -} + /// Runs a blob transaction, updates the gas count & blob data and returns the receipt + fn apply_blob_transaction( + &self, + head: &HeadTransaction, + context: &mut PayloadBuildContext, + ) -> Result { + // Fetch blobs bundle + let tx_hash = head.tx.compute_hash(); + let chain_config = context.chain_config()?; + let max_blob_number_per_block = chain_config + .get_fork_blob_schedule(context.payload.header.timestamp) + .map(|schedule| schedule.max) + .unwrap_or_default() as usize; + + let Some(blobs_bundle) = context + .store() + .ok_or(ChainError::StoreError(StoreError::MissingStore))? + .get_blobs_bundle_from_pool(tx_hash)? 
+ else { + // No blob tx should enter the mempool without its blobs bundle so this is an internal error + return Err( + StoreError::Custom(format!("No blobs bundle found for blob tx {tx_hash}")).into(), + ); + }; + if context.blobs_bundle.blobs.len() + blobs_bundle.blobs.len() > max_blob_number_per_block { + // This error will only be used for debug tracing + return Err(EvmError::Custom("max data blobs reached".to_string()).into()); + }; + // Apply transaction + let receipt = self.apply_plain_transaction(head, context)?; + // Update context with blob data + let prev_blob_gas = context.payload.header.blob_gas_used.unwrap_or_default(); + context.payload.header.blob_gas_used = + Some(prev_blob_gas + blobs_bundle.blobs.len() as u64 * GAS_PER_BLOB); + context.blobs_bundle += blobs_bundle; + Ok(receipt) + } -/// Runs a plain (non blob) transaction, updates the gas count and returns the receipt -fn apply_plain_transaction( - head: &HeadTransaction, - context: &mut PayloadBuildContext, -) -> Result { - let chain_config = context.chain_config()?; - let (report, gas_used) = get_evm_backend_or_default().execute_tx( - context.evm_state, - &head.tx, - &context.payload.header, - &mut context.block_cache, - &chain_config, - &mut context.remaining_gas, - )?; - context.block_value += U256::from(gas_used) * head.tip; - Ok(report) -} + /// Runs a plain (non blob) transaction, updates the gas count and returns the receipt + fn apply_plain_transaction( + &self, + head: &HeadTransaction, + context: &mut PayloadBuildContext, + ) -> Result { + let chain_config = context.chain_config()?; + let (report, gas_used) = self.vm.execute_tx( + context.evm_state, + &head.tx, + &context.payload.header, + &mut context.block_cache, + &chain_config, + &mut context.remaining_gas, + )?; + context.block_value += U256::from(gas_used) * head.tip; + Ok(report) + } -pub fn extract_requests(context: &mut PayloadBuildContext) -> Result<(), EvmError> { - let requests = get_evm_backend_or_default().extract_requests( - &context.receipts, - context.evm_state, - &context.payload.header, - &mut context.block_cache, - ); - context.requests = requests?; + pub fn extract_requests(&self, context: &mut PayloadBuildContext) -> Result<(), EvmError> { + let requests = self.vm.extract_requests( + &context.receipts, + context.evm_state, + &context.payload.header, + &mut context.block_cache, + ); + context.requests = requests?; - Ok(()) -} + Ok(()) + } -fn finalize_payload(context: &mut PayloadBuildContext) -> Result<(), ChainError> { - let account_updates = get_evm_backend_or_default().get_state_transitions( - context.evm_state, - context.parent_hash(), - &context.block_cache, - )?; - - context.payload.header.state_root = context - .store() - .ok_or(StoreError::MissingStore)? - .apply_account_updates(context.parent_hash(), &account_updates)? - .unwrap_or_default(); - context.payload.header.transactions_root = - compute_transactions_root(&context.payload.body.transactions); - context.payload.header.receipts_root = compute_receipts_root(&context.receipts); - context.payload.header.requests_hash = context - .chain_config()? 
- .is_prague_activated(context.payload.header.timestamp) - .then_some(compute_requests_hash(&context.requests)); - context.payload.header.gas_used = context.payload.header.gas_limit - context.remaining_gas; - Ok(()) + fn finalize_payload(&self, context: &mut PayloadBuildContext) -> Result<(), ChainError> { + let account_updates = self.vm.get_state_transitions( + context.evm_state, + context.parent_hash(), + &context.block_cache, + )?; + + context.payload.header.state_root = context + .store() + .ok_or(StoreError::MissingStore)? + .apply_account_updates(context.parent_hash(), &account_updates)? + .unwrap_or_default(); + context.payload.header.transactions_root = + compute_transactions_root(&context.payload.body.transactions); + context.payload.header.receipts_root = compute_receipts_root(&context.receipts); + context.payload.header.requests_hash = context + .chain_config()? + .is_prague_activated(context.payload.header.timestamp) + .then_some(compute_requests_hash(&context.requests)); + context.payload.header.gas_used = context.payload.header.gas_limit - context.remaining_gas; + Ok(()) + } } /// A struct representing suitable mempool transactions waiting to be included in a block diff --git a/crates/blockchain/smoke_test.rs b/crates/blockchain/smoke_test.rs index 730ecfa173..fad51d6b57 100644 --- a/crates/blockchain/smoke_test.rs +++ b/crates/blockchain/smoke_test.rs @@ -6,7 +6,7 @@ mod blockchain_integration_test { error::{ChainError, InvalidForkChoice}, fork_choice::apply_fork_choice, is_canonical, latest_canonical_block_hash, - payload::{build_payload, create_payload, BuildPayloadArgs}, + payload::{create_payload, BuildPayloadArgs}, Blockchain, }; @@ -260,7 +260,9 @@ mod blockchain_integration_test { }; let mut block = create_payload(&args, store).unwrap(); - build_payload(&mut block, store).unwrap(); + Blockchain::default() + .build_payload(&mut block, store) + .unwrap(); block } diff --git a/crates/networking/rpc/engine/payload.rs b/crates/networking/rpc/engine/payload.rs index 4d5728451a..dec2095578 100644 --- a/crates/networking/rpc/engine/payload.rs +++ b/crates/networking/rpc/engine/payload.rs @@ -1,5 +1,4 @@ use ethrex_blockchain::error::ChainError; -use ethrex_blockchain::payload::build_payload; use ethrex_common::types::{BlobsBundle, Block, BlockBody, BlockHash, BlockNumber, Fork}; use ethrex_common::{H256, U256}; use serde_json::Value; @@ -543,8 +542,17 @@ fn build_execution_payload_response( should_override_builder, }) } else { - let (blobs_bundle, block_value) = build_payload(&mut payload_block, &context.storage) - .map_err(|err| RpcErr::Internal(err.to_string()))?; + let result = { + let syncer = context + .syncer + .try_lock() + .map_err(|_| RpcErr::Internal("Error locking syncer".to_string()))?; + syncer + .blockchain + .build_payload(&mut payload_block, &context.storage) + .map_err(|err| RpcErr::Internal(err.to_string()))? + }; + let (blobs_bundle, block_value) = result; context.storage.update_payload( payload_id, diff --git a/crates/vm/vm.rs b/crates/vm/vm.rs index 93d8b3056f..ba29507fba 100644 --- a/crates/vm/vm.rs +++ b/crates/vm/vm.rs @@ -6,7 +6,6 @@ mod execution_result; #[cfg(feature = "l2")] mod mods; -use backends::EVM; use db::EvmState; use crate::backends::revm_b::*; @@ -28,18 +27,6 @@ pub use errors::EvmError; pub use execution_result::*; pub use revm::primitives::{Address as RevmAddress, SpecId, U256 as RevmU256}; -use std::sync::OnceLock; - -// This global variable can be initialized by the ethrex cli. 
-// EVM_BACKEND.get_or_init(|| evm); -// Then, we can retrieve the evm with: -// EVM_BACKEND.get(); -> returns Option -pub static EVM_BACKEND: OnceLock = OnceLock::new(); -/// Function used to access the global variable holding the chosen backend. -pub fn get_evm_backend_or_default() -> EVM { - EVM_BACKEND.get().unwrap_or(&EVM::default()).clone() -} - // ================== Commonly used functions ====================== // TODO: IMPLEMENT FOR LEVM From cc69d39bb31e3a8d5fa26069a4ec176e75bcfd0b Mon Sep 17 00:00:00 2001 From: JereSalo Date: Fri, 21 Feb 2025 17:46:49 -0300 Subject: [PATCH 04/16] undo changes i guess --- .tool-versions | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 .tool-versions diff --git a/.tool-versions b/.tool-versions new file mode 100644 index 0000000000..6374ee5807 --- /dev/null +++ b/.tool-versions @@ -0,0 +1,2 @@ +rust 1.82.0 +# golang 1.23.2 From ac4c8a8c411f218dbf54b6e78a8f9c624dc15c13 Mon Sep 17 00:00:00 2001 From: JereSalo Date: Fri, 21 Feb 2025 17:47:26 -0300 Subject: [PATCH 05/16] remove tool versions --- .tool-versions | 2 -- 1 file changed, 2 deletions(-) delete mode 100644 .tool-versions diff --git a/.tool-versions b/.tool-versions deleted file mode 100644 index 6374ee5807..0000000000 --- a/.tool-versions +++ /dev/null @@ -1,2 +0,0 @@ -rust 1.82.0 -# golang 1.23.2 From cb902ce30d6271777c25a7b0ca9a034484a894d2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jerem=C3=ADas=20Salom=C3=B3n?= <48994069+JereSalo@users.noreply.github.com> Date: Fri, 21 Feb 2025 17:47:52 -0300 Subject: [PATCH 06/16] Delete .tool-versions~ --- .tool-versions~ | 2 -- 1 file changed, 2 deletions(-) delete mode 100644 .tool-versions~ diff --git a/.tool-versions~ b/.tool-versions~ deleted file mode 100644 index 0e6030dd6e..0000000000 --- a/.tool-versions~ +++ /dev/null @@ -1,2 +0,0 @@ -rust 1.81.0 -# golang 1.23.2 From b9638ae255a7fe1230bf921e9889451a50f17365 Mon Sep 17 00:00:00 2001 From: JereSalo Date: Fri, 21 Feb 2025 17:48:58 -0300 Subject: [PATCH 07/16] undo delete... 
--- .tool-versions | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 .tool-versions diff --git a/.tool-versions b/.tool-versions new file mode 100644 index 0000000000..6374ee5807 --- /dev/null +++ b/.tool-versions @@ -0,0 +1,2 @@ +rust 1.82.0 +# golang 1.23.2 From 0224d5d501d63540e31f55317292c55ac6c12104 Mon Sep 17 00:00:00 2001 From: JereSalo Date: Fri, 21 Feb 2025 17:59:14 -0300 Subject: [PATCH 08/16] nits --- crates/networking/rpc/engine/payload.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/crates/networking/rpc/engine/payload.rs b/crates/networking/rpc/engine/payload.rs index dec2095578..0eaa9cc46e 100644 --- a/crates/networking/rpc/engine/payload.rs +++ b/crates/networking/rpc/engine/payload.rs @@ -431,7 +431,9 @@ fn execute_payload(block: &Block, context: &RpcApiContext) -> Result Date: Mon, 24 Feb 2025 09:58:49 -0300 Subject: [PATCH 09/16] move import blocks to blockchain struct --- cmd/ethrex/ethrex.rs | 53 ++------------------------------- crates/blockchain/blockchain.rs | 49 ++++++++++++++++++++++++++++++ 2 files changed, 52 insertions(+), 50 deletions(-) diff --git a/cmd/ethrex/ethrex.rs b/cmd/ethrex/ethrex.rs index 056f149391..57c9bf9a0f 100644 --- a/cmd/ethrex/ethrex.rs +++ b/cmd/ethrex/ethrex.rs @@ -1,6 +1,6 @@ use bytes::Bytes; use directories::ProjectDirs; -use ethrex_blockchain::{fork_choice::apply_fork_choice, Blockchain}; +use ethrex_blockchain::Blockchain; use ethrex_common::types::{Block, Genesis}; use ethrex_p2p::{ kademlia::KademliaTable, @@ -181,7 +181,7 @@ async fn main() { if let Some(chain_rlp_path) = matches.get_one::("import") { info!("Importing blocks from chain file: {}", chain_rlp_path); let blocks = read_chain_file(chain_rlp_path); - import_blocks(&store, &blocks, &blockchain); + blockchain.import_blocks(&store, &blocks); } if let Some(blocks_path) = matches.get_one::("import_dir") { @@ -200,7 +200,7 @@ async fn main() { blocks.push(read_block_file(s)); } - import_blocks(&store, &blocks, &blockchain); + blockchain.import_blocks(&store, &blocks); } let jwt_secret = read_jwtsecret_file(authrpc_jwtsecret); @@ -424,53 +424,6 @@ fn set_datadir(datadir: &str) -> String { .to_owned() } -fn import_blocks(store: &Store, blocks: &Vec, blockchain: &Blockchain) { - let size = blocks.len(); - for block in blocks { - let hash = block.hash(); - info!( - "Adding block {} with hash {:#x}.", - block.header.number, hash - ); - if let Err(error) = blockchain.add_block(block, store) { - warn!( - "Failed to add block {} with hash {:#x}: {}.", - block.header.number, hash, error - ); - } - if store - .update_latest_block_number(block.header.number) - .is_err() - { - error!("Fatal: added block {} but could not update the block number -- aborting block import", block.header.number); - break; - }; - if store - .set_canonical_block(block.header.number, hash) - .is_err() - { - error!( - "Fatal: added block {} but could not set it as canonical -- aborting block import", - block.header.number - ); - break; - }; - } - if let Some(last_block) = blocks.last() { - let hash = last_block.hash(); - match blockchain.vm { - EVM::LEVM => { - // We are allowing this not to unwrap so that tests can run even if block execution results in the wrong root hash with LEVM. 
- let _ = apply_fork_choice(store, hash, hash, hash); - } - EVM::REVM => { - apply_fork_choice(store, hash, hash, hash).unwrap(); - } - } - } - info!("Added {} blocks to blockchain", size); -} - async fn store_known_peers(table: Arc>, file_path: PathBuf) { let mut connected_peers = vec![]; diff --git a/crates/blockchain/blockchain.rs b/crates/blockchain/blockchain.rs index d9b53a829e..cc7adea433 100644 --- a/crates/blockchain/blockchain.rs +++ b/crates/blockchain/blockchain.rs @@ -21,6 +21,8 @@ use ethrex_storage::Store; use ethrex_vm::backends::BlockExecutionResult; use ethrex_vm::backends::EVM; use ethrex_vm::db::evm_state; +use fork_choice::apply_fork_choice; +use tracing::{error, info, warn}; //TODO: Implement a struct Chain or BlockChain to encapsulate //functionality and canonical chain state and config @@ -78,6 +80,53 @@ impl Blockchain { Ok(()) } + + pub fn import_blocks(&self, store: &Store, blocks: &Vec) { + let size = blocks.len(); + for block in blocks { + let hash = block.hash(); + info!( + "Adding block {} with hash {:#x}.", + block.header.number, hash + ); + if let Err(error) = self.add_block(block, store) { + warn!( + "Failed to add block {} with hash {:#x}: {}.", + block.header.number, hash, error + ); + } + if store + .update_latest_block_number(block.header.number) + .is_err() + { + error!("Fatal: added block {} but could not update the block number -- aborting block import", block.header.number); + break; + }; + if store + .set_canonical_block(block.header.number, hash) + .is_err() + { + error!( + "Fatal: added block {} but could not set it as canonical -- aborting block import", + block.header.number + ); + break; + }; + } + if let Some(last_block) = blocks.last() { + let hash = last_block.hash(); + match self.vm { + EVM::LEVM => { + // We are allowing this not to unwrap so that tests can run even if block execution results in the wrong root hash with LEVM. 
+ let _ = apply_fork_choice(store, hash, hash, hash); + } + EVM::REVM => { + apply_fork_choice(store, hash, hash, hash).unwrap(); + } + } + } + info!("Added {} blocks to blockchain", size); + } } pub fn validate_requests_hash( From f551bbc490fd387f58dff989cfb8c0242fb0c918 Mon Sep 17 00:00:00 2001 From: JereSalo Date: Mon, 24 Feb 2025 10:06:47 -0300 Subject: [PATCH 10/16] create blockchain before executing add_block() --- cmd/ef_tests/blockchain/test_runner.rs | 3 +- crates/blockchain/smoke_test.rs | 48 +++++++++++++++++--------- crates/l2/prover/tests/perf_zkvm.rs | 3 +- crates/l2/utils/prover/save_state.rs | 4 ++- crates/l2/utils/test_data_io.rs | 4 ++- 5 files changed, 42 insertions(+), 20 deletions(-) diff --git a/cmd/ef_tests/blockchain/test_runner.rs b/cmd/ef_tests/blockchain/test_runner.rs index 913be77200..62ff943f97 100644 --- a/cmd/ef_tests/blockchain/test_runner.rs +++ b/cmd/ef_tests/blockchain/test_runner.rs @@ -32,8 +32,9 @@ pub fn run_ef_test(test_key: &str, test: &TestUnit) { let block: &CoreBlock = &block_fixture.block().unwrap().clone().into(); let hash = block.hash(); + let blockchain = Blockchain::default(); // Attempt to add the block as the head of the chain - let chain_result = Blockchain::default().add_block(block, &store); + let chain_result = blockchain.add_block(block, &store); match chain_result { Err(error) => { assert!( diff --git a/crates/blockchain/smoke_test.rs b/crates/blockchain/smoke_test.rs index fad51d6b57..1749d4a1a1 100644 --- a/crates/blockchain/smoke_test.rs +++ b/crates/blockchain/smoke_test.rs @@ -23,10 +23,13 @@ mod blockchain_integration_test { let genesis_header = store.get_block_header(0).unwrap().unwrap(); let genesis_hash = genesis_header.compute_block_hash(); + // Create blockchain + let blockchain = Blockchain::default(); + // Add first block. We'll make it canonical. let block_1a = new_block(&store, &genesis_header); let hash_1a = block_1a.hash(); - Blockchain::default().add_block(&block_1a, &store).unwrap(); + blockchain.add_block(&block_1a, &store).unwrap(); store.set_canonical_block(1, hash_1a).unwrap(); let retrieved_1a = store.get_block_header(1).unwrap().unwrap(); @@ -36,7 +39,7 @@ mod blockchain_integration_test { // Add second block at height 1. Will not be canonical. let block_1b = new_block(&store, &genesis_header); let hash_1b = block_1b.hash(); - Blockchain::default() + blockchain .add_block(&block_1b, &store) .expect("Could not add block 1b."); let retrieved_1b = store.get_block_header_by_hash(hash_1b).unwrap().unwrap(); @@ -47,7 +50,7 @@ mod blockchain_integration_test { // Add a third block at height 2, child to the non canonical block. let block_2 = new_block(&store, &block_1b.header); let hash_2 = block_2.hash(); - Blockchain::default() + blockchain .add_block(&block_2, &store) .expect("Could not add block 2."); let retrieved_2 = store.get_block_header_by_hash(hash_2).unwrap(); @@ -76,17 +79,20 @@ mod blockchain_integration_test { let store = test_store(); let genesis_header = store.get_block_header(0).unwrap().unwrap(); + // Create blockchain + let blockchain = Blockchain::default(); + // Build a single valid block. let block_1 = new_block(&store, &genesis_header); let hash_1 = block_1.header.compute_block_hash(); - Blockchain::default().add_block(&block_1, &store).unwrap(); + blockchain.add_block(&block_1, &store).unwrap(); apply_fork_choice(&store, hash_1, H256::zero(), H256::zero()).unwrap(); // Build a child, then change its parent, making it effectively a pending block. 
let mut block_2 = new_block(&store, &block_1.header); block_2.header.parent_hash = H256::random(); let hash_2 = block_2.header.compute_block_hash(); - let result = Blockchain::default().add_block(&block_2, &store); + let result = blockchain.add_block(&block_2, &store); assert!(matches!(result, Err(ChainError::ParentNotFound))); // block 2 should now be pending. @@ -106,10 +112,13 @@ mod blockchain_integration_test { let genesis_header = store.get_block_header(0).unwrap().unwrap(); let genesis_hash = genesis_header.compute_block_hash(); + // Create blockchain + let blockchain = Blockchain::default(); + // Add first block. Not canonical. let block_1a = new_block(&store, &genesis_header); let hash_1a = block_1a.hash(); - Blockchain::default().add_block(&block_1a, &store).unwrap(); + blockchain.add_block(&block_1a, &store).unwrap(); let retrieved_1a = store.get_block_header_by_hash(hash_1a).unwrap().unwrap(); assert!(!is_canonical(&store, 1, hash_1a).unwrap()); @@ -117,7 +126,7 @@ mod blockchain_integration_test { // Add second block at height 1. Canonical. let block_1b = new_block(&store, &genesis_header); let hash_1b = block_1b.hash(); - Blockchain::default() + blockchain .add_block(&block_1b, &store) .expect("Could not add block 1b."); apply_fork_choice(&store, hash_1b, genesis_hash, genesis_hash).unwrap(); @@ -131,7 +140,7 @@ mod blockchain_integration_test { // Add a third block at height 2, child to the canonical one. let block_2 = new_block(&store, &block_1b.header); let hash_2 = block_2.hash(); - Blockchain::default() + blockchain .add_block(&block_2, &store) .expect("Could not add block 2."); apply_fork_choice(&store, hash_2, genesis_hash, genesis_hash).unwrap(); @@ -165,17 +174,20 @@ mod blockchain_integration_test { let genesis_header = store.get_block_header(0).unwrap().unwrap(); let genesis_hash = genesis_header.compute_block_hash(); + // Create blockchain + let blockchain = Blockchain::default(); + // Add block at height 1. let block_1 = new_block(&store, &genesis_header); let hash_1 = block_1.hash(); - Blockchain::default() + blockchain .add_block(&block_1, &store) .expect("Could not add block 1b."); // Add child at height 2. let block_2 = new_block(&store, &block_1.header); let hash_2 = block_2.hash(); - Blockchain::default() + blockchain .add_block(&block_2, &store) .expect("Could not add block 2."); @@ -211,16 +223,19 @@ mod blockchain_integration_test { let genesis_header = store.get_block_header(0).unwrap().unwrap(); let genesis_hash = genesis_header.compute_block_hash(); + // Create blockchain + let blockchain = Blockchain::default(); + // Add block at height 1. let block_1 = new_block(&store, &genesis_header); - Blockchain::default() + blockchain .add_block(&block_1, &store) .expect("Could not add block 1b."); // Add child at height 2. let block_2 = new_block(&store, &block_1.header); let hash_2 = block_2.hash(); - Blockchain::default() + blockchain .add_block(&block_2, &store) .expect("Could not add block 2."); @@ -234,7 +249,7 @@ mod blockchain_integration_test { // Add a new, non canonical block, starting from genesis. 
let block_1b = new_block(&store, &genesis_header); let hash_b = block_1b.hash(); - Blockchain::default() + blockchain .add_block(&block_1b, &store) .expect("Could not add block b."); @@ -259,10 +274,11 @@ mod blockchain_integration_test { version: 1, }; + // Create blockchain + let blockchain = Blockchain::default(); + let mut block = create_payload(&args, store).unwrap(); - Blockchain::default() - .build_payload(&mut block, store) - .unwrap(); + blockchain.build_payload(&mut block, store).unwrap(); block } diff --git a/crates/l2/prover/tests/perf_zkvm.rs b/crates/l2/prover/tests/perf_zkvm.rs index 8ec98e68cd..a391c93cc0 100644 --- a/crates/l2/prover/tests/perf_zkvm.rs +++ b/crates/l2/prover/tests/perf_zkvm.rs @@ -74,13 +74,14 @@ async fn setup() -> (ProgramInput, Block) { let blocks = ethrex_l2::utils::test_data_io::read_chain_file(chain_file_path.to_str().unwrap()); info!("Number of blocks to insert: {}", blocks.len()); + let blockchain = Blockchain::default(); for block in &blocks { info!( "txs {} in block{}", block.body.transactions.len(), block.header.number ); - Blockchain::default().add_block(block, &store).unwrap(); + blockchain.add_block(block, &store).unwrap(); } let block_to_prove = blocks.last().unwrap(); diff --git a/crates/l2/utils/prover/save_state.rs b/crates/l2/utils/prover/save_state.rs index eecb4b5694..a5778bd1be 100644 --- a/crates/l2/utils/prover/save_state.rs +++ b/crates/l2/utils/prover/save_state.rs @@ -419,8 +419,10 @@ mod tests { store.add_initial_state(genesis.clone()).unwrap(); let blocks = test_data_io::read_chain_file(chain_file_path.to_str().unwrap()); + // create blockchain + let blockchain = Blockchain::default(); for block in &blocks { - Blockchain::default().add_block(block, &store).unwrap(); + blockchain.add_block(block, &store).unwrap(); } let mut account_updates_vec: Vec> = Vec::new(); diff --git a/crates/l2/utils/test_data_io.rs b/crates/l2/utils/test_data_io.rs index 5bd4780815..14da1a5e71 100644 --- a/crates/l2/utils/test_data_io.rs +++ b/crates/l2/utils/test_data_io.rs @@ -71,8 +71,10 @@ pub fn generate_program_input( // create store let store = Store::new("memory", EngineType::InMemory)?; store.add_initial_state(genesis)?; + // create blockchain + let blockchain = Blockchain::default(); for block in chain { - Blockchain::default().add_block(&block, &store)?; + blockchain.add_block(&block, &store)?; } let parent_block_header = store From 0a9be7646bcafb2f1ac7640029ca1f4880a29e07 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jerem=C3=ADas=20Salom=C3=B3n?= <48994069+JereSalo@users.noreply.github.com> Date: Mon, 24 Feb 2025 13:03:27 -0300 Subject: [PATCH 11/16] refactor(l1): move store to blockchain (#2060) **Motivation** **Description** Closes #issue_number --- cmd/ef_tests/blockchain/test_runner.rs | 5 ++- cmd/ethrex/ethrex.rs | 7 ++-- crates/blockchain/blockchain.rs | 43 ++++++++++++++++--------- crates/blockchain/smoke_test.rs | 38 +++++++++++----------- crates/l2/prover/tests/perf_zkvm.rs | 4 +-- crates/l2/utils/prover/save_state.rs | 4 +-- crates/l2/utils/test_data_io.rs | 4 +-- crates/networking/p2p/sync.rs | 10 +++--- crates/networking/rpc/engine/payload.rs | 2 +- 9 files changed, 65 insertions(+), 52 deletions(-) diff --git a/cmd/ef_tests/blockchain/test_runner.rs b/cmd/ef_tests/blockchain/test_runner.rs index 62ff943f97..63333b36ff 100644 --- a/cmd/ef_tests/blockchain/test_runner.rs +++ b/cmd/ef_tests/blockchain/test_runner.rs @@ -20,8 +20,8 @@ pub fn run_ef_test(test_key: &str, test: &TestUnit) { // Check world_state 
check_prestate_against_db(test_key, test, &store); + let blockchain = Blockchain::default_with_store(store.clone()); // Execute all blocks in test - for block_fixture in test.blocks.iter() { let expects_exception = block_fixture.expect_exception.is_some(); if exception_in_rlp_decoding(block_fixture) { @@ -32,9 +32,8 @@ pub fn run_ef_test(test_key: &str, test: &TestUnit) { let block: &CoreBlock = &block_fixture.block().unwrap().clone().into(); let hash = block.hash(); - let blockchain = Blockchain::default(); // Attempt to add the block as the head of the chain - let chain_result = blockchain.add_block(block, &store); + let chain_result = blockchain.add_block(block); match chain_result { Err(error) => { assert!( diff --git a/cmd/ethrex/ethrex.rs b/cmd/ethrex/ethrex.rs index 57c9bf9a0f..2ad6ffa257 100644 --- a/cmd/ethrex/ethrex.rs +++ b/cmd/ethrex/ethrex.rs @@ -153,8 +153,6 @@ async fn main() { let evm = matches.get_one::("evm").unwrap_or(&EVM::REVM); - let blockchain = Blockchain::new(evm.clone()); - let path = path::PathBuf::from(data_dir.clone()); let store: Store = if path.ends_with("memory") { Store::new(&data_dir, EngineType::InMemory).expect("Failed to create Store") @@ -172,6 +170,7 @@ async fn main() { } Store::new(&data_dir, engine_type).expect("Failed to create Store") }; + let blockchain = Blockchain::new(evm.clone(), store.clone()); let genesis = read_genesis_file(&network); store @@ -181,7 +180,7 @@ async fn main() { if let Some(chain_rlp_path) = matches.get_one::("import") { info!("Importing blocks from chain file: {}", chain_rlp_path); let blocks = read_chain_file(chain_rlp_path); - blockchain.import_blocks(&store, &blocks); + blockchain.import_blocks(&blocks); } if let Some(blocks_path) = matches.get_one::("import_dir") { @@ -200,7 +199,7 @@ async fn main() { blocks.push(read_block_file(s)); } - blockchain.import_blocks(&store, &blocks); + blockchain.import_blocks(&blocks); } let jwt_secret = read_jwtsecret_file(authrpc_jwtsecret); diff --git a/crates/blockchain/blockchain.rs b/crates/blockchain/blockchain.rs index cc7adea433..7f333bb7ae 100644 --- a/crates/blockchain/blockchain.rs +++ b/crates/blockchain/blockchain.rs @@ -27,26 +27,37 @@ use tracing::{error, info, warn}; //TODO: Implement a struct Chain or BlockChain to encapsulate //functionality and canonical chain state and config -#[derive(Debug, Clone, Default)] +#[derive(Debug, Clone)] pub struct Blockchain { pub vm: EVM, + pub storage: Store, } impl Blockchain { - pub fn new(evm: EVM) -> Self { - Self { vm: evm } + pub fn new(evm: EVM, store: Store) -> Self { + Self { + vm: evm, + storage: store, + } + } + + pub fn default_with_store(store: Store) -> Self { + Self { + vm: Default::default(), + storage: store, + } } - pub fn add_block(&self, block: &Block, storage: &Store) -> Result<(), ChainError> { + pub fn add_block(&self, block: &Block) -> Result<(), ChainError> { let block_hash = block.header.compute_block_hash(); // Validate if it can be the new head and find the parent - let Ok(parent_header) = find_parent_header(&block.header, storage) else { + let Ok(parent_header) = find_parent_header(&block.header, &self.storage) else { // If the parent is not present, we store it as pending. 
- storage.add_pending_block(block.clone())?; + self.storage.add_pending_block(block.clone())?; return Err(ChainError::ParentNotFound); }; - let mut state = evm_state(storage.clone(), block.header.parent_hash); + let mut state = evm_state(self.storage.clone(), block.header.parent_hash); let chain_config = state.chain_config().map_err(ChainError::from)?; // Validate the block pre-execution @@ -75,13 +86,13 @@ impl Blockchain { // Processes requests from receipts, computes the requests_hash and compares it against the header validate_requests_hash(&block.header, &chain_config, &requests)?; - store_block(storage, block.clone())?; - store_receipts(storage, receipts, block_hash)?; + store_block(&self.storage, block.clone())?; + store_receipts(&self.storage, receipts, block_hash)?; Ok(()) } - pub fn import_blocks(&self, store: &Store, blocks: &Vec) { + pub fn import_blocks(&self, blocks: &Vec) { let size = blocks.len(); for block in blocks { let hash = block.hash(); @@ -89,20 +100,22 @@ impl Blockchain { "Adding block {} with hash {:#x}.", block.header.number, hash ); - if let Err(error) = self.add_block(block, store) { + if let Err(error) = self.add_block(block) { warn!( "Failed to add block {} with hash {:#x}: {}.", block.header.number, hash, error ); } - if store + if self + .storage .update_latest_block_number(block.header.number) .is_err() { error!("Fatal: added block {} but could not update the block number -- aborting block import", block.header.number); break; }; - if store + if self + .storage .set_canonical_block(block.header.number, hash) .is_err() { @@ -118,10 +131,10 @@ impl Blockchain { match self.vm { EVM::LEVM => { // We are allowing this not to unwrap so that tests can run even if block execution results in the wrong root hash with LEVM. - let _ = apply_fork_choice(store, hash, hash, hash); + let _ = apply_fork_choice(&self.storage, hash, hash, hash); } EVM::REVM => { - apply_fork_choice(store, hash, hash, hash).unwrap(); + apply_fork_choice(&self.storage, hash, hash, hash).unwrap(); } } } diff --git a/crates/blockchain/smoke_test.rs b/crates/blockchain/smoke_test.rs index 1749d4a1a1..e7c31a2fed 100644 --- a/crates/blockchain/smoke_test.rs +++ b/crates/blockchain/smoke_test.rs @@ -24,12 +24,12 @@ mod blockchain_integration_test { let genesis_hash = genesis_header.compute_block_hash(); // Create blockchain - let blockchain = Blockchain::default(); + let blockchain = Blockchain::default_with_store(store.clone()); // Add first block. We'll make it canonical. 
let block_1a = new_block(&store, &genesis_header); let hash_1a = block_1a.hash(); - blockchain.add_block(&block_1a, &store).unwrap(); + blockchain.add_block(&block_1a).unwrap(); store.set_canonical_block(1, hash_1a).unwrap(); let retrieved_1a = store.get_block_header(1).unwrap().unwrap(); @@ -40,7 +40,7 @@ mod blockchain_integration_test { let block_1b = new_block(&store, &genesis_header); let hash_1b = block_1b.hash(); blockchain - .add_block(&block_1b, &store) + .add_block(&block_1b) .expect("Could not add block 1b."); let retrieved_1b = store.get_block_header_by_hash(hash_1b).unwrap().unwrap(); @@ -51,7 +51,7 @@ mod blockchain_integration_test { let block_2 = new_block(&store, &block_1b.header); let hash_2 = block_2.hash(); blockchain - .add_block(&block_2, &store) + .add_block(&block_2) .expect("Could not add block 2."); let retrieved_2 = store.get_block_header_by_hash(hash_2).unwrap(); @@ -80,19 +80,19 @@ mod blockchain_integration_test { let genesis_header = store.get_block_header(0).unwrap().unwrap(); // Create blockchain - let blockchain = Blockchain::default(); + let blockchain = Blockchain::default_with_store(store.clone()); // Build a single valid block. let block_1 = new_block(&store, &genesis_header); let hash_1 = block_1.header.compute_block_hash(); - blockchain.add_block(&block_1, &store).unwrap(); + blockchain.add_block(&block_1).unwrap(); apply_fork_choice(&store, hash_1, H256::zero(), H256::zero()).unwrap(); // Build a child, then change its parent, making it effectively a pending block. let mut block_2 = new_block(&store, &block_1.header); block_2.header.parent_hash = H256::random(); let hash_2 = block_2.header.compute_block_hash(); - let result = blockchain.add_block(&block_2, &store); + let result = blockchain.add_block(&block_2); assert!(matches!(result, Err(ChainError::ParentNotFound))); // block 2 should now be pending. @@ -113,12 +113,12 @@ mod blockchain_integration_test { let genesis_hash = genesis_header.compute_block_hash(); // Create blockchain - let blockchain = Blockchain::default(); + let blockchain = Blockchain::default_with_store(store.clone()); // Add first block. Not canonical. let block_1a = new_block(&store, &genesis_header); let hash_1a = block_1a.hash(); - blockchain.add_block(&block_1a, &store).unwrap(); + blockchain.add_block(&block_1a).unwrap(); let retrieved_1a = store.get_block_header_by_hash(hash_1a).unwrap().unwrap(); assert!(!is_canonical(&store, 1, hash_1a).unwrap()); @@ -127,7 +127,7 @@ mod blockchain_integration_test { let block_1b = new_block(&store, &genesis_header); let hash_1b = block_1b.hash(); blockchain - .add_block(&block_1b, &store) + .add_block(&block_1b) .expect("Could not add block 1b."); apply_fork_choice(&store, hash_1b, genesis_hash, genesis_hash).unwrap(); let retrieved_1b = store.get_block_header(1).unwrap().unwrap(); @@ -141,7 +141,7 @@ mod blockchain_integration_test { let block_2 = new_block(&store, &block_1b.header); let hash_2 = block_2.hash(); blockchain - .add_block(&block_2, &store) + .add_block(&block_2) .expect("Could not add block 2."); apply_fork_choice(&store, hash_2, genesis_hash, genesis_hash).unwrap(); let retrieved_2 = store.get_block_header_by_hash(hash_2).unwrap(); @@ -175,20 +175,20 @@ mod blockchain_integration_test { let genesis_hash = genesis_header.compute_block_hash(); // Create blockchain - let blockchain = Blockchain::default(); + let blockchain = Blockchain::default_with_store(store.clone()); // Add block at height 1. 
let block_1 = new_block(&store, &genesis_header); let hash_1 = block_1.hash(); blockchain - .add_block(&block_1, &store) + .add_block(&block_1) .expect("Could not add block 1b."); // Add child at height 2. let block_2 = new_block(&store, &block_1.header); let hash_2 = block_2.hash(); blockchain - .add_block(&block_2, &store) + .add_block(&block_2) .expect("Could not add block 2."); assert!(!is_canonical(&store, 1, hash_1).unwrap()); @@ -224,19 +224,19 @@ mod blockchain_integration_test { let genesis_hash = genesis_header.compute_block_hash(); // Create blockchain - let blockchain = Blockchain::default(); + let blockchain = Blockchain::default_with_store(store.clone()); // Add block at height 1. let block_1 = new_block(&store, &genesis_header); blockchain - .add_block(&block_1, &store) + .add_block(&block_1) .expect("Could not add block 1b."); // Add child at height 2. let block_2 = new_block(&store, &block_1.header); let hash_2 = block_2.hash(); blockchain - .add_block(&block_2, &store) + .add_block(&block_2) .expect("Could not add block 2."); assert_eq!(latest_canonical_block_hash(&store).unwrap(), genesis_hash); @@ -250,7 +250,7 @@ mod blockchain_integration_test { let block_1b = new_block(&store, &genesis_header); let hash_b = block_1b.hash(); blockchain - .add_block(&block_1b, &store) + .add_block(&block_1b) .expect("Could not add block b."); // The latest block should be the same. @@ -275,7 +275,7 @@ mod blockchain_integration_test { }; // Create blockchain - let blockchain = Blockchain::default(); + let blockchain = Blockchain::default_with_store(store.clone().clone()); let mut block = create_payload(&args, store).unwrap(); blockchain.build_payload(&mut block, store).unwrap(); diff --git a/crates/l2/prover/tests/perf_zkvm.rs b/crates/l2/prover/tests/perf_zkvm.rs index a391c93cc0..71d08630ee 100644 --- a/crates/l2/prover/tests/perf_zkvm.rs +++ b/crates/l2/prover/tests/perf_zkvm.rs @@ -74,14 +74,14 @@ async fn setup() -> (ProgramInput, Block) { let blocks = ethrex_l2::utils::test_data_io::read_chain_file(chain_file_path.to_str().unwrap()); info!("Number of blocks to insert: {}", blocks.len()); - let blockchain = Blockchain::default(); + let blockchain = Blockchain::default_with_store(store.clone()); for block in &blocks { info!( "txs {} in block{}", block.body.transactions.len(), block.header.number ); - blockchain.add_block(block, &store).unwrap(); + blockchain.add_block(block).unwrap(); } let block_to_prove = blocks.last().unwrap(); diff --git a/crates/l2/utils/prover/save_state.rs b/crates/l2/utils/prover/save_state.rs index a5778bd1be..276206fdd9 100644 --- a/crates/l2/utils/prover/save_state.rs +++ b/crates/l2/utils/prover/save_state.rs @@ -420,9 +420,9 @@ mod tests { let blocks = test_data_io::read_chain_file(chain_file_path.to_str().unwrap()); // create blockchain - let blockchain = Blockchain::default(); + let blockchain = Blockchain::default_with_store(store.clone()); for block in &blocks { - blockchain.add_block(block, &store).unwrap(); + blockchain.add_block(block).unwrap(); } let mut account_updates_vec: Vec> = Vec::new(); diff --git a/crates/l2/utils/test_data_io.rs b/crates/l2/utils/test_data_io.rs index 14da1a5e71..83289f8017 100644 --- a/crates/l2/utils/test_data_io.rs +++ b/crates/l2/utils/test_data_io.rs @@ -72,9 +72,9 @@ pub fn generate_program_input( let store = Store::new("memory", EngineType::InMemory)?; store.add_initial_state(genesis)?; // create blockchain - let blockchain = Blockchain::default(); + let blockchain = 
Blockchain::default_with_store(store.clone()); for block in chain { - blockchain.add_block(&block, &store)?; + blockchain.add_block(&block)?; } let parent_block_header = store diff --git a/crates/networking/p2p/sync.rs b/crates/networking/p2p/sync.rs index 8b5d3f5de2..47cb86e3f1 100644 --- a/crates/networking/p2p/sync.rs +++ b/crates/networking/p2p/sync.rs @@ -12,7 +12,7 @@ use ethrex_common::{ BigEndianHash, H256, U256, U512, }; use ethrex_rlp::error::RLPDecodeError; -use ethrex_storage::{error::StoreError, Store, STATE_TRIE_SEGMENTS}; +use ethrex_storage::{error::StoreError, EngineType, Store, STATE_TRIE_SEGMENTS}; use ethrex_trie::{Nibbles, Node, TrieError, TrieState}; use state_healing::heal_state_trie; use state_sync::state_sync; @@ -122,7 +122,9 @@ impl SyncManager { trie_rebuilder: None, // This won't be used cancel_token: CancellationToken::new(), - blockchain: Blockchain::default(), + blockchain: Blockchain::default_with_store( + Store::new("", EngineType::InMemory).unwrap(), + ), } } @@ -280,7 +282,7 @@ impl SyncManager { let block = store .get_block_by_hash(*hash)? .ok_or(SyncError::CorruptDB)?; - self.blockchain.add_block(&block, &store)?; + self.blockchain.add_block(&block)?; store.set_canonical_block(block.header.number, *hash)?; store.update_latest_block_number(block.header.number)?; } @@ -331,7 +333,7 @@ async fn download_and_run_blocks( .ok_or(SyncError::CorruptDB)?; let number = header.number; let block = Block::new(header, body); - if let Err(error) = blockchain.add_block(&block, &store) { + if let Err(error) = blockchain.add_block(&block) { invalid_ancestors.insert(hash, last_valid_hash); return Err(error.into()); } diff --git a/crates/networking/rpc/engine/payload.rs b/crates/networking/rpc/engine/payload.rs index 0eaa9cc46e..5475dd63a1 100644 --- a/crates/networking/rpc/engine/payload.rs +++ b/crates/networking/rpc/engine/payload.rs @@ -429,7 +429,7 @@ fn execute_payload(block: &Block, context: &RpcApiContext) -> Result Date: Mon, 24 Feb 2025 13:04:00 -0300 Subject: [PATCH 12/16] add todo comment forkchoice update --- crates/blockchain/blockchain.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/blockchain/blockchain.rs b/crates/blockchain/blockchain.rs index 7f333bb7ae..08f3c28b79 100644 --- a/crates/blockchain/blockchain.rs +++ b/crates/blockchain/blockchain.rs @@ -92,6 +92,7 @@ impl Blockchain { Ok(()) } + //TODO: Forkchoice Update shouldn't be part of this function pub fn import_blocks(&self, blocks: &Vec) { let size = blocks.len(); for block in blocks { From 708bc91fe84647d77636403c562bef02a9ad20db Mon Sep 17 00:00:00 2001 From: JereSalo Date: Mon, 24 Feb 2025 13:07:51 -0300 Subject: [PATCH 13/16] take store out of build_payload --- crates/blockchain/payload.rs | 8 ++------ crates/blockchain/smoke_test.rs | 2 +- crates/networking/rpc/engine/payload.rs | 2 +- 3 files changed, 4 insertions(+), 8 deletions(-) diff --git a/crates/blockchain/payload.rs b/crates/blockchain/payload.rs index a1c5a747dc..de3d8b9162 100644 --- a/crates/blockchain/payload.rs +++ b/crates/blockchain/payload.rs @@ -233,13 +233,9 @@ impl<'a> PayloadBuildContext<'a> { impl Blockchain { /// Completes the payload building process, return the block value - pub fn build_payload( - &self, - payload: &mut Block, - store: &Store, - ) -> Result<(BlobsBundle, U256), ChainError> { + pub fn build_payload(&self, payload: &mut Block) -> Result<(BlobsBundle, U256), ChainError> { debug!("Building payload"); - let mut evm_state = evm_state(store.clone(), payload.header.parent_hash); + let mut 
evm_state = evm_state(self.storage.clone(), payload.header.parent_hash); let mut context = PayloadBuildContext::new(payload, &mut evm_state)?; self.apply_system_operations(&mut context)?; self.apply_withdrawals(&mut context)?; diff --git a/crates/blockchain/smoke_test.rs b/crates/blockchain/smoke_test.rs index e7c31a2fed..0d85ab7ea2 100644 --- a/crates/blockchain/smoke_test.rs +++ b/crates/blockchain/smoke_test.rs @@ -278,7 +278,7 @@ mod blockchain_integration_test { let blockchain = Blockchain::default_with_store(store.clone().clone()); let mut block = create_payload(&args, store).unwrap(); - blockchain.build_payload(&mut block, store).unwrap(); + blockchain.build_payload(&mut block).unwrap(); block } diff --git a/crates/networking/rpc/engine/payload.rs b/crates/networking/rpc/engine/payload.rs index 5475dd63a1..8d2e191f0b 100644 --- a/crates/networking/rpc/engine/payload.rs +++ b/crates/networking/rpc/engine/payload.rs @@ -551,7 +551,7 @@ fn build_execution_payload_response( .map_err(|_| RpcErr::Internal("Error locking syncer".to_string()))?; syncer .blockchain - .build_payload(&mut payload_block, &context.storage) + .build_payload(&mut payload_block) .map_err(|err| RpcErr::Internal(err.to_string()))? }; From 07415742506c9ba01d40e24158cb230bfe221092 Mon Sep 17 00:00:00 2001 From: JereSalo Date: Mon, 24 Feb 2025 15:35:18 -0300 Subject: [PATCH 14/16] fix tiny errors --- crates/blockchain/blockchain.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/crates/blockchain/blockchain.rs b/crates/blockchain/blockchain.rs index 90f1ca75fd..88807d8d97 100644 --- a/crates/blockchain/blockchain.rs +++ b/crates/blockchain/blockchain.rs @@ -5,9 +5,6 @@ pub mod mempool; pub mod payload; mod smoke_test; -use std::{ops::Div, time::Instant}; -use tracing::info; - use error::{ChainError, InvalidBlockError}; use ethrex_common::constants::GAS_PER_BLOB; use ethrex_common::types::requests::{compute_requests_hash, EncodedRequests, Requests}; @@ -17,6 +14,7 @@ use ethrex_common::types::{ BlockHeader, BlockNumber, ChainConfig, EIP4844Transaction, Receipt, Transaction, }; use ethrex_common::H256; +use std::{ops::Div, time::Instant}; use ethrex_storage::error::StoreError; use ethrex_storage::Store; From 9917653f3d3e8e5daaaec28fbc0fc0b0dd076500 Mon Sep 17 00:00:00 2001 From: JereSalo Date: Mon, 24 Feb 2025 15:41:19 -0300 Subject: [PATCH 15/16] remove comment --- crates/networking/rpc/engine/payload.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/crates/networking/rpc/engine/payload.rs b/crates/networking/rpc/engine/payload.rs index d0915e5c2f..e0c228b1bd 100644 --- a/crates/networking/rpc/engine/payload.rs +++ b/crates/networking/rpc/engine/payload.rs @@ -513,7 +513,6 @@ fn execute_payload(block: &Block, context: &RpcApiContext) -> Result Date: Mon, 24 Feb 2025 16:42:54 -0300 Subject: [PATCH 16/16] Update crates/blockchain/blockchain.rs Co-authored-by: Ivan Litteri <67517699+ilitteri@users.noreply.github.com> --- crates/blockchain/blockchain.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/blockchain/blockchain.rs b/crates/blockchain/blockchain.rs index 88807d8d97..d697424b02 100644 --- a/crates/blockchain/blockchain.rs +++ b/crates/blockchain/blockchain.rs @@ -148,7 +148,7 @@ impl Blockchain { } } } - info!("Added {} blocks to blockchain", size); + info!("Added {size} blocks to blockchain"); } }
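
For reference, a minimal usage sketch of the API this series ends up with, based only on calls visible in the diffs above (Store::new / add_initial_state as in crates/l2/utils/test_data_io.rs, Blockchain::default_with_store, add_block, and apply_fork_choice). The Genesis import path and the replay_chain wrapper are assumptions for illustration, not part of the patches; the point is that the Store is now owned by Blockchain, so callers no longer pass it to add_block or build_payload.

use ethrex_blockchain::{fork_choice::apply_fork_choice, Blockchain};
use ethrex_common::types::{Block, Genesis};
use ethrex_storage::{EngineType, Store};

// Sketch: replay a chain into a fresh in-memory store and make the last block canonical.
fn replay_chain(genesis: Genesis, blocks: Vec<Block>) {
    let store = Store::new("memory", EngineType::InMemory).expect("create store");
    store.add_initial_state(genesis).expect("load genesis");

    // The store now lives inside Blockchain; Blockchain::new(evm, store) picks an
    // explicit EVM backend, while default_with_store uses the default backend.
    let blockchain = Blockchain::default_with_store(store.clone());

    for block in &blocks {
        // No &store argument any more.
        blockchain.add_block(block).expect("add block");
    }

    // Fork choice still takes the store directly (see the TODO added in patch 12).
    if let Some(head) = blocks.last() {
        let hash = head.hash();
        apply_fork_choice(&store, hash, hash, hash).expect("fork choice");
    }
}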