test(execution): execution test runner (#426)

* wip run chain test

* dump pre test state

* bug: fix casting to revm spec

* wip move execution to std thread

* wip scoped thread execution

* fmt clippy

* Update crates/stages/src/stages/execution.rs

* Update bin/reth/Cargo.toml

Co-authored-by: Georgios Konstantopoulos <me@gakonst.com>

* Update bin/reth/src/test_eth_chain/models.rs

Co-authored-by: Georgios Konstantopoulos <me@gakonst.com>

* Correct models for chain tests

Co-authored-by: rakita <dragan0rakita@gmail.com>
Co-authored-by: rakita <rakita@users.noreply.github.com>
Co-authored-by: Georgios Konstantopoulos <me@gakonst.com>
Roman Krasiuk
2022-12-15 14:39:55 +02:00
committed by GitHub
parent 4c298ac022
commit 9208f2fd9b
14 changed files with 393 additions and 110 deletions

Cargo.lock generated
View File

@@ -3057,11 +3057,10 @@ dependencies = [
[[package]]
name = "rayon"
version = "1.6.0"
version = "1.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e060280438193c554f654141c9ea9417886713b7acd75974c85b18a69a88e0b"
checksum = "6db3a213adf02b3bcfd2d3846bb41cb22857d131789e01df434fb7e7bc0759b7"
dependencies = [
"crossbeam-deque",
"either",
"rayon-core",
]
@@ -3173,13 +3172,16 @@ version = "0.1.0"
dependencies = [
"clap 4.0.22",
"eyre",
"futures",
"reth-consensus",
"reth-db",
"reth-downloaders",
"reth-executor",
"reth-interfaces",
"reth-network",
"reth-primitives",
"reth-provider",
"reth-rlp",
"reth-rpc",
"reth-stages",
"reth-transaction-pool",
@@ -3374,6 +3376,8 @@ dependencies = [
"rlp",
"sha3",
"thiserror",
"tokio",
"tracing",
"triehash",
]

View File

@@ -9,14 +9,16 @@ readme = "README.md"
[dependencies]
# reth
reth-primitives = { path = "../../crates/primitives" }
reth-db = {path = "../../crates/storage/db", features = ["mdbx"]}
reth-db = {path = "../../crates/storage/db", features = ["mdbx", "test-utils"]}
# TODO: Temporary use of the test-utils feature
reth-provider = {path = "../../crates/storage/provider", features = ["test-utils"]}
reth-stages = {path = "../../crates/stages"}
reth-interfaces = {path = "../../crates/interfaces", features = ["test-utils"] }
reth-transaction-pool = {path = "../../crates/transaction-pool"}
reth-consensus = {path = "../../crates/consensus"}
reth-executor = { path = "../../crates/executor"}
reth-rpc = {path = "../../crates/net/rpc"}
reth-rlp = { path = "../../crates/common/rlp" }
reth-network = {path = "../../crates/net/network"}
reth-downloaders = {path = "../../crates/net/downloaders"}
@@ -35,3 +37,4 @@ tokio = { version = "1.21", features = ["sync", "macros", "rt-multi-thread"] }
serde = "1.0"
serde_json = "1.0"
walkdir = "2.3"
futures = "0.3.25"

View File

@@ -3,6 +3,7 @@
use crate::util;
use clap::Parser;
use std::path::PathBuf;
use tracing::{error, info};
/// Models for parsing JSON blockchain tests
pub mod models;
/// Ethereum blockchain test runner
@@ -19,35 +20,30 @@ impl Command {
/// Execute the command
pub async fn execute(self) -> eyre::Result<()> {
// note the use of `into_iter()` to consume `items`
let task_group: Vec<_> = self
let futs: Vec<_> = self
.path
.iter()
.map(|item| {
util::find_all_files_with_postfix(item, ".json").into_iter().map(|file| {
let tfile = file.clone();
let join = tokio::spawn(async move { runner::run_test(tfile.as_path()).await });
(join, file)
})
})
.flat_map(|item| util::find_all_files_with_postfix(item, ".json"))
.map(|file| async { (runner::run_test(file.clone()).await, file) })
.collect();
let results = futures::future::join_all(futs).await;
// await all test futures to complete and collect the results
let mut num_of_failed = 0;
let mut num_of_passed = 0;
for tasks in task_group {
for (join, file) in tasks {
match join.await.unwrap() {
Ok(_) => {
num_of_passed += 1;
}
Err(error) => {
num_of_failed += 1;
println!("Test {file:?} failed:\n {error}\n");
}
for (result, file) in results {
match result {
Ok(_) => {
num_of_passed += 1;
}
Err(error) => {
num_of_failed += 1;
error!("Test {file:?} failed:\n {error}\n");
}
}
}
println!("\nPASSED {num_of_passed}/{} tests\n", num_of_passed + num_of_failed);
info!("\nPASSED {num_of_passed}/{} tests\n", num_of_passed + num_of_failed);
Ok(())
}
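For reference, a minimal standalone sketch of the join-all pattern the runner now uses, with a hypothetical `check_file` standing in for `runner::run_test` and `futures` 0.3 assumed (as added to `bin/reth/Cargo.toml` above):

    use std::path::PathBuf;

    // Hypothetical stand-in for `runner::run_test`.
    async fn check_file(_path: &std::path::Path) -> Result<(), String> {
        Ok(())
    }

    /// Drives every test file concurrently on the current task; returns (passed, failed).
    async fn run_all(paths: Vec<PathBuf>) -> (usize, usize) {
        // One future per file; `async move` keeps each result paired with its path.
        let futs: Vec<_> =
            paths.into_iter().map(|p| async move { (check_file(&p).await, p) }).collect();
        // `join_all` polls all futures concurrently without spawning extra tasks,
        // unlike the previous per-file `tokio::spawn`.
        let results = futures::future::join_all(futs).await;
        let failed = results.iter().filter(|(res, _)| res.is_err()).count();
        (results.len() - failed, failed)
    }

The trade-off versus `tokio::spawn` is that the tests now share the awaiting task instead of being distributed across the runtime's worker threads.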

View File

@@ -1,4 +1,7 @@
use reth_primitives::{Address, Bloom, Bytes, JsonU256, H160, H256, H64};
use reth_primitives::{
Address, BigEndianHash, Bloom, Bytes, Header as RethHeader, JsonU256, SealedHeader, H160, H256,
H64,
};
use serde::{self, Deserialize};
use std::collections::BTreeMap;
@@ -70,6 +73,32 @@ pub struct Header {
pub base_fee_per_gas: Option<JsonU256>,
}
impl From<Header> for SealedHeader {
fn from(value: Header) -> Self {
SealedHeader::new(
RethHeader {
base_fee_per_gas: value.base_fee_per_gas.map(|v| v.0.as_u64()),
beneficiary: value.coinbase,
difficulty: value.difficulty.0,
extra_data: value.extra_data.0,
gas_limit: value.gas_limit.0.as_u64(),
gas_used: value.gas_used.0.as_u64(),
mix_hash: value.mix_hash,
nonce: value.nonce.into_uint().as_u64(),
number: value.number.0.as_u64(),
timestamp: value.timestamp.0.as_u64(),
transactions_root: value.transactions_trie,
receipts_root: value.receipt_trie,
ommers_hash: value.uncle_hash,
state_root: value.state_root,
parent_hash: value.parent_hash,
logs_bloom: Bloom::default(), // TODO: ?
},
value.hash,
)
}
}
/// Ethereum blockchain test data Block.
#[derive(Debug, PartialEq, Eq, Deserialize)]
#[serde(deny_unknown_fields)]
@@ -86,9 +115,12 @@ pub struct Block {
}
/// Ethereum blockchain test data State.
//#[derive(Clone, Debug, Eq, PartialEq, Deserialize)]
//#[serde(deny_unknown_fields)]
//pub struct State(pub RootOrState);
#[derive(Clone, Debug, Eq, PartialEq, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct State(pub RootOrState);
pub struct State(pub BTreeMap<Address, Account>);
/// Merkle root hash or storage accounts.
#[derive(Clone, Debug, PartialEq, Eq, Deserialize)]
@@ -96,7 +128,7 @@ pub struct State(pub RootOrState);
pub enum RootOrState {
/// If state is too big, only state root is present
Root(H256),
/// Staet
/// State
State(BTreeMap<Address, Account>),
}
@@ -114,45 +146,79 @@ pub struct Account {
pub storage: BTreeMap<JsonU256, JsonU256>,
}
/// Ethereum blockchain test data State.
#[derive(Debug, PartialEq, Eq, Deserialize)]
/// Fork Spec
#[derive(Debug, PartialEq, Eq, PartialOrd, Hash, Ord, Deserialize)]
pub enum ForkSpec {
/// Fork EIP150.
EIP150,
/// Fork EIP158.
EIP158,
/// Fork Frontier.
/// Frontier
Frontier,
/// Fork Homestead.
Homestead,
/// Fork Byzantium.
Byzantium,
/// Fork Constantinople.
Constantinople,
/// Fork ConstantinopleFix.
ConstantinopleFix,
/// Fork Istanbul.
Istanbul,
/// Fork EIP158ToByzantiumAt5.
EIP158ToByzantiumAt5,
/// Fork FrontierToHomesteadAt5.
/// Frontier to Homestead
FrontierToHomesteadAt5,
/// Fork HomesteadToDaoAt5.
/// Homestead
Homestead,
/// Homestead to DAO
HomesteadToDaoAt5,
/// Fork HomesteadToEIP150At5.
/// Homestead to Tangerine
HomesteadToEIP150At5,
/// Fork ByzantiumToConstantinopleAt5.
ByzantiumToConstantinopleAt5,
/// Fork ByzantiumToConstantinopleFixAt5.
/// Tangerine
EIP150,
/// Spurious Dragon
EIP158, // EIP-161: State trie clearing
/// Spurious Dragon to Byzantium
EIP158ToByzantiumAt5,
/// Byzantium
Byzantium,
/// Byzantium to Constantinople
ByzantiumToConstantinopleAt5, // SKIPPED
/// Byzantium to Constantinople fix
ByzantiumToConstantinopleFixAt5,
/// Fork Berlin.
/// Constantinople
Constantinople, // SKIPPED
/// Constantinople fix
ConstantinopleFix,
/// Istanbul
Istanbul,
/// Berlin
Berlin,
/// Fork London.
London,
/// Fork BerlinToLondonAt5.
/// Berlin to London
BerlinToLondonAt5,
/// Fork Merge,
/// London
London,
/// Paris aka The Merge
Merge,
/// Merge EOF test
#[serde(alias = "Merge+3540+3670")]
MergeEOF,
/// After Merge Init Code test
#[serde(alias = "Merge+3860")]
MergeMeterInitCode,
}
impl From<ForkSpec> for reth_executor::SpecUpgrades {
fn from(fork_spec: ForkSpec) -> Self {
match fork_spec {
ForkSpec::Frontier => Self::new_frontier_activated(),
ForkSpec::Homestead | ForkSpec::FrontierToHomesteadAt5 => {
Self::new_homestead_activated()
}
ForkSpec::EIP150 | ForkSpec::HomesteadToDaoAt5 | ForkSpec::HomesteadToEIP150At5 => {
Self::new_tangerine_whistle_activated()
}
ForkSpec::EIP158 => Self::new_spurious_dragon_activated(),
ForkSpec::Byzantium |
ForkSpec::EIP158ToByzantiumAt5 |
ForkSpec::ConstantinopleFix |
ForkSpec::ByzantiumToConstantinopleFixAt5 => Self::new_byzantium_activated(),
ForkSpec::Istanbul => Self::new_istanbul_activated(),
ForkSpec::Berlin => Self::new_berlin_activated(),
ForkSpec::London | ForkSpec::BerlinToLondonAt5 => Self::new_london_activated(),
ForkSpec::Merge => Self::new_paris_activated(),
ForkSpec::MergeEOF => Self::new_paris_activated(),
ForkSpec::MergeMeterInitCode => Self::new_paris_activated(),
ForkSpec::ByzantiumToConstantinopleAt5 | ForkSpec::Constantinople => {
panic!("Overridden with PETERSBURG")
}
}
}
}
/// Json Block test possible engine kind.
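A small usage sketch of the `ForkSpec` → `SpecUpgrades` conversion above, assuming the crate layout in this PR; `spec_for` is a hypothetical helper, and fork names arrive as plain JSON strings in the fixtures:

    use crate::test_eth_chain::models::ForkSpec;
    use reth_executor::SpecUpgrades;

    /// Hypothetical helper: parse a fork name exactly as serde sees it in the
    /// fixtures (e.g. "Berlin" or "Merge+3540+3670") and convert it.
    fn spec_for(network: &str) -> eyre::Result<SpecUpgrades> {
        let fork: ForkSpec = serde_json::from_str(&format!("\"{network}\""))?;
        Ok(fork.into())
    }

    // spec_for("Berlin")?.revm_spec(1) yields revm::BERLIN, matching
    // SpecUpgrades::new_berlin_activated() in the executor tests below.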

View File

@@ -1,13 +1,154 @@
use super::models::Test;
use std::path::Path;
use crate::test_eth_chain::models::ForkSpec;
use reth_db::{
database::Database,
mdbx::{test_utils::create_test_rw_db, WriteMap},
tables,
transaction::{DbTx, DbTxMut},
};
use reth_executor::SpecUpgrades;
use reth_primitives::{
keccak256, Account as RethAccount, BigEndianHash, BlockLocked, SealedHeader, StorageEntry, H256,
};
use reth_rlp::Decodable;
use reth_stages::{stages::execution::ExecutionStage, ExecInput, Stage, StageDB};
use std::{
ffi::OsStr,
path::{Path, PathBuf},
};
use tracing::debug;
/// Returns `true` for tests that exercise edge cases which cannot occur on mainnet; we skip them.
pub fn should_skip(path: &Path) -> bool {
// Funky test with a `bigint 0x00` value in the JSON :) Not possible on mainnet and would
// require a custom JSON parser. https://github.com/ethereum/tests/issues/971
if path.file_name() == Some(OsStr::new("ValueOverflow.json")) {
return true
}
// The tx bytes are of type 0x02; we don't parse raw tx bytes, so this test cannot fail as expected.
if path.file_name() == Some(OsStr::new("typeTwoBerlin.json")) {
return true
}
// The test checks nonce overflow. We handle this correctly, but we don't parse the expected
// exception from the test suite. There are more nonce overflow tests in internal
// calls/creates; those tests pass and are enabled.
if path.file_name() == Some(OsStr::new("CreateTransactionHighNonce.json")) {
return true
}
// The test checks gas price overflow. We handle this correctly, but the error does not match
// the test's specific exception.
if path.file_name() == Some(OsStr::new("HighGasPrice.json")) {
return true
}
// Skip tests where basefee/accesslist/difficulty is present but shouldn't be supported in
// London/Berlin/The Merge. https://github.com/ethereum/tests/blob/5b7e1ab3ffaf026d99d20b17bb30f533a2c80c8b/GeneralStateTests/stExample/eip1559.json#L130
// These tests are expected not to execute.
if path.file_name() == Some(OsStr::new("accessListExample.json")) ||
path.file_name() == Some(OsStr::new("basefeeExample.json")) ||
path.file_name() == Some(OsStr::new("eip1559.json")) ||
path.file_name() == Some(OsStr::new("mergeTest.json"))
{
return true
}
// These tests are passing, but they take a lot of time to execute so we are going to skip them.
if path.file_name() == Some(OsStr::new("loopExp.json")) ||
path.file_name() == Some(OsStr::new("Call50000_sha256.json")) ||
path.file_name() == Some(OsStr::new("static_Call50000_sha256.json")) ||
path.file_name() == Some(OsStr::new("loopMul.json")) ||
path.file_name() == Some(OsStr::new("CALLBlake2f_MaxRounds.json"))
{
return true
}
false
}
/// Run one JSON-encoded Ethereum blockchain test at the specified path.
pub async fn run_test(path: &Path) -> eyre::Result<()> {
pub async fn run_test(path: PathBuf) -> eyre::Result<()> {
let path = path.as_path();
let json_file = std::fs::read(path)?;
let suits: Test = serde_json::from_reader(&*json_file)?;
let suites: Test = serde_json::from_reader(&*json_file)?;
for suit in suits.0 {
println!("TODO:{:?}", suit.0);
if should_skip(path) {
return Ok(())
}
for (name, suite) in suites.0 {
if matches!(
suite.network,
ForkSpec::ByzantiumToConstantinopleAt5 |
ForkSpec::Constantinople |
ForkSpec::MergeEOF |
ForkSpec::MergeMeterInitCode
) {
continue
}
// if matches!(suite.pre, State(RootOrState::Root(_))) {}
let pre_state = suite.pre.0;
debug!("Executing test: {name} for spec: {:?}", suite.network);
let spec_upgrades: SpecUpgrades = suite.network.into();
// Block rewards only exist before Paris (the Merge); if Paris is active from genesis there are none.
let has_block_reward = spec_upgrades.paris != 0;
// Create db and acquire transaction
let db = create_test_rw_db::<WriteMap>();
let tx = db.tx_mut()?;
// insert genesis
let header: SealedHeader = suite.genesis_block_header.into();
let genesis_block = BlockLocked { header, body: vec![], ommers: vec![] };
reth_provider::insert_canonical_block(&tx, &genesis_block, has_block_reward)?;
suite.blocks.iter().try_for_each(|block| -> eyre::Result<()> {
let decoded = BlockLocked::decode(&mut block.rlp.as_ref())?;
reth_provider::insert_canonical_block(&tx, &decoded, has_block_reward)?;
Ok(())
})?;
pre_state.into_iter().try_for_each(|(address, account)| -> eyre::Result<()> {
let has_code = !account.code.is_empty();
let code_hash = if has_code { Some(keccak256(&account.code)) } else { None };
tx.put::<tables::PlainAccountState>(
address,
RethAccount {
balance: account.balance.0,
nonce: account.nonce.0.as_u64(),
bytecode_hash: code_hash,
},
)?;
if let Some(code_hash) = code_hash {
tx.put::<tables::Bytecodes>(code_hash, account.code.to_vec())?;
}
account.storage.iter().try_for_each(|(k, v)| {
tx.put::<tables::PlainStorageState>(
address,
StorageEntry { key: H256::from_uint(&k.0), value: v.0 },
)
})?;
Ok(())
})?;
// Commit the pre suite state
tx.commit()?;
// Initialize the execution stage
// Hardcode the chain_id to Ethereum mainnet (1).
let mut stage =
ExecutionStage::new(reth_executor::Config { chain_id: 1.into(), spec_upgrades });
// Call execution stage
let input = ExecInput::default();
stage.execute(&mut StageDB::new(db.as_ref())?, input).await?;
// TODO: Validate the post state.
}
Ok(())
}
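Purely illustrative: the skip list could also be applied while collecting files, before any fixture is read (`run_test` above reads and parses the JSON before consulting `should_skip`). This assumes `find_all_files_with_postfix` accepts a path reference and yields owned `PathBuf`s, as its use in the command's `execute()` suggests:

    use std::path::{Path, PathBuf};

    /// Hypothetical helper: gather runnable fixtures under `dir`, dropping
    /// everything `should_skip` would reject.
    fn collect_runnable_tests(dir: &Path) -> Vec<PathBuf> {
        crate::util::find_all_files_with_postfix(dir, ".json")
            .into_iter()
            .filter(|path| !should_skip(path))
            .collect()
    }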

View File

@@ -24,6 +24,8 @@ async-trait = "0.1.57"
thiserror = "1.0.37"
eyre = "0.6.8"
auto_impl = "1.0"
tracing = "0.1.37"
tokio = { version = "1.21.2", features = ["sync"] }
triehash = "0.8"
# See to replace hashers to simplify libraries

View File

@@ -51,7 +51,7 @@ pub struct SpecUpgrades {
impl SpecUpgrades {
/// After the Merge (Paris) the block reward was removed from the execution layer.
pub fn has_block_reward(&self, block_num: BlockNumber) -> bool {
block_num <= self.paris
block_num < self.paris
}
/// Ethereum mainnet spec
@@ -79,7 +79,24 @@ impl SpecUpgrades {
/// New homestead enabled spec
pub fn new_homestead_activated() -> Self {
Self { homestead: 0, ..Self::new_ethereum() }
Self {
homestead: 0,
frontier: u64::MAX,
tangerine_whistle: u64::MAX,
spurious_dragon: u64::MAX,
byzantium: u64::MAX,
petersburg: u64::MAX,
istanbul: u64::MAX,
berlin: u64::MAX,
london: u64::MAX,
paris: u64::MAX,
shanghai: u64::MAX,
}
}
/// New frontier enabled spec
pub fn new_frontier_activated() -> Self {
Self { frontier: 0, ..Self::new_ethereum() }
}
/// New tangerine enabled spec
@@ -125,18 +142,54 @@ impl SpecUpgrades {
/// return revm_spec from spec configuration.
pub fn revm_spec(&self, for_block: BlockNumber) -> revm::SpecId {
match for_block {
b if self.shanghai >= b => revm::MERGE_EOF,
b if self.paris >= b => revm::MERGE,
b if self.london >= b => revm::LONDON,
b if self.berlin >= b => revm::BERLIN,
b if self.istanbul >= b => revm::ISTANBUL,
b if self.petersburg >= b => revm::PETERSBURG,
b if self.byzantium >= b => revm::BYZANTIUM,
b if self.spurious_dragon >= b => revm::SPURIOUS_DRAGON,
b if self.tangerine_whistle >= b => revm::TANGERINE,
b if self.homestead >= b => revm::HOMESTEAD,
b if self.frontier >= b => revm::FRONTIER,
b if b >= self.shanghai => revm::MERGE_EOF,
b if b >= self.paris => revm::MERGE,
b if b >= self.london => revm::LONDON,
b if b >= self.berlin => revm::BERLIN,
b if b >= self.istanbul => revm::ISTANBUL,
b if b >= self.petersburg => revm::PETERSBURG,
b if b >= self.byzantium => revm::BYZANTIUM,
b if b >= self.spurious_dragon => revm::SPURIOUS_DRAGON,
b if b >= self.tangerine_whistle => revm::TANGERINE,
b if b >= self.homestead => revm::HOMESTEAD,
b if b >= self.frontier => revm::FRONTIER,
_ => panic!("wrong configuration"),
}
}
}
#[cfg(test)]
mod tests {
use super::SpecUpgrades;
#[test]
fn test_to_revm_spec() {
assert_eq!(SpecUpgrades::new_paris_activated().revm_spec(1), revm::MERGE);
assert_eq!(SpecUpgrades::new_london_activated().revm_spec(1), revm::LONDON);
assert_eq!(SpecUpgrades::new_berlin_activated().revm_spec(1), revm::BERLIN);
assert_eq!(SpecUpgrades::new_istanbul_activated().revm_spec(1), revm::ISTANBUL);
assert_eq!(SpecUpgrades::new_petersburg_activated().revm_spec(1), revm::PETERSBURG);
assert_eq!(SpecUpgrades::new_byzantium_activated().revm_spec(1), revm::BYZANTIUM);
assert_eq!(
SpecUpgrades::new_spurious_dragon_activated().revm_spec(1),
revm::SPURIOUS_DRAGON
);
assert_eq!(SpecUpgrades::new_tangerine_whistle_activated().revm_spec(1), revm::TANGERINE);
assert_eq!(SpecUpgrades::new_homestead_activated().revm_spec(1), revm::HOMESTEAD);
assert_eq!(SpecUpgrades::new_frontier_activated().revm_spec(1), revm::FRONTIER);
}
#[test]
fn test_eth_spec() {
let spec = SpecUpgrades::new_ethereum();
assert_eq!(spec.revm_spec(15537394 + 10), revm::MERGE);
assert_eq!(spec.revm_spec(15537394 - 10), revm::LONDON);
assert_eq!(spec.revm_spec(12244000 + 10), revm::BERLIN);
assert_eq!(spec.revm_spec(12244000 - 10), revm::ISTANBUL);
assert_eq!(spec.revm_spec(7280000 + 10), revm::PETERSBURG);
assert_eq!(spec.revm_spec(7280000 - 10), revm::BYZANTIUM);
assert_eq!(spec.revm_spec(2675000 + 10), revm::SPURIOUS_DRAGON);
assert_eq!(spec.revm_spec(2675000 - 10), revm::TANGERINE);
assert_eq!(spec.revm_spec(1150000 + 10), revm::HOMESTEAD);
assert_eq!(spec.revm_spec(1150000 - 10), revm::FRONTIER);
}
}
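Both changes above fix boundary directions: `revm_spec` now checks `block >= activation` instead of the reversed comparison, and `has_block_reward` excludes the Merge block itself. A sketch of the reward boundary, assuming `new_ethereum()` sets `paris` to the mainnet Merge block (15537394, the same number used in `test_eth_spec`); it would sit alongside the tests module above:

    #[test]
    fn block_reward_stops_at_paris() {
        let spec = SpecUpgrades::new_ethereum();
        // The last proof-of-work block still pays a block reward...
        assert!(spec.has_block_reward(15537393));
        // ...but the Merge block and everything after it does not.
        assert!(!spec.has_block_reward(15537394));
    }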

View File

@@ -12,7 +12,8 @@ use reth_primitives::{
};
use reth_provider::StateProvider;
use revm::{
db::AccountState, Account as RevmAccount, AccountInfo, AnalysisKind, Bytecode, Database, EVM,
db::AccountState, Account as RevmAccount, AccountInfo, AnalysisKind, Bytecode, Database,
Return, EVM,
};
use std::collections::BTreeMap;
@@ -248,7 +249,7 @@ pub fn execute_and_verify_receipt<DB: StateProvider>(
header: &Header,
transactions: &[TransactionSignedEcRecovered],
config: &Config,
db: &mut SubState<DB>,
db: SubState<DB>,
) -> Result<ExecutionResult, Error> {
let transaction_change_set = execute(header, transactions, config, db)?;
@@ -289,7 +290,7 @@ pub fn execute<DB: StateProvider>(
header: &Header,
transactions: &[TransactionSignedEcRecovered],
config: &Config,
db: &mut SubState<DB>,
db: SubState<DB>,
) -> Result<ExecutionResult, Error> {
let mut evm = EVM::new();
evm.database(db);
@@ -319,7 +320,10 @@ pub fn execute<DB: StateProvider>(
revm_wrap::fill_tx_env(&mut evm.env.tx, transaction);
// Execute transaction.
let (revm::ExecutionResult { exit_reason, gas_used, logs, .. }, state) = evm.transact();
let (revm::ExecutionResult { exit_reason, gas_used, logs, gas_refunded, .. }, state) =
evm.transact();
tracing::trace!(target:"evm","Executing transaction {:?}, gas:{gas_used} refund:{gas_refunded}",transaction.hash());
// Fatal internal error.
if exit_reason == revm::Return::FatalExternalError {
@@ -327,15 +331,11 @@ pub fn execute<DB: StateProvider>(
}
// Success flag was added in `EIP-658: Embedding transaction status code in receipts`.
let is_success = matches!(
exit_reason,
revm::Return::Continue |
revm::Return::Stop |
revm::Return::Return |
revm::Return::SelfDestruct
);
// TODO add handling of other errors
let is_success = match exit_reason {
revm::return_ok!() => true,
revm::return_revert!() => false,
e => return Err(Error::EVMError { error_code: e as u32 }),
};
// Add spent gas.
cumulative_gas_used += gas_used;
@@ -509,13 +509,12 @@ mod tests {
// make it berlin fork
config.spec_upgrades = SpecUpgrades::new_berlin_activated();
let mut db = SubState::new(State::new(db));
let db = SubState::new(State::new(db));
let transactions: Vec<TransactionSignedEcRecovered> =
block.body.iter().map(|tx| tx.try_ecrecovered().unwrap()).collect();
// execute chain and verify receipts
let out =
execute_and_verify_receipt(&block.header, &transactions, &config, &mut db).unwrap();
let out = execute_and_verify_receipt(&block.header, &transactions, &config, db).unwrap();
assert_eq!(out.changeset.len(), 1, "Should have executed one transaction");

View File

@@ -12,4 +12,4 @@ pub mod config;
pub mod executor;
/// Wrapper around revm database and types
pub mod revm_wrap;
pub use config::Config;
pub use config::{Config, SpecUpgrades};

View File

@@ -38,6 +38,8 @@ pub enum Error {
},
#[error("Block gas used {got} is different from expected gas used {expected}.")]
BlockGasUsed { got: u64, expected: u64 },
#[error("Revm error {error_code}")]
EVMError { error_code: u32 },
#[error("Provider error")]
ProviderError,
}

View File

@@ -104,7 +104,7 @@ pub enum DatabaseIntegrityError {
TransactionsSignerGap { missing: TxNumber },
#[error("Got to the end of transaction table")]
EndOfTransactionTable,
#[error("Got to the end of transaction table")]
#[error("Got to the end of the transaction sender table")]
EndOfTransactionSenderTable,
/// The total difficulty from the block header is missing.
#[error("Total difficulty not found for block #{number}")]

View File

@@ -15,7 +15,6 @@
//! - `stage.progress{stage}`: The block number each stage has currently reached.
mod db;
pub use db::StageDB;
mod error;
mod id;
mod pipeline;
@@ -28,6 +27,7 @@ mod test_utils;
/// Implementations of stages.
pub mod stages;
pub use db::StageDB;
pub use error::*;
pub use id::*;
pub use pipeline::*;

View File

@@ -59,6 +59,13 @@ impl Default for ExecutionStage {
}
}
impl ExecutionStage {
/// Create new execution stage with specified config.
pub fn new(config: Config) -> Self {
Self { config }
}
}
/// Specify the batch size of blocks for execution
/// TODO: make this configurable
const BATCH_SIZE: u64 = 1000;
@@ -176,6 +183,8 @@ impl<DB: Database> Stage<DB> for ExecutionStage {
for (header, (start_tx_index, end_tx_index, block_reward_index)) in
headers_batch.iter().zip(tx_index_ranges.iter())
{
let num = header.number;
tracing::trace!(target: "stages::execution", ?num, "Executing block");
let body_tx_cnt = end_tx_index - start_tx_index;
// iterate over all transactions
let mut tx_walker = tx.walk(*start_tx_index)?;
@@ -215,23 +224,27 @@ impl<DB: Database> Stage<DB> for ExecutionStage {
// for now use default eth config
let mut state_provider =
SubState::new(State::new(StateProviderImplRefLatest::new(db_tx)));
let state_provider = SubState::new(State::new(StateProviderImplRefLatest::new(db_tx)));
// execute and store output to results
// ANCHOR: snippet-block_change_patches
block_change_patches.push((
reth_executor::executor::execute_and_verify_receipt(
header,
&recovered_transactions,
&self.config,
&mut state_provider,
)
.map_err(|error| StageError::ExecutionError { block: header.number, error })?,
start_tx_index,
block_reward_index,
));
// ANCHOR_END: snippet-block_change_patches
let change_set = std::thread::scope(|scope| {
let handle = std::thread::Builder::new()
.stack_size(50 * 1024 * 1024)
.spawn_scoped(scope, || {
// execute and store output to results
// ANCHOR: snippet-block_change_patches
reth_executor::executor::execute_and_verify_receipt(
header,
&recovered_transactions,
&self.config,
state_provider,
)
// ANCHOR_END: snippet-block_change_patches
})
.expect("failed to spawn the execution thread");
handle.join().expect("execution thread should not panic")
})
.map_err(|error| StageError::ExecutionError { block: header.number, error })?;
block_change_patches.push((change_set, start_tx_index, block_reward_index));
}
// apply changes to plain database.
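For reference, a standalone sketch of the scoped-thread pattern introduced above (Rust 1.63+ scoped threads): a `Builder` with a 50 MiB stack, presumably to leave room for deep EVM call recursion, while the closure still borrows non-`'static` data from the caller. The data and closure here are placeholders:

    fn main() {
        let data = vec![1u64, 2, 3]; // stands in for the borrowed header/transactions
        let sum = std::thread::scope(|scope| {
            let handle = std::thread::Builder::new()
                .stack_size(50 * 1024 * 1024)
                .spawn_scoped(scope, || data.iter().sum::<u64>())
                .expect("failed to spawn the worker thread");
            // Join inside the scope so the result (or a panic) is propagated to the caller.
            handle.join().expect("worker thread panicked")
        });
        assert_eq!(sum, 6);
    }

Unlike `std::thread::spawn`, `spawn_scoped` lets the closure borrow the header, the recovered transactions, and the stage config without `'static` bounds, because the scope guarantees the thread finishes before those borrows end.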

View File

@@ -120,6 +120,10 @@ pub mod test_utils {
/// Error during tempdir creation
pub const ERROR_TEMPDIR: &str = "Not able to create a temporary directory.";
/// Create rw database for testing
pub fn create_test_rw_db<E: EnvironmentKind>() -> Arc<Env<E>> {
create_test_db(EnvKind::RW)
}
/// Create database for testing
pub fn create_test_db<E: EnvironmentKind>(kind: EnvKind) -> Arc<Env<E>> {
Arc::new(create_test_db_with_path(