test(execution): chain test state check, execution bug fixes/upgrades (#472)

* temp: header stage backoff stand-in

* feat(execution): Check chain post state, fix StateProviderLatest and evm return

* Disable receipt merkle tree check

* update and merge

* Fix storage double values in dup table

* fmt

* Update bin/reth/src/test_eth_chain/runner.rs

Co-authored-by: Georgios Konstantopoulos <me@gakonst.com>

* Enable receipt root check after byzantium

* Receipt inner rlp without header for proof root

* some cleanup nits

* nit

Co-authored-by: Oliver Nordbjerg <hi@notbjerg.me>
Co-authored-by: Georgios Konstantopoulos <me@gakonst.com>
rakita
2022-12-23 18:42:19 +01:00
committed by GitHub
parent f8d1521c50
commit 284391c181
15 changed files with 494 additions and 161 deletions

Cargo.lock generated
View File

@ -1166,7 +1166,7 @@ dependencies = [
"log", "log",
"rand 0.8.5", "rand 0.8.5",
"rlp", "rlp",
"secp256k1", "secp256k1 0.24.2",
"serde", "serde",
"sha3", "sha3",
"zeroize", "zeroize",
@ -3367,7 +3367,7 @@ dependencies = [
"reth-interfaces", "reth-interfaces",
"reth-libmdbx", "reth-libmdbx",
"reth-primitives", "reth-primitives",
"secp256k1", "secp256k1 0.24.2",
"serde", "serde",
"tempfile", "tempfile",
"test-fuzz", "test-fuzz",
@ -3392,7 +3392,7 @@ dependencies = [
"reth-rlp", "reth-rlp",
"reth-rlp-derive", "reth-rlp-derive",
"reth-tracing", "reth-tracing",
"secp256k1", "secp256k1 0.24.2",
"thiserror", "thiserror",
"tokio", "tokio",
"tokio-stream", "tokio-stream",
@ -3438,7 +3438,7 @@ dependencies = [
"rand 0.8.5", "rand 0.8.5",
"reth-primitives", "reth-primitives",
"reth-rlp", "reth-rlp",
"secp256k1", "secp256k1 0.24.2",
"sha2 0.10.6", "sha2 0.10.6",
"sha3", "sha3",
"thiserror", "thiserror",
@ -3465,7 +3465,7 @@ dependencies = [
"reth-primitives", "reth-primitives",
"reth-rlp", "reth-rlp",
"reth-tracing", "reth-tracing",
"secp256k1", "secp256k1 0.24.2",
"serde", "serde",
"smol_str", "smol_str",
"snap", "snap",
@ -3518,7 +3518,7 @@ dependencies = [
"reth-eth-wire", "reth-eth-wire",
"reth-primitives", "reth-primitives",
"reth-rpc-types", "reth-rpc-types",
"secp256k1", "secp256k1 0.24.2",
"serde", "serde",
"thiserror", "thiserror",
"tokio", "tokio",
@ -3608,7 +3608,7 @@ dependencies = [
"reth-tasks", "reth-tasks",
"reth-tracing", "reth-tracing",
"reth-transaction-pool", "reth-transaction-pool",
"secp256k1", "secp256k1 0.24.2",
"serial_test", "serial_test",
"tempfile", "tempfile",
"thiserror", "thiserror",
@ -3635,7 +3635,7 @@ dependencies = [
"plain_hasher", "plain_hasher",
"reth-codecs", "reth-codecs",
"reth-rlp", "reth-rlp",
"secp256k1", "secp256k1 0.24.2",
"serde", "serde",
"serde_json", "serde_json",
"sucds", "sucds",
@ -3665,7 +3665,7 @@ dependencies = [
"reth-interfaces", "reth-interfaces",
"reth-primitives", "reth-primitives",
"reth-rpc-types", "reth-rpc-types",
"secp256k1", "secp256k1 0.24.2",
"serde", "serde",
"test-fuzz", "test-fuzz",
"thiserror", "thiserror",
@ -3689,7 +3689,7 @@ dependencies = [
"reth-rlp", "reth-rlp",
"reth-rlp-derive", "reth-rlp-derive",
"rlp", "rlp",
"secp256k1", "secp256k1 0.24.2",
"smol_str", "smol_str",
] ]
@ -3810,33 +3810,35 @@ dependencies = [
[[package]] [[package]]
name = "revm" name = "revm"
version = "2.3.1" version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "git+https://github.com/bluealloy/revm?branch=main#488ef8ab62f433b1b434d2d81bc744a2db8f735f"
checksum = "73d84c8f9836efb0f5f5f8de4700a953c4e1f3119e5cfcb0aad8e5be73daf991"
dependencies = [ dependencies = [
"arrayref", "arrayref",
"auto_impl", "auto_impl",
"bytes", "bytes",
"derive_more",
"fixed-hash",
"hashbrown 0.13.1", "hashbrown 0.13.1",
"hex",
"hex-literal",
"num_enum", "num_enum",
"primitive-types",
"revm_precompiles", "revm_precompiles",
"rlp", "rlp",
"ruint",
"sha3", "sha3",
] ]
[[package]] [[package]]
name = "revm_precompiles" name = "revm_precompiles"
version = "1.1.2" version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "git+https://github.com/bluealloy/revm?branch=main#488ef8ab62f433b1b434d2d81bc744a2db8f735f"
checksum = "0353d456ef3e989dc9190f42c6020f09bc2025930c37895826029304413204b5"
dependencies = [ dependencies = [
"bytes", "bytes",
"hashbrown 0.13.1", "hashbrown 0.13.1",
"num", "num",
"once_cell", "once_cell",
"primitive-types",
"ripemd", "ripemd",
"secp256k1", "ruint",
"secp256k1 0.25.0",
"sha2 0.10.6", "sha2 0.10.6",
"sha3", "sha3",
"substrate-bn", "substrate-bn",
@ -3898,6 +3900,26 @@ dependencies = [
"syn", "syn",
] ]
[[package]]
name = "ruint"
version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ad3a104dc8c3867f653b0fec89c65e00b0ceb752718ad282177a7e0f33257ac"
dependencies = [
"derive_more",
"primitive-types",
"rlp",
"ruint-macro",
"rustc_version",
"thiserror",
]
[[package]]
name = "ruint-macro"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62cc5760263ea229d367e7dff3c0cbf09e4797a125bd87059a6c095804f3b2d1"
[[package]] [[package]]
name = "rustc-hash" name = "rustc-hash"
version = "1.1.0" version = "1.1.0"
@ -4058,7 +4080,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9512ffd81e3a3503ed401f79c33168b9148c75038956039166cd750eaa037c3" checksum = "d9512ffd81e3a3503ed401f79c33168b9148c75038956039166cd750eaa037c3"
dependencies = [ dependencies = [
"rand 0.8.5", "rand 0.8.5",
"secp256k1-sys", "secp256k1-sys 0.6.1",
]
[[package]]
name = "secp256k1"
version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "550fc3b723a478be77bf74718947cdcdd75144d508aaa70f0a320036905df2a8"
dependencies = [
"secp256k1-sys 0.7.0",
] ]
[[package]] [[package]]
@ -4070,6 +4101,15 @@ dependencies = [
"cc", "cc",
] ]
[[package]]
name = "secp256k1-sys"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8058e28ae464daf5ac14c5c0f78110b58616e796c4e4e28cfcca38fdb13d8f22"
dependencies = [
"cc",
]
[[package]] [[package]]
name = "security-framework" name = "security-framework"
version = "2.7.0" version = "2.7.0"

View File

@ -21,7 +21,7 @@ pub struct BlockchainTestData {
/// Blocks. /// Blocks.
pub blocks: Vec<Block>, pub blocks: Vec<Block>,
/// Post state. /// Post state.
pub post_state: Option<State>, pub post_state: Option<RootOrState>,
/// Pre state. /// Pre state.
pub pre: State, pub pre: State,
/// Hash of best block. /// Hash of best block.
@ -112,6 +112,18 @@ pub struct Block {
pub transactions: Option<Vec<Transaction>>, pub transactions: Option<Vec<Transaction>>,
/// Uncle/ommer headers /// Uncle/ommer headers
pub uncle_headers: Option<Vec<Header>>, pub uncle_headers: Option<Vec<Header>>,
/// Transaction Sequence
pub transaction_sequence: Option<Vec<TransactionSequence>>,
}
/// Transaction Sequence in block
#[derive(Debug, PartialEq, Eq, Deserialize)]
#[serde(deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
pub struct TransactionSequence {
exception: String,
raw_bytes: Bytes,
valid: String,
} }
/// Ethereum blockchain test data State. /// Ethereum blockchain test data State.
@ -191,6 +203,9 @@ pub enum ForkSpec {
/// After Merge Init Code test /// After Merge Init Code test
#[serde(alias = "Merge+3860")] #[serde(alias = "Merge+3860")]
MergeMeterInitCode, MergeMeterInitCode,
/// After Merge plus new PUSH0 opcode
#[serde(alias = "Merge+3855")]
MergePush0,
} }
impl From<ForkSpec> for reth_executor::SpecUpgrades { impl From<ForkSpec> for reth_executor::SpecUpgrades {
@ -214,6 +229,7 @@ impl From<ForkSpec> for reth_executor::SpecUpgrades {
ForkSpec::Merge => Self::new_paris_activated(), ForkSpec::Merge => Self::new_paris_activated(),
ForkSpec::MergeEOF => Self::new_paris_activated(), ForkSpec::MergeEOF => Self::new_paris_activated(),
ForkSpec::MergeMeterInitCode => Self::new_paris_activated(), ForkSpec::MergeMeterInitCode => Self::new_paris_activated(),
ForkSpec::MergePush0 => Self::new_paris_activated(),
ForkSpec::ByzantiumToConstantinopleAt5 | ForkSpec::Constantinople => { ForkSpec::ByzantiumToConstantinopleAt5 | ForkSpec::Constantinople => {
panic!("Overriden with PETERSBURG") panic!("Overriden with PETERSBURG")
} }
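The runner below matches on a new RootOrState type whose definition is not included in the shown hunks. A minimal illustrative sketch of a plausible shape, assuming an untagged serde enum over the existing State model (not code from this commit):

    /// Post state of a blockchain test: either only the expected state root,
    /// or the full expected account state.
    #[derive(Debug, PartialEq, Eq, Deserialize)]
    #[serde(untagged)]
    pub enum RootOrState {
        /// Tests that only pin the post-state root.
        Root(H256),
        /// Tests that spell out every post-state account.
        State(State),
    }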

View File

@ -1,22 +1,27 @@
use super::models::Test; use super::models::Test;
use crate::test_eth_chain::models::ForkSpec; use crate::test_eth_chain::models::{ForkSpec, RootOrState};
use eyre::eyre;
use reth_db::{ use reth_db::{
cursor::DbCursorRO,
database::Database, database::Database,
mdbx::{test_utils::create_test_rw_db, WriteMap}, mdbx::{test_utils::create_test_rw_db, WriteMap},
tables, tables,
transaction::{DbTx, DbTxMut}, transaction::{DbTx, DbTxMut},
Error as DbError,
}; };
use reth_executor::SpecUpgrades; use reth_executor::SpecUpgrades;
use reth_primitives::{ use reth_primitives::{
keccak256, Account as RethAccount, BigEndianHash, SealedBlock, SealedHeader, StorageEntry, H256, keccak256, Account as RethAccount, Address, JsonU256, SealedBlock, SealedHeader, StorageEntry,
H256, U256,
}; };
use reth_rlp::Decodable; use reth_rlp::Decodable;
use reth_stages::{stages::execution::ExecutionStage, ExecInput, Stage, Transaction}; use reth_stages::{stages::execution::ExecutionStage, ExecInput, Stage, Transaction};
use std::{ use std::{
collections::HashMap,
ffi::OsStr, ffi::OsStr,
path::{Path, PathBuf}, path::{Path, PathBuf},
}; };
use tracing::debug; use tracing::{debug, info};
/// These tests are edge cases that cannot happen on mainnet, so we skip them. /// These tests are edge cases that cannot happen on mainnet, so we skip them.
pub fn should_skip(path: &Path) -> bool { pub fn should_skip(path: &Path) -> bool {
@ -81,7 +86,8 @@ pub async fn run_test(path: PathBuf) -> eyre::Result<()> {
ForkSpec::ByzantiumToConstantinopleAt5 | ForkSpec::ByzantiumToConstantinopleAt5 |
ForkSpec::Constantinople | ForkSpec::Constantinople |
ForkSpec::MergeEOF | ForkSpec::MergeEOF |
ForkSpec::MergeMeterInitCode ForkSpec::MergeMeterInitCode |
ForkSpec::MergePush0,
) { ) {
continue continue
} }
@ -126,10 +132,10 @@ pub async fn run_test(path: PathBuf) -> eyre::Result<()> {
tx.put::<tables::Bytecodes>(code_hash, account.code.to_vec())?; tx.put::<tables::Bytecodes>(code_hash, account.code.to_vec())?;
} }
account.storage.iter().try_for_each(|(k, v)| { account.storage.iter().try_for_each(|(k, v)| {
tx.put::<tables::PlainStorageState>( tracing::trace!("Update storage: {address} key:{:?} val:{:?}", k.0, v.0);
address, let mut key = H256::zero();
StorageEntry { key: H256::from_uint(&k.0), value: v.0 }, k.0.to_big_endian(&mut key.0);
) tx.put::<tables::PlainStorageState>(address, StorageEntry { key, value: v.0 })
})?; })?;
Ok(()) Ok(())
@ -138,17 +144,120 @@ pub async fn run_test(path: PathBuf) -> eyre::Result<()> {
// Commit the pre suite state // Commit the pre suite state
tx.commit()?; tx.commit()?;
let storage = db.view(|tx| -> Result<_, DbError> {
let mut cursor = tx.cursor_dup::<tables::PlainStorageState>()?;
let walker = cursor.first()?.map(|first| cursor.walk(first.0)).transpose()?;
Ok(walker.map(|mut walker| {
let mut map: HashMap<Address, HashMap<U256, U256>> = HashMap::new();
while let Some(Ok((address, slot))) = walker.next() {
let key = U256::from_big_endian(&slot.key.0);
map.entry(address).or_default().insert(key, slot.value);
}
map
}))
})??;
tracing::trace!("Pre state :{:?}", storage);
// Initialize the execution stage // Initialize the execution stage
// Hardcode the chain_id to Ethereums 1. // Hardcode the chain_id to Ethereum 1.
let mut stage = let mut stage =
ExecutionStage::new(reth_executor::Config { chain_id: 1.into(), spec_upgrades }); ExecutionStage::new(reth_executor::Config { chain_id: 1.into(), spec_upgrades });
// Call execution stage // Call execution stage
let input = ExecInput::default(); let input = ExecInput::default();
stage.execute(&mut Transaction::new(db.as_ref())?, input).await?; {
let mut transaction = Transaction::new(db.as_ref())?;
// ignore the execution error so the post state can still be validated below
let _ = stage.execute(&mut transaction, input).await;
transaction.commit()?;
}
// Validate post state // Validate post state
//for post in match suite.post_state {
Some(RootOrState::Root(root)) => {
info!("Post state is root: #{root:?}")
}
Some(RootOrState::State(state)) => db.view(|tx| -> eyre::Result<()> {
let mut cursor = tx.cursor_dup::<tables::PlainStorageState>()?;
let walker = cursor.first()?.map(|first| cursor.walk(first.0)).transpose()?;
let storage = walker.map(|mut walker| {
let mut map: HashMap<Address, HashMap<U256, U256>> = HashMap::new();
while let Some(Ok((address, slot))) = walker.next() {
let key = U256::from_big_endian(&slot.key.0);
map.entry(address).or_default().insert(key, slot.value);
}
map
});
tracing::trace!("Our storage:{:?}", storage);
for (address, test_account) in state.iter() {
// check account
let our_account = tx
.get::<tables::PlainAccountState>(*address)?
.ok_or(eyre!("Account is missing:{address} expected:{:?}", test_account))?;
if test_account.balance.0 != our_account.balance {
return Err(eyre!(
"Account {address} balance diff, expected {} got{}",
test_account.balance.0,
our_account.balance
))
}
if test_account.nonce.0.as_u64() != our_account.nonce {
return Err(eyre!(
"Account {address} nonce diff, expected {} got {}",
test_account.nonce.0,
our_account.nonce
))
}
if let Some(our_bytecode) = our_account.bytecode_hash {
let test_bytecode = keccak256(test_account.code.as_ref());
if our_bytecode != test_bytecode {
return Err(eyre!(
"Account {address} bytecode diff, expected: {} got: {:?}",
test_account.code,
our_account.bytecode_hash
))
}
} else if !test_account.code.is_empty() {
return Err(eyre!(
"Account {address} bytecode diff, expected {} got empty bytecode",
test_account.code,
))
}
// get walker if present
if let Some(storage) = storage.as_ref() {
// iterate over storages
for (JsonU256(key), JsonU256(value)) in test_account.storage.iter() {
let our_value = storage
.get(address)
.ok_or(eyre!(
"Missing storage from test {storage:?} got {:?}",
test_account.storage
))?
.get(key)
.ok_or(eyre!(
"Slot is missing from table {storage:?} got:{:?}",
test_account.storage
))?;
if value != our_value {
return Err(eyre!(
"Storage diff we got {address}: {storage:?} but expect: {:?}",
test_account.storage
))
}
}
} else if !test_account.storage.is_empty() {
return Err(eyre!(
"Walker is not present, but storage is not empty.{:?}",
test_account.storage
))
}
}
Ok(())
})??,
None => info!("Post state is none"),
}
} }
Ok(()) Ok(())
} }
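Both the pre-state insertion and the post-state walk above rely on the same key conversion: the dup-sorted PlainStorageState table stores slot keys as H256 subkeys, while the test data and comparisons use U256. A condensed sketch of that round trip, assuming the ethereum-types style U256/H256 used in this file (slot is a hypothetical U256 storage slot):

    // U256 -> H256 when writing a slot into PlainStorageState
    let mut key = H256::zero();
    slot.to_big_endian(&mut key.0);
    // H256 -> U256 when reading entries back for comparison against the test state
    let slot_back = U256::from_big_endian(&key.0);
    assert_eq!(slot_back, slot);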

View File

@ -14,7 +14,7 @@ reth-rlp = { path = "../common/rlp" }
reth-db = { path = "../storage/db" } reth-db = { path = "../storage/db" }
reth-provider = { path = "../storage/provider" } reth-provider = { path = "../storage/provider" }
revm = "2.3" revm = { git = "https://github.com/bluealloy/revm", branch = "main"}
# remove from reth and reexport from revm # remove from reth and reexport from revm
hashbrown = "0.13" hashbrown = "0.13"

View File

@ -8,12 +8,12 @@ use reth_db::{models::AccountBeforeTx, tables, transaction::DbTxMut, Error as Db
use reth_interfaces::executor::Error; use reth_interfaces::executor::Error;
use reth_primitives::{ use reth_primitives::{
bloom::logs_bloom, Account, Address, Bloom, Header, Log, Receipt, TransactionSignedEcRecovered, bloom::logs_bloom, Account, Address, Bloom, Header, Log, Receipt, TransactionSignedEcRecovered,
H256, U256, H160, H256, U256,
}; };
use reth_provider::StateProvider; use reth_provider::StateProvider;
use revm::{ use revm::{
db::AccountState, Account as RevmAccount, AccountInfo, AnalysisKind, Bytecode, Database, db::AccountState, Account as RevmAccount, AccountInfo, AnalysisKind, Bytecode, Database,
Return, EVM, Return, B160, EVM, U256 as evmU256,
}; };
use std::collections::BTreeMap; use std::collections::BTreeMap;
@ -108,13 +108,13 @@ pub struct AccountChangeSet {
#[derive(Debug)] #[derive(Debug)]
pub struct ExecutionResult { pub struct ExecutionResult {
/// Transaction changeset containing [Receipt], changed [Accounts][Account] and Storages. /// Transaction changeset containing [Receipt], changed [Accounts][Account] and Storages.
pub changeset: Vec<TransactionChangeSet>, pub changesets: Vec<TransactionChangeSet>,
/// Block reward if present. It represents the changeset for the block reward slot in /// Block reward if present. It represents the changeset for the block reward slot in
/// [tables::AccountChangeSet]. /// [tables::AccountChangeSet].
pub block_reward: Option<BTreeMap<Address, AccountInfoChangeSet>>, pub block_reward: Option<BTreeMap<Address, AccountInfoChangeSet>>,
} }
/// Commit chgange to database and return change diff that is used to update state and create /// Commit change to database and return change diff that is used to update state and create
/// history index /// history index
/// ///
/// ChangeDiff consists of: /// ChangeDiff consists of:
@ -124,7 +124,7 @@ pub struct ExecutionResult {
/// BTreeMap is used to have sorted values /// BTreeMap is used to have sorted values
pub fn commit_changes<DB: StateProvider>( pub fn commit_changes<DB: StateProvider>(
db: &mut SubState<DB>, db: &mut SubState<DB>,
changes: hashbrown::HashMap<Address, RevmAccount>, changes: hashbrown::HashMap<B160, RevmAccount>,
) -> (BTreeMap<Address, AccountChangeSet>, BTreeMap<H256, Bytecode>) { ) -> (BTreeMap<Address, AccountChangeSet>, BTreeMap<H256, Bytecode>) {
let mut change = BTreeMap::new(); let mut change = BTreeMap::new();
let mut new_bytecodes = BTreeMap::new(); let mut new_bytecodes = BTreeMap::new();
@ -132,7 +132,7 @@ pub fn commit_changes<DB: StateProvider>(
for (address, account) in changes { for (address, account) in changes {
if account.is_destroyed { if account.is_destroyed {
// get old account that we are destroying. // get old account that we are destroying.
let db_account = match db.accounts.entry(address) { let db_account = match db.accounts.entry(B160(address.0)) {
Entry::Occupied(entry) => entry.into_mut(), Entry::Occupied(entry) => entry.into_mut(),
Entry::Vacant(_entry) => { Entry::Vacant(_entry) => {
panic!("Left panic to critically jumpout if happens, as every account shound be hot loaded."); panic!("Left panic to critically jumpout if happens, as every account shound be hot loaded.");
@ -141,7 +141,7 @@ pub fn commit_changes<DB: StateProvider>(
// Insert into `change` a old account and None for new account // Insert into `change` a old account and None for new account
// and mark storage to be mapped // and mark storage to be mapped
change.insert( change.insert(
address, H160(address.0),
AccountChangeSet { AccountChangeSet {
account: AccountInfoChangeSet::Destroyed { old: to_reth_acc(&db_account.info) }, account: AccountInfoChangeSet::Destroyed { old: to_reth_acc(&db_account.info) },
storage: BTreeMap::new(), storage: BTreeMap::new(),
@ -163,7 +163,7 @@ pub fn commit_changes<DB: StateProvider>(
match db.contracts.entry(account.info.code_hash) { match db.contracts.entry(account.info.code_hash) {
Entry::Vacant(entry) => { Entry::Vacant(entry) => {
entry.insert(code.clone()); entry.insert(code.clone());
new_bytecodes.insert(account.info.code_hash, code.clone()); new_bytecodes.insert(H256(account.info.code_hash.0), code.clone());
} }
Entry::Occupied(mut entry) => { Entry::Occupied(mut entry) => {
entry.insert(code.clone()); entry.insert(code.clone());
@ -175,7 +175,7 @@ pub fn commit_changes<DB: StateProvider>(
// get old account that is going to be overwritten or none if it does not exist // get old account that is going to be overwritten or none if it does not exist
// and get new account that was just inserted. new account mut ref is used for // and get new account that was just inserted. new account mut ref is used for
// inserting storage // inserting storage
let (account_info_changeset, new_account) = match db.accounts.entry(address) { let (account_info_changeset, new_account) = match db.accounts.entry(B160(address.0)) {
Entry::Vacant(entry) => { Entry::Vacant(entry) => {
let entry = entry.insert(Default::default()); let entry = entry.insert(Default::default());
entry.info = account.info.clone(); entry.info = account.info.clone();
@ -217,13 +217,19 @@ pub fn commit_changes<DB: StateProvider>(
// insert storage into new db account. // insert storage into new db account.
new_account.storage.extend(account.storage.into_iter().map(|(key, value)| { new_account.storage.extend(account.storage.into_iter().map(|(key, value)| {
storage.insert(key, (value.original_value(), value.present_value())); storage.insert(
U256(*key.as_limbs()),
(
U256(*value.original_value().as_limbs()),
U256(*value.present_value().as_limbs()),
),
);
(key, value.present_value()) (key, value.present_value())
})); }));
// Insert into change. // Insert into change.
change.insert( change.insert(
address, H160(address.0),
AccountChangeSet { account: account_info_changeset, storage, wipe_storage }, AccountChangeSet { account: account_info_changeset, storage, wipe_storage },
); );
} }
@ -240,7 +246,7 @@ pub struct TransactionChangeSet {
/// Transaction receipt /// Transaction receipt
pub receipt: Receipt, pub receipt: Receipt,
/// State change that this transaction made on state. /// State change that this transaction made on state.
pub state_diff: BTreeMap<Address, AccountChangeSet>, pub changeset: BTreeMap<Address, AccountChangeSet>,
/// new bytecode created as result of transaction execution. /// new bytecode created as result of transaction execution.
pub new_bytecodes: BTreeMap<H256, Bytecode>, pub new_bytecodes: BTreeMap<H256, Bytecode>,
} }
@ -254,8 +260,16 @@ pub fn execute_and_verify_receipt<DB: StateProvider>(
) -> Result<ExecutionResult, Error> { ) -> Result<ExecutionResult, Error> {
let transaction_change_set = execute(header, transactions, config, db)?; let transaction_change_set = execute(header, transactions, config, db)?;
let receipts_iter = transaction_change_set.changeset.iter().map(|changeset| &changeset.receipt); let receipts_iter =
verify_receipt(header.receipts_root, header.logs_bloom, receipts_iter)?; transaction_change_set.changesets.iter().map(|changeset| &changeset.receipt);
if header.number >= config.spec_upgrades.byzantium {
verify_receipt(header.receipts_root, header.logs_bloom, receipts_iter)?;
}
// TODO: Before Byzantium, receipts contained the intermediate state root, which meant the
// expensive state-root hashing had to be computed after every transaction.
// This was replaced with the is_success flag.
// See the EIP here: https://eips.ethereum.org/EIPS/eip-658
Ok(transaction_change_set) Ok(transaction_change_set)
} }
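For context on the Byzantium gate above: per EIP-658 the receipt payload changed shape at Byzantium, which is why the receipts-root check is only enabled from that block onward (illustrative summary, not code from this commit):

    // pre-Byzantium : rlp([ intermediate_state_root, cumulative_gas_used, logs_bloom, logs ])
    // Byzantium+    : rlp([ status (0 or 1),         cumulative_gas_used, logs_bloom, logs ])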
@ -296,15 +310,15 @@ pub fn execute<DB: StateProvider>(
let mut evm = EVM::new(); let mut evm = EVM::new();
evm.database(db); evm.database(db);
evm.env.cfg.chain_id = config.chain_id; evm.env.cfg.chain_id = evmU256::from_limbs(config.chain_id.0);
evm.env.cfg.spec_id = config.spec_upgrades.revm_spec(header.number); evm.env.cfg.spec_id = config.spec_upgrades.revm_spec(header.number);
evm.env.cfg.perf_all_precompiles_have_balance = true; evm.env.cfg.perf_all_precompiles_have_balance = false;
evm.env.cfg.perf_analyse_created_bytecodes = AnalysisKind::Raw; evm.env.cfg.perf_analyse_created_bytecodes = AnalysisKind::Raw;
revm_wrap::fill_block_env(&mut evm.env.block, header); revm_wrap::fill_block_env(&mut evm.env.block, header);
let mut cumulative_gas_used = 0; let mut cumulative_gas_used = 0;
// output of verification // output of verification
let mut changeset = Vec::with_capacity(transactions.len()); let mut changesets = Vec::with_capacity(transactions.len());
for transaction in transactions.iter() { for transaction in transactions.iter() {
// The sum of the transactions gas limit, Tg, and the gas utilised in this block prior, // The sum of the transactions gas limit, Tg, and the gas utilised in this block prior,
@ -321,10 +335,14 @@ pub fn execute<DB: StateProvider>(
revm_wrap::fill_tx_env(&mut evm.env.tx, transaction); revm_wrap::fill_tx_env(&mut evm.env.tx, transaction);
// Execute transaction. // Execute transaction.
let (revm::ExecutionResult { exit_reason, gas_used, logs, gas_refunded, .. }, state) = let out = evm.transact();
evm.transact();
tracing::trace!(target:"evm","Executing transaction {:?}, gas:{gas_used} refund:{gas_refunded}",transaction.hash()); // Useful for debugging
// let out = evm.inspect(revm::inspectors::CustomPrintTracer::default());
// tracing::trace!(target:"evm","Executing transaction {:?}, \n:{out:?}: {:?}
// \nENV:{:?}",transaction.hash(),transaction,evm.env);
let (revm::ExecutionResult { exit_reason, gas_used, logs, .. }, state) = out;
// Fatal internal error. // Fatal internal error.
if exit_reason == revm::Return::FatalExternalError { if exit_reason == revm::Return::FatalExternalError {
@ -332,10 +350,13 @@ pub fn execute<DB: StateProvider>(
} }
// Success flag was added in `EIP-658: Embedding transaction status code in receipts`. // Success flag was added in `EIP-658: Embedding transaction status code in receipts`.
// TODO for verification (exit_reason): some errors should return an EVM error, as a block
// containing such a transaction can have a consensus error that would make the block invalid.
let is_success = match exit_reason { let is_success = match exit_reason {
revm::return_ok!() => true, revm::return_ok!() => true,
revm::return_revert!() => false, revm::return_revert!() => false,
e => return Err(Error::EVMError { error_code: e as u32 }), _ => false,
//e => return Err(Error::EVMError { error_code: e as u32 }),
}; };
// Add spend gas. // Add spend gas.
@ -344,14 +365,18 @@ pub fn execute<DB: StateProvider>(
// Transform logs to reth format. // Transform logs to reth format.
let logs: Vec<Log> = logs let logs: Vec<Log> = logs
.into_iter() .into_iter()
.map(|l| Log { address: l.address, topics: l.topics, data: l.data }) .map(|l| Log {
address: H160(l.address.0),
topics: l.topics.into_iter().map(|h| H256(h.0)).collect(),
data: l.data,
})
.collect(); .collect();
// commit state // commit state
let (state_diff, new_bytecodes) = commit_changes(evm.db().unwrap(), state); let (changeset, new_bytecodes) = commit_changes(evm.db().unwrap(), state);
// Push transaction changeset and calculate header bloom filter for receipt. // Push transaction changeset and calculate header bloom filter for receipt.
changeset.push(TransactionChangeSet { changesets.push(TransactionChangeSet {
receipt: Receipt { receipt: Receipt {
tx_type: transaction.tx_type(), tx_type: transaction.tx_type(),
success: is_success, success: is_success,
@ -359,7 +384,7 @@ pub fn execute<DB: StateProvider>(
bloom: logs_bloom(logs.iter()), bloom: logs_bloom(logs.iter()),
logs, logs,
}, },
state_diff, changeset,
new_bytecodes, new_bytecodes,
}) })
} }
@ -373,7 +398,7 @@ pub fn execute<DB: StateProvider>(
let beneficiary = evm let beneficiary = evm
.db .db
.expect("It is set at the start of the function") .expect("It is set at the start of the function")
.basic(header.beneficiary) .basic(B160(header.beneficiary.0))
.map_err(|_| Error::ProviderError)?; .map_err(|_| Error::ProviderError)?;
// NOTE: Related to Ethereum reward change, for other network this is probably going to be moved // NOTE: Related to Ethereum reward change, for other network this is probably going to be moved
@ -403,7 +428,7 @@ pub fn execute<DB: StateProvider>(
} }
}); });
Ok(ExecutionResult { changeset, block_reward }) Ok(ExecutionResult { changesets, block_reward })
} }
#[cfg(test)] #[cfg(test)]
@ -517,10 +542,10 @@ mod tests {
// execute chain and verify receipts // execute chain and verify receipts
let out = execute_and_verify_receipt(&block.header, &transactions, &config, db).unwrap(); let out = execute_and_verify_receipt(&block.header, &transactions, &config, db).unwrap();
assert_eq!(out.changeset.len(), 1, "Should have executed one transaction"); assert_eq!(out.changesets.len(), 1, "Should have executed one transaction");
let changeset = out.changeset[0].clone(); let changesets = out.changesets[0].clone();
assert_eq!(changeset.new_bytecodes.len(), 0, "Should have zero new bytecodes"); assert_eq!(changesets.new_bytecodes.len(), 0, "Should have zero new bytecodes");
let account1 = H160(hex!("1000000000000000000000000000000000000000")); let account1 = H160(hex!("1000000000000000000000000000000000000000"));
let _account1_info = Account { balance: 0x00.into(), nonce: 0x00, bytecode_hash: None }; let _account1_info = Account { balance: 0x00.into(), nonce: 0x00, bytecode_hash: None };
@ -536,17 +561,17 @@ mod tests {
Account { balance: 0x3635c9adc5de996b46u128.into(), nonce: 0x01, bytecode_hash: None }; Account { balance: 0x3635c9adc5de996b46u128.into(), nonce: 0x01, bytecode_hash: None };
assert_eq!( assert_eq!(
changeset.state_diff.get(&account1).unwrap().account, changesets.changeset.get(&account1).unwrap().account,
AccountInfoChangeSet::NoChange, AccountInfoChangeSet::NoChange,
"No change to account" "No change to account"
); );
assert_eq!( assert_eq!(
changeset.state_diff.get(&account2).unwrap().account, changesets.changeset.get(&account2).unwrap().account,
AccountInfoChangeSet::Created { new: account2_info }, AccountInfoChangeSet::Created { new: account2_info },
"New acccount" "New acccount"
); );
assert_eq!( assert_eq!(
changeset.state_diff.get(&account3).unwrap().account, changesets.changeset.get(&account3).unwrap().account,
AccountInfoChangeSet::Changed { old: account3_old_info, new: account3_info }, AccountInfoChangeSet::Changed { old: account3_old_info, new: account3_info },
"Change to account state" "Change to account state"
); );
@ -563,10 +588,10 @@ mod tests {
)])) )]))
); );
assert_eq!(changeset.new_bytecodes.len(), 0, "No new bytecodes"); assert_eq!(changesets.new_bytecodes.len(), 0, "No new bytecodes");
// check storage // check storage
let storage = &changeset.state_diff.get(&account1).unwrap().storage; let storage = &changesets.changeset.get(&account1).unwrap().storage;
assert_eq!(storage.len(), 1, "Only one storage change"); assert_eq!(storage.len(), 1, "Only one storage change");
assert_eq!( assert_eq!(
storage.get(&1.into()), storage.get(&1.into()),

View File

@ -6,7 +6,7 @@ use reth_primitives::{
use reth_provider::StateProvider; use reth_provider::StateProvider;
use revm::{ use revm::{
db::{CacheDB, DatabaseRef}, db::{CacheDB, DatabaseRef},
BlockEnv, TransactTo, TxEnv, BlockEnv, TransactTo, TxEnv, B160, B256, U256 as evmU256,
}; };
/// SubState of database. Uses revm internal cache with binding to reth DbExecutor trait. /// SubState of database. Uses revm internal cache with binding to reth DbExecutor trait.
@ -40,45 +40,45 @@ impl<DB: StateProvider> State<DB> {
impl<DB: StateProvider> DatabaseRef for State<DB> { impl<DB: StateProvider> DatabaseRef for State<DB> {
type Error = Error; type Error = Error;
fn basic(&self, address: H160) -> Result<Option<revm::AccountInfo>, Self::Error> { fn basic(&self, address: B160) -> Result<Option<revm::AccountInfo>, Self::Error> {
Ok(self.0.basic_account(address)?.map(|account| revm::AccountInfo { Ok(self.0.basic_account(H160(address.0))?.map(|account| revm::AccountInfo {
balance: account.balance, balance: evmU256::from_limbs(account.balance.0),
nonce: account.nonce, nonce: account.nonce,
code_hash: account.bytecode_hash.unwrap_or(KECCAK_EMPTY), code_hash: B256(account.bytecode_hash.unwrap_or(KECCAK_EMPTY).0),
code: None, code: None,
})) }))
} }
fn code_by_hash(&self, code_hash: H256) -> Result<revm::Bytecode, Self::Error> { fn code_by_hash(&self, code_hash: B256) -> Result<revm::Bytecode, Self::Error> {
let bytecode = self.0.bytecode_by_hash(code_hash)?.unwrap_or_default(); let bytecode = self.0.bytecode_by_hash(H256(code_hash.0))?.unwrap_or_default();
Ok(revm::Bytecode::new_raw(bytecode.0)) Ok(revm::Bytecode::new_raw(bytecode.0))
} }
fn storage(&self, address: H160, index: U256) -> Result<U256, Self::Error> { fn storage(&self, address: B160, index: evmU256) -> Result<evmU256, Self::Error> {
let mut h_index = H256::zero(); let index = H256(index.to_be_bytes());
index.to_big_endian(h_index.as_bytes_mut()); let ret =
evmU256::from_limbs(self.0.storage(H160(address.0), index)?.unwrap_or_default().0);
Ok(self.0.storage(address, h_index)?.unwrap_or_default()) Ok(ret)
} }
fn block_hash(&self, number: U256) -> Result<H256, Self::Error> { fn block_hash(&self, number: evmU256) -> Result<B256, Self::Error> {
Ok(self.0.block_hash(number)?.unwrap_or_default()) Ok(B256(self.0.block_hash(U256(*number.as_limbs()))?.unwrap_or_default().0))
} }
} }
/// Fill block environment from Block. /// Fill block environment from Block.
pub fn fill_block_env(block_env: &mut BlockEnv, header: &Header) { pub fn fill_block_env(block_env: &mut BlockEnv, header: &Header) {
block_env.number = header.number.into(); block_env.number = evmU256::from(header.number);
block_env.coinbase = header.beneficiary; block_env.coinbase = B160(header.beneficiary.0);
block_env.timestamp = header.timestamp.into(); block_env.timestamp = evmU256::from(header.timestamp);
block_env.difficulty = header.difficulty; block_env.difficulty = evmU256::from_limbs(header.difficulty.0);
block_env.basefee = header.base_fee_per_gas.unwrap_or_default().into(); block_env.basefee = evmU256::from(header.base_fee_per_gas.unwrap_or_default());
block_env.gas_limit = header.gas_limit.into(); block_env.gas_limit = evmU256::from(header.gas_limit);
} }
/// Fill transaction environment from Transaction. /// Fill transaction environment from Transaction.
pub fn fill_tx_env(tx_env: &mut TxEnv, transaction: &TransactionSignedEcRecovered) { pub fn fill_tx_env(tx_env: &mut TxEnv, transaction: &TransactionSignedEcRecovered) {
tx_env.caller = transaction.signer(); tx_env.caller = B160(transaction.signer().0);
match transaction.as_ref().as_ref() { match transaction.as_ref().as_ref() {
Transaction::Legacy(TxLegacy { Transaction::Legacy(TxLegacy {
nonce, nonce,
@ -90,13 +90,13 @@ pub fn fill_tx_env(tx_env: &mut TxEnv, transaction: &TransactionSignedEcRecovere
input, input,
}) => { }) => {
tx_env.gas_limit = *gas_limit; tx_env.gas_limit = *gas_limit;
tx_env.gas_price = (*gas_price).into(); tx_env.gas_price = evmU256::from(*gas_price);
tx_env.gas_priority_fee = None; tx_env.gas_priority_fee = None;
tx_env.transact_to = match to { tx_env.transact_to = match to {
TransactionKind::Call(to) => TransactTo::Call(*to), TransactionKind::Call(to) => TransactTo::Call(B160(to.0)),
TransactionKind::Create => TransactTo::create(), TransactionKind::Create => TransactTo::create(),
}; };
tx_env.value = (*value).into(); tx_env.value = evmU256::from(*value);
tx_env.data = input.0.clone(); tx_env.data = input.0.clone();
tx_env.chain_id = *chain_id; tx_env.chain_id = *chain_id;
tx_env.nonce = Some(*nonce); tx_env.nonce = Some(*nonce);
@ -112,13 +112,13 @@ pub fn fill_tx_env(tx_env: &mut TxEnv, transaction: &TransactionSignedEcRecovere
access_list, access_list,
}) => { }) => {
tx_env.gas_limit = *gas_limit; tx_env.gas_limit = *gas_limit;
tx_env.gas_price = (*gas_price).into(); tx_env.gas_price = evmU256::from(*gas_price);
tx_env.gas_priority_fee = None; tx_env.gas_priority_fee = None;
tx_env.transact_to = match to { tx_env.transact_to = match to {
TransactionKind::Call(to) => TransactTo::Call(*to), TransactionKind::Call(to) => TransactTo::Call(B160(to.0)),
TransactionKind::Create => TransactTo::create(), TransactionKind::Create => TransactTo::create(),
}; };
tx_env.value = (*value).into(); tx_env.value = evmU256::from(*value);
tx_env.data = input.0.clone(); tx_env.data = input.0.clone();
tx_env.chain_id = Some(*chain_id); tx_env.chain_id = Some(*chain_id);
tx_env.nonce = Some(*nonce); tx_env.nonce = Some(*nonce);
@ -127,8 +127,11 @@ pub fn fill_tx_env(tx_env: &mut TxEnv, transaction: &TransactionSignedEcRecovere
.iter() .iter()
.map(|l| { .map(|l| {
( (
l.address, B160(l.address.0),
l.storage_keys.iter().map(|k| U256::from_big_endian(k.as_ref())).collect(), l.storage_keys
.iter()
.map(|k| evmU256::from_be_bytes(k.to_fixed_bytes()))
.collect(),
) )
}) })
.collect(); .collect();
@ -145,13 +148,13 @@ pub fn fill_tx_env(tx_env: &mut TxEnv, transaction: &TransactionSignedEcRecovere
access_list, access_list,
}) => { }) => {
tx_env.gas_limit = *gas_limit; tx_env.gas_limit = *gas_limit;
tx_env.gas_price = (*max_fee_per_gas).into(); tx_env.gas_price = evmU256::from(*max_fee_per_gas);
tx_env.gas_priority_fee = Some((*max_priority_fee_per_gas).into()); tx_env.gas_priority_fee = Some(evmU256::from(*max_priority_fee_per_gas));
tx_env.transact_to = match to { tx_env.transact_to = match to {
TransactionKind::Call(to) => TransactTo::Call(*to), TransactionKind::Call(to) => TransactTo::Call(B160(to.0)),
TransactionKind::Create => TransactTo::create(), TransactionKind::Create => TransactTo::create(),
}; };
tx_env.value = (*value).into(); tx_env.value = evmU256::from(*value);
tx_env.data = input.0.clone(); tx_env.data = input.0.clone();
tx_env.chain_id = Some(*chain_id); tx_env.chain_id = Some(*chain_id);
tx_env.nonce = Some(*nonce); tx_env.nonce = Some(*nonce);
@ -160,8 +163,11 @@ pub fn fill_tx_env(tx_env: &mut TxEnv, transaction: &TransactionSignedEcRecovere
.iter() .iter()
.map(|l| { .map(|l| {
( (
l.address, B160(l.address.0),
l.storage_keys.iter().map(|k| U256::from_big_endian(k.as_ref())).collect(), l.storage_keys
.iter()
.map(|k| evmU256::from_be_bytes(k.to_fixed_bytes()))
.collect(),
) )
}) })
.collect(); .collect();
@ -172,25 +178,22 @@ pub fn fill_tx_env(tx_env: &mut TxEnv, transaction: &TransactionSignedEcRecovere
/// Check equality between [`reth_primitives::Log`] and [`revm::Log`] /// Check equality between [`reth_primitives::Log`] and [`revm::Log`]
pub fn is_log_equal(revm_log: &revm::Log, reth_log: &reth_primitives::Log) -> bool { pub fn is_log_equal(revm_log: &revm::Log, reth_log: &reth_primitives::Log) -> bool {
revm_log.topics.len() == reth_log.topics.len() && revm_log.topics.len() == reth_log.topics.len() &&
revm_log.address == reth_log.address && revm_log.address.0 == reth_log.address.0 &&
revm_log.data == reth_log.data && revm_log.data == reth_log.data &&
!revm_log !revm_log
.topics .topics
.iter() .iter()
.zip(reth_log.topics.iter()) .zip(reth_log.topics.iter())
.any(|(revm_topic, reth_topic)| revm_topic != reth_topic) .any(|(revm_topic, reth_topic)| revm_topic.0 != reth_topic.0)
} }
/// Create reth primitive [Account] from [revm::AccountInfo]. /// Create reth primitive [Account] from [revm::AccountInfo].
/// Check if revm bytecode hash is [KECCAK_EMPTY] and put None to reth [Account] /// Check if revm bytecode hash is [KECCAK_EMPTY] and put None to reth [Account]
pub fn to_reth_acc(revm_acc: &revm::AccountInfo) -> Account { pub fn to_reth_acc(revm_acc: &revm::AccountInfo) -> Account {
let code_hash = H256(revm_acc.code_hash.0);
Account { Account {
balance: revm_acc.balance, balance: U256(*revm_acc.balance.as_limbs()),
nonce: revm_acc.nonce, nonce: revm_acc.nonce,
bytecode_hash: if revm_acc.code_hash == KECCAK_EMPTY { bytecode_hash: if code_hash == KECCAK_EMPTY { None } else { Some(code_hash) },
None
} else {
Some(revm_acc.code_hash)
},
} }
} }
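The conversions above all follow one pattern: reth's ethereum-types values and revm's new ruint/fixed-bytes values share the same in-memory layout (four little-endian u64 limbs for U256, raw 20/32 byte arrays for addresses and hashes), so they are rewrapped rather than re-encoded. A condensed sketch of the round trips used in this file, assuming the evmU256 alias imported above:

    let revm_balance = evmU256::from_limbs(account.balance.0); // reth U256 -> revm U256
    let reth_balance = U256(*revm_balance.as_limbs());         // revm U256 -> reth U256
    let revm_addr = B160(header.beneficiary.0);                // H160 -> B160, same 20 bytes
    let reth_hash = H256(revm_acc.code_hash.0);                // B256 -> H256, same 32 bytes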

View File

@ -2,7 +2,6 @@ use crate::{keccak256, Header, Log, Receipt, TransactionSigned, H256};
use hash_db::Hasher; use hash_db::Hasher;
use hex_literal::hex; use hex_literal::hex;
use plain_hasher::PlainHasher; use plain_hasher::PlainHasher;
use reth_rlp::Encodable;
use triehash::ordered_trie_root; use triehash::ordered_trie_root;
/// Keccak-256 hash of the RLP of an empty list, KEC("\xc0"). /// Keccak-256 hash of the RLP of an empty list, KEC("\xc0").
@ -46,12 +45,12 @@ pub fn calculate_transaction_root<'a>(
pub fn calculate_receipt_root<'a>(receipts: impl Iterator<Item = &'a Receipt>) -> H256 { pub fn calculate_receipt_root<'a>(receipts: impl Iterator<Item = &'a Receipt>) -> H256 {
ordered_trie_root::<KeccakHasher, _>(receipts.into_iter().map(|receipt| { ordered_trie_root::<KeccakHasher, _>(receipts.into_iter().map(|receipt| {
let mut receipt_rlp = Vec::new(); let mut receipt_rlp = Vec::new();
receipt.encode(&mut receipt_rlp); receipt.encode_inner(&mut receipt_rlp, false);
receipt_rlp receipt_rlp
})) }))
} }
/// Calculates the log root for a header. /// Calculates the log root for headers.
pub fn calculate_log_root<'a>(logs: impl Iterator<Item = &'a Log> + Clone) -> H256 { pub fn calculate_log_root<'a>(logs: impl Iterator<Item = &'a Log> + Clone) -> H256 {
//https://github.com/ethereum/go-ethereum/blob/356bbe343a30789e77bb38f25983c8f2f2bfbb47/cmd/evm/internal/t8ntool/execution.go#L255 //https://github.com/ethereum/go-ethereum/blob/356bbe343a30789e77bb38f25983c8f2f2bfbb47/cmd/evm/internal/t8ntool/execution.go#L255
let mut logs_rlp = Vec::new(); let mut logs_rlp = Vec::new();
@ -69,7 +68,13 @@ pub fn calculate_ommers_root<'a>(ommers: impl Iterator<Item = &'a Header> + Clon
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use crate::{hex_literal::hex, proofs::calculate_transaction_root, Block};
use crate::{
hex_literal::hex,
proofs::{calculate_receipt_root, calculate_transaction_root},
Block, Bloom, Log, Receipt, TxType, H160, H256,
};
use bytes::Bytes;
use reth_rlp::Decodable; use reth_rlp::Decodable;
#[test] #[test]
@ -81,4 +86,23 @@ mod tests {
let tx_root = calculate_transaction_root(block.body.iter()); let tx_root = calculate_transaction_root(block.body.iter());
assert_eq!(block.transactions_root, tx_root, "Should be same"); assert_eq!(block.transactions_root, tx_root, "Should be same");
} }
#[test]
fn check_receipt_root() {
let logs = vec![Log { address: H160::zero(), topics: vec![], data: Bytes::default() }];
let bloom = Bloom(hex!("00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001"));
let receipt = Receipt {
tx_type: TxType::EIP2930,
success: true,
cumulative_gas_used: 102068,
bloom,
logs,
};
let receipt = vec![receipt];
let root = calculate_receipt_root(receipt.iter());
assert_eq!(
root,
H256(hex!("fe70ae4a136d98944951b2123859698d59ad251a381abc9960fa81cae3d0d4a0"))
);
}
} }
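A note on the encode_inner(&mut receipt_rlp, false) call in calculate_receipt_root above: for typed (EIP-2718) receipts, the value stored in the receipts trie is the raw envelope, i.e. the type byte followed by the RLP of the fields, with no outer RLP string header wrapping it; the wrapping header is only needed when the receipt is embedded in another RLP structure, which is what the Encodable impl does. A short sketch of the two call sites, assuming the Receipt type from this commit:

    let mut trie_value = Vec::new();
    receipt.encode_inner(&mut trie_value, false); // value inserted into the receipts trie
    let mut enveloped = Vec::new();
    receipt.encode(&mut enveloped);               // delegates to encode_inner(out, true)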

View File

@ -36,7 +36,7 @@ impl Receipt {
} }
/// Encodes the receipt data. /// Encodes the receipt data.
fn encode_receipt(&self, out: &mut dyn BufMut) { fn encode_fields(&self, out: &mut dyn BufMut) {
self.receipt_rlp_header().encode(out); self.receipt_rlp_header().encode(out);
self.success.encode(out); self.success.encode(out);
self.cumulative_gas_used.encode(out); self.cumulative_gas_used.encode(out);
@ -44,6 +44,34 @@ impl Receipt {
self.logs.encode(out); self.logs.encode(out);
} }
/// Encode receipt with or without the header data.
pub fn encode_inner(&self, out: &mut dyn BufMut, with_header: bool) {
if matches!(self.tx_type, TxType::Legacy) {
self.encode_fields(out);
return
}
let mut payload = BytesMut::new();
self.encode_fields(&mut payload);
if with_header {
let payload_length = payload.len() + 1;
let header = reth_rlp::Header { list: false, payload_length };
header.encode(out);
}
match self.tx_type {
TxType::EIP2930 => {
out.put_u8(0x01);
}
TxType::EIP1559 => {
out.put_u8(0x02);
}
_ => unreachable!("legacy handled; qed."),
}
out.put_slice(payload.as_ref());
}
/// Returns the length of the receipt data. /// Returns the length of the receipt data.
fn receipt_length(&self) -> usize { fn receipt_length(&self) -> usize {
let rlp_head = self.receipt_rlp_header(); let rlp_head = self.receipt_rlp_header();
@ -90,29 +118,7 @@ impl Encodable for Receipt {
payload_len payload_len
} }
fn encode(&self, out: &mut dyn BufMut) { fn encode(&self, out: &mut dyn BufMut) {
if matches!(self.tx_type, TxType::Legacy) { self.encode_inner(out, true)
self.encode_receipt(out);
return
}
let mut payload = BytesMut::new();
self.encode_receipt(&mut payload);
let payload_length = payload.len() + 1;
let header = reth_rlp::Header { list: false, payload_length };
header.encode(out);
match self.tx_type {
TxType::EIP2930 => {
out.put_u8(0x01);
}
TxType::EIP1559 => {
out.put_u8(0x02);
}
_ => unreachable!("legacy handled; qed."),
}
out.put_slice(payload.as_ref());
} }
} }
@ -153,15 +159,15 @@ impl Decodable for Receipt {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use crate::{Address, H256}; use crate::{hex_literal::hex, Address, H256};
use ethers_core::{types::Bytes, utils::hex}; use ethers_core::types::Bytes;
use reth_rlp::{Decodable, Encodable}; use reth_rlp::{Decodable, Encodable};
use std::str::FromStr; use std::str::FromStr;
#[test] #[test]
// Test vector from: https://eips.ethereum.org/EIPS/eip-2481 // Test vector from: https://eips.ethereum.org/EIPS/eip-2481
fn encode_legacy_receipt() { fn encode_legacy_receipt() {
let expected = hex::decode("f901668001b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85ff85d940000000000000000000000000000000000000011f842a0000000000000000000000000000000000000000000000000000000000000deada0000000000000000000000000000000000000000000000000000000000000beef830100ff").unwrap(); let expected = hex!("f901668001b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85ff85d940000000000000000000000000000000000000011f842a0000000000000000000000000000000000000000000000000000000000000deada0000000000000000000000000000000000000000000000000000000000000beef830100ff");
let mut data = vec![]; let mut data = vec![];
let receipt = Receipt { let receipt = Receipt {
@ -195,7 +201,7 @@ mod tests {
#[test] #[test]
// Test vector from: https://eips.ethereum.org/EIPS/eip-2481 // Test vector from: https://eips.ethereum.org/EIPS/eip-2481
fn decode_legacy_receipt() { fn decode_legacy_receipt() {
let data = hex::decode("f901668001b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85ff85d940000000000000000000000000000000000000011f842a0000000000000000000000000000000000000000000000000000000000000deada0000000000000000000000000000000000000000000000000000000000000beef830100ff").unwrap(); let data = hex!("f901668001b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85ff85d940000000000000000000000000000000000000011f842a0000000000000000000000000000000000000000000000000000000000000deada0000000000000000000000000000000000000000000000000000000000000beef830100ff");
// EIP658Receipt // EIP658Receipt
let expected = Receipt { let expected = Receipt {

View File

@ -86,8 +86,7 @@ where
/// Panics if an inner transaction does not exist. This should never be the case unless /// Panics if an inner transaction does not exist. This should never be the case unless
/// [Transaction::close] was called without following up with a call to [Transaction::open]. /// [Transaction::close] was called without following up with a call to [Transaction::open].
pub fn commit(&mut self) -> Result<bool, Error> { pub fn commit(&mut self) -> Result<bool, Error> {
let success = let success = if let Some(tx) = self.tx.take() { tx.commit()? } else { false };
self.tx.take().expect("Tried committing a non-existent transaction").commit()?;
self.tx = Some(self.db.tx_mut()?); self.tx = Some(self.db.tx_mut()?);
Ok(success) Ok(success)
} }

View File

@ -133,6 +133,8 @@ impl<DB: Database> Stage<DB> for ExecutionStage {
// Fetch transactions, execute them and generate results // Fetch transactions, execute them and generate results
let mut block_change_patches = Vec::with_capacity(canonical_batch.len()); let mut block_change_patches = Vec::with_capacity(canonical_batch.len());
for (header, body) in block_batch.iter() { for (header, body) in block_batch.iter() {
let num = header.number;
tracing::trace!(target: "sync::stages::execution", ?num, "Execute block.");
// iterate over all transactions // iterate over all transactions
let mut tx_walker = tx_cursor.walk(body.start_tx_id)?; let mut tx_walker = tx_cursor.walk(body.start_tx_id)?;
let mut transactions = Vec::with_capacity(body.tx_count as usize); let mut transactions = Vec::with_capacity(body.tx_count as usize);
@ -175,7 +177,7 @@ impl<DB: Database> Stage<DB> for ExecutionStage {
let state_provider = SubState::new(State::new(StateProviderImplRefLatest::new(&**tx))); let state_provider = SubState::new(State::new(StateProviderImplRefLatest::new(&**tx)));
trace!(target: "sync::stages::execution", number = header.number, txs = recovered_transactions.len(), "Executing block"); trace!(target: "sync::stages::execution", number = header.number, txs = recovered_transactions.len(), "Executing block");
let change_set = std::thread::scope(|scope| { let changeset = std::thread::scope(|scope| {
let handle = std::thread::Builder::new() let handle = std::thread::Builder::new()
.stack_size(50 * 1024 * 1024) .stack_size(50 * 1024 * 1024)
.spawn_scoped(scope, || { .spawn_scoped(scope, || {
@ -193,7 +195,7 @@ impl<DB: Database> Stage<DB> for ExecutionStage {
handle.join().expect("Expects for thread to not panic") handle.join().expect("Expects for thread to not panic")
}) })
.map_err(|error| StageError::ExecutionError { block: header.number, error })?; .map_err(|error| StageError::ExecutionError { block: header.number, error })?;
block_change_patches.push(change_set); block_change_patches.push(changeset);
} }
// Get last tx count so that we can know amount of transaction in the block. // Get last tx count so that we can know amount of transaction in the block.
@ -203,9 +205,9 @@ impl<DB: Database> Stage<DB> for ExecutionStage {
// apply changes to plain database. // apply changes to plain database.
for results in block_change_patches.into_iter() { for results in block_change_patches.into_iter() {
// insert state change set // insert state change set
for result in results.changeset.into_iter() { for result in results.changesets.into_iter() {
// TODO insert to transitionId to tx_index // TODO insert to transitionId to tx_index
for (address, account_change_set) in result.state_diff.into_iter() { for (address, account_change_set) in result.changeset.into_iter() {
let AccountChangeSet { account, wipe_storage, storage } = account_change_set; let AccountChangeSet { account, wipe_storage, storage } = account_change_set;
// apply account change to db. Updates AccountChangeSet and PlainAccountState // apply account change to db. Updates AccountChangeSet and PlainAccountState
// tables. // tables.
@ -230,13 +232,17 @@ impl<DB: Database> Stage<DB> for ExecutionStage {
storage_id.clone(), storage_id.clone(),
StorageEntry { key: hkey, value: old_value }, StorageEntry { key: hkey, value: old_value },
)?; )?;
tracing::debug!(
target = "sync::stages::execution",
"{address} setting storage:{key} ({old_value} -> {new_value})"
);
if new_value.is_zero() { // Always delete old value as duplicate table put will not override it
tx.delete::<tables::PlainStorageState>( tx.delete::<tables::PlainStorageState>(
address, address,
Some(StorageEntry { key: hkey, value: old_value }), Some(StorageEntry { key: hkey, value: old_value }),
)?; )?;
} else { if !new_value.is_zero() {
tx.put::<tables::PlainStorageState>( tx.put::<tables::PlainStorageState>(
address, address,
StorageEntry { key: hkey, value: new_value }, StorageEntry { key: hkey, value: new_value },
@ -259,7 +265,6 @@ impl<DB: Database> Stage<DB> for ExecutionStage {
} }
// If there is block reward we will add account changeset to db // If there is block reward we will add account changeset to db
// TODO add apply_block_reward_changeset to db tx fn which maybe takes an option.
if let Some(block_reward_changeset) = results.block_reward { if let Some(block_reward_changeset) = results.block_reward {
// we are sure that block reward index is present. // we are sure that block reward index is present.
for (address, changeset) in block_reward_changeset.into_iter() { for (address, changeset) in block_reward_changeset.into_iter() {
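The storage write in the hunks above is the "Fix storage double values in dup table" item from the commit message: a put into a DUPSORT table adds another (key, subkey) duplicate instead of overwriting the existing one, so the old entry must be deleted first and the new value is only re-inserted when it is non-zero. Condensed from the change above:

    // Always delete the old duplicate; put alone would leave both values in the table.
    tx.delete::<tables::PlainStorageState>(
        address,
        Some(StorageEntry { key: hkey, value: old_value }),
    )?;
    if !new_value.is_zero() {
        tx.put::<tables::PlainStorageState>(
            address,
            StorageEntry { key: hkey, value: new_value },
        )?;
    }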

View File

@ -50,6 +50,9 @@ pub trait DbDupCursorRO<'tx, T: DupSort> {
/// Returns the next `value` of a duplicate `key`. /// Returns the next `value` of a duplicate `key`.
fn next_dup_val(&mut self) -> ValueOnlyResult<T>; fn next_dup_val(&mut self) -> ValueOnlyResult<T>;
/// Seek by key and subkey
fn seek_by_key_subkey(&mut self, key: T::Key, value: T::SubKey) -> ValueOnlyResult<T>;
/// Returns an iterator starting at a key greater or equal than `start_key` of a DupSort /// Returns an iterator starting at a key greater or equal than `start_key` of a DupSort
/// table. /// table.
fn walk_dup<'cursor>( fn walk_dup<'cursor>(

View File

@ -151,6 +151,14 @@ impl<'tx, T: DupSort> DbDupCursorRO<'tx, T> for CursorMock {
todo!() todo!()
} }
fn seek_by_key_subkey(
&mut self,
_key: <T as Table>::Key,
_subkey: <T as DupSort>::SubKey,
) -> ValueOnlyResult<T> {
todo!()
}
fn walk_dup<'cursor>( fn walk_dup<'cursor>(
&'cursor mut self, &'cursor mut self,
_key: <T>::Key, _key: <T>::Key,

View File

@ -103,6 +103,18 @@ impl<'tx, K: TransactionKind, T: DupSort> DbDupCursorRO<'tx, T> for Cursor<'tx,
self.inner.next_dup().map_err(|e| Error::Read(e.into()))?.map(decode_value::<T>).transpose() self.inner.next_dup().map_err(|e| Error::Read(e.into()))?.map(decode_value::<T>).transpose()
} }
fn seek_by_key_subkey(
&mut self,
key: <T as Table>::Key,
subkey: <T as DupSort>::SubKey,
) -> ValueOnlyResult<T> {
self.inner
.get_both_range(key.encode().as_ref(), subkey.encode().as_ref())
.map_err(|e| Error::Read(e.into()))?
.map(decode_one::<T>)
.transpose()
}
/// Returns an iterator starting at a key greater or equal than `start_key` of a DUPSORT table. /// Returns an iterator starting at a key greater or equal than `start_key` of a DUPSORT table.
fn walk_dup<'cursor>( fn walk_dup<'cursor>(
&'cursor mut self, &'cursor mut self,

View File

@ -367,6 +367,95 @@ mod tests {
} }
} }
#[test]
fn db_iterate_over_all_dup_values() {
let env = test_utils::create_test_db::<NoWriteMap>(EnvKind::RW);
let key1 = Address::from_str("0x1111111111111111111111111111111111111111")
.expect(ERROR_ETH_ADDRESS);
let key2 = Address::from_str("0x2222222222222222222222222222222222222222")
.expect(ERROR_ETH_ADDRESS);
// PUT key1 (0,0)
let value00 = StorageEntry::default();
env.update(|tx| tx.put::<PlainStorageState>(key1, value00.clone()).expect(ERROR_PUT))
.unwrap();
// PUT key1 (1,1)
let value11 = StorageEntry { key: H256::from_low_u64_be(1), value: U256::from(1) };
env.update(|tx| tx.put::<PlainStorageState>(key1, value11.clone()).expect(ERROR_PUT))
.unwrap();
// PUT key2 (2,2)
let value22 = StorageEntry { key: H256::from_low_u64_be(2), value: U256::from(2) };
env.update(|tx| tx.put::<PlainStorageState>(key2, value22.clone()).expect(ERROR_PUT))
.unwrap();
// Iterate with walk_dup
{
let tx = env.tx().expect(ERROR_INIT_TX);
let mut cursor = tx.cursor_dup::<PlainStorageState>().unwrap();
let first = cursor.first().unwrap().unwrap();
let mut walker = cursor.walk_dup(first.0, first.1.key).unwrap();
// Notice that value11 and value22 have been ordered in the DB.
assert_eq!(Some(Ok((key1, value00.clone()))), walker.next());
assert_eq!(Some(Ok((key1, value11.clone()))), walker.next());
// NOTE: Dup cursor does NOT iterate over all values but only over duplicated values of the
// same key. assert_eq!(Ok(Some(value22.clone())), walker.next());
assert_eq!(None, walker.next());
}
// Iterate by using `walk`
{
let tx = env.tx().expect(ERROR_INIT_TX);
let mut cursor = tx.cursor_dup::<PlainStorageState>().unwrap();
let first = cursor.first().unwrap().unwrap();
let mut walker = cursor.walk(first.0).unwrap();
assert_eq!(Some(Ok((key1, value00))), walker.next());
assert_eq!(Some(Ok((key1, value11))), walker.next());
assert_eq!(Some(Ok((key2, value22))), walker.next());
}
}
#[test]
fn dup_value_with_same_subkey() {
let env = test_utils::create_test_db::<NoWriteMap>(EnvKind::RW);
let key1 = Address::from_str("0x1111111111111111111111111111111111111111")
.expect(ERROR_ETH_ADDRESS);
// PUT key1 (0,1)
let value01 = StorageEntry { key: H256::from_low_u64_be(0), value: U256::from(1) };
env.update(|tx| tx.put::<PlainStorageState>(key1, value01.clone()).expect(ERROR_PUT))
.unwrap();
// PUT key1 (0,0)
let value00 = StorageEntry::default();
env.update(|tx| tx.put::<PlainStorageState>(key1, value00.clone()).expect(ERROR_PUT))
.unwrap();
// Iterate with walk
{
let tx = env.tx().expect(ERROR_INIT_TX);
let mut cursor = tx.cursor_dup::<PlainStorageState>().unwrap();
let first = cursor.first().unwrap().unwrap();
let mut walker = cursor.walk(first.0).unwrap();
// NOTE: Both values are present
assert_eq!(Some(Ok((key1, value00.clone()))), walker.next());
assert_eq!(Some(Ok((key1, value01.clone()))), walker.next());
assert_eq!(None, walker.next());
}
// seek_by_key_subkey
{
let tx = env.tx().expect(ERROR_INIT_TX);
let mut cursor = tx.cursor_dup::<PlainStorageState>().unwrap();
// NOTE: There are two values with the same SubKey but only the first one is returned
assert_eq!(Ok(Some(value00.clone())), cursor.seek_by_key_subkey(key1, value00.key));
}
}
#[test] #[test]
fn db_sharded_key() { fn db_sharded_key() {
let db: Arc<Env<WriteMap>> = test_utils::create_test_db(EnvKind::RW); let db: Arc<Env<WriteMap>> = test_utils::create_test_db(EnvKind::RW);

View File

@ -225,16 +225,10 @@ impl<'a, 'b, TX: DbTx<'a>> StateProvider for StateProviderImplRefLatest<'a, 'b,
/// Get storage. /// Get storage.
fn storage(&self, account: Address, storage_key: StorageKey) -> Result<Option<StorageValue>> { fn storage(&self, account: Address, storage_key: StorageKey) -> Result<Option<StorageValue>> {
let mut cursor = self.db.cursor_dup::<tables::PlainStorageState>()?; let mut cursor = self.db.cursor_dup::<tables::PlainStorageState>()?;
if let Some((_, entry)) = cursor.seek_exact(account)? { if let Some(entry) = cursor.seek_by_key_subkey(account, storage_key)? {
if entry.key == storage_key { if entry.key == storage_key {
return Ok(Some(entry.value)) return Ok(Some(entry.value))
} }
if let Some((_, entry)) = cursor.seek(storage_key)? {
if entry.key == storage_key {
return Ok(Some(entry.value))
}
}
} }
Ok(None) Ok(None)
} }
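The equality check kept above matters because of how the new cursor method behaves: seek_by_key_subkey is backed by MDBX's GET_BOTH_RANGE, which positions on the first duplicate under the key whose subkey is greater than or equal to the requested one, so a lookup for a missing slot can return the account's next slot rather than nothing. The resulting pattern, as used in storage() above:

    if let Some(entry) = cursor.seek_by_key_subkey(account, storage_key)? {
        // entry.key may be a later slot of the same account; only an exact match counts
        if entry.key == storage_key {
            return Ok(Some(entry.value))
        }
    }
    Ok(None)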