chore: Add reth rustfmt

This commit is contained in:
sprites0
2025-06-24 02:27:40 +00:00
parent d9e7302e76
commit 0c8229370f
28 changed files with 221 additions and 573 deletions

11
rustfmt.toml Normal file
View File

@@ -0,0 +1,11 @@
reorder_imports = true
imports_granularity = "Crate"
use_small_heuristics = "Max"
comment_width = 100
wrap_comments = true
binop_separator = "Back"
trailing_comma = "Vertical"
trailing_semicolon = false
use_field_init_shorthand = true
format_code_in_doc_comments = true
doc_comment_code_block_width = 100

View File

@@ -14,19 +14,10 @@ pub static HL_HARDFORKS: LazyLock<ChainHardforks> = LazyLock::new(|| {
(EthereumHardfork::Homestead.boxed(), ForkCondition::Block(0)), (EthereumHardfork::Homestead.boxed(), ForkCondition::Block(0)),
(EthereumHardfork::Dao.boxed(), ForkCondition::Block(0)), (EthereumHardfork::Dao.boxed(), ForkCondition::Block(0)),
(EthereumHardfork::Tangerine.boxed(), ForkCondition::Block(0)), (EthereumHardfork::Tangerine.boxed(), ForkCondition::Block(0)),
( (EthereumHardfork::SpuriousDragon.boxed(), ForkCondition::Block(0)),
EthereumHardfork::SpuriousDragon.boxed(),
ForkCondition::Block(0),
),
(EthereumHardfork::Byzantium.boxed(), ForkCondition::Block(0)), (EthereumHardfork::Byzantium.boxed(), ForkCondition::Block(0)),
( (EthereumHardfork::Constantinople.boxed(), ForkCondition::Block(0)),
EthereumHardfork::Constantinople.boxed(), (EthereumHardfork::Petersburg.boxed(), ForkCondition::Block(0)),
ForkCondition::Block(0),
),
(
EthereumHardfork::Petersburg.boxed(),
ForkCondition::Block(0),
),
(EthereumHardfork::Istanbul.boxed(), ForkCondition::Block(0)), (EthereumHardfork::Istanbul.boxed(), ForkCondition::Block(0)),
(EthereumHardfork::Berlin.boxed(), ForkCondition::Block(0)), (EthereumHardfork::Berlin.boxed(), ForkCondition::Block(0)),
(EthereumHardfork::London.boxed(), ForkCondition::Block(0)), (EthereumHardfork::London.boxed(), ForkCondition::Block(0)),
@@ -38,14 +29,8 @@ pub static HL_HARDFORKS: LazyLock<ChainHardforks> = LazyLock::new(|| {
total_difficulty: U256::ZERO, total_difficulty: U256::ZERO,
}, },
), ),
( (EthereumHardfork::Shanghai.boxed(), ForkCondition::Timestamp(0)),
EthereumHardfork::Shanghai.boxed(), (EthereumHardfork::Cancun.boxed(), ForkCondition::Timestamp(0)),
ForkCondition::Timestamp(0),
),
(
EthereumHardfork::Cancun.boxed(),
ForkCondition::Timestamp(0),
),
]) ])
}); });

View File

@@ -26,9 +26,7 @@ impl ChainSpecParser for HlChainSpecParser {
/// Currently only mainnet is supported. /// Currently only mainnet is supported.
pub fn chain_value_parser(s: &str) -> eyre::Result<Arc<HlChainSpec>> { pub fn chain_value_parser(s: &str) -> eyre::Result<Arc<HlChainSpec>> {
match s { match s {
"mainnet" => Ok(Arc::new(HlChainSpec { "mainnet" => Ok(Arc::new(HlChainSpec { inner: hl_mainnet() })),
inner: hl_mainnet(),
})),
_ => Err(eyre::eyre!("Unsupported chain: {}", s)), _ => Err(eyre::eyre!("Unsupported chain: {}", s)),
} }
} }

View File

@@ -32,10 +32,8 @@ where
number: BlockNumber, number: BlockNumber,
) -> Result<(B256, B256), HlConsensusErr> { ) -> Result<(B256, B256), HlConsensusErr> {
let current_head = self.provider.best_block_number()?; let current_head = self.provider.best_block_number()?;
let current_hash = self let current_hash =
.provider self.provider.block_hash(current_head)?.ok_or(HlConsensusErr::HeadHashNotFound)?;
.block_hash(current_head)?
.ok_or(HlConsensusErr::HeadHashNotFound)?;
match number.cmp(&current_head) { match number.cmp(&current_head) {
Ordering::Greater => Ok((hash, current_hash)), Ordering::Greater => Ok((hash, current_hash)),
@@ -64,11 +62,7 @@ mod tests {
fn new(head_number: BlockNumber, head_hash: B256) -> Self { fn new(head_number: BlockNumber, head_hash: B256) -> Self {
let mut blocks = HashMap::new(); let mut blocks = HashMap::new();
blocks.insert(head_number, head_hash); blocks.insert(head_number, head_hash);
Self { Self { blocks, head_number, head_hash }
blocks,
head_number,
head_hash,
}
} }
} }
@@ -88,10 +82,7 @@ mod tests {
impl BlockNumReader for MockProvider { impl BlockNumReader for MockProvider {
fn chain_info(&self) -> Result<ChainInfo, ProviderError> { fn chain_info(&self) -> Result<ChainInfo, ProviderError> {
Ok(ChainInfo { Ok(ChainInfo { best_hash: self.head_hash, best_number: self.head_number })
best_hash: self.head_hash,
best_number: self.head_number,
})
} }
fn best_block_number(&self) -> Result<BlockNumber, ProviderError> { fn best_block_number(&self) -> Result<BlockNumber, ProviderError> {
@@ -103,10 +94,7 @@ mod tests {
} }
fn block_number(&self, hash: B256) -> Result<Option<BlockNumber>, ProviderError> { fn block_number(&self, hash: B256) -> Result<Option<BlockNumber>, ProviderError> {
Ok(self Ok(self.blocks.iter().find_map(|(num, h)| (*h == hash).then_some(*num)))
.blocks
.iter()
.find_map(|(num, h)| (*h == hash).then_some(*num)))
} }
} }

View File

@@ -2,7 +2,8 @@ use revm::{
context::{ContextSetters, Evm as EvmCtx}, context::{ContextSetters, Evm as EvmCtx},
context_interface::ContextTr, context_interface::ContextTr,
handler::{ handler::{
instructions::{EthInstructions, InstructionProvider}, EthPrecompiles, EvmTr, PrecompileProvider instructions::{EthInstructions, InstructionProvider},
EthPrecompiles, EvmTr, PrecompileProvider,
}, },
inspector::{InspectorEvmTr, JournalExt}, inspector::{InspectorEvmTr, JournalExt},
interpreter::{interpreter::EthInterpreter, Interpreter, InterpreterAction, InterpreterTypes}, interpreter::{interpreter::EthInterpreter, Interpreter, InterpreterAction, InterpreterTypes},

View File

@@ -18,9 +18,7 @@ pub struct HlHandler<EVM, ERROR, FRAME> {
impl<EVM, ERROR, FRAME> HlHandler<EVM, ERROR, FRAME> { impl<EVM, ERROR, FRAME> HlHandler<EVM, ERROR, FRAME> {
pub fn new() -> Self { pub fn new() -> Self {
Self { Self { mainnet: MainnetHandler::default() }
mainnet: MainnetHandler::default(),
}
} }
} }
@@ -87,14 +85,12 @@ where
logs, logs,
output, output,
}, },
SuccessOrHalt::Revert => ExecutionResult::Revert { SuccessOrHalt::Revert => {
gas_used: final_gas_used, ExecutionResult::Revert { gas_used: final_gas_used, output: output.into_data() }
output: output.into_data(), }
}, SuccessOrHalt::Halt(reason) => {
SuccessOrHalt::Halt(reason) => ExecutionResult::Halt { ExecutionResult::Halt { reason, gas_used: final_gas_used }
reason, }
gas_used: final_gas_used,
},
// Only two internal return flags. // Only two internal return flags.
flag @ (SuccessOrHalt::FatalExternalError | SuccessOrHalt::Internal(_)) => { flag @ (SuccessOrHalt::FatalExternalError | SuccessOrHalt::Internal(_)) => {
panic!( panic!(

View File

@@ -27,9 +27,7 @@ impl<T: Transaction> HlTxEnv<T> {
impl Default for HlTxEnv<TxEnv> { impl Default for HlTxEnv<TxEnv> {
fn default() -> Self { fn default() -> Self {
Self { Self { base: TxEnv::default() }
base: TxEnv::default(),
}
} }
} }
@@ -133,10 +131,7 @@ impl FromRecoveredTx<TransactionSigned> for HlTxEnv<TxEnv> {
fn from_recovered_tx(tx: &TransactionSigned, sender: Address) -> Self { fn from_recovered_tx(tx: &TransactionSigned, sender: Address) -> Self {
if let Some(gas_price) = tx.gas_price() { if let Some(gas_price) = tx.gas_price() {
if gas_price == 0 { if gas_price == 0 {
return Self::new(TxEnv::from_recovered_tx( return Self::new(TxEnv::from_recovered_tx(tx, s_to_address(tx.signature().s())));
tx,
s_to_address(tx.signature().s()),
));
} }
} }

View File

@@ -40,19 +40,19 @@ impl HlHardfork {
match_hardfork( match_hardfork(
fork, fork,
|fork| match fork { |fork| match fork {
EthereumHardfork::Frontier EthereumHardfork::Frontier |
| EthereumHardfork::Homestead EthereumHardfork::Homestead |
| EthereumHardfork::Tangerine EthereumHardfork::Tangerine |
| EthereumHardfork::SpuriousDragon EthereumHardfork::SpuriousDragon |
| EthereumHardfork::Byzantium EthereumHardfork::Byzantium |
| EthereumHardfork::Constantinople EthereumHardfork::Constantinople |
| EthereumHardfork::Petersburg EthereumHardfork::Petersburg |
| EthereumHardfork::Istanbul EthereumHardfork::Istanbul |
| EthereumHardfork::MuirGlacier EthereumHardfork::MuirGlacier |
| EthereumHardfork::Berlin EthereumHardfork::Berlin |
| EthereumHardfork::London EthereumHardfork::London |
| EthereumHardfork::Shanghai EthereumHardfork::Shanghai |
| EthereumHardfork::Cancun => Some(0), EthereumHardfork::Cancun => Some(0),
_ => None, _ => None,
}, },
|fork| match fork { |fork| match fork {
@@ -68,24 +68,12 @@ impl HlHardfork {
(EthereumHardfork::Frontier.boxed(), ForkCondition::Block(0)), (EthereumHardfork::Frontier.boxed(), ForkCondition::Block(0)),
(EthereumHardfork::Homestead.boxed(), ForkCondition::Block(0)), (EthereumHardfork::Homestead.boxed(), ForkCondition::Block(0)),
(EthereumHardfork::Tangerine.boxed(), ForkCondition::Block(0)), (EthereumHardfork::Tangerine.boxed(), ForkCondition::Block(0)),
( (EthereumHardfork::SpuriousDragon.boxed(), ForkCondition::Block(0)),
EthereumHardfork::SpuriousDragon.boxed(),
ForkCondition::Block(0),
),
(EthereumHardfork::Byzantium.boxed(), ForkCondition::Block(0)), (EthereumHardfork::Byzantium.boxed(), ForkCondition::Block(0)),
( (EthereumHardfork::Constantinople.boxed(), ForkCondition::Block(0)),
EthereumHardfork::Constantinople.boxed(), (EthereumHardfork::Petersburg.boxed(), ForkCondition::Block(0)),
ForkCondition::Block(0),
),
(
EthereumHardfork::Petersburg.boxed(),
ForkCondition::Block(0),
),
(EthereumHardfork::Istanbul.boxed(), ForkCondition::Block(0)), (EthereumHardfork::Istanbul.boxed(), ForkCondition::Block(0)),
( (EthereumHardfork::MuirGlacier.boxed(), ForkCondition::Block(0)),
EthereumHardfork::MuirGlacier.boxed(),
ForkCondition::Block(0),
),
(EthereumHardfork::Berlin.boxed(), ForkCondition::Block(0)), (EthereumHardfork::Berlin.boxed(), ForkCondition::Block(0)),
(EthereumHardfork::London.boxed(), ForkCondition::Block(0)), (EthereumHardfork::London.boxed(), ForkCondition::Block(0)),
(EthereumHardfork::Shanghai.boxed(), ForkCondition::Block(0)), (EthereumHardfork::Shanghai.boxed(), ForkCondition::Block(0)),

View File

@@ -2,7 +2,7 @@ use clap::{Args, Parser};
use reth::builder::NodeHandle; use reth::builder::NodeHandle;
use reth_hl::{ use reth_hl::{
chainspec::parser::HlChainSpecParser, chainspec::parser::HlChainSpecParser,
node::{cli::Cli, HlNode, storage::tables::Tables}, node::{cli::Cli, storage::tables::Tables, HlNode},
}; };
// We use jemalloc for performance reasons // We use jemalloc for performance reasons
@@ -26,14 +26,10 @@ fn main() -> eyre::Result<()> {
Cli::<HlChainSpecParser, NoArgs>::parse().run(|builder, _| async move { Cli::<HlChainSpecParser, NoArgs>::parse().run(|builder, _| async move {
builder.builder.database.create_tables_for::<Tables>()?; builder.builder.database.create_tables_for::<Tables>()?;
let (node, engine_handle_tx) = HlNode::new(); let (node, engine_handle_tx) = HlNode::new();
let NodeHandle { let NodeHandle { node, node_exit_future: exit_future } =
node, builder.node(node).launch().await?;
node_exit_future: exit_future,
} = builder.node(node).launch().await?;
engine_handle_tx engine_handle_tx.send(node.beacon_engine_handle.clone()).unwrap();
.send(node.beacon_engine_handle.clone())
.unwrap();
exit_future.await exit_future.await
})?; })?;

View File

@@ -1,8 +1,4 @@
use crate::{ use crate::{hardforks::HlHardforks, node::HlNode, HlBlock, HlBlockBody, HlPrimitives};
hardforks::HlHardforks,
node::HlNode,
{HlBlock, HlBlockBody, HlPrimitives},
};
use alloy_consensus::BlockHeader as _; use alloy_consensus::BlockHeader as _;
use alloy_eips::eip7685::Requests; use alloy_eips::eip7685::Requests;
use reth::{ use reth::{
@@ -50,10 +46,7 @@ pub struct HlConsensus<ChainSpec> {
impl<ChainSpec: EthChainSpec + HlHardforks> HlConsensus<ChainSpec> { impl<ChainSpec: EthChainSpec + HlHardforks> HlConsensus<ChainSpec> {
/// Create a new instance of [`HlConsensus`] /// Create a new instance of [`HlConsensus`]
pub fn new(chain_spec: Arc<ChainSpec>) -> Self { pub fn new(chain_spec: Arc<ChainSpec>) -> Self {
Self { Self { inner: EthBeaconConsensus::new(chain_spec.clone()), chain_spec }
inner: EthBeaconConsensus::new(chain_spec.clone()),
chain_spec,
}
} }
} }
@@ -166,16 +159,11 @@ where
// - Filter out system transactions for receipts check // - Filter out system transactions for receipts check
// Check if gas used matches the value set in header. // Check if gas used matches the value set in header.
let cumulative_gas_used = receipts let cumulative_gas_used =
.last() receipts.last().map(|receipt| receipt.cumulative_gas_used()).unwrap_or(0);
.map(|receipt| receipt.cumulative_gas_used())
.unwrap_or(0);
if block.header().gas_used() != cumulative_gas_used { if block.header().gas_used() != cumulative_gas_used {
return Err(ConsensusError::BlockGasUsed { return Err(ConsensusError::BlockGasUsed {
gas: GotExpected { gas: GotExpected { got: cumulative_gas_used, expected: block.header().gas_used() },
got: cumulative_gas_used,
expected: block.header().gas_used(),
},
gas_spent_by_tx: gas_spent_by_transactions(receipts), gas_spent_by_tx: gas_spent_by_transactions(receipts),
}); });
} }
@@ -185,9 +173,8 @@ where
// transaction This was replaced with is_success flag. // transaction This was replaced with is_success flag.
// See more about EIP here: https://eips.ethereum.org/EIPS/eip-658 // See more about EIP here: https://eips.ethereum.org/EIPS/eip-658
if chain_spec.is_byzantium_active_at_block(block.header().number()) { if chain_spec.is_byzantium_active_at_block(block.header().number()) {
let receipts_for_root = receipts let receipts_for_root =
.iter().filter(|&r| r.cumulative_gas_used() != 0).cloned() receipts.iter().filter(|&r| r.cumulative_gas_used() != 0).cloned().collect::<Vec<_>>();
.collect::<Vec<_>>();
if let Err(error) = verify_receipts( if let Err(error) = verify_receipts(
block.header().receipts_root(), block.header().receipts_root(),
block.header().logs_bloom(), block.header().logs_bloom(),

View File

@@ -14,16 +14,11 @@ pub(super) fn verify_receipts<R: Receipt>(
receipts: &[R], receipts: &[R],
) -> Result<(), ConsensusError> { ) -> Result<(), ConsensusError> {
// Calculate receipts root. // Calculate receipts root.
let receipts_with_bloom = receipts let receipts_with_bloom = receipts.iter().map(TxReceipt::with_bloom_ref).collect::<Vec<_>>();
.iter()
.map(TxReceipt::with_bloom_ref)
.collect::<Vec<_>>();
let receipts_root = calculate_receipt_root(&receipts_with_bloom); let receipts_root = calculate_receipt_root(&receipts_with_bloom);
// Calculate header logs bloom. // Calculate header logs bloom.
let logs_bloom = receipts_with_bloom let logs_bloom = receipts_with_bloom.iter().fold(Bloom::ZERO, |bloom, r| bloom | r.bloom_ref());
.iter()
.fold(Bloom::ZERO, |bloom, r| bloom | r.bloom_ref());
compare_receipts_root_and_logs_bloom( compare_receipts_root_and_logs_bloom(
receipts_root, receipts_root,
@@ -45,21 +40,13 @@ pub(super) fn compare_receipts_root_and_logs_bloom(
) -> Result<(), ConsensusError> { ) -> Result<(), ConsensusError> {
if calculated_receipts_root != expected_receipts_root { if calculated_receipts_root != expected_receipts_root {
return Err(ConsensusError::BodyReceiptRootDiff( return Err(ConsensusError::BodyReceiptRootDiff(
GotExpected { GotExpected { got: calculated_receipts_root, expected: expected_receipts_root }.into(),
got: calculated_receipts_root,
expected: expected_receipts_root,
}
.into(),
)); ));
} }
if calculated_logs_bloom != expected_logs_bloom { if calculated_logs_bloom != expected_logs_bloom {
return Err(ConsensusError::BodyBloomLogDiff( return Err(ConsensusError::BodyBloomLogDiff(
GotExpected { GotExpected { got: calculated_logs_bloom, expected: expected_logs_bloom }.into(),
got: calculated_logs_bloom,
expected: expected_logs_bloom,
}
.into(),
)); ));
} }

View File

@@ -61,12 +61,7 @@ where
execution_ctx: ctx, execution_ctx: ctx,
parent, parent,
transactions, transactions,
output: output: BlockExecutionResult { receipts, requests, gas_used },
BlockExecutionResult {
receipts,
requests,
gas_used,
},
state_root, state_root,
.. ..
} = input; } = input;
@@ -74,16 +69,10 @@ where
let timestamp = evm_env.block_env.timestamp; let timestamp = evm_env.block_env.timestamp;
// Filter out system tx receipts // Filter out system tx receipts
let transactions_for_root: Vec<TransactionSigned> = transactions let transactions_for_root: Vec<TransactionSigned> =
.iter() transactions.iter().filter(|t| !is_system_transaction(t)).cloned().collect::<Vec<_>>();
.filter(|t| !is_system_transaction(t)) let receipts_for_root: Vec<Receipt> =
.cloned() receipts.iter().filter(|r| r.cumulative_gas_used() != 0).cloned().collect::<Vec<_>>();
.collect::<Vec<_>>();
let receipts_for_root: Vec<Receipt> = receipts
.iter()
.filter(|r| r.cumulative_gas_used() != 0)
.cloned()
.collect::<Vec<_>>();
let transactions_root = proofs::calculate_transaction_root(&transactions_for_root); let transactions_root = proofs::calculate_transaction_root(&transactions_for_root);
let receipts_root = Receipt::calculate_receipt_root_no_memo(&receipts_for_root); let receipts_root = Receipt::calculate_receipt_root_no_memo(&receipts_for_root);
@@ -92,16 +81,10 @@ where
let withdrawals = inner let withdrawals = inner
.chain_spec .chain_spec
.is_shanghai_active_at_timestamp(timestamp) .is_shanghai_active_at_timestamp(timestamp)
.then(|| { .then(|| ctx.ctx.withdrawals.map(|w| w.into_owned()).unwrap_or_default());
ctx.ctx
.withdrawals
.map(|w| w.into_owned())
.unwrap_or_default()
});
let withdrawals_root = withdrawals let withdrawals_root =
.as_deref() withdrawals.as_deref().map(|w| proofs::calculate_withdrawals_root(w));
.map(|w| proofs::calculate_withdrawals_root(w));
let requests_hash = inner let requests_hash = inner
.chain_spec .chain_spec
.is_prague_active_at_timestamp(timestamp) .is_prague_active_at_timestamp(timestamp)
@@ -112,16 +95,9 @@ where
// only determine cancun fields when active // only determine cancun fields when active
if inner.chain_spec.is_cancun_active_at_timestamp(timestamp) { if inner.chain_spec.is_cancun_active_at_timestamp(timestamp) {
blob_gas_used = Some( blob_gas_used =
transactions Some(transactions.iter().map(|tx| tx.blob_gas_used().unwrap_or_default()).sum());
.iter() excess_blob_gas = if inner.chain_spec.is_cancun_active_at_timestamp(parent.timestamp) {
.map(|tx| tx.blob_gas_used().unwrap_or_default())
.sum(),
);
excess_blob_gas = if inner
.chain_spec
.is_cancun_active_at_timestamp(parent.timestamp)
{
parent.maybe_next_block_excess_blob_gas( parent.maybe_next_block_excess_blob_gas(
inner.chain_spec.blob_params_at_timestamp(timestamp), inner.chain_spec.blob_params_at_timestamp(timestamp),
) )
@@ -160,11 +136,7 @@ where
Ok(Self::Block { Ok(Self::Block {
header, header,
body: HlBlockBody { body: HlBlockBody {
inner: BlockBody { inner: BlockBody { transactions, ommers: Default::default(), withdrawals },
transactions,
ommers: Default::default(),
withdrawals,
},
sidecars: None, sidecars: None,
read_precompile_calls: Some(read_precompile_calls), read_precompile_calls: Some(read_precompile_calls),
}, },
@@ -174,9 +146,7 @@ where
impl HlBlockAssembler { impl HlBlockAssembler {
pub fn new(chain_spec: Arc<HlChainSpec>) -> Self { pub fn new(chain_spec: Arc<HlChainSpec>) -> Self {
Self { Self { inner: EthBlockAssembler::new(chain_spec) }
inner: EthBlockAssembler::new(chain_spec),
}
} }
} }
@@ -240,11 +210,7 @@ impl<R, Spec, EvmFactory> HlBlockExecutorFactory<R, Spec, EvmFactory> {
/// Creates a new [`HlBlockExecutorFactory`] with the given spec, [`EvmFactory`], and /// Creates a new [`HlBlockExecutorFactory`] with the given spec, [`EvmFactory`], and
/// [`ReceiptBuilder`]. /// [`ReceiptBuilder`].
pub const fn new(receipt_builder: R, spec: Spec, evm_factory: EvmFactory) -> Self { pub const fn new(receipt_builder: R, spec: Spec, evm_factory: EvmFactory) -> Self {
Self { Self { receipt_builder, spec, evm_factory }
receipt_builder,
spec,
evm_factory,
}
} }
/// Exposes the receipt builder. /// Exposes the receipt builder.
@@ -327,9 +293,8 @@ where
); );
// configure evm env based on parent block // configure evm env based on parent block
let mut cfg_env = CfgEnv::new() let mut cfg_env =
.with_chain_id(self.chain_spec().chain().id()) CfgEnv::new().with_chain_id(self.chain_spec().chain().id()).with_spec(spec);
.with_spec(spec);
if let Some(blob_params) = &blob_params { if let Some(blob_params) = &blob_params {
cfg_env.set_blob_max_count(blob_params.max_blob_count); cfg_env.set_blob_max_count(blob_params.max_blob_count);
@@ -342,16 +307,10 @@ where
// derive the EIP-4844 blob fees from the header's `excess_blob_gas` and the current // derive the EIP-4844 blob fees from the header's `excess_blob_gas` and the current
// blobparams // blobparams
let blob_excess_gas_and_price = let blob_excess_gas_and_price =
header header.excess_blob_gas.zip(blob_params).map(|(excess_blob_gas, params)| {
.excess_blob_gas let blob_gasprice = params.calc_blob_fee(excess_blob_gas);
.zip(blob_params) BlobExcessGasAndPrice { excess_blob_gas, blob_gasprice }
.map(|(excess_blob_gas, params)| { });
let blob_gasprice = params.calc_blob_fee(excess_blob_gas);
BlobExcessGasAndPrice {
excess_blob_gas,
blob_gasprice,
}
});
let eth_spec = spec.into_eth_spec(); let eth_spec = spec.into_eth_spec();
@@ -359,16 +318,8 @@ where
number: header.number(), number: header.number(),
beneficiary: header.beneficiary(), beneficiary: header.beneficiary(),
timestamp: header.timestamp(), timestamp: header.timestamp(),
difficulty: if eth_spec >= SpecId::MERGE { difficulty: if eth_spec >= SpecId::MERGE { U256::ZERO } else { header.difficulty() },
U256::ZERO prevrandao: if eth_spec >= SpecId::MERGE { header.mix_hash() } else { None },
} else {
header.difficulty()
},
prevrandao: if eth_spec >= SpecId::MERGE {
header.mix_hash()
} else {
None
},
gas_limit: header.gas_limit(), gas_limit: header.gas_limit(),
basefee: header.base_fee_per_gas().unwrap_or_default(), basefee: header.base_fee_per_gas().unwrap_or_default(),
blob_excess_gas_and_price, blob_excess_gas_and_price,
@@ -390,23 +341,20 @@ where
); );
// configure evm env based on parent block // configure evm env based on parent block
let cfg_env = CfgEnv::new() let cfg_env =
.with_chain_id(self.chain_spec().chain().id()) CfgEnv::new().with_chain_id(self.chain_spec().chain().id()).with_spec(spec_id);
.with_spec(spec_id);
// if the parent block did not have excess blob gas (i.e. it was pre-cancun), but it is // if the parent block did not have excess blob gas (i.e. it was pre-cancun), but it is
// cancun now, we need to set the excess blob gas to the default value(0) // cancun now, we need to set the excess blob gas to the default value(0)
let blob_excess_gas_and_price = parent let blob_excess_gas_and_price = parent
.maybe_next_block_excess_blob_gas( .maybe_next_block_excess_blob_gas(
self.chain_spec() self.chain_spec().blob_params_at_timestamp(attributes.timestamp),
.blob_params_at_timestamp(attributes.timestamp),
) )
.or_else(|| (spec_id.into_eth_spec().is_enabled_in(SpecId::CANCUN)).then_some(0)) .or_else(|| (spec_id.into_eth_spec().is_enabled_in(SpecId::CANCUN)).then_some(0))
.map(|gas| BlobExcessGasAndPrice::new(gas, false)); .map(|gas| BlobExcessGasAndPrice::new(gas, false));
let mut basefee = parent.next_block_base_fee( let mut basefee = parent.next_block_base_fee(
self.chain_spec() self.chain_spec().base_fee_params_at_timestamp(attributes.timestamp),
.base_fee_params_at_timestamp(attributes.timestamp),
); );
let mut gas_limit = U256::from(parent.gas_limit); let mut gas_limit = U256::from(parent.gas_limit);
@@ -486,9 +434,9 @@ where
/// Map the latest active hardfork at the given timestamp or block number to a [`HlSpecId`]. /// Map the latest active hardfork at the given timestamp or block number to a [`HlSpecId`].
pub fn revm_spec_by_timestamp_and_block_number( pub fn revm_spec_by_timestamp_and_block_number(
chain_spec: impl HlHardforks, _chain_spec: impl HlHardforks,
timestamp: u64, _timestamp: u64,
block_number: u64, _block_number: u64,
) -> HlSpecId { ) -> HlSpecId {
HlSpecId::V1 HlSpecId::V1
} }

View File

@@ -1,5 +1,4 @@
use super::config::HlBlockExecutionCtx; use super::{config::HlBlockExecutionCtx, patch::patch_mainnet_after_tx};
use super::patch::patch_mainnet_after_tx;
use crate::{ use crate::{
evm::transaction::HlTxEnv, evm::transaction::HlTxEnv,
hardforks::HlHardforks, hardforks::HlHardforks,
@@ -24,7 +23,10 @@ use revm::{
context::{ context::{
result::{ExecutionResult, ResultAndState}, result::{ExecutionResult, ResultAndState},
TxEnv, TxEnv,
}, precompile::{PrecompileError, PrecompileOutput, PrecompileResult}, primitives::HashMap, DatabaseCommit },
precompile::{PrecompileError, PrecompileOutput, PrecompileResult},
primitives::HashMap,
DatabaseCommit,
}; };
pub fn is_system_transaction(tx: &TransactionSigned) -> bool { pub fn is_system_transaction(tx: &TransactionSigned) -> bool {
@@ -59,31 +61,20 @@ fn run_precompile(
data: &[u8], data: &[u8],
gas_limit: u64, gas_limit: u64,
) -> PrecompileResult { ) -> PrecompileResult {
let input = ReadPrecompileInput { let input = ReadPrecompileInput { input: Bytes::copy_from_slice(data), gas_limit };
input: Bytes::copy_from_slice(data),
gas_limit,
};
let Some(get) = precompile_calls.get(&input) else { let Some(get) = precompile_calls.get(&input) else {
return Err(PrecompileError::OutOfGas); return Err(PrecompileError::OutOfGas);
}; };
match *get { match *get {
ReadPrecompileResult::Ok { ReadPrecompileResult::Ok { gas_used, ref bytes } => {
gas_used, Ok(PrecompileOutput { gas_used, bytes: bytes.clone() })
ref bytes,
} => {
Ok(PrecompileOutput {
gas_used,
bytes: bytes.clone(),
})
} }
ReadPrecompileResult::OutOfGas => { ReadPrecompileResult::OutOfGas => {
// Use all the gas passed to this precompile // Use all the gas passed to this precompile
Err(PrecompileError::OutOfGas) Err(PrecompileError::OutOfGas)
} }
ReadPrecompileResult::Error => { ReadPrecompileResult::Error => Err(PrecompileError::OutOfGas),
Err(PrecompileError::OutOfGas)
}
ReadPrecompileResult::UnexpectedError => panic!("unexpected precompile error"), ReadPrecompileResult::UnexpectedError => panic!("unexpected precompile error"),
} }
} }
@@ -124,14 +115,7 @@ where
})) }))
}); });
} }
Self { Self { spec, evm, gas_used: 0, receipts: vec![], receipt_builder, ctx }
spec,
evm,
gas_used: 0,
receipts: vec![],
receipt_builder,
ctx,
}
} }
} }
@@ -180,13 +164,11 @@ where
let block_available_gas = self.evm.block().gas_limit - self.gas_used; let block_available_gas = self.evm.block().gas_limit - self.gas_used;
if tx.tx().gas_limit() > block_available_gas { if tx.tx().gas_limit() > block_available_gas {
return Err( return Err(BlockValidationError::TransactionGasLimitMoreThanAvailableBlockGas {
BlockValidationError::TransactionGasLimitMoreThanAvailableBlockGas { transaction_gas_limit: tx.tx().gas_limit(),
transaction_gas_limit: tx.tx().gas_limit(), block_available_gas,
block_available_gas, }
} .into());
.into(),
);
} }
let result_and_state = self let result_and_state = self
.evm .evm
@@ -207,14 +189,13 @@ where
&mut state, &mut state,
)?; )?;
self.receipts self.receipts.push(self.receipt_builder.build_receipt(ReceiptBuilderCtx {
.push(self.receipt_builder.build_receipt(ReceiptBuilderCtx { tx: tx.tx(),
tx: tx.tx(), evm: &self.evm,
evm: &self.evm, result,
result, state: &state,
state: &state, cumulative_gas_used: self.gas_used,
cumulative_gas_used: self.gas_used, }));
}));
self.evm.db_mut().commit(state); self.evm.db_mut().commit(state);

View File

@@ -1,13 +1,11 @@
use super::HlEvm; use super::HlEvm;
use crate::{ use crate::evm::{
evm::{ api::{
api::{ builder::HlBuilder,
builder::HlBuilder, ctx::{DefaultHl, HlContext},
ctx::{DefaultHl, HlContext},
},
spec::HlSpecId,
transaction::HlTxEnv,
}, },
spec::HlSpecId,
transaction::HlTxEnv,
}; };
use reth_evm::{precompiles::PrecompilesMap, EvmEnv, EvmFactory}; use reth_evm::{precompiles::PrecompilesMap, EvmEnv, EvmFactory};
use reth_revm::{Context, Database}; use reth_revm::{Context, Database};

View File

@@ -117,12 +117,7 @@ where
} }
fn finish(self) -> (Self::DB, EvmEnv<Self::Spec>) { fn finish(self) -> (Self::DB, EvmEnv<Self::Spec>) {
let Context { let Context { block: block_env, cfg: cfg_env, journaled_state, .. } = self.inner.0.ctx;
block: block_env,
cfg: cfg_env,
journaled_state,
..
} = self.inner.0.ctx;
(journaled_state.database, EvmEnv { block_env, cfg_env }) (journaled_state.database, EvmEnv { block_env, cfg_env })
} }

View File

@@ -5,114 +5,24 @@ use revm::{primitives::HashMap, state::Account};
/// Applies storage patches to the state after a transaction is executed. /// Applies storage patches to the state after a transaction is executed.
/// See https://github.com/hyperliquid-dex/hyper-evm-sync/commit/39047242b6260f7764527a2f5057dd9c3a75aa89 for more details. /// See https://github.com/hyperliquid-dex/hyper-evm-sync/commit/39047242b6260f7764527a2f5057dd9c3a75aa89 for more details.
static MAINNET_PATCHES_AFTER_TX: &[(u64, u64, bool, Address)] = &[ static MAINNET_PATCHES_AFTER_TX: &[(u64, u64, bool, Address)] = &[
( (1_467_569, 0, false, address!("0x33f6fe38c55cb100ce27b3138e5d2d041648364f")),
1_467_569, (1_467_631, 0, false, address!("0x33f6fe38c55cb100ce27b3138e5d2d041648364f")),
0, (1_499_313, 2, false, address!("0xe27bfc0a812b38927ff646f24af9149f45deb550")),
false, (1_499_406, 0, false, address!("0xe27bfc0a812b38927ff646f24af9149f45deb550")),
address!("0x33f6fe38c55cb100ce27b3138e5d2d041648364f"), (1_499_685, 0, false, address!("0xfee3932b75a87e86930668a6ab3ed43b404c8a30")),
), (1_514_843, 0, false, address!("0x723e5fbbeed025772a91240fd0956a866a41a603")),
( (1_514_936, 0, false, address!("0x723e5fbbeed025772a91240fd0956a866a41a603")),
1_467_631, (1_530_529, 2, false, address!("0xa694e8fd8f4a177dd23636d838e9f1fb2138d87a")),
0, (1_530_622, 2, false, address!("0xa694e8fd8f4a177dd23636d838e9f1fb2138d87a")),
false, (1_530_684, 3, false, address!("0xa694e8fd8f4a177dd23636d838e9f1fb2138d87a")),
address!("0x33f6fe38c55cb100ce27b3138e5d2d041648364f"), (1_530_777, 3, false, address!("0xa694e8fd8f4a177dd23636d838e9f1fb2138d87a")),
), (1_530_839, 2, false, address!("0x692a343fc401a7755f8fc2facf61af426adaf061")),
( (1_530_901, 0, false, address!("0xfd9716f16596715ce765dabaee11787870e04b8a")),
1_499_313, (1_530_994, 3, false, address!("0xfd9716f16596715ce765dabaee11787870e04b8a")),
2, (1_531_056, 4, false, address!("0xdc67c2b8349ca20f58760e08371fc9271e82b5a4")),
false, (1_531_149, 0, false, address!("0xdc67c2b8349ca20f58760e08371fc9271e82b5a4")),
address!("0xe27bfc0a812b38927ff646f24af9149f45deb550"), (1_531_211, 3, false, address!("0xdc67c2b8349ca20f58760e08371fc9271e82b5a4")),
), (1_531_366, 1, false, address!("0x9a90a517d27a9e60e454c96fefbbe94ff244ed6f")),
(
1_499_406,
0,
false,
address!("0xe27bfc0a812b38927ff646f24af9149f45deb550"),
),
(
1_499_685,
0,
false,
address!("0xfee3932b75a87e86930668a6ab3ed43b404c8a30"),
),
(
1_514_843,
0,
false,
address!("0x723e5fbbeed025772a91240fd0956a866a41a603"),
),
(
1_514_936,
0,
false,
address!("0x723e5fbbeed025772a91240fd0956a866a41a603"),
),
(
1_530_529,
2,
false,
address!("0xa694e8fd8f4a177dd23636d838e9f1fb2138d87a"),
),
(
1_530_622,
2,
false,
address!("0xa694e8fd8f4a177dd23636d838e9f1fb2138d87a"),
),
(
1_530_684,
3,
false,
address!("0xa694e8fd8f4a177dd23636d838e9f1fb2138d87a"),
),
(
1_530_777,
3,
false,
address!("0xa694e8fd8f4a177dd23636d838e9f1fb2138d87a"),
),
(
1_530_839,
2,
false,
address!("0x692a343fc401a7755f8fc2facf61af426adaf061"),
),
(
1_530_901,
0,
false,
address!("0xfd9716f16596715ce765dabaee11787870e04b8a"),
),
(
1_530_994,
3,
false,
address!("0xfd9716f16596715ce765dabaee11787870e04b8a"),
),
(
1_531_056,
4,
false,
address!("0xdc67c2b8349ca20f58760e08371fc9271e82b5a4"),
),
(
1_531_149,
0,
false,
address!("0xdc67c2b8349ca20f58760e08371fc9271e82b5a4"),
),
(
1_531_211,
3,
false,
address!("0xdc67c2b8349ca20f58760e08371fc9271e82b5a4"),
),
(
1_531_366,
1,
false,
address!("0x9a90a517d27a9e60e454c96fefbbe94ff244ed6f"),
),
]; ];
pub(crate) fn patch_mainnet_after_tx( pub(crate) fn patch_mainnet_after_tx(

View File

@@ -53,17 +53,9 @@ pub struct HlNode {
} }
impl HlNode { impl HlNode {
pub fn new() -> ( pub fn new() -> (Self, oneshot::Sender<BeaconConsensusEngineHandle<HlPayloadTypes>>) {
Self,
oneshot::Sender<BeaconConsensusEngineHandle<HlPayloadTypes>>,
) {
let (tx, rx) = oneshot::channel(); let (tx, rx) = oneshot::channel();
( (Self { engine_handle_rx: Arc::new(Mutex::new(Some(rx))) }, tx)
Self {
engine_handle_rx: Arc::new(Mutex::new(Some(rx))),
},
tx,
)
} }
} }
@ -86,9 +78,7 @@ impl HlNode {
.pool(EthereumPoolBuilder::default()) .pool(EthereumPoolBuilder::default())
.executor(HlExecutorBuilder::default()) .executor(HlExecutorBuilder::default())
.payload(HlPayloadServiceBuilder::default()) .payload(HlPayloadServiceBuilder::default())
.network(HlNetworkBuilder { .network(HlNetworkBuilder { engine_handle_rx: self.engine_handle_rx.clone() })
engine_handle_rx: self.engine_handle_rx.clone(),
})
.consensus(HlConsensusBuilder::default()) .consensus(HlConsensusBuilder::default())
} }
} }
@ -134,12 +124,7 @@ where
type RpcBlock = alloy_rpc_types::Block; type RpcBlock = alloy_rpc_types::Block;
fn rpc_to_primitive_block(rpc_block: Self::RpcBlock) -> HlBlock { fn rpc_to_primitive_block(rpc_block: Self::RpcBlock) -> HlBlock {
let alloy_rpc_types::Block { let alloy_rpc_types::Block { header, transactions, withdrawals, .. } = rpc_block;
header,
transactions,
withdrawals,
..
} = rpc_block;
HlBlock { HlBlock {
header: header.inner, header: header.inner,
body: HlBlockBody { body: HlBlockBody {

View File

@ -99,11 +99,10 @@ where
match engine.new_payload(payload).await { match engine.new_payload(payload).await {
Ok(payload_status) => match payload_status.status { Ok(payload_status) => match payload_status.status {
PayloadStatusEnum::Valid => Outcome { PayloadStatusEnum::Valid => {
peer: peer_id, Outcome { peer: peer_id, result: Ok(BlockValidation::ValidBlock { block }) }
result: Ok(BlockValidation::ValidBlock { block }), .into()
} }
.into(),
PayloadStatusEnum::Invalid { validation_error } => Outcome { PayloadStatusEnum::Invalid { validation_error } => Outcome {
peer: peer_id, peer: peer_id,
result: Err(BlockImportError::Other(validation_error.into())), result: Err(BlockImportError::Other(validation_error.into())),
@ -136,16 +135,13 @@ where
finalized_block_hash: head_block_hash, finalized_block_hash: head_block_hash,
}; };
match engine match engine.fork_choice_updated(state, None, EngineApiMessageVersion::default()).await
.fork_choice_updated(state, None, EngineApiMessageVersion::default())
.await
{ {
Ok(response) => match response.payload_status.status { Ok(response) => match response.payload_status.status {
PayloadStatusEnum::Valid => Outcome { PayloadStatusEnum::Valid => {
peer: peer_id, Outcome { peer: peer_id, result: Ok(BlockValidation::ValidBlock { block }) }
result: Ok(BlockValidation::ValidBlock { block }), .into()
} }
.into(),
PayloadStatusEnum::Invalid { validation_error } => Outcome { PayloadStatusEnum::Invalid { validation_error } => Outcome {
peer: peer_id, peer: peer_id,
result: Err(BlockImportError::Other(validation_error.into())), result: Err(BlockImportError::Other(validation_error.into())),
@ -189,10 +185,7 @@ where
let td = U128::from(reth_block.header().difficulty()); let td = U128::from(reth_block.header().difficulty());
let msg = NewBlockMessage { let msg = NewBlockMessage {
hash: reth_block.header().hash_slow(), hash: reth_block.header().hash_slow(),
block: Arc::new(HlNewBlock(NewBlock { block: Arc::new(HlNewBlock(NewBlock { block: reth_block, td })),
block: reth_block,
td,
})),
}; };
this.on_new_block(msg, peer_id); this.on_new_block(msg, peer_id);
this.height += 1; this.height += 1;
@ -336,17 +329,12 @@ mod tests {
impl EngineResponses { impl EngineResponses {
fn both_valid() -> Self { fn both_valid() -> Self {
Self { Self { new_payload: PayloadStatusEnum::Valid, fcu: PayloadStatusEnum::Valid }
new_payload: PayloadStatusEnum::Valid,
fcu: PayloadStatusEnum::Valid,
}
} }
fn invalid_new_payload() -> Self { fn invalid_new_payload() -> Self {
Self { Self {
new_payload: PayloadStatusEnum::Invalid { new_payload: PayloadStatusEnum::Invalid { validation_error: "test error".into() },
validation_error: "test error".into(),
},
fcu: PayloadStatusEnum::Valid, fcu: PayloadStatusEnum::Valid,
} }
} }
@ -354,9 +342,7 @@ mod tests {
fn invalid_fcu() -> Self { fn invalid_fcu() -> Self {
Self { Self {
new_payload: PayloadStatusEnum::Valid, new_payload: PayloadStatusEnum::Valid,
fcu: PayloadStatusEnum::Invalid { fcu: PayloadStatusEnum::Invalid { validation_error: "fcu error".into() },
validation_error: "fcu error".into(),
},
} }
} }
} }
@ -369,9 +355,7 @@ mod tests {
impl TestFixture { impl TestFixture {
/// Create a new test fixture with the given engine responses /// Create a new test fixture with the given engine responses
async fn new(responses: EngineResponses) -> Self { async fn new(responses: EngineResponses) -> Self {
let consensus = Arc::new(HlConsensus { let consensus = Arc::new(HlConsensus { provider: MockProvider });
provider: MockProvider,
});
let (to_engine, from_engine) = mpsc::unbounded_channel(); let (to_engine, from_engine) = mpsc::unbounded_channel();
let engine_handle = BeaconConsensusEngineHandle::new(to_engine); let engine_handle = BeaconConsensusEngineHandle::new(to_engine);
@ -435,15 +419,9 @@ mod tests {
read_precompile_calls: None, read_precompile_calls: None,
}, },
}; };
let new_block = HlNewBlock(NewBlock { let new_block = HlNewBlock(NewBlock { block, td: U128::from(1) });
block,
td: U128::from(1),
});
let hash = new_block.0.block.header.hash_slow(); let hash = new_block.0.block.header.hash_slow();
NewBlockMessage { NewBlockMessage { hash, block: Arc::new(new_block) }
hash,
block: Arc::new(new_block),
}
} }
/// Helper function to handle engine messages with specified payload statuses /// Helper function to handle engine messages with specified payload statuses

View File

@ -71,12 +71,7 @@ mod rlp {
header, header,
body: body:
HlBlockBody { HlBlockBody {
inner: inner: BlockBody { transactions, ommers, withdrawals },
BlockBody {
transactions,
ommers,
withdrawals,
},
sidecars, sidecars,
read_precompile_calls, read_precompile_calls,
}, },
@ -111,13 +106,7 @@ mod rlp {
impl Decodable for HlNewBlock { impl Decodable for HlNewBlock {
fn decode(buf: &mut &[u8]) -> alloy_rlp::Result<Self> { fn decode(buf: &mut &[u8]) -> alloy_rlp::Result<Self> {
let HlNewBlockHelper { let HlNewBlockHelper {
block: block: BlockHelper { header, transactions, ommers, withdrawals },
BlockHelper {
header,
transactions,
ommers,
withdrawals,
},
td, td,
sidecars, sidecars,
read_precompile_calls, read_precompile_calls,
@ -180,26 +169,21 @@ impl HlNetworkBuilder {
let (to_network, import_outcome) = mpsc::unbounded_channel(); let (to_network, import_outcome) = mpsc::unbounded_channel();
let handle = ImportHandle::new(to_import, import_outcome); let handle = ImportHandle::new(to_import, import_outcome);
let consensus = Arc::new(HlConsensus { let consensus = Arc::new(HlConsensus { provider: ctx.provider().clone() });
provider: ctx.provider().clone(),
});
let number = ctx.provider().last_block_number().unwrap_or(1); let number = ctx.provider().last_block_number().unwrap_or(1);
let number = std::cmp::max(number, 1); let number = std::cmp::max(number, 1);
ctx.task_executor() ctx.task_executor().spawn_critical("block import", async move {
.spawn_critical("block import", async move { let handle = engine_handle_rx
let handle = engine_handle_rx .lock()
.lock() .await
.await .take()
.take() .expect("node should only be launched once")
.expect("node should only be launched once") .await
.await .unwrap();
.unwrap();
ImportService::new(consensus, handle, from_network, to_network, number) ImportService::new(consensus, handle, from_network, to_network, number).await.unwrap();
.await });
.unwrap();
});
let network_builder = network_builder let network_builder = network_builder
.boot_nodes(boot_nodes()) .boot_nodes(boot_nodes())

View File

@ -46,13 +46,13 @@ pub struct HlBlockBody {
impl InMemorySize for HlBlockBody { impl InMemorySize for HlBlockBody {
fn size(&self) -> usize { fn size(&self) -> usize {
self.inner.size() self.inner.size() +
+ self.sidecars.as_ref().map_or(0, |s| { self.sidecars
s.capacity() * core::mem::size_of::<BlobTransactionSidecar>() .as_ref()
}) .map_or(0, |s| s.capacity() * core::mem::size_of::<BlobTransactionSidecar>()) +
+ self.read_precompile_calls.as_ref().map_or(0, |s| { self.read_precompile_calls
s.0.capacity() * core::mem::size_of::<ReadPrecompileCall>() .as_ref()
}) .map_or(0, |s| s.0.capacity() * core::mem::size_of::<ReadPrecompileCall>())
} }
} }
@ -156,12 +156,7 @@ mod rlp {
impl<'a> From<&'a HlBlockBody> for BlockBodyHelper<'a> { impl<'a> From<&'a HlBlockBody> for BlockBodyHelper<'a> {
fn from(value: &'a HlBlockBody) -> Self { fn from(value: &'a HlBlockBody) -> Self {
let HlBlockBody { let HlBlockBody {
inner: inner: BlockBody { transactions, ommers, withdrawals },
BlockBody {
transactions,
ommers,
withdrawals,
},
sidecars, sidecars,
read_precompile_calls, read_precompile_calls,
} = value; } = value;
@ -182,12 +177,7 @@ mod rlp {
header, header,
body: body:
HlBlockBody { HlBlockBody {
inner: inner: BlockBody { transactions, ommers, withdrawals },
BlockBody {
transactions,
ommers,
withdrawals,
},
sidecars, sidecars,
read_precompile_calls, read_precompile_calls,
}, },
@ -300,11 +290,7 @@ pub mod serde_bincode_compat {
} }
fn from_repr(repr: Self::BincodeRepr<'_>) -> Self { fn from_repr(repr: Self::BincodeRepr<'_>) -> Self {
let HlBlockBodyBincode { let HlBlockBodyBincode { inner, sidecars, read_precompile_calls } = repr;
inner,
sidecars,
read_precompile_calls,
} = repr;
Self { Self {
inner: BlockBody::from_repr(inner), inner: BlockBody::from_repr(inner),
sidecars: sidecars.map(|s| s.into_owned()), sidecars: sidecars.map(|s| s.into_owned()),
@ -317,18 +303,12 @@ pub mod serde_bincode_compat {
type BincodeRepr<'a> = HlBlockBincode<'a>; type BincodeRepr<'a> = HlBlockBincode<'a>;
fn as_repr(&self) -> Self::BincodeRepr<'_> { fn as_repr(&self) -> Self::BincodeRepr<'_> {
HlBlockBincode { HlBlockBincode { header: self.header.as_repr(), body: self.body.as_repr() }
header: self.header.as_repr(),
body: self.body.as_repr(),
}
} }
fn from_repr(repr: Self::BincodeRepr<'_>) -> Self { fn from_repr(repr: Self::BincodeRepr<'_>) -> Self {
let HlBlockBincode { header, body } = repr; let HlBlockBincode { header, body } = repr;
Self { Self { header: Header::from_repr(header), body: HlBlockBody::from_repr(body) }
header: Header::from_repr(header),
body: HlBlockBody::from_repr(body),
}
} }
} }
} }

View File

@ -1,7 +1,9 @@
use crate::{ use crate::{
chainspec::HlChainSpec, chainspec::HlChainSpec,
node::rpc::{HlEthApi, HlNodeCore}, node::{
node::{HlBlock, HlPrimitives}, rpc::{HlEthApi, HlNodeCore},
HlBlock, HlPrimitives,
},
}; };
use alloy_consensus::BlockHeader; use alloy_consensus::BlockHeader;
use alloy_primitives::B256; use alloy_primitives::B256;
@ -52,10 +54,7 @@ where
let block_hash = block.hash(); let block_hash = block.hash();
let excess_blob_gas = block.excess_blob_gas(); let excess_blob_gas = block.excess_blob_gas();
let timestamp = block.timestamp(); let timestamp = block.timestamp();
let blob_params = self let blob_params = self.provider().chain_spec().blob_params_at_timestamp(timestamp);
.provider()
.chain_spec()
.blob_params_at_timestamp(timestamp);
return block return block
.body() .body()
@ -163,10 +162,7 @@ where
.await .await
.map_err(Self::Error::from_eth_err)? .map_err(Self::Error::from_eth_err)?
.ok_or(EthApiError::HeaderNotFound(hash.into()))?; .ok_or(EthApiError::HeaderNotFound(hash.into()))?;
let blob_params = self let blob_params = self.provider().chain_spec().blob_params_at_timestamp(meta.timestamp);
.provider()
.chain_spec()
.blob_params_at_timestamp(meta.timestamp);
Ok(EthReceiptBuilder::new(&tx, meta, &receipt, &all_receipts, blob_params)?.build()) Ok(EthReceiptBuilder::new(&tx, meta, &receipt, &all_receipts, blob_params)?.build())
} }

View File

@ -67,7 +67,7 @@ where
) -> Result<HlTxEnv<TxEnv>, Self::Error> { ) -> Result<HlTxEnv<TxEnv>, Self::Error> {
// Ensure that if versioned hashes are set, they're not empty // Ensure that if versioned hashes are set, they're not empty
if request.blob_versioned_hashes.as_ref().is_some_and(|hashes| hashes.is_empty()) { if request.blob_versioned_hashes.as_ref().is_some_and(|hashes| hashes.is_empty()) {
return Err(RpcInvalidTransactionError::BlobTransactionMissingBlobHashes.into_eth_err()) return Err(RpcInvalidTransactionError::BlobTransactionMissingBlobHashes.into_eth_err());
} }
let tx_type = if request.authorization_list.is_some() { let tx_type = if request.authorization_list.is_some() {

View File

@ -37,9 +37,7 @@ where
type Validator = HlEngineValidator; type Validator = HlEngineValidator;
async fn build(self, ctx: &AddOnsContext<'_, Node>) -> eyre::Result<Self::Validator> { async fn build(self, ctx: &AddOnsContext<'_, Node>) -> eyre::Result<Self::Validator> {
Ok(HlEngineValidator::new(Arc::new( Ok(HlEngineValidator::new(Arc::new(ctx.config.chain.clone().as_ref().clone())))
ctx.config.chain.clone().as_ref().clone(),
)))
} }
} }
@ -52,9 +50,7 @@ pub struct HlEngineValidator {
impl HlEngineValidator { impl HlEngineValidator {
/// Instantiates a new validator. /// Instantiates a new validator.
pub fn new(chain_spec: Arc<HlChainSpec>) -> Self { pub fn new(chain_spec: Arc<HlChainSpec>) -> Self {
Self { Self { inner: HlExecutionPayloadValidator { inner: chain_spec } }
inner: HlExecutionPayloadValidator { inner: chain_spec },
}
} }
} }
@ -99,13 +95,9 @@ impl PayloadValidator for HlEngineValidator {
&self, &self,
payload: Self::ExecutionData, payload: Self::ExecutionData,
) -> Result<RecoveredBlock<Self::Block>, NewPayloadError> { ) -> Result<RecoveredBlock<Self::Block>, NewPayloadError> {
let sealed_block = self let sealed_block =
.inner self.inner.ensure_well_formed_payload(payload).map_err(NewPayloadError::other)?;
.ensure_well_formed_payload(payload) sealed_block.try_recover().map_err(|e| NewPayloadError::Other(e.into()))
.map_err(NewPayloadError::other)?;
sealed_block
.try_recover()
.map_err(|e| NewPayloadError::Other(e.into()))
} }
fn validate_block_post_execution_with_hashed_state( fn validate_block_post_execution_with_hashed_state(

View File

@ -49,7 +49,7 @@ where
request: TransactionRequest, request: TransactionRequest,
) -> Result<TransactionSigned, Self::Error> { ) -> Result<TransactionSigned, Self::Error> {
let Ok(tx) = request.build_typed_tx() else { let Ok(tx) = request.build_typed_tx() else {
return Err(EthApiError::TransactionConversionError) return Err(EthApiError::TransactionConversionError);
}; };
// Create an empty signature for the transaction. // Create an empty signature for the transaction.

View File

@ -51,9 +51,7 @@ fn fetch_spot_meta(chain_id: u64) -> Result<SpotMeta> {
Ok(serde_json::from_str(&response)?) Ok(serde_json::from_str(&response)?)
} }
pub(crate) fn erc20_contract_to_spot_token( pub(crate) fn erc20_contract_to_spot_token(chain_id: u64) -> Result<BTreeMap<Address, SpotId>> {
chain_id: u64,
) -> Result<BTreeMap<Address, SpotId>> {
let meta = fetch_spot_meta(chain_id)?; let meta = fetch_spot_meta(chain_id)?;
let mut map = BTreeMap::new(); let mut map = BTreeMap::new();
for token in &meta.tokens { for token in &meta.tokens {

View File

@ -1,7 +1,4 @@
use crate::{ use crate::{node::types::ReadPrecompileCalls, HlBlock, HlBlockBody, HlPrimitives};
node::types::ReadPrecompileCalls,
{HlBlock, HlBlockBody, HlPrimitives},
};
use alloy_consensus::BlockHeader; use alloy_consensus::BlockHeader;
use alloy_primitives::Bytes; use alloy_primitives::Bytes;
use reth_chainspec::EthereumHardforks; use reth_chainspec::EthereumHardforks;
@ -31,9 +28,8 @@ impl HlStorage {
where where
Provider: DBProvider<Tx: DbTxMut>, Provider: DBProvider<Tx: DbTxMut>,
{ {
let mut precompile_calls_cursor = provider let mut precompile_calls_cursor =
.tx_ref() provider.tx_ref().cursor_write::<tables::BlockReadPrecompileCalls>()?;
.cursor_write::<tables::BlockReadPrecompileCalls>()?;
for (block_number, read_precompile_calls) in inputs { for (block_number, read_precompile_calls) in inputs {
let Some(read_precompile_calls) = read_precompile_calls else { let Some(read_precompile_calls) = read_precompile_calls else {
@ -60,9 +56,8 @@ impl HlStorage {
Provider: DBProvider<Tx: DbTx>, Provider: DBProvider<Tx: DbTx>,
{ {
let mut read_precompile_calls = Vec::with_capacity(inputs.len()); let mut read_precompile_calls = Vec::with_capacity(inputs.len());
let mut precompile_calls_cursor = provider let mut precompile_calls_cursor =
.tx_ref() provider.tx_ref().cursor_read::<tables::BlockReadPrecompileCalls>()?;
.cursor_read::<tables::BlockReadPrecompileCalls>()?;
for (header, _transactions) in inputs { for (header, _transactions) in inputs {
let precompile_calls = precompile_calls_cursor let precompile_calls = precompile_calls_cursor
@ -91,11 +86,7 @@ where
for (block_number, body) in bodies { for (block_number, body) in bodies {
match body { match body {
Some(HlBlockBody { Some(HlBlockBody { inner, sidecars: _, read_precompile_calls: rpc }) => {
inner,
sidecars: _,
read_precompile_calls: rpc,
}) => {
eth_bodies.push((block_number, Some(inner))); eth_bodies.push((block_number, Some(inner)));
read_precompile_calls.push((block_number, rpc)); read_precompile_calls.push((block_number, rpc));
} }
@ -118,11 +109,8 @@ where
block: u64, block: u64,
remove_from: StorageLocation, remove_from: StorageLocation,
) -> ProviderResult<()> { ) -> ProviderResult<()> {
self.0 self.0.remove_block_bodies_above(provider, block, remove_from)?;
.remove_block_bodies_above(provider, block, remove_from)?; provider.tx_ref().unwind_table_by_num::<tables::BlockReadPrecompileCalls>(block)?;
provider
.tx_ref()
.unwind_table_by_num::<tables::BlockReadPrecompileCalls>(block)?;
Ok(()) Ok(())
} }

View File

@ -20,11 +20,7 @@ mod reth_compat;
impl From<ReadPrecompileCalls> for ReadPrecompileMap { impl From<ReadPrecompileCalls> for ReadPrecompileMap {
fn from(calls: ReadPrecompileCalls) -> Self { fn from(calls: ReadPrecompileCalls) -> Self {
calls calls.0.into_iter().map(|(address, calls)| (address, calls.into_iter().collect())).collect()
.0
.into_iter()
.map(|(address, calls)| (address, calls.into_iter().collect()))
.collect()
} }
} }

View File

@ -1,16 +1,18 @@
//! Copy of reth codebase to preserve serialization compatibility //! Copy of reth codebase to preserve serialization compatibility
use alloy_consensus::{ use alloy_consensus::{Header, Signed, TxEip1559, TxEip2930, TxEip4844, TxEip7702, TxLegacy};
Header, Signed, TxEip1559, TxEip2930, TxEip4844, TxEip7702, TxLegacy,
};
use alloy_primitives::{Address, BlockHash, Signature, TxKind, U256}; use alloy_primitives::{Address, BlockHash, Signature, TxKind, U256};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::collections::BTreeMap; use std::{
use std::sync::{Arc, LazyLock, Mutex}; collections::BTreeMap,
sync::{Arc, LazyLock, Mutex},
};
use tracing::info; use tracing::info;
use crate::node::spot_meta::{erc20_contract_to_spot_token, SpotId};
use crate::{ use crate::{
node::types::{ReadPrecompileCalls, SystemTx}, node::{
spot_meta::{erc20_contract_to_spot_token, SpotId},
types::{ReadPrecompileCalls, SystemTx},
},
HlBlock, HlBlockBody, HlBlock, HlBlockBody,
}; };
@ -98,10 +100,7 @@ fn system_tx_to_reth_transaction(
break spot.to_s(); break spot.to_s();
} }
info!( info!("Contract not found: {:?} from spot mapping, fetching again...", to);
"Contract not found: {:?} from spot mapping, fetching again...",
to
);
*EVM_MAP.lock().unwrap() = erc20_contract_to_spot_token(chain_id).unwrap(); *EVM_MAP.lock().unwrap() = erc20_contract_to_spot_token(chain_id).unwrap();
} }
}; };
@ -118,17 +117,8 @@ impl SealedBlock {
chain_id: u64, chain_id: u64,
) -> HlBlock { ) -> HlBlock {
let mut merged_txs = vec![]; let mut merged_txs = vec![];
merged_txs.extend( merged_txs.extend(system_txs.iter().map(|tx| system_tx_to_reth_transaction(tx, chain_id)));
system_txs merged_txs.extend(self.body.transactions.iter().map(|tx| tx.to_reth_transaction()));
.iter()
.map(|tx| system_tx_to_reth_transaction(tx, chain_id)),
);
merged_txs.extend(
self.body
.transactions
.iter()
.map(|tx| tx.to_reth_transaction()),
);
let block_body = HlBlockBody { let block_body = HlBlockBody {
inner: reth_primitives::BlockBody { inner: reth_primitives::BlockBody {
transactions: merged_txs, transactions: merged_txs,
@ -138,10 +128,7 @@ impl SealedBlock {
sidecars: None, sidecars: None,
read_precompile_calls: Some(read_precompile_calls), read_precompile_calls: Some(read_precompile_calls),
}; };
HlBlock { HlBlock { header: self.header.header.clone(), body: block_body }
header: self.header.header.clone(),
body: block_body,
}
} }
} }