mirror of https://github.com/hl-archive-node/nanoreth.git
synced 2025-12-06 02:49:55 +00:00
chore: Add reth rustfmt
rustfmt.toml | 11 (new file)
@@ -0,0 +1,11 @@
reorder_imports = true
imports_granularity = "Crate"
use_small_heuristics = "Max"
comment_width = 100
wrap_comments = true
binop_separator = "Back"
trailing_comma = "Vertical"
trailing_semicolon = false
use_field_init_shorthand = true
format_code_in_doc_comments = true
doc_comment_code_block_width = 100
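The effect of this configuration shows up throughout the hunks below. As a minimal illustration (a sketch reusing the chainspec parser change from this same commit, not additional code): use_small_heuristics = "Max" lets rustfmt collapse short struct literals onto a single line, imports_granularity = "Crate" merges imports per crate, and binop_separator = "Back" keeps binary operators such as | and + at the end of a line.

// Before rustfmt:
"mainnet" => Ok(Arc::new(HlChainSpec {
    inner: hl_mainnet(),
})),
// After `cargo fmt` with this rustfmt.toml:
"mainnet" => Ok(Arc::new(HlChainSpec { inner: hl_mainnet() })),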
@@ -14,19 +14,10 @@ pub static HL_HARDFORKS: LazyLock<ChainHardforks> = LazyLock::new(|| {
(EthereumHardfork::Homestead.boxed(), ForkCondition::Block(0)),
(EthereumHardfork::Dao.boxed(), ForkCondition::Block(0)),
(EthereumHardfork::Tangerine.boxed(), ForkCondition::Block(0)),
(
EthereumHardfork::SpuriousDragon.boxed(),
ForkCondition::Block(0),
),
(EthereumHardfork::SpuriousDragon.boxed(), ForkCondition::Block(0)),
(EthereumHardfork::Byzantium.boxed(), ForkCondition::Block(0)),
(
EthereumHardfork::Constantinople.boxed(),
ForkCondition::Block(0),
),
(
EthereumHardfork::Petersburg.boxed(),
ForkCondition::Block(0),
),
(EthereumHardfork::Constantinople.boxed(), ForkCondition::Block(0)),
(EthereumHardfork::Petersburg.boxed(), ForkCondition::Block(0)),
(EthereumHardfork::Istanbul.boxed(), ForkCondition::Block(0)),
(EthereumHardfork::Berlin.boxed(), ForkCondition::Block(0)),
(EthereumHardfork::London.boxed(), ForkCondition::Block(0)),
@@ -38,14 +29,8 @@ pub static HL_HARDFORKS: LazyLock<ChainHardforks> = LazyLock::new(|| {
total_difficulty: U256::ZERO,
},
),
(
EthereumHardfork::Shanghai.boxed(),
ForkCondition::Timestamp(0),
),
(
EthereumHardfork::Cancun.boxed(),
ForkCondition::Timestamp(0),
),
(EthereumHardfork::Shanghai.boxed(), ForkCondition::Timestamp(0)),
(EthereumHardfork::Cancun.boxed(), ForkCondition::Timestamp(0)),
])
});
@@ -26,9 +26,7 @@ impl ChainSpecParser for HlChainSpecParser {
/// Currently only mainnet is supported.
pub fn chain_value_parser(s: &str) -> eyre::Result<Arc<HlChainSpec>> {
match s {
"mainnet" => Ok(Arc::new(HlChainSpec {
inner: hl_mainnet(),
})),
"mainnet" => Ok(Arc::new(HlChainSpec { inner: hl_mainnet() })),
_ => Err(eyre::eyre!("Unsupported chain: {}", s)),
}
}
@@ -32,10 +32,8 @@ where
number: BlockNumber,
) -> Result<(B256, B256), HlConsensusErr> {
let current_head = self.provider.best_block_number()?;
let current_hash = self
.provider
.block_hash(current_head)?
.ok_or(HlConsensusErr::HeadHashNotFound)?;
let current_hash =
self.provider.block_hash(current_head)?.ok_or(HlConsensusErr::HeadHashNotFound)?;
match number.cmp(&current_head) {
Ordering::Greater => Ok((hash, current_hash)),
@@ -64,11 +62,7 @@ mod tests {
fn new(head_number: BlockNumber, head_hash: B256) -> Self {
let mut blocks = HashMap::new();
blocks.insert(head_number, head_hash);
Self {
blocks,
head_number,
head_hash,
}
Self { blocks, head_number, head_hash }
}
}
@@ -88,10 +82,7 @@ mod tests {
impl BlockNumReader for MockProvider {
fn chain_info(&self) -> Result<ChainInfo, ProviderError> {
Ok(ChainInfo {
best_hash: self.head_hash,
best_number: self.head_number,
})
Ok(ChainInfo { best_hash: self.head_hash, best_number: self.head_number })
}
fn best_block_number(&self) -> Result<BlockNumber, ProviderError> {
@@ -103,10 +94,7 @@ mod tests {
}
fn block_number(&self, hash: B256) -> Result<Option<BlockNumber>, ProviderError> {
Ok(self
.blocks
.iter()
.find_map(|(num, h)| (*h == hash).then_some(*num)))
Ok(self.blocks.iter().find_map(|(num, h)| (*h == hash).then_some(*num)))
}
}
@@ -2,7 +2,8 @@ use revm::{
context::{ContextSetters, Evm as EvmCtx},
context_interface::ContextTr,
handler::{
instructions::{EthInstructions, InstructionProvider}, EthPrecompiles, EvmTr, PrecompileProvider
instructions::{EthInstructions, InstructionProvider},
EthPrecompiles, EvmTr, PrecompileProvider,
},
inspector::{InspectorEvmTr, JournalExt},
interpreter::{interpreter::EthInterpreter, Interpreter, InterpreterAction, InterpreterTypes},
@@ -18,9 +18,7 @@ pub struct HlHandler<EVM, ERROR, FRAME> {
impl<EVM, ERROR, FRAME> HlHandler<EVM, ERROR, FRAME> {
pub fn new() -> Self {
Self {
mainnet: MainnetHandler::default(),
}
Self { mainnet: MainnetHandler::default() }
}
}
@@ -87,14 +85,12 @@ where
logs,
output,
},
SuccessOrHalt::Revert => ExecutionResult::Revert {
gas_used: final_gas_used,
output: output.into_data(),
},
SuccessOrHalt::Halt(reason) => ExecutionResult::Halt {
reason,
gas_used: final_gas_used,
},
SuccessOrHalt::Revert => {
ExecutionResult::Revert { gas_used: final_gas_used, output: output.into_data() }
}
SuccessOrHalt::Halt(reason) => {
ExecutionResult::Halt { reason, gas_used: final_gas_used }
}
// Only two internal return flags.
flag @ (SuccessOrHalt::FatalExternalError | SuccessOrHalt::Internal(_)) => {
panic!(
@@ -27,9 +27,7 @@ impl<T: Transaction> HlTxEnv<T> {
impl Default for HlTxEnv<TxEnv> {
fn default() -> Self {
Self {
base: TxEnv::default(),
}
Self { base: TxEnv::default() }
}
}
@@ -133,10 +131,7 @@ impl FromRecoveredTx<TransactionSigned> for HlTxEnv<TxEnv> {
fn from_recovered_tx(tx: &TransactionSigned, sender: Address) -> Self {
if let Some(gas_price) = tx.gas_price() {
if gas_price == 0 {
return Self::new(TxEnv::from_recovered_tx(
tx,
s_to_address(tx.signature().s()),
));
return Self::new(TxEnv::from_recovered_tx(tx, s_to_address(tx.signature().s())));
}
}
@@ -40,19 +40,19 @@ impl HlHardfork {
match_hardfork(
fork,
|fork| match fork {
EthereumHardfork::Frontier
| EthereumHardfork::Homestead
| EthereumHardfork::Tangerine
| EthereumHardfork::SpuriousDragon
| EthereumHardfork::Byzantium
| EthereumHardfork::Constantinople
| EthereumHardfork::Petersburg
| EthereumHardfork::Istanbul
| EthereumHardfork::MuirGlacier
| EthereumHardfork::Berlin
| EthereumHardfork::London
| EthereumHardfork::Shanghai
| EthereumHardfork::Cancun => Some(0),
EthereumHardfork::Frontier |
EthereumHardfork::Homestead |
EthereumHardfork::Tangerine |
EthereumHardfork::SpuriousDragon |
EthereumHardfork::Byzantium |
EthereumHardfork::Constantinople |
EthereumHardfork::Petersburg |
EthereumHardfork::Istanbul |
EthereumHardfork::MuirGlacier |
EthereumHardfork::Berlin |
EthereumHardfork::London |
EthereumHardfork::Shanghai |
EthereumHardfork::Cancun => Some(0),
_ => None,
},
|fork| match fork {
@@ -68,24 +68,12 @@ impl HlHardfork {
(EthereumHardfork::Frontier.boxed(), ForkCondition::Block(0)),
(EthereumHardfork::Homestead.boxed(), ForkCondition::Block(0)),
(EthereumHardfork::Tangerine.boxed(), ForkCondition::Block(0)),
(
EthereumHardfork::SpuriousDragon.boxed(),
ForkCondition::Block(0),
),
(EthereumHardfork::SpuriousDragon.boxed(), ForkCondition::Block(0)),
(EthereumHardfork::Byzantium.boxed(), ForkCondition::Block(0)),
(
EthereumHardfork::Constantinople.boxed(),
ForkCondition::Block(0),
),
(
EthereumHardfork::Petersburg.boxed(),
ForkCondition::Block(0),
),
(EthereumHardfork::Constantinople.boxed(), ForkCondition::Block(0)),
(EthereumHardfork::Petersburg.boxed(), ForkCondition::Block(0)),
(EthereumHardfork::Istanbul.boxed(), ForkCondition::Block(0)),
(
EthereumHardfork::MuirGlacier.boxed(),
ForkCondition::Block(0),
),
(EthereumHardfork::MuirGlacier.boxed(), ForkCondition::Block(0)),
(EthereumHardfork::Berlin.boxed(), ForkCondition::Block(0)),
(EthereumHardfork::London.boxed(), ForkCondition::Block(0)),
(EthereumHardfork::Shanghai.boxed(), ForkCondition::Block(0)),
src/main.rs | 12
@@ -2,7 +2,7 @@ use clap::{Args, Parser};
use reth::builder::NodeHandle;
use reth_hl::{
chainspec::parser::HlChainSpecParser,
node::{cli::Cli, HlNode, storage::tables::Tables},
node::{cli::Cli, storage::tables::Tables, HlNode},
};
// We use jemalloc for performance reasons
@@ -26,14 +26,10 @@ fn main() -> eyre::Result<()> {
Cli::<HlChainSpecParser, NoArgs>::parse().run(|builder, _| async move {
builder.builder.database.create_tables_for::<Tables>()?;
let (node, engine_handle_tx) = HlNode::new();
let NodeHandle {
node,
node_exit_future: exit_future,
} = builder.node(node).launch().await?;
let NodeHandle { node, node_exit_future: exit_future } =
builder.node(node).launch().await?;
engine_handle_tx
.send(node.beacon_engine_handle.clone())
.unwrap();
engine_handle_tx.send(node.beacon_engine_handle.clone()).unwrap();
exit_future.await
})?;
@@ -1,8 +1,4 @@
use crate::{
hardforks::HlHardforks,
node::HlNode,
{HlBlock, HlBlockBody, HlPrimitives},
};
use crate::{hardforks::HlHardforks, node::HlNode, HlBlock, HlBlockBody, HlPrimitives};
use alloy_consensus::BlockHeader as _;
use alloy_eips::eip7685::Requests;
use reth::{
@@ -50,10 +46,7 @@ pub struct HlConsensus<ChainSpec> {
impl<ChainSpec: EthChainSpec + HlHardforks> HlConsensus<ChainSpec> {
/// Create a new instance of [`HlConsensus`]
pub fn new(chain_spec: Arc<ChainSpec>) -> Self {
Self {
inner: EthBeaconConsensus::new(chain_spec.clone()),
chain_spec,
}
Self { inner: EthBeaconConsensus::new(chain_spec.clone()), chain_spec }
}
}
@@ -166,16 +159,11 @@ where
// - Filter out system transactions for receipts check
// Check if gas used matches the value set in header.
let cumulative_gas_used = receipts
.last()
.map(|receipt| receipt.cumulative_gas_used())
.unwrap_or(0);
let cumulative_gas_used =
receipts.last().map(|receipt| receipt.cumulative_gas_used()).unwrap_or(0);
if block.header().gas_used() != cumulative_gas_used {
return Err(ConsensusError::BlockGasUsed {
gas: GotExpected {
got: cumulative_gas_used,
expected: block.header().gas_used(),
},
gas: GotExpected { got: cumulative_gas_used, expected: block.header().gas_used() },
gas_spent_by_tx: gas_spent_by_transactions(receipts),
});
}
@@ -185,9 +173,8 @@ where
// transaction This was replaced with is_success flag.
// See more about EIP here: https://eips.ethereum.org/EIPS/eip-658
if chain_spec.is_byzantium_active_at_block(block.header().number()) {
let receipts_for_root = receipts
.iter().filter(|&r| r.cumulative_gas_used() != 0).cloned()
.collect::<Vec<_>>();
let receipts_for_root =
receipts.iter().filter(|&r| r.cumulative_gas_used() != 0).cloned().collect::<Vec<_>>();
if let Err(error) = verify_receipts(
block.header().receipts_root(),
block.header().logs_bloom(),
@@ -14,16 +14,11 @@ pub(super) fn verify_receipts<R: Receipt>(
receipts: &[R],
) -> Result<(), ConsensusError> {
// Calculate receipts root.
let receipts_with_bloom = receipts
.iter()
.map(TxReceipt::with_bloom_ref)
.collect::<Vec<_>>();
let receipts_with_bloom = receipts.iter().map(TxReceipt::with_bloom_ref).collect::<Vec<_>>();
let receipts_root = calculate_receipt_root(&receipts_with_bloom);
// Calculate header logs bloom.
let logs_bloom = receipts_with_bloom
.iter()
.fold(Bloom::ZERO, |bloom, r| bloom | r.bloom_ref());
let logs_bloom = receipts_with_bloom.iter().fold(Bloom::ZERO, |bloom, r| bloom | r.bloom_ref());
compare_receipts_root_and_logs_bloom(
receipts_root,
@@ -45,21 +40,13 @@ pub(super) fn compare_receipts_root_and_logs_bloom(
) -> Result<(), ConsensusError> {
if calculated_receipts_root != expected_receipts_root {
return Err(ConsensusError::BodyReceiptRootDiff(
GotExpected {
got: calculated_receipts_root,
expected: expected_receipts_root,
}
.into(),
GotExpected { got: calculated_receipts_root, expected: expected_receipts_root }.into(),
));
}
if calculated_logs_bloom != expected_logs_bloom {
return Err(ConsensusError::BodyBloomLogDiff(
GotExpected {
got: calculated_logs_bloom,
expected: expected_logs_bloom,
}
.into(),
GotExpected { got: calculated_logs_bloom, expected: expected_logs_bloom }.into(),
));
}
@@ -61,12 +61,7 @@ where
execution_ctx: ctx,
parent,
transactions,
output:
BlockExecutionResult {
receipts,
requests,
gas_used,
},
output: BlockExecutionResult { receipts, requests, gas_used },
state_root,
..
} = input;
@@ -74,16 +69,10 @@ where
let timestamp = evm_env.block_env.timestamp;
// Filter out system tx receipts
let transactions_for_root: Vec<TransactionSigned> = transactions
.iter()
.filter(|t| !is_system_transaction(t))
.cloned()
.collect::<Vec<_>>();
let receipts_for_root: Vec<Receipt> = receipts
.iter()
.filter(|r| r.cumulative_gas_used() != 0)
.cloned()
.collect::<Vec<_>>();
let transactions_for_root: Vec<TransactionSigned> =
transactions.iter().filter(|t| !is_system_transaction(t)).cloned().collect::<Vec<_>>();
let receipts_for_root: Vec<Receipt> =
receipts.iter().filter(|r| r.cumulative_gas_used() != 0).cloned().collect::<Vec<_>>();
let transactions_root = proofs::calculate_transaction_root(&transactions_for_root);
let receipts_root = Receipt::calculate_receipt_root_no_memo(&receipts_for_root);
@@ -92,16 +81,10 @@ where
let withdrawals = inner
.chain_spec
.is_shanghai_active_at_timestamp(timestamp)
.then(|| {
ctx.ctx
.withdrawals
.map(|w| w.into_owned())
.unwrap_or_default()
});
.then(|| ctx.ctx.withdrawals.map(|w| w.into_owned()).unwrap_or_default());
let withdrawals_root = withdrawals
.as_deref()
.map(|w| proofs::calculate_withdrawals_root(w));
let withdrawals_root =
withdrawals.as_deref().map(|w| proofs::calculate_withdrawals_root(w));
let requests_hash = inner
.chain_spec
.is_prague_active_at_timestamp(timestamp)
@@ -112,16 +95,9 @@ where
// only determine cancun fields when active
if inner.chain_spec.is_cancun_active_at_timestamp(timestamp) {
blob_gas_used = Some(
transactions
.iter()
.map(|tx| tx.blob_gas_used().unwrap_or_default())
.sum(),
);
excess_blob_gas = if inner
.chain_spec
.is_cancun_active_at_timestamp(parent.timestamp)
{
blob_gas_used =
Some(transactions.iter().map(|tx| tx.blob_gas_used().unwrap_or_default()).sum());
excess_blob_gas = if inner.chain_spec.is_cancun_active_at_timestamp(parent.timestamp) {
parent.maybe_next_block_excess_blob_gas(
inner.chain_spec.blob_params_at_timestamp(timestamp),
)
@@ -160,11 +136,7 @@ where
Ok(Self::Block {
header,
body: HlBlockBody {
inner: BlockBody {
transactions,
ommers: Default::default(),
withdrawals,
},
inner: BlockBody { transactions, ommers: Default::default(), withdrawals },
sidecars: None,
read_precompile_calls: Some(read_precompile_calls),
},
@@ -174,9 +146,7 @@ where
impl HlBlockAssembler {
pub fn new(chain_spec: Arc<HlChainSpec>) -> Self {
Self {
inner: EthBlockAssembler::new(chain_spec),
}
Self { inner: EthBlockAssembler::new(chain_spec) }
}
}
@@ -240,11 +210,7 @@ impl<R, Spec, EvmFactory> HlBlockExecutorFactory<R, Spec, EvmFactory> {
/// Creates a new [`HlBlockExecutorFactory`] with the given spec, [`EvmFactory`], and
/// [`ReceiptBuilder`].
pub const fn new(receipt_builder: R, spec: Spec, evm_factory: EvmFactory) -> Self {
Self {
receipt_builder,
spec,
evm_factory,
}
Self { receipt_builder, spec, evm_factory }
}
/// Exposes the receipt builder.
@@ -327,9 +293,8 @@ where
);
// configure evm env based on parent block
let mut cfg_env = CfgEnv::new()
.with_chain_id(self.chain_spec().chain().id())
.with_spec(spec);
let mut cfg_env =
CfgEnv::new().with_chain_id(self.chain_spec().chain().id()).with_spec(spec);
if let Some(blob_params) = &blob_params {
cfg_env.set_blob_max_count(blob_params.max_blob_count);
@@ -342,15 +307,9 @@ where
// derive the EIP-4844 blob fees from the header's `excess_blob_gas` and the current
// blobparams
let blob_excess_gas_and_price =
header
.excess_blob_gas
.zip(blob_params)
.map(|(excess_blob_gas, params)| {
header.excess_blob_gas.zip(blob_params).map(|(excess_blob_gas, params)| {
let blob_gasprice = params.calc_blob_fee(excess_blob_gas);
BlobExcessGasAndPrice {
excess_blob_gas,
blob_gasprice,
}
BlobExcessGasAndPrice { excess_blob_gas, blob_gasprice }
});
let eth_spec = spec.into_eth_spec();
@@ -359,16 +318,8 @@ where
number: header.number(),
beneficiary: header.beneficiary(),
timestamp: header.timestamp(),
difficulty: if eth_spec >= SpecId::MERGE {
U256::ZERO
} else {
header.difficulty()
},
prevrandao: if eth_spec >= SpecId::MERGE {
header.mix_hash()
} else {
None
},
difficulty: if eth_spec >= SpecId::MERGE { U256::ZERO } else { header.difficulty() },
prevrandao: if eth_spec >= SpecId::MERGE { header.mix_hash() } else { None },
gas_limit: header.gas_limit(),
basefee: header.base_fee_per_gas().unwrap_or_default(),
blob_excess_gas_and_price,
@@ -390,23 +341,20 @@ where
);
// configure evm env based on parent block
let cfg_env = CfgEnv::new()
.with_chain_id(self.chain_spec().chain().id())
.with_spec(spec_id);
let cfg_env =
CfgEnv::new().with_chain_id(self.chain_spec().chain().id()).with_spec(spec_id);
// if the parent block did not have excess blob gas (i.e. it was pre-cancun), but it is
// cancun now, we need to set the excess blob gas to the default value(0)
let blob_excess_gas_and_price = parent
.maybe_next_block_excess_blob_gas(
self.chain_spec()
.blob_params_at_timestamp(attributes.timestamp),
self.chain_spec().blob_params_at_timestamp(attributes.timestamp),
)
.or_else(|| (spec_id.into_eth_spec().is_enabled_in(SpecId::CANCUN)).then_some(0))
.map(|gas| BlobExcessGasAndPrice::new(gas, false));
let mut basefee = parent.next_block_base_fee(
self.chain_spec()
.base_fee_params_at_timestamp(attributes.timestamp),
self.chain_spec().base_fee_params_at_timestamp(attributes.timestamp),
);
let mut gas_limit = U256::from(parent.gas_limit);
@@ -486,9 +434,9 @@ where
/// Map the latest active hardfork at the given timestamp or block number to a [`HlSpecId`].
pub fn revm_spec_by_timestamp_and_block_number(
chain_spec: impl HlHardforks,
timestamp: u64,
block_number: u64,
_chain_spec: impl HlHardforks,
_timestamp: u64,
_block_number: u64,
) -> HlSpecId {
HlSpecId::V1
}
@@ -1,5 +1,4 @@
use super::config::HlBlockExecutionCtx;
use super::patch::patch_mainnet_after_tx;
use super::{config::HlBlockExecutionCtx, patch::patch_mainnet_after_tx};
use crate::{
evm::transaction::HlTxEnv,
hardforks::HlHardforks,
@@ -24,7 +23,10 @@ use revm::{
context::{
result::{ExecutionResult, ResultAndState},
TxEnv,
}, precompile::{PrecompileError, PrecompileOutput, PrecompileResult}, primitives::HashMap, DatabaseCommit
},
precompile::{PrecompileError, PrecompileOutput, PrecompileResult},
primitives::HashMap,
DatabaseCommit,
};
pub fn is_system_transaction(tx: &TransactionSigned) -> bool {
@@ -59,31 +61,20 @@ fn run_precompile(
data: &[u8],
gas_limit: u64,
) -> PrecompileResult {
let input = ReadPrecompileInput {
input: Bytes::copy_from_slice(data),
gas_limit,
};
let input = ReadPrecompileInput { input: Bytes::copy_from_slice(data), gas_limit };
let Some(get) = precompile_calls.get(&input) else {
return Err(PrecompileError::OutOfGas);
};
match *get {
ReadPrecompileResult::Ok {
gas_used,
ref bytes,
} => {
Ok(PrecompileOutput {
gas_used,
bytes: bytes.clone(),
})
ReadPrecompileResult::Ok { gas_used, ref bytes } => {
Ok(PrecompileOutput { gas_used, bytes: bytes.clone() })
}
ReadPrecompileResult::OutOfGas => {
// Use all the gas passed to this precompile
Err(PrecompileError::OutOfGas)
}
ReadPrecompileResult::Error => {
Err(PrecompileError::OutOfGas)
}
ReadPrecompileResult::Error => Err(PrecompileError::OutOfGas),
ReadPrecompileResult::UnexpectedError => panic!("unexpected precompile error"),
}
}
@@ -124,14 +115,7 @@ where
}))
});
}
Self {
spec,
evm,
gas_used: 0,
receipts: vec![],
receipt_builder,
ctx,
}
Self { spec, evm, gas_used: 0, receipts: vec![], receipt_builder, ctx }
}
}
@@ -180,13 +164,11 @@ where
let block_available_gas = self.evm.block().gas_limit - self.gas_used;
if tx.tx().gas_limit() > block_available_gas {
return Err(
BlockValidationError::TransactionGasLimitMoreThanAvailableBlockGas {
return Err(BlockValidationError::TransactionGasLimitMoreThanAvailableBlockGas {
transaction_gas_limit: tx.tx().gas_limit(),
block_available_gas,
}
.into(),
);
.into());
}
let result_and_state = self
.evm
@@ -207,8 +189,7 @@ where
&mut state,
)?;
self.receipts
.push(self.receipt_builder.build_receipt(ReceiptBuilderCtx {
self.receipts.push(self.receipt_builder.build_receipt(ReceiptBuilderCtx {
tx: tx.tx(),
evm: &self.evm,
result,
@@ -1,13 +1,11 @@
use super::HlEvm;
use crate::{
evm::{
use crate::evm::{
api::{
builder::HlBuilder,
ctx::{DefaultHl, HlContext},
},
spec::HlSpecId,
transaction::HlTxEnv,
},
};
use reth_evm::{precompiles::PrecompilesMap, EvmEnv, EvmFactory};
use reth_revm::{Context, Database};
@@ -117,12 +117,7 @@ where
}
fn finish(self) -> (Self::DB, EvmEnv<Self::Spec>) {
let Context {
block: block_env,
cfg: cfg_env,
journaled_state,
..
} = self.inner.0.ctx;
let Context { block: block_env, cfg: cfg_env, journaled_state, .. } = self.inner.0.ctx;
(journaled_state.database, EvmEnv { block_env, cfg_env })
}
@@ -5,114 +5,24 @@ use revm::{primitives::HashMap, state::Account};
/// Applies storage patches to the state after a transaction is executed.
/// See https://github.com/hyperliquid-dex/hyper-evm-sync/commit/39047242b6260f7764527a2f5057dd9c3a75aa89 for more details.
static MAINNET_PATCHES_AFTER_TX: &[(u64, u64, bool, Address)] = &[
(
1_467_569,
0,
false,
address!("0x33f6fe38c55cb100ce27b3138e5d2d041648364f"),
),
(
1_467_631,
0,
false,
address!("0x33f6fe38c55cb100ce27b3138e5d2d041648364f"),
),
(
1_499_313,
2,
false,
address!("0xe27bfc0a812b38927ff646f24af9149f45deb550"),
),
(
1_499_406,
0,
false,
address!("0xe27bfc0a812b38927ff646f24af9149f45deb550"),
),
(
1_499_685,
0,
false,
address!("0xfee3932b75a87e86930668a6ab3ed43b404c8a30"),
),
(
1_514_843,
0,
false,
address!("0x723e5fbbeed025772a91240fd0956a866a41a603"),
),
(
1_514_936,
0,
false,
address!("0x723e5fbbeed025772a91240fd0956a866a41a603"),
),
(
1_530_529,
2,
false,
address!("0xa694e8fd8f4a177dd23636d838e9f1fb2138d87a"),
),
(
1_530_622,
2,
false,
address!("0xa694e8fd8f4a177dd23636d838e9f1fb2138d87a"),
),
(
1_530_684,
3,
false,
address!("0xa694e8fd8f4a177dd23636d838e9f1fb2138d87a"),
),
(
1_530_777,
3,
false,
address!("0xa694e8fd8f4a177dd23636d838e9f1fb2138d87a"),
),
(
1_530_839,
2,
false,
address!("0x692a343fc401a7755f8fc2facf61af426adaf061"),
),
(
1_530_901,
0,
false,
address!("0xfd9716f16596715ce765dabaee11787870e04b8a"),
),
(
1_530_994,
3,
false,
address!("0xfd9716f16596715ce765dabaee11787870e04b8a"),
),
(
1_531_056,
4,
false,
address!("0xdc67c2b8349ca20f58760e08371fc9271e82b5a4"),
),
(
1_531_149,
0,
false,
address!("0xdc67c2b8349ca20f58760e08371fc9271e82b5a4"),
),
(
1_531_211,
3,
false,
address!("0xdc67c2b8349ca20f58760e08371fc9271e82b5a4"),
),
(
1_531_366,
1,
false,
address!("0x9a90a517d27a9e60e454c96fefbbe94ff244ed6f"),
),
(1_467_569, 0, false, address!("0x33f6fe38c55cb100ce27b3138e5d2d041648364f")),
(1_467_631, 0, false, address!("0x33f6fe38c55cb100ce27b3138e5d2d041648364f")),
(1_499_313, 2, false, address!("0xe27bfc0a812b38927ff646f24af9149f45deb550")),
(1_499_406, 0, false, address!("0xe27bfc0a812b38927ff646f24af9149f45deb550")),
(1_499_685, 0, false, address!("0xfee3932b75a87e86930668a6ab3ed43b404c8a30")),
(1_514_843, 0, false, address!("0x723e5fbbeed025772a91240fd0956a866a41a603")),
(1_514_936, 0, false, address!("0x723e5fbbeed025772a91240fd0956a866a41a603")),
(1_530_529, 2, false, address!("0xa694e8fd8f4a177dd23636d838e9f1fb2138d87a")),
(1_530_622, 2, false, address!("0xa694e8fd8f4a177dd23636d838e9f1fb2138d87a")),
(1_530_684, 3, false, address!("0xa694e8fd8f4a177dd23636d838e9f1fb2138d87a")),
(1_530_777, 3, false, address!("0xa694e8fd8f4a177dd23636d838e9f1fb2138d87a")),
(1_530_839, 2, false, address!("0x692a343fc401a7755f8fc2facf61af426adaf061")),
(1_530_901, 0, false, address!("0xfd9716f16596715ce765dabaee11787870e04b8a")),
(1_530_994, 3, false, address!("0xfd9716f16596715ce765dabaee11787870e04b8a")),
(1_531_056, 4, false, address!("0xdc67c2b8349ca20f58760e08371fc9271e82b5a4")),
(1_531_149, 0, false, address!("0xdc67c2b8349ca20f58760e08371fc9271e82b5a4")),
(1_531_211, 3, false, address!("0xdc67c2b8349ca20f58760e08371fc9271e82b5a4")),
(1_531_366, 1, false, address!("0x9a90a517d27a9e60e454c96fefbbe94ff244ed6f")),
];
pub(crate) fn patch_mainnet_after_tx(
@@ -53,17 +53,9 @@ pub struct HlNode {
}
impl HlNode {
pub fn new() -> (
Self,
oneshot::Sender<BeaconConsensusEngineHandle<HlPayloadTypes>>,
) {
pub fn new() -> (Self, oneshot::Sender<BeaconConsensusEngineHandle<HlPayloadTypes>>) {
let (tx, rx) = oneshot::channel();
(
Self {
engine_handle_rx: Arc::new(Mutex::new(Some(rx))),
},
tx,
)
(Self { engine_handle_rx: Arc::new(Mutex::new(Some(rx))) }, tx)
}
}
@@ -86,9 +78,7 @@ impl HlNode {
.pool(EthereumPoolBuilder::default())
.executor(HlExecutorBuilder::default())
.payload(HlPayloadServiceBuilder::default())
.network(HlNetworkBuilder {
engine_handle_rx: self.engine_handle_rx.clone(),
})
.network(HlNetworkBuilder { engine_handle_rx: self.engine_handle_rx.clone() })
.consensus(HlConsensusBuilder::default())
}
}
@@ -134,12 +124,7 @@ where
type RpcBlock = alloy_rpc_types::Block;
fn rpc_to_primitive_block(rpc_block: Self::RpcBlock) -> HlBlock {
let alloy_rpc_types::Block {
header,
transactions,
withdrawals,
..
} = rpc_block;
let alloy_rpc_types::Block { header, transactions, withdrawals, .. } = rpc_block;
HlBlock {
header: header.inner,
body: HlBlockBody {
@@ -99,11 +99,10 @@ where
match engine.new_payload(payload).await {
Ok(payload_status) => match payload_status.status {
PayloadStatusEnum::Valid => Outcome {
peer: peer_id,
result: Ok(BlockValidation::ValidBlock { block }),
PayloadStatusEnum::Valid => {
Outcome { peer: peer_id, result: Ok(BlockValidation::ValidBlock { block }) }
.into()
}
.into(),
PayloadStatusEnum::Invalid { validation_error } => Outcome {
peer: peer_id,
result: Err(BlockImportError::Other(validation_error.into())),
@@ -136,16 +135,13 @@ where
finalized_block_hash: head_block_hash,
};
match engine
.fork_choice_updated(state, None, EngineApiMessageVersion::default())
.await
match engine.fork_choice_updated(state, None, EngineApiMessageVersion::default()).await
{
Ok(response) => match response.payload_status.status {
PayloadStatusEnum::Valid => Outcome {
peer: peer_id,
result: Ok(BlockValidation::ValidBlock { block }),
PayloadStatusEnum::Valid => {
Outcome { peer: peer_id, result: Ok(BlockValidation::ValidBlock { block }) }
.into()
}
.into(),
PayloadStatusEnum::Invalid { validation_error } => Outcome {
peer: peer_id,
result: Err(BlockImportError::Other(validation_error.into())),
@@ -189,10 +185,7 @@ where
let td = U128::from(reth_block.header().difficulty());
let msg = NewBlockMessage {
hash: reth_block.header().hash_slow(),
block: Arc::new(HlNewBlock(NewBlock {
block: reth_block,
td,
})),
block: Arc::new(HlNewBlock(NewBlock { block: reth_block, td })),
};
this.on_new_block(msg, peer_id);
this.height += 1;
@@ -336,17 +329,12 @@ mod tests {
impl EngineResponses {
fn both_valid() -> Self {
Self {
new_payload: PayloadStatusEnum::Valid,
fcu: PayloadStatusEnum::Valid,
}
Self { new_payload: PayloadStatusEnum::Valid, fcu: PayloadStatusEnum::Valid }
}
fn invalid_new_payload() -> Self {
Self {
new_payload: PayloadStatusEnum::Invalid {
validation_error: "test error".into(),
},
new_payload: PayloadStatusEnum::Invalid { validation_error: "test error".into() },
fcu: PayloadStatusEnum::Valid,
}
}
@@ -354,9 +342,7 @@ mod tests {
fn invalid_fcu() -> Self {
Self {
new_payload: PayloadStatusEnum::Valid,
fcu: PayloadStatusEnum::Invalid {
validation_error: "fcu error".into(),
},
fcu: PayloadStatusEnum::Invalid { validation_error: "fcu error".into() },
}
}
}
@@ -369,9 +355,7 @@ mod tests {
impl TestFixture {
/// Create a new test fixture with the given engine responses
async fn new(responses: EngineResponses) -> Self {
let consensus = Arc::new(HlConsensus {
provider: MockProvider,
});
let consensus = Arc::new(HlConsensus { provider: MockProvider });
let (to_engine, from_engine) = mpsc::unbounded_channel();
let engine_handle = BeaconConsensusEngineHandle::new(to_engine);
@@ -435,15 +419,9 @@ mod tests {
read_precompile_calls: None,
},
};
let new_block = HlNewBlock(NewBlock {
block,
td: U128::from(1),
});
let new_block = HlNewBlock(NewBlock { block, td: U128::from(1) });
let hash = new_block.0.block.header.hash_slow();
NewBlockMessage {
hash,
block: Arc::new(new_block),
}
NewBlockMessage { hash, block: Arc::new(new_block) }
}
/// Helper function to handle engine messages with specified payload statuses
@@ -71,12 +71,7 @@ mod rlp {
header,
body:
HlBlockBody {
inner:
BlockBody {
transactions,
ommers,
withdrawals,
},
inner: BlockBody { transactions, ommers, withdrawals },
sidecars,
read_precompile_calls,
},
@@ -111,13 +106,7 @@ mod rlp {
impl Decodable for HlNewBlock {
fn decode(buf: &mut &[u8]) -> alloy_rlp::Result<Self> {
let HlNewBlockHelper {
block:
BlockHelper {
header,
transactions,
ommers,
withdrawals,
},
block: BlockHelper { header, transactions, ommers, withdrawals },
td,
sidecars,
read_precompile_calls,
@@ -180,14 +169,11 @@ impl HlNetworkBuilder {
let (to_network, import_outcome) = mpsc::unbounded_channel();
let handle = ImportHandle::new(to_import, import_outcome);
let consensus = Arc::new(HlConsensus {
provider: ctx.provider().clone(),
});
let consensus = Arc::new(HlConsensus { provider: ctx.provider().clone() });
let number = ctx.provider().last_block_number().unwrap_or(1);
let number = std::cmp::max(number, 1);
ctx.task_executor()
.spawn_critical("block import", async move {
ctx.task_executor().spawn_critical("block import", async move {
let handle = engine_handle_rx
.lock()
.await
@@ -196,9 +182,7 @@ impl HlNetworkBuilder {
.await
.unwrap();
ImportService::new(consensus, handle, from_network, to_network, number)
.await
.unwrap();
ImportService::new(consensus, handle, from_network, to_network, number).await.unwrap();
});
let network_builder = network_builder
@@ -46,13 +46,13 @@ pub struct HlBlockBody {
impl InMemorySize for HlBlockBody {
fn size(&self) -> usize {
self.inner.size()
+ self.sidecars.as_ref().map_or(0, |s| {
s.capacity() * core::mem::size_of::<BlobTransactionSidecar>()
})
+ self.read_precompile_calls.as_ref().map_or(0, |s| {
s.0.capacity() * core::mem::size_of::<ReadPrecompileCall>()
})
self.inner.size() +
self.sidecars
.as_ref()
.map_or(0, |s| s.capacity() * core::mem::size_of::<BlobTransactionSidecar>()) +
self.read_precompile_calls
.as_ref()
.map_or(0, |s| s.0.capacity() * core::mem::size_of::<ReadPrecompileCall>())
}
}
@@ -156,12 +156,7 @@ mod rlp {
impl<'a> From<&'a HlBlockBody> for BlockBodyHelper<'a> {
fn from(value: &'a HlBlockBody) -> Self {
let HlBlockBody {
inner:
BlockBody {
transactions,
ommers,
withdrawals,
},
inner: BlockBody { transactions, ommers, withdrawals },
sidecars,
read_precompile_calls,
} = value;
@@ -182,12 +177,7 @@ mod rlp {
header,
body:
HlBlockBody {
inner:
BlockBody {
transactions,
ommers,
withdrawals,
},
inner: BlockBody { transactions, ommers, withdrawals },
sidecars,
read_precompile_calls,
},
@@ -300,11 +290,7 @@ pub mod serde_bincode_compat {
}
fn from_repr(repr: Self::BincodeRepr<'_>) -> Self {
let HlBlockBodyBincode {
inner,
sidecars,
read_precompile_calls,
} = repr;
let HlBlockBodyBincode { inner, sidecars, read_precompile_calls } = repr;
Self {
inner: BlockBody::from_repr(inner),
sidecars: sidecars.map(|s| s.into_owned()),
@@ -317,18 +303,12 @@ pub mod serde_bincode_compat {
type BincodeRepr<'a> = HlBlockBincode<'a>;
fn as_repr(&self) -> Self::BincodeRepr<'_> {
HlBlockBincode {
header: self.header.as_repr(),
body: self.body.as_repr(),
}
HlBlockBincode { header: self.header.as_repr(), body: self.body.as_repr() }
}
fn from_repr(repr: Self::BincodeRepr<'_>) -> Self {
let HlBlockBincode { header, body } = repr;
Self {
header: Header::from_repr(header),
body: HlBlockBody::from_repr(body),
}
Self { header: Header::from_repr(header), body: HlBlockBody::from_repr(body) }
}
}
}
@@ -1,7 +1,9 @@
use crate::{
chainspec::HlChainSpec,
node::rpc::{HlEthApi, HlNodeCore},
node::{HlBlock, HlPrimitives},
node::{
rpc::{HlEthApi, HlNodeCore},
HlBlock, HlPrimitives,
},
};
use alloy_consensus::BlockHeader;
use alloy_primitives::B256;
@@ -52,10 +54,7 @@ where
let block_hash = block.hash();
let excess_blob_gas = block.excess_blob_gas();
let timestamp = block.timestamp();
let blob_params = self
.provider()
.chain_spec()
.blob_params_at_timestamp(timestamp);
let blob_params = self.provider().chain_spec().blob_params_at_timestamp(timestamp);
return block
.body()
@@ -163,10 +162,7 @@ where
.await
.map_err(Self::Error::from_eth_err)?
.ok_or(EthApiError::HeaderNotFound(hash.into()))?;
let blob_params = self
.provider()
.chain_spec()
.blob_params_at_timestamp(meta.timestamp);
let blob_params = self.provider().chain_spec().blob_params_at_timestamp(meta.timestamp);
Ok(EthReceiptBuilder::new(&tx, meta, &receipt, &all_receipts, blob_params)?.build())
}
@@ -67,7 +67,7 @@ where
) -> Result<HlTxEnv<TxEnv>, Self::Error> {
// Ensure that if versioned hashes are set, they're not empty
if request.blob_versioned_hashes.as_ref().is_some_and(|hashes| hashes.is_empty()) {
return Err(RpcInvalidTransactionError::BlobTransactionMissingBlobHashes.into_eth_err())
return Err(RpcInvalidTransactionError::BlobTransactionMissingBlobHashes.into_eth_err());
}
let tx_type = if request.authorization_list.is_some() {
@@ -37,9 +37,7 @@ where
type Validator = HlEngineValidator;
async fn build(self, ctx: &AddOnsContext<'_, Node>) -> eyre::Result<Self::Validator> {
Ok(HlEngineValidator::new(Arc::new(
ctx.config.chain.clone().as_ref().clone(),
)))
Ok(HlEngineValidator::new(Arc::new(ctx.config.chain.clone().as_ref().clone())))
}
}
@@ -52,9 +50,7 @@ pub struct HlEngineValidator {
impl HlEngineValidator {
/// Instantiates a new validator.
pub fn new(chain_spec: Arc<HlChainSpec>) -> Self {
Self {
inner: HlExecutionPayloadValidator { inner: chain_spec },
}
Self { inner: HlExecutionPayloadValidator { inner: chain_spec } }
}
}
@@ -99,13 +95,9 @@ impl PayloadValidator for HlEngineValidator {
&self,
payload: Self::ExecutionData,
) -> Result<RecoveredBlock<Self::Block>, NewPayloadError> {
let sealed_block = self
.inner
.ensure_well_formed_payload(payload)
.map_err(NewPayloadError::other)?;
sealed_block
.try_recover()
.map_err(|e| NewPayloadError::Other(e.into()))
let sealed_block =
self.inner.ensure_well_formed_payload(payload).map_err(NewPayloadError::other)?;
sealed_block.try_recover().map_err(|e| NewPayloadError::Other(e.into()))
}
fn validate_block_post_execution_with_hashed_state(
@@ -49,7 +49,7 @@ where
request: TransactionRequest,
) -> Result<TransactionSigned, Self::Error> {
let Ok(tx) = request.build_typed_tx() else {
return Err(EthApiError::TransactionConversionError)
return Err(EthApiError::TransactionConversionError);
};
// Create an empty signature for the transaction.
@@ -51,9 +51,7 @@ fn fetch_spot_meta(chain_id: u64) -> Result<SpotMeta> {
Ok(serde_json::from_str(&response)?)
}
pub(crate) fn erc20_contract_to_spot_token(
chain_id: u64,
) -> Result<BTreeMap<Address, SpotId>> {
pub(crate) fn erc20_contract_to_spot_token(chain_id: u64) -> Result<BTreeMap<Address, SpotId>> {
let meta = fetch_spot_meta(chain_id)?;
let mut map = BTreeMap::new();
for token in &meta.tokens {
@@ -1,7 +1,4 @@
use crate::{
node::types::ReadPrecompileCalls,
{HlBlock, HlBlockBody, HlPrimitives},
};
use crate::{node::types::ReadPrecompileCalls, HlBlock, HlBlockBody, HlPrimitives};
use alloy_consensus::BlockHeader;
use alloy_primitives::Bytes;
use reth_chainspec::EthereumHardforks;
@@ -31,9 +28,8 @@ impl HlStorage {
where
Provider: DBProvider<Tx: DbTxMut>,
{
let mut precompile_calls_cursor = provider
.tx_ref()
.cursor_write::<tables::BlockReadPrecompileCalls>()?;
let mut precompile_calls_cursor =
provider.tx_ref().cursor_write::<tables::BlockReadPrecompileCalls>()?;
for (block_number, read_precompile_calls) in inputs {
let Some(read_precompile_calls) = read_precompile_calls else {
@@ -60,9 +56,8 @@ impl HlStorage {
Provider: DBProvider<Tx: DbTx>,
{
let mut read_precompile_calls = Vec::with_capacity(inputs.len());
let mut precompile_calls_cursor = provider
.tx_ref()
.cursor_read::<tables::BlockReadPrecompileCalls>()?;
let mut precompile_calls_cursor =
provider.tx_ref().cursor_read::<tables::BlockReadPrecompileCalls>()?;
for (header, _transactions) in inputs {
let precompile_calls = precompile_calls_cursor
@@ -91,11 +86,7 @@ where
for (block_number, body) in bodies {
match body {
Some(HlBlockBody {
inner,
sidecars: _,
read_precompile_calls: rpc,
}) => {
Some(HlBlockBody { inner, sidecars: _, read_precompile_calls: rpc }) => {
eth_bodies.push((block_number, Some(inner)));
read_precompile_calls.push((block_number, rpc));
}
@@ -118,11 +109,8 @@ where
block: u64,
remove_from: StorageLocation,
) -> ProviderResult<()> {
self.0
.remove_block_bodies_above(provider, block, remove_from)?;
provider
.tx_ref()
.unwind_table_by_num::<tables::BlockReadPrecompileCalls>(block)?;
self.0.remove_block_bodies_above(provider, block, remove_from)?;
provider.tx_ref().unwind_table_by_num::<tables::BlockReadPrecompileCalls>(block)?;
Ok(())
}
@@ -20,11 +20,7 @@ mod reth_compat;
impl From<ReadPrecompileCalls> for ReadPrecompileMap {
fn from(calls: ReadPrecompileCalls) -> Self {
calls
.0
.into_iter()
.map(|(address, calls)| (address, calls.into_iter().collect()))
.collect()
calls.0.into_iter().map(|(address, calls)| (address, calls.into_iter().collect())).collect()
}
}
@@ -1,16 +1,18 @@
//! Copy of reth codebase to preserve serialization compatibility
use alloy_consensus::{
Header, Signed, TxEip1559, TxEip2930, TxEip4844, TxEip7702, TxLegacy,
};
use alloy_consensus::{Header, Signed, TxEip1559, TxEip2930, TxEip4844, TxEip7702, TxLegacy};
use alloy_primitives::{Address, BlockHash, Signature, TxKind, U256};
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
use std::sync::{Arc, LazyLock, Mutex};
use std::{
collections::BTreeMap,
sync::{Arc, LazyLock, Mutex},
};
use tracing::info;
use crate::node::spot_meta::{erc20_contract_to_spot_token, SpotId};
use crate::{
node::types::{ReadPrecompileCalls, SystemTx},
node::{
spot_meta::{erc20_contract_to_spot_token, SpotId},
types::{ReadPrecompileCalls, SystemTx},
},
HlBlock, HlBlockBody,
};
@@ -98,10 +100,7 @@ fn system_tx_to_reth_transaction(
break spot.to_s();
}
info!(
"Contract not found: {:?} from spot mapping, fetching again...",
to
);
info!("Contract not found: {:?} from spot mapping, fetching again...", to);
*EVM_MAP.lock().unwrap() = erc20_contract_to_spot_token(chain_id).unwrap();
}
};
@@ -118,17 +117,8 @@ impl SealedBlock {
chain_id: u64,
) -> HlBlock {
let mut merged_txs = vec![];
merged_txs.extend(
system_txs
.iter()
.map(|tx| system_tx_to_reth_transaction(tx, chain_id)),
);
merged_txs.extend(
self.body
.transactions
.iter()
.map(|tx| tx.to_reth_transaction()),
);
merged_txs.extend(system_txs.iter().map(|tx| system_tx_to_reth_transaction(tx, chain_id)));
merged_txs.extend(self.body.transactions.iter().map(|tx| tx.to_reth_transaction()));
let block_body = HlBlockBody {
inner: reth_primitives::BlockBody {
transactions: merged_txs,
@@ -139,9 +129,6 @@ impl SealedBlock {
read_precompile_calls: Some(read_precompile_calls),
};
HlBlock {
header: self.header.header.clone(),
body: block_body,
}
HlBlock { header: self.header.header.clone(), body: block_body }
}
}