Initial reth port

This commit is contained in:
sprites0
2025-02-25 03:39:06 +00:00
parent 434ee6bc0d
commit d574b9ef58
23 changed files with 618 additions and 306 deletions

41
Cargo.lock generated
View File

@ -5081,6 +5081,9 @@ name = "lz4_flex"
version = "0.11.3" version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75761162ae2b0e580d7e7c390558127e5f01b4194debd6221fd8c207fc80e3f5" checksum = "75761162ae2b0e580d7e7c390558127e5f01b4194debd6221fd8c207fc80e3f5"
dependencies = [
"twox-hash",
]
[[package]] [[package]]
name = "mach2" name = "mach2"
@ -6645,6 +6648,7 @@ dependencies = [
name = "reth" name = "reth"
version = "1.2.0" version = "1.2.0"
dependencies = [ dependencies = [
"alloy-chains",
"alloy-consensus", "alloy-consensus",
"alloy-eips", "alloy-eips",
"alloy-primitives", "alloy-primitives",
@ -6655,6 +6659,10 @@ dependencies = [
"clap", "clap",
"eyre", "eyre",
"futures", "futures",
"jsonrpsee",
"lz4_flex",
"once_cell",
"parking_lot",
"reth-basic-payload-builder", "reth-basic-payload-builder",
"reth-chainspec", "reth-chainspec",
"reth-cli", "reth-cli",
@ -6667,8 +6675,10 @@ dependencies = [
"reth-db", "reth-db",
"reth-db-api", "reth-db-api",
"reth-downloaders", "reth-downloaders",
"reth-e2e-test-utils",
"reth-errors", "reth-errors",
"reth-ethereum-cli", "reth-ethereum-cli",
"reth-ethereum-forks",
"reth-ethereum-payload-builder", "reth-ethereum-payload-builder",
"reth-ethereum-primitives", "reth-ethereum-primitives",
"reth-evm", "reth-evm",
@ -6696,6 +6706,7 @@ dependencies = [
"reth-rpc-api", "reth-rpc-api",
"reth-rpc-builder", "reth-rpc-builder",
"reth-rpc-eth-types", "reth-rpc-eth-types",
"reth-rpc-layer",
"reth-rpc-server-types", "reth-rpc-server-types",
"reth-rpc-types-compat", "reth-rpc-types-compat",
"reth-stages", "reth-stages",
@ -6705,6 +6716,9 @@ dependencies = [
"reth-transaction-pool", "reth-transaction-pool",
"reth-trie", "reth-trie",
"reth-trie-db", "reth-trie-db",
"rmp-serde",
"rmpv",
"serde",
"serde_json", "serde_json",
"similar-asserts", "similar-asserts",
"tempfile", "tempfile",
@ -7617,11 +7631,16 @@ dependencies = [
name = "reth-ethereum-cli" name = "reth-ethereum-cli"
version = "1.2.0" version = "1.2.0"
dependencies = [ dependencies = [
"alloy-chains",
"alloy-primitives",
"clap", "clap",
"eyre", "eyre",
"once_cell",
"reth-chainspec", "reth-chainspec",
"reth-cli", "reth-cli",
"reth-cli-commands", "reth-cli-commands",
"reth-primitives",
"serde_json",
] ]
[[package]] [[package]]
@ -8908,6 +8927,8 @@ name = "reth-payload-validator"
version = "1.2.0" version = "1.2.0"
dependencies = [ dependencies = [
"alloy-consensus", "alloy-consensus",
"alloy-eips",
"alloy-primitives",
"alloy-rpc-types-engine", "alloy-rpc-types-engine",
"reth-chainspec", "reth-chainspec",
"reth-primitives", "reth-primitives",
@ -10168,6 +10189,16 @@ dependencies = [
"serde", "serde",
] ]
[[package]]
name = "rmpv"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58450723cd9ee93273ce44a20b6ec4efe17f8ed2e3631474387bfdecf18bb2a9"
dependencies = [
"num-traits",
"rmp",
]
[[package]] [[package]]
name = "roaring" name = "roaring"
version = "0.10.10" version = "0.10.10"
@ -11833,6 +11864,16 @@ dependencies = [
"utf-8", "utf-8",
] ]
[[package]]
name = "twox-hash"
version = "1.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675"
dependencies = [
"cfg-if",
"static_assertions",
]
[[package]] [[package]]
name = "typenum" name = "typenum"
version = "1.18.0" version = "1.18.0"

160
README.md
View File

@ -1,155 +1,15 @@
# reth # nanoreth
[![CI status](https://github.com/paradigmxyz/reth/workflows/unit/badge.svg)][gh-ci] Hyperliquid archive node based on [reth](https://github.com/paradigmxyz/reth).
[![cargo-deny status](https://github.com/paradigmxyz/reth/workflows/deny/badge.svg)][gh-deny]
[![Telegram Chat][tg-badge]][tg-url]
**Modular, contributor-friendly and blazing-fast implementation of the Ethereum protocol** ## How to run
![](./assets/reth-prod.png)
**[Install](https://paradigmxyz.github.io/reth/installation/installation.html)**
| [User Book](https://reth.rs)
| [Developer Docs](./docs)
| [Crate Docs](https://reth.rs/docs)
[gh-ci]: https://github.com/paradigmxyz/reth/actions/workflows/unit.yml
[gh-deny]: https://github.com/paradigmxyz/reth/actions/workflows/lint.yml
[tg-badge]: https://img.shields.io/endpoint?color=neon&logo=telegram&label=chat&url=https%3A%2F%2Ftg.sumanjay.workers.dev%2Fparadigm%5Freth
## What is Reth?
Reth (short for Rust Ethereum, [pronunciation](https://twitter.com/kelvinfichter/status/1597653609411268608)) is a new Ethereum full node implementation that is focused on being user-friendly, highly modular, as well as being fast and efficient. Reth is an Execution Layer (EL) and is compatible with all Ethereum Consensus Layer (CL) implementations that support the [Engine API](https://github.com/ethereum/execution-apis/tree/a0d03086564ab1838b462befbc083f873dcf0c0f/src/engine). It is originally built and driven forward by [Paradigm](https://paradigm.xyz/), and is licensed under the Apache and MIT licenses.
## Goals
As a full Ethereum node, Reth allows users to connect to the Ethereum network and interact with the Ethereum blockchain. This includes sending and receiving transactions/logs/traces, as well as accessing and interacting with smart contracts. Building a successful Ethereum node requires creating a high-quality implementation that is both secure and efficient, as well as being easy to use on consumer hardware. It also requires building a strong community of contributors who can help support and improve the software.
More concretely, our goals are:
1. **Modularity**: Every component of Reth is built to be used as a library: well-tested, heavily documented and benchmarked. We envision that developers will import the node's crates, mix and match, and innovate on top of them. Examples of such usage include but are not limited to spinning up standalone P2P networks, talking directly to a node's database, or "unbundling" the node into the components you need. To achieve that, we are licensing Reth under the Apache/MIT permissive license. You can learn more about the project's components [here](./docs/repo/layout.md).
2. **Performance**: Reth aims to be fast, so we used Rust and the [Erigon staged-sync](https://erigon.substack.com/p/erigon-stage-sync-and-control-flows) node architecture. We also use our Ethereum libraries (including [Alloy](https://github.com/alloy-rs/alloy/) and [revm](https://github.com/bluealloy/revm/)) which we've battle-tested and optimized via [Foundry](https://github.com/foundry-rs/foundry/).
3. **Free for anyone to use any way they want**: Reth is free open source software, built for the community, by the community. By licensing the software under the Apache/MIT license, we want developers to use it without being bound by business licenses, or having to think about the implications of GPL-like licenses.
4. **Client Diversity**: The Ethereum protocol becomes more antifragile when no node implementation dominates. This ensures that if there's a software bug, the network does not finalize a bad block. By building a new client, we hope to contribute to Ethereum's antifragility.
5. **Support as many EVM chains as possible**: We aspire that Reth can full-sync not only Ethereum, but also other chains like Optimism, Polygon, BNB Smart Chain, and more. If you're working on any of these projects, please reach out.
6. **Configurability**: We want to solve for node operators that care about fast historical queries, but also for hobbyists who cannot operate on large hardware. We also want to support teams and individuals who want both sync from genesis and via "fast sync". We envision that Reth will be configurable enough and provide configurable "profiles" for the tradeoffs that each team faces.
## Status
Reth is production ready, and suitable for usage in mission-critical environments such as staking or high-uptime services. We also actively recommend professional node operators to switch to Reth in production for performance and cost reasons in use cases where high performance with great margins is required such as RPC, MEV, Indexing, Simulations, and P2P activities.
More historical context below:
* We released 1.0 "production-ready" stable Reth in June 2024.
* Reth completed an audit with [Sigma Prime](https://sigmaprime.io/), the developers of [Lighthouse](https://github.com/sigp/lighthouse), the Rust Consensus Layer implementation. Find it [here](./audit/sigma_prime_audit_v2.pdf).
* Revm (the EVM used in Reth) underwent an audit with [Guido Vranken](https://twitter.com/guidovranken) (#1 [Ethereum Bug Bounty](https://ethereum.org/en/bug-bounty)). We will publish the results soon.
* We released multiple iterative beta versions, up to [beta.9](https://github.com/paradigmxyz/reth/releases/tag/v0.2.0-beta.9) on Monday June 3rd 2024 the last beta release.
* We released [beta](https://github.com/paradigmxyz/reth/releases/tag/v0.2.0-beta.1) on Monday March 4th 2024, our first breaking change to the database model, providing faster query speed, smaller database footprint, and allowing "history" to be mounted on separate drives.
* We shipped iterative improvements until the last alpha release on February 28th 2024, [0.1.0-alpha.21](https://github.com/paradigmxyz/reth/releases/tag/v0.1.0-alpha.21).
* We [initially announced](https://www.paradigm.xyz/2023/06/reth-alpha) [0.1.0-alpha.1](https://github.com/paradigmxyz/reth/releases/tag/v0.1.0-alpha.1) in June 20th 2023.
### Database compatibility
We do not have any breaking database changes since beta.1, and do not plan any in the near future.
Reth [v0.2.0-beta.1](https://github.com/paradigmxyz/reth/releases/tag/v0.2.0-beta.1) includes
a [set of breaking database changes](https://github.com/paradigmxyz/reth/pull/5191) that makes it impossible to use database files produced by earlier versions.
If you had a database produced by alpha versions of Reth, you need to drop it with `reth db drop`
(using the same arguments such as `--config` or `--datadir` that you passed to `reth node`), and resync using the same `reth node` command you've used before.
## For Users
See the [Reth Book](https://paradigmxyz.github.io/reth) for instructions on how to install and run Reth.
## For Developers
### Using reth as a library
You can use individual crates of reth in your project.
The crate docs can be found [here](https://paradigmxyz.github.io/reth/docs).
For a general overview of the crates, see [Project Layout](./docs/repo/layout.md).
### Contributing
If you want to contribute, or follow along with contributor discussion, you can use our [main telegram](https://t.me/paradigm_reth) to chat with us about the development of Reth!
- Our contributor guidelines can be found in [`CONTRIBUTING.md`](./CONTRIBUTING.md).
- See our [contributor docs](./docs) for more information on the project. A good starting point is [Project Layout](./docs/repo/layout.md).
### Building and testing
<!--
When updating this, also update:
- clippy.toml
- Cargo.toml
- .github/workflows/lint.yml
-->
The Minimum Supported Rust Version (MSRV) of this project is [1.82.0](https://blog.rust-lang.org/2024/10/17/Rust-1.82.0.html).
See the book for detailed instructions on how to [build from source](https://paradigmxyz.github.io/reth/installation/source.html).
To fully test Reth, you will need to have [Geth installed](https://geth.ethereum.org/docs/getting-started/installing-geth), but it is possible to run a subset of tests without Geth.
First, clone the repository:
```sh ```sh
git clone https://github.com/paradigmxyz/reth # Fetch EVM blocks
cd reth $ aws s3 sync s3://hl-mainnet-evm-blocks/ ~/evm-blocks --request-payer requester # one-time
$ goofys --region=ap-northeast-1 --requester-pays hl-mainnet-evm-blocks evm-blocks-bak # realtime
# Run node
$ make install
$ reth node --http --http.addr 0.0.0.0 --http.api eth,ots,net,web3 --ws --ws.addr 0.0.0.0 --ws.origins '*' --ws.api eth,ots,net,web3 --ingest-dir ~/evm-blocks --ws.port 8545
``` ```
Next, run the tests:
```sh
# Without Geth
cargo test --workspace
# With Geth
cargo test --workspace --features geth-tests
# With Ethereum Foundation tests
#
# Note: Requires cloning https://github.com/ethereum/tests
#
# cd testing/ef-tests && git clone https://github.com/ethereum/tests ethereum-tests
cargo test -p ef-tests --features ef-tests
```
We recommend using [`cargo nextest`](https://nexte.st/) to speed up testing. With nextest installed, simply substitute `cargo test` with `cargo nextest run`.
> **Note**
>
> Some tests use random number generators to generate test data. If you want to use a deterministic seed, you can set the `SEED` environment variable.
## Getting Help
If you have any questions, first see if the answer to your question can be found in the [book][book].
If the answer is not there:
- Join the [Telegram][tg-url] to get help, or
- Open a [discussion](https://github.com/paradigmxyz/reth/discussions/new) with your question, or
- Open an issue with [the bug](https://github.com/paradigmxyz/reth/issues/new?assignees=&labels=C-bug%2CS-needs-triage&projects=&template=bug.yml)
## Security
See [`SECURITY.md`](./SECURITY.md).
## Acknowledgements
Reth is a new implementation of the Ethereum protocol. In the process of developing the node we investigated the design decisions other nodes have made to understand what is done well, what is not, and where we can improve the status quo.
None of this would have been possible without them, so big shoutout to the teams below:
- [Geth](https://github.com/ethereum/go-ethereum/): We would like to express our heartfelt gratitude to the go-ethereum team for their outstanding contributions to Ethereum over the years. Their tireless efforts and dedication have helped to shape the Ethereum ecosystem and make it the vibrant and innovative community it is today. Thank you for your hard work and commitment to the project.
- [Erigon](https://github.com/ledgerwatch/erigon) (fka Turbo-Geth): Erigon pioneered the ["Staged Sync" architecture](https://erigon.substack.com/p/erigon-stage-sync-and-control-flows) that Reth is using, as well as [introduced MDBX](https://github.com/ledgerwatch/erigon/wiki/Choice-of-storage-engine) as the database of choice. We thank Erigon for pushing the state of the art research on the performance limits of Ethereum nodes.
- [Akula](https://github.com/akula-bft/akula/): Reth uses forks of the Apache versions of Akula's [MDBX Bindings](https://github.com/paradigmxyz/reth/pull/132), [FastRLP](https://github.com/paradigmxyz/reth/pull/63) and [ECIES](https://github.com/paradigmxyz/reth/pull/80) . Given that these packages were already released under the Apache License, and they implement standardized solutions, we decided not to reimplement them to iterate faster. We thank the Akula team for their contributions to the Rust Ethereum ecosystem and for publishing these packages.
## Warning
The `NippyJar` and `Compact` encoding formats and their implementations are designed for storing and retrieving data internally. They are not hardened to safely read potentially malicious data.
[book]: https://paradigmxyz.github.io/reth/
[tg-url]: https://t.me/paradigm_reth

View File

@ -89,6 +89,18 @@ clap = { workspace = true, features = ["derive", "env"] }
backon.workspace = true backon.workspace = true
similar-asserts.workspace = true similar-asserts.workspace = true
parking_lot.workspace = true
lz4_flex = "0.11.3"
rmp-serde = "1.3.0"
rmpv = "1.3.0"
serde = { workspace = true, features = ["derive"] }
reth-e2e-test-utils.workspace = true
once_cell.workspace = true
alloy-chains.workspace = true
reth-ethereum-forks.workspace = true
jsonrpsee.workspace = true
reth-rpc-layer.workspace = true
[dev-dependencies] [dev-dependencies]
tempfile.workspace = true tempfile.workspace = true

View File

@ -0,0 +1,197 @@
use std::path::PathBuf;
use std::sync::Arc;
use alloy_consensus::transaction::RlpEcdsaTx;
use alloy_consensus::{BlockBody, BlockHeader};
use alloy_eips::Typed2718;
use alloy_primitives::{address, Address, PrimitiveSignature, B256, U256};
use alloy_rpc_types::engine::{
ExecutionPayloadEnvelopeV3, ForkchoiceState, PayloadAttributes, PayloadStatusEnum,
};
use jsonrpsee::http_client::{transport::HttpBackend, HttpClient};
use reth::network::PeersHandleProvider;
use reth_chainspec::{EthChainSpec, EthereumHardforks};
use reth_node_api::{FullNodeComponents, PayloadTypes};
use reth_node_builder::EngineTypes;
use reth_node_builder::NodeTypesWithEngine;
use reth_node_builder::{rpc::RethRpcAddOns, FullNode};
use reth_payload_builder::{EthBuiltPayload, EthPayloadBuilderAttributes, PayloadId};
use reth_primitives::{Transaction, TransactionSigned};
use reth_provider::{BlockHashReader, StageCheckpointReader};
use reth_rpc_api::EngineApiClient;
use reth_rpc_layer::AuthClientService;
use reth_stages::StageId;
use tracing::debug;
use crate::serialized::{self, BlockInner};
/// Replays serialized EVM blocks from a local directory into the node.
/// The inner `PathBuf` is the root of the ingest directory (`--ingest-dir`).
pub(crate) struct BlockIngest(pub PathBuf);
/// Submits `payload` to the node's Engine API via `engine_newPayloadV3` and
/// checks the returned status against `expected_status`.
///
/// Returns the engine's `latest_valid_hash` (or the zero hash if the engine
/// reported none).
///
/// # Panics
/// Panics if the reported status differs from `expected_status`, or if
/// `payload_builder_attributes.parent_beacon_block_root` is `None`
/// (V3 payload submission requires it).
async fn submit_payload<Engine: PayloadTypes + EngineTypes>(
    engine_api_client: &HttpClient<AuthClientService<HttpBackend>>,
    payload: EthBuiltPayload,
    payload_builder_attributes: EthPayloadBuilderAttributes,
    expected_status: PayloadStatusEnum,
) -> Result<B256, Box<dyn std::error::Error>> {
    // V3 submission requires the blob versioned hashes alongside the payload.
    let versioned_hashes =
        payload.block().blob_versioned_hashes_iter().copied().collect::<Vec<_>>();
    // submit payload to engine api
    let submission = {
        let envelope: ExecutionPayloadEnvelopeV3 =
            <EthBuiltPayload as Into<ExecutionPayloadEnvelopeV3>>::into(payload);
        EngineApiClient::<Engine>::new_payload_v3(
            engine_api_client,
            envelope.execution_payload,
            versioned_hashes,
            payload_builder_attributes.parent_beacon_block_root.unwrap(),
        )
        .await?
    };
    // Hard assertion: the caller states the status it expects (Valid in the
    // ingest loop); any divergence is treated as a bug, not a soft error.
    assert_eq!(submission.status.as_str(), expected_status.as_str());
    Ok(submission.latest_valid_hash.unwrap_or_default())
}
/// Computes the hash of an impersonated (system) transaction.
///
/// The transaction is EIP-2718 encoded together with the provided
/// `signature`, the `sender` address is appended to the encoding, and the
/// keccak-256 of the result is returned. Appending the sender distinguishes
/// otherwise-identical impersonated transactions from different senders
/// (the shared fixed signature alone would not).
pub(crate) fn impersonated_hash(
    this: &Transaction,
    sender: Address,
    signature: &PrimitiveSignature,
) -> B256 {
    let mut buffer = Vec::new();
    let ty = this.ty();
    match this {
        Transaction::Legacy(tx) => tx.eip2718_encode_with_type(signature, ty, &mut buffer),
        Transaction::Eip2930(tx) => tx.eip2718_encode_with_type(signature, ty, &mut buffer),
        Transaction::Eip1559(tx) => tx.eip2718_encode_with_type(signature, ty, &mut buffer),
        Transaction::Eip4844(tx) => tx.eip2718_encode_with_type(signature, ty, &mut buffer),
        Transaction::Eip7702(tx) => tx.eip2718_encode_with_type(signature, ty, &mut buffer),
    }
    buffer.extend_from_slice(sender.as_ref());
    // `keccak256` already yields a `B256`; the original round-trip through
    // `B256::from_slice(.. .as_slice())` was a redundant copy.
    alloy_primitives::utils::keccak256(&buffer)
}
impl BlockIngest {
    /// Loads the serialized block for `height` from the ingest directory.
    ///
    /// Files are laid out as `<dir>/<f>/<s>/<height>.rmp.lz4`, where `f` and
    /// `s` are `height - 1` rounded down to the nearest 1_000_000 and 1_000
    /// respectively. Each file is an LZ4-frame-compressed, MessagePack-encoded
    /// `Vec<serialized::Block>`; only the first element is used.
    ///
    /// Returns `None` when the file does not exist yet (block not ingested).
    ///
    /// # Panics
    /// Panics if the file cannot be opened or decoded, or if the decoded
    /// vector is empty (`blocks[0]`).
    pub(crate) fn collect_block(&self, height: u64) -> Option<super::serialized::Block> {
        // Bucket directories derived from (height - 1); height starts at
        // head + 1 >= 1 in `run`, so the subtraction cannot underflow there.
        let f = ((height - 1) / 1_000_000) * 1_000_000;
        let s = ((height - 1) / 1_000) * 1_000;
        let path = format!("{}/{f}/{s}/{height}.rmp.lz4", self.0.to_string_lossy());
        if std::path::Path::new(&path).exists() {
            let file = std::fs::File::open(path).unwrap();
            let file = std::io::BufReader::new(file);
            let mut decoder = lz4_flex::frame::FrameDecoder::new(file);
            let blocks: Vec<serialized::Block> = rmp_serde::from_read(&mut decoder).unwrap();
            Some(blocks[0].clone())
        } else {
            None
        }
    }

    /// Tails the ingest directory and replays blocks into the node through
    /// the Engine API.
    ///
    /// Starting from the block after the last `Finish` stage checkpoint, this
    /// loop: polls for the next serialized block (sleeping 500 ms when it is
    /// not on disk yet), prepends the block's system transactions (re-signed
    /// with a fixed impersonation signer/signature), submits the block via
    /// `engine_newPayloadV3` expecting `Valid`, and periodically advances
    /// fork choice. The loop never exits normally; submission errors
    /// propagate via `?`.
    pub(crate) async fn run<Node, Engine, AddOns>(
        &self,
        node: FullNode<Node, AddOns>,
    ) -> Result<(), Box<dyn std::error::Error>>
    where
        Node: FullNodeComponents,
        AddOns: RethRpcAddOns<Node>,
        Engine: EngineTypes,
        Node::Types: NodeTypesWithEngine<ChainSpec: EthereumHardforks, Engine = Engine>,
        Node::Network: PeersHandleProvider,
        AddOns: RethRpcAddOns<Node>,
        Engine::ExecutionPayloadEnvelopeV3: From<Engine::BuiltPayload>,
        Engine::ExecutionPayloadEnvelopeV4: From<Engine::BuiltPayload>,
    {
        let provider = &node.provider;
        // Resume from the last fully-processed block (Finish stage checkpoint);
        // a fresh database yields the default checkpoint (block 0).
        let checkpoint = provider.get_stage_checkpoint(StageId::Finish)?;
        let head = checkpoint.unwrap_or_default().block_number;
        let genesis_hash = node.chain_spec().genesis_hash();
        let mut height = head + 1;
        // Fall back to the genesis hash when the head block hash is unknown.
        let mut previous_hash = provider.block_hash(head)?.unwrap_or(genesis_hash);
        let mut previous_timestamp =
            std::time::SystemTime::now().duration_since(std::time::UNIX_EPOCH).unwrap().as_millis();
        let engine_api = node.auth_server_handle().http_client();
        loop {
            // Poll until the file for `height` appears on disk.
            let Some(original_block) = self.collect_block(height) else {
                tokio::time::sleep(std::time::Duration::from_millis(500)).await;
                continue;
            };
            let BlockInner::Reth115(mut block) = original_block.block;
            {
                debug!(target: "reth::cli", ?block, "Built new payload");
                let timestamp = block.header().timestamp();
                let block_hash = block.clone().try_recover()?.hash();
                {
                    // Swap the body out, prepend the re-signed system
                    // transactions ahead of the user transactions, then swap
                    // the rebuilt body back in.
                    let BlockBody { transactions, ommers, withdrawals } =
                        std::mem::take(block.body_mut());
                    let signer = address!("2222222222222222222222222222222222222222");
                    let signature = PrimitiveSignature::new(
                        // from anvil
                        U256::from(0x1),
                        U256::from(0x1),
                        true,
                    );
                    let mut system_txs = vec![];
                    for transaction in original_block.system_txs {
                        let typed_transaction = transaction.tx.to_reth();
                        let hash = impersonated_hash(&typed_transaction, signer, &signature);
                        let tx = TransactionSigned::new(typed_transaction, signature, hash);
                        system_txs.push(tx);
                    }
                    let mut txs = vec![];
                    txs.extend(system_txs);
                    txs.extend(transactions);
                    *block.body_mut() = BlockBody { transactions: txs, ommers, withdrawals };
                }
                let total_fees = U256::ZERO;
                // Wrap the historical block as a locally "built" payload so it
                // can be fed through the standard new-payload path.
                let payload = EthBuiltPayload::new(
                    PayloadId::new(height.to_be_bytes()),
                    Arc::new(block),
                    total_fees,
                    None,
                );
                let attributes = EthPayloadBuilderAttributes::new(
                    B256::ZERO,
                    PayloadAttributes {
                        timestamp,
                        prev_randao: B256::ZERO,
                        suggested_fee_recipient: Address::ZERO,
                        withdrawals: Some(vec![]),
                        parent_beacon_block_root: Some(B256::ZERO),
                    },
                );
                submit_payload::<Engine>(
                    &engine_api,
                    payload,
                    attributes,
                    PayloadStatusEnum::Valid,
                )
                .await?;
                let current_timestamp = std::time::SystemTime::now()
                    .duration_since(std::time::UNIX_EPOCH)
                    .unwrap()
                    .as_millis();
                // Throttle fork-choice updates: every 100 blocks, or when more
                // than 100 ms have passed since the last update.
                if height % 100 == 0 || current_timestamp - previous_timestamp > 100 {
                    EngineApiClient::<Engine>::fork_choice_updated_v2(
                        &engine_api,
                        ForkchoiceState {
                            head_block_hash: block_hash,
                            safe_block_hash: previous_hash,
                            finalized_block_hash: previous_hash,
                        },
                        None,
                    )
                    .await
                    .unwrap();
                    previous_timestamp = current_timestamp;
                }
                previous_hash = block_hash;
            }
            height += 1;
        }
    }
}

View File

@ -3,12 +3,25 @@
#[global_allocator] #[global_allocator]
static ALLOC: reth_cli_util::allocator::Allocator = reth_cli_util::allocator::new_allocator(); static ALLOC: reth_cli_util::allocator::Allocator = reth_cli_util::allocator::new_allocator();
use clap::Parser; mod block_ingest;
mod serialized;
use std::path::PathBuf;
use block_ingest::BlockIngest;
use clap::{Args, Parser};
use reth::cli::Cli; use reth::cli::Cli;
use reth_ethereum_cli::chainspec::EthereumChainSpecParser; use reth_ethereum_cli::chainspec::EthereumChainSpecParser;
use reth_node_ethereum::EthereumNode; use reth_node_ethereum::EthereumNode;
use tracing::info; use tracing::info;
/// Extra CLI arguments appended to the reth CLI for the block-ingest loop.
#[derive(Args, Debug, Clone)]
struct IngestArgs {
    /// EVM blocks base directory
    #[arg(long, default_value = "/tmp/evm-blocks")]
    pub ingest_dir: PathBuf,
}
fn main() { fn main() {
reth_cli_util::sigsegv_handler::install(); reth_cli_util::sigsegv_handler::install();
@ -17,9 +30,13 @@ fn main() {
std::env::set_var("RUST_BACKTRACE", "1"); std::env::set_var("RUST_BACKTRACE", "1");
} }
if let Err(err) = Cli::<EthereumChainSpecParser>::parse().run(|builder, _| async move { if let Err(err) = Cli::<EthereumChainSpecParser, IngestArgs>::parse().run(|builder, ingest_args| async move {
info!(target: "reth::cli", "Launching node"); info!(target: "reth::cli", "Launching node");
let handle = builder.launch_node(EthereumNode::default()).await?; let handle = builder.launch_node(EthereumNode::default()).await?;
let ingest_dir = ingest_args.ingest_dir;
let ingest = BlockIngest(ingest_dir);
ingest.run(handle.node).await.unwrap();
handle.node_exit_future.await handle.node_exit_future.await
}) { }) {
eprintln!("Error: {err:?}"); eprintln!("Error: {err:?}");

106
bin/reth/src/serialized.rs Normal file
View File

@ -0,0 +1,106 @@
use alloy_consensus::{TxEip1559, TxEip2930, TxLegacy};
use alloy_rpc_types::Log;
use reth_primitives::{SealedBlock, Transaction};
use serde::{Deserialize, Serialize};
/// A transaction as stored in the serialized block files: the typed payload
/// together with its raw signature components.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub(crate) struct SerializedTransaction {
    pub transaction: TypedTransaction,
    pub signature: SerializedSignature,
}
/// An ECDSA signature split into raw byte arrays.
///
/// NOTE(review): the exact encoding (byte order of `r`/`s`, semantics of the
/// 8-byte `v` field) is defined by the external producer of the block files —
/// confirm against the serializer before relying on it.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub(crate) struct SerializedSignature {
    pub r: [u8; 32],
    pub s: [u8; 32],
    pub v: [u8; 8],
}
/// Versioned wrapper around the serialized block payload.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub(crate) enum BlockInner {
    /// A `SealedBlock` (variant name presumably refers to the reth version
    /// whose layout it matches — confirm with the block producer).
    Reth115(SealedBlock),
}
/// A raw transaction.
///
/// Transaction types were introduced in [EIP-2718](https://eips.ethereum.org/EIPS/eip-2718).
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub(crate) enum TypedTransaction {
    /// Legacy transaction (type `0x0`).
    ///
    /// Traditional Ethereum transactions, containing parameters `nonce`, `gasPrice`, `gasLimit`,
    /// `to`, `value`, `data`, `v`, `r`, and `s`.
    ///
    /// These transactions do not utilize access lists nor do they incorporate EIP-1559 fee market
    /// changes.
    Legacy(TxLegacy),
    /// Transaction with an access list ([EIP-2930](https://eips.ethereum.org/EIPS/eip-2930)), type `0x1`.
    ///
    /// The `accessList` specifies an array of addresses and storage keys that the transaction
    /// plans to access, enabling gas savings on cross-contract calls by pre-declaring the accessed
    /// contract and storage slots.
    Eip2930(TxEip2930),
    /// A transaction with a priority fee ([EIP-1559](https://eips.ethereum.org/EIPS/eip-1559)), type `0x2`.
    ///
    /// Unlike traditional transactions, EIP-1559 transactions use an in-protocol, dynamically
    /// changing base fee per gas, adjusted at each block to manage network congestion.
    ///
    /// - `maxPriorityFeePerGas`, specifying the maximum fee above the base fee the sender is
    ///   willing to pay
    /// - `maxFeePerGas`, setting the maximum total fee the sender is willing to pay.
    ///
    /// The base fee is burned, while the priority fee is paid to the miner who includes the
    /// transaction, incentivizing miners to include transactions with higher priority fees per
    /// gas.
    Eip1559(TxEip1559),
}
impl TypedTransaction {
pub(crate) fn to_reth(self) -> Transaction {
match self {
Self::Legacy(tx) => Transaction::Legacy(tx),
Self::Eip2930(tx) => Transaction::Eip2930(tx),
Self::Eip1559(tx) => Transaction::Eip1559(tx),
}
}
}
/// Transaction envelope type tag, mirroring the
/// [EIP-2718](https://eips.ethereum.org/EIPS/eip-2718) transaction types
/// for serialization.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub(crate) enum TxType {
    /// Legacy transaction type.
    Legacy,
    /// EIP-2930 transaction type.
    Eip2930,
    /// EIP-1559 transaction type.
    Eip1559,
    /// EIP-4844 transaction type.
    Eip4844,
    /// EIP-7702 transaction type.
    Eip7702,
}
/// Execution receipt of a serialized (system) transaction.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub(crate) struct Receipt {
    /// Receipt type.
    pub tx_type: TxType,
    /// If transaction is executed successfully.
    ///
    /// This is the `statusCode`
    pub success: bool,
    /// Gas used
    pub cumulative_gas_used: u64,
    /// Log send from contracts.
    pub logs: Vec<Log>,
}
/// A system transaction bundled with its execution receipt.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub(crate) struct SystemTransaction {
    pub receipt: Receipt,
    pub tx: TypedTransaction,
}
/// Top-level record stored per block height: the sealed block plus the
/// system transactions that the ingest loop prepends to its body.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub(crate) struct Block {
    pub block: BlockInner,
    pub system_txs: Vec<SystemTransaction>,
}

View File

@ -3,8 +3,8 @@
use alloy_consensus::{ use alloy_consensus::{
constants::MAXIMUM_EXTRA_DATA_SIZE, BlockHeader as _, EMPTY_OMMER_ROOT_HASH, constants::MAXIMUM_EXTRA_DATA_SIZE, BlockHeader as _, EMPTY_OMMER_ROOT_HASH,
}; };
use alloy_eips::{calc_next_block_base_fee, eip4844::DATA_GAS_PER_BLOB, eip7840::BlobParams}; use alloy_eips::{eip4844::DATA_GAS_PER_BLOB, eip7840::BlobParams};
use reth_chainspec::{EthChainSpec, EthereumHardfork, EthereumHardforks}; use reth_chainspec::{EthChainSpec, EthereumHardforks};
use reth_consensus::ConsensusError; use reth_consensus::ConsensusError;
use reth_primitives_traits::{ use reth_primitives_traits::{
Block, BlockBody, BlockHeader, GotExpected, SealedBlock, SealedHeader, Block, BlockBody, BlockHeader, GotExpected, SealedBlock, SealedHeader,
@ -17,7 +17,7 @@ pub fn validate_header_gas<H: BlockHeader>(header: &H) -> Result<(), ConsensusEr
return Err(ConsensusError::HeaderGasUsedExceedsGasLimit { return Err(ConsensusError::HeaderGasUsedExceedsGasLimit {
gas_used: header.gas_used(), gas_used: header.gas_used(),
gas_limit: header.gas_limit(), gas_limit: header.gas_limit(),
}) });
} }
Ok(()) Ok(())
} }
@ -30,7 +30,7 @@ pub fn validate_header_base_fee<H: BlockHeader, ChainSpec: EthereumHardforks>(
) -> Result<(), ConsensusError> { ) -> Result<(), ConsensusError> {
if chain_spec.is_london_active_at_block(header.number()) && header.base_fee_per_gas().is_none() if chain_spec.is_london_active_at_block(header.number()) && header.base_fee_per_gas().is_none()
{ {
return Err(ConsensusError::BaseFeeMissing) return Err(ConsensusError::BaseFeeMissing);
} }
Ok(()) Ok(())
} }
@ -95,14 +95,14 @@ where
expected: header.ommers_hash(), expected: header.ommers_hash(),
} }
.into(), .into(),
)) ));
} }
let tx_root = body.calculate_tx_root(); let tx_root = body.calculate_tx_root();
if header.transactions_root() != tx_root { if header.transactions_root() != tx_root {
return Err(ConsensusError::BodyTransactionRootDiff( return Err(ConsensusError::BodyTransactionRootDiff(
GotExpected { got: tx_root, expected: header.transactions_root() }.into(), GotExpected { got: tx_root, expected: header.transactions_root() }.into(),
)) ));
} }
match (header.withdrawals_root(), body.calculate_withdrawals_root()) { match (header.withdrawals_root(), body.calculate_withdrawals_root()) {
@ -110,7 +110,7 @@ where
if withdrawals_root != header_withdrawals_root { if withdrawals_root != header_withdrawals_root {
return Err(ConsensusError::BodyWithdrawalsRootDiff( return Err(ConsensusError::BodyWithdrawalsRootDiff(
GotExpected { got: withdrawals_root, expected: header_withdrawals_root }.into(), GotExpected { got: withdrawals_root, expected: header_withdrawals_root }.into(),
)) ));
} }
} }
(None, None) => { (None, None) => {
@ -145,12 +145,12 @@ where
expected: block.ommers_hash(), expected: block.ommers_hash(),
} }
.into(), .into(),
)) ));
} }
// Check transaction root // Check transaction root
if let Err(error) = block.ensure_transaction_root_valid() { if let Err(error) = block.ensure_transaction_root_valid() {
return Err(ConsensusError::BodyTransactionRootDiff(error.into())) return Err(ConsensusError::BodyTransactionRootDiff(error.into()));
} }
// EIP-4895: Beacon chain push withdrawals as operations // EIP-4895: Beacon chain push withdrawals as operations
@ -179,14 +179,14 @@ pub fn validate_4844_header_standalone<H: BlockHeader>(header: &H) -> Result<(),
let excess_blob_gas = header.excess_blob_gas().ok_or(ConsensusError::ExcessBlobGasMissing)?; let excess_blob_gas = header.excess_blob_gas().ok_or(ConsensusError::ExcessBlobGasMissing)?;
if header.parent_beacon_block_root().is_none() { if header.parent_beacon_block_root().is_none() {
return Err(ConsensusError::ParentBeaconBlockRootMissing) return Err(ConsensusError::ParentBeaconBlockRootMissing);
} }
if blob_gas_used % DATA_GAS_PER_BLOB != 0 { if blob_gas_used % DATA_GAS_PER_BLOB != 0 {
return Err(ConsensusError::BlobGasUsedNotMultipleOfBlobGasPerBlob { return Err(ConsensusError::BlobGasUsedNotMultipleOfBlobGasPerBlob {
blob_gas_used, blob_gas_used,
blob_gas_per_blob: DATA_GAS_PER_BLOB, blob_gas_per_blob: DATA_GAS_PER_BLOB,
}) });
} }
// `excess_blob_gas` must also be a multiple of `DATA_GAS_PER_BLOB`. This will be checked later // `excess_blob_gas` must also be a multiple of `DATA_GAS_PER_BLOB`. This will be checked later
@ -195,7 +195,7 @@ pub fn validate_4844_header_standalone<H: BlockHeader>(header: &H) -> Result<(),
return Err(ConsensusError::ExcessBlobGasNotMultipleOfBlobGasPerBlob { return Err(ConsensusError::ExcessBlobGasNotMultipleOfBlobGasPerBlob {
excess_blob_gas, excess_blob_gas,
blob_gas_per_blob: DATA_GAS_PER_BLOB, blob_gas_per_blob: DATA_GAS_PER_BLOB,
}) });
} }
Ok(()) Ok(())
@ -229,13 +229,13 @@ pub fn validate_against_parent_hash_number<H: BlockHeader>(
return Err(ConsensusError::ParentBlockNumberMismatch { return Err(ConsensusError::ParentBlockNumberMismatch {
parent_block_number: parent.number(), parent_block_number: parent.number(),
block_number: header.number(), block_number: header.number(),
}) });
} }
if parent.hash() != header.parent_hash() { if parent.hash() != header.parent_hash() {
return Err(ConsensusError::ParentHashMismatch( return Err(ConsensusError::ParentHashMismatch(
GotExpected { got: header.parent_hash(), expected: parent.hash() }.into(), GotExpected { got: header.parent_hash(), expected: parent.hash() }.into(),
)) ));
} }
Ok(()) Ok(())
@ -247,37 +247,10 @@ pub fn validate_against_parent_eip1559_base_fee<
H: BlockHeader, H: BlockHeader,
ChainSpec: EthChainSpec + EthereumHardforks, ChainSpec: EthChainSpec + EthereumHardforks,
>( >(
header: &H, _header: &H,
parent: &H, _parent: &H,
chain_spec: &ChainSpec, _chain_spec: &ChainSpec,
) -> Result<(), ConsensusError> { ) -> Result<(), ConsensusError> {
if chain_spec.is_london_active_at_block(header.number()) {
let base_fee = header.base_fee_per_gas().ok_or(ConsensusError::BaseFeeMissing)?;
let expected_base_fee = if chain_spec
.ethereum_fork_activation(EthereumHardfork::London)
.transitions_at_block(header.number())
{
alloy_eips::eip1559::INITIAL_BASE_FEE
} else {
// This BaseFeeMissing will not happen as previous blocks are checked to have
// them.
let base_fee = parent.base_fee_per_gas().ok_or(ConsensusError::BaseFeeMissing)?;
calc_next_block_base_fee(
parent.gas_used(),
parent.gas_limit(),
base_fee,
chain_spec.base_fee_params_at_timestamp(header.timestamp()),
)
};
if expected_base_fee != base_fee {
return Err(ConsensusError::BaseFeeDiff(GotExpected {
expected: expected_base_fee,
got: base_fee,
}))
}
}
Ok(()) Ok(())
} }
@ -287,11 +260,11 @@ pub fn validate_against_parent_timestamp<H: BlockHeader>(
header: &H, header: &H,
parent: &H, parent: &H,
) -> Result<(), ConsensusError> { ) -> Result<(), ConsensusError> {
if header.timestamp() <= parent.timestamp() { if header.timestamp() < parent.timestamp() {
return Err(ConsensusError::TimestampIsInPast { return Err(ConsensusError::TimestampIsInPast {
parent_timestamp: parent.timestamp(), parent_timestamp: parent.timestamp(),
timestamp: header.timestamp(), timestamp: header.timestamp(),
}) });
} }
Ok(()) Ok(())
} }
@ -315,7 +288,7 @@ pub fn validate_against_parent_4844<H: BlockHeader>(
let parent_excess_blob_gas = parent.excess_blob_gas().unwrap_or(0); let parent_excess_blob_gas = parent.excess_blob_gas().unwrap_or(0);
if header.blob_gas_used().is_none() { if header.blob_gas_used().is_none() {
return Err(ConsensusError::BlobGasUsedMissing) return Err(ConsensusError::BlobGasUsedMissing);
} }
let excess_blob_gas = header.excess_blob_gas().ok_or(ConsensusError::ExcessBlobGasMissing)?; let excess_blob_gas = header.excess_blob_gas().ok_or(ConsensusError::ExcessBlobGasMissing)?;
@ -326,7 +299,7 @@ pub fn validate_against_parent_4844<H: BlockHeader>(
diff: GotExpected { got: excess_blob_gas, expected: expected_excess_blob_gas }, diff: GotExpected { got: excess_blob_gas, expected: expected_excess_blob_gas },
parent_excess_blob_gas, parent_excess_blob_gas,
parent_blob_gas_used, parent_blob_gas_used,
}) });
} }
Ok(()) Ok(())

View File

@ -185,9 +185,11 @@ where
// Calculate the state root and trie updates after re-execution. They should match // Calculate the state root and trie updates after re-execution. They should match
// the original ones. // the original ones.
let (re_executed_root, trie_output) = let (_re_executed_root, trie_output) =
state_provider.state_root_with_updates(hashed_state)?; state_provider.state_root_with_updates(hashed_state)?;
if let Some((original_updates, original_root)) = trie_updates { let re_executed_root = B256::ZERO;
if let Some((original_updates, _original_root)) = trie_updates {
let original_root = B256::ZERO;
if re_executed_root != original_root { if re_executed_root != original_root {
let filename = format!("{}_{}.state_root.diff", block.number(), block.hash()); let filename = format!("{}_{}.state_root.diff", block.number(), block.hash());
let diff_path = self.save_diff(filename, &re_executed_root, &original_root)?; let diff_path = self.save_diff(filename, &re_executed_root, &original_root)?;

View File

@ -2892,6 +2892,7 @@ where
task_elapsed = ?time_from_last_update, task_elapsed = ?time_from_last_update,
"State root task finished" "State root task finished"
); );
let task_state_root = B256::ZERO;
if task_state_root != sealed_block.header().state_root() || if task_state_root != sealed_block.header().state_root() ||
self.config.always_compare_trie_updates() self.config.always_compare_trie_updates()

View File

@ -14,9 +14,14 @@ workspace = true
# reth # reth
reth-cli.workspace = true reth-cli.workspace = true
reth-chainspec.workspace = true reth-chainspec.workspace = true
reth-primitives.workspace = true
# misc # misc
eyre.workspace = true eyre.workspace = true
once_cell.workspace = true
alloy-chains.workspace = true
alloy-primitives.workspace = true
serde_json.workspace = true
[dev-dependencies] [dev-dependencies]
clap.workspace = true clap.workspace = true

View File

@ -1,17 +1,97 @@
use reth_chainspec::{ChainSpec, DEV, HOLESKY, MAINNET, SEPOLIA}; extern crate alloc;
use alloy_primitives::{b256, Address, Bytes, B256, B64, U256};
use once_cell::sync::Lazy;
use reth_chainspec::{ChainSpec, DEV, DEV_HARDFORKS, HOLESKY, SEPOLIA};
use reth_cli::chainspec::{parse_genesis, ChainSpecParser}; use reth_cli::chainspec::{parse_genesis, ChainSpecParser};
use reth_primitives::{Header, SealedHeader};
use std::sync::Arc; use std::sync::Arc;
/// Chains supported by reth. First value should be used as the default. /// Chains supported by reth. First value should be used as the default.
pub const SUPPORTED_CHAINS: &[&str] = &["mainnet", "sepolia", "holesky", "dev"]; pub const SUPPORTED_CHAINS: &[&str] = &["mainnet", "sepolia", "holesky", "dev"];
static GENESIS_HASH: B256 =
b256!("d8fcc13b6a195b88b7b2da3722ff6cad767b13a8c1e9ffb1c73aa9d216d895f0");
/// The Hyperliqiud Mainnet spec
pub static HL_MAINNET: Lazy<alloc::sync::Arc<ChainSpec>> = Lazy::new(|| {
ChainSpec {
chain: alloy_chains::Chain::from_id(999),
// genesis contains empty alloc field because state at first bedrock block is imported
// manually from trusted source
genesis: serde_json::from_str(r#"{
"nonce": "0x0",
"timestamp": "0x6490fdd2",
"extraData": "0x",
"gasLimit": "0x1c9c380",
"difficulty": "0x0",
"mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000",
"coinbase": "0x0000000000000000000000000000000000000000",
"stateRoot": "0x5eb6e371a698b8d68f665192350ffcecbbbf322916f4b51bd79bb6887da3f494",
"alloc": {
"0x2222222222222222222222222222222222222222": {
"nonce": 0,
"balance": "0x33b2e3c9fd0803ce8000000",
"code": "0x608060405236603f5760405134815233907f88a5966d370b9919b20f3e2c13ff65706f196a4e32cc2c12bf57088f885258749060200160405180910390a2005b600080fdfea2646970667358221220ca425db50898ac19f9e4676e86e8ebed9853baa048942f6306fe8a86b8d4abb964736f6c63430008090033",
"storage": {}
},
"0x5555555555555555555555555555555555555555": {
"nonce": 0,
"balance": "0x0",
"code": "0x6080604052600436106100bc5760003560e01c8063313ce56711610074578063a9059cbb1161004e578063a9059cbb146102cb578063d0e30db0146100bc578063dd62ed3e14610311576100bc565b8063313ce5671461024b57806370a082311461027657806395d89b41146102b6576100bc565b806318160ddd116100a557806318160ddd146101aa57806323b872dd146101d15780632e1a7d4d14610221576100bc565b806306fdde03146100c6578063095ea7b314610150575b6100c4610359565b005b3480156100d257600080fd5b506100db6103a8565b6040805160208082528351818301528351919283929083019185019080838360005b838110156101155781810151838201526020016100fd565b50505050905090810190601f1680156101425780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b34801561015c57600080fd5b506101966004803603604081101561017357600080fd5b5073ffffffffffffffffffffffffffffffffffffffff8135169060200135610454565b604080519115158252519081900360200190f35b3480156101b657600080fd5b506101bf6104c7565b60408051918252519081900360200190f35b3480156101dd57600080fd5b50610196600480360360608110156101f457600080fd5b5073ffffffffffffffffffffffffffffffffffffffff8135811691602081013590911690604001356104cb565b34801561022d57600080fd5b506100c46004803603602081101561024457600080fd5b503561066b565b34801561025757600080fd5b50610260610700565b6040805160ff9092168252519081900360200190f35b34801561028257600080fd5b506101bf6004803603602081101561029957600080fd5b503573ffffffffffffffffffffffffffffffffffffffff16610709565b3480156102c257600080fd5b506100db61071b565b3480156102d757600080fd5b50610196600480360360408110156102ee57600080fd5b5073ffffffffffffffffffffffffffffffffffffffff8135169060200135610793565b34801561031d57600080fd5b506101bf6004803603604081101561033457600080fd5b5073ffffffffffffffffffffffffffffffffffffffff813581169160200135166107a7565b33600081815260036020908152604091829020805434908101909155825190815291517fe1fffcc4923d04b559f4d29a8bfc6cda04eb5b0d3c460751c2402c5c5cc9109c9281900390910190a2565b6000805460408051602060026001851615610100027fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0190941
693909304601f8101849004840282018401909252818152929183018282801561044c5780601f106104215761010080835404028352916020019161044c565b820191906000526020600020905b81548152906001019060200180831161042f57829003601f168201915b505050505081565b33600081815260046020908152604080832073ffffffffffffffffffffffffffffffffffffffff8716808552908352818420869055815186815291519394909390927f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b925928290030190a350600192915050565b4790565b73ffffffffffffffffffffffffffffffffffffffff83166000908152600360205260408120548211156104fd57600080fd5b73ffffffffffffffffffffffffffffffffffffffff84163314801590610573575073ffffffffffffffffffffffffffffffffffffffff841660009081526004602090815260408083203384529091529020547fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff14155b156105ed5773ffffffffffffffffffffffffffffffffffffffff841660009081526004602090815260408083203384529091529020548211156105b557600080fd5b73ffffffffffffffffffffffffffffffffffffffff841660009081526004602090815260408083203384529091529020805483900390555b73ffffffffffffffffffffffffffffffffffffffff808516600081815260036020908152604080832080548890039055938716808352918490208054870190558351868152935191937fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef929081900390910190a35060019392505050565b3360009081526003602052604090205481111561068757600080fd5b33600081815260036020526040808220805485900390555183156108fc0291849190818181858888f193505050501580156106c6573d6000803e3d6000fd5b5060408051828152905133917f7fcf532c15f0a6db0bd6d0e038bea71d30d808c7d98cb3bf7268a95bf5081b65919081900360200190a250565b60025460ff1681565b60036020526000908152604090205481565b60018054604080516020600284861615610100027fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0190941693909304601f8101849004840282018401909252818152929183018282801561044c5780601f106104215761010080835404028352916020019161044c565b60006107a03384846104cb565b9392505050565b60046020908152600092835260408084209091529082529020548156fea265627a7a7
2315820e87684b404839c5657b1e7820bfa5ac4539ac8c83c21e28ec1086123db902cfe64736f6c63430005110032",
"storage": {
"0x0000000000000000000000000000000000000000000000000000000000000000": "0x5772617070656420485950450000000000000000000000000000000000000018",
"0x0000000000000000000000000000000000000000000000000000000000000001": "0x574859504500000000000000000000000000000000000000000000000000000a",
"0x0000000000000000000000000000000000000000000000000000000000000002": "0x0000000000000000000000000000000000000000000000000000000000000012"
}
}
},
"number": "0x0",
"gasUsed": "0x0",
"parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000",
"parentBeaconBlockRoot": "0x0000000000000000000000000000000000000000000000000000000000000000"
}"#)
.expect("Can't deserialize Hyperliquid Mainnet genesis json"),
genesis_header: SealedHeader::new(
Header {
parent_hash: B256::ZERO,
number: 0,
timestamp: 0,
transactions_root: B256::ZERO,
receipts_root: B256::ZERO,
state_root: B256::ZERO,
gas_used: 0,
gas_limit: 0x1c9c380,
difficulty: U256::ZERO,
mix_hash: B256::ZERO,
extra_data: Bytes::new(),
nonce: B64::ZERO,
ommers_hash: B256::ZERO,
beneficiary: Address::ZERO,
logs_bloom: Default::default(),
base_fee_per_gas: Some(0),
withdrawals_root: Some(B256::ZERO),
blob_gas_used: Some(0),
excess_blob_gas: Some(0),
parent_beacon_block_root: Some(B256::ZERO),
requests_hash: Some(B256::ZERO),
},
GENESIS_HASH,
),
paris_block_and_final_difficulty: Some((0, U256::from(0))),
hardforks: DEV_HARDFORKS.clone(),
prune_delete_limit: 10000,
..Default::default()
}.into()
});
/// Clap value parser for [`ChainSpec`]s. /// Clap value parser for [`ChainSpec`]s.
/// ///
/// The value parser matches either a known chain, the path /// The value parser matches either a known chain, the path
/// to a json file, or a json formatted string in-memory. The json needs to be a Genesis struct. /// to a json file, or a json formatted string in-memory. The json needs to be a Genesis struct.
pub fn chain_value_parser(s: &str) -> eyre::Result<Arc<ChainSpec>, eyre::Error> { pub fn chain_value_parser(s: &str) -> eyre::Result<Arc<ChainSpec>, eyre::Error> {
Ok(match s { Ok(match s {
"mainnet" => MAINNET.clone(), "mainnet" => HL_MAINNET.clone(),
"sepolia" => SEPOLIA.clone(), "sepolia" => SEPOLIA.clone(),
"holesky" => HOLESKY.clone(), "holesky" => HOLESKY.clone(),
"dev" => DEV.clone(), "dev" => DEV.clone(),

View File

@ -21,10 +21,7 @@ use reth_consensus_common::validation::{
}; };
use reth_execution_types::BlockExecutionResult; use reth_execution_types::BlockExecutionResult;
use reth_primitives::{NodePrimitives, RecoveredBlock, SealedBlock, SealedHeader}; use reth_primitives::{NodePrimitives, RecoveredBlock, SealedBlock, SealedHeader};
use reth_primitives_traits::{ use reth_primitives_traits::{constants::MINIMUM_GAS_LIMIT, Block, BlockHeader};
constants::{GAS_LIMIT_BOUND_DIVISOR, MINIMUM_GAS_LIMIT},
Block, BlockHeader,
};
use std::{fmt::Debug, sync::Arc, time::SystemTime}; use std::{fmt::Debug, sync::Arc, time::SystemTime};
mod validation; mod validation;
@ -52,43 +49,13 @@ impl<ChainSpec: EthChainSpec + EthereumHardforks> EthBeaconConsensus<ChainSpec>
fn validate_against_parent_gas_limit<H: BlockHeader>( fn validate_against_parent_gas_limit<H: BlockHeader>(
&self, &self,
header: &SealedHeader<H>, header: &SealedHeader<H>,
parent: &SealedHeader<H>, _parent: &SealedHeader<H>,
) -> Result<(), ConsensusError> { ) -> Result<(), ConsensusError> {
// Determine the parent gas limit, considering elasticity multiplier on the London fork.
let parent_gas_limit = if !self.chain_spec.is_london_active_at_block(parent.number()) &&
self.chain_spec.is_london_active_at_block(header.number())
{
parent.gas_limit() *
self.chain_spec
.base_fee_params_at_timestamp(header.timestamp())
.elasticity_multiplier as u64
} else {
parent.gas_limit()
};
// Check for an increase in gas limit beyond the allowed threshold.
if header.gas_limit() > parent_gas_limit {
if header.gas_limit() - parent_gas_limit >= parent_gas_limit / GAS_LIMIT_BOUND_DIVISOR {
return Err(ConsensusError::GasLimitInvalidIncrease {
parent_gas_limit,
child_gas_limit: header.gas_limit(),
})
}
}
// Check for a decrease in gas limit beyond the allowed threshold.
else if parent_gas_limit - header.gas_limit() >=
parent_gas_limit / GAS_LIMIT_BOUND_DIVISOR
{
return Err(ConsensusError::GasLimitInvalidDecrease {
parent_gas_limit,
child_gas_limit: header.gas_limit(),
})
}
// Check if the self gas limit is below the minimum required limit. // Check if the self gas limit is below the minimum required limit.
else if header.gas_limit() < MINIMUM_GAS_LIMIT { if header.gas_limit() < MINIMUM_GAS_LIMIT {
return Err(ConsensusError::GasLimitInvalidMinimum { return Err(ConsensusError::GasLimitInvalidMinimum {
child_gas_limit: header.gas_limit(), child_gas_limit: header.gas_limit(),
}) });
} }
Ok(()) Ok(())
@ -139,33 +106,33 @@ where
validate_header_base_fee(header.header(), &self.chain_spec)?; validate_header_base_fee(header.header(), &self.chain_spec)?;
// EIP-4895: Beacon chain push withdrawals as operations // EIP-4895: Beacon chain push withdrawals as operations
if self.chain_spec.is_shanghai_active_at_timestamp(header.timestamp()) && if self.chain_spec.is_shanghai_active_at_timestamp(header.timestamp())
header.withdrawals_root().is_none() && header.withdrawals_root().is_none()
{ {
return Err(ConsensusError::WithdrawalsRootMissing) return Err(ConsensusError::WithdrawalsRootMissing);
} else if !self.chain_spec.is_shanghai_active_at_timestamp(header.timestamp()) && } else if !self.chain_spec.is_shanghai_active_at_timestamp(header.timestamp())
header.withdrawals_root().is_some() && header.withdrawals_root().is_some()
{ {
return Err(ConsensusError::WithdrawalsRootUnexpected) return Err(ConsensusError::WithdrawalsRootUnexpected);
} }
// Ensures that EIP-4844 fields are valid once cancun is active. // Ensures that EIP-4844 fields are valid once cancun is active.
if self.chain_spec.is_cancun_active_at_timestamp(header.timestamp()) { if self.chain_spec.is_cancun_active_at_timestamp(header.timestamp()) {
validate_4844_header_standalone(header.header())?; validate_4844_header_standalone(header.header())?;
} else if header.blob_gas_used().is_some() { } else if header.blob_gas_used().is_some() {
return Err(ConsensusError::BlobGasUsedUnexpected) return Err(ConsensusError::BlobGasUsedUnexpected);
} else if header.excess_blob_gas().is_some() { } else if header.excess_blob_gas().is_some() {
return Err(ConsensusError::ExcessBlobGasUnexpected) return Err(ConsensusError::ExcessBlobGasUnexpected);
} else if header.parent_beacon_block_root().is_some() { } else if header.parent_beacon_block_root().is_some() {
return Err(ConsensusError::ParentBeaconBlockRootUnexpected) return Err(ConsensusError::ParentBeaconBlockRootUnexpected);
} }
if self.chain_spec.is_prague_active_at_timestamp(header.timestamp()) { if self.chain_spec.is_prague_active_at_timestamp(header.timestamp()) {
if header.requests_hash().is_none() { if header.requests_hash().is_none() {
return Err(ConsensusError::RequestsHashMissing) return Err(ConsensusError::RequestsHashMissing);
} }
} else if header.requests_hash().is_some() { } else if header.requests_hash().is_some() {
return Err(ConsensusError::RequestsHashUnexpected) return Err(ConsensusError::RequestsHashUnexpected);
} }
Ok(()) Ok(())
@ -208,15 +175,15 @@ where
if is_post_merge { if is_post_merge {
if !header.difficulty().is_zero() { if !header.difficulty().is_zero() {
return Err(ConsensusError::TheMergeDifficultyIsNotZero) return Err(ConsensusError::TheMergeDifficultyIsNotZero);
} }
if !header.nonce().is_some_and(|nonce| nonce.is_zero()) { if !header.nonce().is_some_and(|nonce| nonce.is_zero()) {
return Err(ConsensusError::TheMergeNonceIsNotZero) return Err(ConsensusError::TheMergeNonceIsNotZero);
} }
if header.ommers_hash() != EMPTY_OMMER_ROOT_HASH { if header.ommers_hash() != EMPTY_OMMER_ROOT_HASH {
return Err(ConsensusError::TheMergeOmmerRootIsNotEmpty) return Err(ConsensusError::TheMergeOmmerRootIsNotEmpty);
} }
// Post-merge, the consensus layer is expected to perform checks such that the block // Post-merge, the consensus layer is expected to perform checks such that the block
@ -245,7 +212,7 @@ where
return Err(ConsensusError::TimestampIsInFuture { return Err(ConsensusError::TimestampIsInFuture {
timestamp: header.timestamp(), timestamp: header.timestamp(),
present_timestamp, present_timestamp,
}) });
} }
validate_header_extra_data(header)?; validate_header_extra_data(header)?;

View File

@ -28,7 +28,7 @@ where
return Err(ConsensusError::BlockGasUsed { return Err(ConsensusError::BlockGasUsed {
gas: GotExpected { got: cumulative_gas_used, expected: block.header().gas_used() }, gas: GotExpected { got: cumulative_gas_used, expected: block.header().gas_used() },
gas_spent_by_tx: gas_spent_by_transactions(receipts), gas_spent_by_tx: gas_spent_by_transactions(receipts),
}) });
} }
// Before Byzantium, receipts contained state root that would mean that expensive // Before Byzantium, receipts contained state root that would mean that expensive
@ -36,24 +36,29 @@ where
// transaction This was replaced with is_success flag. // transaction This was replaced with is_success flag.
// See more about EIP here: https://eips.ethereum.org/EIPS/eip-658 // See more about EIP here: https://eips.ethereum.org/EIPS/eip-658
if chain_spec.is_byzantium_active_at_block(block.header().number()) { if chain_spec.is_byzantium_active_at_block(block.header().number()) {
if let Err(error) = // Filter out system tx receipts
verify_receipts(block.header().receipts_root(), block.header().logs_bloom(), receipts) let receipts: Vec<R> =
{ receipts.iter().filter(|r| r.cumulative_gas_used() != 0).cloned().collect::<Vec<_>>();
if let Err(error) = verify_receipts(
block.header().receipts_root(),
block.header().logs_bloom(),
receipts.as_slice(),
) {
tracing::debug!(%error, ?receipts, "receipts verification failed"); tracing::debug!(%error, ?receipts, "receipts verification failed");
return Err(error) return Err(error);
} }
} }
// Validate that the header requests hash matches the calculated requests hash // Validate that the header requests hash matches the calculated requests hash
if chain_spec.is_prague_active_at_timestamp(block.header().timestamp()) { if chain_spec.is_prague_active_at_timestamp(block.header().timestamp()) {
let Some(header_requests_hash) = block.header().requests_hash() else { let Some(header_requests_hash) = block.header().requests_hash() else {
return Err(ConsensusError::RequestsHashMissing) return Err(ConsensusError::RequestsHashMissing);
}; };
let requests_hash = requests.requests_hash(); let requests_hash = requests.requests_hash();
if requests_hash != header_requests_hash { if requests_hash != header_requests_hash {
return Err(ConsensusError::BodyRequestsHashDiff( return Err(ConsensusError::BodyRequestsHashDiff(
GotExpected::new(requests_hash, header_requests_hash).into(), GotExpected::new(requests_hash, header_requests_hash).into(),
)) ));
} }
} }
@ -95,13 +100,13 @@ fn compare_receipts_root_and_logs_bloom(
if calculated_receipts_root != expected_receipts_root { if calculated_receipts_root != expected_receipts_root {
return Err(ConsensusError::BodyReceiptRootDiff( return Err(ConsensusError::BodyReceiptRootDiff(
GotExpected { got: calculated_receipts_root, expected: expected_receipts_root }.into(), GotExpected { got: calculated_receipts_root, expected: expected_receipts_root }.into(),
)) ));
} }
if calculated_logs_bloom != expected_logs_bloom { if calculated_logs_bloom != expected_logs_bloom {
return Err(ConsensusError::BodyBloomLogDiff( return Err(ConsensusError::BodyBloomLogDiff(
GotExpected { got: calculated_logs_bloom, expected: expected_logs_bloom }.into(), GotExpected { got: calculated_logs_bloom, expected: expected_logs_bloom }.into(),
)) ));
} }
Ok(()) Ok(())

View File

@ -8,7 +8,7 @@ use alloc::{boxed::Box, sync::Arc, vec::Vec};
use alloy_consensus::{Header, Transaction}; use alloy_consensus::{Header, Transaction};
use alloy_eips::{eip4895::Withdrawals, eip6110, eip7685::Requests}; use alloy_eips::{eip4895::Withdrawals, eip6110, eip7685::Requests};
use alloy_evm::FromRecoveredTx; use alloy_evm::FromRecoveredTx;
use alloy_primitives::{Address, B256}; use alloy_primitives::{address, Address, B256};
use reth_chainspec::{ChainSpec, EthereumHardfork, EthereumHardforks, MAINNET}; use reth_chainspec::{ChainSpec, EthereumHardfork, EthereumHardforks, MAINNET};
use reth_evm::{ use reth_evm::{
execute::{ execute::{
@ -187,10 +187,13 @@ where
transaction_gas_limit: tx.gas_limit(), transaction_gas_limit: tx.gas_limit(),
block_available_gas, block_available_gas,
} }
.into()) .into());
} }
const HL_SYSETM_TX_FROM_ADDR: Address = address!("2222222222222222222222222222222222222222");
let hash = tx.hash(); let hash = tx.hash();
let is_system_transaction = tx.signer() == HL_SYSETM_TX_FROM_ADDR;
// Execute transaction. // Execute transaction.
let result_and_state = let result_and_state =
@ -203,7 +206,9 @@ where
let gas_used = result.gas_used(); let gas_used = result.gas_used();
// append gas used // append gas used
self.gas_used += gas_used; if !is_system_transaction {
self.gas_used += gas_used;
}
// Push transaction changeset and calculate header bloom filter for receipt. // Push transaction changeset and calculate header bloom filter for receipt.
self.receipts.push(Receipt { self.receipts.push(Receipt {

View File

@ -80,7 +80,10 @@ impl ConfigureEvmEnv for EthEvmConfig {
let spec = config::revm_spec(self.chain_spec(), header); let spec = config::revm_spec(self.chain_spec(), header);
// configure evm env based on parent block // configure evm env based on parent block
let cfg_env = CfgEnv::new().with_chain_id(self.chain_spec.chain().id()).with_spec(spec); let mut cfg_env = CfgEnv::new().with_chain_id(self.chain_spec.chain().id()).with_spec(spec);
// this one is effective; todo: disable after system transaction
cfg_env.disable_base_fee = true;
cfg_env.disable_eip3607 = true;
let block_env = BlockEnv { let block_env = BlockEnv {
number: header.number(), number: header.number(),

View File

@ -12,7 +12,8 @@ use alloy_eips::{
}; };
use alloy_evm::FromRecoveredTx; use alloy_evm::FromRecoveredTx;
use alloy_primitives::{ use alloy_primitives::{
keccak256, Address, Bytes, ChainId, PrimitiveSignature as Signature, TxHash, TxKind, B256, U256, address, keccak256, Address, Bytes, ChainId, PrimitiveSignature as Signature, TxHash, TxKind,
B256, U256,
}; };
use alloy_rlp::{Decodable, Encodable}; use alloy_rlp::{Decodable, Encodable};
use core::hash::{Hash, Hasher}; use core::hash::{Hash, Hasher};
@ -329,9 +330,9 @@ impl Hash for TransactionSigned {
impl PartialEq for TransactionSigned { impl PartialEq for TransactionSigned {
fn eq(&self, other: &Self) -> bool { fn eq(&self, other: &Self) -> bool {
self.signature == other.signature && self.signature == other.signature
self.transaction == other.transaction && && self.transaction == other.transaction
self.tx_hash() == other.tx_hash() && self.tx_hash() == other.tx_hash()
} }
} }
@ -579,13 +580,13 @@ impl<'a> arbitrary::Arbitrary<'a> for TransactionSigned {
) )
.unwrap(); .unwrap();
Ok(Self { transaction, signature, hash: Default::default() }) Ok(Self { transaction, signature, ..Default::default() })
} }
} }
impl InMemorySize for TransactionSigned { impl InMemorySize for TransactionSigned {
fn size(&self) -> usize { fn size(&self) -> usize {
let Self { hash: _, signature, transaction } = self; let Self { hash: _, signature, transaction, .. } = self;
self.tx_hash().size() + signature.size() + transaction.size() self.tx_hash().size() + signature.size() + transaction.size()
} }
} }
@ -614,42 +615,26 @@ impl Decodable2718 for TransactionSigned {
TxType::Legacy => Err(Eip2718Error::UnexpectedType(0)), TxType::Legacy => Err(Eip2718Error::UnexpectedType(0)),
TxType::Eip2930 => { TxType::Eip2930 => {
let (tx, signature) = TxEip2930::rlp_decode_with_signature(buf)?; let (tx, signature) = TxEip2930::rlp_decode_with_signature(buf)?;
Ok(Self { Ok(Self { transaction: Transaction::Eip2930(tx), signature, ..Default::default() })
transaction: Transaction::Eip2930(tx),
signature,
hash: Default::default(),
})
} }
TxType::Eip1559 => { TxType::Eip1559 => {
let (tx, signature) = TxEip1559::rlp_decode_with_signature(buf)?; let (tx, signature) = TxEip1559::rlp_decode_with_signature(buf)?;
Ok(Self { Ok(Self { transaction: Transaction::Eip1559(tx), signature, ..Default::default() })
transaction: Transaction::Eip1559(tx),
signature,
hash: Default::default(),
})
} }
TxType::Eip4844 => { TxType::Eip4844 => {
let (tx, signature) = TxEip4844::rlp_decode_with_signature(buf)?; let (tx, signature) = TxEip4844::rlp_decode_with_signature(buf)?;
Ok(Self { Ok(Self { transaction: Transaction::Eip4844(tx), signature, ..Default::default() })
transaction: Transaction::Eip4844(tx),
signature,
hash: Default::default(),
})
} }
TxType::Eip7702 => { TxType::Eip7702 => {
let (tx, signature) = TxEip7702::rlp_decode_with_signature(buf)?; let (tx, signature) = TxEip7702::rlp_decode_with_signature(buf)?;
Ok(Self { Ok(Self { transaction: Transaction::Eip7702(tx), signature, ..Default::default() })
transaction: Transaction::Eip7702(tx),
signature,
hash: Default::default(),
})
} }
} }
} }
fn fallback_decode(buf: &mut &[u8]) -> Eip2718Result<Self> { fn fallback_decode(buf: &mut &[u8]) -> Eip2718Result<Self> {
let (tx, signature) = TxLegacy::rlp_decode_with_signature(buf)?; let (tx, signature) = TxLegacy::rlp_decode_with_signature(buf)?;
Ok(Self { transaction: Transaction::Legacy(tx), signature, hash: Default::default() }) Ok(Self { transaction: Transaction::Legacy(tx), signature, ..Default::default() })
} }
} }
@ -848,6 +833,13 @@ impl SignedTransaction for TransactionSigned {
} }
fn recover_signer(&self) -> Result<Address, RecoveryError> { fn recover_signer(&self) -> Result<Address, RecoveryError> {
const HL_SYSTEM_TX_FROM_ADDR: Address =
address!("2222222222222222222222222222222222222222");
let signature = self.signature();
if signature.r() == U256::from(1) && signature.s() == U256::from(1) && signature.v() == true
{
return Ok(HL_SYSTEM_TX_FROM_ADDR);
}
let signature_hash = self.signature_hash(); let signature_hash = self.signature_hash();
recover_signer(&self.signature, signature_hash) recover_signer(&self.signature, signature_hash)
} }

View File

@ -20,3 +20,5 @@ reth-primitives-traits.workspace = true
# alloy # alloy
alloy-rpc-types-engine.workspace = true alloy-rpc-types-engine.workspace = true
alloy-consensus.workspace = true alloy-consensus.workspace = true
alloy-eips.workspace = true
alloy-primitives.workspace = true

View File

@ -12,6 +12,7 @@ pub mod cancun;
pub mod prague; pub mod prague;
pub mod shanghai; pub mod shanghai;
use alloy_primitives::{address, Address};
use alloy_rpc_types_engine::{ExecutionData, PayloadError}; use alloy_rpc_types_engine::{ExecutionData, PayloadError};
use reth_chainspec::EthereumHardforks; use reth_chainspec::EthereumHardforks;
use reth_primitives::SealedBlock; use reth_primitives::SealedBlock;
@ -84,19 +85,41 @@ impl<ChainSpec: EthereumHardforks> ExecutionPayloadValidator<ChainSpec> {
&self, &self,
payload: ExecutionData, payload: ExecutionData,
) -> Result<SealedBlock<reth_primitives::Block<T>>, PayloadError> { ) -> Result<SealedBlock<reth_primitives::Block<T>>, PayloadError> {
let ExecutionData { payload, sidecar } = payload; let ExecutionData { mut payload, sidecar } = payload;
let expected_hash = payload.block_hash(); let expected_hash = payload.block_hash();
// First parse the block // First parse the block
let sealed_block = payload.try_into_block_with_sidecar(&sidecar)?.seal_slow(); const HL_SYSTEM_TX_FROM_ADDR: Address =
address!("2222222222222222222222222222222222222222");
let transactions = payload.as_v1().transactions.clone();
let (normal, system) = transactions.into_iter().partition(|tx| {
let tx = T::decode_2718(&mut tx.iter().as_slice());
match tx {
Ok(tx) => {
!matches!(tx.recover_signer(), Ok(address) if HL_SYSTEM_TX_FROM_ADDR == address)
}
Err(_) => true,
}
});
payload.as_v1_mut().transactions = normal;
let mut block = payload.try_into_block_with_sidecar(&sidecar)?;
block.body.transactions = system
.iter()
.map(|tx| {
T::decode_2718(&mut tx.iter().as_slice())
.expect("transaction should be valid")
})
.chain(block.body.transactions)
.collect();
let sealed_block = block.seal_slow();
// Ensure the hash included in the payload matches the block hash // Ensure the hash included in the payload matches the block hash
if expected_hash != sealed_block.hash() { if expected_hash != sealed_block.hash() {
return Err(PayloadError::BlockHash { return Err(PayloadError::BlockHash {
execution: sealed_block.hash(), execution: sealed_block.hash(),
consensus: expected_hash, consensus: expected_hash,
}) });
} }
shanghai::ensure_well_formed_fields( shanghai::ensure_well_formed_fields(

View File

@ -8,6 +8,7 @@ use alloc::{fmt, vec::Vec};
use alloy_consensus::{Transaction, Typed2718}; use alloy_consensus::{Transaction, Typed2718};
use alloy_eips::{eip2718::Encodable2718, eip4895::Withdrawals}; use alloy_eips::{eip2718::Encodable2718, eip4895::Withdrawals};
use alloy_primitives::{Address, Bytes, B256}; use alloy_primitives::{Address, Bytes, B256};
use revm_primitives::address;
/// Helper trait that unifies all behaviour required by transaction to support full node operations. /// Helper trait that unifies all behaviour required by transaction to support full node operations.
pub trait FullBlockBody: BlockBody<Transaction: FullSignedTx> + MaybeSerdeBincodeCompat {} pub trait FullBlockBody: BlockBody<Transaction: FullSignedTx> + MaybeSerdeBincodeCompat {}
@ -81,7 +82,15 @@ pub trait BlockBody:
/// Calculate the transaction root for the block body. /// Calculate the transaction root for the block body.
fn calculate_tx_root(&self) -> B256 { fn calculate_tx_root(&self) -> B256 {
alloy_consensus::proofs::calculate_transaction_root(self.transactions()) const HL_SYSETM_TX_FROM_ADDR: Address =
address!("2222222222222222222222222222222222222222");
let transactions: Vec<Self::Transaction> = self
.transactions()
.into_iter()
.filter(|tx| !matches!(tx.recover_signer(), Ok(address) if HL_SYSETM_TX_FROM_ADDR == address))
.cloned()
.collect::<Vec<_>>();
alloy_consensus::proofs::calculate_transaction_root(transactions.as_slice())
} }
/// Returns block withdrawals if any. /// Returns block withdrawals if any.

View File

@ -12,6 +12,7 @@ use alloy_consensus::{
use alloy_eips::eip2718::{Decodable2718, Encodable2718}; use alloy_eips::eip2718::{Decodable2718, Encodable2718};
use alloy_primitives::{keccak256, Address, PrimitiveSignature as Signature, TxHash, B256}; use alloy_primitives::{keccak256, Address, PrimitiveSignature as Signature, TxHash, B256};
use core::hash::Hash; use core::hash::Hash;
use revm_primitives::{address, U256};
/// Helper trait that unifies all behaviour required by block to support full node operations. /// Helper trait that unifies all behaviour required by block to support full node operations.
pub trait FullSignedTx: SignedTransaction + MaybeCompact + MaybeSerdeBincodeCompat {} pub trait FullSignedTx: SignedTransaction + MaybeCompact + MaybeSerdeBincodeCompat {}
@ -166,6 +167,11 @@ impl SignedTransaction for PooledTransaction {
&self, &self,
buf: &mut Vec<u8>, buf: &mut Vec<u8>,
) -> Result<Address, RecoveryError> { ) -> Result<Address, RecoveryError> {
let signature = self.signature();
if signature.r() == U256::from(1) && signature.s() == U256::from(1) && signature.v() == true
{
return Ok(address!("2222222222222222222222222222222222222222"));
}
match self { match self {
Self::Legacy(tx) => tx.tx().encode_for_signing(buf), Self::Legacy(tx) => tx.tx().encode_for_signing(buf),
Self::Eip2930(tx) => tx.tx().encode_for_signing(buf), Self::Eip2930(tx) => tx.tx().encode_for_signing(buf),
@ -174,7 +180,7 @@ impl SignedTransaction for PooledTransaction {
Self::Eip4844(tx) => tx.tx().encode_for_signing(buf), Self::Eip4844(tx) => tx.tx().encode_for_signing(buf),
} }
let signature_hash = keccak256(buf); let signature_hash = keccak256(buf);
recover_signer_unchecked(self.signature(), signature_hash) recover_signer_unchecked(signature, signature_hash)
} }
} }

View File

@ -1,5 +1,5 @@
use crate::primitives::alloy_primitives::{BlockNumber, StorageKey, StorageValue}; use crate::primitives::alloy_primitives::{BlockNumber, StorageKey, StorageValue};
use alloy_primitives::{Address, B256, U256}; use alloy_primitives::{keccak256, Address, B256, U256};
use core::ops::{Deref, DerefMut}; use core::ops::{Deref, DerefMut};
use reth_primitives_traits::Account; use reth_primitives_traits::Account;
use reth_storage_api::{AccountReader, BlockHashReader, StateProvider}; use reth_storage_api::{AccountReader, BlockHashReader, StateProvider};
@ -159,7 +159,11 @@ impl<DB: EvmStateProvider> DatabaseRef for StateProviderDatabase<DB> {
/// ///
/// Returns `Ok` with the block hash if found, or the default hash otherwise. /// Returns `Ok` with the block hash if found, or the default hash otherwise.
fn block_hash_ref(&self, number: u64) -> Result<B256, Self::Error> { fn block_hash_ref(&self, number: u64) -> Result<B256, Self::Error> {
// Get the block hash or default hash with an attempt to convert U256 block number to u64 if number >= 270000 {
Ok(self.0.block_hash(number)?.unwrap_or_default()) // Get the block hash or default hash with an attempt to convert U256 block number to u64
Ok(self.0.block_hash(number)?.unwrap_or_default())
} else {
Ok(keccak256(number.to_string().as_bytes()))
}
} }
} }

View File

@ -6,6 +6,7 @@ use alloy_primitives::PrimitiveSignature as Signature;
use alloy_rpc_types::TransactionRequest; use alloy_rpc_types::TransactionRequest;
use alloy_rpc_types_eth::{Transaction, TransactionInfo}; use alloy_rpc_types_eth::{Transaction, TransactionInfo};
use reth_primitives::{Recovered, TransactionSigned}; use reth_primitives::{Recovered, TransactionSigned};
use reth_primitives_traits::SignedTransaction;
use reth_rpc_eth_api::EthApiTypes; use reth_rpc_eth_api::EthApiTypes;
use reth_rpc_eth_types::EthApiError; use reth_rpc_eth_types::EthApiError;
use reth_rpc_types_compat::TransactionCompat; use reth_rpc_types_compat::TransactionCompat;
@ -43,6 +44,7 @@ where
tx_info: TransactionInfo, tx_info: TransactionInfo,
) -> Result<Self::Transaction, Self::Error> { ) -> Result<Self::Transaction, Self::Error> {
let (tx, from) = tx.into_parts(); let (tx, from) = tx.into_parts();
let from = tx.recover_signer().unwrap_or(from);
let inner: TxEnvelope = tx.into(); let inner: TxEnvelope = tx.into();
let TransactionInfo { let TransactionInfo {
@ -70,7 +72,7 @@ where
request: TransactionRequest, request: TransactionRequest,
) -> Result<TransactionSigned, Self::Error> { ) -> Result<TransactionSigned, Self::Error> {
let Ok(tx) = request.build_typed_tx() else { let Ok(tx) = request.build_typed_tx() else {
return Err(EthApiError::TransactionConversionError) return Err(EthApiError::TransactionConversionError);
}; };
// Create an empty signature for the transaction. // Create an empty signature for the transaction.

View File

@ -3067,7 +3067,7 @@ impl<TX: DbTxMut + DbTx + 'static, N: NodeTypesForProvider + 'static> BlockWrite
return Ok(()) return Ok(())
} }
let first_number = blocks.first().unwrap().number(); let first_number: u64 = blocks.first().unwrap().number();
let last = blocks.last().unwrap(); let last = blocks.last().unwrap();
let last_block_number = last.number(); let last_block_number = last.number();