first commit

This commit is contained in:
sprites0
2025-06-11 04:31:22 +09:00
commit f95c755f26
19 changed files with 13535 additions and 0 deletions

1
.gitignore vendored Normal file
View File

@ -0,0 +1 @@
/target

11772
Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

138
Cargo.toml Normal file
View File

@ -0,0 +1,138 @@
[package]
name = "reth_hl"
version = "0.1.0"
edition = "2021"
# Library target — consumed by the binary below as `reth_hl::...`.
[lib]
name = "reth_hl"
path = "src/lib.rs"
# Node binary entrypoint.
[[bin]]
name = "reth-hl"
path = "src/main.rs"
# reth crates are consumed via path dependencies: a sibling checkout of the
# reth repository is expected at `../reth`.
[dependencies]
reth = { path = "../reth/bin/reth" }
reth-cli = { path = "../reth/crates/cli/cli" }
reth-cli-commands = { path = "../reth/crates/cli/commands" }
reth-basic-payload-builder = { path = "../reth/crates/payload/basic" }
reth-db = { path = "../reth/crates/storage/db", default-features = false }
reth-chainspec = { path = "../reth/crates/chainspec", default-features = false }
reth-cli-util = { path = "../reth/crates/cli/util" }
reth-discv4 = { path = "../reth/crates/net/discv4", features = ["test-utils"] }
reth-engine-primitives = { path = "../reth/crates/engine/primitives", default-features = false }
reth-ethereum-forks = { path = "../reth/crates/ethereum/hardforks", default-features = false, features = ["serde"] }
reth-ethereum-payload-builder = { path = "../reth/crates/ethereum/payload" }
reth-ethereum-primitives = { path = "../reth/crates/ethereum/primitives", default-features = false }
reth-eth-wire = { path = "../reth/crates/net/eth-wire" }
reth-eth-wire-types = { path = "../reth/crates/net/eth-wire-types" }
reth-evm = { path = "../reth/crates/evm/evm", default-features = false }
reth-evm-ethereum = { path = "../reth/crates/ethereum/evm", default-features = false }
reth-node-core = { path = "../reth/crates/node/core" }
reth-revm = { path = "../reth/crates/revm", default-features = false }
# NOTE(review): several reth deps pull in "test-utils" features in a
# production dependency section — confirm this is intentional and not a
# leftover from development.
reth-network = { path = "../reth/crates/net/network", features = ["test-utils"] }
reth-network-p2p = { path = "../reth/crates/net/p2p" }
reth-network-api = { path = "../reth/crates/net/network-api" }
reth-node-ethereum = { path = "../reth/crates/ethereum/node", features = ["test-utils"] }
reth-network-peers = { path = "../reth/crates/net/peers", default-features = false }
# NOTE(review): an optimism RPC crate in a Hyperliquid node — verify it is
# actually used.
reth-optimism-rpc = { path = "../reth/crates/optimism/rpc" }
reth-payload-primitives = { path = "../reth/crates/payload/primitives" }
reth-primitives = { path = "../reth/crates/primitives", default-features = false }
reth-primitives-traits = { path = "../reth/crates/primitives-traits", default-features = false }
reth-provider = { path = "../reth/crates/storage/provider", features = ["test-utils"] }
reth-rpc-eth-api = { path = "../reth/crates/rpc/rpc-eth-api" }
reth-rpc-engine-api = { path = "../reth/crates/rpc/rpc-engine-api" }
reth-tracing = { path = "../reth/crates/tracing" }
reth-trie-common = { path = "../reth/crates/trie/common", default-features = false }
reth-trie-db = { path = "../reth/crates/trie/db" }
revm = { version = "24.0.1" }
# alloy dependencies
alloy-genesis = "1.0.9"
alloy-consensus = "1.0.9"
alloy-chains = "0.2.0"
alloy-eips = "1.0.9"
alloy-evm = "0.10"
alloy-json-abi = { version = "1.0.0", default-features = false }
alloy-dyn-abi = "1.1.0"
alloy-network = "1.0.9"
alloy-primitives = { version = "1.1.0", default-features = false, features = ["map-foldhash"] }
alloy-rlp = { version = "0.3.10", default-features = false, features = ["core-net"] }
alloy-rpc-types = { version = "1.0.9", features = ["engine"] }
alloy-rpc-types-eth = "1.0.9"
alloy-rpc-types-engine = "1.0.9"
alloy-signer = "1.0.9"
alloy-sol-macro = "1.1.0"
alloy-sol-types = { version = "1.1.0", default-features = false }
jsonrpsee = "0.25.1"
jsonrpsee-core = { version = "0.25.1" }
jsonrpsee-types = "0.25.1"
# misc dependencies
auto_impl = "1"
async-trait = "0.1"
bytes = "1.5"
clap = { version = "4.4", features = ["derive"] }
cfg-if = { version = "1.0", default-features = false }
derive_more = "0.99"
eyre = "0.6"
futures = "0.3"
lazy_static = "1.4.0"
once_cell = { version = "1.19", default-features = false, features = ["alloc"] }
parity-bytes = { version = "0.1.2", default-features = false }
parking_lot = "0.12"
# Optional: only compiled in via the `secp256k1` feature below.
secp256k1 = { version = "0.28", features = ["global-context", "std", "recovery"], optional = true }
serde = { version = "1.0", features = ["derive"], default-features = false }
serde_json = "1.0"
thiserror = "1.0"
tokio = { version = "1.36", features = ["full"] }
tokio-stream = "0.1"
tracing = "0.1"
rmp-serde = "1.0.0"
itertools = "0.14.0"
# jemalloc-related deps only make sense on unix targets.
# NOTE(review): tikv-jemalloc-ctl is unconditional here while the allocator
# itself is gated behind the `jemalloc` feature — confirm both are needed.
[target.'cfg(unix)'.dependencies]
tikv-jemalloc-ctl = "0.6"
tikv-jemallocator = { version = "0.6", optional = true }
libc = "0.2"
[features]
default = ["secp256k1", "jemalloc"]
jemalloc = ["dep:tikv-jemallocator"]
dev = [
"reth-cli-commands/arbitrary",
"reth/dev",
"revm/dev",
]
secp256k1 = ["dep:secp256k1"]
# Forward `serde` support to every dependency that exposes it.
serde = [
"alloy-chains/serde",
"alloy-consensus/serde",
"alloy-eips/serde",
"alloy-primitives/serde",
"alloy-rpc-types-engine/serde",
"alloy-rpc-types-eth/serde",
"bytes/serde",
"parking_lot/serde",
"reth-eth-wire/serde",
"reth-eth-wire-types/serde",
"reth-ethereum-forks/serde",
"reth-ethereum-primitives/serde",
"reth-network/serde",
"reth-network-api/serde",
"reth-primitives-traits/serde",
"reth-revm/serde",
"reth-trie-common/serde",
"reth-trie-db/serde",
"revm/serde",
"secp256k1/serde",
]
# RPC client-side support (used by tooling, not the node itself).
client = [
"jsonrpsee/client",
"jsonrpsee/async-client",
"reth-rpc-eth-api/client",
]

View File

@ -0,0 +1,32 @@
{
"nonce": "0x0",
"timestamp": "0x6490fdd2",
"extraData": "0x",
"gasLimit": "0x1c9c380",
"difficulty": "0x0",
"mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000",
"coinbase": "0x0000000000000000000000000000000000000000",
"stateRoot": "0x5eb6e371a698b8d68f665192350ffcecbbbf322916f4b51bd79bb6887da3f494",
"alloc": {
"0x2222222222222222222222222222222222222222": {
"nonce": 0,
"balance": "0x33b2e3c9fd0803ce8000000",
"code": "0x608060405236603f5760405134815233907f88a5966d370b9919b20f3e2c13ff65706f196a4e32cc2c12bf57088f885258749060200160405180910390a2005b600080fdfea2646970667358221220ca425db50898ac19f9e4676e86e8ebed9853baa048942f6306fe8a86b8d4abb964736f6c63430008090033",
"storage": {}
},
"0x5555555555555555555555555555555555555555": {
"nonce": 0,
"balance": "0x0",
"code": "0x6080604052600436106100bc5760003560e01c8063313ce56711610074578063a9059cbb1161004e578063a9059cbb146102cb578063d0e30db0146100bc578063dd62ed3e14610311576100bc565b8063313ce5671461024b57806370a082311461027657806395d89b41146102b6576100bc565b806318160ddd116100a557806318160ddd146101aa57806323b872dd146101d15780632e1a7d4d14610221576100bc565b806306fdde03146100c6578063095ea7b314610150575b6100c4610359565b005b3480156100d257600080fd5b506100db6103a8565b6040805160208082528351818301528351919283929083019185019080838360005b838110156101155781810151838201526020016100fd565b50505050905090810190601f1680156101425780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b34801561015c57600080fd5b506101966004803603604081101561017357600080fd5b5073ffffffffffffffffffffffffffffffffffffffff8135169060200135610454565b604080519115158252519081900360200190f35b3480156101b657600080fd5b506101bf6104c7565b60408051918252519081900360200190f35b3480156101dd57600080fd5b50610196600480360360608110156101f457600080fd5b5073ffffffffffffffffffffffffffffffffffffffff8135811691602081013590911690604001356104cb565b34801561022d57600080fd5b506100c46004803603602081101561024457600080fd5b503561066b565b34801561025757600080fd5b50610260610700565b6040805160ff9092168252519081900360200190f35b34801561028257600080fd5b506101bf6004803603602081101561029957600080fd5b503573ffffffffffffffffffffffffffffffffffffffff16610709565b3480156102c257600080fd5b506100db61071b565b3480156102d757600080fd5b50610196600480360360408110156102ee57600080fd5b5073ffffffffffffffffffffffffffffffffffffffff8135169060200135610793565b34801561031d57600080fd5b506101bf6004803603604081101561033457600080fd5b5073ffffffffffffffffffffffffffffffffffffffff813581169160200135166107a7565b33600081815260036020908152604091829020805434908101909155825190815291517fe1fffcc4923d04b559f4d29a8bfc6cda04eb5b0d3c460751c2402c5c5cc9109c9281900390910190a2565b6000805460408051602060026001851615610100027fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0190941
693909304601f8101849004840282018401909252818152929183018282801561044c5780601f106104215761010080835404028352916020019161044c565b820191906000526020600020905b81548152906001019060200180831161042f57829003601f168201915b505050505081565b33600081815260046020908152604080832073ffffffffffffffffffffffffffffffffffffffff8716808552908352818420869055815186815291519394909390927f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b925928290030190a350600192915050565b4790565b73ffffffffffffffffffffffffffffffffffffffff83166000908152600360205260408120548211156104fd57600080fd5b73ffffffffffffffffffffffffffffffffffffffff84163314801590610573575073ffffffffffffffffffffffffffffffffffffffff841660009081526004602090815260408083203384529091529020547fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff14155b156105ed5773ffffffffffffffffffffffffffffffffffffffff841660009081526004602090815260408083203384529091529020548211156105b557600080fd5b73ffffffffffffffffffffffffffffffffffffffff841660009081526004602090815260408083203384529091529020805483900390555b73ffffffffffffffffffffffffffffffffffffffff808516600081815260036020908152604080832080548890039055938716808352918490208054870190558351868152935191937fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef929081900390910190a35060019392505050565b3360009081526003602052604090205481111561068757600080fd5b33600081815260036020526040808220805485900390555183156108fc0291849190818181858888f193505050501580156106c6573d6000803e3d6000fd5b5060408051828152905133917f7fcf532c15f0a6db0bd6d0e038bea71d30d808c7d98cb3bf7268a95bf5081b65919081900360200190a250565b60025460ff1681565b60036020526000908152604090205481565b60018054604080516020600284861615610100027fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0190941693909304601f8101849004840282018401909252818152929183018282801561044c5780601f106104215761010080835404028352916020019161044c565b60006107a03384846104cb565b9392505050565b60046020908152600092835260408084209091529082529020548156fea265627a7a7
2315820e87684b404839c5657b1e7820bfa5ac4539ac8c83c21e28ec1086123db902cfe64736f6c63430005110032",
"storage": {
"0x0000000000000000000000000000000000000000000000000000000000000000": "0x5772617070656420485950450000000000000000000000000000000000000018",
"0x0000000000000000000000000000000000000000000000000000000000000001": "0x574859504500000000000000000000000000000000000000000000000000000a",
"0x0000000000000000000000000000000000000000000000000000000000000002": "0x0000000000000000000000000000000000000000000000000000000000000012"
}
}
},
"number": "0x0",
"gasUsed": "0x0",
"parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000",
"parentBeaconBlockRoot": "0x0000000000000000000000000000000000000000000000000000000000000000"
}

57
src/chainspec/hl.rs Normal file
View File

@ -0,0 +1,57 @@
use alloy_chains::{Chain, NamedChain};
use alloy_primitives::{b256, Address, Bytes, B256, B64, U256};
use std::sync::Arc;
use once_cell::sync::Lazy;
use reth_chainspec::{ChainSpec, DEV_HARDFORKS};
use reth_primitives::{Header, SealedHeader};
// Hash used to seal the synthetic genesis header below. Per the comment on
// `empty_genesis_header`, block 1's parent hash is known to be this value.
static GENESIS_HASH: B256 =
    b256!("d8fcc13b6a195b88b7b2da3722ff6cad767b13a8c1e9ffb1c73aa9d216d895f0");
/// The Hyperliquid Mainnet spec
pub static HL_MAINNET: Lazy<Arc<ChainSpec>> = Lazy::new(|| {
    ChainSpec {
        chain: Chain::from_named(NamedChain::Hyperliquid),
        // Genesis allocation is embedded at compile time from the adjacent JSON file.
        genesis: serde_json::from_str(include_str!("genesis.json"))
            .expect("Can't deserialize Hyperliquid Mainnet genesis json"),
        genesis_header: empty_genesis_header(),
        // Post-merge from block 0 with zero final total difficulty.
        paris_block_and_final_difficulty: Some((0, U256::from(0))),
        // NOTE(review): reuses reth's DEV hardfork schedule rather than a
        // chain-specific one — confirm this matches mainnet activations.
        hardforks: DEV_HARDFORKS.clone(),
        prune_delete_limit: 10000,
        ..Default::default()
    }
    .into()
});
/// Empty genesis header for Hyperliquid Mainnet.
///
/// The exact value is not known per se, but the parent hash of block 1 is known to be
/// [GENESIS_HASH].
fn empty_genesis_header() -> SealedHeader {
    // The header fields are placeholders (all-zero roots and hashes); the seal
    // hash is pinned explicitly to GENESIS_HASH instead of being computed from
    // the header contents, so block 1 can link to it.
    SealedHeader::new(
        Header {
            parent_hash: B256::ZERO,
            number: 0,
            timestamp: 0,
            transactions_root: B256::ZERO,
            receipts_root: B256::ZERO,
            state_root: B256::ZERO,
            gas_used: 0,
            // 30M gas limit, matching "gasLimit": "0x1c9c380" in genesis.json.
            gas_limit: 0x1c9c380,
            difficulty: U256::ZERO,
            mix_hash: B256::ZERO,
            extra_data: Bytes::new(),
            nonce: B64::ZERO,
            ommers_hash: B256::ZERO,
            beneficiary: Address::ZERO,
            logs_bloom: Default::default(),
            base_fee_per_gas: Some(0),
            // Post-Shanghai/Cancun fields are present (Some) so downstream
            // fork checks treat the genesis as post-merge.
            withdrawals_root: Some(B256::ZERO),
            blob_gas_used: Some(0),
            excess_blob_gas: Some(0),
            parent_beacon_block_root: Some(B256::ZERO),
            requests_hash: Some(B256::ZERO),
        },
        GENESIS_HASH,
    )
}

139
src/chainspec/mod.rs Normal file
View File

@ -0,0 +1,139 @@
//! Chain specification for HyperEVM.
pub mod hl;
pub mod parser;
// Re-export the parser at the module root: `src/main.rs` imports it as
// `reth_hl::chainspec::HlChainSpecParser`, which otherwise would not resolve
// since the type lives in the `parser` submodule.
pub use parser::HlChainSpecParser;
use crate::hardforks::{hl::HlHardfork, HlHardforks};
use alloy_consensus::Header;
use alloy_eips::eip7840::BlobParams;
use alloy_genesis::Genesis;
use alloy_primitives::{Address, B256, U256};
use reth_chainspec::{
BaseFeeParams, ChainSpec, DepositContract, EthChainSpec, EthereumHardfork, EthereumHardforks,
ForkCondition, ForkFilter, ForkId, Hardforks, Head,
};
use reth_discv4::NodeRecord;
use reth_evm::eth::spec::EthExecutorSpec;
use std::{fmt::Display, sync::Arc};
/// Hl chain spec type.
///
/// Newtype wrapper around reth's [`ChainSpec`] so that HL-specific trait
/// implementations (e.g. [`crate::hardforks::HlHardforks`]) can be provided.
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct HlChainSpec {
    /// [`ChainSpec`].
    pub inner: ChainSpec,
}
// Delegates everything to the wrapped `ChainSpec`, except where HL diverges:
// no deposit contract, and never an OP-stack chain.
impl EthChainSpec for HlChainSpec {
    type Header = Header;
    fn blob_params_at_timestamp(&self, timestamp: u64) -> Option<BlobParams> {
        self.inner.blob_params_at_timestamp(timestamp)
    }
    fn final_paris_total_difficulty(&self) -> Option<U256> {
        self.inner.final_paris_total_difficulty()
    }
    fn chain(&self) -> alloy_chains::Chain {
        self.inner.chain()
    }
    fn base_fee_params_at_block(&self, block_number: u64) -> BaseFeeParams {
        self.inner.base_fee_params_at_block(block_number)
    }
    fn base_fee_params_at_timestamp(&self, timestamp: u64) -> BaseFeeParams {
        self.inner.base_fee_params_at_timestamp(timestamp)
    }
    fn deposit_contract(&self) -> Option<&DepositContract> {
        // Deliberately NOT delegated: no deposit contract is configured for HL.
        None
    }
    fn genesis_hash(&self) -> B256 {
        self.inner.genesis_hash()
    }
    fn prune_delete_limit(&self) -> usize {
        self.inner.prune_delete_limit()
    }
    fn display_hardforks(&self) -> Box<dyn Display> {
        Box::new(self.inner.display_hardforks())
    }
    fn genesis_header(&self) -> &Header {
        self.inner.genesis_header()
    }
    fn genesis(&self) -> &Genesis {
        self.inner.genesis()
    }
    fn bootnodes(&self) -> Option<Vec<NodeRecord>> {
        self.inner.bootnodes()
    }
    fn is_optimism(&self) -> bool {
        false
    }
}
// Pure delegation: the fork schedule lives entirely in the inner `ChainSpec`.
impl Hardforks for HlChainSpec {
    fn fork<H: reth_chainspec::Hardfork>(&self, fork: H) -> reth_chainspec::ForkCondition {
        self.inner.fork(fork)
    }
    fn forks_iter(
        &self,
    ) -> impl Iterator<Item = (&dyn reth_chainspec::Hardfork, reth_chainspec::ForkCondition)> {
        self.inner.forks_iter()
    }
    fn fork_id(&self, head: &Head) -> ForkId {
        self.inner.fork_id(head)
    }
    fn latest_fork_id(&self) -> ForkId {
        self.inner.latest_fork_id()
    }
    fn fork_filter(&self, head: Head) -> ForkFilter {
        self.inner.fork_filter(head)
    }
}
// Cheap wrap/unwrap conversions between the wrapper and reth's `ChainSpec`.
impl From<ChainSpec> for HlChainSpec {
    fn from(value: ChainSpec) -> Self {
        Self { inner: value }
    }
}
impl EthereumHardforks for HlChainSpec {
    // Ethereum fork activations come verbatim from the inner spec.
    fn ethereum_fork_activation(&self, fork: EthereumHardfork) -> ForkCondition {
        self.inner.ethereum_fork_activation(fork)
    }
}
impl HlHardforks for HlChainSpec {
    // HL forks resolve through the generic `Hardforks::fork` lookup; a fork
    // absent from the schedule yields `ForkCondition::Never`.
    fn hl_fork_activation(&self, fork: HlHardfork) -> ForkCondition {
        self.fork(fork)
    }
}
impl EthExecutorSpec for HlChainSpec {
    // No deposit contract is configured for HL.
    fn deposit_contract_address(&self) -> Option<Address> {
        None
    }
}
impl From<HlChainSpec> for ChainSpec {
    fn from(value: HlChainSpec) -> Self {
        value.inner
    }
}
// Convenience impl so an `Arc<HlChainSpec>` can be used wherever
// `HlHardforks` is required without dereferencing at call sites.
impl HlHardforks for Arc<HlChainSpec> {
    fn hl_fork_activation(&self, fork: HlHardfork) -> ForkCondition {
        self.as_ref().hl_fork_activation(fork)
    }
}

31
src/chainspec/parser.rs Normal file
View File

@ -0,0 +1,31 @@
use super::hl::HL_MAINNET;
use reth_chainspec::ChainSpec;
use reth_cli::chainspec::ChainSpecParser;
use std::sync::Arc;
/// Chains supported by HyperEVM. First value should be used as the default.
pub const SUPPORTED_CHAINS: &[&str] = &["mainnet"];
/// Hyperliquid chain specification parser.
#[derive(Debug, Clone, Default)]
pub struct HlChainSpecParser;
impl ChainSpecParser for HlChainSpecParser {
    // NOTE(review): `Cli` in src/node/cli.rs bounds its parser with
    // `ChainSpecParser<ChainSpec = HlChainSpec>`, but this associated type is
    // reth's plain `ChainSpec` — confirm which of the two is intended, as the
    // combination used in src/main.rs requires them to agree.
    type ChainSpec = ChainSpec;
    const SUPPORTED_CHAINS: &'static [&'static str] = SUPPORTED_CHAINS;
    fn parse(s: &str) -> eyre::Result<Arc<ChainSpec>> {
        chain_value_parser(s)
    }
}
/// Clap value parser for [`ChainSpec`]s.
///
/// Currently only mainnet is supported; any other name is rejected with an
/// error naming the unsupported chain.
pub fn chain_value_parser(s: &str) -> eyre::Result<Arc<ChainSpec>> {
    if s == "mainnet" {
        // The lazily-initialized mainnet spec is shared via `Arc`, so this
        // clone is a cheap reference-count bump.
        Ok(HL_MAINNET.clone().into())
    } else {
        Err(eyre::eyre!("Unsupported chain: {}", s))
    }
}

138
src/consensus.rs Normal file
View File

@ -0,0 +1,138 @@
use alloy_primitives::{BlockNumber, B256};
use reth_provider::{BlockNumReader, ProviderError};
use std::cmp::Ordering;
/// Errors that can occur in Hl consensus.
#[derive(Debug, thiserror::Error)]
pub enum HlConsensusErr {
    /// Error from the provider (wrapped transparently so the underlying
    /// storage error message is surfaced unchanged).
    #[error(transparent)]
    Provider(#[from] ProviderError),
    /// The provider reported a best block number but had no hash stored for it.
    #[error("Head block hash not found")]
    HeadHashNotFound,
}
/// Hl consensus implementation.
///
/// NOTE(review): this type shares its name with `HlConsensus` in
/// src/node/consensus.rs (the `FullConsensus` implementation) — consider
/// renaming one of them to avoid confusion.
pub struct HlConsensus<P> {
    /// The provider for reading block information
    pub provider: P,
}
impl<P> HlConsensus<P>
where
    P: BlockNumReader + Clone,
{
    /// Determines the head block hash according to Hl consensus rules:
    /// 1. Follow the highest block number
    /// 2. For same height blocks, pick the one with lower hash
    ///
    /// Returns `(canonical_head_hash, previous_head_hash)`.
    pub(crate) fn canonical_head(
        &self,
        hash: B256,
        number: BlockNumber,
    ) -> Result<(B256, B256), HlConsensusErr> {
        let head_number = self.provider.best_block_number()?;
        let head_hash = self
            .provider
            .block_hash(head_number)?
            .ok_or(HlConsensusErr::HeadHashNotFound)?;
        let canonical = match number.cmp(&head_number) {
            // The incoming block extends the chain: it becomes the new head.
            Ordering::Greater => hash,
            // Tie on height: deterministically prefer the lower hash.
            Ordering::Equal => hash.min(head_hash),
            // The incoming block is behind: the current head stays canonical.
            Ordering::Less => head_hash,
        };
        Ok((canonical, head_hash))
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use alloy_primitives::hex;
    use reth_chainspec::ChainInfo;
    use reth_provider::BlockHashReader;
    use std::collections::HashMap;
    /// Minimal in-memory provider exposing a single canonical head block.
    #[derive(Clone)]
    struct MockProvider {
        blocks: HashMap<BlockNumber, B256>,
        head_number: BlockNumber,
        head_hash: B256,
    }
    impl MockProvider {
        // Builds a provider whose only known block is the given head.
        fn new(head_number: BlockNumber, head_hash: B256) -> Self {
            let mut blocks = HashMap::new();
            blocks.insert(head_number, head_hash);
            Self {
                blocks,
                head_number,
                head_hash,
            }
        }
    }
    impl BlockHashReader for MockProvider {
        fn block_hash(&self, number: BlockNumber) -> Result<Option<B256>, ProviderError> {
            Ok(self.blocks.get(&number).copied())
        }
        // Unused by these tests; returns an empty range.
        fn canonical_hashes_range(
            &self,
            _start: BlockNumber,
            _end: BlockNumber,
        ) -> Result<Vec<B256>, ProviderError> {
            Ok(vec![])
        }
    }
    impl BlockNumReader for MockProvider {
        fn chain_info(&self) -> Result<ChainInfo, ProviderError> {
            Ok(ChainInfo {
                best_hash: self.head_hash,
                best_number: self.head_number,
            })
        }
        fn best_block_number(&self) -> Result<BlockNumber, ProviderError> {
            Ok(self.head_number)
        }
        fn last_block_number(&self) -> Result<BlockNumber, ProviderError> {
            Ok(self.head_number)
        }
        // Reverse lookup: linear scan over the (tiny) block map.
        fn block_number(&self, hash: B256) -> Result<Option<BlockNumber>, ProviderError> {
            Ok(self
                .blocks
                .iter()
                .find_map(|(num, h)| (*h == hash).then_some(*num)))
        }
    }
    /// Table-driven check of all three `canonical_head` branches:
    /// higher block wins, lower block stays, ties resolve to the lower hash.
    #[test]
    fn test_canonical_head() {
        let hash1 = B256::from_slice(&hex!(
            "1111111111111111111111111111111111111111111111111111111111111111"
        ));
        let hash2 = B256::from_slice(&hex!(
            "2222222222222222222222222222222222222222222222222222222222222222"
        ));
        // Each case: ((incoming hash, incoming number, head number, head hash), expected canonical).
        let test_cases = [
            ((hash1, 2, 1, hash2), hash1), // Higher block wins
            ((hash1, 1, 2, hash2), hash2), // Lower block stays
            ((hash1, 1, 1, hash2), hash1), // Same height, lower hash wins
            ((hash2, 1, 1, hash1), hash1), // Same height, lower hash stays
        ];
        for ((curr_hash, curr_num, head_num, head_hash), expected) in test_cases {
            let provider = MockProvider::new(head_num, head_hash);
            let consensus = HlConsensus { provider };
            let (head_block_hash, current_hash) =
                consensus.canonical_head(curr_hash, curr_num).unwrap();
            assert_eq!(head_block_hash, expected);
            // The second element is always the pre-existing head hash.
            assert_eq!(current_hash, head_hash);
        }
    }
}

116
src/hardforks/hl.rs Normal file
View File

@ -0,0 +1,116 @@
#![allow(unused)]
use alloy_chains::{Chain, NamedChain};
use core::any::Any;
use reth_chainspec::ForkCondition;
use reth_ethereum_forks::{hardfork, ChainHardforks, EthereumHardfork, Hardfork};
hardfork!(
    /// The name of an HL hardfork.
    ///
    /// When building a list of hardforks for a chain, it's still expected to mix with [`EthereumHardfork`].
    /// There is no name for these hardforks; just some bugfixes on the evm chain.
    #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
    HlHardfork {
        /// Initial version
        V1,
        /// block.number bugfix
        V2,
        /// gas mismatch bugfix
        V3,
    }
);
impl HlHardfork {
    /// Retrieves the activation block for the specified hardfork on the given chain.
    ///
    /// Only Hyperliquid mainnet is recognized; any other chain yields `None`.
    pub fn activation_block<H: Hardfork>(self, fork: H, chain: Chain) -> Option<u64> {
        if chain == Chain::from_named(NamedChain::Hyperliquid) {
            return Self::hl_mainnet_activation_block(fork);
        }
        None
    }
    /// Retrieves the activation timestamp for the specified hardfork on the given chain.
    ///
    /// Always `None`: no fork on this chain is activated by timestamp (all are
    /// block-number based); the parameters are accepted for API symmetry with
    /// `activation_block`.
    pub fn activation_timestamp<H: Hardfork>(self, fork: H, chain: Chain) -> Option<u64> {
        None
    }
    /// Retrieves the activation block for the specified hardfork on the HyperLiquid mainnet.
    pub fn hl_mainnet_activation_block<H: Hardfork>(fork: H) -> Option<u64> {
        match_hardfork(
            fork,
            // All supported Ethereum forks are active from genesis.
            // NOTE(review): Berlin and London are listed here as Some(0) but
            // `bsc_mainnet` below activates them at block 31302048 — confirm
            // which schedule is correct; the two should agree.
            |fork| match fork {
                EthereumHardfork::Frontier
                | EthereumHardfork::Homestead
                | EthereumHardfork::Tangerine
                | EthereumHardfork::SpuriousDragon
                | EthereumHardfork::Byzantium
                | EthereumHardfork::Constantinople
                | EthereumHardfork::Petersburg
                | EthereumHardfork::Istanbul
                | EthereumHardfork::MuirGlacier
                | EthereumHardfork::Berlin
                | EthereumHardfork::London
                | EthereumHardfork::Shanghai
                | EthereumHardfork::Cancun => Some(0),
                _ => None,
            },
            // All HL-specific bugfix forks are active from genesis.
            |fork| match fork {
                Self::V1 | Self::V2 | Self::V3 => Some(0),
                _ => None,
            },
        )
    }
    /// Full hardfork schedule for the HyperEVM mainnet.
    ///
    /// NOTE(review): the name `bsc_mainnet` (and the "bsc" wording) looks like
    /// a leftover from the BSC codebase this was adapted from — consider
    /// renaming to `hl_mainnet` (kept as-is here since callers may depend on it).
    pub fn bsc_mainnet() -> ChainHardforks {
        ChainHardforks::new(vec![
            (EthereumHardfork::Frontier.boxed(), ForkCondition::Block(0)),
            (EthereumHardfork::Homestead.boxed(), ForkCondition::Block(0)),
            (EthereumHardfork::Tangerine.boxed(), ForkCondition::Block(0)),
            (
                EthereumHardfork::SpuriousDragon.boxed(),
                ForkCondition::Block(0),
            ),
            (EthereumHardfork::Byzantium.boxed(), ForkCondition::Block(0)),
            (
                EthereumHardfork::Constantinople.boxed(),
                ForkCondition::Block(0),
            ),
            (
                EthereumHardfork::Petersburg.boxed(),
                ForkCondition::Block(0),
            ),
            (EthereumHardfork::Istanbul.boxed(), ForkCondition::Block(0)),
            (
                EthereumHardfork::MuirGlacier.boxed(),
                ForkCondition::Block(0),
            ),
            // Berlin/London activate mid-chain here, unlike the Some(0) values
            // returned by `hl_mainnet_activation_block` — see NOTE above.
            (
                EthereumHardfork::Berlin.boxed(),
                ForkCondition::Block(31302048),
            ),
            (
                EthereumHardfork::London.boxed(),
                ForkCondition::Block(31302048),
            ),
            (Self::V1.boxed(), ForkCondition::Block(0)),
            (Self::V2.boxed(), ForkCondition::Block(0)),
            (Self::V3.boxed(), ForkCondition::Block(0)),
        ])
    }
}
/// Match helper method since it's not possible to match on `dyn Hardfork`.
///
/// Recovers the concrete hardfork type via `Any`, trying [`EthereumHardfork`]
/// first and falling back to [`HlHardfork`]; returns `None` for any other type.
fn match_hardfork<H, HF, HHF>(fork: H, hardfork_fn: HF, hl_hardfork_fn: HHF) -> Option<u64>
where
    H: Hardfork,
    HF: Fn(&EthereumHardfork) -> Option<u64>,
    HHF: Fn(&HlHardfork) -> Option<u64>,
{
    let erased: &dyn Any = &fork;
    match erased.downcast_ref::<EthereumHardfork>() {
        Some(eth_fork) => hardfork_fn(eth_fork),
        None => erased.downcast_ref::<HlHardfork>().and_then(hl_hardfork_fn),
    }
}

13
src/hardforks/mod.rs Normal file
View File

@ -0,0 +1,13 @@
//! Hard forks of hl protocol.
#![allow(unused)]
use hl::HlHardfork;
use reth_chainspec::{EthereumHardforks, ForkCondition};
pub mod hl;
/// Extends [`EthereumHardforks`] with hl helper methods.
///
/// Implemented by chain-spec types (see `HlChainSpec`) so HL fork activations
/// can be queried alongside the standard Ethereum ones.
pub trait HlHardforks: EthereumHardforks {
    /// Retrieves [`ForkCondition`] by an [`HlHardfork`]. If `fork` is not present, returns
    /// [`ForkCondition::Never`].
    fn hl_fork_activation(&self, fork: HlHardfork) -> ForkCondition;
}

6
src/lib.rs Normal file
View File

@ -0,0 +1,6 @@
// Library crate root. The `reth-hl` binary (src/main.rs) consumes these
// modules through paths like `reth_hl::chainspec::HlChainSpecParser` and
// `reth_hl::node::{cli::Cli, HlNode}`, so they must be public — private
// `mod` declarations would make those imports fail to resolve.
pub mod chainspec;
pub mod consensus;
pub mod hardforks;
pub mod node;
pub use node::primitives;

40
src/main.rs Normal file
View File

@ -0,0 +1,40 @@
use clap::{Args, Parser};
use reth::builder::NodeHandle;
use reth_hl::{
chainspec::HlChainSpecParser,
node::{cli::Cli, HlNode},
};
// We use jemalloc for performance reasons; only wired in on unix targets when
// the `jemalloc` feature (on by default) is enabled.
#[cfg(all(feature = "jemalloc", unix))]
#[global_allocator]
static ALLOC: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
/// No Additional arguments
// Empty clap extension type: the CLI takes no extra flags beyond reth's own.
// NOTE(review): reth_cli_commands also exports a `NoArgs`; this local type
// shadows it for the `Cli` instantiation below.
#[derive(Debug, Clone, Copy, Default, Args)]
#[non_exhaustive]
struct NoArgs;
/// Binary entrypoint: parses the CLI and launches the HL node.
fn main() -> eyre::Result<()> {
    // Install a handler that reports segfaults instead of dying silently.
    reth_cli_util::sigsegv_handler::install();
    // Enable backtraces unless a RUST_BACKTRACE value has already been explicitly provided.
    if std::env::var_os("RUST_BACKTRACE").is_none() {
        std::env::set_var("RUST_BACKTRACE", "1");
    }
    Cli::<HlChainSpecParser, NoArgs>::parse().run(|builder, _| async move {
        // `HlNode::new` also hands back a channel sender used to deliver the
        // beacon engine handle once the node is up.
        let (node, engine_handle_tx) = HlNode::new();
        let NodeHandle {
            node,
            node_exit_future: exit_future,
        } = builder.node(node).launch().await?;
        // Wire the launched node's engine handle back to the HlNode.
        // NOTE(review): `.unwrap()` panics if the receiver was dropped —
        // confirm that cannot happen during normal shutdown.
        engine_handle_tx
            .send(node.beacon_engine_handle.clone())
            .unwrap();
        // Block until the node signals exit.
        exit_future.await
    })?;
    Ok(())
}

126
src/node/cli.rs Normal file
View File

@ -0,0 +1,126 @@
use crate::{
chainspec::{parser::HlChainSpecParser, HlChainSpec},
node::{
consensus::HlConsensus, evm::config::HlEvmConfig, network::HlNetworkPrimitives, HlNode,
},
};
use clap::Parser;
use reth::{
args::LogArgs,
builder::{NodeBuilder, WithLaunchContext},
cli::Commands,
prometheus_exporter::install_prometheus_recorder,
version::{LONG_VERSION, SHORT_VERSION},
CliRunner,
};
use reth_chainspec::EthChainSpec;
use reth_cli::chainspec::ChainSpecParser;
use reth_cli_commands::{launcher::FnLauncher, node::NoArgs};
use reth_db::DatabaseEnv;
use reth_tracing::FileWorkerGuard;
use std::{
fmt::{self},
future::Future,
sync::Arc,
};
use tracing::info;
/// The main reth_hl cli interface.
///
/// This is the entrypoint to the executable.
// NOTE(review): the default `Spec = HlChainSpecParser` produces reth's
// `ChainSpec`, while the `run` impl below requires
// `ChainSpecParser<ChainSpec = HlChainSpec>` — confirm these agree.
#[derive(Debug, Parser)]
#[command(author, version = SHORT_VERSION, long_version = LONG_VERSION, about = "Reth", long_about = None)]
pub struct Cli<Spec: ChainSpecParser = HlChainSpecParser, Ext: clap::Args + fmt::Debug = NoArgs> {
    /// The command to run
    #[command(subcommand)]
    pub command: Commands<Spec, Ext>,
    // Logging configuration (verbosity, log-file directory, ...); flattened
    // into the top-level flags.
    #[command(flatten)]
    logs: LogArgs,
}
impl<C, Ext> Cli<C, Ext>
where
    C: ChainSpecParser<ChainSpec = HlChainSpec>,
    Ext: clap::Args + fmt::Debug,
{
    /// Execute the configured cli command.
    ///
    /// This accepts a closure that is used to launch the node via the
    /// [`NodeCommand`](reth_cli_commands::node::NodeCommand).
    pub fn run<L, Fut>(self, launcher: L) -> eyre::Result<()>
    where
        L: FnOnce(WithLaunchContext<NodeBuilder<Arc<DatabaseEnv>, C::ChainSpec>>, Ext) -> Fut,
        Fut: Future<Output = eyre::Result<()>>,
    {
        self.with_runner(CliRunner::try_default_runtime()?, launcher)
    }
    /// Execute the configured cli command with the provided [`CliRunner`].
    ///
    /// Dispatches every subcommand to the matching reth command executor,
    /// parameterized over HL node/network types.
    pub fn with_runner<L, Fut>(mut self, runner: CliRunner, launcher: L) -> eyre::Result<()>
    where
        L: FnOnce(WithLaunchContext<NodeBuilder<Arc<DatabaseEnv>, C::ChainSpec>>, Ext) -> Fut,
        Fut: Future<Output = eyre::Result<()>>,
    {
        // Add network name if available to the logs dir
        if let Some(chain_spec) = self.command.chain_spec() {
            self.logs.log_file_directory =
                self.logs.log_file_directory.join(chain_spec.chain().to_string());
        }
        let _guard = self.init_tracing()?;
        info!(target: "reth::cli", "Initialized tracing, debug log directory: {}", self.logs.log_file_directory);
        // Install the prometheus recorder to be sure to record all metrics
        let _ = install_prometheus_recorder();
        // Factory producing the (EVM config, consensus) pair that stage/import
        // commands need, built from the parsed chain spec.
        let components =
            |spec: Arc<C::ChainSpec>| (HlEvmConfig::new(spec.clone()), HlConsensus::new(spec));
        match self.command {
            Commands::Node(command) => runner.run_command_until_exit(|ctx| {
                command.execute(ctx, FnLauncher::new::<C, Ext>(launcher))
            }),
            Commands::Init(command) => {
                runner.run_blocking_until_ctrl_c(command.execute::<HlNode>())
            }
            Commands::InitState(command) => {
                runner.run_blocking_until_ctrl_c(command.execute::<HlNode>())
            }
            Commands::DumpGenesis(command) => runner.run_blocking_until_ctrl_c(command.execute()),
            Commands::Db(command) => runner.run_blocking_until_ctrl_c(command.execute::<HlNode>()),
            Commands::Stage(command) => runner.run_command_until_exit(|ctx| {
                command.execute::<HlNode, _, _, HlNetworkPrimitives>(ctx, components)
            }),
            Commands::P2P(command) => {
                runner.run_until_ctrl_c(command.execute::<HlNetworkPrimitives>())
            }
            Commands::Config(command) => runner.run_until_ctrl_c(command.execute()),
            Commands::Recover(command) => {
                runner.run_command_until_exit(|ctx| command.execute::<HlNode>(ctx))
            }
            Commands::Prune(command) => runner.run_until_ctrl_c(command.execute::<HlNode>()),
            Commands::Import(command) => {
                runner.run_blocking_until_ctrl_c(command.execute::<HlNode, _, _>(components))
            }
            // The remaining subcommands are not yet supported for HL and will
            // panic if invoked (explicit todo!() placeholders).
            Commands::Debug(_command) => todo!(),
            #[cfg(feature = "dev")]
            Commands::TestVectors(_command) => todo!(),
            Commands::ImportEra(_command) => {
                todo!()
            }
            Commands::Download(_command) => {
                todo!()
            }
        }
    }
    /// Initializes tracing with the configured options.
    ///
    /// If file logging is enabled, this function returns a guard that must be kept alive to ensure
    /// that all logs are flushed to disk.
    pub fn init_tracing(&self) -> eyre::Result<Option<FileWorkerGuard>> {
        let guard = self.logs.init_tracing()?;
        Ok(guard)
    }
}

138
src/node/consensus.rs Normal file
View File

@ -0,0 +1,138 @@
use crate::{
hardforks::HlHardforks,
node::HlNode,
primitives::{HlBlock, HlBlockBody, HlPrimitives},
};
use reth::{
api::FullNodeTypes,
beacon_consensus::EthBeaconConsensus,
builder::{components::ConsensusBuilder, BuilderContext},
consensus::{Consensus, ConsensusError, FullConsensus, HeaderValidator},
consensus_common::validation::{
validate_against_parent_4844, validate_against_parent_eip1559_base_fee,
validate_against_parent_hash_number, validate_against_parent_timestamp,
},
};
use reth_chainspec::EthChainSpec;
use reth_primitives::{Receipt, RecoveredBlock, SealedBlock, SealedHeader};
use reth_provider::BlockExecutionResult;
use std::sync::Arc;
/// A basic Hl consensus builder.
///
/// Stateless marker type plugged into the node's `ComponentsBuilder`.
#[derive(Debug, Default, Clone, Copy)]
#[non_exhaustive]
pub struct HlConsensusBuilder;
impl<Node> ConsensusBuilder<Node> for HlConsensusBuilder
where
    Node: FullNodeTypes<Types = HlNode>,
{
    type Consensus = Arc<dyn FullConsensus<HlPrimitives, Error = ConsensusError>>;
    // Builds the consensus instance from the node's configured chain spec.
    async fn build_consensus(self, ctx: &BuilderContext<Node>) -> eyre::Result<Self::Consensus> {
        Ok(Arc::new(HlConsensus::new(ctx.chain_spec())))
    }
}
/// HL consensus implementation.
///
/// Provides basic checks as outlined in the execution specs.
#[derive(Debug, Clone)]
pub struct HlConsensus<ChainSpec> {
    // Standard Ethereum beacon consensus, used for the subset of checks that
    // still apply to HL (body-against-header, post-execution validation).
    inner: EthBeaconConsensus<ChainSpec>,
    // Kept separately for base-fee and blob-parameter lookups.
    chain_spec: Arc<ChainSpec>,
}
impl<ChainSpec: EthChainSpec + HlHardforks> HlConsensus<ChainSpec> {
    /// Create a new instance of [`HlConsensus`]
    pub fn new(chain_spec: Arc<ChainSpec>) -> Self {
        Self {
            inner: EthBeaconConsensus::new(chain_spec.clone()),
            chain_spec,
        }
    }
}
impl<ChainSpec: EthChainSpec + HlHardforks> HeaderValidator for HlConsensus<ChainSpec> {
    // NOTE(review): standalone header validation is intentionally a no-op —
    // the inner beacon validation is disabled because HL headers fail its
    // extradata check. Confirm which checks should be re-enabled.
    fn validate_header(&self, _header: &SealedHeader) -> Result<(), ConsensusError> {
        // TODO: doesn't work because of extradata check
        // self.inner.validate_header(header)
        Ok(())
    }
    /// Validates a header against its parent: hash/number linkage, timestamp
    /// ordering, and the EIP-1559 base fee derivation.
    fn validate_header_against_parent(
        &self,
        header: &SealedHeader,
        parent: &SealedHeader,
    ) -> Result<(), ConsensusError> {
        validate_against_parent_hash_number(header.header(), parent)?;
        validate_against_parent_timestamp(header.header(), parent.header())?;
        validate_against_parent_eip1559_base_fee(
            header.header(),
            parent.header(),
            &self.chain_spec,
        )?;
        // ensure that the blob gas fields for this block are consistent with
        // the parent (EIP-4844) — only when blob params exist at this timestamp.
        if let Some(blob_params) = self.chain_spec.blob_params_at_timestamp(header.timestamp) {
            validate_against_parent_4844(header.header(), parent.header(), blob_params)?;
        }
        Ok(())
    }
}
impl<ChainSpec: EthChainSpec + HlHardforks> Consensus<HlBlock> for HlConsensus<ChainSpec> {
    type Error = ConsensusError;

    /// Delegates body-vs-header validation to the inner Ethereum consensus
    /// implementation.
    fn validate_body_against_header(
        &self,
        body: &HlBlockBody,
        header: &SealedHeader,
    ) -> Result<(), ConsensusError> {
        Consensus::<HlBlock>::validate_body_against_header(&self.inner, body, header)
    }

    /// Pre-execution block validation is currently a no-op.
    ///
    /// The standard Ethereum checks below (ommers hash, transaction root,
    /// Cancun blob gas) are kept for reference but disabled.
    // NOTE(review): confirm whether these checks should be re-enabled for HL
    // blocks, or document why they cannot apply.
    fn validate_block_pre_execution(
        &self,
        _block: &SealedBlock<HlBlock>,
    ) -> Result<(), ConsensusError> {
        // Check ommers hash
        // let ommers_hash = block.body().calculate_ommers_root();
        // if Some(block.ommers_hash()) != ommers_hash {
        //     return Err(ConsensusError::BodyOmmersHashDiff(
        //         GotExpected {
        //             got: ommers_hash.unwrap_or(EMPTY_OMMER_ROOT_HASH),
        //             expected: block.ommers_hash(),
        //         }
        //         .into(),
        //     ))
        // }
        // // Check transaction root
        // if let Err(error) = block.ensure_transaction_root_valid() {
        //     return Err(ConsensusError::BodyTransactionRootDiff(error.into()))
        // }
        // if self.chain_spec.is_cancun_active_at_timestamp(block.timestamp()) {
        //     validate_cancun_gas(block)?;
        // } else {
        //     return Ok(())
        // }
        Ok(())
    }
}
/// Post-execution validation is delegated to the inner Ethereum consensus
/// implementation.
impl<ChainSpec: EthChainSpec + HlHardforks> FullConsensus<HlPrimitives> for HlConsensus<ChainSpec> {
    fn validate_block_post_execution(
        &self,
        block: &RecoveredBlock<HlBlock>,
        result: &BlockExecutionResult<Receipt>,
    ) -> Result<(), ConsensusError> {
        FullConsensus::<HlPrimitives>::validate_block_post_execution(&self.inner, block, result)
    }
}

158
src/node/mod.rs Normal file
View File

@ -0,0 +1,158 @@
use crate::{
chainspec::HlChainSpec,
node::{
primitives::{HlBlock, HlBlockBody, HlPrimitives},
rpc::{
engine_api::{
builder::HlEngineApiBuilder, payload::HlPayloadTypes,
validator::HlEngineValidatorBuilder,
},
HlEthApiBuilder,
},
storage::HlStorage,
},
};
use consensus::HlConsensusBuilder;
use engine::HlPayloadServiceBuilder;
use evm::HlExecutorBuilder;
use network::HlNetworkBuilder;
use reth::{
api::{FullNodeComponents, FullNodeTypes, NodeTypes},
builder::{
components::ComponentsBuilder, rpc::RpcAddOns, DebugNode, Node, NodeAdapter,
NodeComponentsBuilder,
},
};
use reth_engine_primitives::BeaconConsensusEngineHandle;
use reth_node_ethereum::node::EthereumPoolBuilder;
use reth_primitives::BlockBody;
use reth_trie_db::MerklePatriciaTrie;
use std::sync::Arc;
use tokio::sync::{oneshot, Mutex};
/// Command-line interface for the HL node.
pub mod cli;
/// Consensus implementation and its component builder ([`HlConsensusBuilder`]).
pub mod consensus;
/// Payload service builder ([`HlPayloadServiceBuilder`]).
pub mod engine;
/// Executor builder ([`HlExecutorBuilder`]).
pub mod evm;
/// P2P network builder ([`HlNetworkBuilder`]).
pub mod network;
/// Block, body and primitive type definitions.
pub mod primitives;
/// RPC and engine-API wiring.
pub mod rpc;
/// Chain storage (block bodies plus read-precompile calls).
pub mod storage;
/// Data types shared with the upstream hyper-evm-sync format.
pub mod types;

/// Hl addons configuring RPC types
pub type HlNodeAddOns<N> =
    RpcAddOns<N, HlEthApiBuilder, HlEngineValidatorBuilder, HlEngineApiBuilder>;
/// Type configuration for a regular Hl node.
#[derive(Debug, Clone)]
pub struct HlNode {
    /// Receiver for the consensus engine handle, created in [`HlNode::new`]
    /// and handed to the network builder in `components`. Wrapped in an
    /// `Option` inside a shared mutex so it can be taken exactly once.
    engine_handle_rx:
        Arc<Mutex<Option<oneshot::Receiver<BeaconConsensusEngineHandle<HlPayloadTypes>>>>>,
}
impl HlNode {
    /// Creates the node type together with the sender half of a oneshot
    /// channel; the receiver half is stored on the node and later handed to
    /// the network builder, while the caller uses the sender to deliver the
    /// consensus engine handle once the engine is running.
    pub fn new() -> (
        Self,
        oneshot::Sender<BeaconConsensusEngineHandle<HlPayloadTypes>>,
    ) {
        let (tx, rx) = oneshot::channel();
        let node = Self {
            engine_handle_rx: Arc::new(Mutex::new(Some(rx))),
        };
        (node, tx)
    }
}
impl HlNode {
    /// Returns the [`ComponentsBuilder`] wired with the HL component set.
    ///
    /// The transaction pool is the stock Ethereum one; payload building,
    /// networking, execution and consensus are HL-specific. The network
    /// builder receives a clone of the engine-handle receiver so it can obtain
    /// the consensus engine handle once it is available.
    pub fn components<Node>(
        &self,
    ) -> ComponentsBuilder<
        Node,
        EthereumPoolBuilder,
        HlPayloadServiceBuilder,
        HlNetworkBuilder,
        HlExecutorBuilder,
        HlConsensusBuilder,
    >
    where
        Node: FullNodeTypes<Types = Self>,
    {
        ComponentsBuilder::default()
            .node_types::<Node>()
            .pool(EthereumPoolBuilder::default())
            .executor(HlExecutorBuilder::default())
            .payload(HlPayloadServiceBuilder::default())
            .network(HlNetworkBuilder {
                engine_handle_rx: self.engine_handle_rx.clone(),
            })
            .consensus(HlConsensusBuilder::default())
    }
}
/// Wires the HL primitives, chain spec, storage and payload types into reth's
/// node type system.
impl NodeTypes for HlNode {
    type Primitives = HlPrimitives;
    type ChainSpec = HlChainSpec;
    type StateCommitment = MerklePatriciaTrie;
    type Storage = HlStorage;
    type Payload = HlPayloadTypes;
}
impl<N> Node<N> for HlNode
where
    N: FullNodeTypes<Types = Self>,
{
    type ComponentsBuilder = ComponentsBuilder<
        N,
        EthereumPoolBuilder,
        HlPayloadServiceBuilder,
        HlNetworkBuilder,
        HlExecutorBuilder,
        HlConsensusBuilder,
    >;
    type AddOns = HlNodeAddOns<
        NodeAdapter<N, <Self::ComponentsBuilder as NodeComponentsBuilder<N>>::Components>,
    >;

    /// Builds the HL component set (pool, payload, network, executor,
    /// consensus); see [`HlNode::components`].
    fn components_builder(&self) -> Self::ComponentsBuilder {
        self.components()
    }

    /// Default RPC/engine-API add-ons for the HL node.
    fn add_ons(&self) -> Self::AddOns {
        HlNodeAddOns::default()
    }
}
impl<N> DebugNode<N> for HlNode
where
    N: FullNodeComponents<Types = Self>,
{
    type RpcBlock = alloy_rpc_types::Block;

    /// Converts an RPC block into the primitive [`HlBlock`].
    ///
    /// The HL-specific fields (`sidecars`, `read_precompile_calls`) are not
    /// part of the RPC representation, so they are set to `None`.
    fn rpc_to_primitive_block(rpc_block: Self::RpcBlock) -> HlBlock {
        let transactions = rpc_block
            .transactions
            .into_transactions()
            .map(|tx| tx.inner.into_inner().into())
            .collect();
        let inner = BlockBody {
            transactions,
            ommers: Default::default(),
            withdrawals: rpc_block.withdrawals,
        };
        HlBlock {
            header: rpc_block.header.inner,
            body: HlBlockBody {
                inner,
                sidecars: None,
                read_precompile_calls: None,
            },
        }
    }
}

334
src/node/primitives.rs Normal file
View File

@ -0,0 +1,334 @@
#![allow(clippy::owned_cow)]
use alloy_consensus::{BlobTransactionSidecar, Header};
use alloy_rlp::{Encodable, RlpDecodable, RlpEncodable};
use reth_ethereum_primitives::{BlockBody, Receipt};
use reth_primitives::{NodePrimitives, TransactionSigned};
use reth_primitives_traits::{Block, BlockBody as BlockBodyTrait, InMemorySize};
use serde::{Deserialize, Serialize};
use std::borrow::Cow;
use crate::node::types::{ReadPrecompileCall, ReadPrecompileCalls};
/// Primitive types for HyperEVM.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
#[non_exhaustive]
pub struct HlPrimitives;

/// Maps the HL block and body types into reth's primitive type system;
/// headers, transactions and receipts reuse the standard Ethereum types.
impl NodePrimitives for HlPrimitives {
    type Block = HlBlock;
    type BlockHeader = Header;
    type BlockBody = HlBlockBody;
    type SignedTx = TransactionSigned;
    type Receipt = Receipt;
}
/// Block body for HL. It is equivalent to Ethereum [`BlockBody`] but additionally stores sidecars
/// for blob transactions.
#[derive(
    Debug,
    Clone,
    Default,
    PartialEq,
    Eq,
    Serialize,
    Deserialize,
    derive_more::Deref,
    derive_more::DerefMut,
)]
pub struct HlBlockBody {
    /// The standard Ethereum body (transactions, ommers, withdrawals).
    /// Flattened in serde output and reachable directly through `Deref`.
    #[serde(flatten)]
    #[deref]
    #[deref_mut]
    pub inner: BlockBody,
    /// Blob sidecars for the block's blob transactions, if any.
    pub sidecars: Option<Vec<BlobTransactionSidecar>>,
    /// Read-precompile calls recorded while executing this block, if any.
    pub read_precompile_calls: Option<ReadPrecompileCalls>,
}
impl InMemorySize for HlBlockBody {
    /// Approximate heap footprint: the inner body's size plus the reserved
    /// capacity of the optional sidecar and read-precompile-call vectors
    /// (counted as `capacity * element size`).
    fn size(&self) -> usize {
        let sidecars_size = match &self.sidecars {
            Some(sidecars) => sidecars.capacity() * core::mem::size_of::<BlobTransactionSidecar>(),
            None => 0,
        };
        let calls_size = match &self.read_precompile_calls {
            Some(calls) => calls.0.capacity() * core::mem::size_of::<ReadPrecompileCall>(),
            None => 0,
        };
        self.inner.size() + sidecars_size + calls_size
    }
}
impl BlockBodyTrait for HlBlockBody {
    type Transaction = TransactionSigned;
    type OmmerHeader = Header;

    fn transactions(&self) -> &[Self::Transaction] {
        // NOTE(review): fully-qualified call — presumably to sidestep
        // Deref-based method resolution on `inner`; confirm before changing.
        BlockBodyTrait::transactions(&self.inner)
    }

    /// Drops the HL-specific fields (sidecars, read-precompile calls) and
    /// returns the plain Ethereum body.
    fn into_ethereum_body(self) -> BlockBody {
        self.inner
    }

    fn into_transactions(self) -> Vec<Self::Transaction> {
        self.inner.into_transactions()
    }

    fn withdrawals(&self) -> Option<&alloy_rpc_types::Withdrawals> {
        self.inner.withdrawals()
    }

    fn ommers(&self) -> Option<&[Self::OmmerHeader]> {
        self.inner.ommers()
    }
}
/// Block for HL
#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)]
pub struct HlBlock {
    /// Standard Ethereum block header.
    pub header: Header,
    /// HL block body (Ethereum body plus sidecars and read-precompile calls).
    pub body: HlBlockBody,
}

impl InMemorySize for HlBlock {
    /// Approximate in-memory size: header plus body.
    fn size(&self) -> usize {
        self.header.size() + self.body.size()
    }
}
impl Block for HlBlock {
    type Header = Header;
    type Body = HlBlockBody;

    fn new(header: Self::Header, body: Self::Body) -> Self {
        Self { header, body }
    }

    fn header(&self) -> &Self::Header {
        &self.header
    }

    fn body(&self) -> &Self::Body {
        &self.body
    }

    fn split(self) -> (Self::Header, Self::Body) {
        (self.header, self.body)
    }

    /// RLP-encoded length of a full block, computed via the same helper struct
    /// that performs the actual encoding so the two cannot drift apart.
    fn rlp_length(header: &Self::Header, body: &Self::Body) -> usize {
        rlp::BlockHelper {
            header: Cow::Borrowed(header),
            transactions: Cow::Borrowed(&body.inner.transactions),
            ommers: Cow::Borrowed(&body.inner.ommers),
            withdrawals: body.inner.withdrawals.as_ref().map(Cow::Borrowed),
            sidecars: body.sidecars.as_ref().map(Cow::Borrowed),
            read_precompile_calls: body.read_precompile_calls.as_ref().map(Cow::Borrowed),
        }
        .length()
    }
}
mod rlp {
    //! RLP (de)serialization for [`HlBlock`] and [`HlBlockBody`].
    //!
    //! The helper structs below mirror the exact field order of the block and
    //! body, since the derived RLP encoding is positional. The HL-specific
    //! fields (`sidecars`, `read_precompile_calls`) are optional trailing
    //! fields (`#[rlp(trailing)]`), which keeps the base encoding compatible
    //! with a plain Ethereum block body.
    use super::*;
    use alloy_eips::eip4895::Withdrawals;
    use alloy_rlp::Decodable;

    /// Borrowed-or-owned view of an [`HlBlockBody`] used for RLP
    /// encoding/decoding. Field order defines the wire format.
    #[derive(RlpEncodable, RlpDecodable)]
    #[rlp(trailing)]
    struct BlockBodyHelper<'a> {
        transactions: Cow<'a, Vec<TransactionSigned>>,
        ommers: Cow<'a, Vec<Header>>,
        withdrawals: Option<Cow<'a, Withdrawals>>,
        sidecars: Option<Cow<'a, Vec<BlobTransactionSidecar>>>,
        read_precompile_calls: Option<Cow<'a, ReadPrecompileCalls>>,
    }

    /// Borrowed-or-owned view of an [`HlBlock`]; also used by
    /// [`Block::rlp_length`] to size encodings without materializing them.
    #[derive(RlpEncodable, RlpDecodable)]
    #[rlp(trailing)]
    pub(crate) struct BlockHelper<'a> {
        pub(crate) header: Cow<'a, Header>,
        pub(crate) transactions: Cow<'a, Vec<TransactionSigned>>,
        pub(crate) ommers: Cow<'a, Vec<Header>>,
        pub(crate) withdrawals: Option<Cow<'a, Withdrawals>>,
        pub(crate) sidecars: Option<Cow<'a, Vec<BlobTransactionSidecar>>>,
        pub(crate) read_precompile_calls: Option<Cow<'a, ReadPrecompileCalls>>,
    }

    impl<'a> From<&'a HlBlockBody> for BlockBodyHelper<'a> {
        fn from(value: &'a HlBlockBody) -> Self {
            // Full destructuring: a new field added to either type will fail
            // to compile here instead of being silently dropped.
            let HlBlockBody {
                inner:
                    BlockBody {
                        transactions,
                        ommers,
                        withdrawals,
                    },
                sidecars,
                read_precompile_calls,
            } = value;
            Self {
                transactions: Cow::Borrowed(transactions),
                ommers: Cow::Borrowed(ommers),
                withdrawals: withdrawals.as_ref().map(Cow::Borrowed),
                sidecars: sidecars.as_ref().map(Cow::Borrowed),
                read_precompile_calls: read_precompile_calls.as_ref().map(Cow::Borrowed),
            }
        }
    }

    impl<'a> From<&'a HlBlock> for BlockHelper<'a> {
        fn from(value: &'a HlBlock) -> Self {
            let HlBlock {
                header,
                body:
                    HlBlockBody {
                        inner:
                            BlockBody {
                                transactions,
                                ommers,
                                withdrawals,
                            },
                        sidecars,
                        read_precompile_calls,
                    },
            } = value;
            Self {
                header: Cow::Borrowed(header),
                transactions: Cow::Borrowed(transactions),
                ommers: Cow::Borrowed(ommers),
                withdrawals: withdrawals.as_ref().map(Cow::Borrowed),
                sidecars: sidecars.as_ref().map(Cow::Borrowed),
                read_precompile_calls: read_precompile_calls.as_ref().map(Cow::Borrowed),
            }
        }
    }

    impl Encodable for HlBlockBody {
        fn encode(&self, out: &mut dyn bytes::BufMut) {
            BlockBodyHelper::from(self).encode(out);
        }

        fn length(&self) -> usize {
            BlockBodyHelper::from(self).length()
        }
    }

    impl Decodable for HlBlockBody {
        fn decode(buf: &mut &[u8]) -> alloy_rlp::Result<Self> {
            let BlockBodyHelper {
                transactions,
                ommers,
                withdrawals,
                sidecars,
                read_precompile_calls,
            } = BlockBodyHelper::decode(buf)?;
            Ok(Self {
                inner: BlockBody {
                    transactions: transactions.into_owned(),
                    ommers: ommers.into_owned(),
                    withdrawals: withdrawals.map(|w| w.into_owned()),
                },
                sidecars: sidecars.map(|s| s.into_owned()),
                read_precompile_calls: read_precompile_calls.map(|s| s.into_owned()),
            })
        }
    }

    impl Encodable for HlBlock {
        fn encode(&self, out: &mut dyn bytes::BufMut) {
            BlockHelper::from(self).encode(out);
        }

        fn length(&self) -> usize {
            BlockHelper::from(self).length()
        }
    }

    impl Decodable for HlBlock {
        fn decode(buf: &mut &[u8]) -> alloy_rlp::Result<Self> {
            let BlockHelper {
                header,
                transactions,
                ommers,
                withdrawals,
                sidecars,
                read_precompile_calls,
            } = BlockHelper::decode(buf)?;
            Ok(Self {
                header: header.into_owned(),
                body: HlBlockBody {
                    inner: BlockBody {
                        transactions: transactions.into_owned(),
                        ommers: ommers.into_owned(),
                        withdrawals: withdrawals.map(|w| w.into_owned()),
                    },
                    sidecars: sidecars.map(|s| s.into_owned()),
                    read_precompile_calls: read_precompile_calls.map(|s| s.into_owned()),
                },
            })
        }
    }
}
pub mod serde_bincode_compat {
    //! Bincode-friendly representations of [`HlBlock`] and [`HlBlockBody`],
    //! following reth's [`SerdeBincodeCompat`] pattern: borrowed `as_repr`
    //! views for serialization, owned reconstruction in `from_repr`.
    use super::*;
    use reth_primitives_traits::serde_bincode_compat::{BincodeReprFor, SerdeBincodeCompat};

    /// Bincode representation of [`HlBlockBody`].
    #[derive(Debug, Serialize, Deserialize)]
    pub struct HlBlockBodyBincode<'a> {
        inner: BincodeReprFor<'a, BlockBody>,
        sidecars: Option<Cow<'a, Vec<BlobTransactionSidecar>>>,
        read_precompile_calls: Option<Cow<'a, ReadPrecompileCalls>>,
    }

    /// Bincode representation of [`HlBlock`].
    #[derive(Debug, Serialize, Deserialize)]
    pub struct HlBlockBincode<'a> {
        header: BincodeReprFor<'a, Header>,
        body: BincodeReprFor<'a, HlBlockBody>,
    }

    impl SerdeBincodeCompat for HlBlockBody {
        type BincodeRepr<'a> = HlBlockBodyBincode<'a>;

        fn as_repr(&self) -> Self::BincodeRepr<'_> {
            HlBlockBodyBincode {
                inner: self.inner.as_repr(),
                sidecars: self.sidecars.as_ref().map(Cow::Borrowed),
                read_precompile_calls: self.read_precompile_calls.as_ref().map(Cow::Borrowed),
            }
        }

        fn from_repr(repr: Self::BincodeRepr<'_>) -> Self {
            let HlBlockBodyBincode {
                inner,
                sidecars,
                read_precompile_calls,
            } = repr;
            Self {
                inner: BlockBody::from_repr(inner),
                sidecars: sidecars.map(|s| s.into_owned()),
                read_precompile_calls: read_precompile_calls.map(|s| s.into_owned()),
            }
        }
    }

    impl SerdeBincodeCompat for HlBlock {
        type BincodeRepr<'a> = HlBlockBincode<'a>;

        fn as_repr(&self) -> Self::BincodeRepr<'_> {
            HlBlockBincode {
                header: self.header.as_repr(),
                body: self.body.as_repr(),
            }
        }

        fn from_repr(repr: Self::BincodeRepr<'_>) -> Self {
            let HlBlockBincode { header, body } = repr;
            Self {
                header: Header::from_repr(header),
                body: HlBlockBody::from_repr(body),
            }
        }
    }
}

180
src/node/storage/mod.rs Normal file
View File

@ -0,0 +1,180 @@
use crate::{
node::types::ReadPrecompileCalls,
primitives::{HlBlock, HlBlockBody, HlPrimitives},
};
use alloy_consensus::BlockHeader;
use alloy_primitives::Bytes;
use itertools::izip;
use reth_chainspec::EthereumHardforks;
use reth_db::{
cursor::DbCursorRW,
transaction::{DbTx, DbTxMut},
DbTxUnwindExt,
};
use reth_provider::{
providers::{ChainStorage, NodeTypesForProvider},
BlockBodyReader, BlockBodyWriter, ChainSpecProvider, ChainStorageReader, ChainStorageWriter,
DBProvider, DatabaseProvider, EthStorage, ProviderResult, ReadBodyInput, StorageLocation,
};
pub mod tables;
/// HL chain storage: wraps [`EthStorage`] for the standard Ethereum block
/// body data and additionally persists per-block read-precompile calls in a
/// dedicated table.
#[derive(Debug, Clone, Default)]
#[non_exhaustive]
pub struct HlStorage(EthStorage);
impl HlStorage {
    /// Persists the serialized [`ReadPrecompileCalls`] for each block that has
    /// them. Values are stored as MessagePack bytes; see
    /// [`Self::read_precompile_calls`] for the matching decode.
    ///
    /// Blocks with `None` are skipped entirely, so absence of a table entry
    /// means "no read-precompile calls recorded for this block".
    fn write_precompile_calls<Provider>(
        &self,
        provider: &Provider,
        inputs: Vec<(u64, Option<ReadPrecompileCalls>)>,
    ) -> ProviderResult<()>
    where
        Provider: DBProvider<Tx: DbTxMut>,
    {
        let mut precompile_calls_cursor = provider
            .tx_ref()
            .cursor_write::<tables::BlockReadPrecompileCalls>()?;
        for (block_number, read_precompile_calls) in inputs {
            let Some(read_precompile_calls) = read_precompile_calls else {
                continue;
            };
            precompile_calls_cursor.append(
                block_number,
                &Bytes::copy_from_slice(
                    &rmp_serde::to_vec(&read_precompile_calls)
                        .expect("Failed to serialize read precompile calls"),
                ),
            )?;
        }
        Ok(())
    }

    /// Loads the [`ReadPrecompileCalls`] for the given block numbers,
    /// deserializing the stored MessagePack bytes.
    ///
    /// Returns one entry per requested block, in order; `None` when nothing
    /// was stored for that block.
    fn read_precompile_calls<Provider>(
        &self,
        provider: &Provider,
        block_numbers: &[u64],
    ) -> ProviderResult<Vec<Option<ReadPrecompileCalls>>>
    where
        Provider: DBProvider<Tx: DbTx>,
    {
        let mut read_precompile_calls = Vec::with_capacity(block_numbers.len());
        let mut precompile_calls_cursor = provider
            .tx_ref()
            .cursor_read::<tables::BlockReadPrecompileCalls>()?;
        for &block_number in block_numbers {
            let precompile_calls = precompile_calls_cursor
                .seek_exact(block_number)?
                .map(|(_, bytes)| {
                    // Symmetric with the `expect` on the write path: a decode
                    // failure here means corrupted table data.
                    rmp_serde::from_slice(&bytes)
                        .expect("Failed to deserialize read precompile calls")
                });
            read_precompile_calls.push(precompile_calls);
        }
        Ok(read_precompile_calls)
    }
}

impl<Provider> BlockBodyWriter<Provider, HlBlockBody> for HlStorage
where
    Provider: DBProvider<Tx: DbTxMut>,
{
    fn write_block_bodies(
        &self,
        provider: &Provider,
        bodies: Vec<(u64, Option<HlBlockBody>)>,
        write_to: StorageLocation,
    ) -> ProviderResult<()> {
        // Split each HL body into its Ethereum part and the HL-specific
        // read-precompile calls. Sidecars are intentionally not persisted;
        // they are unused in HyperEVM (see `read_block_bodies`).
        let mut eth_bodies = Vec::with_capacity(bodies.len());
        let mut read_precompile_calls = Vec::with_capacity(bodies.len());
        for (block_number, body) in bodies {
            match body {
                Some(HlBlockBody {
                    inner,
                    sidecars: _,
                    read_precompile_calls: calls,
                }) => {
                    eth_bodies.push((block_number, Some(inner)));
                    read_precompile_calls.push((block_number, calls));
                }
                None => {
                    eth_bodies.push((block_number, None));
                    read_precompile_calls.push((block_number, None));
                }
            }
        }
        self.0.write_block_bodies(provider, eth_bodies, write_to)?;
        self.write_precompile_calls(provider, read_precompile_calls)?;
        Ok(())
    }

    fn remove_block_bodies_above(
        &self,
        provider: &Provider,
        block: u64,
        remove_from: StorageLocation,
    ) -> ProviderResult<()> {
        self.0
            .remove_block_bodies_above(provider, block, remove_from)?;
        // Keep the precompile-calls table consistent with the unwound bodies.
        provider
            .tx_ref()
            .unwind_table_by_num::<tables::BlockReadPrecompileCalls>(block)?;
        Ok(())
    }
}

impl<Provider> BlockBodyReader<Provider> for HlStorage
where
    Provider: DBProvider + ChainSpecProvider<ChainSpec: EthereumHardforks>,
{
    type Block = HlBlock;

    fn read_block_bodies(
        &self,
        provider: &Provider,
        inputs: Vec<ReadBodyInput<'_, Self::Block>>,
    ) -> ProviderResult<Vec<HlBlockBody>> {
        // Capture the block numbers before `inputs` is consumed by the inner
        // Ethereum reader.
        let block_numbers: Vec<u64> = inputs
            .iter()
            .map(|(header, _transactions)| header.number())
            .collect();
        let eth_bodies = self.0.read_block_bodies(provider, inputs)?;
        let read_precompile_calls = self.read_precompile_calls(provider, &block_numbers)?;
        // NOTE: sidecars are not used in HyperEVM yet.
        Ok(eth_bodies
            .into_iter()
            .zip(read_precompile_calls)
            .map(|(inner, read_precompile_calls)| HlBlockBody {
                inner,
                sidecars: None,
                read_precompile_calls,
            })
            .collect())
    }
}
impl ChainStorage<HlPrimitives> for HlStorage {
    /// Returns `self` as the chain-storage reader; HL block bodies are read
    /// through the [`BlockBodyReader`] impl above.
    fn reader<TX, Types>(
        &self,
    ) -> impl ChainStorageReader<DatabaseProvider<TX, Types>, HlPrimitives>
    where
        TX: DbTx + 'static,
        Types: NodeTypesForProvider<Primitives = HlPrimitives>,
    {
        self
    }

    /// Returns `self` as the chain-storage writer; HL block bodies are
    /// written through the [`BlockBodyWriter`] impl above.
    fn writer<TX, Types>(
        &self,
    ) -> impl ChainStorageWriter<DatabaseProvider<TX, Types>, HlPrimitives>
    where
        TX: DbTxMut + DbTx + 'static,
        Types: NodeTypesForProvider<Primitives = HlPrimitives>,
    {
        self
    }
}

View File

@ -0,0 +1,24 @@
//! Additional database tables for the HL node.
//!
//! Currently defines a single table, `BlockReadPrecompileCalls`, mapping a
//! block number to the MessagePack-encoded read-precompile calls recorded
//! for that block.
use alloy_primitives::{BlockNumber, Bytes};
use reth_db::{table::TableInfo, tables, TableSet, TableType, TableViewer};
use std::fmt;
tables! {
    /// Read precompile calls for each block.
    ///
    /// The value is the MessagePack (`rmp_serde`) encoding of the block's
    /// `ReadPrecompileCalls`.
    table BlockReadPrecompileCalls {
        type Key = BlockNumber;
        type Value = Bytes;
    }
}

92
src/node/types.rs Normal file
View File

@ -0,0 +1,92 @@
//! Extends from https://github.com/hyperliquid-dex/hyper-evm-sync
//!
//! Changes:
//! - ReadPrecompileCalls supports RLP encoding / decoding
use alloy_primitives::{Address, Bytes, Log};
use alloy_rlp::{Decodable, Encodable, RlpDecodable, RlpEncodable};
use bytes::BufMut;
use reth_primitives::{SealedBlock, Transaction};
use serde::{Deserialize, Serialize};
/// A single precompile's recorded activity: its address paired with each
/// (input, result) pair observed.
pub type ReadPrecompileCall = (Address, Vec<(ReadPrecompileInput, ReadPrecompileResult)>);

/// All read-precompile calls recorded for one block.
#[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq, Default)]
pub struct ReadPrecompileCalls(pub Vec<ReadPrecompileCall>);
impl Encodable for ReadPrecompileCalls {
    /// Writes the calls as raw MessagePack bytes, with no RLP header.
    ///
    /// NOTE(review): because the payload carries no RLP length prefix, this is
    /// only round-trippable when it is the *trailing* field of an enclosing
    /// RLP list (as in the block/body helper structs) — the decoder consumes
    /// everything up to the end of the payload.
    fn encode(&self, out: &mut dyn BufMut) {
        rmp_serde::encode::write(&mut out.writer(), &self.0).unwrap();
    }
}

impl Decodable for ReadPrecompileCalls {
    fn decode(buf: &mut &[u8]) -> alloy_rlp::Result<Self> {
        let calls = rmp_serde::decode::from_slice(buf)
            .map_err(|_| alloy_rlp::Error::Custom("Failed to decode ReadPrecompileCalls"))?;
        // Mark the parsed bytes as consumed. Without this, derived RLP
        // decoders see leftover payload bytes after the trailing field and
        // reject the whole list. The MessagePack payload extends to the end
        // of the buffer (see the encoding note above).
        *buf = &[];
        Ok(Self(calls))
    }
}
/// A block together with its receipts and system transactions, in the
/// upstream hyper-evm-sync data format.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BlockAndReceipts {
    pub block: EvmBlock,
    pub receipts: Vec<LegacyReceipt>,
    /// System transactions; defaults to empty when absent from the payload.
    #[serde(default)]
    pub system_txs: Vec<SystemTx>,
    /// Read-precompile calls; defaults to empty when absent from the payload.
    #[serde(default)]
    pub read_precompile_calls: ReadPrecompileCalls,
}

/// Versioned block wrapper. `Reth115` presumably denotes the reth-v1.1.5-era
/// sealed block layout — TODO confirm against the upstream format.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum EvmBlock {
    Reth115(SealedBlock),
}

/// Receipt in the legacy upstream format.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LegacyReceipt {
    tx_type: LegacyTxType,
    success: bool,
    cumulative_gas_used: u64,
    logs: Vec<Log>,
}

/// Transaction type tags with EIP-2718 type-id discriminants.
// NOTE(review): serde serializes unit variants by *name*, not by the numeric
// discriminant — confirm this matches the upstream wire format.
#[derive(Debug, Clone, Serialize, Deserialize)]
enum LegacyTxType {
    Legacy = 0,
    Eip2930 = 1,
    Eip1559 = 2,
    Eip4844 = 3,
    Eip7702 = 4,
}

/// A system transaction and its receipt (if one was produced).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SystemTx {
    pub tx: Transaction,
    pub receipt: Option<LegacyReceipt>,
}
/// Input to a read precompile: the call data and the gas limit of the call.
#[derive(
    Debug,
    Clone,
    PartialEq,
    Eq,
    PartialOrd,
    Ord,
    Serialize,
    Deserialize,
    Hash,
    RlpEncodable,
    RlpDecodable,
)]
pub struct ReadPrecompileInput {
    pub input: Bytes,
    pub gas_limit: u64,
}

/// Outcome of a read-precompile call.
#[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq)]
pub enum ReadPrecompileResult {
    /// Successful call: gas consumed and the bytes returned.
    Ok { gas_used: u64, bytes: Bytes },
    OutOfGas,
    Error,
    UnexpectedError,
}