Mirror of https://github.com/hl-archive-node/nanoreth.git, synced 2025-12-06 10:59:55 +00:00
perf: use alloy hash map in trie related code (#12956)
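In short, trie-related code now uses the `HashMap`/`HashSet` aliases from `alloy_primitives::map` instead of `std::collections`. Those aliases evidently carry a non-default hasher (hence the perf label), so constructors tied to the std `RandomState` hasher are swapped for their hasher-generic counterparts: `HashMap::with_capacity(n)` becomes `HashMap::with_capacity_and_hasher(n, Default::default())`, and `HashMap::from([...])` becomes `HashMap::from_iter([...])`. The sketch below illustrates the call-site pattern; it is a minimal example assuming only the `alloy_primitives::map` aliases imported in the diff, and the helper names are illustrative, not functions from this codebase.

    // Minimal sketch of the constructor changes seen throughout this diff.
    // `collect_storages` and `single_storage` are hypothetical helpers.
    use alloy_primitives::{map::HashMap, B256};

    fn collect_storages(entries: Vec<(B256, u64)>) -> HashMap<B256, u64> {
        // `HashMap::with_capacity` is only provided for the default std hasher,
        // so the aliased map is built with the hasher passed in explicitly.
        let mut storages = HashMap::with_capacity_and_hasher(entries.len(), Default::default());
        for (hashed_address, value) in entries {
            storages.insert(hashed_address, value);
        }
        storages
    }

    fn single_storage(hashed_address: B256, value: u64) -> HashMap<B256, u64> {
        // Likewise, `HashMap::from([...])` becomes `HashMap::from_iter([...])`.
        HashMap::from_iter([(hashed_address, value)])
    }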
@@ -25,7 +25,11 @@ use alloy_eips::{
     eip4895::{Withdrawal, Withdrawals},
     BlockHashOrNumber,
 };
-use alloy_primitives::{keccak256, Address, BlockHash, BlockNumber, TxHash, TxNumber, B256, U256};
+use alloy_primitives::{
+    keccak256,
+    map::{hash_map, HashMap, HashSet},
+    Address, BlockHash, BlockNumber, TxHash, TxNumber, B256, U256,
+};
 use itertools::Itertools;
 use rayon::slice::ParallelSliceMut;
 use reth_chainspec::{ChainInfo, ChainSpecProvider, EthChainSpec, EthereumHardforks};
@@ -71,7 +75,7 @@ use revm::{
 };
 use std::{
     cmp::Ordering,
-    collections::{hash_map, BTreeMap, BTreeSet, HashMap, HashSet},
+    collections::{BTreeMap, BTreeSet},
     fmt::Debug,
     ops::{Deref, DerefMut, Range, RangeBounds, RangeInclusive},
     sync::{mpsc, Arc},
@@ -2442,7 +2446,7 @@ impl<TX: DbTxMut + DbTx + 'static, N: NodeTypes> HashingWriter for DatabaseProvi

         // Apply values to HashedState, and remove the account if it's None.
         let mut hashed_storage_keys: HashMap<B256, BTreeSet<B256>> =
-            HashMap::with_capacity(hashed_storages.len());
+            HashMap::with_capacity_and_hasher(hashed_storages.len(), Default::default());
         let mut hashed_storage = self.tx.cursor_dup_write::<tables::HashedStorages>()?;
         for (hashed_address, key, value) in hashed_storages.into_iter().rev() {
             hashed_storage_keys.entry(hashed_address).or_default().insert(key);
@@ -1,10 +1,10 @@
-use alloy_primitives::{Address, BlockNumber, B256};
+use alloy_primitives::{map::HashMap, Address, BlockNumber, B256};
 use auto_impl::auto_impl;
 use reth_db::models::{AccountBeforeTx, BlockNumberAddress};
 use reth_primitives::{Account, StorageEntry};
 use reth_storage_errors::provider::ProviderResult;
 use std::{
-    collections::{BTreeMap, BTreeSet, HashMap},
+    collections::{BTreeMap, BTreeSet},
     ops::{RangeBounds, RangeInclusive},
 };

@@ -114,7 +114,7 @@ pub trait StorageTrieWriter: Send + Sync {
     /// Returns the number of entries modified.
     fn write_storage_trie_updates(
         &self,
-        storage_tries: &std::collections::HashMap<B256, StorageTrieUpdates>,
+        storage_tries: &HashMap<B256, StorageTrieUpdates>,
     ) -> ProviderResult<usize>;

     /// Writes storage trie updates for the given hashed address.
@@ -1,9 +1,9 @@
 use crate::Nibbles;
-use alloy_primitives::B256;
-use std::{
-    collections::{HashMap, HashSet},
-    sync::Arc,
+use alloy_primitives::{
+    map::{HashMap, HashSet},
+    B256,
 };
+use std::sync::Arc;

 /// Collection of mutable prefix sets.
 #[derive(Clone, Default, Debug)]
@@ -1,6 +1,8 @@
 use crate::{BranchNodeCompact, HashBuilder, Nibbles};
-use alloy_primitives::B256;
-use std::collections::{HashMap, HashSet};
+use alloy_primitives::{
+    map::{HashMap, HashSet},
+    B256,
+};

 /// The aggregation of trie updates.
 #[derive(PartialEq, Eq, Clone, Default, Debug)]
@@ -228,8 +230,8 @@ impl StorageTrieUpdates {
 #[cfg(any(test, feature = "serde"))]
 mod serde_nibbles_set {
     use crate::Nibbles;
+    use alloy_primitives::map::HashSet;
     use serde::{de::Error, Deserialize, Deserializer, Serialize, Serializer};
-    use std::collections::HashSet;

     pub(super) fn serialize<S>(map: &HashSet<Nibbles>, serializer: S) -> Result<S::Ok, S::Error>
     where
@@ -264,13 +266,13 @@ mod serde_nibbles_set {
 #[cfg(any(test, feature = "serde"))]
 mod serde_nibbles_map {
     use crate::Nibbles;
-    use alloy_primitives::hex;
+    use alloy_primitives::{hex, map::HashMap};
     use serde::{
         de::{Error, MapAccess, Visitor},
         ser::SerializeMap,
         Deserialize, Deserializer, Serialize, Serializer,
     };
-    use std::{collections::HashMap, marker::PhantomData};
+    use std::marker::PhantomData;

     pub(super) fn serialize<S, T>(
         map: &HashMap<Nibbles, T>,
@@ -314,7 +316,10 @@ mod serde_nibbles_map {
         where
             A: MapAccess<'de>,
         {
-            let mut result = HashMap::with_capacity(map.size_hint().unwrap_or(0));
+            let mut result = HashMap::with_capacity_and_hasher(
+                map.size_hint().unwrap_or(0),
+                Default::default(),
+            );

             while let Some((key, value)) = map.next_entry::<String, T>()? {
                 let decoded_key =
@@ -406,13 +411,13 @@ fn exclude_empty_from_pair<V>(
 #[cfg(feature = "serde-bincode-compat")]
 pub mod serde_bincode_compat {
     use crate::{BranchNodeCompact, Nibbles};
-    use alloy_primitives::B256;
+    use alloy_primitives::{
+        map::{HashMap, HashSet},
+        B256,
+    };
     use serde::{Deserialize, Deserializer, Serialize, Serializer};
     use serde_with::{DeserializeAs, SerializeAs};
-    use std::{
-        borrow::Cow,
-        collections::{HashMap, HashSet},
-    };
+    use std::borrow::Cow;

     /// Bincode-compatible [`super::TrieUpdates`] serde implementation.
     ///
@@ -123,7 +123,7 @@ impl<'a, TX: DbTx> DatabaseStorageProof<'a, TX>
         let prefix_set = storage.construct_prefix_set();
         let state_sorted = HashedPostStateSorted::new(
             Default::default(),
-            HashMap::from([(hashed_address, storage.into_sorted())]),
+            HashMap::from_iter([(hashed_address, storage.into_sorted())]),
         );
         Self::from_tx(tx, address)
             .with_hashed_cursor_factory(HashedPostStateCursorFactory::new(
@@ -145,7 +145,7 @@ impl<'a, TX: DbTx> DatabaseStorageProof<'a, TX>
         let prefix_set = storage.construct_prefix_set();
         let state_sorted = HashedPostStateSorted::new(
             Default::default(),
-            HashMap::from([(hashed_address, storage.into_sorted())]),
+            HashMap::from_iter([(hashed_address, storage.into_sorted())]),
         );
         Self::from_tx(tx, address)
             .with_hashed_cursor_factory(HashedPostStateCursorFactory::new(
@@ -1,13 +1,14 @@
 #![allow(missing_docs)]

 use alloy_consensus::EMPTY_ROOT_HASH;
-use alloy_primitives::{hex_literal::hex, keccak256, Address, B256, U256};
+use alloy_primitives::{hex_literal::hex, keccak256, map::HashMap, Address, B256, U256};
+use alloy_rlp::Encodable;
 use proptest::{prelude::ProptestConfig, proptest};
 use proptest_arbitrary_interop::arb;
 use reth_db::{tables, test_utils::TempDatabase, DatabaseEnv};
 use reth_db_api::{
     cursor::{DbCursorRO, DbCursorRW, DbDupCursorRO},
-    transaction::DbTxMut,
+    transaction::{DbTx, DbTxMut},
 };
 use reth_primitives::{Account, StorageEntry};
 use reth_provider::{
@@ -15,25 +16,15 @@ use reth_provider::{
     StorageTrieWriter, TrieWriter,
 };
 use reth_trie::{
-    prefix_set::PrefixSetMut,
+    prefix_set::{PrefixSetMut, TriePrefixSets},
     test_utils::{state_root, state_root_prehashed, storage_root, storage_root_prehashed},
     triehash::KeccakHasher,
-    BranchNodeCompact, StateRoot, StorageRoot, TrieMask,
+    updates::StorageTrieUpdates,
+    BranchNodeCompact, HashBuilder, IntermediateStateRootState, Nibbles, StateRoot,
+    StateRootProgress, StorageRoot, TrieAccount, TrieMask,
 };
 use reth_trie_db::{DatabaseStateRoot, DatabaseStorageRoot};
-use std::{
-    collections::{BTreeMap, HashMap},
-    ops::Mul,
-    str::FromStr,
-    sync::Arc,
-};
-
-use alloy_rlp::Encodable;
-use reth_db_api::transaction::DbTx;
-use reth_trie::{
-    prefix_set::TriePrefixSets, updates::StorageTrieUpdates, HashBuilder,
-    IntermediateStateRootState, Nibbles, StateRootProgress, TrieAccount,
-};
+use std::{collections::BTreeMap, ops::Mul, str::FromStr, sync::Arc};

 fn insert_account(
     tx: &impl DbTxMut,
@@ -27,7 +27,7 @@ fn includes_empty_node_preimage() {
     assert_eq!(
         TrieWitness::from_tx(provider.tx_ref())
             .compute(HashedPostState {
-                accounts: HashMap::from([(hashed_address, Some(Account::default()))]),
+                accounts: HashMap::from_iter([(hashed_address, Some(Account::default()))]),
                 storages: HashMap::default(),
             })
             .unwrap(),
@@ -44,8 +44,8 @@ fn includes_empty_node_preimage() {

     let witness = TrieWitness::from_tx(provider.tx_ref())
         .compute(HashedPostState {
-            accounts: HashMap::from([(hashed_address, Some(Account::default()))]),
-            storages: HashMap::from([(
+            accounts: HashMap::from_iter([(hashed_address, Some(Account::default()))]),
+            storages: HashMap::from_iter([(
                 hashed_address,
                 HashedStorage::from_iter(false, [(hashed_slot, U256::from(1))]),
             )]),
@@ -80,12 +80,16 @@ fn includes_nodes_for_destroyed_storage_nodes() {
         .multiproof(HashMap::from_iter([(hashed_address, HashSet::from_iter([hashed_slot]))]))
         .unwrap();

-    let witness = TrieWitness::from_tx(provider.tx_ref())
-        .compute(HashedPostState {
-            accounts: HashMap::from([(hashed_address, Some(Account::default()))]),
-            storages: HashMap::from([(hashed_address, HashedStorage::from_iter(true, []))]), // destroyed
-        })
-        .unwrap();
+    let witness =
+        TrieWitness::from_tx(provider.tx_ref())
+            .compute(HashedPostState {
+                accounts: HashMap::from_iter([(hashed_address, Some(Account::default()))]),
+                storages: HashMap::from_iter([(
+                    hashed_address,
+                    HashedStorage::from_iter(true, []),
+                )]), // destroyed
+            })
+            .unwrap();
     assert!(witness.contains_key(&state_root));
     for node in multiproof.account_subtree.values() {
         assert_eq!(witness.get(&keccak256(node)), Some(node));
@@ -126,8 +130,8 @@ fn correctly_decodes_branch_node_values() {

     let witness = TrieWitness::from_tx(provider.tx_ref())
         .compute(HashedPostState {
-            accounts: HashMap::from([(hashed_address, Some(Account::default()))]),
-            storages: HashMap::from([(
+            accounts: HashMap::from_iter([(hashed_address, Some(Account::default()))]),
+            storages: HashMap::from_iter([(
                 hashed_address,
                 HashedStorage::from_iter(
                     false,
@@ -3,10 +3,9 @@ use crate::{
     forward_cursor::ForwardInMemoryCursor, HashedAccountsSorted, HashedPostStateSorted,
     HashedStorageSorted,
 };
-use alloy_primitives::{B256, U256};
+use alloy_primitives::{map::HashSet, B256, U256};
 use reth_primitives::Account;
 use reth_storage_errors::db::DatabaseError;
-use std::collections::HashSet;

 /// The hashed cursor factory for the post state.
 #[derive(Clone, Debug)]
@@ -2,15 +2,16 @@ use crate::{
     prefix_set::{PrefixSetMut, TriePrefixSetsMut},
     Nibbles,
 };
-use alloy_primitives::{keccak256, Address, B256, U256};
+use alloy_primitives::{
+    keccak256,
+    map::{hash_map, HashMap, HashSet},
+    Address, B256, U256,
+};
 use itertools::Itertools;
 use rayon::prelude::{IntoParallelIterator, ParallelIterator};
 use reth_primitives::Account;
 use revm::db::{states::CacheAccount, AccountStatus, BundleAccount};
-use std::{
-    borrow::Cow,
-    collections::{hash_map, HashMap, HashSet},
-};
+use std::borrow::Cow;

 /// Representation of in-memory hashed state.
 #[derive(PartialEq, Eq, Clone, Default, Debug)]
@@ -41,8 +42,8 @@ impl HashedPostState {
             })
             .collect::<Vec<(B256, (Option<Account>, HashedStorage))>>();

-        let mut accounts = HashMap::with_capacity(hashed.len());
-        let mut storages = HashMap::with_capacity(hashed.len());
+        let mut accounts = HashMap::with_capacity_and_hasher(hashed.len(), Default::default());
+        let mut storages = HashMap::with_capacity_and_hasher(hashed.len(), Default::default());
         for (address, (account, storage)) in hashed {
             accounts.insert(address, account);
             storages.insert(address, storage);
@@ -68,8 +69,8 @@ impl HashedPostState {
             })
             .collect::<Vec<(B256, (Option<Account>, HashedStorage))>>();

-        let mut accounts = HashMap::with_capacity(hashed.len());
-        let mut storages = HashMap::with_capacity(hashed.len());
+        let mut accounts = HashMap::with_capacity_and_hasher(hashed.len(), Default::default());
+        let mut storages = HashMap::with_capacity_and_hasher(hashed.len(), Default::default());
         for (address, (account, storage)) in hashed {
             accounts.insert(address, account);
             storages.insert(address, storage);
@@ -79,7 +80,10 @@ impl HashedPostState {

     /// Construct [`HashedPostState`] from a single [`HashedStorage`].
     pub fn from_hashed_storage(hashed_address: B256, storage: HashedStorage) -> Self {
-        Self { accounts: HashMap::default(), storages: HashMap::from([(hashed_address, storage)]) }
+        Self {
+            accounts: HashMap::default(),
+            storages: HashMap::from_iter([(hashed_address, storage)]),
+        }
     }

     /// Set account entries on hashed state.
@@ -121,7 +125,8 @@ impl HashedPostState {
         }

         // Populate storage prefix sets.
-        let mut storage_prefix_sets = HashMap::with_capacity(self.storages.len());
+        let mut storage_prefix_sets =
+            HashMap::with_capacity_and_hasher(self.storages.len(), Default::default());
         for (hashed_address, hashed_storage) in &self.storages {
             account_prefix_set.insert(Nibbles::unpack(hashed_address));
             storage_prefix_sets.insert(*hashed_address, hashed_storage.construct_prefix_set());
@@ -3,10 +3,9 @@ use crate::{
     forward_cursor::ForwardInMemoryCursor,
     updates::{StorageTrieUpdatesSorted, TrieUpdatesSorted},
 };
-use alloy_primitives::B256;
+use alloy_primitives::{map::HashSet, B256};
 use reth_storage_errors::db::DatabaseError;
 use reth_trie_common::{BranchNodeCompact, Nibbles};
-use std::collections::HashSet;

 /// The trie cursor factory for the trie updates.
 #[derive(Debug, Clone)]
@@ -3,9 +3,8 @@ use crate::{
     trie_cursor::{CursorSubNode, TrieCursor},
     BranchNodeCompact, Nibbles,
 };
-use alloy_primitives::B256;
+use alloy_primitives::{map::HashSet, B256};
 use reth_storage_errors::db::DatabaseError;
-use std::collections::HashSet;

 #[cfg(feature = "metrics")]
 use crate::metrics::WalkerMetrics;