Mirror of https://github.com/hl-archive-node/nanoreth.git, synced 2025-12-06 10:59:55 +00:00
fix: do not record trie root metrics for ParallelProof (#13960)
@@ -27,10 +27,10 @@ use reth_trie_db::{DatabaseHashedCursorFactory, DatabaseTrieCursorFactory};
 use std::{sync::Arc, time::Instant};
 use tracing::{debug, trace};
 
-#[cfg(feature = "metrics")]
-use crate::metrics::ParallelStateRootMetrics;
-
-/// TODO:
+/// Parallel proof calculator.
+///
+/// This can collect proof for many targets in parallel, spawning a task for each hashed address
+/// that has proof targets.
 #[derive(Debug)]
 pub struct ParallelProof<Factory> {
     /// Consistent view of the database.
@@ -48,14 +48,11 @@ pub struct ParallelProof<Factory> {
     collect_branch_node_masks: bool,
     /// Thread pool for local tasks
    thread_pool: Arc<rayon::ThreadPool>,
-    /// Parallel state root metrics.
-    #[cfg(feature = "metrics")]
-    metrics: ParallelStateRootMetrics,
 }
 
 impl<Factory> ParallelProof<Factory> {
     /// Create new state proof generator.
-    pub fn new(
+    pub const fn new(
         view: ConsistentDbView<Factory>,
         nodes_sorted: Arc<TrieUpdatesSorted>,
         state_sorted: Arc<HashedPostStateSorted>,
@@ -69,8 +66,6 @@ impl<Factory> ParallelProof<Factory> {
             prefix_sets,
             collect_branch_node_masks: false,
             thread_pool,
-            #[cfg(feature = "metrics")]
-            metrics: ParallelStateRootMetrics::default(),
         }
     }
 
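
The metrics removal above is also what allows `new` to become a `const fn`: the old constructor had to call `ParallelStateRootMetrics::default()`, and trait methods such as `Default::default` cannot be called from a `const fn` on stable Rust. A minimal standalone sketch with stand-in names (not the actual reth types) illustrating the pattern:

// Stand-in sketch: once every field is initialized from a parameter or a
// literal, the constructor body is const-evaluable.
struct ProofSketch {
    collect_branch_node_masks: bool,
    // Re-adding a field initialized via `SomeMetrics::default()` would force
    // `new` back to a non-const `fn`, since trait calls are not allowed in
    // `const fn` on stable Rust.
}

impl ProofSketch {
    const fn new() -> Self {
        Self { collect_branch_node_masks: false }
    }
}

fn main() {
    let sketch = ProofSketch::new();
    assert!(!sketch.collect_branch_node_masks);
}
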
@@ -119,7 +114,7 @@ where
         let storage_root_targets_len = storage_root_targets.len();
 
         debug!(
-            target: "trie::parallel_state_root",
+            target: "trie::parallel_proof",
             total_targets = storage_root_targets_len,
             "Starting parallel proof generation"
         );
@@ -143,7 +138,7 @@ where
 
             self.thread_pool.spawn_fifo(move || {
                 debug!(
-                    target: "trie::parallel",
+                    target: "trie::parallel_proof",
                     ?hashed_address,
                     "Starting proof calculation"
                 );
@@ -153,7 +148,7 @@ where
                     let provider_start = Instant::now();
                     let provider_ro = view.provider_ro()?;
                     trace!(
-                        target: "trie::parallel",
+                        target: "trie::parallel_proof",
                         ?hashed_address,
                         provider_time = ?provider_start.elapsed(),
                         "Got provider"
@@ -169,7 +164,7 @@ where
                         &hashed_state_sorted,
                     );
                     trace!(
-                        target: "trie::parallel",
+                        target: "trie::parallel_proof",
                         ?hashed_address,
                         cursor_time = ?cursor_start.elapsed(),
                         "Created cursors"
@@ -188,7 +183,7 @@ where
                     .map_err(|e| ParallelStateRootError::Other(e.to_string()));
 
                     trace!(
-                        target: "trie::parallel",
+                        target: "trie::parallel_proof",
                         ?hashed_address,
                         prefix_set = ?prefix_set.len(),
                         target_slots = ?target_slots_len,
@@ -204,7 +199,7 @@ where
                 // `account_node_iter` below.
                 if let Err(e) = tx.send(result) {
                     debug!(
-                        target: "trie::parallel",
+                        target: "trie::parallel_proof",
                         ?hashed_address,
                         error = ?e,
                         task_time = ?task_start.elapsed(),
@@ -300,9 +295,6 @@ where
         }
         let _ = hash_builder.root();
 
-        #[cfg(feature = "metrics")]
-        self.metrics.record_state_trie(tracker.finish());
-
         let account_subtree = hash_builder.take_proof_nodes();
         let (branch_node_hash_masks, branch_node_tree_masks) = if self.collect_branch_node_masks {
             let updated_branch_nodes = hash_builder.updated_branch_nodes.unwrap_or_default();
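
With this change every tracing call in the file uses the single `trie::parallel_proof` target instead of a mix of `trie::parallel` and `trie::parallel_state_root`, so the whole parallel-proof path can be enabled or silenced with one filter directive. A hypothetical subscriber setup, not how reth wires its logging, shown only to illustrate the target directive (assumes tracing-subscriber with the `env-filter` feature):

use tracing_subscriber::EnvFilter;

fn main() {
    // Only events whose target matches `trie::parallel_proof` pass the filter;
    // everything else falls back to the default (disabled) level.
    tracing_subscriber::fmt()
        .with_env_filter(EnvFilter::new("trie::parallel_proof=trace"))
        .init();

    tracing::trace!(target: "trie::parallel_proof", "visible at trace level");
    tracing::trace!(target: "trie::parallel", "filtered out by the directive above");
}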