feat: downgrade trie debug logs to trace, cleanup debug logs more (#5285)

Author: Alexey Shekhirin
Date: 2023-11-07 12:58:36 +00:00 (committed by GitHub)
Parent: e6ea2510c7
Commit: d762d2d9f7
8 changed files with 75 additions and 58 deletions
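Note: the trie logs touched by this change are now emitted at trace level under explicit targets (`trie::hash_builder`, `trie::loader`, `trie::storage_root`), so they disappear at the default verbosity. A minimal sketch of re-enabling them with a plain tracing-subscriber setup (illustrative only, not reth's own logging pipeline):

```rust
use tracing_subscriber::{fmt, EnvFilter};

fn main() {
    // Keep everything else at info, but surface the trie events that this commit
    // downgraded from debug to trace (targets taken from the diff below).
    let filter = EnvFilter::try_new(
        "info,trie::hash_builder=trace,trie::loader=trace,trie::storage_root=trace",
    )
    .expect("filter directives are valid");

    fmt().with_env_filter(filter).init();

    tracing::trace!(target: "trie::hash_builder", "visible again at trace level");
}
```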

@@ -87,7 +87,7 @@ codegen-units = 1
incremental = false
[workspace.dependencies]
## reth
# reth
reth = { path = "./bin/reth" }
reth-primitives = { path = "./crates/primitives" }
reth-interfaces = { path = "./crates/interfaces" }
@@ -110,11 +110,12 @@ reth-eth-wire = { path = "./crates/net/eth-wire" }
reth-ecies = { path = "./crates/net/ecies" }
reth-tracing = { path = "./crates/tracing" }
reth-tokio-util = { path = "crates/tokio-util" }
# revm
revm = { git = "https://github.com/bluealloy/revm", rev = "1609e07c68048909ad1682c98cf2b9baa76310b5" }
revm-primitives = { git = "https://github.com/bluealloy/revm", rev = "1609e07c68048909ad1682c98cf2b9baa76310b5" }
## eth
# eth
alloy-primitives = "0.4"
alloy-dyn-abi = "0.4"
alloy-sol-types = "0.4"
@@ -126,11 +127,11 @@ ethers-middleware = { version = "2.0", default-features = false }
discv5 = { git = "https://github.com/sigp/discv5", rev = "f289bbd4c57d499bb1bdb393af3c249600a1c662" }
igd = { git = "https://github.com/stevefan1999-personal/rust-igd", rev = "c2d1f83eb1612a462962453cb0703bc93258b173" }
## js
# js
boa_engine = "0.17"
boa_gc = "0.17"
## misc
# misc
aquamarine = "0.3"
bytes = "1.5"
bitflags = "2.4"
@@ -151,27 +152,27 @@ hex-literal = "0.4"
once_cell = "1.17"
syn = "2.0"
### proc-macros
# proc-macros
proc-macro2 = "1.0"
quote = "1.0"
## tokio
# tokio
tokio-stream = "0.1.11"
tokio = { version = "1.21", default-features = false }
tokio-util = { version = "0.7.4", features = ["codec"] }
## async
# async
async-trait = "0.1.68"
futures = "0.3.26"
pin-project = "1.0.12"
futures-util = "0.3.25"
## json
# rpc
jsonrpsee = { version = "0.20" }
jsonrpsee-core = { version = "0.20" }
jsonrpsee-types = { version = "0.20" }
## crypto
# crypto
secp256k1 = { version = "0.27.0", default-features = false, features = [
"global-context",
"rand-std",
@@ -181,11 +182,11 @@ enr = { version = "0.9", default-features = false, features = ["k256"] }
# for eip-4844
c-kzg = "0.4.0"
## config
# config
confy = "0.5"
toml = "0.8"
### misc-testing
# misc-testing
arbitrary = "1.1"
assert_matches = "1.5.0"
tempfile = "3.8"

@@ -23,6 +23,11 @@ pub mod components;
pub mod config;
pub mod ext;
/// Default [Directive] for [EnvFilter] which disables high-frequency debug logs from `hyper` and
/// `trust-dns`
const DEFAULT_ENV_FILTER_DIRECTIVE: &str =
"hyper::proto::h1=off,trust_dns_proto=off,trust_dns_resolver=off";
/// The main reth cli interface.
///
/// This is the entrypoint to the executable.
@@ -235,14 +240,18 @@ impl Logs {
if self.journald {
layers.push(
reth_tracing::journald(EnvFilter::builder().parse(&self.journald_filter)?)
reth_tracing::journald(
EnvFilter::try_new(DEFAULT_ENV_FILTER_DIRECTIVE)?
.add_directive(self.journald_filter.parse()?),
)
.expect("Could not connect to journald"),
);
}
let file_guard = if self.log_file_max_files > 0 {
let (layer, guard) = reth_tracing::file(
EnvFilter::builder().parse(&self.log_file_filter)?,
EnvFilter::try_new(DEFAULT_ENV_FILTER_DIRECTIVE)?
.add_directive(self.log_file_filter.parse()?),
&self.log_file_directory,
"reth.log",
self.log_file_max_size * MB_TO_BYTES,
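The journald and file layers above now start from `DEFAULT_ENV_FILTER_DIRECTIVE` and add the user-supplied filter on top. A rough sketch of that composition (the `build_filter` helper and its error handling are assumptions, not part of the diff):

```rust
use tracing_subscriber::filter::{Directive, EnvFilter};

// Hypothetical helper mirroring the composition above: start from the default
// directive that mutes hyper/trust-dns debug noise, then layer a single
// user-supplied directive (e.g. "debug" or "reth=trace") on top of it.
fn build_filter(user_directive: &str) -> Result<EnvFilter, Box<dyn std::error::Error>> {
    let filter = EnvFilter::try_new(
        "hyper::proto::h1=off,trust_dns_proto=off,trust_dns_resolver=off",
    )?
    .add_directive(user_directive.parse::<Directive>()?);
    Ok(filter)
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let _filter = build_filter("debug")?;
    Ok(())
}
```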

@@ -16,6 +16,7 @@ use std::{
task::{Context, Poll},
};
use tokio_stream::Stream;
use tracing::{debug, trace};
/// [`MAX_MESSAGE_SIZE`] is the maximum cap on the size of a protocol message.
// https://github.com/ethereum/go-ethereum/blob/30602163d5d8321fbc68afdcbbaf2362b2641bde/eth/protocols/eth/protocol.go#L50
@@ -55,7 +56,7 @@ where
status: Status,
fork_filter: ForkFilter,
) -> Result<(EthStream<S>, Status), EthStreamError> {
tracing::trace!(
trace!(
%status,
"sending eth status to peer"
);
@@ -86,7 +87,7 @@ where
let msg = match ProtocolMessage::decode_message(version, &mut their_msg.as_ref()) {
Ok(m) => m,
Err(err) => {
tracing::debug!("decode error in eth handshake: msg={their_msg:x}");
debug!("decode error in eth handshake: msg={their_msg:x}");
self.inner.disconnect(DisconnectReason::DisconnectRequested).await?;
return Err(err)
}
@@ -96,7 +97,7 @@ where
// https://github.com/ethereum/go-ethereum/blob/9244d5cd61f3ea5a7645fdf2a1a96d53421e412f/eth/protocols/eth/handshake.go#L87-L89
match msg.message {
EthMessage::Status(resp) => {
tracing::trace!(
trace!(
status=%resp,
"validating incoming eth status from peer"
);
@@ -241,7 +242,11 @@ where
let msg = match ProtocolMessage::decode_message(*this.version, &mut bytes.as_ref()) {
Ok(m) => m,
Err(err) => {
tracing::debug!("decode error: msg={bytes:x}");
debug!(
version=?this.version,
msg=format!("{:02x?}...{:x?}", &bytes[..10], &bytes[bytes.len() - 10..]),
"failed to decode protocol message"
);
return Poll::Ready(Some(Err(err)))
}
};
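The reworked decode-error log prints a truncated hex preview of the undecodable message rather than dumping it whole. A small illustration of that formatting (the `preview` helper is hypothetical and assumes the message is at least 10 bytes):

```rust
// Illustrative helper (not in the diff) showing what the truncated preview in the
// new decode-error log looks like: first and last 10 bytes, Debug-printed as hex.
fn preview(bytes: &[u8]) -> String {
    format!("{:02x?}...{:x?}", &bytes[..10], &bytes[bytes.len() - 10..])
}

fn main() {
    let msg: Vec<u8> = (0u8..32).collect();
    // Prints: [00, 01, 02, 03, 04, 05, 06, 07, 08, 09]...[16, 17, 18, 19, 1a, 1b, 1c, 1d, 1e, 1f]
    println!("{}", preview(&msg));
}
```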

@@ -9,7 +9,7 @@ repository.workspace = true
description = "A basic payload builder for reth that uses the txpool API to build payloads."
[dependencies]
## reth
# reth
reth-primitives.workspace = true
reth-revm = { path = "../../revm" }
reth-transaction-pool.workspace = true
@@ -18,11 +18,11 @@ reth-payload-builder.workspace = true
reth-tasks.workspace = true
reth-interfaces.workspace = true
## ethereum
# ethereum
alloy-rlp.workspace = true
revm.workspace = true
## async
# async
tokio = { workspace = true, features = ["sync", "time"] }
futures-core = "0.3"
futures-util.workspace = true
@@ -31,7 +31,7 @@ futures-util.workspace = true
reth-metrics.workspace = true
metrics.workspace = true
## misc
# misc
tracing.workspace = true
[features]

@@ -9,27 +9,27 @@ repository.workspace = true
description = "reth payload builder"
[dependencies]
## reth
# reth
reth-primitives.workspace = true
reth-rpc-types.workspace = true
reth-transaction-pool.workspace = true
reth-interfaces.workspace = true
reth-rpc-types-compat.workspace = true
## ethereum
# ethereum
alloy-rlp.workspace = true
revm-primitives.workspace = true
## async
# async
tokio = { workspace = true, features = ["sync"] }
tokio-stream.workspace = true
futures-util.workspace = true
## metrics
# metrics
reth-metrics.workspace = true
metrics.workspace = true
## misc
# misc
thiserror.workspace = true
sha2 = { version = "0.10", default-features = false }
tracing.workspace = true

@@ -7,6 +7,7 @@ use std::{
collections::{BTreeMap, HashMap},
fmt::Debug,
};
use tracing::trace;
mod state;
pub use state::HashBuilderState;
@@ -172,10 +173,10 @@ impl HashBuilder {
}
fn set_key_value<T: Into<HashBuilderValue>>(&mut self, key: Nibbles, value: T) {
tracing::trace!(target: "trie::hash_builder", key = ?self.key, value = ?self.value, "old key/value");
trace!(target: "trie::hash_builder", key = ?self.key, value = ?self.value, "old key/value");
self.key = key;
self.value = value.into();
tracing::trace!(target: "trie::hash_builder", key = ?self.key, value = ?self.value, "new key/value");
trace!(target: "trie::hash_builder", key = ?self.key, value = ?self.value, "new key/value");
}
fn current_root(&self) -> B256 {
@@ -199,7 +200,7 @@ impl HashBuilder {
// current / self.key is always the latest added element in the trie
let mut current = self.key.clone();
tracing::debug!(target: "trie::hash_builder", ?current, ?succeeding, "updating merkle tree");
trace!(target: "trie::hash_builder", ?current, ?succeeding, "updating merkle tree");
let mut i = 0;
loop {
@@ -220,7 +221,7 @@ impl HashBuilder {
let len = std::cmp::max(preceding_len, common_prefix_len);
assert!(len < current.len());
tracing::trace!(
trace!(
target: "trie::hash_builder",
?len,
?common_prefix_len,
@@ -233,11 +234,11 @@ impl HashBuilder {
let extra_digit = current[len];
if self.groups.len() <= len {
let new_len = len + 1;
tracing::trace!(target: "trie::hash_builder", new_len, old_len = self.groups.len(), "scaling state masks to fit");
trace!(target: "trie::hash_builder", new_len, old_len = self.groups.len(), "scaling state masks to fit");
self.groups.resize(new_len, TrieMask::default());
}
self.groups[len] |= TrieMask::from_nibble(extra_digit);
tracing::trace!(
trace!(
target: "trie::hash_builder",
?extra_digit,
groups = self.groups.iter().map(|x| format!("{x:?}")).collect::<Vec<_>>().join(","),
@@ -252,19 +253,19 @@ impl HashBuilder {
if !succeeding.is_empty() || preceding_exists {
len_from += 1;
}
tracing::trace!(target: "trie::hash_builder", "skipping {} nibbles", len_from);
trace!(target: "trie::hash_builder", "skipping {} nibbles", len_from);
// The key without the common prefix
let short_node_key = current.slice_from(len_from);
tracing::trace!(target: "trie::hash_builder", ?short_node_key);
trace!(target: "trie::hash_builder", ?short_node_key);
// Concatenate the 2 nodes together
if !build_extensions {
match &self.value {
HashBuilderValue::Bytes(leaf_value) => {
let leaf_node = LeafNode::new(&short_node_key, leaf_value);
tracing::debug!(target: "trie::hash_builder", ?leaf_node, "pushing leaf node");
tracing::trace!(target: "trie::hash_builder", rlp = {
trace!(target: "trie::hash_builder", ?leaf_node, "pushing leaf node");
trace!(target: "trie::hash_builder", rlp = {
self.rlp_buf.clear();
crate::hex::encode(&leaf_node.rlp(&mut self.rlp_buf))
}, "leaf node rlp");
@@ -274,7 +275,7 @@ impl HashBuilder {
self.retain_proof_from_buf(&current);
}
HashBuilderValue::Hash(hash) => {
tracing::debug!(target: "trie::hash_builder", ?hash, "pushing branch node hash");
trace!(target: "trie::hash_builder", ?hash, "pushing branch node hash");
self.stack.push(rlp_hash(*hash));
if self.stored_in_database {
@@ -294,8 +295,8 @@ impl HashBuilder {
let stack_last =
self.stack.pop().expect("there should be at least one stack item; qed");
let extension_node = ExtensionNode::new(&short_node_key, &stack_last);
tracing::debug!(target: "trie::hash_builder", ?extension_node, "pushing extension node");
tracing::trace!(target: "trie::hash_builder", rlp = {
trace!(target: "trie::hash_builder", ?extension_node, "pushing extension node");
trace!(target: "trie::hash_builder", rlp = {
self.rlp_buf.clear();
crate::hex::encode(&extension_node.rlp(&mut self.rlp_buf))
}, "extension node rlp");
@@ -306,7 +307,7 @@ impl HashBuilder {
}
if preceding_len <= common_prefix_len && !succeeding.is_empty() {
tracing::trace!(target: "trie::hash_builder", "no common prefix to create branch nodes from, returning");
trace!(target: "trie::hash_builder", "no common prefix to create branch nodes from, returning");
return
}
@@ -323,14 +324,14 @@ impl HashBuilder {
self.resize_masks(len);
if preceding_len == 0 {
tracing::trace!(target: "trie::hash_builder", "0 or 1 state masks means we have no more elements to process");
trace!(target: "trie::hash_builder", "0 or 1 state masks means we have no more elements to process");
return
}
current.truncate(preceding_len);
tracing::trace!(target: "trie::hash_builder", ?current, "truncated nibbles to {} bytes", preceding_len);
trace!(target: "trie::hash_builder", ?current, "truncated nibbles to {} bytes", preceding_len);
tracing::trace!(target: "trie::hash_builder", groups = ?self.groups, "popping empty state masks");
trace!(target: "trie::hash_builder", groups = ?self.groups, "popping empty state masks");
while self.groups.last() == Some(&TrieMask::default()) {
self.groups.pop();
}
@@ -356,7 +357,7 @@ impl HashBuilder {
// Clears the stack from the branch node elements
let first_child_idx = self.stack.len() - state_mask.count_ones() as usize;
tracing::debug!(
trace!(
target: "trie::hash_builder",
new_len = first_child_idx,
old_len = self.stack.len(),
@@ -364,8 +365,8 @@ impl HashBuilder {
);
self.stack.resize(first_child_idx, vec![]);
tracing::debug!(target: "trie::hash_builder", "pushing branch node with {:?} mask from stack", state_mask);
tracing::trace!(target: "trie::hash_builder", rlp = crate::hex::encode(&rlp), "branch node rlp");
trace!(target: "trie::hash_builder", "pushing branch node with {:?} mask from stack", state_mask);
trace!(target: "trie::hash_builder", rlp = crate::hex::encode(&rlp), "branch node rlp");
self.stack.push(rlp);
children
}
@@ -401,7 +402,7 @@ impl HashBuilder {
// Send it over to the provided channel which will handle it on the
// other side of the HashBuilder
tracing::debug!(target: "trie::hash_builder", node = ?n, "intermediate node");
trace!(target: "trie::hash_builder", node = ?n, "intermediate node");
let common_prefix = current.slice(0, len);
if let Some(nodes) = self.updated_branch_nodes.as_mut() {
nodes.insert(common_prefix, n);
@@ -428,7 +429,7 @@ impl HashBuilder {
}
fn resize_masks(&mut self, new_len: usize) {
tracing::trace!(
trace!(
target: "trie::hash_builder",
new_len,
old_tree_mask_len = self.tree_masks.len(),
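The hash-builder events above all share the same structured style: an explicit `target:` plus `?field` (Debug) captures, so a directive like `trie::hash_builder=trace` enables exactly these lines. A minimal sketch of that idiom (function name and fields are illustrative, not from the diff):

```rust
use tracing::trace;

// Illustrative only: a target-scoped, structured trace event in the hash builder's
// style. `?value` records a field via its Debug impl; the explicit target lets a
// filter enable just these events.
fn log_mask_resize(old_len: usize, new_len: usize, groups: &[u16]) {
    trace!(target: "trie::hash_builder", old_len, new_len, groups = ?groups, "scaling state masks to fit");
}

fn main() {
    tracing_subscriber::fmt().with_max_level(tracing::Level::TRACE).init();
    log_mask_resize(3, 4, &[0b1011, 0b0001]);
}
```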

@@ -10,16 +10,16 @@ description = "Task management"
[dependencies]
## async
# async
tokio = { workspace = true, features = ["sync", "rt"] }
tracing-futures = "0.2"
futures-util.workspace = true
## metrics
# metrics
reth-metrics.workspace = true
metrics.workspace = true
## misc
# misc
tracing.workspace = true
thiserror.workspace = true
dyn-clone = "1.0"

@@ -21,6 +21,7 @@ use std::{
collections::{HashMap, HashSet},
ops::RangeInclusive,
};
use tracing::{debug, trace};
/// StateRoot is used to compute the root node of a state trie.
#[derive(Debug)]
@@ -143,7 +144,7 @@ impl<'a, TX: DbTx> StateRoot<'a, TX, &'a TX> {
tx: &'a TX,
range: RangeInclusive<BlockNumber>,
) -> Result<B256, StateRootError> {
tracing::debug!(target: "trie::loader", "incremental state root");
debug!(target: "trie::loader", ?range, "incremental state root");
Self::incremental_root_calculator(tx, range)?.root()
}
@@ -159,7 +160,7 @@ impl<'a, TX: DbTx> StateRoot<'a, TX, &'a TX> {
tx: &'a TX,
range: RangeInclusive<BlockNumber>,
) -> Result<(B256, TrieUpdates), StateRootError> {
tracing::debug!(target: "trie::loader", "incremental state root");
debug!(target: "trie::loader", ?range, "incremental state root");
Self::incremental_root_calculator(tx, range)?.root_with_updates()
}
@@ -173,7 +174,7 @@ impl<'a, TX: DbTx> StateRoot<'a, TX, &'a TX> {
tx: &'a TX,
range: RangeInclusive<BlockNumber>,
) -> Result<StateRootProgress, StateRootError> {
tracing::debug!(target: "trie::loader", "incremental state root with progress");
debug!(target: "trie::loader", ?range, "incremental state root with progress");
Self::incremental_root_calculator(tx, range)?.root_with_progress()
}
}
@@ -222,7 +223,7 @@ where
}
fn calculate(self, retain_updates: bool) -> Result<StateRootProgress, StateRootError> {
tracing::debug!(target: "trie::loader", "calculating state root");
trace!(target: "trie::loader", "calculating state root");
let mut trie_updates = TrieUpdates::default();
let hashed_account_cursor = self.hashed_cursor_factory.hashed_account_cursor()?;
@@ -432,7 +433,7 @@ where
&self,
retain_updates: bool,
) -> Result<(B256, usize, TrieUpdates), StorageRootError> {
tracing::debug!(target: "trie::storage_root", hashed_address = ?self.hashed_address, "calculating storage root");
trace!(target: "trie::storage_root", hashed_address = ?self.hashed_address, "calculating storage root");
let mut hashed_storage_cursor = self.hashed_cursor_factory.hashed_storage_cursor()?;
// short circuit on empty storage
@@ -480,7 +481,7 @@ where
trie_updates.extend(walker_updates.into_iter());
trie_updates.extend_with_storage_updates(self.hashed_address, hash_builder_updates);
tracing::debug!(target: "trie::storage_root", ?root, hashed_address = ?self.hashed_address, "calculated storage root");
trace!(target: "trie::storage_root", ?root, hashed_address = ?self.hashed_address, "calculated storage root");
Ok((root, storage_slots_walked, trie_updates))
}
}
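The incremental state root logs above also gain a `?range` field recording the block range being processed. A tiny sketch of how such an event is emitted and rendered (illustrative only, not part of the diff):

```rust
use std::ops::RangeInclusive;
use tracing::debug;

// Illustrative only: the `?range` capture added to the incremental state root logs.
// A RangeInclusive<u64> is recorded via Debug, so the event shows e.g. `range=100..=200`.
fn log_incremental_root(range: RangeInclusive<u64>) {
    debug!(target: "trie::loader", ?range, "incremental state root");
}

fn main() {
    tracing_subscriber::fmt().with_max_level(tracing::Level::DEBUG).init();
    log_incremental_root(100..=200);
}
```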