feat: set up codspeed (#13372)
.github/scripts/codspeed-build.sh | 23 (vendored, new executable file)
@@ -0,0 +1,23 @@
#!/usr/bin/env bash
set -eo pipefail

# TODO: Benchmarks run WAY too slow due to excessive amount of iterations.

cmd=(cargo codspeed build --profile profiling)
excludes=(
    # Unnecessary
    --exclude reth-libmdbx
    # Build is too slow
    --exclude reth-network
    # Built separately
    --exclude reth-transaction-pool
    # TODO: some benchmarks panic: https://github.com/paradigmxyz/reth/actions/runs/12307046814/job/34349955788
    --exclude reth-db
    --exclude reth-trie-parallel
    --exclude reth-engine-tree
)

"${cmd[@]}" --features test-utils --workspace "${excludes[@]}"

# TODO: Slow benchmarks due to too many iterations
## "${cmd[@]}" -p reth-transaction-pool --features test-utils,arbitrary
.github/workflows/bench.yml | 50 (vendored)
@@ -1,4 +1,4 @@
# Runs benchmarks on serialization/deserialization of storage values and keys.
# Runs benchmarks.

on:
pull_request:
@@ -9,7 +9,6 @@ on:
env:
CARGO_TERM_COLOR: always
BASELINE: base
IAI_CALLGRIND_RUNNER: iai-callgrind-runner

concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
@@ -17,47 +16,26 @@ concurrency:

name: bench
jobs:
iai:
codspeed:
runs-on:
group: Reth
# Only run benchmarks in merge groups and on main
if: github.event_name != 'pull_request'
steps:
- uses: actions/checkout@v4
- name: Install Valgrind
run: sudo apt update && sudo apt install valgrind
with:
submodules: true
- uses: rui314/setup-mold@v1
- uses: dtolnay/rust-toolchain@stable
- uses: Swatinem/rust-cache@v2
with:
cache-on-failure: true
- name: Install cargo-binstall
uses: taiki-e/install-action@cargo-binstall
- name: Install iai-callgrind-runner
run: |
echo "::group::Install"
version=$(cargo metadata --format-version=1 |\
jq '.packages[] | select(.name == "iai-callgrind").version' |\
tr -d '"'
)
cargo binstall iai-callgrind-runner --version $version --no-confirm --no-symlinks --force
echo "::endgroup::"
echo "::group::Verification"
which iai-callgrind-runner
echo "::endgroup::"
- name: Checkout base
uses: actions/checkout@v4
- name: Install cargo-codspeed
uses: taiki-e/install-action@v2
with:
ref: ${{ github.base_ref || 'main' }}
# On `main` branch, generates test vectors and serializes them to disk using `serde-json`.
- name: Generate test vectors
run: cargo run --bin reth --features dev -- test-vectors tables
# Runs iai and stores `main` baseline report for comparison later on $BASELINE.
- name: Save baseline
run: cargo bench -p reth-db --bench iai --profile profiling --features test-utils -- --save-baseline=$BASELINE
- name: Checkout PR
uses: actions/checkout@v4
tool: cargo-codspeed
- name: Build the benchmark target(s)
run: ./.github/scripts/codspeed-build.sh
- name: Run the benchmarks
uses: CodSpeedHQ/action@v3
with:
clean: false
# Runs iai on incoming merge using previously generated test-vectors and compares the report against `main` report.
- name: Compare PR benchmarks
run: cargo bench -p reth-db --bench iai --profile profiling --features test-utils -- --baseline=$BASELINE
run: cargo codspeed run --workspace
token: ${{ secrets.CODSPEED_TOKEN }}
.gitignore | 2 (vendored)
@@ -19,7 +19,7 @@ target/
testdata/micro/db

# Generated data for stage benchmarks
crates/stages/testdata
crates/stages/stages/testdata

# Prometheus data dir
data/
Cargo.lock | 93 (generated)
@@ -1843,6 +1843,30 @@ version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "afb84c814227b90d6895e01398aee0d8033c00e7466aca416fb6a8e0eb19d8a7"

[[package]]
name = "codspeed"
version = "2.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "450a0e9df9df1c154156f4344f99d8f6f6e69d0fc4de96ef6e2e68b2ec3bce97"
dependencies = [
"colored",
"libc",
"serde_json",
]

[[package]]
name = "codspeed-criterion-compat"
version = "2.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8eb1a6cb9c20e177fde58cdef97c1c7c9264eb1424fe45c4fccedc2fb078a569"
dependencies = [
"codspeed",
"colored",
"criterion",
"futures",
"tokio",
]

[[package]]
name = "coins-bip32"
version = "0.12.0"
@@ -1900,6 +1924,16 @@ version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990"

[[package]]
name = "colored"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cbf2150cce219b664a8a70df7a1f933836724b503f8a413af9365b4dcc4d90b8"
dependencies = [
"lazy_static",
"windows-sys 0.48.0",
]

[[package]]
name = "combine"
version = "4.6.7"
@@ -3934,42 +3968,6 @@ dependencies = [
"tracing",
]

[[package]]
name = "iai-callgrind"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22275f8051874cd2f05b2aa1e0098d5cbec34df30ff92f1a1e2686a4cefed870"
dependencies = [
"bincode",
"derive_more",
"iai-callgrind-macros",
"iai-callgrind-runner",
]

[[package]]
name = "iai-callgrind-macros"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8e6677dc52bd798b988e62ffd6831bf7eb46e4348cb1c74c1164954ebd0e5a1"
dependencies = [
"derive_more",
"proc-macro-error2",
"proc-macro2",
"quote",
"serde",
"serde_json",
"syn 2.0.90",
]

[[package]]
name = "iai-callgrind-runner"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a02dd95fe4949513b45a328b5b18f527ee02e96f3428b48090aa7cf9043ab0b8"
dependencies = [
"serde",
]

[[package]]
name = "iana-time-zone"
version = "0.1.61"
@@ -6998,10 +6996,9 @@ dependencies = [
"arbitrary",
"assert_matches",
"bytes",
"criterion",
"codspeed-criterion-compat",
"derive_more",
"eyre",
"iai-callgrind",
"metrics",
"page_size",
"parking_lot",
@@ -7391,7 +7388,7 @@ dependencies = [
"alloy-rlp",
"alloy-rpc-types-engine",
"assert_matches",
"criterion",
"codspeed-criterion-compat",
"crossbeam-channel",
"derive_more",
"futures",
@@ -7912,7 +7909,7 @@ version = "1.1.4"
dependencies = [
"bitflags 2.6.0",
"byteorder",
"criterion",
"codspeed-criterion-compat",
"dashmap 6.1.0",
"derive_more",
"indexmap 2.6.0",
@@ -7979,7 +7976,7 @@ dependencies = [
"alloy-rlp",
"aquamarine",
"auto_impl",
"criterion",
"codspeed-criterion-compat",
"derive_more",
"discv5",
"enr",
@@ -8762,7 +8759,7 @@ dependencies = [
"bincode",
"bytes",
"c-kzg",
"criterion",
"codspeed-criterion-compat",
"derive_more",
"modular-bitfield",
"once_cell",
@@ -9287,7 +9284,7 @@ dependencies = [
"alloy-rlp",
"assert_matches",
"bincode",
"criterion",
"codspeed-criterion-compat",
"futures-util",
"itertools 0.13.0",
"num-traits",
@@ -9514,7 +9511,7 @@ dependencies = [
"assert_matches",
"auto_impl",
"bitflags 2.6.0",
"criterion",
"codspeed-criterion-compat",
"futures-util",
"metrics",
"parking_lot",
@@ -9560,7 +9557,7 @@ dependencies = [
"alloy-rlp",
"alloy-trie",
"auto_impl",
"criterion",
"codspeed-criterion-compat",
"itertools 0.13.0",
"metrics",
"proptest",
@@ -9593,7 +9590,7 @@ dependencies = [
"arbitrary",
"bincode",
"bytes",
"criterion",
"codspeed-criterion-compat",
"derive_more",
"hash-db",
"itertools 0.13.0",
@@ -9643,7 +9640,7 @@ version = "1.1.4"
dependencies = [
"alloy-primitives",
"alloy-rlp",
"criterion",
"codspeed-criterion-compat",
"derive_more",
"itertools 0.13.0",
"metrics",
@@ -9671,7 +9668,7 @@ dependencies = [
"alloy-rlp",
"arbitrary",
"assert_matches",
"criterion",
"codspeed-criterion-compat",
"itertools 0.13.0",
"pretty_assertions",
"proptest",
@@ -601,8 +601,7 @@ toml = "0.8"
# misc-testing
arbitrary = "1.3"
assert_matches = "1.5.0"
criterion = "0.5"
iai-callgrind = "0.14"
criterion = { package = "codspeed-criterion-compat", version = "2.7" }
pprof = "0.14"
proptest = "1.4"
proptest-derive = "0.5"
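Note: because the workspace dependency keeps the `criterion` name while pointing at the codspeed-criterion-compat package, downstream benches keep the usual Criterion shape and need no code changes. A minimal sketch of such a bench (hypothetical example, not part of this diff):

    use criterion::{criterion_group, criterion_main, Criterion};
    use std::hint::black_box;

    fn fibonacci(n: u64) -> u64 {
        (1..=n).fold((0u64, 1u64), |(a, b), _| (b, a + b)).0
    }

    fn bench_fib(c: &mut Criterion) {
        // Under `cargo bench` this is plain Criterion; under `cargo codspeed run`
        // the compat layer reports the same benchmark to CodSpeed.
        c.bench_function("fib 20", |b| b.iter(|| fibonacci(black_box(20))));
    }

    criterion_group!(benches, bench_fib);
    criterion_main!(benches);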
@@ -77,7 +77,8 @@ test-utils = [
"reth-db-api/test-utils",
"reth-provider/test-utils",
"reth-trie-db/test-utils",
"reth-trie/test-utils"
"reth-trie/test-utils",
"reth-trie-parallel/test-utils"
]
optimism = [
"reth-primitives/optimism",
@@ -120,6 +120,8 @@ test-utils = [
"reth-static-file",
"reth-tracing",
"reth-trie/test-utils",
"reth-trie-db/test-utils",
"reth-trie-sparse/test-utils",
"reth-prune-types?/test-utils",
"reth-trie-db/test-utils",
"reth-trie-parallel/test-utils",
]
@@ -7,7 +7,7 @@ use revm_primitives::{
Account, AccountInfo, AccountStatus, Address, EvmState, EvmStorage, EvmStorageSlot, HashMap,
B256, U256,
};
use std::thread;
use std::{hint::black_box, thread};

/// Creates a mock state with the specified number of accounts for benchmarking
fn create_bench_state(num_accounts: usize) -> EvmState {
@@ -47,7 +47,7 @@ impl StdStateRootTask {

fn run(self) {
while let Ok(state) = self.rx.recv() {
criterion::black_box(state);
black_box(state);
}
}
}
@@ -64,7 +64,7 @@ impl CrossbeamStateRootTask {

fn run(self) {
while let Ok(state) = self.rx.recv() {
criterion::black_box(state);
black_box(state);
}
}
}
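These hunks swap criterion::black_box for std::hint::black_box; both only stop the optimizer from deleting an otherwise unused value. A minimal standalone sketch of the pattern (hypothetical values):

    use std::hint::black_box;

    fn consume(values: Vec<u64>) {
        for v in values {
            // Mark the value as observed so the loop is not optimized away,
            // even though the result is discarded.
            black_box(v);
        }
    }

    fn main() {
        consume((0u64..1_000).collect());
    }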
@@ -3,7 +3,7 @@

#![allow(missing_docs)]

use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion};
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
use reth_engine_tree::tree::root::{StateRootConfig, StateRootTask};
use reth_evm::system_calls::OnStateHook;
use reth_primitives::{Account as RethAccount, StorageEntry};
@@ -22,6 +22,7 @@ use revm_primitives::{
Account as RevmAccount, AccountInfo, AccountStatus, Address, EvmState, EvmStorageSlot, HashMap,
B256, KECCAK_EMPTY, U256,
};
use std::hint::black_box;

#[derive(Debug, Clone)]
struct BenchParams {
@@ -975,6 +975,5 @@ mod tests {
&mut dest,
);
}
std::hint::black_box(());
}
}
@@ -69,12 +69,10 @@ fn validate_blob_tx(

// for now we just use the default SubPoolLimit
group.bench_function(group_id, |b| {
let kzg_settings = kzg_settings.get();
b.iter_with_setup(setup, |(tx, blob_sidecar)| {
if let Err(err) =
std::hint::black_box(tx.validate_blob(&blob_sidecar, kzg_settings.get()))
{
println!("Validation failed: {err:?}");
}
let r = tx.validate_blob(&blob_sidecar, kzg_settings);
(r, tx, blob_sidecar)
});
});
}
@@ -1,11 +1,10 @@
#![allow(missing_docs)]
use criterion::{criterion_main, measurement::WallTime, BenchmarkGroup, Criterion};
#[cfg(not(target_os = "windows"))]
use pprof::criterion::{Output, PProfProfiler};
use reth_config::config::{EtlConfig, TransactionLookupConfig};
use reth_db::{test_utils::TempDatabase, Database, DatabaseEnv};
#![allow(unexpected_cfgs)]

use alloy_primitives::BlockNumber;
use criterion::{criterion_main, measurement::WallTime, BenchmarkGroup, Criterion};
use reth_config::config::{EtlConfig, TransactionLookupConfig};
use reth_db::{test_utils::TempDatabase, Database, DatabaseEnv};
use reth_provider::{test_utils::MockNodeTypesWithDB, DatabaseProvider, DatabaseProviderFactory};
use reth_stages::{
stages::{MerkleStage, SenderRecoveryStage, TransactionLookupStage},
@@ -22,25 +21,30 @@ use setup::StageRange;
// Expanded form of `criterion_group!`
//
// This is currently needed to only instantiate the tokio runtime once.
#[cfg(not(codspeed))]
fn benches() {
#[cfg(not(target_os = "windows"))]
let mut criterion = Criterion::default()
.with_profiler(PProfProfiler::new(1000, Output::Flamegraph(None)))
.configure_from_args();
#[cfg(not(windows))]
use pprof::criterion::{Output, PProfProfiler};

let runtime = Runtime::new().unwrap();
let _guard = runtime.enter();

#[cfg(target_os = "windows")]
let mut criterion = Criterion::default().configure_from_args();

transaction_lookup(&mut criterion, &runtime);
account_hashing(&mut criterion, &runtime);
senders(&mut criterion, &runtime);
merkle(&mut criterion, &runtime);
let criterion = Criterion::default();
#[cfg(not(windows))]
let criterion = criterion.with_profiler(PProfProfiler::new(1000, Output::Flamegraph(None)));
run_benches(&mut criterion.configure_from_args());
}

fn run_benches(criterion: &mut Criterion) {
let runtime = Runtime::new().unwrap();
let _guard = runtime.enter();
transaction_lookup(criterion, &runtime);
account_hashing(criterion, &runtime);
senders(criterion, &runtime);
merkle(criterion, &runtime);
}

#[cfg(not(codspeed))]
criterion_main!(benches);
#[cfg(codspeed)]
criterion_main!(run_benches);

const DEFAULT_NUM_BLOCKS: u64 = 10_000;

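The comment about the "expanded form of `criterion_group!`" refers to the macro generating roughly the kind of driver function sketched below (simplified, hypothetical benches); hand-expanding it is what lets the stages benches build one Tokio runtime and share it across every benchmark function:

    use criterion::Criterion;
    use std::hint::black_box;

    fn bench_a(c: &mut Criterion) {
        c.bench_function("a", |b| b.iter(|| black_box(1 + 1)));
    }

    fn bench_b(c: &mut Criterion) {
        c.bench_function("b", |b| b.iter(|| black_box(2 * 2)));
    }

    // Roughly what `criterion_group!(benches, bench_a, bench_b)` would generate:
    // one `Criterion` value is configured once and passed to each target, so any
    // shared setup (in the real file, a tokio `Runtime`) is created a single time.
    fn benches() {
        let mut criterion = Criterion::default().configure_from_args();
        bench_a(&mut criterion);
        bench_b(&mut criterion);
    }

    fn main() {
        benches();
    }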
@@ -88,6 +88,12 @@ where
// Helper for generating testdata for the benchmarks.
// Returns the path to the database file.
pub(crate) fn txs_testdata(num_blocks: u64) -> TestStageDB {
// This is way too slow.
#[allow(unexpected_cfgs)]
if cfg!(codspeed) {
std::process::exit(0);
}

let txs_range = 100..150;

// number of storage changes per transition
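The `cfg!(codspeed)` guard above, and the similar guards in the benches further down, skip or trim oversized inputs when the benchmarks are built for CodSpeed. A minimal sketch of the pattern, assuming the `codspeed` cfg is injected by the benchmark build (for example via RUSTFLAGS="--cfg codspeed"):

    /// Input sizes to benchmark; the list is trimmed when the `codspeed` cfg is
    /// active so the instrumented run stays fast.
    #[allow(unexpected_cfgs)]
    fn bench_sizes() -> Vec<usize> {
        if cfg!(codspeed) {
            vec![10_000]
        } else {
            vec![10_000, 100_000, 1_000_000]
        }
    }

    fn main() {
        for size in bench_sizes() {
            println!("would benchmark {size} items");
        }
    }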
@@ -69,7 +69,6 @@ pprof = { workspace = true, features = [
"criterion",
] }
criterion.workspace = true
iai-callgrind.workspace = true

arbitrary = { workspace = true, features = ["derive"] }
proptest.workspace = true
@@ -125,11 +124,6 @@ name = "criterion"
required-features = ["test-utils"]
harness = false

[[bench]]
name = "iai"
required-features = ["test-utils"]
harness = false

[[bench]]
name = "get"
required-features = ["test-utils"]
@@ -3,7 +3,7 @@
use std::{path::Path, sync::Arc};

use criterion::{
black_box, criterion_group, criterion_main, measurement::WallTime, BenchmarkGroup, Criterion,
criterion_group, criterion_main, measurement::WallTime, BenchmarkGroup, Criterion,
};
use pprof::criterion::{Output, PProfProfiler};
use reth_db::{tables::*, test_utils::create_test_rw_db_with_path};
@@ -71,12 +71,9 @@ where
b.iter_with_setup(
|| input.clone(),
|input| {
{
for (k, _, _, _) in input {
k.encode();
}
};
black_box(());
for (k, _, _, _) in input {
k.encode();
}
},
)
});
@@ -85,12 +82,9 @@ where
b.iter_with_setup(
|| input.clone(),
|input| {
{
for (_, k, _, _) in input {
let _ = <T as Table>::Key::decode(&k);
}
};
black_box(());
for (_, k, _, _) in input {
let _ = <T as Table>::Key::decode(&k);
}
},
)
});
@@ -99,12 +93,9 @@ where
b.iter_with_setup(
|| input.clone(),
|input| {
{
for (_, _, v, _) in input {
v.compress();
}
};
black_box(());
for (_, _, v, _) in input {
v.compress();
}
},
)
});
@@ -113,12 +104,9 @@ where
b.iter_with_setup(
|| input.clone(),
|input| {
{
for (_, _, _, v) in input {
let _ = <T as Table>::Value::decompress(&v);
}
};
black_box(());
for (_, _, _, v) in input {
let _ = <T as Table>::Value::decompress(&v);
}
},
)
});
@@ -148,14 +136,10 @@ where
// Create TX
let tx = db.tx_mut().expect("tx");
let mut crsr = tx.cursor_write::<T>().expect("cursor");

black_box({
for (k, _, v, _) in input {
crsr.append(k, v).expect("submit");
}

tx.inner.commit().unwrap()
});
for (k, _, v, _) in input {
crsr.append(k, v).expect("submit");
}
tx.inner.commit().unwrap()
},
)
});
@@ -171,15 +155,12 @@ where
// Create TX
let tx = db.tx_mut().expect("tx");
let mut crsr = tx.cursor_write::<T>().expect("cursor");
for index in RANDOM_INDEXES {
let (k, _, v, _) = input.get(index).unwrap().clone();
crsr.insert(k, v).expect("submit");
}

black_box({
for index in RANDOM_INDEXES {
let (k, _, v, _) = input.get(index).unwrap().clone();
crsr.insert(k, v).expect("submit");
}

tx.inner.commit().unwrap()
});
tx.inner.commit().unwrap()
},
)
});
@@ -190,15 +171,11 @@ where
b.iter(|| {
// Create TX
let tx = db.tx().expect("tx");

{
let mut cursor = tx.cursor_read::<T>().expect("cursor");
let walker = cursor.walk(Some(input.first().unwrap().0.clone())).unwrap();
for element in walker {
element.unwrap();
}
};
black_box(());
let mut cursor = tx.cursor_read::<T>().expect("cursor");
let walker = cursor.walk(Some(input.first().unwrap().0.clone())).unwrap();
for element in walker {
element.unwrap();
}
})
});

@@ -208,14 +185,10 @@ where
b.iter(|| {
// Create TX
let tx = db.tx().expect("tx");

{
for index in RANDOM_INDEXES {
let mut cursor = tx.cursor_read::<T>().expect("cursor");
cursor.seek_exact(input.get(index).unwrap().0.clone()).unwrap();
}
};
black_box(());
for index in RANDOM_INDEXES {
let mut cursor = tx.cursor_read::<T>().expect("cursor");
cursor.seek_exact(input.get(index).unwrap().0.clone()).unwrap();
}
})
});
}
@@ -245,14 +218,10 @@ where
// Create TX
let tx = db.tx_mut().expect("tx");
let mut crsr = tx.cursor_dup_write::<T>().expect("cursor");

black_box({
for (k, _, v, _) in input {
crsr.append_dup(k, v).expect("submit");
}

tx.inner.commit().unwrap()
});
for (k, _, v, _) in input {
crsr.append_dup(k, v).expect("submit");
}
tx.inner.commit().unwrap()
},
)
});
@@ -268,12 +237,10 @@ where
|(input, db)| {
// Create TX
let tx = db.tx_mut().expect("tx");

for index in RANDOM_INDEXES {
let (k, _, v, _) = input.get(index).unwrap().clone();
tx.put::<T>(k, v).unwrap();
}

tx.inner.commit().unwrap();
},
)
@@ -286,14 +253,11 @@ where
// Create TX
let tx = db.tx().expect("tx");

{
let mut cursor = tx.cursor_dup_read::<T>().expect("cursor");
let walker = cursor.walk_dup(None, Some(T::SubKey::default())).unwrap();
for element in walker {
element.unwrap();
}
};
black_box(());
let mut cursor = tx.cursor_dup_read::<T>().expect("cursor");
let walker = cursor.walk_dup(None, Some(T::SubKey::default())).unwrap();
for element in walker {
element.unwrap();
}
})
});

@@ -3,7 +3,7 @@
use std::{collections::HashSet, path::Path, sync::Arc};

use criterion::{
black_box, criterion_group, criterion_main, measurement::WallTime, BenchmarkGroup, Criterion,
criterion_group, criterion_main, measurement::WallTime, BenchmarkGroup, Criterion,
};
use pprof::criterion::{Output, PProfProfiler};
use proptest::{
@@ -20,6 +20,7 @@ use reth_db_api::{
transaction::DbTxMut,
};
use reth_fs_util as fs;
use std::hint::black_box;

mod utils;
use utils::*;
@@ -46,6 +47,12 @@ pub fn hash_keys(c: &mut Criterion) {
group.sample_size(10);

for size in [10_000, 100_000, 1_000_000] {
// Too slow.
#[allow(unexpected_cfgs)]
if cfg!(codspeed) && size > 10_000 {
continue;
}

measure_table_insertion::<TransactionHashNumbers>(&mut group, size);
}
}
@@ -1,102 +0,0 @@
#![allow(missing_docs, non_snake_case, unreachable_pub)]

use iai_callgrind::{
library_benchmark, library_benchmark_group, LibraryBenchmarkConfig, RegressionConfig,
};
use paste::paste;
use reth_db_api::table::{Compress, Decode, Decompress, Encode, Table};

mod utils;
use utils::*;

macro_rules! impl_iai_callgrind_inner {
(
$(($name:ident, $group_name:ident, $mod:ident, $compress:ident, $decompress:ident, $encode:ident, $decode:ident, $seqread:ident, $randread:ident, $seqwrite:ident, $randwrite:ident))+
) => {
use std::hint::black_box;
$(
#[library_benchmark]
pub fn $compress() {
for (_, _, v, _) in black_box(load_vectors::<reth_db::tables::$name>()) {
black_box(v.compress());
}
}

#[library_benchmark]
pub fn $decompress() {
for (_, _, _, comp) in black_box(load_vectors::<reth_db::tables::$name>()) {
let _ = black_box(<reth_db::tables::$name as Table>::Value::decompress(&comp));
}
}

#[library_benchmark]
pub fn $encode() {
for (k, _, _, _) in black_box(load_vectors::<reth_db::tables::$name>()) {
black_box(k.encode());
}
}

#[library_benchmark]
pub fn $decode() {
for (_, enc, _, _) in black_box(load_vectors::<reth_db::tables::$name>()) {
let _ = black_box(<reth_db::tables::$name as Table>::Key::decode(&enc));
}
}

#[allow(dead_code)]
pub const fn $seqread() {}

#[allow(dead_code)]
pub const fn $randread() {}

#[allow(dead_code)]
pub const fn $seqwrite() {}

#[allow(dead_code)]
pub const fn $randwrite() {}


library_benchmark_group!(
name = $group_name;
config = LibraryBenchmarkConfig::default()
.regression(
RegressionConfig::default().fail_fast(false)
);
benchmarks =
$compress,
$decompress,
$encode,
$decode,
);
)+

iai_callgrind::main!(
config = LibraryBenchmarkConfig::default();
library_benchmark_groups = $($group_name),+);
};
}

macro_rules! impl_iai_callgrind {
($($name:ident),+) => {
paste! {
impl_iai_callgrind_inner!(
$(
( $name, [<$name _group>],[<$name _mod>], [<$name _ValueCompress>], [<$name _ValueDecompress>], [<$name _ValueEncode>], [<$name _ValueDecode>], [<$name _SeqRead>], [<$name _RandomRead>], [<$name _SeqWrite>], [<$name _RandomWrite>])
)+
);
}
};
}

impl_iai_callgrind!(
CanonicalHeaders,
HeaderTerminalDifficulties,
HeaderNumbers,
Headers,
BlockBodyIndices,
BlockOmmers,
TransactionHashNumbers,
Transactions,
PlainStorageState,
PlainAccountState
);
@@ -60,15 +60,15 @@ pub mod test_utils {
use tempfile::TempDir;

/// Error during database open
pub const ERROR_DB_OPEN: &str = "Not able to open the database file.";
pub const ERROR_DB_OPEN: &str = "could not open the database file";
/// Error during database creation
pub const ERROR_DB_CREATION: &str = "Not able to create the database file.";
pub const ERROR_DB_CREATION: &str = "could not create the database file";
/// Error during database creation
pub const ERROR_STATIC_FILES_CREATION: &str = "Not able to create the static file path.";
pub const ERROR_STATIC_FILES_CREATION: &str = "could not create the static file path";
/// Error during table creation
pub const ERROR_TABLE_CREATION: &str = "Not able to create tables in the database.";
pub const ERROR_TABLE_CREATION: &str = "could not create tables in the database";
/// Error during tempdir creation
pub const ERROR_TEMPDIR: &str = "Not able to create a temporary directory.";
pub const ERROR_TEMPDIR: &str = "could not create a temporary directory";

/// A database will delete the db dir when dropped.
pub struct TempDatabase<DB> {
@@ -1,10 +1,10 @@
#![allow(missing_docs)]
mod utils;

use criterion::{black_box, criterion_group, criterion_main, Criterion};
use criterion::{criterion_group, criterion_main, Criterion};
use pprof::criterion::{Output, PProfProfiler};
use reth_libmdbx::{ffi::*, *};
use std::ptr;
use std::{hint::black_box, ptr};
use utils::*;

/// Benchmark of iterator sequential read performance.
@@ -1,11 +1,11 @@
#![allow(missing_docs, unreachable_pub)]
mod utils;

use criterion::{black_box, criterion_group, criterion_main, Criterion};
use criterion::{criterion_group, criterion_main, Criterion};
use rand::{prelude::SliceRandom, SeedableRng};
use rand_xorshift::XorShiftRng;
use reth_libmdbx::{ffi::*, ObjectLength, WriteFlags};
use std::ptr;
use std::{hint::black_box, ptr};
use utils::*;

fn bench_get_rand(c: &mut Criterion) {
@@ -76,45 +76,45 @@ serde_json.workspace = true
[features]
default = ["serde"]
serde = [
"dep:serde",
"reth-execution-types/serde",
"reth-eth-wire-types/serde",
"reth-provider/serde",
"alloy-consensus/serde",
"alloy-eips/serde",
"alloy-primitives/serde",
"bitflags/serde",
"parking_lot/serde",
"rand?/serde",
"smallvec/serde",
"reth-primitives-traits/serde",
"revm-interpreter/serde",
"revm-primitives/serde"
"dep:serde",
"reth-execution-types/serde",
"reth-eth-wire-types/serde",
"reth-provider/serde",
"alloy-consensus/serde",
"alloy-eips/serde",
"alloy-primitives/serde",
"bitflags/serde",
"parking_lot/serde",
"rand?/serde",
"smallvec/serde",
"reth-primitives-traits/serde",
"revm-interpreter/serde",
"revm-primitives/serde",
]
test-utils = [
"rand",
"paste",
"serde",
"reth-chain-state/test-utils",
"reth-chainspec/test-utils",
"reth-primitives/test-utils",
"reth-provider/test-utils",
"reth-primitives-traits/test-utils",
"rand",
"paste",
"serde",
"reth-chain-state/test-utils",
"reth-chainspec/test-utils",
"reth-primitives/test-utils",
"reth-provider/test-utils",
"reth-primitives-traits/test-utils",
]
arbitrary = [
"proptest",
"reth-primitives/arbitrary",
"proptest-arbitrary-interop",
"reth-chainspec/arbitrary",
"reth-eth-wire-types/arbitrary",
"alloy-consensus/arbitrary",
"alloy-eips/arbitrary",
"alloy-primitives/arbitrary",
"bitflags/arbitrary",
"reth-primitives-traits/arbitrary",
"smallvec/arbitrary",
"revm-interpreter/arbitrary",
"revm-primitives/arbitrary"
"proptest",
"reth-primitives/arbitrary",
"proptest-arbitrary-interop",
"reth-chainspec/arbitrary",
"reth-eth-wire-types/arbitrary",
"alloy-consensus/arbitrary",
"alloy-eips/arbitrary",
"alloy-primitives/arbitrary",
"bitflags/arbitrary",
"reth-primitives-traits/arbitrary",
"smallvec/arbitrary",
"revm-interpreter/arbitrary",
"revm-primitives/arbitrary",
]

[[bench]]
@@ -1,9 +1,10 @@
#![allow(missing_docs)]
use criterion::{
black_box, criterion_group, criterion_main, measurement::WallTime, BenchmarkGroup, Criterion,
criterion_group, criterion_main, measurement::WallTime, BenchmarkGroup, Criterion,
};
use proptest::{prelude::*, strategy::ValueTree, test_runner::TestRunner};
use reth_transaction_pool::{blob_tx_priority, fee_delta};
use std::hint::black_box;

fn generate_test_data_fee_delta() -> (u128, u128) {
let config = ProptestConfig::default();
@@ -75,19 +75,17 @@ fn txpool_reordering_bench<T: BenchTxPool>(
);
group.bench_function(group_id, |b| {
b.iter_with_setup(setup, |(mut txpool, new_txs)| {
{
// Reorder with new base fee
let bigger_base_fee = base_fee.saturating_add(10);
txpool.reorder(bigger_base_fee);
// Reorder with new base fee
let bigger_base_fee = base_fee.saturating_add(10);
txpool.reorder(bigger_base_fee);

// Reorder with new base fee after adding transactions.
for new_tx in new_txs {
txpool.add_transaction(new_tx);
}
let smaller_base_fee = base_fee.saturating_sub(10);
txpool.reorder(smaller_base_fee)
};
std::hint::black_box(());
// Reorder with new base fee after adding transactions.
for new_tx in new_txs {
txpool.add_transaction(new_tx);
}
let smaller_base_fee = base_fee.saturating_sub(10);
txpool.reorder(smaller_base_fee);
txpool
});
});
}

@@ -161,7 +161,7 @@ fn truncate_pending(
group.bench_function(group_id, |b| {
b.iter_with_setup(setup, |mut txpool| {
txpool.truncate_pool(SubPoolLimit::default());
std::hint::black_box(());
txpool
});
});
}
@@ -195,7 +195,7 @@ fn truncate_queued(
group.bench_function(group_id, |b| {
b.iter_with_setup(setup, |mut txpool| {
txpool.truncate_pool(SubPoolLimit::default());
std::hint::black_box(());
txpool
});
});
}
@@ -229,7 +229,7 @@ fn truncate_basefee(
group.bench_function(group_id, |b| {
b.iter_with_setup(setup, |mut txpool| {
txpool.truncate_pool(SubPoolLimit::default());
std::hint::black_box(());
txpool
});
});
}
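A likely reason the closures above now return the pool instead of ending in black_box(()): as far as Criterion's iter_with_setup/iter_batched go, values returned from the measured routine are dropped outside the timed section, so tearing down a large pool no longer inflates the measurement. A minimal sketch of the pattern on a stand-in type (hypothetical, not the reth pool API):

    use criterion::{BatchSize, Criterion};

    struct Pool(Vec<u64>);

    impl Pool {
        fn truncate(&mut self, limit: usize) {
            self.0.truncate(limit);
        }
    }

    fn bench_truncate(c: &mut Criterion) {
        c.bench_function("truncate", |b| {
            b.iter_batched(
                || Pool((0u64..100_000).collect()),
                |mut pool| {
                    pool.truncate(10_000);
                    // Returning the pool hands it back to Criterion, which drops
                    // it after the timed section rather than inside it.
                    pool
                },
                BatchSize::PerIteration,
            )
        });
    }

    fn main() {
        let mut criterion = Criterion::default().configure_from_args();
        bench_truncate(&mut criterion);
    }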
@@ -1,6 +1,6 @@
#![allow(missing_docs, unreachable_pub)]
use criterion::{
black_box, criterion_group, criterion_main, measurement::WallTime, BenchmarkGroup, Criterion,
criterion_group, criterion_main, measurement::WallTime, BenchmarkGroup, Criterion,
};
use proptest::{
prelude::*,
@@ -11,7 +11,7 @@ use reth_trie_common::{
prefix_set::{PrefixSet, PrefixSetMut},
Nibbles,
};
use std::collections::BTreeSet;
use std::{collections::BTreeSet, hint::black_box};

/// Abstraction for aggregating nibbles and freezing it to a type
/// that can be later used for benching.
@@ -48,6 +48,12 @@ pub fn prefix_set_lookups(c: &mut Criterion) {
let mut group = c.benchmark_group("Prefix Set Lookups");

for size in [10, 100, 1_000, 10_000] {
// Too slow.
#[allow(unexpected_cfgs)]
if cfg!(codspeed) && size > 1_000 {
continue;
}

let test_data = generate_test_data(size);

use implementations::*;

@@ -54,6 +54,14 @@ proptest-arbitrary-interop.workspace = true
[features]
default = ["metrics"]
metrics = ["reth-metrics", "dep:metrics", "reth-trie/metrics"]
test-utils = [
"reth-trie/test-utils",
"reth-trie-common/test-utils",
"reth-db/test-utils",
"reth-primitives/test-utils",
"reth-provider/test-utils",
"reth-trie-db/test-utils",
]

[[bench]]
name = "root"
@@ -20,6 +20,12 @@ pub fn calculate_state_root(c: &mut Criterion) {
group.sample_size(20);

for size in [1_000, 3_000, 5_000, 10_000] {
// Too slow.
#[allow(unexpected_cfgs)]
if cfg!(codspeed) && size > 3_000 {
continue;
}

let (db_state, updated_state) = generate_test_data(size);
let provider_factory = create_test_provider_factory();
{
@@ -41,6 +41,19 @@ proptest-arbitrary-interop.workspace = true
proptest.workspace = true
rand.workspace = true

[features]
test-utils = [
"reth-primitives-traits/test-utils",
"reth-trie/test-utils",
"reth-trie-common/test-utils",
]
arbitrary = [
"reth-primitives-traits/arbitrary",
"reth-trie-common/arbitrary",
"alloy-primitives/arbitrary",
"smallvec/arbitrary",
]

[[bench]]
name = "root"
harness = false
@@ -1,6 +1,4 @@
#![allow(missing_docs, unreachable_pub)]

use std::time::{Duration, Instant};
#![allow(missing_docs)]

use alloy_primitives::{B256, U256};
use criterion::{criterion_group, criterion_main, Criterion};
@@ -11,7 +9,7 @@ use reth_testing_utils::generators;
use reth_trie::Nibbles;
use reth_trie_sparse::RevealedSparseTrie;

pub fn update_rlp_node_level(c: &mut Criterion) {
fn update_rlp_node_level(c: &mut Criterion) {
let mut rng = generators::rng();

let mut group = c.benchmark_group("update rlp node level");
@@ -53,20 +51,11 @@ pub fn update_rlp_node_level(c: &mut Criterion) {
group.bench_function(
format!("size {size} | updated {updated_leaves}% | depth {depth}"),
|b| {
// Use `iter_custom` to avoid measuring clones and drops
b.iter_custom(|iters| {
let mut elapsed = Duration::ZERO;

let mut cloned = sparse.clone();
for _ in 0..iters {
let start = Instant::now();
cloned.update_rlp_node_level(depth);
elapsed += start.elapsed();
cloned = sparse.clone();
}

elapsed
})
b.iter_batched_ref(
|| sparse.clone(),
|cloned| cloned.update_rlp_node_level(depth),
criterion::BatchSize::PerIteration,
)
},
);
}
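Here iter_batched_ref replaces the hand-rolled iter_custom loop: the setup closure clones the trie outside the timed region, the routine mutates it through a reference, and the clone is dropped untimed. A minimal sketch of the same idea on a stand-in type (hypothetical, not the reth sparse-trie API):

    use criterion::{BatchSize, Criterion};

    #[derive(Clone)]
    struct Tree(Vec<u64>);

    impl Tree {
        fn recompute(&mut self) {
            self.0.sort_unstable();
        }
    }

    fn bench_recompute(c: &mut Criterion) {
        let base = Tree((0u64..10_000).rev().collect());
        c.bench_function("recompute", |b| {
            // Clone in setup (untimed), mutate through the reference (timed),
            // drop the clone after the measurement (untimed).
            b.iter_batched_ref(|| base.clone(), |tree| tree.recompute(), BatchSize::PerIteration)
        });
    }

    fn main() {
        let mut criterion = Criterion::default().configure_from_args();
        bench_recompute(&mut criterion);
    }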
@@ -1,4 +1,4 @@
#![allow(missing_docs, unreachable_pub)]
#![allow(missing_docs)]

use alloy_primitives::{map::B256HashMap, B256, U256};
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
@@ -15,11 +15,17 @@ use reth_trie::{
use reth_trie_common::{HashBuilder, Nibbles};
use reth_trie_sparse::SparseTrie;

pub fn calculate_root_from_leaves(c: &mut Criterion) {
fn calculate_root_from_leaves(c: &mut Criterion) {
let mut group = c.benchmark_group("calculate root from leaves");
group.sample_size(20);

for size in [1_000, 5_000, 10_000, 100_000] {
// Too slow.
#[allow(unexpected_cfgs)]
if cfg!(codspeed) && size > 5_000 {
continue;
}

let state = generate_test_data(size);

// hash builder
@@ -29,6 +35,7 @@ pub fn calculate_root_from_leaves(c: &mut Criterion) {
hb.add_leaf(Nibbles::unpack(key), &alloy_rlp::encode_fixed_size(value));
}
hb.root();
hb
})
});

@@ -44,19 +51,32 @@ pub fn calculate_root_from_leaves(c: &mut Criterion) {
.unwrap();
}
sparse.root().unwrap();
sparse
})
});
}
}

pub fn calculate_root_from_leaves_repeated(c: &mut Criterion) {
fn calculate_root_from_leaves_repeated(c: &mut Criterion) {
let mut group = c.benchmark_group("calculate root from leaves repeated");
group.sample_size(20);

for init_size in [1_000, 10_000, 100_000] {
// Too slow.
#[allow(unexpected_cfgs)]
if cfg!(codspeed) && init_size > 10_000 {
continue;
}

let init_state = generate_test_data(init_size);

for update_size in [100, 1_000, 5_000, 10_000] {
// Too slow.
#[allow(unexpected_cfgs)]
if cfg!(codspeed) && update_size > 1_000 {
continue;
}

for num_updates in [1, 3, 5, 10] {
let updates =
(0..num_updates).map(|_| generate_test_data(update_size)).collect::<Vec<_>>();

@@ -61,19 +61,20 @@ criterion.workspace = true
[features]
metrics = ["reth-metrics", "dep:metrics"]
serde = [
"alloy-primitives/serde",
"alloy-consensus/serde",
"alloy-trie/serde",
"alloy-primitives/serde",
"alloy-consensus/serde",
"alloy-trie/serde",
"alloy-eips/serde",
"revm/serde",
"reth-trie-common/serde"
"revm/serde",
"reth-trie-common/serde",
]
test-utils = [
"triehash",
"revm/test-utils",
"reth-primitives/test-utils",
"reth-trie-common/test-utils",
"reth-stages-types/test-utils"
"triehash",
"revm/test-utils",
"reth-primitives/test-utils",
"reth-trie-common/test-utils",
"reth-trie-sparse/test-utils",
"reth-stages-types/test-utils",
]

[[bench]]

@@ -10,6 +10,12 @@ pub fn hash_post_state(c: &mut Criterion) {
group.sample_size(20);

for size in [100, 1_000, 3_000, 5_000, 10_000] {
// Too slow.
#[allow(unexpected_cfgs)]
if cfg!(codspeed) && size > 1_000 {
continue;
}

let state = generate_test_data(size);

// sequence

@@ -1,10 +1,11 @@
#![allow(missing_docs, unreachable_pub)]
use alloy_primitives::B256;
use criterion::{black_box, criterion_group, criterion_main, Criterion};
use criterion::{criterion_group, criterion_main, Criterion};
use proptest::{prelude::*, strategy::ValueTree, test_runner::TestRunner};
use proptest_arbitrary_interop::arb;
use reth_primitives::{Receipt, ReceiptWithBloom};
use reth_trie::triehash::KeccakHasher;
use std::hint::black_box;

/// Benchmarks different implementations of the root calculation.
pub fn trie_root_benchmark(c: &mut Criterion) {

@@ -9,7 +9,7 @@

- We want Reth's serialized format to be able to trade off read/write speed for size, depending on who the user is.
- To achieve that, we created the [Encode/Decode/Compress/Decompress traits](https://github.com/paradigmxyz/reth/blob/0d9b9a392d4196793736522f3fc2ac804991b45d/crates/interfaces/src/db/table.rs#L9-L36) to make the (de)serialization of database `Table::Key` and `Table::Values` generic.
- This allows for [out-of-the-box benchmarking](https://github.com/paradigmxyz/reth/blob/0d9b9a392d4196793736522f3fc2ac804991b45d/crates/db/benches/encoding_iai.rs#L5) (using [Criterion](https://github.com/bheisler/criterion.rs) and [Iai](https://github.com/bheisler/iai))
- This allows for [out-of-the-box benchmarking](https://github.com/paradigmxyz/reth/blob/0d9b9a392d4196793736522f3fc2ac804991b45d/crates/db/benches/encoding_iai.rs#L5) (using [Criterion](https://github.com/bheisler/criterion.rs))
- It also enables [out-of-the-box fuzzing](https://github.com/paradigmxyz/reth/blob/0d9b9a392d4196793736522f3fc2ac804991b45d/crates/interfaces/src/db/codecs/fuzz/mod.rs) using [trailofbits/test-fuzz](https://github.com/trailofbits/test-fuzz).
- We implemented that trait for the following encoding formats:
- [Ethereum-specific Compact Encoding](https://github.com/paradigmxyz/reth/blob/0d9b9a392d4196793736522f3fc2ac804991b45d/crates/codecs/derive/src/compact/mod.rs): A lot of Ethereum datatypes have unnecessary zeros when serialized, or optional (e.g. on empty hashes) which would be nice not to pay in storage costs.
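For illustration, a minimal sketch of the kind of generic round-trip benchmark these traits enable (hypothetical, simplified stand-ins for the Encode/Decode traits, not the exact reth definitions):

    use criterion::{criterion_group, criterion_main, Criterion};
    use std::hint::black_box;

    // Hypothetical stand-ins for reth's Encode/Decode table traits (simplified).
    trait Encode { fn encode(&self) -> Vec<u8>; }
    trait Decode: Sized { fn decode(buf: &[u8]) -> Option<Self>; }

    impl Encode for u64 {
        fn encode(&self) -> Vec<u8> { self.to_be_bytes().to_vec() }
    }
    impl Decode for u64 {
        fn decode(buf: &[u8]) -> Option<Self> {
            Some(u64::from_be_bytes(buf.try_into().ok()?))
        }
    }

    // Generic round-trip benchmark over any type implementing both traits.
    fn roundtrip<T: Encode + Decode>(c: &mut Criterion, name: &str, values: &[T]) {
        c.bench_function(&format!("{name}/encode"), |b| {
            b.iter(|| {
                for v in values {
                    black_box(v.encode());
                }
            })
        });
        // Pre-encode once so only deserialization is timed below.
        let encoded: Vec<Vec<u8>> = values.iter().map(Encode::encode).collect();
        c.bench_function(&format!("{name}/decode"), |b| {
            b.iter(|| {
                for buf in &encoded {
                    black_box(T::decode(buf));
                }
            })
        });
    }

    fn benches(c: &mut Criterion) {
        let values: Vec<u64> = (0u64..1_000).collect();
        roundtrip(c, "u64", &values);
    }

    criterion_group!(benches_group, benches);
    criterion_main!(benches_group);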