Mirror of https://github.com/hl-archive-node/nanoreth.git
Synced 2025-12-06 10:59:55 +00:00
feat: set up codspeed (#13372)
@@ -69,7 +69,6 @@ pprof = { workspace = true, features = [
     "criterion",
 ] }
 criterion.workspace = true
-iai-callgrind.workspace = true
 
 arbitrary = { workspace = true, features = ["derive"] }
 proptest.workspace = true
@@ -125,11 +124,6 @@ name = "criterion"
 required-features = ["test-utils"]
 harness = false
 
-[[bench]]
-name = "iai"
-required-features = ["test-utils"]
-harness = false
-
 [[bench]]
 name = "get"
 required-features = ["test-utils"]
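
Both Cargo.toml hunks above drop the iai-callgrind benchmark: the dependency goes, and with it the `[[bench]]` target named "iai". The surviving targets keep `harness = false`, which criterion-style benches (including CodSpeed's) require so that the default libtest harness does not take over `main`. A minimal sketch of the entry point such a target expects; the bench name and body are hypothetical, not from this repo:

    use criterion::{criterion_group, criterion_main, Criterion};

    // Registered under [[bench]] name = "example" with harness = false,
    // criterion_main! expands into the `fn main` the bench binary runs.
    fn example(c: &mut Criterion) {
        c.bench_function("noop_add", |b| b.iter(|| std::hint::black_box(1u64 + 1)));
    }

    criterion_group!(benches, example);
    criterion_main!(benches);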
@@ -3,7 +3,7 @@
 use std::{path::Path, sync::Arc};
 
 use criterion::{
-    black_box, criterion_group, criterion_main, measurement::WallTime, BenchmarkGroup, Criterion,
+    criterion_group, criterion_main, measurement::WallTime, BenchmarkGroup, Criterion,
 };
 use pprof::criterion::{Output, PProfProfiler};
 use reth_db::{tables::*, test_utils::create_test_rw_db_with_path};
@@ -71,12 +71,9 @@ where
         b.iter_with_setup(
             || input.clone(),
             |input| {
-                {
-                    for (k, _, _, _) in input {
-                        k.encode();
-                    }
-                };
-                black_box(());
+                for (k, _, _, _) in input {
+                    k.encode();
+                }
             },
         )
     });
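
This hunk, and the three like it below for decode, compress, and decompress, removes a stray block-plus-`black_box(())` wrapper around the measured loop; the loop body itself is unchanged. The pattern is criterion's `iter_with_setup`: the first closure rebuilds the input on every iteration and is excluded from timing, the second consumes it and is measured. A self-contained sketch, with a plain `Vec<u64>` standing in for the table vectors:

    use criterion::{criterion_group, criterion_main, Criterion};
    use std::hint::black_box;

    fn encode_like(c: &mut Criterion) {
        let input: Vec<u64> = (0..1_000).collect();
        c.bench_function("encode_like", |b| {
            b.iter_with_setup(
                || input.clone(),                   // per-iteration setup, not timed
                |input| {
                    for k in input {
                        black_box(k.to_be_bytes()); // measured body
                    }
                },
            )
        });
    }

    criterion_group!(benches, encode_like);
    criterion_main!(benches);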
@@ -85,12 +82,9 @@ where
         b.iter_with_setup(
             || input.clone(),
             |input| {
-                {
-                    for (_, k, _, _) in input {
-                        let _ = <T as Table>::Key::decode(&k);
-                    }
-                };
-                black_box(());
+                for (_, k, _, _) in input {
+                    let _ = <T as Table>::Key::decode(&k);
+                }
             },
         )
     });
@@ -99,12 +93,9 @@ where
         b.iter_with_setup(
             || input.clone(),
             |input| {
-                {
-                    for (_, _, v, _) in input {
-                        v.compress();
-                    }
-                };
-                black_box(());
+                for (_, _, v, _) in input {
+                    v.compress();
+                }
             },
         )
     });
@@ -113,12 +104,9 @@ where
         b.iter_with_setup(
             || input.clone(),
             |input| {
-                {
-                    for (_, _, _, v) in input {
-                        let _ = <T as Table>::Value::decompress(&v);
-                    }
-                };
-                black_box(());
+                for (_, _, _, v) in input {
+                    let _ = <T as Table>::Value::decompress(&v);
+                }
             },
         )
     });
@@ -148,14 +136,10 @@ where
                 // Create TX
                 let tx = db.tx_mut().expect("tx");
                 let mut crsr = tx.cursor_write::<T>().expect("cursor");
-
-                black_box({
-                    for (k, _, v, _) in input {
-                        crsr.append(k, v).expect("submit");
-                    }
-
-                    tx.inner.commit().unwrap()
-                });
+                for (k, _, v, _) in input {
+                    crsr.append(k, v).expect("submit");
+                }
+                tx.inner.commit().unwrap()
             },
         )
     });
@@ -171,15 +155,12 @@ where
                 // Create TX
                 let tx = db.tx_mut().expect("tx");
                 let mut crsr = tx.cursor_write::<T>().expect("cursor");
+                for index in RANDOM_INDEXES {
+                    let (k, _, v, _) = input.get(index).unwrap().clone();
+                    crsr.insert(k, v).expect("submit");
+                }
 
-                black_box({
-                    for index in RANDOM_INDEXES {
-                        let (k, _, v, _) = input.get(index).unwrap().clone();
-                        crsr.insert(k, v).expect("submit");
-                    }
-
-                    tx.inner.commit().unwrap()
-                });
+                tx.inner.commit().unwrap()
             },
         )
     });
@@ -190,15 +171,11 @@ where
         b.iter(|| {
             // Create TX
             let tx = db.tx().expect("tx");
-
-            {
-                let mut cursor = tx.cursor_read::<T>().expect("cursor");
-                let walker = cursor.walk(Some(input.first().unwrap().0.clone())).unwrap();
-                for element in walker {
-                    element.unwrap();
-                }
-            };
-            black_box(());
+            let mut cursor = tx.cursor_read::<T>().expect("cursor");
+            let walker = cursor.walk(Some(input.first().unwrap().0.clone())).unwrap();
+            for element in walker {
+                element.unwrap();
+            }
         })
     });
 
@@ -208,14 +185,10 @@ where
         b.iter(|| {
             // Create TX
             let tx = db.tx().expect("tx");
-
-            {
-                for index in RANDOM_INDEXES {
-                    let mut cursor = tx.cursor_read::<T>().expect("cursor");
-                    cursor.seek_exact(input.get(index).unwrap().0.clone()).unwrap();
-                }
-            };
-            black_box(());
+            for index in RANDOM_INDEXES {
+                let mut cursor = tx.cursor_read::<T>().expect("cursor");
+                cursor.seek_exact(input.get(index).unwrap().0.clone()).unwrap();
+            }
         })
     });
 }
@@ -245,14 +218,10 @@ where
                 // Create TX
                 let tx = db.tx_mut().expect("tx");
                 let mut crsr = tx.cursor_dup_write::<T>().expect("cursor");
-
-                black_box({
-                    for (k, _, v, _) in input {
-                        crsr.append_dup(k, v).expect("submit");
-                    }
-
-                    tx.inner.commit().unwrap()
-                });
+                for (k, _, v, _) in input {
+                    crsr.append_dup(k, v).expect("submit");
+                }
+                tx.inner.commit().unwrap()
             },
         )
     });
@@ -268,12 +237,10 @@ where
             |(input, db)| {
                 // Create TX
                 let tx = db.tx_mut().expect("tx");
-
                 for index in RANDOM_INDEXES {
                     let (k, _, v, _) = input.get(index).unwrap().clone();
                     tx.put::<T>(k, v).unwrap();
                 }
-
                 tx.inner.commit().unwrap();
             },
         )
@@ -286,14 +253,11 @@ where
             // Create TX
             let tx = db.tx().expect("tx");
 
-            {
-                let mut cursor = tx.cursor_dup_read::<T>().expect("cursor");
-                let walker = cursor.walk_dup(None, Some(T::SubKey::default())).unwrap();
-                for element in walker {
-                    element.unwrap();
-                }
-            };
-            black_box(());
+            let mut cursor = tx.cursor_dup_read::<T>().expect("cursor");
+            let walker = cursor.walk_dup(None, Some(T::SubKey::default())).unwrap();
+            for element in walker {
+                element.unwrap();
+            }
         })
     });
 
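
In all the transaction hunks above, the deleted scaffolding was either `black_box({ ... })` around the body or a trailing `black_box(());`. Neither guards anything: `black_box(())` pins a unit value, and database reads, writes, and commits are observable side effects the optimizer cannot elide. A short illustration of when the barrier is and is not needed (plain Rust, no reth types):

    use std::hint::black_box;

    fn main() {
        let values = vec![1u64, 2, 3];

        // A pure computation whose result is otherwise unused *does* need
        // black_box on the result, or the loop may be optimized away.
        let sum: u64 = values.iter().sum();
        black_box(sum);

        // An operation with an observable side effect (stdout here, a DB
        // commit in the benches above) needs no barrier at all.
        println!("{}", values.len());
    }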
@@ -3,7 +3,7 @@
 use std::{collections::HashSet, path::Path, sync::Arc};
 
 use criterion::{
-    black_box, criterion_group, criterion_main, measurement::WallTime, BenchmarkGroup, Criterion,
+    criterion_group, criterion_main, measurement::WallTime, BenchmarkGroup, Criterion,
 };
 use pprof::criterion::{Output, PProfProfiler};
 use proptest::{
@@ -20,6 +20,7 @@ use reth_db_api::{
     transaction::DbTxMut,
 };
 use reth_fs_util as fs;
+use std::hint::black_box;
 
 mod utils;
 use utils::*;
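
Here `black_box` keeps being used, so the import moves to the standard library: `std::hint::black_box` has been stable since Rust 1.66 and is the drop-in replacement criterion points users to instead of its own re-export. Usage is identical:

    // Stable since Rust 1.66; a drop-in for the deprecated criterion re-export.
    use std::hint::black_box;

    fn main() {
        // The value is treated as opaque by the optimizer, nothing more:
        // semantics are unchanged, so the assert still holds.
        let x = black_box(42u32);
        assert_eq!(x, 42);
    }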
@@ -46,6 +47,12 @@ pub fn hash_keys(c: &mut Criterion) {
     group.sample_size(10);
 
     for size in [10_000, 100_000, 1_000_000] {
+        // Too slow.
+        #[allow(unexpected_cfgs)]
+        if cfg!(codspeed) && size > 10_000 {
+            continue;
+        }
+
         measure_table_insertion::<TransactionHashNumbers>(&mut group, size);
     }
 }
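
The new guard relies on a custom `codspeed` cfg that the CodSpeed toolchain sets when it builds the benches; since rustc does not know that cfg, `#[allow(unexpected_cfgs)]` silences the lint. The same gating can be reproduced locally by injecting the flag by hand; the `RUSTFLAGS` invocation below is an assumption for illustration, not taken from this repo:

    // Skips inputs that are too slow under CodSpeed's instrumented runner.
    // Assumed local equivalent: RUSTFLAGS="--cfg codspeed" cargo run
    // (the CodSpeed harness sets the cfg automatically in CI).
    #[allow(unexpected_cfgs)]
    fn sizes_to_bench() -> Vec<usize> {
        [10_000usize, 100_000, 1_000_000]
            .into_iter()
            .filter(|&size| !(cfg!(codspeed) && size > 10_000))
            .collect()
    }

    fn main() {
        println!("{:?}", sizes_to_bench());
    }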
@@ -1,102 +0,0 @@
-#![allow(missing_docs, non_snake_case, unreachable_pub)]
-
-use iai_callgrind::{
-    library_benchmark, library_benchmark_group, LibraryBenchmarkConfig, RegressionConfig,
-};
-use paste::paste;
-use reth_db_api::table::{Compress, Decode, Decompress, Encode, Table};
-
-mod utils;
-use utils::*;
-
-macro_rules! impl_iai_callgrind_inner {
-    (
-        $(($name:ident, $group_name:ident, $mod:ident, $compress:ident, $decompress:ident, $encode:ident, $decode:ident, $seqread:ident, $randread:ident, $seqwrite:ident, $randwrite:ident))+
-    ) => {
-        use std::hint::black_box;
-        $(
-            #[library_benchmark]
-            pub fn $compress() {
-                for (_, _, v, _) in black_box(load_vectors::<reth_db::tables::$name>()) {
-                    black_box(v.compress());
-                }
-            }
-
-            #[library_benchmark]
-            pub fn $decompress() {
-                for (_, _, _, comp) in black_box(load_vectors::<reth_db::tables::$name>()) {
-                    let _ = black_box(<reth_db::tables::$name as Table>::Value::decompress(&comp));
-                }
-            }
-
-            #[library_benchmark]
-            pub fn $encode() {
-                for (k, _, _, _) in black_box(load_vectors::<reth_db::tables::$name>()) {
-                    black_box(k.encode());
-                }
-            }
-
-            #[library_benchmark]
-            pub fn $decode() {
-                for (_, enc, _, _) in black_box(load_vectors::<reth_db::tables::$name>()) {
-                    let _ = black_box(<reth_db::tables::$name as Table>::Key::decode(&enc));
-                }
-            }
-
-            #[allow(dead_code)]
-            pub const fn $seqread() {}
-
-            #[allow(dead_code)]
-            pub const fn $randread() {}
-
-            #[allow(dead_code)]
-            pub const fn $seqwrite() {}
-
-            #[allow(dead_code)]
-            pub const fn $randwrite() {}
-
-
-            library_benchmark_group!(
-                name = $group_name;
-                config = LibraryBenchmarkConfig::default()
-                    .regression(
-                        RegressionConfig::default().fail_fast(false)
-                    );
-                benchmarks =
-                    $compress,
-                    $decompress,
-                    $encode,
-                    $decode,
-            );
-        )+
-
-        iai_callgrind::main!(
-            config = LibraryBenchmarkConfig::default();
-            library_benchmark_groups = $($group_name),+);
-    };
-}
-
-macro_rules! impl_iai_callgrind {
-    ($($name:ident),+) => {
-        paste! {
-            impl_iai_callgrind_inner!(
-                $(
-                    ( $name, [<$name _group>],[<$name _mod>], [<$name _ValueCompress>], [<$name _ValueDecompress>], [<$name _ValueEncode>], [<$name _ValueDecode>], [<$name _SeqRead>], [<$name _RandomRead>], [<$name _SeqWrite>], [<$name _RandomWrite>])
-                )+
-            );
-        }
-    };
-}
-
-impl_iai_callgrind!(
-    CanonicalHeaders,
-    HeaderTerminalDifficulties,
-    HeaderNumbers,
-    Headers,
-    BlockBodyIndices,
-    BlockOmmers,
-    TransactionHashNumbers,
-    Transactions,
-    PlainStorageState,
-    PlainAccountState
-);
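
The deleted `iai.rs` generated four Valgrind-backed benchmarks (compress, decompress, encode, decode) per table, with `paste!` concatenating identifiers such as `Headers_ValueCompress` out of the table name; with CodSpeed taking over regression tracking, the criterion benches above are the single remaining harness. A minimal sketch of the `paste!` technique the macro relied on, assuming the paste crate just as the deleted file did:

    #![allow(non_snake_case)]

    use paste::paste;

    // Derives identifiers like `Headers_ValueCompress` from a table name,
    // as the deleted macro did for every benchmark variant.
    macro_rules! make_benches {
        ($($name:ident),+) => {
            paste! {
                $(
                    pub fn [<$name _ValueCompress>]() {
                        println!(concat!(stringify!($name), "::ValueCompress"));
                    }
                )+
            }
        };
    }

    make_benches!(Headers, Transactions);

    fn main() {
        Headers_ValueCompress();
        Transactions_ValueCompress();
    }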
@@ -60,15 +60,15 @@ pub mod test_utils {
     use tempfile::TempDir;
 
     /// Error during database open
-    pub const ERROR_DB_OPEN: &str = "Not able to open the database file.";
+    pub const ERROR_DB_OPEN: &str = "could not open the database file";
     /// Error during database creation
-    pub const ERROR_DB_CREATION: &str = "Not able to create the database file.";
+    pub const ERROR_DB_CREATION: &str = "could not create the database file";
     /// Error during database creation
-    pub const ERROR_STATIC_FILES_CREATION: &str = "Not able to create the static file path.";
+    pub const ERROR_STATIC_FILES_CREATION: &str = "could not create the static file path";
     /// Error during table creation
-    pub const ERROR_TABLE_CREATION: &str = "Not able to create tables in the database.";
+    pub const ERROR_TABLE_CREATION: &str = "could not create tables in the database";
     /// Error during tempdir creation
-    pub const ERROR_TEMPDIR: &str = "Not able to create a temporary directory.";
+    pub const ERROR_TEMPDIR: &str = "could not create a temporary directory";
 
     /// A database will delete the db dir when dropped.
     pub struct TempDatabase<DB> {
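
The message constants switch to the usual Rust convention for error strings: lowercase, no trailing period, so they compose cleanly when a caller wraps them. For instance:

    const ERROR_DB_OPEN: &str = "could not open the database file";

    fn main() {
        // Lowercase, unpunctuated fragments read naturally when nested.
        println!("benchmark setup failed: {ERROR_DB_OPEN}");
    }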