Mirror of https://github.com/hl-archive-node/nanoreth.git, synced 2025-12-06 10:59:55 +00:00
feat(trie): sparse trie leaf removal (#11752)
Co-authored-by: Roman Krasiuk <rokrassyuk@gmail.com>
Cargo.lock (generated): 8 lines changed
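Before the diff itself, here is a minimal usage sketch (not part of this commit) of the `remove_leaf` API it introduces, using only names that appear in the diff below (`RevealedSparseTrie`, `update_leaf`, `remove_leaf`, `root`, `Nibbles`, `alloy_rlp::encode_fixed_size`). The surrounding function and its placement inside the sparse trie crate are illustrative assumptions; the paths mirror the new `sparse_trie_remove_leaf` test.

    // Illustrative sketch only: exercising leaf removal on a tiny revealed sparse trie.
    fn remove_leaf_example() {
        let value = alloy_rlp::encode_fixed_size(&alloy_primitives::U256::ZERO).to_vec();

        let mut sparse = RevealedSparseTrie::default();
        sparse.update_leaf(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1]), value.clone()).unwrap();
        sparse.update_leaf(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3]), value).unwrap();

        // Removing one of the two leaves leaves the branch with a single child,
        // so it collapses back into a leaf and the root reflects the new structure.
        sparse.remove_leaf(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1])).unwrap();
        let _root = sparse.root();
    }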
@@ -4464,7 +4464,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4"
dependencies = [
 "cfg-if",
 "windows-targets 0.48.5",
 "windows-targets 0.52.6",
]

[[package]]
@@ -9248,9 +9248,13 @@ dependencies = [
 "assert_matches",
 "criterion",
 "itertools 0.13.0",
 "pretty_assertions",
 "proptest",
 "rand 0.8.5",
 "rayon",
 "reth-primitives",
 "reth-testing-utils",
 "reth-tracing",
 "reth-trie",
 "reth-trie-common",
 "smallvec",
@@ -11388,7 +11392,7 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
dependencies = [
 "windows-sys 0.48.0",
 "windows-sys 0.59.0",
]

[[package]]

@@ -15,6 +15,7 @@ workspace = true
[dependencies]
# reth
reth-primitives.workspace = true
reth-tracing.workspace = true
reth-trie-common.workspace = true
reth-trie.workspace = true

@@ -26,18 +27,22 @@ alloy-rlp.workspace = true
tracing.workspace = true

# misc
thiserror.workspace = true
rayon.workspace = true
smallvec = { workspace = true, features = ["const_new"] }
thiserror.workspace = true

[dev-dependencies]
reth-primitives = { workspace = true, features = ["test-utils", "arbitrary"] }
reth-trie-common = { workspace = true, features = ["test-utils", "arbitrary"] }
reth-testing-utils.workspace = true
reth-trie = { workspace = true, features = ["test-utils"] }
reth-trie-common = { workspace = true, features = ["test-utils", "arbitrary"] }

assert_matches.workspace = true
itertools.workspace = true
proptest.workspace = true
criterion.workspace = true
itertools.workspace = true
pretty_assertions = "1.4"
proptest.workspace = true
rand.workspace = true

[[bench]]
name = "root"

@@ -1,6 +1,7 @@
use crate::{SparseTrieError, SparseTrieResult};
use alloy_primitives::{hex, keccak256, map::HashMap, B256};
use alloy_rlp::Decodable;
use reth_tracing::tracing::debug;
use reth_trie::{
    prefix_set::{PrefixSet, PrefixSetMut},
    RlpNode,
@@ -264,8 +265,244 @@ impl RevealedSparseTrie {
    }

    /// Remove leaf node from the trie.
    pub fn remove_leaf(&mut self, _path: Nibbles) {
        unimplemented!()
    pub fn remove_leaf(&mut self, path: Nibbles) -> SparseTrieResult<()> {
        self.prefix_set.insert(path.clone());
        let existing = self.values.remove(&path);
        if existing.is_none() {
            // trie structure unchanged, return immediately
            return Ok(())
        }

        let mut removed_nodes = self.take_nodes_for_path(&path)?;
        debug!(target: "trie::sparse", ?path, ?removed_nodes, "Removed nodes for path");
        // Pop the first node from the stack which is the leaf node we want to remove.
        let mut child = removed_nodes.pop().expect("leaf exists");
        #[cfg(debug_assertions)]
        {
            let mut child_path = child.path.clone();
            let SparseNode::Leaf { key, .. } = &child.node else { panic!("expected leaf node") };
            child_path.extend_from_slice_unchecked(key);
            assert_eq!(child_path, path);
        }

        // If we don't have any other removed nodes, insert an empty node at the root.
        if removed_nodes.is_empty() {
            debug_assert!(self.nodes.is_empty());
            self.nodes.insert(Nibbles::default(), SparseNode::Empty);

            return Ok(())
        }

        // Walk the stack of removed nodes from the back and re-insert them back into the trie,
        // adjusting the node type as needed.
        while let Some(removed_node) = removed_nodes.pop() {
            let removed_path = removed_node.path;

            let new_node = match &removed_node.node {
                SparseNode::Empty => return Err(SparseTrieError::Blind),
                SparseNode::Hash(hash) => {
                    return Err(SparseTrieError::BlindedNode { path: removed_path, hash: *hash })
                }
                SparseNode::Leaf { .. } => {
                    unreachable!("we already popped the leaf node")
                }
                SparseNode::Extension { key, .. } => {
                    // If the node is an extension node, we need to look at its child to see if we
                    // need to merge them.
                    match &child.node {
                        SparseNode::Empty => return Err(SparseTrieError::Blind),
                        SparseNode::Hash(hash) => {
                            return Err(SparseTrieError::BlindedNode {
                                path: child.path,
                                hash: *hash,
                            })
                        }
                        // For a leaf node, we collapse the extension node into a leaf node,
                        // extending the key. While it's impossible to encounter an extension node
                        // followed by a leaf node in a complete trie, it's possible here because we
                        // could have downgraded the extension node's child into a leaf node from
                        // another node type.
                        SparseNode::Leaf { key: leaf_key, .. } => {
                            self.nodes.remove(&child.path);

                            let mut new_key = key.clone();
                            new_key.extend_from_slice_unchecked(leaf_key);
                            SparseNode::new_leaf(new_key)
                        }
                        // For an extension node, we collapse them into one extension node,
                        // extending the key
                        SparseNode::Extension { key: extension_key, .. } => {
                            self.nodes.remove(&child.path);

                            let mut new_key = key.clone();
                            new_key.extend_from_slice_unchecked(extension_key);
                            SparseNode::new_ext(new_key)
                        }
                        // For a branch node, we just leave the extension node as-is.
                        SparseNode::Branch { .. } => removed_node.node,
                    }
                }
                SparseNode::Branch { mut state_mask, hash: _ } => {
                    // If the node is a branch node, we need to check the number of children left
                    // after deleting the child at the given nibble.

                    if let Some(removed_nibble) = removed_node.unset_branch_nibble {
                        state_mask.unset_bit(removed_nibble);
                    }

                    // If only one child is left set in the branch node, we need to collapse it.
                    if state_mask.count_bits() == 1 {
                        let child_nibble =
                            state_mask.first_set_bit_index().expect("state mask is not empty");

                        // Get full path of the only child node left.
                        let mut child_path = removed_path.clone();
                        child_path.push_unchecked(child_nibble);

                        // Remove the only child node.
                        let child = self.nodes.get(&child_path).unwrap();

                        debug!(target: "trie::sparse", ?removed_path, ?child_path, ?child, "Branch node has only one child");

                        let mut delete_child = false;
                        let new_node = match child {
                            SparseNode::Empty => return Err(SparseTrieError::Blind),
                            SparseNode::Hash(hash) => {
                                return Err(SparseTrieError::BlindedNode {
                                    path: child_path,
                                    hash: *hash,
                                })
                            }
                            // If the only child is a leaf node, we downgrade the branch node into a
                            // leaf node, prepending the nibble to the key, and delete the old
                            // child.
                            SparseNode::Leaf { key, .. } => {
                                delete_child = true;

                                let mut new_key = Nibbles::from_nibbles_unchecked([child_nibble]);
                                new_key.extend_from_slice_unchecked(key);
                                SparseNode::new_leaf(new_key)
                            }
                            // If the only child node is an extension node, we downgrade the branch
                            // node into an even longer extension node, prepending the nibble to the
                            // key, and delete the old child.
                            SparseNode::Extension { key, .. } => {
                                delete_child = true;

                                let mut new_key = Nibbles::from_nibbles_unchecked([child_nibble]);
                                new_key.extend_from_slice_unchecked(key);
                                SparseNode::new_ext(new_key)
                            }
                            // If the only child is a branch node, we downgrade the current branch
                            // node into a one-nibble extension node.
                            SparseNode::Branch { .. } => {
                                SparseNode::new_ext(Nibbles::from_nibbles_unchecked([child_nibble]))
                            }
                        };

                        if delete_child {
                            self.nodes.remove(&child_path);
                        }

                        new_node
                    }
                    // If more than one child is left set in the branch, we just re-insert it
                    // as-is.
                    else {
                        SparseNode::new_branch(state_mask)
                    }
                }
            };

            child = RemovedSparseNode {
                path: removed_path.clone(),
                node: new_node.clone(),
                unset_branch_nibble: None,
            };
            debug!(target: "trie::sparse", ?removed_path, ?new_node, "Re-inserting the node");
            self.nodes.insert(removed_path, new_node);
        }

        Ok(())
    }

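    // Illustration (editorial note, not part of this diff): in the
    // `sparse_trie_remove_leaf` test below, removing leaf 0x52013 unsets nibble
    // 0x2 in the branch at path 0x5; two children (0x0 and 0x3) remain set, so
    // that branch is re-inserted as-is. Later, removing leaf 0x53102 leaves the
    // branch at path 0x53 with the single child 0x3 (itself a branch), so it is
    // downgraded to a one-nibble extension with key 0x3, matching the expected
    // node map asserted in the test.
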
    /// Traverse trie nodes down to the leaf node and collect all nodes along the path.
    fn take_nodes_for_path(&mut self, path: &Nibbles) -> SparseTrieResult<Vec<RemovedSparseNode>> {
        let mut current = Nibbles::default(); // Start traversal from the root
        let mut nodes = Vec::new(); // Collect traversed nodes

        while let Some(node) = self.nodes.remove(&current) {
            match &node {
                SparseNode::Empty => return Err(SparseTrieError::Blind),
                SparseNode::Hash(hash) => {
                    return Err(SparseTrieError::BlindedNode { path: current, hash: *hash })
                }
                SparseNode::Leaf { key: _key, .. } => {
                    // Leaf node is always the one that we're deleting, and no other leaf nodes can
                    // be found during traversal.

                    #[cfg(debug_assertions)]
                    {
                        let mut current = current.clone();
                        current.extend_from_slice_unchecked(_key);
                        assert_eq!(&current, path);
                    }

                    nodes.push(RemovedSparseNode {
                        path: current.clone(),
                        node,
                        unset_branch_nibble: None,
                    });
                    break
                }
                SparseNode::Extension { key, .. } => {
                    #[cfg(debug_assertions)]
                    {
                        let mut current = current.clone();
                        current.extend_from_slice_unchecked(key);
                        assert!(path.starts_with(&current));
                    }

                    let path = current.clone();
                    current.extend_from_slice_unchecked(key);
                    nodes.push(RemovedSparseNode { path, node, unset_branch_nibble: None });
                }
                SparseNode::Branch { state_mask, .. } => {
                    let nibble = path[current.len()];
                    debug_assert!(state_mask.is_bit_set(nibble));

                    // If the branch node has a child that is a leaf node that we're removing,
                    // we need to unset this nibble.
                    // Any other branch nodes will not require unsetting the nibble, because
                    // deleting one leaf node can not remove the whole path
                    // where the branch node is located.
                    let mut child_path =
                        Nibbles::from_nibbles([current.as_slice(), &[nibble]].concat());
                    let unset_branch_nibble = self
                        .nodes
                        .get(&child_path)
                        .map_or(false, move |node| match node {
                            SparseNode::Leaf { key, .. } => {
                                // Get full path of the leaf node
                                child_path.extend_from_slice_unchecked(key);
                                &child_path == path
                            }
                            _ => false,
                        })
                        .then_some(nibble);

                    nodes.push(RemovedSparseNode {
                        path: current.clone(),
                        node,
                        unset_branch_nibble,
                    });

                    current.push_unchecked(nibble);
                }
            }
        }

        Ok(nodes)
    }

    /// Return the root of the sparse trie.
@@ -476,13 +713,87 @@ impl SparseNode {
    }
}

#[derive(Debug)]
struct RemovedSparseNode {
    path: Nibbles,
    node: SparseNode,
    unset_branch_nibble: Option<u8>,
}

#[cfg(test)]
mod tests {
    use std::collections::BTreeMap;

    use super::*;
    use alloy_primitives::U256;
    use itertools::Itertools;
    use proptest::prelude::*;
    use reth_trie_common::HashBuilder;
    use rand::seq::IteratorRandom;
    use reth_testing_utils::generators;
    use reth_trie::{BranchNode, ExtensionNode, LeafNode};
    use reth_trie_common::{
        proof::{ProofNodes, ProofRetainer},
        HashBuilder,
    };

    /// Calculate the state root by feeding the provided state to the hash builder and retaining the
    /// proofs for the provided targets.
    ///
    /// Returns the state root and the retained proof nodes.
    fn hash_builder_root_with_proofs<V: AsRef<[u8]>>(
        state: impl IntoIterator<Item = (Nibbles, V)>,
        proof_targets: impl IntoIterator<Item = Nibbles>,
    ) -> (B256, ProofNodes) {
        let mut hash_builder =
            HashBuilder::default().with_proof_retainer(ProofRetainer::from_iter(proof_targets));
        for (key, value) in state {
            hash_builder.add_leaf(key, value.as_ref());
        }
        (hash_builder.root(), hash_builder.take_proof_nodes())
    }

    /// Assert that the sparse trie nodes and the proof nodes from the hash builder are equal.
    fn assert_eq_sparse_trie_proof_nodes(
        sparse_trie: &RevealedSparseTrie,
        proof_nodes: ProofNodes,
    ) {
        let proof_nodes = proof_nodes
            .into_nodes_sorted()
            .into_iter()
            .map(|(path, node)| (path, TrieNode::decode(&mut node.as_ref()).unwrap()));

        let sparse_nodes = sparse_trie.nodes.iter().sorted_by_key(|(path, _)| *path);

        for ((proof_node_path, proof_node), (sparse_node_path, sparse_node)) in
            proof_nodes.zip(sparse_nodes)
        {
            assert_eq!(&proof_node_path, sparse_node_path);

            let equals = match (&proof_node, &sparse_node) {
                // Both nodes are empty
                (TrieNode::EmptyRoot, SparseNode::Empty) => true,
                // Both nodes are branches and have the same state mask
                (
                    TrieNode::Branch(BranchNode { state_mask: proof_state_mask, .. }),
                    SparseNode::Branch { state_mask: sparse_state_mask, .. },
                ) => proof_state_mask == sparse_state_mask,
                // Both nodes are extensions and have the same key
                (
                    TrieNode::Extension(ExtensionNode { key: proof_key, .. }),
                    SparseNode::Extension { key: sparse_key, .. },
                ) |
                // Both nodes are leaves and have the same key
                (
                    TrieNode::Leaf(LeafNode { key: proof_key, .. }),
                    SparseNode::Leaf { key: sparse_key, .. },
                ) => proof_key == sparse_key,
                // Empty and hash nodes are specific to the sparse trie, skip them
                (_, SparseNode::Empty | SparseNode::Hash(_)) => continue,
                _ => false,
            };
            assert!(equals, "proof node: {:?}, sparse node: {:?}", proof_node, sparse_node);
        }
    }

    #[test]
    fn sparse_trie_is_blind() {
@@ -495,14 +806,15 @@ mod tests {
        let path = Nibbles::unpack(B256::with_last_byte(42));
        let value = alloy_rlp::encode_fixed_size(&U256::from(1));

        let mut hash_builder = HashBuilder::default();
        hash_builder.add_leaf(path.clone(), &value);
        let expected = hash_builder.root();
        let (hash_builder_root, hash_builder_proof_nodes) =
            hash_builder_root_with_proofs([(path.clone(), &value)], [path.clone()]);

        let mut sparse = RevealedSparseTrie::default();
        sparse.update_leaf(path, value.to_vec()).unwrap();
        let root = sparse.root();
        assert_eq!(root, expected);
        let sparse_root = sparse.root();

        assert_eq!(sparse_root, hash_builder_root);
        assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes);
    }

    #[test]
@@ -510,18 +822,19 @@
        let paths = (0..=16).map(|b| Nibbles::unpack(B256::with_last_byte(b))).collect::<Vec<_>>();
        let value = alloy_rlp::encode_fixed_size(&U256::from(1));

        let mut hash_builder = HashBuilder::default();
        for path in &paths {
            hash_builder.add_leaf(path.clone(), &value);
        }
        let expected = hash_builder.root();
        let (hash_builder_root, hash_builder_proof_nodes) = hash_builder_root_with_proofs(
            paths.iter().cloned().zip(std::iter::repeat_with(|| value.clone())),
            paths.clone(),
        );

        let mut sparse = RevealedSparseTrie::default();
        for path in &paths {
            sparse.update_leaf(path.clone(), value.to_vec()).unwrap();
        }
        let root = sparse.root();
        assert_eq!(root, expected);
        let sparse_root = sparse.root();

        assert_eq!(sparse_root, hash_builder_root);
        assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes);
    }

    #[test]
@@ -529,18 +842,19 @@
        let paths = (239..=255).map(|b| Nibbles::unpack(B256::repeat_byte(b))).collect::<Vec<_>>();
        let value = alloy_rlp::encode_fixed_size(&U256::from(1));

        let mut hash_builder = HashBuilder::default();
        for path in &paths {
            hash_builder.add_leaf(path.clone(), &value);
        }
        let expected = hash_builder.root();
        let (hash_builder_root, hash_builder_proof_nodes) = hash_builder_root_with_proofs(
            paths.iter().cloned().zip(std::iter::repeat_with(|| value.clone())),
            paths.clone(),
        );

        let mut sparse = RevealedSparseTrie::default();
        for path in &paths {
            sparse.update_leaf(path.clone(), value.to_vec()).unwrap();
        }
        let root = sparse.root();
        assert_eq!(root, expected);
        let sparse_root = sparse.root();

        assert_eq!(sparse_root, hash_builder_root);
        assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes);
    }

    #[test]
@@ -556,18 +870,19 @@
            .collect::<Vec<_>>();
        let value = alloy_rlp::encode_fixed_size(&U256::from(1));

        let mut hash_builder = HashBuilder::default();
        for path in paths.iter().sorted_unstable_by_key(|key| *key) {
            hash_builder.add_leaf(path.clone(), &value);
        }
        let expected = hash_builder.root();
        let (hash_builder_root, hash_builder_proof_nodes) = hash_builder_root_with_proofs(
            paths.iter().sorted_unstable().cloned().zip(std::iter::repeat_with(|| value.clone())),
            paths.clone(),
        );

        let mut sparse = RevealedSparseTrie::default();
        for path in &paths {
            sparse.update_leaf(path.clone(), value.to_vec()).unwrap();
        }
        let root = sparse.root();
        assert_eq!(root, expected);
        let sparse_root = sparse.root();

        assert_eq!(sparse_root, hash_builder_root);
        assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes);
    }

    #[test]
@@ -576,52 +891,339 @@ mod tests {
        let old_value = alloy_rlp::encode_fixed_size(&U256::from(1));
        let new_value = alloy_rlp::encode_fixed_size(&U256::from(2));

        let mut hash_builder = HashBuilder::default();
        for path in paths.iter().sorted_unstable_by_key(|key| *key) {
            hash_builder.add_leaf(path.clone(), &old_value);
        }
        let expected = hash_builder.root();
        let (hash_builder_root, hash_builder_proof_nodes) = hash_builder_root_with_proofs(
            paths.iter().cloned().zip(std::iter::repeat_with(|| old_value.clone())),
            paths.clone(),
        );

        let mut sparse = RevealedSparseTrie::default();
        for path in &paths {
            sparse.update_leaf(path.clone(), old_value.to_vec()).unwrap();
        }
        let root = sparse.root();
        assert_eq!(root, expected);
        let sparse_root = sparse.root();

        let mut hash_builder = HashBuilder::default();
        for path in paths.iter().sorted_unstable_by_key(|key| *key) {
            hash_builder.add_leaf(path.clone(), &new_value);
        }
        let expected = hash_builder.root();
        assert_eq!(sparse_root, hash_builder_root);
        assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes);

        let (hash_builder_root, hash_builder_proof_nodes) = hash_builder_root_with_proofs(
            paths.iter().cloned().zip(std::iter::repeat_with(|| new_value.clone())),
            paths.clone(),
        );

        for path in &paths {
            sparse.update_leaf(path.clone(), new_value.to_vec()).unwrap();
        }
        let root = sparse.root();
        assert_eq!(root, expected);
        let sparse_root = sparse.root();

        assert_eq!(sparse_root, hash_builder_root);
        assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes);
    }

    #[test]
    fn sparse_trie_empty_update_fuzz() {
    fn sparse_trie_remove_leaf() {
        reth_tracing::init_test_tracing();

        let mut sparse = RevealedSparseTrie::default();

        let value = alloy_rlp::encode_fixed_size(&U256::ZERO).to_vec();

        sparse
            .update_leaf(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1]), value.clone())
            .unwrap();
        sparse
            .update_leaf(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3]), value.clone())
            .unwrap();
        sparse
            .update_leaf(Nibbles::from_nibbles([0x5, 0x2, 0x0, 0x1, 0x3]), value.clone())
            .unwrap();
        sparse
            .update_leaf(Nibbles::from_nibbles([0x5, 0x3, 0x1, 0x0, 0x2]), value.clone())
            .unwrap();
        sparse
            .update_leaf(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0, 0x2]), value.clone())
            .unwrap();
        sparse.update_leaf(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2, 0x0]), value).unwrap();

        // Extension (Key = 5)
        // └── Branch (Mask = 1011)
        //     ├── 0 -> Extension (Key = 23)
        //     │        └── Branch (Mask = 0101)
        //     │              ├── 1 -> Leaf (Key = 1, Path = 50231)
        //     │              └── 3 -> Leaf (Key = 3, Path = 50233)
        //     ├── 2 -> Leaf (Key = 013, Path = 52013)
        //     └── 3 -> Branch (Mask = 0101)
        //                ├── 1 -> Leaf (Key = 3102, Path = 53102)
        //                └── 3 -> Branch (Mask = 1010)
        //                       ├── 0 -> Leaf (Key = 3302, Path = 53302)
        //                       └── 2 -> Leaf (Key = 3320, Path = 53320)
        pretty_assertions::assert_eq!(
            sparse.nodes.clone().into_iter().collect::<BTreeMap<_, _>>(),
            BTreeMap::from_iter([
                (Nibbles::new(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1101.into())),
                (
                    Nibbles::from_nibbles([0x5, 0x0]),
                    SparseNode::new_ext(Nibbles::from_nibbles([0x2, 0x3]))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3]),
                    SparseNode::new_branch(0b1010.into())
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1]),
                    SparseNode::new_leaf(Nibbles::new())
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3]),
                    SparseNode::new_leaf(Nibbles::new())
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x2]),
                    SparseNode::new_leaf(Nibbles::from_nibbles([0x0, 0x1, 0x3]))
                ),
                (Nibbles::from_nibbles([0x5, 0x3]), SparseNode::new_branch(0b1010.into())),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x1]),
                    SparseNode::new_leaf(Nibbles::from_nibbles([0x0, 0x2]))
                ),
                (Nibbles::from_nibbles([0x5, 0x3, 0x3]), SparseNode::new_branch(0b0101.into())),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0]),
                    SparseNode::new_leaf(Nibbles::from_nibbles([0x2]))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2]),
                    SparseNode::new_leaf(Nibbles::from_nibbles([0x0]))
                )
            ])
        );

        sparse.remove_leaf(Nibbles::from_nibbles([0x5, 0x2, 0x0, 0x1, 0x3])).unwrap();

        // Extension (Key = 5)
        // └── Branch (Mask = 1001)
        //     ├── 0 -> Extension (Key = 23)
        //     │        └── Branch (Mask = 0101)
        //     │              ├── 1 -> Leaf (Key = 0231, Path = 50231)
        //     │              └── 3 -> Leaf (Key = 0233, Path = 50233)
        //     └── 3 -> Branch (Mask = 0101)
        //                ├── 1 -> Leaf (Key = 3102, Path = 53102)
        //                └── 3 -> Branch (Mask = 1010)
        //                       ├── 0 -> Leaf (Key = 3302, Path = 53302)
        //                       └── 2 -> Leaf (Key = 3320, Path = 53320)
        pretty_assertions::assert_eq!(
            sparse.nodes.clone().into_iter().collect::<BTreeMap<_, _>>(),
            BTreeMap::from_iter([
                (Nibbles::new(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into())),
                (
                    Nibbles::from_nibbles([0x5, 0x0]),
                    SparseNode::new_ext(Nibbles::from_nibbles([0x2, 0x3]))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3]),
                    SparseNode::new_branch(0b1010.into())
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1]),
                    SparseNode::new_leaf(Nibbles::new())
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3]),
                    SparseNode::new_leaf(Nibbles::new())
                ),
                (Nibbles::from_nibbles([0x5, 0x3]), SparseNode::new_branch(0b1010.into())),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x1]),
                    SparseNode::new_leaf(Nibbles::from_nibbles([0x0, 0x2]))
                ),
                (Nibbles::from_nibbles([0x5, 0x3, 0x3]), SparseNode::new_branch(0b0101.into())),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0]),
                    SparseNode::new_leaf(Nibbles::from_nibbles([0x2]))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2]),
                    SparseNode::new_leaf(Nibbles::from_nibbles([0x0]))
                )
            ])
        );

        sparse.remove_leaf(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1])).unwrap();

        // Extension (Key = 5)
        // └── Branch (Mask = 1001)
        //     ├── 0 -> Leaf (Key = 0233, Path = 50233)
        //     └── 3 -> Branch (Mask = 0101)
        //                ├── 1 -> Leaf (Key = 3102, Path = 53102)
        //                └── 3 -> Branch (Mask = 1010)
        //                       ├── 0 -> Leaf (Key = 3302, Path = 53302)
        //                       └── 2 -> Leaf (Key = 3320, Path = 53320)
        pretty_assertions::assert_eq!(
            sparse.nodes.clone().into_iter().collect::<BTreeMap<_, _>>(),
            BTreeMap::from_iter([
                (Nibbles::new(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into())),
                (
                    Nibbles::from_nibbles([0x5, 0x0]),
                    SparseNode::new_leaf(Nibbles::from_nibbles([0x2, 0x3, 0x3]))
                ),
                (Nibbles::from_nibbles([0x5, 0x3]), SparseNode::new_branch(0b1010.into())),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x1]),
                    SparseNode::new_leaf(Nibbles::from_nibbles([0x0, 0x2]))
                ),
                (Nibbles::from_nibbles([0x5, 0x3, 0x3]), SparseNode::new_branch(0b0101.into())),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0]),
                    SparseNode::new_leaf(Nibbles::from_nibbles([0x2]))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2]),
                    SparseNode::new_leaf(Nibbles::from_nibbles([0x0]))
                )
            ])
        );

        sparse.remove_leaf(Nibbles::from_nibbles([0x5, 0x3, 0x1, 0x0, 0x2])).unwrap();

        // Extension (Key = 5)
        // └── Branch (Mask = 1001)
        //     ├── 0 -> Leaf (Key = 0233, Path = 50233)
        //     └── 3 -> Branch (Mask = 1010)
        //                ├── 0 -> Leaf (Key = 3302, Path = 53302)
        //                └── 2 -> Leaf (Key = 3320, Path = 53320)
        pretty_assertions::assert_eq!(
            sparse.nodes.clone().into_iter().collect::<BTreeMap<_, _>>(),
            BTreeMap::from_iter([
                (Nibbles::new(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into())),
                (
                    Nibbles::from_nibbles([0x5, 0x0]),
                    SparseNode::new_leaf(Nibbles::from_nibbles([0x2, 0x3, 0x3]))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3]),
                    SparseNode::new_ext(Nibbles::from_nibbles([0x3]))
                ),
                (Nibbles::from_nibbles([0x5, 0x3, 0x3]), SparseNode::new_branch(0b0101.into())),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0]),
                    SparseNode::new_leaf(Nibbles::from_nibbles([0x2]))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2]),
                    SparseNode::new_leaf(Nibbles::from_nibbles([0x0]))
                )
            ])
        );

        sparse.remove_leaf(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2, 0x0])).unwrap();

        // Extension (Key = 5)
        // └── Branch (Mask = 1001)
        //     ├── 0 -> Leaf (Key = 0233, Path = 50233)
        //     └── 3 -> Leaf (Key = 3302, Path = 53302)
        pretty_assertions::assert_eq!(
            sparse.nodes.clone().into_iter().collect::<BTreeMap<_, _>>(),
            BTreeMap::from_iter([
                (Nibbles::new(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into())),
                (
                    Nibbles::from_nibbles([0x5, 0x0]),
                    SparseNode::new_leaf(Nibbles::from_nibbles([0x2, 0x3, 0x3]))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3]),
                    SparseNode::new_leaf(Nibbles::from_nibbles([0x3, 0x0, 0x2]))
                ),
            ])
        );

        sparse.remove_leaf(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3])).unwrap();

        // Leaf (Key = 53302)
        pretty_assertions::assert_eq!(
            sparse.nodes.clone().into_iter().collect::<BTreeMap<_, _>>(),
            BTreeMap::from_iter([(
                Nibbles::new(),
                SparseNode::new_leaf(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0, 0x2]))
            ),])
        );

        sparse.remove_leaf(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0, 0x2])).unwrap();

        // Empty
        pretty_assertions::assert_eq!(
            sparse.nodes.clone().into_iter().collect::<BTreeMap<_, _>>(),
            BTreeMap::from_iter([(Nibbles::new(), SparseNode::Empty),])
        );
    }

    #[test]
    fn sparse_trie_fuzz() {
        proptest!(ProptestConfig::with_cases(10), |(updates: Vec<HashMap<B256, U256>>)| {
            let mut state = std::collections::BTreeMap::default();
            let mut rng = generators::rng();

            let mut state = BTreeMap::default();
            let mut unpacked_state = BTreeMap::default();
            let mut sparse = RevealedSparseTrie::default();

            for update in updates {
                for (key, value) in &update {
                    sparse.update_leaf(Nibbles::unpack(key), alloy_rlp::encode_fixed_size(value).to_vec()).unwrap();
                }
                let root = sparse.root();
                let keys_to_delete_len = update.len() / 2;

                let unpacked_update = update.iter().map(|(key, value)| (
                    Nibbles::unpack(key),
                    alloy_rlp::encode_fixed_size(value).to_vec()
                ));

                // Insert state updates into the sparse trie and calculate the root
                for (key, value) in unpacked_update.clone() {
                    sparse.update_leaf(key, value).unwrap();
                }
                let sparse_root = sparse.root();

                // Insert state updates into the hash builder and calculate the root
                unpacked_state.extend(unpacked_update);
                state.extend(update);
                let mut hash_builder = HashBuilder::default();
                for (key, value) in &state {
                    hash_builder.add_leaf(Nibbles::unpack(key), &alloy_rlp::encode_fixed_size(value));
                }
                let expected = hash_builder.root();
                let keys = state.keys().map(Nibbles::unpack).collect::<Vec<_>>();
                let (hash_builder_root, hash_builder_proof_nodes) = hash_builder_root_with_proofs(
                    unpacked_state.clone(),
                    keys,
                );

                assert_eq!(root, expected);
                // Assert that the sparse trie root matches the hash builder root
                assert_eq!(sparse_root, hash_builder_root);
                // Assert that the sparse trie nodes match the hash builder proof nodes
                assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes);

                // Delete some keys from both the hash builder and the sparse trie and check
                // that the sparse trie root still matches the hash builder root

                let keys_to_delete = state
                    .keys()
                    .choose_multiple(&mut rng, keys_to_delete_len)
                    .into_iter()
                    .copied()
                    .collect::<Vec<_>>();
                for key in keys_to_delete {
                    state.remove(&key).unwrap();
                    unpacked_state.remove(&Nibbles::unpack(key)).unwrap();
                    sparse.remove_leaf(Nibbles::unpack(key)).unwrap();
                }

                let sparse_root = sparse.root();

                let keys = state.keys().map(Nibbles::unpack).collect::<Vec<_>>();
                let (hash_builder_root, hash_builder_proof_nodes) = hash_builder_root_with_proofs(
                    unpacked_state.clone(),
                    keys,
                );

                // Assert that the sparse trie root matches the hash builder root
                assert_eq!(sparse_root, hash_builder_root);
                // Assert that the sparse trie nodes match the hash builder proof nodes
                assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes);
            }
        });
    }