feat: modified MultiConsumerLruCache to track in-memory usage (#14034)

Co-authored-by: Matthias Seitz <matthias.seitz@outlook.de>
This commit is contained in:
Poulav Bhowmick
2025-01-29 15:31:28 +05:30
committed by GitHub
parent c7152ee9fa
commit fd7074eac2
3 changed files with 76 additions and 7 deletions

View File

@ -14,4 +14,6 @@ pub(crate) struct CacheMetrics {
pub(crate) hits_total: Counter,
/// The number of cache misses.
pub(crate) misses_total: Counter,
/// The memory usage of the cache.
pub(crate) memory_usage: Gauge,
}

View File

@ -1,16 +1,15 @@
//! Metered cache, which also provides storage for senders in order to queue queries that result in
//! a cache miss.
use super::metrics::CacheMetrics;
use reth_primitives_traits::InMemorySize;
use schnellru::{ByLength, Limiter, LruMap};
use std::{
collections::{hash_map::Entry, HashMap},
fmt::{self, Debug, Formatter},
hash::Hash,
};
use schnellru::{ByLength, Limiter, LruMap};
use super::metrics::CacheMetrics;
/// A multi-consumer LRU cache.
pub struct MultiConsumerLruCache<K, V, L, S>
where
@ -23,6 +22,8 @@ where
queued: HashMap<K, Vec<S>>,
/// Cache metrics
metrics: CacheMetrics,
/// Tracked heap usage of the cached values.
memory_usage: usize,
}
impl<K, V, L, S> Debug for MultiConsumerLruCache<K, V, L, S>
@ -35,6 +36,7 @@ where
.field("cache_length", &self.cache.len())
.field("cache_memory_usage", &self.cache.memory_usage())
.field("queued_length", &self.queued.len())
.field("memory_usage", &self.memory_usage)
.finish()
}
}
@ -62,8 +64,13 @@ where
}
/// Remove consumers for a given key, this will also remove the key from the cache.
pub fn remove(&mut self, key: &K) -> Option<Vec<S>> {
let _ = self.cache.remove(key);
pub fn remove(&mut self, key: &K) -> Option<Vec<S>>
where
V: InMemorySize,
{
self.cache
.remove(key)
.inspect(|value| self.memory_usage = self.memory_usage.saturating_sub(value.size()));
self.queued
.remove(key)
.inspect(|removed| self.metrics.queued_consumers_count.decrement(removed.len() as f64))
@ -89,14 +96,22 @@ where
pub fn insert<'a>(&mut self, key: L::KeyToInsert<'a>, value: V) -> bool
where
L::KeyToInsert<'a>: Hash + PartialEq<K>,
V: InMemorySize,
{
self.cache.insert(key, value)
let size = value.size();
if self.cache.insert(key, value) {
self.memory_usage = self.memory_usage.saturating_add(size);
true
} else {
false
}
}
/// Update metrics for the inner cache.
#[inline]
pub fn update_cached_metrics(&self) {
self.metrics.cached_count.set(self.cache.len() as f64);
self.metrics.memory_usage.set(self.memory_usage as f64);
}
}
@ -110,6 +125,7 @@ where
cache: LruMap::new(ByLength::new(max_len)),
queued: Default::default(),
metrics: CacheMetrics::new_with_labels(&[("cache", cache_id.to_string())]),
memory_usage: 0,
}
}
}

View File

@ -8309,6 +8309,57 @@
"refId": "D",
"useBackend": false
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"disableTextWrap": false,
"editorMode": "builder",
"expr": "reth_rpc_eth_cache_memory_usage{instance=\"$instance\", cache=\"receipts\"}",
"fullMetaSearch": false,
"hide": false,
"includeNullMetadata": true,
"instant": false,
"legendFormat": "Receipts cache memory usage",
"range": true,
"refId": "G",
"useBackend": false
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"disableTextWrap": false,
"editorMode": "builder",
"expr": "reth_rpc_eth_cache_memory_usage{instance=\"$instance\", cache=\"blocks\"}",
"fullMetaSearch": false,
"hide": false,
"includeNullMetadata": true,
"instant": false,
"legendFormat": "Blocks cache memory usage",
"range": true,
"refId": "E",
"useBackend": false
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"disableTextWrap": false,
"editorMode": "builder",
"expr": "reth_rpc_eth_cache_memory_usage{instance=\"$instance\", cache=\"headers\"}",
"fullMetaSearch": false,
"hide": false,
"includeNullMetadata": true,
"instant": false,
"legendFormat": "Headers cache memory usage",
"range": true,
"refId": "F",
"useBackend": false
},
{
"datasource": {
"type": "prometheus",