extract max_size stat update for disk buckets (#29487)

HaoranYi 2023-01-07 09:25:04 -06:00 committed by GitHub
parent 43a0745b37
commit aa3aa5cc9c
3 changed files with 13 additions and 10 deletions


@@ -390,11 +390,7 @@ impl<T: Clone + Copy> Bucket<T> {
                     }
                 }
                 if valid {
-                    let sz = index.capacity();
-                    {
-                        let mut max = self.stats.index.max_size.lock().unwrap();
-                        *max = std::cmp::max(*max, sz);
-                    }
+                    self.stats.index.update_max_size(index.capacity());
                     let mut items = self.reallocated.items.lock().unwrap();
                     items.index = Some((random, index));
                     self.reallocated.add_reallocation();


@@ -11,6 +11,13 @@ pub struct BucketStats {
     pub find_entry_mut_us: AtomicU64,
 }
 
+impl BucketStats {
+    pub fn update_max_size(&self, size: u64) {
+        let mut max = self.max_size.lock().unwrap();
+        *max = std::cmp::max(*max, size);
+    }
+}
+
 #[derive(Debug, Default)]
 pub struct BucketMapStats {
     pub index: Arc<BucketStats>,


@@ -341,6 +341,10 @@ impl BucketStorage {
         self.stats.resize_us.fetch_add(m.as_us(), Ordering::Relaxed);
     }
 
+    pub fn update_max_size(&self) {
+        self.stats.update_max_size(self.capacity());
+    }
+
     /// allocate a new bucket, copying data from 'bucket'
     pub fn new_resized(
         drives: &Arc<Vec<PathBuf>>,
@@ -365,11 +369,7 @@ impl BucketStorage {
         if let Some(bucket) = bucket {
            new_bucket.copy_contents(bucket);
         }
-        let sz = new_bucket.capacity();
-        {
-            let mut max = new_bucket.stats.max_size.lock().unwrap();
-            *max = std::cmp::max(*max, sz);
-        }
+        new_bucket.update_max_size();
         new_bucket
     }
 
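
The change replaces the duplicated lock-and-compare code at each call site with a single BucketStats::update_max_size helper. Below is a minimal, self-contained sketch of that pattern; the struct here is a simplified stand-in that keeps only the max_size field from the diff, not the full BucketStats type from the crate.

    use std::cmp;
    use std::sync::Mutex;

    /// Simplified stand-in for BucketStats: only the field touched by this change.
    #[derive(Debug, Default)]
    pub struct BucketStats {
        pub max_size: Mutex<u64>,
    }

    impl BucketStats {
        /// Record the largest capacity observed so far (the helper extracted by this commit).
        pub fn update_max_size(&self, size: u64) {
            let mut max = self.max_size.lock().unwrap();
            *max = cmp::max(*max, size);
        }
    }

    fn main() {
        let stats = BucketStats::default();

        // Callers now report a new capacity through the helper instead of
        // locking `max_size` and calling `std::cmp::max` inline at each site.
        stats.update_max_size(4096);
        stats.update_max_size(1024); // a smaller value never lowers the maximum

        assert_eq!(*stats.max_size.lock().unwrap(), 4096);
        println!("max_size = {}", stats.max_size.lock().unwrap());
    }

Since the helper takes &self and locks the Mutex internally, any call site holding an Arc<BucketStats> (as Bucket and BucketStorage do) can report a capacity with one line, which is what the two rewritten call sites in the diff rely on.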