2019-07-11 13:58:33 -07:00
|
|
|
use crate::erasure::ErasureConfig;
|
2019-05-10 08:33:58 -07:00
|
|
|
use solana_metrics::datapoint;
|
2019-08-22 16:32:38 -07:00
|
|
|
use std::cmp::Ordering;
|
|
|
|
use std::{collections::BTreeSet, ops::Range, ops::RangeBounds};
|
2019-04-11 14:14:57 -07:00
|
|
|
|
|
|
|
#[derive(Clone, Debug, Default, Deserialize, Serialize, Eq, PartialEq)]
// The Meta column family
pub struct SlotMeta {
    // The number of slots above the root (the genesis block). The first
    // slot has slot 0.
    pub slot: u64,
    // The total number of consecutive blobs starting from index 0
    // we have received for this slot.
    pub consumed: u64,
    // The index *plus one* of the highest blob received for this slot. Useful
    // for checking if the slot has received any blobs yet, and to calculate the
    // range where there is one or more holes: `(consumed..received)`.
    pub received: u64,
    // The index of the blob that is flagged as the last blob for this slot.
    // Stays at std::u64::MAX until that blob arrives (see `is_full`).
    pub last_index: u64,
    // The slot height of the block this one derives from.
    // std::u64::MAX means the parent is unknown (see `is_parent_set`).
    pub parent_slot: u64,
    // The list of slot heights, each of which contains a block that derives
    // from this one.
    pub next_slots: Vec<u64>,
    // True if this slot is full (consumed == last_index + 1) and if every
    // slot that is a parent of this slot is also connected.
    pub is_connected: bool,
}
|
|
|
|
|
2019-08-22 16:32:38 -07:00
|
|
|
#[derive(Clone, Debug, Default, Deserialize, Serialize, Eq, PartialEq)]
// Collection of half-open index ranges, kept sorted by range start and
// intended to be non-overlapping so lookups can binary-search it.
pub struct ErasureSetRanges {
    // Sorted list of stored ranges; maintained by `insert`.
    r: Vec<Range<u64>>,
}
|
|
|
|
|
|
|
|
impl ErasureSetRanges {
|
|
|
|
pub fn insert(&mut self, start: u64, end: u64) -> Result<usize, Range<u64>> {
|
|
|
|
let range = if start < end {
|
|
|
|
(start..end)
|
|
|
|
} else {
|
|
|
|
(end..start)
|
|
|
|
};
|
|
|
|
|
|
|
|
match self.pos(range.start) {
|
|
|
|
Ok(pos) => Err(self.r[pos].clone()),
|
|
|
|
Err(pos) => {
|
|
|
|
self.r.insert(pos, range);
|
|
|
|
Ok(pos)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
fn pos(&self, seek: u64) -> Result<usize, usize> {
|
|
|
|
self.r.binary_search_by(|probe| {
|
|
|
|
if probe.contains(&seek) {
|
|
|
|
Ordering::Equal
|
|
|
|
} else {
|
|
|
|
probe.start.cmp(&seek)
|
|
|
|
}
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn lookup(&self, seek: u64) -> Result<Range<u64>, usize> {
|
|
|
|
self.pos(seek)
|
|
|
|
.map(|pos| self.r[pos].clone())
|
|
|
|
.or_else(|epos| {
|
|
|
|
if epos < self.r.len() && self.r[epos].contains(&seek) {
|
|
|
|
Ok(self.r[epos].clone())
|
|
|
|
} else {
|
|
|
|
Err(epos)
|
|
|
|
}
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-07-10 11:08:17 -07:00
|
|
|
#[derive(Clone, Debug, Default, Deserialize, Serialize, PartialEq)]
/// Index recording presence/absence of blobs
pub struct Index {
    // Slot this index describes.
    pub slot: u64,
    // Presence of data blobs for this slot.
    data: DataIndex,
    // Presence of coding (erasure) blobs for this slot.
    coding: CodingIndex,
}
|
|
|
|
|
|
|
|
#[derive(Clone, Debug, Default, Deserialize, Serialize, PartialEq)]
pub struct DataIndex {
    /// Set of data blob indexes that are present; absence from the set means
    /// the blob has not been received.
    index: BTreeSet<u64>,
}
|
|
|
|
|
|
|
|
#[derive(Clone, Debug, Default, Deserialize, Serialize, PartialEq)]
/// Erasure coding information
pub struct CodingIndex {
    /// Set of coding blob indexes that are present; absence from the set
    /// means the blob has not been received. (An earlier comment described a
    /// map keyed by set index, but the field is a flat set of blob indexes.)
    index: BTreeSet<u64>,
}
|
|
|
|
|
|
|
|
#[derive(Clone, Copy, Debug, Default, Deserialize, Serialize, Eq, PartialEq)]
/// Erasure coding information
pub struct ErasureMeta {
    /// Which erasure set in the slot this is
    /// (also used directly as the set's starting blob index — see
    /// `start_index`).
    pub set_index: u64,
    /// Size of shards in this erasure set
    pub size: usize,
    /// Erasure configuration for this erasure set
    pub config: ErasureConfig,
}
|
|
|
|
|
|
|
|
#[derive(Debug, PartialEq)]
/// Result of `ErasureMeta::status`: whether the erasure set can be recovered.
pub enum ErasureMetaStatus {
    /// Some data blobs are missing, but enough shards (data + coding) are
    /// present to reconstruct them.
    CanRecover,
    /// Every data blob is already present; no recovery is needed.
    DataFull,
    /// Recovery is not yet possible; carries the number of additional shards
    /// still required, as computed by `ErasureMeta::status`.
    StillNeed(usize),
}
|
|
|
|
|
|
|
|
impl Index {
|
|
|
|
pub(in crate::blocktree) fn new(slot: u64) -> Self {
|
|
|
|
Index {
|
|
|
|
slot,
|
|
|
|
data: DataIndex::default(),
|
|
|
|
coding: CodingIndex::default(),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn data(&self) -> &DataIndex {
|
|
|
|
&self.data
|
|
|
|
}
|
|
|
|
pub fn coding(&self) -> &CodingIndex {
|
|
|
|
&self.coding
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn data_mut(&mut self) -> &mut DataIndex {
|
|
|
|
&mut self.data
|
|
|
|
}
|
|
|
|
pub fn coding_mut(&mut self) -> &mut CodingIndex {
|
|
|
|
&mut self.coding
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// TODO: Mark: Change this when coding
|
|
|
|
impl CodingIndex {
|
|
|
|
pub fn present_in_bounds(&self, bounds: impl RangeBounds<u64>) -> usize {
|
|
|
|
self.index.range(bounds).count()
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn is_present(&self, index: u64) -> bool {
|
|
|
|
self.index.contains(&index)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn set_present(&mut self, index: u64, presence: bool) {
|
|
|
|
if presence {
|
|
|
|
self.index.insert(index);
|
|
|
|
} else {
|
|
|
|
self.index.remove(&index);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn set_many_present(&mut self, presence: impl IntoIterator<Item = (u64, bool)>) {
|
|
|
|
for (idx, present) in presence.into_iter() {
|
|
|
|
self.set_present(idx, present);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl DataIndex {
|
|
|
|
pub fn present_in_bounds(&self, bounds: impl RangeBounds<u64>) -> usize {
|
|
|
|
self.index.range(bounds).count()
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn is_present(&self, index: u64) -> bool {
|
|
|
|
self.index.contains(&index)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn set_present(&mut self, index: u64, presence: bool) {
|
|
|
|
if presence {
|
|
|
|
self.index.insert(index);
|
|
|
|
} else {
|
|
|
|
self.index.remove(&index);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn set_many_present(&mut self, presence: impl IntoIterator<Item = (u64, bool)>) {
|
|
|
|
for (idx, present) in presence.into_iter() {
|
|
|
|
self.set_present(idx, present);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-04-11 14:14:57 -07:00
|
|
|
impl SlotMeta {
|
|
|
|
pub fn is_full(&self) -> bool {
|
|
|
|
// last_index is std::u64::MAX when it has no information about how
|
|
|
|
// many blobs will fill this slot.
|
|
|
|
// Note: A full slot with zero blobs is not possible.
|
|
|
|
if self.last_index == std::u64::MAX {
|
|
|
|
return false;
|
|
|
|
}
|
2019-04-25 00:04:49 -07:00
|
|
|
|
|
|
|
// Should never happen
|
|
|
|
if self.consumed > self.last_index + 1 {
|
2019-05-10 08:33:58 -07:00
|
|
|
datapoint!(
|
|
|
|
"blocktree_error",
|
|
|
|
(
|
|
|
|
"error",
|
|
|
|
format!(
|
|
|
|
"Observed a slot meta with consumed: {} > meta.last_index + 1: {}",
|
|
|
|
self.consumed,
|
|
|
|
self.last_index + 1
|
|
|
|
),
|
|
|
|
String
|
|
|
|
)
|
2019-04-25 00:04:49 -07:00
|
|
|
);
|
|
|
|
}
|
2019-04-11 14:14:57 -07:00
|
|
|
|
|
|
|
self.consumed == self.last_index + 1
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn is_parent_set(&self) -> bool {
|
|
|
|
self.parent_slot != std::u64::MAX
|
|
|
|
}
|
|
|
|
|
|
|
|
pub(in crate::blocktree) fn new(slot: u64, parent_slot: u64) -> Self {
|
|
|
|
SlotMeta {
|
|
|
|
slot,
|
|
|
|
consumed: 0,
|
|
|
|
received: 0,
|
|
|
|
parent_slot,
|
|
|
|
next_slots: vec![],
|
|
|
|
is_connected: slot == 0,
|
|
|
|
last_index: std::u64::MAX,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-06-20 20:15:33 -07:00
|
|
|
impl ErasureMeta {
|
2019-07-11 13:58:33 -07:00
|
|
|
pub fn new(set_index: u64, config: &ErasureConfig) -> ErasureMeta {
|
|
|
|
ErasureMeta {
|
|
|
|
set_index,
|
|
|
|
size: 0,
|
|
|
|
config: *config,
|
|
|
|
}
|
2019-04-11 14:14:57 -07:00
|
|
|
}
|
|
|
|
|
2019-07-10 11:08:17 -07:00
|
|
|
pub fn status(&self, index: &Index) -> ErasureMetaStatus {
|
|
|
|
use ErasureMetaStatus::*;
|
2019-06-20 20:15:33 -07:00
|
|
|
|
2019-07-10 11:08:17 -07:00
|
|
|
let start_idx = self.start_index();
|
|
|
|
let (data_end_idx, coding_end_idx) = self.end_indexes();
|
2019-05-02 17:04:40 -07:00
|
|
|
|
2019-07-10 11:08:17 -07:00
|
|
|
let num_coding = index.coding().present_in_bounds(start_idx..coding_end_idx);
|
|
|
|
let num_data = index.data().present_in_bounds(start_idx..data_end_idx);
|
|
|
|
|
2019-07-11 13:58:33 -07:00
|
|
|
let (data_missing, coding_missing) = (
|
|
|
|
self.config.num_data() - num_data,
|
|
|
|
self.config.num_coding() - num_coding,
|
|
|
|
);
|
2019-06-20 20:15:33 -07:00
|
|
|
|
2019-07-10 11:08:17 -07:00
|
|
|
let total_missing = data_missing + coding_missing;
|
|
|
|
|
2019-07-11 13:58:33 -07:00
|
|
|
if data_missing > 0 && total_missing <= self.config.num_coding() {
|
2019-07-10 11:08:17 -07:00
|
|
|
CanRecover
|
|
|
|
} else if data_missing == 0 {
|
|
|
|
DataFull
|
2019-06-20 20:15:33 -07:00
|
|
|
} else {
|
2019-07-11 13:58:33 -07:00
|
|
|
StillNeed(total_missing - self.config.num_coding())
|
2019-06-20 20:15:33 -07:00
|
|
|
}
|
2019-04-19 20:22:51 -07:00
|
|
|
}
|
|
|
|
|
2019-06-20 20:15:33 -07:00
|
|
|
pub fn set_size(&mut self, size: usize) {
|
|
|
|
self.size = size;
|
2019-04-19 20:22:51 -07:00
|
|
|
}
|
|
|
|
|
2019-06-20 20:15:33 -07:00
|
|
|
pub fn size(&self) -> usize {
|
|
|
|
self.size
|
2019-04-11 14:14:57 -07:00
|
|
|
}
|
|
|
|
|
2019-07-11 13:58:33 -07:00
|
|
|
pub fn set_index_for(index: u64, num_data: usize) -> u64 {
|
|
|
|
index / num_data as u64
|
2019-04-24 15:53:01 -07:00
|
|
|
}
|
|
|
|
|
2019-06-20 20:15:33 -07:00
|
|
|
pub fn start_index(&self) -> u64 {
|
2019-08-26 18:27:45 -07:00
|
|
|
self.set_index
|
2019-04-11 14:14:57 -07:00
|
|
|
}
|
2019-04-18 21:56:43 -07:00
|
|
|
|
2019-06-20 20:15:33 -07:00
|
|
|
/// returns a tuple of (data_end, coding_end)
|
|
|
|
pub fn end_indexes(&self) -> (u64, u64) {
|
|
|
|
let start = self.start_index();
|
2019-07-11 13:58:33 -07:00
|
|
|
(
|
|
|
|
start + self.config.num_data() as u64,
|
|
|
|
start + self.config.num_coding() as u64,
|
|
|
|
)
|
2019-06-20 20:15:33 -07:00
|
|
|
}
|
|
|
|
}
|
2019-04-18 21:56:43 -07:00
|
|
|
|
2019-07-10 11:08:17 -07:00
|
|
|
#[cfg(test)]
mod test {
    use super::*;
    use rand::{seq::SliceRandom, thread_rng};
    use std::iter::repeat;

    // Walks ErasureMeta::status through all three variants as blobs are
    // added and removed from the Index.
    #[test]
    fn test_erasure_meta_status() {
        use ErasureMetaStatus::*;

        let set_index = 0;
        let erasure_config = ErasureConfig::default();

        let mut e_meta = ErasureMeta::new(set_index, &erasure_config);
        let mut rng = thread_rng();
        let mut index = Index::new(0);
        e_meta.size = 1;

        let data_indexes = 0..erasure_config.num_data() as u64;
        let coding_indexes = 0..erasure_config.num_coding() as u64;

        // Nothing is present yet: every data blob is still needed.
        assert_eq!(e_meta.status(&index), StillNeed(erasure_config.num_data()));

        index
            .data_mut()
            .set_many_present(data_indexes.clone().zip(repeat(true)));

        // All data present, no coding needed.
        assert_eq!(e_meta.status(&index), DataFull);

        index
            .coding_mut()
            .set_many_present(coding_indexes.clone().zip(repeat(true)));

        // With full coding, removing data blobs one at a time keeps the set
        // recoverable. NOTE(review): asserting CanRecover through all
        // num_data removals relies on config.num_coding() >= num_data.
        for &idx in data_indexes
            .clone()
            .collect::<Vec<_>>()
            .choose_multiple(&mut rng, erasure_config.num_data())
        {
            index.data_mut().set_present(idx, false);

            assert_eq!(e_meta.status(&index), CanRecover);
        }

        index
            .data_mut()
            .set_many_present(data_indexes.zip(repeat(true)));

        // Once data is full again, losing coding blobs never changes the
        // status: DataFull takes priority over missing coding.
        for &idx in coding_indexes
            .collect::<Vec<_>>()
            .choose_multiple(&mut rng, erasure_config.num_coding())
        {
            index.coding_mut().set_present(idx, false);

            assert_eq!(e_meta.status(&index), DataFull);
        }
    }

    // Exercises ErasureSetRanges insertion order, endpoint normalization,
    // and boundary lookups (Ok with the containing range, Err with the
    // insertion point).
    #[test]
    fn test_erasure_set_ranges() {
        let mut ranges = ErasureSetRanges::default();

        // Test empty ranges
        (0..100 as u64).for_each(|i| {
            assert_eq!(ranges.lookup(i), Err(0));
        });

        // Test adding one range and all boundary condition lookups
        assert_eq!(ranges.insert(5, 13), Ok(0));
        assert_eq!(ranges.lookup(0), Err(0));
        assert_eq!(ranges.lookup(4), Err(0));
        assert_eq!(ranges.lookup(5), Ok(5..13));
        assert_eq!(ranges.lookup(12), Ok(5..13));
        assert_eq!(ranges.lookup(13), Err(1));
        assert_eq!(ranges.lookup(100), Err(1));

        // Test adding second range (with backwards values) and all boundary condition lookups
        assert_eq!(ranges.insert(55, 33), Ok(1));
        assert_eq!(ranges.lookup(0), Err(0));
        assert_eq!(ranges.lookup(4), Err(0));
        assert_eq!(ranges.lookup(5), Ok(5..13));
        assert_eq!(ranges.lookup(12), Ok(5..13));
        assert_eq!(ranges.lookup(13), Err(1));
        assert_eq!(ranges.lookup(32), Err(1));
        assert_eq!(ranges.lookup(33), Ok(33..55));
        assert_eq!(ranges.lookup(54), Ok(33..55));
        assert_eq!(ranges.lookup(55), Err(2));

        // Add a third range between previous two ranges
        assert_eq!(ranges.insert(23, 30), Ok(1));
        assert_eq!(ranges.lookup(0), Err(0));
        assert_eq!(ranges.lookup(4), Err(0));
        assert_eq!(ranges.lookup(5), Ok(5..13));
        assert_eq!(ranges.lookup(12), Ok(5..13));
        assert_eq!(ranges.lookup(13), Err(1));
        assert_eq!(ranges.lookup(23), Ok(23..30));
        assert_eq!(ranges.lookup(29), Ok(23..30));
        assert_eq!(ranges.lookup(30), Err(2));
        assert_eq!(ranges.lookup(32), Err(2));
        assert_eq!(ranges.lookup(33), Ok(33..55));
        assert_eq!(ranges.lookup(54), Ok(33..55));
        assert_eq!(ranges.lookup(55), Err(3));
    }
}
|