2019-11-04 21:14:55 -08:00
|
|
|
use clap::{
|
2019-11-13 20:10:38 -08:00
|
|
|
crate_description, crate_name, value_t, value_t_or_exit, values_t_or_exit, App, Arg, SubCommand,
|
2019-11-04 21:14:55 -08:00
|
|
|
};
|
2019-12-12 15:54:50 -08:00
|
|
|
use histogram;
|
|
|
|
use serde_json::json;
|
2020-01-13 13:13:52 -08:00
|
|
|
use solana_ledger::blockstore_db::Database;
|
2019-11-04 18:10:06 -08:00
|
|
|
use solana_ledger::{
|
2019-11-04 22:18:30 -08:00
|
|
|
bank_forks::{BankForks, SnapshotConfig},
|
|
|
|
bank_forks_utils,
|
2020-01-13 13:13:52 -08:00
|
|
|
blockstore::Blockstore,
|
|
|
|
blockstore_db,
|
|
|
|
blockstore_db::Column,
|
|
|
|
blockstore_processor,
|
2019-11-04 18:10:06 -08:00
|
|
|
rooted_slot_iterator::RootedSlotIterator,
|
|
|
|
};
|
2019-11-11 22:22:20 -08:00
|
|
|
use solana_sdk::{
|
|
|
|
clock::Slot, genesis_config::GenesisConfig, instruction_processor_utils::limited_deserialize,
|
2019-11-12 09:13:16 -08:00
|
|
|
native_token::lamports_to_sol, pubkey::Pubkey,
|
2019-11-11 22:22:20 -08:00
|
|
|
};
|
2019-11-20 10:12:43 -08:00
|
|
|
use solana_vote_program::vote_state::VoteState;
|
2019-11-04 18:10:06 -08:00
|
|
|
use std::{
|
2019-11-04 22:18:30 -08:00
|
|
|
collections::{BTreeMap, HashMap, HashSet},
|
2019-11-12 19:27:15 -08:00
|
|
|
ffi::OsStr,
|
2019-11-04 18:10:06 -08:00
|
|
|
fs::File,
|
2019-11-12 19:27:15 -08:00
|
|
|
io::{self, stdout, Write},
|
|
|
|
path::{Path, PathBuf},
|
|
|
|
process::{exit, Command, Stdio},
|
2019-11-04 18:10:06 -08:00
|
|
|
str::FromStr,
|
|
|
|
};
|
2018-08-04 14:31:12 -07:00
|
|
|
|
2019-07-11 20:33:36 -07:00
|
|
|
/// How ledger contents should be rendered to stdout.
///
/// `PartialEq` is derived so callers can compare against a variant
/// (e.g. `method == LedgerOutputMethod::Json`) when deciding whether to
/// emit the JSON array framing.
#[derive(PartialEq)]
enum LedgerOutputMethod {
    // Human-readable, indented text via println!
    Print,
    // Machine-readable JSON written to stdout via serde_json
    Json,
}
|
2019-08-09 15:57:31 -07:00
|
|
|
|
2020-01-13 13:13:52 -08:00
|
|
|
/// Print the metadata and every entry of a single slot to stdout.
///
/// For `Print`, each entry is expanded into its transactions, signatures,
/// accounts and instructions; vote- and system-program instructions are
/// additionally decoded into their enum form when deserialization succeeds.
/// For `Json`, each entry is serialized verbatim followed by ",\n".
///
/// Exits the process (code 1) if the slot's entries cannot be loaded.
fn output_slot(blockstore: &Blockstore, slot: Slot, method: &LedgerOutputMethod) {
    println!("Slot Meta {:?}", blockstore.meta(slot));
    let entries = blockstore
        .get_slot_entries(slot, 0, None)
        .unwrap_or_else(|err| {
            eprintln!("Failed to load entries for slot {}: {:?}", slot, err);
            exit(1);
        });

    for (entry_index, entry) in entries.iter().enumerate() {
        match method {
            LedgerOutputMethod::Print => {
                println!(
                    " Entry {} - num_hashes: {}, hashes: {}, transactions: {}",
                    entry_index,
                    entry.num_hashes,
                    entry.hash,
                    entry.transactions.len()
                );
                for (transactions_index, transaction) in entry.transactions.iter().enumerate() {
                    let message = &transaction.message;
                    println!(" Transaction {}", transactions_index);
                    println!(" Recent Blockhash: {:?}", message.recent_blockhash);
                    for (signature_index, signature) in transaction.signatures.iter().enumerate() {
                        println!(" Signature {}: {:?}", signature_index, signature);
                    }
                    println!(" Header: {:?}", message.header);
                    for (account_index, account) in message.account_keys.iter().enumerate() {
                        println!(" Account {}: {:?}", account_index, account);
                    }
                    for (instruction_index, instruction) in message.instructions.iter().enumerate()
                    {
                        // program_id_index indexes into the message's account list
                        let program_pubkey =
                            message.account_keys[instruction.program_id_index as usize];
                        println!(" Instruction {}", instruction_index);
                        println!(
                            " Program: {} ({})",
                            program_pubkey, instruction.program_id_index
                        );
                        for (account_index, account) in instruction.accounts.iter().enumerate() {
                            let account_pubkey = message.account_keys[*account as usize];
                            println!(
                                " Account {}: {} ({})",
                                account_index, account_pubkey, account
                            );
                        }

                        // Attempt to pretty-print known programs' instruction data;
                        // fall back to raw bytes when decoding fails.
                        let mut raw = true;
                        if program_pubkey == solana_vote_program::id() {
                            if let Ok(vote_instruction) =
                                limited_deserialize::<
                                    solana_vote_program::vote_instruction::VoteInstruction,
                                >(&instruction.data)
                            {
                                println!(" {:?}", vote_instruction);
                                raw = false;
                            }
                        } else if program_pubkey == solana_sdk::system_program::id() {
                            if let Ok(system_instruction) =
                                limited_deserialize::<
                                    solana_sdk::system_instruction::SystemInstruction,
                                >(&instruction.data)
                            {
                                println!(" {:?}", system_instruction);
                                raw = false;
                            }
                        }

                        if raw {
                            println!(" Data: {:?}", instruction.data);
                        }
                    }
                }
            }
            LedgerOutputMethod::Json => {
                serde_json::to_writer(stdout(), &entry).expect("serialize entry");
                stdout().write_all(b",\n").expect("newline");
            }
        }
    }
}
|
|
|
|
|
2020-01-13 13:13:52 -08:00
|
|
|
/// Walk all rooted slots starting at `starting_slot` and dump each one via
/// `output_slot`.
///
/// For `Json` output the stream is wrapped in `{"ledger":[ ... ]}` framing;
/// each slot's meta is emitted before the slot's entries.
/// Exits the process (code 1) if the rooted-slot iterator cannot be created.
fn output_ledger(blockstore: Blockstore, starting_slot: Slot, method: LedgerOutputMethod) {
    let rooted_slot_iterator =
        RootedSlotIterator::new(starting_slot, &blockstore).unwrap_or_else(|err| {
            eprintln!(
                "Failed to load entries starting from slot {}: {:?}",
                starting_slot, err
            );
            exit(1);
        });

    // Open the JSON array before any slot output
    if method == LedgerOutputMethod::Json {
        stdout().write_all(b"{\"ledger\":[\n").expect("open array");
    }

    for (slot, slot_meta) in rooted_slot_iterator {
        match method {
            LedgerOutputMethod::Print => println!("Slot {}", slot),
            LedgerOutputMethod::Json => {
                serde_json::to_writer(stdout(), &slot_meta).expect("serialize slot_meta");
                stdout().write_all(b",\n").expect("newline");
            }
        }

        output_slot(&blockstore, slot, &method);
    }

    // Close the JSON array framing opened above
    if method == LedgerOutputMethod::Json {
        stdout().write_all(b"\n]}\n").expect("close array");
    }
}
|
|
|
|
|
2019-11-12 19:27:15 -08:00
|
|
|
/// Render a DOT graph description to `output_file` by piping it through the
/// Graphviz `dot` binary.
///
/// `output_format` is passed as `-T<format>` (e.g. "pdf", "png").
/// Returns an error if `dot` cannot be spawned, if writing to its stdin
/// fails, or if the process exits with a non-zero status.
fn render_dot(dot: String, output_file: &str, output_format: &str) -> io::Result<()> {
    let spawned = Command::new("dot")
        .arg(format!("-T{}", output_format))
        .arg(format!("-o{}", output_file))
        .stdin(Stdio::piped())
        .spawn();

    let mut child = match spawned {
        Ok(child) => child,
        Err(err) => {
            eprintln!("Failed to spawn dot: {:?}", err);
            return Err(err);
        }
    };

    // Feed the graph text on stdin; stdin was requested as piped above,
    // so as_mut() cannot fail here.
    child.stdin.as_mut().unwrap().write_all(dot.as_bytes())?;

    // wait_with_output() drops the child's stdin handle before waiting,
    // so `dot` sees EOF and can finish.
    let output = child.wait_with_output()?;
    if output.status.success() {
        Ok(())
    } else {
        let code = output.status.code().unwrap_or(-1);
        Err(io::Error::new(
            io::ErrorKind::Other,
            format!("dot failed with error {}", code),
        ))
    }
}
|
|
|
|
|
2019-11-12 09:13:16 -08:00
|
|
|
/// Build a Graphviz DOT string visualizing the bank forks: one node per bank
/// slot linked to its parent, plus one box per validator pointing at the slot
/// of its latest vote, and (optionally) boxes for every historical vote.
///
/// Votes whose target slot is not among the rendered banks are collapsed into
/// a single "..." node annotated with the aggregate absent vote/stake counts.
#[allow(clippy::cognitive_complexity)]
fn graph_forks(
    bank_forks: BankForks,
    bank_forks_info: Vec<blockstore_processor::BankForksInfo>,
    include_all_votes: bool,
) -> String {
    // Search all forks and collect the last vote made by each validator
    // node_pubkey -> (last vote slot, vote state, stake, total stake)
    let mut last_votes = HashMap::new();
    for bfi in &bank_forks_info {
        let bank = bank_forks.banks.get(&bfi.bank_slot).unwrap();

        let total_stake = bank
            .vote_accounts()
            .iter()
            .fold(0, |acc, (_, (stake, _))| acc + stake);
        for (_, (stake, vote_account)) in bank.vote_accounts() {
            let vote_state = VoteState::from(&vote_account).unwrap_or_default();
            if let Some(last_vote) = vote_state.votes.iter().last() {
                let entry = last_votes.entry(vote_state.node_pubkey).or_insert((
                    last_vote.slot,
                    vote_state.clone(),
                    stake,
                    total_stake,
                ));
                // Keep only the most recent vote seen across forks
                if entry.0 < last_vote.slot {
                    *entry = (last_vote.slot, vote_state, stake, total_stake);
                }
            }
        }
    }

    // Figure the stake distribution at all the nodes containing the last vote from each
    // validator
    let mut slot_stake_and_vote_count = HashMap::new();
    for (last_vote_slot, _, stake, total_stake) in last_votes.values() {
        let entry = slot_stake_and_vote_count
            .entry(last_vote_slot)
            .or_insert((0, 0, *total_stake));
        entry.0 += 1;
        entry.1 += stake;
        // total_stake should agree across all entries pointing at this slot
        assert_eq!(entry.2, *total_stake)
    }

    let mut dot = vec!["digraph {".to_string()];

    // Build a subgraph consisting of all banks and links to their parent banks
    dot.push(" subgraph cluster_banks {".to_string());
    dot.push(" style=invis".to_string());
    let mut styled_slots = HashSet::new();
    // node_pubkey -> (vote slot -> vote state), used for --graph-forks-include-all-votes
    let mut all_votes: HashMap<Pubkey, HashMap<Slot, VoteState>> = HashMap::new();
    for bfi in &bank_forks_info {
        let bank = bank_forks.banks.get(&bfi.bank_slot).unwrap();
        let mut bank = bank.clone();

        let mut first = true;
        // Walk from the fork tip back to the root (or an unrendered ancestor)
        loop {
            for (_, (_, vote_account)) in bank.vote_accounts() {
                let vote_state = VoteState::from(&vote_account).unwrap_or_default();
                if let Some(last_vote) = vote_state.votes.iter().last() {
                    let validator_votes = all_votes.entry(vote_state.node_pubkey).or_default();
                    validator_votes
                        .entry(last_vote.slot)
                        .or_insert_with(|| vote_state.clone());
                }
            }

            // Emit each slot's node only once even if shared by several forks
            if !styled_slots.contains(&bank.slot()) {
                dot.push(format!(
                    r#" "{}"[label="{} (epoch {})\nleader: {}{}{}",style="{}{}"];"#,
                    bank.slot(),
                    bank.slot(),
                    bank.epoch(),
                    bank.collector_id(),
                    if let Some(parent) = bank.parent() {
                        format!(
                            "\ntransactions: {}",
                            bank.transaction_count() - parent.transaction_count(),
                        )
                    } else {
                        "".to_string()
                    },
                    if let Some((votes, stake, total_stake)) =
                        slot_stake_and_vote_count.get(&bank.slot())
                    {
                        format!(
                            "\nvotes: {}, stake: {:.1} SOL ({:.1}%)",
                            votes,
                            lamports_to_sol(*stake),
                            *stake as f64 / *total_stake as f64 * 100.,
                        )
                    } else {
                        "".to_string()
                    },
                    // Fork tips are drawn filled to stand out
                    if first { "filled," } else { "" },
                    ""
                ));
                styled_slots.insert(bank.slot());
            }
            first = false;

            match bank.parent() {
                None => {
                    // Non-zero slot with no rendered parent: link to the "..." node
                    if bank.slot() > 0 {
                        dot.push(format!(r#" "{}" -> "..." [dir=back]"#, bank.slot(),));
                    }
                    break;
                }
                Some(parent) => {
                    let slot_distance = bank.slot() - parent.slot();
                    // Thicker edge when the link crosses an epoch boundary
                    let penwidth = if bank.epoch() > parent.epoch() {
                        "5"
                    } else {
                        "1"
                    };
                    // Highlight skipped slots in red
                    let link_label = if slot_distance > 1 {
                        format!("label=\"{} slots\",color=red", slot_distance)
                    } else {
                        "color=blue".to_string()
                    };
                    dot.push(format!(
                        r#" "{}" -> "{}"[{},dir=back,penwidth={}];"#,
                        bank.slot(),
                        parent.slot(),
                        link_label,
                        penwidth
                    ));

                    bank = parent.clone();
                }
            }
        }
    }
    dot.push(" }".to_string());

    // Strafe the banks with links from validators to the bank they last voted on,
    // while collecting information about the absent votes and stakes
    let mut absent_stake = 0;
    let mut absent_votes = 0;
    let mut lowest_last_vote_slot = std::u64::MAX;
    let mut lowest_total_stake = 0;
    for (node_pubkey, (last_vote_slot, vote_state, stake, total_stake)) in &last_votes {
        // The latest vote is rendered separately below, so drop it from the
        // "all votes" set to avoid drawing it twice
        all_votes.entry(*node_pubkey).and_modify(|validator_votes| {
            validator_votes.remove(&last_vote_slot);
        });

        dot.push(format!(
            r#" "last vote {}"[shape=box,label="Latest validator vote: {}\nstake: {} SOL\nroot slot: {}\nvote history:\n{}"];"#,
            node_pubkey,
            node_pubkey,
            lamports_to_sol(*stake),
            vote_state.root_slot.unwrap_or(0),
            vote_state
                .votes
                .iter()
                .map(|vote| format!("slot {} (conf={})", vote.slot, vote.confirmation_count))
                .collect::<Vec<_>>()
                .join("\n")
        ));

        // NOTE: the else-branch of this format argument intentionally mutates
        // the absent-vote accumulators as a side effect while choosing the
        // edge target ("..." for votes outside the rendered slots)
        dot.push(format!(
            r#" "last vote {}" -> "{}" [style=dashed,label="latest vote"];"#,
            node_pubkey,
            if styled_slots.contains(&last_vote_slot) {
                last_vote_slot.to_string()
            } else {
                if *last_vote_slot < lowest_last_vote_slot {
                    lowest_last_vote_slot = *last_vote_slot;
                    lowest_total_stake = *total_stake;
                }
                absent_votes += 1;
                absent_stake += stake;

                "...".to_string()
            },
        ));
    }

    // Annotate the final "..." node with absent vote and stake information
    if absent_votes > 0 {
        dot.push(format!(
            r#" "..."[label="...\nvotes: {}, stake: {:.1} SOL {:.1}%"];"#,
            absent_votes,
            lamports_to_sol(absent_stake),
            absent_stake as f64 / lowest_total_stake as f64 * 100.,
        ));
    }

    // Add for vote information from all banks.
    if include_all_votes {
        for (node_pubkey, validator_votes) in &all_votes {
            for (vote_slot, vote_state) in validator_votes {
                dot.push(format!(
                    r#" "{} vote {}"[shape=box,style=dotted,label="validator vote: {}\nroot slot: {}\nvote history:\n{}"];"#,
                    node_pubkey,
                    vote_slot,
                    node_pubkey,
                    vote_state.root_slot.unwrap_or(0),
                    vote_state
                        .votes
                        .iter()
                        .map(|vote| format!("slot {} (conf={})", vote.slot, vote.confirmation_count))
                        .collect::<Vec<_>>()
                        .join("\n")
                ));

                dot.push(format!(
                    r#" "{} vote {}" -> "{}" [style=dotted,label="vote"];"#,
                    node_pubkey,
                    vote_slot,
                    if styled_slots.contains(&vote_slot) {
                        vote_slot.to_string()
                    } else {
                        "...".to_string()
                    },
                ));
            }
        }
    }

    dot.push("}".to_string());
    dot.join("\n")
}
|
|
|
|
|
2020-01-13 17:21:39 -08:00
|
|
|
/// Scan one rocksdb column family and print size statistics as pretty JSON.
///
/// `key_size` is the column's fixed key width in bytes (keys are
/// fixed-size, so key stats are derived rather than sampled). Value and
/// row (key+value) sizes are accumulated into histograms; percentiles are
/// only emitted when the column has at least one entry.
///
/// Always returns `Ok(())`; the `Result` return matches its callers' `?` use.
fn analyze_column<
    T: solana_ledger::blockstore_db::Column + solana_ledger::blockstore_db::ColumnName,
>(
    db: &Database,
    name: &str,
    key_size: usize,
) -> Result<(), String> {
    let mut key_tot: u64 = 0;
    let mut val_hist = histogram::Histogram::new();
    let mut val_tot: u64 = 0;
    let mut row_hist = histogram::Histogram::new();
    // Fixed per-entry key size in bytes
    let a = key_size as u64;
    for (_x, y) in db.iter::<T>(blockstore_db::IteratorMode::Start).unwrap() {
        let b = y.len() as u64;
        key_tot += a;
        val_hist.increment(b).unwrap();
        val_tot += b;
        row_hist.increment(a + b).unwrap();
    }

    // Percentile/min/max/stddev calls would fail on an empty histogram,
    // hence the two report shapes
    let json_result = if val_hist.entries() > 0 {
        json!({
            "column":name,
            "entries":val_hist.entries(),
            "key_stats":{
                "max":a,
                "total_bytes":key_tot,
            },
            "val_stats":{
                "p50":val_hist.percentile(50.0).unwrap(),
                "p90":val_hist.percentile(90.0).unwrap(),
                "p99":val_hist.percentile(99.0).unwrap(),
                "p999":val_hist.percentile(99.9).unwrap(),
                "min":val_hist.minimum().unwrap(),
                "max":val_hist.maximum().unwrap(),
                "stddev":val_hist.stddev().unwrap(),
                "total_bytes":val_tot,
            },
            "row_stats":{
                "p50":row_hist.percentile(50.0).unwrap(),
                "p90":row_hist.percentile(90.0).unwrap(),
                "p99":row_hist.percentile(99.0).unwrap(),
                "p999":row_hist.percentile(99.9).unwrap(),
                "min":row_hist.minimum().unwrap(),
                "max":row_hist.maximum().unwrap(),
                "stddev":row_hist.stddev().unwrap(),
                "total_bytes":key_tot + val_tot,
            },
        })
    } else {
        json!({
            "column":name,
            "entries":val_hist.entries(),
            "key_stats":{
                "max":a,
                "total_bytes":0,
            },
            "val_stats":{
                "total_bytes":0,
            },
            "row_stats":{
                "total_bytes":0,
            },
        })
    };

    println!("{}", serde_json::to_string_pretty(&json_result).unwrap());

    Ok(())
}
|
|
|
|
|
2019-12-18 10:00:15 -08:00
|
|
|
/// Run `analyze_column` over every known blockstore column family,
/// printing one JSON stats report per column.
///
/// Propagates the first column error via `?` (analyze_column currently
/// always returns Ok, so this normally succeeds).
fn analyze_storage(database: &Database) -> Result<(), String> {
    use blockstore_db::columns::*;
    analyze_column::<SlotMeta>(database, "SlotMeta", SlotMeta::key_size())?;
    analyze_column::<Orphans>(database, "Orphans", Orphans::key_size())?;
    analyze_column::<DeadSlots>(database, "DeadSlots", DeadSlots::key_size())?;
    analyze_column::<ErasureMeta>(database, "ErasureMeta", ErasureMeta::key_size())?;
    analyze_column::<Root>(database, "Root", Root::key_size())?;
    analyze_column::<Index>(database, "Index", Index::key_size())?;
    analyze_column::<ShredData>(database, "ShredData", ShredData::key_size())?;
    analyze_column::<ShredCode>(database, "ShredCode", ShredCode::key_size())?;
    analyze_column::<TransactionStatus>(
        database,
        "TransactionStatus",
        TransactionStatus::key_size(),
    )?;

    Ok(())
}
|
|
|
|
|
2019-12-18 10:00:15 -08:00
|
|
|
fn open_genesis_config(ledger_path: &Path) -> GenesisConfig {
|
|
|
|
GenesisConfig::load(&ledger_path).unwrap_or_else(|err| {
|
|
|
|
eprintln!(
|
|
|
|
"Failed to open ledger genesis_config at {:?}: {}",
|
|
|
|
ledger_path, err
|
|
|
|
);
|
|
|
|
exit(1);
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2020-01-13 13:13:52 -08:00
|
|
|
fn open_blockstore(ledger_path: &Path) -> Blockstore {
|
|
|
|
match Blockstore::open(ledger_path) {
|
|
|
|
Ok(blockstore) => blockstore,
|
2019-12-18 10:00:15 -08:00
|
|
|
Err(err) => {
|
|
|
|
eprintln!("Failed to open ledger at {:?}: {:?}", ledger_path, err);
|
|
|
|
exit(1);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
fn open_database(ledger_path: &Path) -> Database {
|
|
|
|
match Database::open(&ledger_path.join("rocksdb")) {
|
|
|
|
Ok(database) => database,
|
|
|
|
Err(err) => {
|
|
|
|
eprintln!("Unable to read the Ledger rocksdb: {:?}", err);
|
|
|
|
exit(1);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-11-14 11:27:01 -08:00
|
|
|
#[allow(clippy::cognitive_complexity)]
|
2018-08-04 14:31:12 -07:00
|
|
|
fn main() {
|
2019-07-12 16:58:13 -07:00
|
|
|
const DEFAULT_ROOT_COUNT: &str = "1";
|
2020-01-08 09:19:12 -08:00
|
|
|
solana_logger::setup_with_default("solana=info");
|
2019-08-09 15:57:31 -07:00
|
|
|
|
|
|
|
let starting_slot_arg = Arg::with_name("starting_slot")
|
|
|
|
.long("starting-slot")
|
|
|
|
.value_name("NUM")
|
|
|
|
.takes_value(true)
|
|
|
|
.default_value("0")
|
|
|
|
.help("Start at this slot");
|
|
|
|
|
2019-07-11 20:33:36 -07:00
|
|
|
let matches = App::new(crate_name!())
|
|
|
|
.about(crate_description!())
|
2019-11-13 20:10:38 -08:00
|
|
|
.version(solana_clap_utils::version!())
|
2018-08-04 14:31:12 -07:00
|
|
|
.arg(
|
|
|
|
Arg::with_name("ledger")
|
|
|
|
.short("l")
|
|
|
|
.long("ledger")
|
|
|
|
.value_name("DIR")
|
|
|
|
.takes_value(true)
|
2019-08-09 15:57:31 -07:00
|
|
|
.global(true)
|
2018-09-14 15:32:57 -07:00
|
|
|
.help("Use directory for ledger location"),
|
2018-08-04 14:31:12 -07:00
|
|
|
)
|
2019-11-04 22:18:30 -08:00
|
|
|
.subcommand(
|
|
|
|
SubCommand::with_name("print")
|
|
|
|
.about("Print the ledger")
|
|
|
|
.arg(&starting_slot_arg)
|
|
|
|
)
|
|
|
|
.subcommand(
|
|
|
|
SubCommand::with_name("print-slot")
|
2019-11-11 22:22:20 -08:00
|
|
|
.about("Print the contents of one or more slots")
|
2019-11-04 22:18:30 -08:00
|
|
|
.arg(
|
2019-11-11 22:22:20 -08:00
|
|
|
Arg::with_name("slots")
|
2019-11-04 22:18:30 -08:00
|
|
|
.index(1)
|
2019-11-11 22:22:20 -08:00
|
|
|
.value_name("SLOTS")
|
2019-11-04 22:18:30 -08:00
|
|
|
.takes_value(true)
|
2019-11-11 22:22:20 -08:00
|
|
|
.multiple(true)
|
2019-11-04 22:18:30 -08:00
|
|
|
.required(true)
|
2019-11-11 22:22:20 -08:00
|
|
|
.help("List of slots to print"),
|
2019-11-04 22:18:30 -08:00
|
|
|
)
|
|
|
|
)
|
2019-11-08 22:17:48 -08:00
|
|
|
.subcommand(
|
|
|
|
SubCommand::with_name("print-genesis-hash")
|
|
|
|
.about("Prints the ledger's genesis hash")
|
|
|
|
)
|
2019-11-04 22:18:30 -08:00
|
|
|
.subcommand(
|
|
|
|
SubCommand::with_name("bounds")
|
2019-12-20 19:43:53 -08:00
|
|
|
.about("Print lowest and highest non-empty slots. Note that there may be empty slots within the bounds")
|
|
|
|
.arg(
|
|
|
|
Arg::with_name("all")
|
|
|
|
.long("all")
|
|
|
|
.takes_value(false)
|
|
|
|
.required(false)
|
|
|
|
.help("Additionally print all the non-empty slots within the bounds"),
|
|
|
|
)
|
2019-11-04 22:18:30 -08:00
|
|
|
)
|
|
|
|
.subcommand(
|
|
|
|
SubCommand::with_name("json")
|
|
|
|
.about("Print the ledger in JSON format")
|
|
|
|
.arg(&starting_slot_arg)
|
|
|
|
)
|
2019-11-04 21:14:55 -08:00
|
|
|
.subcommand(
|
|
|
|
SubCommand::with_name("verify")
|
2019-11-04 22:18:30 -08:00
|
|
|
.about("Verify the ledger")
|
2019-11-04 21:14:55 -08:00
|
|
|
.arg(
|
|
|
|
Arg::with_name("no_snapshot")
|
|
|
|
.long("no-snapshot")
|
|
|
|
.takes_value(false)
|
|
|
|
.help("Do not start from a local snapshot if present"),
|
|
|
|
)
|
|
|
|
.arg(
|
|
|
|
Arg::with_name("account_paths")
|
|
|
|
.long("accounts")
|
|
|
|
.value_name("PATHS")
|
|
|
|
.takes_value(true)
|
|
|
|
.help("Comma separated persistent accounts location"),
|
|
|
|
)
|
2019-11-04 22:18:30 -08:00
|
|
|
.arg(
|
|
|
|
Arg::with_name("halt_at_slot")
|
|
|
|
.long("halt-at-slot")
|
|
|
|
.value_name("SLOT")
|
|
|
|
.takes_value(true)
|
|
|
|
.help("Halt processing at the given slot"),
|
|
|
|
)
|
|
|
|
.arg(
|
|
|
|
Arg::with_name("skip_poh_verify")
|
|
|
|
.long("skip-poh-verify")
|
|
|
|
.takes_value(false)
|
|
|
|
.help("Skip ledger PoH verification"),
|
|
|
|
)
|
|
|
|
.arg(
|
|
|
|
Arg::with_name("graph_forks")
|
|
|
|
.long("graph-forks")
|
2019-11-12 19:27:15 -08:00
|
|
|
.value_name("FILENAME")
|
2019-11-04 22:18:30 -08:00
|
|
|
.takes_value(true)
|
|
|
|
.help("Create a Graphviz DOT file representing the active forks once the ledger is verified"),
|
|
|
|
)
|
2019-11-12 09:13:16 -08:00
|
|
|
.arg(
|
|
|
|
Arg::with_name("graph_forks_include_all_votes")
|
|
|
|
.long("graph-forks-include-all-votes")
|
|
|
|
.requires("graph_forks")
|
|
|
|
.help("Include all votes in forks graph"),
|
|
|
|
)
|
2019-11-04 22:18:30 -08:00
|
|
|
).subcommand(
|
|
|
|
SubCommand::with_name("prune")
|
|
|
|
.about("Prune the ledger at the block height")
|
|
|
|
.arg(
|
|
|
|
Arg::with_name("slot_list")
|
|
|
|
.long("slot-list")
|
|
|
|
.value_name("FILENAME")
|
|
|
|
.takes_value(true)
|
|
|
|
.required(true)
|
|
|
|
.help("The location of the YAML file with a list of rollback slot heights and hashes"),
|
|
|
|
)
|
2019-11-04 21:14:55 -08:00
|
|
|
)
|
2019-08-09 15:57:31 -07:00
|
|
|
.subcommand(
|
|
|
|
SubCommand::with_name("list-roots")
|
|
|
|
.about("Output upto last <num-roots> root hashes and their heights starting at the given block height")
|
|
|
|
.arg(
|
|
|
|
Arg::with_name("max_height")
|
|
|
|
.long("max-height")
|
|
|
|
.value_name("NUM")
|
|
|
|
.takes_value(true)
|
|
|
|
.required(true)
|
2019-11-04 22:18:30 -08:00
|
|
|
.help("Maximum block height")
|
|
|
|
)
|
|
|
|
.arg(
|
|
|
|
Arg::with_name("slot_list")
|
|
|
|
.long("slot-list")
|
|
|
|
.value_name("FILENAME")
|
|
|
|
.required(false)
|
|
|
|
.takes_value(true)
|
|
|
|
.help("The location of the output YAML file. A list of rollback slot heights and hashes will be written to the file.")
|
|
|
|
)
|
|
|
|
.arg(
|
|
|
|
Arg::with_name("num_roots")
|
|
|
|
.long("num-roots")
|
|
|
|
.value_name("NUM")
|
|
|
|
.takes_value(true)
|
|
|
|
.default_value(DEFAULT_ROOT_COUNT)
|
|
|
|
.required(false)
|
|
|
|
.help("Number of roots in the output"),
|
|
|
|
)
|
|
|
|
)
|
2019-12-12 15:54:50 -08:00
|
|
|
.subcommand(
|
|
|
|
SubCommand::with_name("analyze-storage")
|
|
|
|
.about("Output statistics in JSON format about all column families in the ledger rocksDB")
|
|
|
|
)
|
2018-08-04 14:31:12 -07:00
|
|
|
.get_matches();
|
|
|
|
|
2019-08-09 15:57:31 -07:00
|
|
|
let ledger_path = PathBuf::from(value_t_or_exit!(matches, "ledger", String));
|
2018-08-10 18:41:26 -07:00
|
|
|
|
2018-08-06 16:03:08 -07:00
|
|
|
match matches.subcommand() {
|
2019-08-09 15:57:31 -07:00
|
|
|
("print", Some(args_matches)) => {
|
|
|
|
let starting_slot = value_t_or_exit!(args_matches, "starting_slot", Slot);
|
2019-12-18 10:00:15 -08:00
|
|
|
output_ledger(
|
2020-01-13 13:13:52 -08:00
|
|
|
open_blockstore(&ledger_path),
|
2019-12-18 10:00:15 -08:00
|
|
|
starting_slot,
|
|
|
|
LedgerOutputMethod::Print,
|
|
|
|
);
|
2018-08-09 22:14:04 -07:00
|
|
|
}
|
2019-11-08 22:17:48 -08:00
|
|
|
("print-genesis-hash", Some(_args_matches)) => {
|
2019-12-18 10:00:15 -08:00
|
|
|
println!("{}", open_genesis_config(&ledger_path).hash());
|
2019-11-08 22:17:48 -08:00
|
|
|
}
|
2019-08-09 15:57:31 -07:00
|
|
|
("print-slot", Some(args_matches)) => {
|
2019-11-11 22:22:20 -08:00
|
|
|
let slots = values_t_or_exit!(args_matches, "slots", Slot);
|
|
|
|
for slot in slots {
|
|
|
|
println!("Slot {}", slot);
|
2019-12-18 10:00:15 -08:00
|
|
|
output_slot(
|
2020-01-13 13:13:52 -08:00
|
|
|
&open_blockstore(&ledger_path),
|
2019-12-18 10:00:15 -08:00
|
|
|
slot,
|
|
|
|
&LedgerOutputMethod::Print,
|
|
|
|
);
|
2019-11-11 22:22:20 -08:00
|
|
|
}
|
2019-08-09 15:57:31 -07:00
|
|
|
}
|
|
|
|
("json", Some(args_matches)) => {
|
|
|
|
let starting_slot = value_t_or_exit!(args_matches, "starting_slot", Slot);
|
2019-12-18 10:00:15 -08:00
|
|
|
output_ledger(
|
2020-01-13 13:13:52 -08:00
|
|
|
open_blockstore(&ledger_path),
|
2019-12-18 10:00:15 -08:00
|
|
|
starting_slot,
|
|
|
|
LedgerOutputMethod::Json,
|
|
|
|
);
|
2019-07-11 20:33:36 -07:00
|
|
|
}
|
2019-11-04 21:14:55 -08:00
|
|
|
("verify", Some(arg_matches)) => {
|
2019-07-11 20:33:36 -07:00
|
|
|
println!("Verifying ledger...");
|
2019-11-04 21:14:55 -08:00
|
|
|
|
|
|
|
let dev_halt_at_slot = value_t!(arg_matches, "halt_at_slot", Slot).ok();
|
|
|
|
let poh_verify = !arg_matches.is_present("skip_poh_verify");
|
|
|
|
|
|
|
|
let snapshot_config = if arg_matches.is_present("no_snapshot") {
|
|
|
|
None
|
|
|
|
} else {
|
|
|
|
Some(SnapshotConfig {
|
|
|
|
snapshot_interval_slots: 0, // Value doesn't matter
|
|
|
|
snapshot_package_output_path: ledger_path.clone(),
|
|
|
|
snapshot_path: ledger_path.clone().join("snapshot"),
|
|
|
|
})
|
|
|
|
};
|
|
|
|
let account_paths = if let Some(account_paths) = matches.value_of("account_paths") {
|
2019-12-05 18:41:29 -08:00
|
|
|
account_paths.split(',').map(PathBuf::from).collect()
|
2019-11-04 21:14:55 -08:00
|
|
|
} else {
|
2019-12-05 18:41:29 -08:00
|
|
|
vec![ledger_path.join("accounts")]
|
2019-11-04 21:14:55 -08:00
|
|
|
};
|
|
|
|
|
2020-01-13 13:13:52 -08:00
|
|
|
let process_options = blockstore_processor::ProcessOptions {
|
2019-11-04 21:14:55 -08:00
|
|
|
poh_verify,
|
|
|
|
dev_halt_at_slot,
|
2020-01-13 13:13:52 -08:00
|
|
|
..blockstore_processor::ProcessOptions::default()
|
2019-10-08 14:58:49 -07:00
|
|
|
};
|
2019-11-04 21:14:55 -08:00
|
|
|
|
|
|
|
match bank_forks_utils::load(
|
2019-12-18 10:00:15 -08:00
|
|
|
&open_genesis_config(&ledger_path),
|
2020-01-13 13:13:52 -08:00
|
|
|
&open_blockstore(&ledger_path),
|
2019-11-04 21:14:55 -08:00
|
|
|
account_paths,
|
|
|
|
snapshot_config.as_ref(),
|
|
|
|
process_options,
|
|
|
|
) {
|
2019-11-04 22:18:30 -08:00
|
|
|
Ok((bank_forks, bank_forks_info, _leader_schedule_cache)) => {
|
2019-11-04 21:14:55 -08:00
|
|
|
println!("Ok");
|
2019-11-04 22:18:30 -08:00
|
|
|
|
2019-11-12 19:27:15 -08:00
|
|
|
if let Some(output_file) = arg_matches.value_of("graph_forks") {
|
2019-11-12 09:13:16 -08:00
|
|
|
let dot = graph_forks(
|
|
|
|
bank_forks,
|
|
|
|
bank_forks_info,
|
|
|
|
arg_matches.is_present("graph_forks_include_all_votes"),
|
|
|
|
);
|
|
|
|
|
2019-11-12 19:27:15 -08:00
|
|
|
let extension = Path::new(output_file).extension();
|
|
|
|
let result = if extension == Some(OsStr::new("pdf")) {
|
|
|
|
render_dot(dot, output_file, "pdf")
|
|
|
|
} else if extension == Some(OsStr::new("png")) {
|
|
|
|
render_dot(dot, output_file, "png")
|
|
|
|
} else {
|
|
|
|
File::create(output_file)
|
|
|
|
.and_then(|mut file| file.write_all(&dot.into_bytes()))
|
2019-11-12 09:13:16 -08:00
|
|
|
};
|
2019-11-12 19:27:15 -08:00
|
|
|
|
|
|
|
match result {
|
|
|
|
Ok(_) => println!("Wrote {}", output_file),
|
|
|
|
Err(err) => eprintln!("Unable to write {}: {}", output_file, err),
|
|
|
|
}
|
2019-11-04 22:18:30 -08:00
|
|
|
}
|
2018-08-09 22:14:04 -07:00
|
|
|
}
|
2019-07-11 20:33:36 -07:00
|
|
|
Err(err) => {
|
|
|
|
eprintln!("Ledger verification failed: {:?}", err);
|
|
|
|
exit(1);
|
2018-12-07 20:44:59 -08:00
|
|
|
}
|
2018-08-06 16:03:08 -07:00
|
|
|
}
|
|
|
|
}
|
2019-07-12 16:58:13 -07:00
|
|
|
("prune", Some(args_matches)) => {
|
|
|
|
if let Some(prune_file_path) = args_matches.value_of("slot_list") {
|
2020-01-13 13:13:52 -08:00
|
|
|
let blockstore = open_blockstore(&ledger_path);
|
2019-07-12 16:58:13 -07:00
|
|
|
let prune_file = File::open(prune_file_path.to_string()).unwrap();
|
|
|
|
let slot_hashes: BTreeMap<u64, String> =
|
|
|
|
serde_yaml::from_reader(prune_file).unwrap();
|
|
|
|
|
2019-10-18 09:28:51 -07:00
|
|
|
let iter =
|
2020-01-13 13:13:52 -08:00
|
|
|
RootedSlotIterator::new(0, &blockstore).expect("Failed to get rooted slot");
|
2019-07-12 16:58:13 -07:00
|
|
|
|
|
|
|
let potential_hashes: Vec<_> = iter
|
2019-10-24 22:20:52 -07:00
|
|
|
.filter_map(|(slot, _meta)| {
|
2020-01-13 13:13:52 -08:00
|
|
|
let blockhash = blockstore
|
2019-10-24 22:20:52 -07:00
|
|
|
.get_slot_entries(slot, 0, None)
|
2019-07-12 16:58:13 -07:00
|
|
|
.unwrap()
|
2019-10-24 22:20:52 -07:00
|
|
|
.last()
|
2019-07-12 16:58:13 -07:00
|
|
|
.unwrap()
|
|
|
|
.hash
|
|
|
|
.to_string();
|
|
|
|
|
|
|
|
slot_hashes.get(&slot).and_then(|hash| {
|
|
|
|
if *hash == blockhash {
|
|
|
|
Some((slot, blockhash))
|
|
|
|
} else {
|
|
|
|
None
|
|
|
|
}
|
|
|
|
})
|
|
|
|
})
|
|
|
|
.collect();
|
|
|
|
|
|
|
|
let (target_slot, target_hash) = potential_hashes
|
|
|
|
.last()
|
|
|
|
.expect("Failed to find a valid slot");
|
|
|
|
println!("Prune at slot {:?} hash {:?}", target_slot, target_hash);
|
2020-01-13 13:13:52 -08:00
|
|
|
blockstore.prune(*target_slot);
|
2019-07-12 16:58:13 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
("list-roots", Some(args_matches)) => {
|
2020-01-13 13:13:52 -08:00
|
|
|
let blockstore = open_blockstore(&ledger_path);
|
2019-07-12 16:58:13 -07:00
|
|
|
let max_height = if let Some(height) = args_matches.value_of("max_height") {
|
|
|
|
usize::from_str(height).expect("Maximum height must be a number")
|
|
|
|
} else {
|
|
|
|
panic!("Maximum height must be provided");
|
|
|
|
};
|
|
|
|
let num_roots = if let Some(roots) = args_matches.value_of("num_roots") {
|
|
|
|
usize::from_str(roots).expect("Number of roots must be a number")
|
|
|
|
} else {
|
|
|
|
usize::from_str(DEFAULT_ROOT_COUNT).unwrap()
|
|
|
|
};
|
|
|
|
|
2020-01-13 13:13:52 -08:00
|
|
|
let iter = RootedSlotIterator::new(0, &blockstore).expect("Failed to get rooted slot");
|
2019-07-12 16:58:13 -07:00
|
|
|
|
|
|
|
let slot_hash: Vec<_> = iter
|
2019-10-24 22:20:52 -07:00
|
|
|
.filter_map(|(slot, _meta)| {
|
2019-07-12 16:58:13 -07:00
|
|
|
if slot <= max_height as u64 {
|
2020-01-13 13:13:52 -08:00
|
|
|
let blockhash = blockstore
|
2019-10-24 22:20:52 -07:00
|
|
|
.get_slot_entries(slot, 0, None)
|
2019-07-12 16:58:13 -07:00
|
|
|
.unwrap()
|
2019-10-24 22:20:52 -07:00
|
|
|
.last()
|
2019-07-12 16:58:13 -07:00
|
|
|
.unwrap()
|
|
|
|
.hash;
|
|
|
|
Some((slot, blockhash))
|
|
|
|
} else {
|
|
|
|
None
|
|
|
|
}
|
|
|
|
})
|
|
|
|
.collect();
|
|
|
|
|
2019-08-15 13:00:09 -07:00
|
|
|
let mut output_file: Box<dyn Write> =
|
|
|
|
if let Some(path) = args_matches.value_of("slot_list") {
|
|
|
|
match File::create(path) {
|
|
|
|
Ok(file) => Box::new(file),
|
|
|
|
_ => Box::new(stdout()),
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
Box::new(stdout())
|
|
|
|
};
|
2019-07-12 16:58:13 -07:00
|
|
|
|
|
|
|
slot_hash
|
|
|
|
.into_iter()
|
|
|
|
.rev()
|
|
|
|
.enumerate()
|
|
|
|
.for_each(|(i, (slot, hash))| {
|
|
|
|
if i < num_roots {
|
|
|
|
output_file
|
|
|
|
.write_all(format!("{:?}: {:?}\n", slot, hash).as_bytes())
|
|
|
|
.expect("failed to write");
|
|
|
|
}
|
|
|
|
});
|
|
|
|
}
|
2019-12-20 19:43:53 -08:00
|
|
|
("bounds", Some(args_matches)) => {
|
2020-01-13 13:13:52 -08:00
|
|
|
match open_blockstore(&ledger_path).slot_meta_iterator(0) {
|
2019-12-20 19:43:53 -08:00
|
|
|
Ok(metas) => {
|
|
|
|
let all = args_matches.is_present("all");
|
|
|
|
|
|
|
|
println!("Collecting Ledger information...");
|
|
|
|
let slots: Vec<_> = metas.map(|(slot, _)| slot).collect();
|
|
|
|
if slots.is_empty() {
|
|
|
|
println!("Ledger is empty. No slots found.");
|
2019-10-24 22:20:52 -07:00
|
|
|
} else {
|
2019-12-20 19:43:53 -08:00
|
|
|
let first = slots.first().unwrap();
|
|
|
|
let last = slots.last().unwrap_or_else(|| first);
|
|
|
|
if first != last {
|
|
|
|
println!("Ledger contains data from slots {:?} to {:?}", first, last);
|
|
|
|
if all {
|
|
|
|
println!("Non-empty slots: {:?}", slots);
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
println!("Ledger only contains some data for slot {:?}", first);
|
|
|
|
}
|
2019-10-24 22:20:52 -07:00
|
|
|
}
|
|
|
|
}
|
2019-12-20 19:43:53 -08:00
|
|
|
Err(err) => {
|
|
|
|
eprintln!("Unable to read the Ledger: {:?}", err);
|
|
|
|
exit(1);
|
|
|
|
}
|
2019-10-24 22:20:52 -07:00
|
|
|
}
|
2019-12-20 19:43:53 -08:00
|
|
|
}
|
2019-12-18 10:00:15 -08:00
|
|
|
("analyze-storage", _) => match analyze_storage(&open_database(&ledger_path)) {
|
2019-12-12 15:54:50 -08:00
|
|
|
Ok(()) => {
|
|
|
|
println!("Ok.");
|
|
|
|
}
|
|
|
|
Err(err) => {
|
|
|
|
eprintln!("Unable to read the Ledger: {:?}", err);
|
|
|
|
exit(1);
|
|
|
|
}
|
|
|
|
},
|
2018-08-06 16:03:08 -07:00
|
|
|
("", _) => {
|
2018-08-09 22:14:04 -07:00
|
|
|
eprintln!("{}", matches.usage());
|
|
|
|
exit(1);
|
2018-08-06 16:03:08 -07:00
|
|
|
}
|
|
|
|
_ => unreachable!(),
|
|
|
|
};
|
2018-08-04 14:31:12 -07:00
|
|
|
}
|