Strictly validate the contents of snapshot/genesis (#8959)

automerge
This commit is contained in:
Ryo Onodera 2020-03-25 18:46:41 +09:00 committed by GitHub
parent 5d3f43c10b
commit f987c18a7e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
9 changed files with 549 additions and 76 deletions

View File

@ -137,10 +137,12 @@ mod tests {
.collect();
// Create some fake snapshot
fs::create_dir_all(&snapshots_dir).unwrap();
let snapshots_paths: Vec<_> = (0..5)
.map(|i| {
let fake_snapshot_path = snapshots_dir.join(format!("fake_snapshot_{}", i));
let snapshot_file_name = format!("{}", i);
let snapshots_dir = snapshots_dir.join(&snapshot_file_name);
fs::create_dir_all(&snapshots_dir).unwrap();
let fake_snapshot_path = snapshots_dir.join(&snapshot_file_name);
let mut fake_snapshot_file = OpenOptions::new()
.read(true)
.write(true)
@ -157,7 +159,9 @@ mod tests {
let link_snapshots_dir = tempfile::tempdir_in(&temp_dir).unwrap();
for snapshots_path in snapshots_paths {
let snapshot_file_name = snapshots_path.file_name().unwrap();
let link_path = link_snapshots_dir.path().join(snapshot_file_name);
let link_snapshots_dir = link_snapshots_dir.path().join(snapshot_file_name);
fs::create_dir_all(&link_snapshots_dir).unwrap();
let link_path = link_snapshots_dir.join(snapshot_file_name);
fs::hard_link(&snapshots_path, &link_path).unwrap();
}

View File

@ -30,6 +30,7 @@ use solana_ledger::{
blockstore::{Blockstore, CompletedSlotsReceiver},
blockstore_processor::{self, BankForksInfo},
create_new_tmp_ledger,
hardened_unpack::open_genesis_config,
leader_schedule::FixedSchedule,
leader_schedule_cache::LeaderScheduleCache,
};
@ -565,10 +566,7 @@ fn new_banks_from_blockstore(
LeaderScheduleCache,
Option<(Slot, Hash)>,
) {
let genesis_config = GenesisConfig::load(blockstore_path).unwrap_or_else(|err| {
error!("Failed to load genesis from {:?}: {}", blockstore_path, err);
process::exit(1);
});
let genesis_config = open_genesis_config(blockstore_path);
// This needs to be limited otherwise the state in the VoteAccount data
// grows too large

View File

@ -1,15 +1,13 @@
use bzip2::bufread::BzDecoder;
use console::Emoji;
use indicatif::{ProgressBar, ProgressStyle};
use log::*;
use solana_sdk::clock::Slot;
use solana_sdk::genesis_config::GenesisConfig;
use solana_sdk::hash::Hash;
use std::fs::{self, File};
use std::io;
use std::io::Read;
use std::net::SocketAddr;
use std::path::Path;
use std::path::{Path, PathBuf};
use std::time::Instant;
static TRUCK: Emoji = Emoji("🚚 ", "");
@ -108,35 +106,12 @@ pub fn download_file(url: &str, destination_file: &Path) -> Result<(), String> {
Ok(())
}
/// Decompress a `.tar.bz2` archive into `destination_dir`, creating the
/// directory first and logging how long the extraction took.
fn extract_archive(archive_filename: &Path, destination_dir: &Path) -> Result<(), String> {
    info!("Extracting {:?}...", archive_filename);
    let started_at = Instant::now();

    fs::create_dir_all(destination_dir).map_err(|err| err.to_string())?;

    let compressed = File::open(&archive_filename)
        .map_err(|err| format!("Unable to open {:?}: {:?}", archive_filename, err))?;
    let decompressor = BzDecoder::new(std::io::BufReader::new(compressed));
    tar::Archive::new(decompressor)
        .unpack(destination_dir)
        .map_err(|err| format!("Unable to unpack {:?}: {:?}", archive_filename, err))?;

    info!(
        "Extracted {:?} in {:?}",
        archive_filename,
        Instant::now().duration_since(started_at)
    );
    Ok(())
}
pub fn download_genesis(
pub fn download_genesis_if_missing(
rpc_addr: &SocketAddr,
ledger_path: &Path,
expected_genesis_hash: Option<Hash>,
) -> Result<Hash, String> {
let genesis_package = ledger_path.join("genesis.tar.bz2");
let genesis_config = if !genesis_package.exists() {
let tmp_genesis_path = ledger_path.join("tmp-genesis");
genesis_package: &Path,
) -> Result<PathBuf, String> {
if !genesis_package.exists() {
let tmp_genesis_path = genesis_package.parent().unwrap().join("tmp-genesis");
let tmp_genesis_package = tmp_genesis_path.join("genesis.tar.bz2");
let _ignored = fs::remove_dir_all(&tmp_genesis_path);
@ -144,30 +119,11 @@ pub fn download_genesis(
&format!("http://{}/{}", rpc_addr, "genesis.tar.bz2"),
&tmp_genesis_package,
)?;
extract_archive(&tmp_genesis_package, &ledger_path)?;
let tmp_genesis_config = GenesisConfig::load(&ledger_path)
.map_err(|err| format!("Failed to load downloaded genesis config: {}", err))?;
if let Some(expected_genesis_hash) = expected_genesis_hash {
if expected_genesis_hash != tmp_genesis_config.hash() {
return Err(format!(
"Genesis hash mismatch: expected {} but downloaded genesis hash is {}",
expected_genesis_hash,
tmp_genesis_config.hash(),
));
}
}
std::fs::rename(tmp_genesis_package, genesis_package)
.map_err(|err| format!("Unable to rename: {:?}", err))?;
tmp_genesis_config
Ok(tmp_genesis_package)
} else {
GenesisConfig::load(&ledger_path)
.map_err(|err| format!("Failed to load genesis config: {}", err))?
};
Ok(genesis_config.hash())
Err("genesis already exists".to_string())
}
}
pub fn download_snapshot(

View File

@ -11,6 +11,7 @@ use solana_ledger::{
blockstore::Blockstore,
blockstore_db::{self, Column, Database},
blockstore_processor::{BankForksInfo, ProcessOptions},
hardened_unpack::open_genesis_config,
rooted_slot_iterator::RootedSlotIterator,
snapshot_utils,
};
@ -572,16 +573,6 @@ fn analyze_storage(database: &Database) -> Result<(), String> {
Ok(())
}
/// Load the genesis config from `ledger_path`, exiting the process with an
/// error message when it cannot be read.
fn open_genesis_config(ledger_path: &Path) -> GenesisConfig {
    match GenesisConfig::load(&ledger_path) {
        Ok(genesis_config) => genesis_config,
        Err(err) => {
            eprintln!(
                "Failed to open ledger genesis_config at {:?}: {}",
                ledger_path, err
            );
            exit(1);
        }
    }
}
fn open_blockstore(ledger_path: &Path) -> Blockstore {
match Blockstore::open(ledger_path) {
Ok(blockstore) => blockstore,

View File

@ -0,0 +1,471 @@
use bzip2::bufread::BzDecoder;
use log::*;
use regex::Regex;
use solana_sdk::genesis_config::GenesisConfig;
use std::{
fs::{self, File},
io::{BufReader, Read},
path::{
Component::{CurDir, Normal},
Path,
},
time::Instant,
};
use tar::{
Archive,
EntryType::{Directory, GNUSparse, Regular},
};
use thiserror::Error;
/// Errors produced while validating and unpacking snapshot/genesis archives.
#[derive(Error, Debug)]
pub enum UnpackError {
    // Underlying filesystem/stream failure while reading or writing entries.
    #[error("IO error")]
    IO(#[from] std::io::Error),
    // The archive violated one of the hardening rules: a forbidden path, an
    // entry outside the whitelist, a size/count limit exceeded, or an entry
    // the tar crate refused to unpack. The payload is the human-readable
    // reason.
    #[error("Archive error")]
    Archive(String),
}

pub type Result<T> = std::result::Result<T, UnpackError>;

// Hard caps on what a snapshot archive may expand to; anything larger is
// treated as hostile and unpacking is aborted.
const MAX_SNAPSHOT_ARCHIVE_UNPACKED_SIZE: u64 = 500 * 1024 * 1024 * 1024; // 500 GiB
const MAX_SNAPSHOT_ARCHIVE_UNPACKED_COUNT: u64 = 500_000;
// Genesis archives are tiny by construction, so their limits are much tighter.
const MAX_GENESIS_ARCHIVE_UNPACKED_SIZE: u64 = 10 * 1024 * 1024; // 10 MiB
const MAX_GENESIS_ARCHIVE_UNPACKED_COUNT: u64 = 100;
/// Add `entry_size` to the running `total_size`, failing once the accumulated
/// size would exceed `limit_size`. Saturating addition guarantees the total
/// cannot wrap around on a maliciously huge entry.
fn checked_total_size_sum(total_size: u64, entry_size: u64, limit_size: u64) -> Result<u64> {
    let new_total = total_size.saturating_add(entry_size);
    if new_total <= limit_size {
        Ok(new_total)
    } else {
        Err(UnpackError::Archive(format!(
            "too large snapshot: {:?}",
            new_total
        )))
    }
}
/// Increment the running entry count, failing once it would exceed
/// `limit_count`.
///
/// Uses `saturating_add` for consistency with `checked_total_size_sum`, so
/// the counter can never wrap in a release build (plain `+ 1` does not trap
/// on overflow there).
fn checked_total_count_increment(total_count: u64, limit_count: u64) -> Result<u64> {
    let total_count = total_count.saturating_add(1);
    if total_count > limit_count {
        return Err(UnpackError::Archive(format!(
            "too many files in snapshot: {:?}",
            total_count
        )));
    }
    Ok(total_count)
}
/// Convert the boolean returned by `Entry::unpack_in` into a `Result`,
/// reporting the offending `path` on failure.
fn check_unpack_result(unpack_result: bool, path: String) -> Result<()> {
    match unpack_result {
        true => Ok(()),
        false => Err(UnpackError::Archive(format!(
            "failed to unpack: {:?}",
            path
        ))),
    }
}
/// Walk every entry of `archive` and unpack it under `unpack_dir`, enforcing
/// three hardening rules: each path must be a plain relative path, each entry
/// must be approved by `entry_checker`, and the cumulative unpacked
/// size/count must stay within `limit_size`/`limit_count`.
fn unpack_archive<A: Read, P: AsRef<Path>, C>(
    archive: &mut Archive<A>,
    unpack_dir: P,
    limit_size: u64,
    limit_count: u64,
    entry_checker: C,
) -> Result<()>
where
    C: Fn(&[&str], tar::EntryType) -> bool,
{
    let mut total_size: u64 = 0;
    let mut total_count: u64 = 0;

    for entry in archive.entries()? {
        let mut entry = entry?;
        let path = entry.path()?;
        let path_str = path.display().to_string();

        // Although the `tar` crate safely skips at the actual unpacking, fail
        // first by ourselves when there are odd paths like including `..` or /
        // for our clearer pattern matching reasoning:
        //   https://docs.rs/tar/0.4.26/src/tar/entry.rs.html#371
        // Prefix (for Windows) and RootDir components are forbidden, so the
        // whole path collapses to None when any component is not CurDir or a
        // valid-UTF-8 Normal component.
        let parts = path
            .components()
            .map(|p| match p {
                CurDir => Some("."),
                Normal(c) => c.to_str(),
                _ => None,
            })
            .collect::<Option<Vec<_>>>();
        let parts = match parts {
            Some(parts) => parts,
            None => {
                return Err(UnpackError::Archive(format!(
                    "invalid path found: {:?}",
                    path_str
                )));
            }
        };

        if !entry_checker(parts.as_slice(), entry.header().entry_type()) {
            return Err(UnpackError::Archive(format!(
                "extra entry found: {:?}",
                path_str
            )));
        }

        total_size = checked_total_size_sum(total_size, entry.header().size()?, limit_size)?;
        total_count = checked_total_count_increment(total_count, limit_count)?;

        // unpack_in does its own sanitization
        // ref: https://docs.rs/tar/*/tar/struct.Entry.html#method.unpack_in
        check_unpack_result(entry.unpack_in(&unpack_dir)?, path_str)?;
    }
    Ok(())
}
/// Unpack a snapshot archive into `unpack_dir`, enforcing the snapshot
/// size/count limits and the snapshot entry whitelist.
pub fn unpack_snapshot<A: Read, P: AsRef<Path>>(
    archive: &mut Archive<A>,
    unpack_dir: P,
) -> Result<()> {
    let (limit_size, limit_count) = (
        MAX_SNAPSHOT_ARCHIVE_UNPACKED_SIZE,
        MAX_SNAPSHOT_ARCHIVE_UNPACKED_COUNT,
    );
    unpack_archive(
        archive,
        unpack_dir,
        limit_size,
        limit_count,
        is_valid_snapshot_archive_entry,
    )
}
/// True when `c` consists solely of ASCII digits — the shape of slot numbers
/// in snapshot paths. Replaces the former `^\d+$` regex, which was recompiled
/// on every call (this predicate runs once per archive entry). ASCII-only is
/// slightly stricter than the regex's Unicode `\d`, which is desirable for
/// validating archive contents.
fn is_all_digits(c: &str) -> bool {
    !c.is_empty() && c.bytes().all(|b| b.is_ascii_digit())
}

/// True for names of the form `<digits>.<digits>` used by account storage
/// files (replaces the former `^\d+\.\d+$` regex).
fn is_storage_file_name(c: &str) -> bool {
    let mut halves = c.splitn(2, '.');
    match (halves.next(), halves.next()) {
        (Some(id), Some(version)) => is_all_digits(id) && is_all_digits(version),
        _ => false,
    }
}

/// Whitelist for snapshot archive contents: the version file, numbered
/// account storage files, and the numbered snapshot/status-cache tree.
/// Anything else causes unpacking to abort.
fn is_valid_snapshot_archive_entry(parts: &[&str], kind: tar::EntryType) -> bool {
    trace!("validating: {:?} {:?}", parts, kind);
    match (parts, kind) {
        (["version"], Regular) => true,
        (["accounts"], Directory) => true,
        (["accounts", file], GNUSparse) if is_storage_file_name(file) => true,
        (["accounts", file], Regular) if is_storage_file_name(file) => true,
        (["snapshots"], Directory) => true,
        (["snapshots", "status_cache"], Regular) => true,
        (["snapshots", dir, file], Regular)
            if is_all_digits(dir) && is_all_digits(file) =>
        {
            true
        }
        (["snapshots", dir], Directory) if is_all_digits(dir) => true,
        _ => false,
    }
}
/// Load the genesis config from `ledger_path`. When the plain load fails,
/// fall back to unpacking `genesis.tar.bz2` from the same directory and
/// retry; exits the process when neither route succeeds.
pub fn open_genesis_config(ledger_path: &Path) -> GenesisConfig {
    match GenesisConfig::load(&ledger_path) {
        Ok(genesis_config) => genesis_config,
        Err(load_err) => {
            let genesis_package = ledger_path.join("genesis.tar.bz2");
            if let Err(unpack_err) = unpack_genesis_archive(&genesis_package, ledger_path) {
                warn!(
                    "Failed to open ledger genesis_config at {:?}: {}, {}",
                    ledger_path, load_err, unpack_err,
                );
                std::process::exit(1);
            }
            // loading must succeed at this moment
            GenesisConfig::load(&ledger_path).unwrap()
        }
    }
}
/// Extract a bzip2-compressed genesis tarball into `destination_dir`,
/// validating every entry against the genesis whitelist and limits, and
/// logging the elapsed extraction time.
pub fn unpack_genesis_archive(
    archive_filename: &Path,
    destination_dir: &Path,
) -> std::result::Result<(), String> {
    info!("Extracting {:?}...", archive_filename);
    let started_at = Instant::now();

    fs::create_dir_all(destination_dir).map_err(|err| err.to_string())?;
    let compressed = File::open(&archive_filename)
        .map_err(|err| format!("Unable to open {:?}: {:?}", archive_filename, err))?;
    let mut archive = Archive::new(BzDecoder::new(BufReader::new(compressed)));
    unpack_genesis(&mut archive, destination_dir)
        .map_err(|err| format!("Unable to unpack {:?}: {:?}", archive_filename, err))?;

    info!(
        "Extracted {:?} in {:?}",
        archive_filename,
        Instant::now().duration_since(started_at)
    );
    Ok(())
}
/// Unpack a genesis archive into `unpack_dir`, enforcing the (much tighter)
/// genesis size/count limits and entry whitelist.
fn unpack_genesis<A: Read, P: AsRef<Path>>(archive: &mut Archive<A>, unpack_dir: P) -> Result<()> {
    let (limit_size, limit_count) = (
        MAX_GENESIS_ARCHIVE_UNPACKED_SIZE,
        MAX_GENESIS_ARCHIVE_UNPACKED_COUNT,
    );
    unpack_archive(
        archive,
        unpack_dir,
        limit_size,
        limit_count,
        is_valid_genesis_archive_entry,
    )
}
fn is_valid_genesis_archive_entry(parts: &[&str], kind: tar::EntryType) -> bool {
trace!("validating: {:?} {:?}", parts, kind);
match (parts, kind) {
(["genesis.bin"], Regular) => true,
(["rocksdb"], Directory) => true,
(["rocksdb", ..], GNUSparse) => true,
(["rocksdb", ..], Regular) => true,
_ => false,
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use matches::assert_matches;
    use tar::{Builder, Header};

    // Exercises the snapshot entry whitelist: every path/type combination a
    // legitimate snapshot archive produces must pass, anything else must fail.
    #[test]
    fn test_archive_is_valid_entry() {
        assert!(is_valid_snapshot_archive_entry(
            &["accounts", "0.0"],
            tar::EntryType::Regular
        ));
        assert!(is_valid_snapshot_archive_entry(
            &["snapshots"],
            tar::EntryType::Directory
        ));
        assert!(is_valid_snapshot_archive_entry(
            &["snapshots", "3"],
            tar::EntryType::Directory
        ));
        assert!(is_valid_snapshot_archive_entry(
            &["snapshots", "3", "3"],
            tar::EntryType::Regular
        ));
        assert!(is_valid_snapshot_archive_entry(
            &["version"],
            tar::EntryType::Regular
        ));
        assert!(is_valid_snapshot_archive_entry(
            &["accounts"],
            tar::EntryType::Directory
        ));
        // Rejected: non-numeric storage/slot names, wrong entry types, and
        // entries outside the whitelist.
        assert!(!is_valid_snapshot_archive_entry(
            &["accounts", "0x0"],
            tar::EntryType::Regular
        ));
        assert!(!is_valid_snapshot_archive_entry(
            &["snapshots"],
            tar::EntryType::Regular
        ));
        assert!(!is_valid_snapshot_archive_entry(
            &["snapshots", "x0"],
            tar::EntryType::Directory
        ));
        assert!(!is_valid_snapshot_archive_entry(
            &["snapshots", "0x"],
            tar::EntryType::Directory
        ));
        assert!(!is_valid_snapshot_archive_entry(
            &["snapshots", "0", "aa"],
            tar::EntryType::Regular
        ));
        assert!(!is_valid_snapshot_archive_entry(
            &["aaaa"],
            tar::EntryType::Regular
        ));
    }

    // Same idea for the much smaller genesis whitelist.
    #[test]
    fn test_archive_is_valid_archive_entry() {
        assert!(is_valid_genesis_archive_entry(
            &["genesis.bin"],
            tar::EntryType::Regular
        ));
        assert!(is_valid_genesis_archive_entry(
            &["rocksdb"],
            tar::EntryType::Directory
        ));
        assert!(is_valid_genesis_archive_entry(
            &["rocksdb", "foo"],
            tar::EntryType::Regular
        ));
        assert!(is_valid_genesis_archive_entry(
            &["rocksdb", "foo", "bar"],
            tar::EntryType::Regular
        ));
        assert!(!is_valid_genesis_archive_entry(
            &["aaaa"],
            tar::EntryType::Regular
        ));
    }

    // Finalize an in-memory tar and run `checker` against it in a fresh
    // temporary directory.
    // NOTE(review): `temp_dir.into_path()` intentionally keeps the directory
    // from being deleted on drop, so test runs leave directories behind —
    // confirm this is deliberate.
    fn with_finalize_and_unpack<C>(archive: tar::Builder<Vec<u8>>, checker: C) -> Result<()>
    where
        C: Fn(&mut Archive<BufReader<&[u8]>>, &Path) -> Result<()>,
    {
        let data = archive.into_inner().unwrap();
        let reader = BufReader::new(&data[..]);
        let mut archive: Archive<std::io::BufReader<&[u8]>> = Archive::new(reader);
        let temp_dir = tempfile::TempDir::new().unwrap();

        checker(&mut archive, &temp_dir.into_path())
    }

    // Convenience wrappers around the two public unpack entry points.
    fn finalize_and_unpack_snapshot(archive: tar::Builder<Vec<u8>>) -> Result<()> {
        with_finalize_and_unpack(archive, |a, b| unpack_snapshot(a, b))
    }

    fn finalize_and_unpack_genesis(archive: tar::Builder<Vec<u8>>) -> Result<()> {
        with_finalize_and_unpack(archive, |a, b| unpack_genesis(a, b))
    }

    // A minimal valid snapshot archive (just a "version" file) unpacks cleanly.
    #[test]
    fn test_archive_unpack_snapshot_ok() {
        let mut header = Header::new_gnu();
        header.set_path("version").unwrap();
        header.set_size(4);
        header.set_cksum();

        let data: &[u8] = &[1, 2, 3, 4];

        let mut archive = Builder::new(Vec::new());
        archive.append(&header, data).unwrap();
        let result = finalize_and_unpack_snapshot(archive);
        assert_matches!(result, Ok(()));
    }

    // A minimal valid genesis archive (just "genesis.bin") unpacks cleanly.
    #[test]
    fn test_archive_unpack_genesis_ok() {
        let mut header = Header::new_gnu();
        header.set_path("genesis.bin").unwrap();
        header.set_size(4);
        header.set_cksum();

        let data: &[u8] = &[1, 2, 3, 4];

        let mut archive = Builder::new(Vec::new());
        archive.append(&header, data).unwrap();
        let result = finalize_and_unpack_genesis(archive);
        assert_matches!(result, Ok(()));
    }

    // A path containing `..` must be rejected up front by unpack_archive's
    // own path validation, before the tar crate's sanitization runs.
    #[test]
    fn test_archive_unpack_snapshot_invalid_path() {
        let mut header = Header::new_gnu();
        // bypass the sanitization of the .set_path()
        for (p, c) in header
            .as_old_mut()
            .name
            .iter_mut()
            .zip(b"foo/../../../dangerous".iter().chain(Some(&0)))
        {
            *p = *c;
        }
        header.set_size(4);
        header.set_cksum();

        let data: &[u8] = &[1, 2, 3, 4];

        let mut archive = Builder::new(Vec::new());
        archive.append(&header, data).unwrap();
        let result = finalize_and_unpack_snapshot(archive);
        assert_matches!(result, Err(UnpackError::Archive(ref message)) if message.to_string() == *"invalid path found: \"foo/../../../dangerous\"");
    }

    // Build an archive whose single entry carries the raw (unsanitized)
    // `path` bytes and unpack it with the tar crate directly, reporting
    // whether unpacking succeeded and landed inside the destination.
    fn with_archive_unpack_snapshot_invalid_path(path: &str) -> Result<()> {
        let mut header = Header::new_gnu();
        // bypass the sanitization of the .set_path()
        for (p, c) in header
            .as_old_mut()
            .name
            .iter_mut()
            .zip(path.as_bytes().iter().chain(Some(&0)))
        {
            *p = *c;
        }
        header.set_size(4);
        header.set_cksum();

        let data: &[u8] = &[1, 2, 3, 4];

        let mut archive = Builder::new(Vec::new());
        archive.append(&header, data).unwrap();
        with_finalize_and_unpack(archive, |unpacking_archive, path| {
            for entry in unpacking_archive.entries()? {
                // NOTE(review): `path.join(path)` joins the destination with
                // itself (an absolute path), so the second check reduces to
                // `!path.exists()` — presumably the entry's own path was
                // intended; confirm.
                if entry?.unpack_in(path)? == false {
                    return Err(UnpackError::Archive("failed!".to_string()));
                } else if !path.join(path).exists() {
                    return Err(UnpackError::Archive("not existing!".to_string()));
                }
            }
            Ok(())
        })
    }

    // Documents the tar crate behavior unpack_archive relies on: normal
    // relative paths unpack, absolute paths are neutralized as relative, and
    // `..` traversal makes unpack_in return false.
    #[test]
    fn test_archive_unpack_itself() {
        assert_matches!(
            with_archive_unpack_snapshot_invalid_path("ryoqun/work"),
            Ok(())
        );
        // Absolute paths are neutralized as relative
        assert_matches!(
            with_archive_unpack_snapshot_invalid_path("/etc/passwd"),
            Ok(())
        );
        assert_matches!(with_archive_unpack_snapshot_invalid_path("../../../dangerous"), Err(UnpackError::Archive(ref message)) if message.to_string() == "failed!");
    }

    // A well-formed path that is not on the snapshot whitelist must abort the
    // whole unpack.
    #[test]
    fn test_archive_unpack_snapshot_invalid_entry() {
        let mut header = Header::new_gnu();
        header.set_path("foo").unwrap();
        header.set_size(4);
        header.set_cksum();

        let data: &[u8] = &[1, 2, 3, 4];

        let mut archive = Builder::new(Vec::new());
        archive.append(&header, data).unwrap();
        let result = finalize_and_unpack_snapshot(archive);
        assert_matches!(result, Err(UnpackError::Archive(ref message)) if message.to_string() == *"extra entry found: \"foo\"");
    }

    // A single entry claiming 1 PiB must trip the total-size limit (the size
    // comes from the header, so no actual data is needed).
    #[test]
    fn test_archive_unpack_snapshot_too_large() {
        let mut header = Header::new_gnu();
        header.set_path("version").unwrap();
        header.set_size(1 * 1024 * 1024 * 1024 * 1024 * 1024);
        header.set_cksum();

        let data: &[u8] = &[1, 2, 3, 4];

        let mut archive = Builder::new(Vec::new());
        archive.append(&header, data).unwrap();
        let result = finalize_and_unpack_snapshot(archive);
        assert_matches!(result, Err(UnpackError::Archive(ref message)) if message.to_string() == *"too large snapshot: 1125899906842624");
    }

    #[test]
    fn test_archive_unpack_snapshot_bad_unpack() {
        let result = check_unpack_result(false, "abc".to_string());
        assert_matches!(result, Err(UnpackError::Archive(ref message)) if message.to_string() == *"failed to unpack: \"abc\"");
    }

    // (u64::MAX - 2) + 2 lands exactly on u64::MAX, which exceeds the 500 GiB
    // limit and must error rather than wrap.
    #[test]
    fn test_archive_checked_total_size_sum() {
        let result = checked_total_size_sum(500, 500, MAX_SNAPSHOT_ARCHIVE_UNPACKED_SIZE);
        assert_matches!(result, Ok(1000));

        let result =
            checked_total_size_sum(u64::max_value() - 2, 2, MAX_SNAPSHOT_ARCHIVE_UNPACKED_SIZE);
        assert_matches!(result, Err(UnpackError::Archive(ref message)) if message.to_string() == *"too large snapshot: 18446744073709551615");
    }

    #[test]
    fn test_archive_checked_total_size_count() {
        let result = checked_total_count_increment(101, MAX_SNAPSHOT_ARCHIVE_UNPACKED_COUNT);
        assert_matches!(result, Ok(102));

        let result =
            checked_total_count_increment(999_999_999_999, MAX_SNAPSHOT_ARCHIVE_UNPACKED_COUNT);
        assert_matches!(result, Err(UnpackError::Archive(ref message)) if message.to_string() == *"too many files in snapshot: 1000000000000");
    }
}

View File

@ -9,6 +9,7 @@ pub mod blockstore_processor;
pub mod entry;
pub mod erasure;
pub mod genesis_utils;
pub mod hardened_unpack;
pub mod leader_schedule;
pub mod leader_schedule_cache;
pub mod leader_schedule_utils;

View File

@ -1,3 +1,4 @@
use crate::hardened_unpack::{unpack_snapshot, UnpackError};
use crate::snapshot_package::SnapshotPackage;
use bincode::serialize_into;
use bzip2::bufread::BzDecoder;
@ -54,6 +55,9 @@ pub enum SnapshotError {
#[error("storage path symlink is invalid")]
StoragePathSymlinkInvalid,
#[error("Unpack error")]
UnpackError(#[from] UnpackError),
}
pub type Result<T> = std::result::Result<T, SnapshotError>;
@ -558,7 +562,7 @@ pub fn untar_snapshot_in<P: AsRef<Path>, Q: AsRef<Path>>(
let tar = BzDecoder::new(BufReader::new(tar_bz2));
let mut archive = Archive::new(tar);
if !is_snapshot_compression_disabled() {
archive.unpack(&unpack_dir)?;
unpack_snapshot(&mut archive, unpack_dir)?;
} else if let Err(e) = archive.unpack(&unpack_dir) {
warn!(
"Trying to unpack as uncompressed tar because an error occurred: {:?}",
@ -567,7 +571,7 @@ pub fn untar_snapshot_in<P: AsRef<Path>, Q: AsRef<Path>>(
let tar_bz2 = File::open(snapshot_tar)?;
let tar = BufReader::new(tar_bz2);
let mut archive = Archive::new(tar);
archive.unpack(&unpack_dir)?;
unpack_snapshot(&mut archive, unpack_dir)?;
}
measure.stop();
info!("{}", measure);

2
run.sh
View File

@ -60,7 +60,7 @@ else
solana-keygen new --no-passphrase -fso "$faucet"
fi
if [[ -e "$ledgerDir"/genesis.bin ]]; then
if [[ -e "$ledgerDir"/genesis.bin || -e "$ledgerDir"/genesis.tar.bz2 ]]; then
echo "Use existing genesis"
else
solana-genesis \

View File

@ -18,11 +18,12 @@ use solana_core::{
rpc::JsonRpcConfig,
validator::{Validator, ValidatorConfig},
};
use solana_download_utils::{download_genesis, download_snapshot};
use solana_ledger::bank_forks::SnapshotConfig;
use solana_download_utils::{download_genesis_if_missing, download_snapshot};
use solana_ledger::{bank_forks::SnapshotConfig, hardened_unpack::unpack_genesis_archive};
use solana_perf::recycler::enable_recycler_warming;
use solana_sdk::{
clock::Slot,
genesis_config::GenesisConfig,
hash::Hash,
pubkey::Pubkey,
signature::{Keypair, Signer},
@ -349,6 +350,53 @@ fn hardforks_of(matches: &ArgMatches<'_>, name: &str) -> Option<Vec<Slot>> {
}
}
/// Verify that `genesis_config` hashes to `expected_genesis_hash` when one
/// was supplied; passing `None` skips the check.
fn check_genesis_hash(
    genesis_config: &GenesisConfig,
    expected_genesis_hash: Option<Hash>,
) -> Result<(), String> {
    let genesis_hash = genesis_config.hash();
    match expected_genesis_hash {
        Some(expected) if expected != genesis_hash => Err(format!(
            "Genesis hash mismatch: expected {} but downloaded genesis hash is {}",
            expected, genesis_hash,
        )),
        _ => Ok(()),
    }
}
/// Obtain the genesis config for `ledger_path`, downloading the archive from
/// `rpc_addr` when it is not already present, and verify its hash against
/// `expected_genesis_hash` when supplied. Returns the genesis hash.
fn download_then_check_genesis_hash(
    rpc_addr: &SocketAddr,
    ledger_path: &std::path::Path,
    expected_genesis_hash: Option<Hash>,
) -> Result<Hash, String> {
    let genesis_package = ledger_path.join("genesis.tar.bz2");

    let genesis_config = match download_genesis_if_missing(rpc_addr, &genesis_package) {
        Ok(tmp_genesis_package) => {
            // Freshly downloaded: unpack and validate first, then move the
            // archive into place so the next start-up skips the download.
            unpack_genesis_archive(&tmp_genesis_package, &ledger_path)?;
            let downloaded_genesis = GenesisConfig::load(&ledger_path)
                .map_err(|err| format!("Failed to load downloaded genesis config: {}", err))?;
            check_genesis_hash(&downloaded_genesis, expected_genesis_hash)?;
            std::fs::rename(tmp_genesis_package, genesis_package)
                .map_err(|err| format!("Unable to rename: {:?}", err))?;
            downloaded_genesis
        }
        Err(_) => {
            // Archive already on disk: just load and validate the existing
            // genesis config.
            let existing_genesis = GenesisConfig::load(&ledger_path)
                .map_err(|err| format!("Failed to load genesis config: {}", err))?;
            check_genesis_hash(&existing_genesis, expected_genesis_hash)?;
            existing_genesis
        }
    };

    Ok(genesis_config.hash())
}
#[allow(clippy::cognitive_complexity)]
pub fn main() {
let default_dynamic_port_range =
@ -1016,7 +1064,7 @@ pub fn main() {
Err(err) => Err(format!("Failed to get RPC node version: {}", err)),
}
.and_then(|_| {
let genesis_hash = download_genesis(
let genesis_hash = download_then_check_genesis_hash(
&rpc_contact_info.rpc,
&ledger_path,
validator_config.expected_genesis_hash,