hash account state on store (#5573)

sakridge 2019-09-20 13:21:12 -07:00 committed by GitHub
parent 5dd3a07a23
commit 19ae556857
14 changed files with 484 additions and 54 deletions

Cargo.lock (generated)
View File

@ -3601,6 +3601,7 @@ dependencies = [
"num-derive 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
"num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rayon 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)",

View File

@ -236,26 +236,30 @@ fn main() {
assert!(txs_processed < bank.transaction_count());
txs_processed = bank.transaction_count();
tx_total += duration_as_us(&now.elapsed());
let mut new_bank_time = Measure::start("new_bank");
new_bank_time.stop();
let mut insert_time = Measure::start("insert_time");
insert_time.stop();
let mut poh_time = Measure::start("poh_time");
poh_recorder.lock().unwrap().reset(
bank.last_blockhash(),
bank.slot(),
Some((bank.slot(), bank.slot() + 1)),
);
poh_time.stop();
let mut new_bank_time = Measure::start("new_bank");
let new_bank = Bank::new_from_parent(&bank, &collector, bank.slot() + 1);
new_bank_time.stop();
let mut insert_time = Measure::start("insert_time");
bank_forks.insert(new_bank);
bank = bank_forks.working_bank();
insert_time.stop();
poh_recorder.lock().unwrap().set_bank(&bank);
assert!(poh_recorder.lock().unwrap().bank().is_some());
if bank.slot() > 32 {
bank_forks.set_root(root, &None);
root += 1;
}
poh_time.stop();
debug!(
"new_bank_time: {}us insert_time: {}us poh_time: {}us",
new_bank_time.as_us(),
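
The timings above come from solana_measure's Measure helper. For readers unfamiliar with it, the pattern the bench relies on is simply start, do the work, stop, then read microseconds; a minimal sketch (the label string is arbitrary):

use solana_measure::measure::Measure;

fn main() {
    let mut poh_time = Measure::start("poh_time");
    // ... the work being timed, e.g. resetting the PoH recorder ...
    poh_time.stop();
    println!("poh_time: {}us", poh_time.as_us());
}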

View File

@ -220,6 +220,12 @@ pub fn bank_from_archive<P: AsRef<Path>>(
let snapshot_paths = get_snapshot_paths(&unpacked_snapshots_dir);
let bank = rebuild_bank_from_snapshots(account_paths, &snapshot_paths, unpacked_accounts_dir)?;
if !bank.verify_hash_internal_state() {
warn!("Invalid snapshot hash value!");
} else {
info!("Snapshot hash value matches.");
}
// Move the unpacked snapshots into `snapshot_config.snapshot_path`
let dir_files = fs::read_dir(&unpacked_snapshots_dir).unwrap_or_else(|err| {
panic!(
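
Note that the loader above only logs when the recomputed accounts hash disagrees with the stored one. A caller that would rather reject such a snapshot outright could wrap the same check, roughly as follows (check_snapshot_bank is a hypothetical helper, not part of this change):

use solana_runtime::bank::Bank;
use std::io::{Error, ErrorKind, Result};

// Hypothetical helper: turn the warn!() above into a hard error.
fn check_snapshot_bank(bank: &Bank) -> Result<()> {
    if bank.verify_hash_internal_state() {
        Ok(())
    } else {
        Err(Error::new(
            ErrorKind::InvalidData,
            "snapshot accounts hash mismatch",
        ))
    }
}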

View File

@ -1462,6 +1462,7 @@ dependencies = [
"num-derive 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
"num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rayon 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)",

View File

@ -4,6 +4,7 @@ extern crate test;
use rand::{thread_rng, Rng};
use solana_runtime::append_vec::test_utils::{create_test_account, get_append_vec_path};
use solana_runtime::append_vec::AppendVec;
use solana_sdk::hash::Hash;
use std::sync::{Arc, Mutex};
use std::thread::sleep;
use std::thread::spawn;
@ -16,7 +17,10 @@ fn append_vec_append(bencher: &mut Bencher) {
let vec = AppendVec::new(&path.path, true, 64 * 1024);
bencher.iter(|| {
let (meta, account) = create_test_account(0);
if vec.append_account(meta, &account).is_none() {
if vec
.append_account(meta, &account, Hash::default())
.is_none()
{
vec.reset();
}
});
@ -27,7 +31,8 @@ fn add_test_accounts(vec: &AppendVec, size: usize) -> Vec<(usize, usize)> {
.into_iter()
.filter_map(|sample| {
let (meta, account) = create_test_account(sample);
vec.append_account(meta, &account).map(|pos| (sample, pos))
vec.append_account(meta, &account, Hash::default())
.map(|pos| (sample, pos))
})
.collect()
}
@ -71,7 +76,7 @@ fn append_vec_concurrent_append_read(bencher: &mut Bencher) {
spawn(move || loop {
let sample = indexes1.lock().unwrap().len();
let (meta, account) = create_test_account(sample);
if let Some(pos) = vec1.append_account(meta, &account) {
if let Some(pos) = vec1.append_account(meta, &account, Hash::default()) {
indexes1.lock().unwrap().push((sample, pos))
} else {
break;
@ -116,7 +121,7 @@ fn append_vec_concurrent_read_append(bencher: &mut Bencher) {
bencher.iter(|| {
let sample: usize = thread_rng().gen_range(0, 256);
let (meta, account) = create_test_account(sample);
if let Some(pos) = vec.append_account(meta, &account) {
if let Some(pos) = vec.append_account(meta, &account, Hash::default()) {
indexes.lock().unwrap().push((sample, pos))
}
});

View File

@ -4,17 +4,15 @@ use crate::append_vec::StoredAccount;
use crate::blockhash_queue::BlockhashQueue;
use crate::message_processor::has_duplicates;
use crate::rent_collector::RentCollector;
use bincode::serialize;
use log::*;
use rayon::slice::ParallelSliceMut;
use solana_metrics::inc_new_counter_error;
use solana_sdk::account::Account;
use solana_sdk::hash::{Hash, Hasher};
use solana_sdk::bank_hash::BankHash;
use solana_sdk::message::Message;
use solana_sdk::native_loader;
use solana_sdk::pubkey::Pubkey;
use solana_sdk::system_program;
use solana_sdk::sysvar;
use solana_sdk::transaction::Result;
use solana_sdk::transaction::{Transaction, TransactionError};
use std::collections::{HashMap, HashSet};
@ -69,8 +67,9 @@ impl Accounts {
credit_only_account_locks: Arc::new(RwLock::new(Some(HashMap::new()))),
}
}
pub fn new_from_parent(parent: &Accounts) -> Self {
pub fn new_from_parent(parent: &Accounts, slot: Fork, parent_slot: Fork) -> Self {
let accounts_db = parent.accounts_db.clone();
accounts_db.set_hash(slot, parent_slot);
Accounts {
accounts_db,
account_locks: Mutex::new(HashSet::new()),
@ -317,6 +316,10 @@ impl Accounts {
})
}
pub fn verify_hash_internal_state(&self, fork: Fork, ancestors: &HashMap<Fork, usize>) -> bool {
self.accounts_db.verify_hash_internal_state(fork, ancestors)
}
pub fn load_by_program(
&self,
ancestors: &HashMap<Fork, usize>,
@ -450,30 +453,13 @@ impl Accounts {
}
}
fn hash_account(stored_account: &StoredAccount) -> Hash {
let mut hasher = Hasher::default();
hasher.hash(&serialize(&stored_account.balance).unwrap());
hasher.hash(stored_account.data);
hasher.result()
}
pub fn hash_internal_state(&self, fork_id: Fork) -> Option<Hash> {
let account_hashes = self.scan_fork(fork_id, |stored_account| {
if !sysvar::check_id(&stored_account.balance.owner) {
Some(Self::hash_account(stored_account))
} else {
None
}
});
if account_hashes.is_empty() {
None
pub fn hash_internal_state(&self, fork_id: Fork) -> Option<BankHash> {
let fork_hashes = self.accounts_db.fork_hashes.read().unwrap();
let fork_hash = fork_hashes.get(&fork_id)?;
if fork_hash.0 {
Some(fork_hash.1)
} else {
let mut hasher = Hasher::default();
for hash in account_hashes {
hasher.hash(hash.as_ref());
}
Some(hasher.result())
None
}
}
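
The Option returned above is driven by the initialized flag kept per fork in AccountsDB: new_from_parent seeds a child fork with its parent's BankHash via set_hash but leaves the flag false until the fork stores accounts of its own. A minimal sketch of that flag behavior, assuming AccountsDB::default() gives a usable in-memory instance as in the runtime's unit tests (the fork numbers are arbitrary):

use solana_runtime::accounts_db::AccountsDB;
use solana_sdk::bank_hash::BankHash;
use solana_sdk::hash::hash;

fn main() {
    let db = AccountsDB::default();

    // Storing (hashing) something on fork 0 initializes its BankHash.
    db.xor_in_hash_state(0, BankHash::from_hash(&hash(&[1, 2, 3])));
    assert!(db.fork_hashes.read().unwrap().get(&0).unwrap().0);

    // set_hash (called from Accounts::new_from_parent) copies the parent's
    // hash to the child fork but marks it uninitialized, so
    // hash_internal_state(1) would still return None.
    db.set_hash(1, 0);
    let forks = db.fork_hashes.read().unwrap();
    assert!(!forks.get(&1).unwrap().0);
    assert_eq!(forks.get(&1).unwrap().1, forks.get(&0).unwrap().1);
}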

View File

@ -21,6 +21,7 @@
use crate::accounts_index::{AccountsIndex, Fork};
use crate::append_vec::{AppendVec, StorageMeta, StoredAccount};
use bincode::{deserialize_from, serialize_into};
use byteorder::{ByteOrder, LittleEndian};
use fs_extra::dir::CopyOptions;
use log::*;
use rand::{thread_rng, Rng};
@ -32,7 +33,10 @@ use serde::{Deserialize, Serialize};
use solana_measure::measure::Measure;
use solana_rayon_threadlimit::get_thread_count;
use solana_sdk::account::Account;
use solana_sdk::bank_hash::BankHash;
use solana_sdk::hash::{Hash, Hasher};
use solana_sdk::pubkey::Pubkey;
use solana_sdk::sysvar;
use std::collections::{HashMap, HashSet};
use std::fmt;
use std::io::{BufReader, Cursor, Error as IOError, ErrorKind, Read, Result as IOResult};
@ -333,6 +337,8 @@ impl<'a> Serialize for AccountsDBSerialize<'a> {
let account_storage_serialize = AccountStorageSerialize::new(&*storage, self.slot);
serialize_into(&mut wr, &account_storage_serialize).map_err(Error::custom)?;
serialize_into(&mut wr, &version).map_err(Error::custom)?;
let fork_hashes = self.accounts_db.fork_hashes.read().unwrap();
serialize_into(&mut wr, &*fork_hashes).map_err(Error::custom)?;
let len = wr.position() as usize;
serializer.serialize_bytes(&wr.into_inner()[..len])
}
@ -365,7 +371,12 @@ pub struct AccountsDB {
/// Thread pool used for par_iter
pub thread_pool: ThreadPool,
/// Number of append vecs to create to maximize parallelism when scanning
/// the accounts
min_num_stores: usize,
/// fork to BankHash and a status flag to indicate if the hash has been initialized or not
pub fork_hashes: RwLock<HashMap<Fork, (bool, BankHash)>>,
}
impl Default for AccountsDB {
@ -385,6 +396,7 @@ impl Default for AccountsDB {
.build()
.unwrap(),
min_num_stores: num_threads,
fork_hashes: RwLock::new(HashMap::default()),
}
}
}
@ -489,6 +501,10 @@ impl AccountsDB {
let version: u64 = deserialize_from(&mut stream)
.map_err(|_| AccountsDB::get_io_error("write version deserialize error"))?;
let fork_hashes: HashMap<Fork, (bool, BankHash)> = deserialize_from(&mut stream)
.map_err(|_| AccountsDB::get_io_error("fork hashes deserialize error"))?;
*self.fork_hashes.write().unwrap() = fork_hashes;
// Process deserialized data, set necessary fields in self
*self.paths.write().unwrap() = local_account_paths;
let max_id: usize = *storage
@ -500,12 +516,6 @@ impl AccountsDB {
{
let mut stores = self.storage.write().unwrap();
/*if let Some((_, store0)) = storage.0.remove_entry(&0) {
let fork_storage0 = stores.0.entry(0).or_insert_with(HashMap::new);
for (id, store) in store0.iter() {
fork_storage0.insert(*id, store.clone());
}
}*/
stores.0.extend(storage.0);
}
@ -599,6 +609,14 @@ impl AccountsDB {
})
}
pub fn set_hash(&self, slot: Fork, parent_slot: Fork) {
let mut fork_hashes = self.fork_hashes.write().unwrap();
let hash = *fork_hashes
.get(&parent_slot)
.expect("accounts_db::set_hash::no parent slot");
fork_hashes.insert(slot, (false, hash.1));
}
pub fn load(
storage: &AccountStorage,
ancestors: &HashMap<Fork, usize>,
@ -702,7 +720,42 @@ impl AccountsDB {
}
}
fn store_accounts(&self, fork_id: Fork, accounts: &[(&Pubkey, &Account)]) -> Vec<AccountInfo> {
pub fn hash_stored_account(fork: Fork, account: &StoredAccount) -> Hash {
Self::hash_account_data(
fork,
account.balance.lamports,
account.data,
&account.meta.pubkey,
)
}
pub fn hash_account(fork: Fork, account: &Account, pubkey: &Pubkey) -> Hash {
Self::hash_account_data(fork, account.lamports, &account.data, pubkey)
}
pub fn hash_account_data(fork: Fork, lamports: u64, data: &[u8], pubkey: &Pubkey) -> Hash {
let mut hasher = Hasher::default();
let mut buf = [0u8; 8];
LittleEndian::write_u64(&mut buf[..], lamports);
hasher.hash(&buf);
LittleEndian::write_u64(&mut buf[..], fork);
hasher.hash(&buf);
hasher.hash(&data);
hasher.hash(&pubkey.as_ref());
hasher.result()
}
fn store_accounts(
&self,
fork_id: Fork,
accounts: &[(&Pubkey, &Account)],
hashes: &[Hash],
) -> Vec<AccountInfo> {
let with_meta: Vec<(StorageMeta, &Account)> = accounts
.iter()
.map(|(pubkey, account)| {
@ -724,7 +777,9 @@ impl AccountsDB {
let mut infos: Vec<AccountInfo> = vec![];
while infos.len() < with_meta.len() {
let storage = self.find_storage_candidate(fork_id);
let rvs = storage.accounts.append_accounts(&with_meta[infos.len()..]);
let rvs = storage
.accounts
.append_accounts(&with_meta[infos.len()..], &hashes);
if rvs.is_empty() {
storage.set_status(AccountStorageStatus::Full);
@ -749,6 +804,40 @@ impl AccountsDB {
infos
}
pub fn verify_hash_internal_state(&self, fork: Fork, ancestors: &HashMap<Fork, usize>) -> bool {
let mut hash_state = BankHash::default();
let hashes: Vec<_> = self.scan_accounts(
ancestors,
|collector: &mut Vec<BankHash>, option: Option<(&Pubkey, Account, Fork)>| {
if let Some((pubkey, account, fork)) = option {
if !sysvar::check_id(&account.owner) {
let hash = BankHash::from_hash(&Self::hash_account(fork, &account, pubkey));
debug!("xoring..{} key: {}", hash, pubkey);
collector.push(hash);
}
}
},
);
for hash in hashes {
hash_state.xor(hash);
}
let fork_hashes = self.fork_hashes.read().unwrap();
if let Some((_, state)) = fork_hashes.get(&fork) {
hash_state == *state
} else {
false
}
}
pub fn xor_in_hash_state(&self, fork_id: Fork, hash: BankHash) {
let mut fork_hashes = self.fork_hashes.write().unwrap();
let fork_hash_state = fork_hashes
.entry(fork_id)
.or_insert((false, BankHash::default()));
fork_hash_state.1.xor(hash);
fork_hash_state.0 = true;
}
fn update_index(
&self,
fork_id: Fork,
@ -820,10 +909,44 @@ impl AccountsDB {
}
}
fn hash_accounts(&self, fork_id: Fork, accounts: &[(&Pubkey, &Account)]) -> Vec<Hash> {
let mut hashes = vec![];
let mut hash_state = BankHash::default();
let mut had_account = false;
for (pubkey, account) in accounts {
if !sysvar::check_id(&account.owner) {
let hash = BankHash::from_hash(&account.hash);
let new_hash = Self::hash_account(fork_id, account, pubkey);
let new_bank_hash = BankHash::from_hash(&new_hash);
debug!(
"hash_accounts: key: {} xor {} current: {}",
pubkey, hash, hash_state
);
if !had_account {
hash_state = hash;
had_account = true;
} else {
hash_state.xor(hash);
}
hash_state.xor(new_bank_hash);
hashes.push(new_hash);
} else {
hashes.push(Hash::default());
}
}
if had_account {
self.xor_in_hash_state(fork_id, hash_state);
}
hashes
}
/// Store the account update.
pub fn store(&self, fork_id: Fork, accounts: &[(&Pubkey, &Account)]) {
let hashes = self.hash_accounts(fork_id, accounts);
let mut store_accounts = Measure::start("store::store_accounts");
let infos = self.store_accounts(fork_id, accounts);
let infos = self.store_accounts(fork_id, accounts, &hashes);
store_accounts.stop();
let mut update_index = Measure::start("store::update_index");
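
hash_accounts above maintains the per-fork BankHash incrementally: the hash an account carried from its previous store is xor'ed out and the hash of its new state is xor'ed in, so the accumulator always reflects the latest version of every account. A sketch of just that arithmetic, using the public hash_account and BankHash types (account values and the fork number are illustrative; the real code also tracks the initialized flag and skips sysvar accounts):

use solana_runtime::accounts_db::AccountsDB;
use solana_sdk::account::Account;
use solana_sdk::bank_hash::BankHash;
use solana_sdk::pubkey::Pubkey;

fn main() {
    let fork: u64 = 1;
    let key = Pubkey::new_rand();
    let owner = Pubkey::default();
    let v1 = Account::new(10, 0, &owner);
    let v2 = Account::new(20, 0, &owner);

    let mut fork_state = BankHash::default();

    // First store of `key`: the incoming account.hash is Hash::default(),
    // whose BankHash expansion is all zeroes, so only the new hash lands.
    let h1 = BankHash::from_hash(&AccountsDB::hash_account(fork, &v1, &key));
    fork_state.xor(h1);

    // Second store of the same key: xor out the old hash, xor in the new one.
    let h2 = BankHash::from_hash(&AccountsDB::hash_account(fork, &v2, &key));
    fork_state.xor(h1);
    fork_state.xor(h2);

    // Only the latest version of the account remains in the accumulator.
    assert_eq!(fork_state, h2);
}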

View File

@ -1,7 +1,7 @@
use bincode::{deserialize_from, serialize_into, serialized_size};
use memmap::MmapMut;
use serde::{Deserialize, Serialize};
use solana_sdk::{account::Account, clock::Epoch, pubkey::Pubkey};
use solana_sdk::{account::Account, clock::Epoch, hash::Hash, pubkey::Pubkey};
use std::fmt;
use std::fs::{create_dir_all, remove_file, OpenOptions};
use std::io;
@ -50,6 +50,7 @@ pub struct StoredAccount<'a> {
pub balance: &'a AccountBalance,
pub data: &'a [u8],
pub offset: usize,
pub hash: &'a Hash,
}
impl<'a> StoredAccount<'a> {
@ -60,6 +61,7 @@ impl<'a> StoredAccount<'a> {
executable: self.balance.executable,
rent_epoch: self.balance.rent_epoch,
data: self.data.to_vec(),
hash: *self.hash,
}
}
}
@ -243,6 +245,7 @@ impl AppendVec {
pub fn get_account<'a>(&'a self, offset: usize) -> Option<(StoredAccount<'a>, usize)> {
let (meta, next): (&'a StorageMeta, _) = self.get_type(offset)?;
let (balance, next): (&'a AccountBalance, _) = self.get_type(next)?;
let (hash, next): (&'a Hash, _) = self.get_type(next)?;
let (data, next) = self.get_slice(next, meta.data_len as usize)?;
Some((
StoredAccount {
@ -250,6 +253,7 @@ impl AppendVec {
balance,
data,
offset,
hash,
},
next,
))
@ -274,10 +278,14 @@ impl AppendVec {
}
#[allow(clippy::mutex_atomic)]
pub fn append_accounts(&self, accounts: &[(StorageMeta, &Account)]) -> Vec<usize> {
pub fn append_accounts(
&self,
accounts: &[(StorageMeta, &Account)],
hashes: &[Hash],
) -> Vec<usize> {
let mut offset = self.append_offset.lock().unwrap();
let mut rv = vec![];
for (storage_meta, account) in accounts {
for ((storage_meta, account), hash) in accounts.iter().zip(hashes) {
let meta_ptr = storage_meta as *const StorageMeta;
let balance = AccountBalance {
lamports: account.lamports,
@ -288,9 +296,11 @@ impl AppendVec {
let balance_ptr = &balance as *const AccountBalance;
let data_len = storage_meta.data_len as usize;
let data_ptr = account.data.as_ptr();
let hash_ptr = hash.as_ref().as_ptr();
let ptrs = [
(meta_ptr as *const u8, mem::size_of::<StorageMeta>()),
(balance_ptr as *const u8, mem::size_of::<AccountBalance>()),
(hash_ptr as *const u8, mem::size_of::<Hash>()),
(data_ptr, data_len),
];
if let Some(res) = self.append_ptrs_locked(&mut offset, &ptrs) {
@ -302,14 +312,19 @@ impl AppendVec {
rv
}
pub fn append_account(&self, storage_meta: StorageMeta, account: &Account) -> Option<usize> {
self.append_accounts(&[(storage_meta, account)])
pub fn append_account(
&self,
storage_meta: StorageMeta,
account: &Account,
hash: Hash,
) -> Option<usize> {
self.append_accounts(&[(storage_meta, account)], &[hash])
.first()
.cloned()
}
pub fn append_account_test(&self, data: &(StorageMeta, Account)) -> Option<usize> {
self.append_account(data.0.clone(), &data.1)
self.append_account(data.0.clone(), &data.1, Hash::default())
}
}
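
Each record in the append vec is now laid out as StorageMeta, AccountBalance, the 32-byte account Hash, and then the data, and get_account hands the hash back as part of StoredAccount. A sketch of the round trip, reusing the test_utils helpers from the bench earlier in this diff (the fork number and hash inputs are arbitrary here):

use solana_runtime::accounts_db::AccountsDB;
use solana_runtime::append_vec::test_utils::{create_test_account, get_append_vec_path};
use solana_runtime::append_vec::AppendVec;
use solana_sdk::pubkey::Pubkey;

fn main() {
    let path = get_append_vec_path("hash_round_trip");
    let av = AppendVec::new(&path.path, true, 64 * 1024);
    let (meta, account) = create_test_account(128);

    // Any hash can be persisted; production code passes AccountsDB::hash_account().
    let hash = AccountsDB::hash_account(0, &account, &Pubkey::new_rand());

    let offset = av.append_account(meta, &account, hash).unwrap();
    let (stored, _next) = av.get_account(offset).unwrap();
    assert_eq!(*stored.hash, hash);
    assert_eq!(stored.clone_account().hash, hash);
}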

View File

@ -285,7 +285,11 @@ impl Bank {
assert_ne!(slot, parent.slot());
let rc = BankRc {
accounts: Arc::new(Accounts::new_from_parent(&parent.rc.accounts)),
accounts: Arc::new(Accounts::new_from_parent(
&parent.rc.accounts,
slot,
parent.slot(),
)),
parent: RwLock::new(Some(parent.clone())),
slot,
};
@ -1338,6 +1342,14 @@ impl Bank {
}
}
/// Recalculate the hash_internal_state from the account stores. Would be used to verify a
/// snapshot.
pub fn verify_hash_internal_state(&self) -> bool {
self.rc
.accounts
.verify_hash_internal_state(self.slot(), &self.ancestors)
}
/// Return the number of ticks per slot
pub fn ticks_per_slot(&self) -> u64 {
self.ticks_per_slot
@ -1807,6 +1819,7 @@ mod tests {
#[test]
fn test_transfer_to_newb() {
solana_logger::setup();
let (genesis_block, mint_keypair) = create_genesis_block(10_000);
let bank = Bank::new(&genesis_block);
let pubkey = Pubkey::new_rand();
@ -2316,6 +2329,55 @@ mod tests {
// Checkpointing should not change its state
let bank2 = new_from_parent(&Arc::new(bank1));
assert_eq!(bank0.hash_internal_state(), bank2.hash_internal_state());
let pubkey2 = Pubkey::new_rand();
info!("transfer 2 {}", pubkey2);
bank2.transfer(10, &mint_keypair, &pubkey2).unwrap();
assert!(bank2.verify_hash_internal_state());
}
#[test]
fn test_bank_hash_internal_state_verify() {
solana_logger::setup();
let (genesis_block, mint_keypair) = create_genesis_block(2_000);
let bank0 = Bank::new(&genesis_block);
let pubkey = Pubkey::new_rand();
info!("transfer 0 {} mint: {}", pubkey, mint_keypair.pubkey());
bank0.transfer(1_000, &mint_keypair, &pubkey).unwrap();
let bank0_state = bank0.hash_internal_state();
// Checkpointing should not change its state
let bank2 = new_from_parent(&Arc::new(bank0));
assert_eq!(bank0_state, bank2.hash_internal_state());
let pubkey2 = Pubkey::new_rand();
info!("transfer 2 {}", pubkey2);
bank2.transfer(10, &mint_keypair, &pubkey2).unwrap();
assert!(bank2.verify_hash_internal_state());
}
// Test that two bank forks with the same accounts should not hash to the same value.
#[test]
fn test_bank_hash_internal_state_same_account_different_fork() {
solana_logger::setup();
let (genesis_block, mint_keypair) = create_genesis_block(2_000);
let bank0 = Arc::new(Bank::new(&genesis_block));
let initial_state = bank0.hash_internal_state();
let bank1 = Bank::new_from_parent(&bank0.clone(), &Pubkey::default(), 1);
assert_eq!(bank1.hash_internal_state(), initial_state);
info!("transfer bank1");
let pubkey = Pubkey::new_rand();
bank1.transfer(1_000, &mint_keypair, &pubkey).unwrap();
assert_ne!(bank1.hash_internal_state(), initial_state);
info!("transfer bank2");
// bank2 should not hash the same as bank1
let bank2 = Bank::new_from_parent(&bank0, &Pubkey::default(), 2);
bank2.transfer(1_000, &mint_keypair, &pubkey).unwrap();
assert_ne!(bank2.hash_internal_state(), initial_state);
assert_ne!(bank1.hash_internal_state(), bank2.hash_internal_state());
}
#[test]
@ -2325,6 +2387,44 @@ mod tests {
assert_ne!(bank0.hash_internal_state(), bank1.hash_internal_state());
}
// See that the order of two transfers does not affect the result
// of hash_internal_state
#[test]
fn test_hash_internal_state_order() {
let (genesis_block, mint_keypair) = create_genesis_block(100);
let bank0 = Bank::new(&genesis_block);
let bank1 = Bank::new(&genesis_block);
assert_eq!(bank0.hash_internal_state(), bank1.hash_internal_state());
let key0 = Pubkey::new_rand();
let key1 = Pubkey::new_rand();
bank0.transfer(10, &mint_keypair, &key0).unwrap();
bank0.transfer(20, &mint_keypair, &key1).unwrap();
bank1.transfer(20, &mint_keypair, &key1).unwrap();
bank1.transfer(10, &mint_keypair, &key0).unwrap();
assert_eq!(bank0.hash_internal_state(), bank1.hash_internal_state());
}
#[test]
fn test_hash_internal_state_error() {
solana_logger::setup();
let (genesis_block, mint_keypair) = create_genesis_block(100);
let bank = Bank::new(&genesis_block);
let key0 = Pubkey::new_rand();
bank.transfer(10, &mint_keypair, &key0).unwrap();
let orig = bank.hash_internal_state();
// Transfer will error but still take a fee
assert!(bank.transfer(1000, &mint_keypair, &key0).is_err());
assert_ne!(orig, bank.hash_internal_state());
let orig = bank.hash_internal_state();
let empty_keypair = Keypair::new();
assert!(bank.transfer(1000, &empty_keypair, &key0).is_err());
assert_eq!(orig, bank.hash_internal_state());
}
#[test]
fn test_bank_hash_internal_state_squash() {
let collector_id = Pubkey::default();

View File

@ -18,6 +18,7 @@ default = [
"chrono",
"memmap",
"rand",
"rand_chacha",
"rayon",
"serde_json",
"solana-ed25519-dalek",
@ -40,6 +41,7 @@ memmap = { version = "0.6.2", optional = true }
num-derive = { version = "0.2" }
num-traits = { version = "0.2" }
rand = { version = "0.6.5", optional = true }
rand_chacha = { version = "0.1.1", optional = true }
rayon = { version = "1.2.0", optional = true }
serde = "1.0.101"
serde_derive = "1.0.101"

View File

@ -1,9 +1,10 @@
use crate::hash::Hash;
use crate::{clock::Epoch, pubkey::Pubkey};
use std::{cmp, fmt};
/// An Account with data that is stored on chain
#[repr(C)]
#[derive(Serialize, Deserialize, Clone, Default, Eq, PartialEq)]
#[derive(Serialize, Deserialize, Clone, Default)]
pub struct Account {
/// lamports in the account
pub lamports: u64,
@ -15,8 +16,26 @@ pub struct Account {
pub executable: bool,
/// the epoch at which this account will next owe rent
pub rent_epoch: Epoch,
/// Hash of this account's state; serialization is skipped so it is not exposed to the external API.
/// Used for keeping the accounts state hash updated.
#[serde(skip_serializing, skip_deserializing)]
pub hash: Hash,
}
/// skip comparison of account.hash, since it is only meaningful when the account is loaded in a
/// given fork and some tests do not have that.
impl PartialEq for Account {
fn eq(&self, other: &Self) -> bool {
self.lamports == other.lamports
&& self.data == other.data
&& self.owner == other.owner
&& self.executable == other.executable
&& self.rent_epoch == other.rent_epoch
}
}
impl Eq for Account {}
impl fmt::Debug for Account {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let data_len = cmp::min(64, self.data.len());
@ -27,13 +46,14 @@ impl fmt::Debug for Account {
};
write!(
f,
"Account {{ lamports: {} data.len: {} owner: {} executable: {} rent_epoch: {}{} }}",
"Account {{ lamports: {} data.len: {} owner: {} executable: {} rent_epoch: {}{} hash: {} }}",
self.lamports,
self.data.len(),
self.owner,
self.executable,
self.rent_epoch,
data_str,
self.hash,
)
}
}
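
Because the new hash field is marked skip_serializing/skip_deserializing and PartialEq ignores it, the hash acts purely as an in-memory cache: it disappears across a serialization round trip and never affects account equality. A small sketch of both properties (assumes bincode as the serializer, as used elsewhere in the codebase):

use solana_sdk::account::Account;
use solana_sdk::hash::{hash, Hash};
use solana_sdk::pubkey::Pubkey;

fn main() {
    let mut account = Account::new(42, 0, &Pubkey::default());
    account.hash = hash(&[1, 2, 3]);

    // The hash does not survive serialization...
    let bytes = bincode::serialize(&account).unwrap();
    let restored: Account = bincode::deserialize(&bytes).unwrap();
    assert_eq!(restored.hash, Hash::default());

    // ...and is ignored by PartialEq, so the two still compare equal.
    assert_eq!(account, restored);
}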

sdk/src/bank_hash.rs (new file, 163 lines)
View File

@ -0,0 +1,163 @@
use crate::hash::Hash;
use bincode::deserialize_from;
use rand::{Rng, SeedableRng};
use rand_chacha::ChaChaRng;
use serde::{Deserialize, Serialize, Serializer};
use std::fmt;
use std::io::Cursor;
// Type for representing a bank's accounts state.
// The lower 32 bytes are the xor of sha256 hashes of account state; the rest of
// the bytes are generated with a chacha rng init'ed with that state.
// 448 bytes solves the birthday problem when xor'ing, preventing an attacker from
// finding a value or set of values that could be xor'ed to match the bit pattern
// of an existing state value.
const BANK_HASH_BYTES: usize = 448;
#[derive(Clone, Copy)]
pub struct BankHash([u8; BANK_HASH_BYTES]);
impl fmt::Debug for BankHash {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "BankHash {}", hex::encode(&self.0[..32]))
}
}
impl fmt::Display for BankHash {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{:?}", self)
}
}
impl PartialEq for BankHash {
fn eq(&self, other: &Self) -> bool {
self.0[..] == other.0[..]
}
}
impl Eq for BankHash {}
impl Default for BankHash {
fn default() -> Self {
BankHash([0u8; BANK_HASH_BYTES])
}
}
impl BankHash {
pub fn from_hash(hash: &Hash) -> Self {
let mut new = BankHash::default();
// default hash should result in all 0s thus nop for xor
if *hash == Hash::default() {
return new;
}
new.0[..32].copy_from_slice(hash.as_ref());
let mut seed = [0u8; 32];
seed.copy_from_slice(hash.as_ref());
let mut generator = ChaChaRng::from_seed(seed);
generator.fill(&mut new.0[32..]);
new
}
pub fn is_default(&self) -> bool {
self.0[0..32] == Hash::default().as_ref()[0..32]
}
pub fn xor(self: &mut BankHash, hash: BankHash) {
for (i, b) in hash.as_ref().iter().enumerate() {
self.0.as_mut()[i] ^= b;
}
}
}
impl Serialize for BankHash {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_bytes(&self.0[..])
}
}
struct BankHashVisitor;
impl<'a> serde::de::Visitor<'a> for BankHashVisitor {
type Value = BankHash;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("Expecting BankHash")
}
#[allow(clippy::mutex_atomic)]
fn visit_bytes<E>(self, data: &[u8]) -> std::result::Result<Self::Value, E>
where
E: serde::de::Error,
{
use serde::de::Error;
let mut new = BankHash::default();
let mut rd = Cursor::new(&data[..]);
for i in 0..BANK_HASH_BYTES {
new.0[i] = deserialize_from(&mut rd).map_err(Error::custom)?;
}
Ok(new)
}
}
impl<'de> Deserialize<'de> for BankHash {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: ::serde::Deserializer<'de>,
{
deserializer.deserialize_bytes(BankHashVisitor)
}
}
impl AsRef<[u8]> for BankHash {
fn as_ref(&self) -> &[u8] {
&self.0[..]
}
}
impl AsMut<[u8]> for BankHash {
fn as_mut(&mut self) -> &mut [u8] {
&mut self.0[..]
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::hash::hash;
use bincode::{deserialize, serialize};
use log::*;
#[test]
fn test_bankhash() {
let hash = hash(&[1, 2, 3, 4]);
let bank_hash = BankHash::from_hash(&hash);
assert!(!bank_hash.is_default());
let default = BankHash::default();
assert!(default.is_default());
assert!(bank_hash != default);
assert!(bank_hash == bank_hash);
for i in 0..BANK_HASH_BYTES / 32 {
let start = i * 32;
let end = start + 32;
assert!(bank_hash.0[start..end] != [0u8; 32]);
}
}
#[test]
fn test_serialize() {
solana_logger::setup();
let hash = hash(&[1, 2, 3, 4]);
let bank_hash = BankHash::from_hash(&hash);
info!("{}", bank_hash);
let bytes = serialize(&bank_hash).unwrap();
let new: BankHash = deserialize(&bytes).unwrap();
info!("{}", new);
assert_eq!(new, bank_hash);
}
}
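
The whole scheme leans on two xor properties: commutativity, which makes the fork hash independent of the order in which accounts are stored (what test_hash_internal_state_order above exercises at the bank level), and self-inversion, which is how a stale account hash is removed from the accumulator. A short sketch of both on BankHash directly:

use solana_sdk::bank_hash::BankHash;
use solana_sdk::hash::hash;

fn main() {
    let a = BankHash::from_hash(&hash(&[1]));
    let b = BankHash::from_hash(&hash(&[2]));

    // Commutative: xor'ing in either order yields the same fork state.
    let mut ab = a;
    ab.xor(b);
    let mut ba = b;
    ba.xor(a);
    assert_eq!(ab, ba);

    // Self-inverse: xor'ing the same hash again removes it, which is how an
    // old account hash is replaced when the account is stored again.
    ab.xor(b);
    assert_eq!(ab, a);
}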

View File

@ -28,6 +28,8 @@ pub mod log;
// Modules not usable by on-chain programs
#[cfg(not(feature = "program"))]
pub mod bank_hash;
#[cfg(not(feature = "program"))]
pub mod client;
#[cfg(not(feature = "program"))]
pub mod genesis_block;

View File

@ -1,4 +1,5 @@
use crate::account::Account;
use crate::hash::Hash;
const ID: [u8; 32] = [
5, 135, 132, 191, 20, 139, 164, 40, 47, 176, 18, 87, 72, 136, 169, 241, 83, 160, 125, 173, 247,
@ -15,5 +16,6 @@ pub fn create_loadable_account(name: &str) -> Account {
data: name.as_bytes().to_vec(),
executable: true,
rent_epoch: 0,
hash: Hash::default(),
}
}