remove IncludeSlotInHash after feature activation on mnb (#33816)

* remove IncludeSlotInHash after feature activation on mnb

* fix compile errors

* compile errors

* fix tests

* fix test results
Jeff Washington (jwash) 2023-10-23 15:12:02 -07:00 committed by GitHub
parent a099c7a0b8
commit b0b4e1f0c0
20 changed files with 147 additions and 532 deletions
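
The heart of the change is that the per-account hash input no longer contains the slot, so the IncludeSlotInHash plumbing that gated those bytes can be deleted everywhere. A minimal standalone sketch of that input prefix (illustrative only, not the real AccountsDb::hash_account_data; trailing fields such as data, executable, owner, and pubkey are omitted):

fn hash_input_prefix(lamports: u64, slot: u64, rent_epoch: u64, include_slot: bool) -> Vec<u8> {
    // `include_slot` stands in for the removed IncludeSlotInHash enum.
    let mut buffer = Vec::new();
    buffer.extend_from_slice(&lamports.to_le_bytes());
    if include_slot {
        // Pre-activation behavior: the slot participated in the account hash.
        buffer.extend_from_slice(&slot.to_le_bytes());
    }
    // Post-activation (this commit): the slot bytes are never appended, so the
    // parameter and the enum disappear from the real code.
    buffer.extend_from_slice(&rent_epoch.to_le_bytes());
    buffer
}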

View File

@ -9,7 +9,7 @@ use {
accounts::Accounts, accounts::Accounts,
accounts_db::{ accounts_db::{
test_utils::{create_test_accounts, update_accounts_bench}, test_utils::{create_test_accounts, update_accounts_bench},
AccountShrinkThreshold, CalcAccountsHashDataSource, INCLUDE_SLOT_IN_HASH_TESTS, AccountShrinkThreshold, CalcAccountsHashDataSource,
}, },
accounts_index::AccountSecondaryIndexes, accounts_index::AccountSecondaryIndexes,
ancestors::Ancestors, ancestors::Ancestors,
@ -134,7 +134,6 @@ fn main() {
&EpochSchedule::default(), &EpochSchedule::default(),
&RentCollector::default(), &RentCollector::default(),
true, true,
INCLUDE_SLOT_IN_HASH_TESTS,
); );
time_store.stop(); time_store.stop();
if results != results_store { if results != results_store {

View File

@ -7,7 +7,6 @@ use {
account_storage::meta::{ account_storage::meta::{
StorableAccountsWithHashesAndWriteVersions, StoredAccountInfo, StoredMeta, StorableAccountsWithHashesAndWriteVersions, StoredAccountInfo, StoredMeta,
}, },
accounts_db::INCLUDE_SLOT_IN_HASH_TESTS,
accounts_hash::AccountHash, accounts_hash::AccountHash,
append_vec::{ append_vec::{
test_utils::{create_test_account, get_append_vec_path}, test_utils::{create_test_account, get_append_vec_path},
@ -39,7 +38,7 @@ fn append_account(
let slot_ignored = Slot::MAX; let slot_ignored = Slot::MAX;
let accounts = [(&storage_meta.pubkey, account)]; let accounts = [(&storage_meta.pubkey, account)];
let slice = &accounts[..]; let slice = &accounts[..];
let accounts = (slot_ignored, slice, INCLUDE_SLOT_IN_HASH_TESTS); let accounts = (slot_ignored, slice);
let storable_accounts = let storable_accounts =
StorableAccountsWithHashesAndWriteVersions::new_with_hashes_and_write_versions( StorableAccountsWithHashesAndWriteVersions::new_with_hashes_and_write_versions(
&accounts, &accounts,

View File

@ -3,10 +3,9 @@ use {
account_overrides::AccountOverrides, account_overrides::AccountOverrides,
account_rent_state::{check_rent_state_with_account, RentState}, account_rent_state::{check_rent_state_with_account, RentState},
accounts_db::{ accounts_db::{
AccountShrinkThreshold, AccountsAddRootTiming, AccountsDb, AccountsDbConfig, AccountShrinkThreshold, AccountsAddRootTiming, AccountsDb, AccountsDbConfig, LoadHint,
IncludeSlotInHash, LoadHint, LoadedAccount, ScanStorageResult, LoadedAccount, ScanStorageResult, VerifyAccountsHashAndLamportsConfig,
VerifyAccountsHashAndLamportsConfig, ACCOUNTS_DB_CONFIG_FOR_BENCHMARKS, ACCOUNTS_DB_CONFIG_FOR_BENCHMARKS, ACCOUNTS_DB_CONFIG_FOR_TESTING,
ACCOUNTS_DB_CONFIG_FOR_TESTING,
}, },
accounts_index::{ accounts_index::{
AccountSecondaryIndexes, IndexKey, ScanConfig, ScanError, ScanResult, ZeroLamport, AccountSecondaryIndexes, IndexKey, ScanConfig, ScanError, ScanResult, ZeroLamport,
@ -1311,7 +1310,6 @@ impl Accounts {
rent_collector: &RentCollector, rent_collector: &RentCollector,
durable_nonce: &DurableNonce, durable_nonce: &DurableNonce,
lamports_per_signature: u64, lamports_per_signature: u64,
include_slot_in_hash: IncludeSlotInHash,
) { ) {
let (accounts_to_store, transactions) = self.collect_accounts_to_store( let (accounts_to_store, transactions) = self.collect_accounts_to_store(
txs, txs,
@ -1321,10 +1319,8 @@ impl Accounts {
durable_nonce, durable_nonce,
lamports_per_signature, lamports_per_signature,
); );
self.accounts_db.store_cached_inline_update_index( self.accounts_db
(slot, &accounts_to_store[..], include_slot_in_hash), .store_cached_inline_update_index((slot, &accounts_to_store[..]), Some(&transactions));
Some(&transactions),
);
} }
pub fn store_accounts_cached<'a, T: ReadableAccount + Sync + ZeroLamport + 'a>( pub fn store_accounts_cached<'a, T: ReadableAccount + Sync + ZeroLamport + 'a>(

View File

@ -1,8 +1,5 @@
use { use {
crate::{ crate::{accounts_db::AccountsDb, accounts_hash::AccountHash},
accounts_db::{AccountsDb, IncludeSlotInHash},
accounts_hash::AccountHash,
},
dashmap::DashMap, dashmap::DashMap,
seqlock::SeqLock, seqlock::SeqLock,
solana_sdk::{ solana_sdk::{
@ -68,20 +65,12 @@ impl SlotCacheInner {
self.cache.iter().map(|item| *item.key()).collect() self.cache.iter().map(|item| *item.key()).collect()
} }
pub fn insert( pub fn insert(&self, pubkey: &Pubkey, account: AccountSharedData) -> CachedAccount {
&self,
pubkey: &Pubkey,
account: AccountSharedData,
slot: Slot,
include_slot_in_hash: IncludeSlotInHash,
) -> CachedAccount {
let data_len = account.data().len() as u64; let data_len = account.data().len() as u64;
let item = Arc::new(CachedAccountInner { let item = Arc::new(CachedAccountInner {
account, account,
hash: SeqLock::new(None), hash: SeqLock::new(None),
slot,
pubkey: *pubkey, pubkey: *pubkey,
include_slot_in_hash,
}); });
if let Some(old) = self.cache.insert(*pubkey, item.clone()) { if let Some(old) = self.cache.insert(*pubkey, item.clone()) {
self.same_account_writes.fetch_add(1, Ordering::Relaxed); self.same_account_writes.fetch_add(1, Ordering::Relaxed);
@ -145,11 +134,7 @@ pub type CachedAccount = Arc<CachedAccountInner>;
pub struct CachedAccountInner { pub struct CachedAccountInner {
pub account: AccountSharedData, pub account: AccountSharedData,
hash: SeqLock<Option<AccountHash>>, hash: SeqLock<Option<AccountHash>>,
slot: Slot,
pubkey: Pubkey, pubkey: Pubkey,
/// temporarily here during feature activation
/// since we calculate the hash later, or in the background, we need knowledge of whether this slot uses the slot in the hash or not
pub include_slot_in_hash: IncludeSlotInHash,
} }
impl CachedAccountInner { impl CachedAccountInner {
@ -158,12 +143,7 @@ impl CachedAccountInner {
match hash { match hash {
Some(hash) => hash, Some(hash) => hash,
None => { None => {
let hash = AccountsDb::hash_account( let hash = AccountsDb::hash_account(&self.account, &self.pubkey);
self.slot,
&self.account,
&self.pubkey,
self.include_slot_in_hash,
);
*self.hash.lock_write() = Some(hash); *self.hash.lock_write() = Some(hash);
hash hash
} }
@ -228,13 +208,7 @@ impl AccountsCache {
); );
} }
pub fn store( pub fn store(&self, slot: Slot, pubkey: &Pubkey, account: AccountSharedData) -> CachedAccount {
&self,
slot: Slot,
pubkey: &Pubkey,
account: AccountSharedData,
include_slot_in_hash: IncludeSlotInHash,
) -> CachedAccount {
let slot_cache = self.slot_cache(slot).unwrap_or_else(|| let slot_cache = self.slot_cache(slot).unwrap_or_else(||
// DashMap entry.or_insert() returns a RefMut, essentially a write lock, // DashMap entry.or_insert() returns a RefMut, essentially a write lock,
// which is dropped after this block ends, minimizing time held by the lock. // which is dropped after this block ends, minimizing time held by the lock.
@ -246,7 +220,7 @@ impl AccountsCache {
.or_insert(self.new_inner()) .or_insert(self.new_inner())
.clone()); .clone());
slot_cache.insert(pubkey, account, slot, include_slot_in_hash) slot_cache.insert(pubkey, account)
} }
pub fn load(&self, slot: Slot, pubkey: &Pubkey) -> Option<CachedAccount> { pub fn load(&self, slot: Slot, pubkey: &Pubkey) -> Option<CachedAccount> {
@ -338,7 +312,7 @@ impl AccountsCache {
#[cfg(test)] #[cfg(test)]
pub mod tests { pub mod tests {
use {super::*, crate::accounts_db::INCLUDE_SLOT_IN_HASH_TESTS}; use super::*;
#[test] #[test]
fn test_remove_slots_le() { fn test_remove_slots_le() {
@ -350,7 +324,6 @@ pub mod tests {
inserted_slot, inserted_slot,
&Pubkey::new_unique(), &Pubkey::new_unique(),
AccountSharedData::new(1, 0, &Pubkey::default()), AccountSharedData::new(1, 0, &Pubkey::default()),
INCLUDE_SLOT_IN_HASH_TESTS,
); );
// If the cache is told the size limit is 0, it should return the one slot // If the cache is told the size limit is 0, it should return the one slot
let removed = cache.remove_slots_le(0); let removed = cache.remove_slots_le(0);
@ -368,7 +341,6 @@ pub mod tests {
inserted_slot, inserted_slot,
&Pubkey::new_unique(), &Pubkey::new_unique(),
AccountSharedData::new(1, 0, &Pubkey::default()), AccountSharedData::new(1, 0, &Pubkey::default()),
INCLUDE_SLOT_IN_HASH_TESTS,
); );
// If the cache is told the size limit is 0, it should return nothing, because there's no // If the cache is told the size limit is 0, it should return nothing, because there's no
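
Net effect for the cache above: SlotCacheInner::insert and AccountsCache::store drop both the slot-in-hash flag and (for insert) the slot itself, and the lazily computed hash needs only the account and its pubkey. A hedged usage sketch, assuming the solana-accounts-db crate layout of this period and that AccountsCache still provides Default:

use solana_accounts_db::accounts_cache::AccountsCache;
use solana_sdk::{account::AccountSharedData, pubkey::Pubkey};

fn cache_demo() {
    let cache = AccountsCache::default();
    // store() no longer takes an IncludeSlotInHash value; the cached entry's
    // hash is computed later (or in the background) from account and pubkey alone.
    let cached = cache.store(
        0, // slot
        &Pubkey::new_unique(),
        AccountSharedData::new(1, 0, &Pubkey::default()),
    );
    let _hash = cached.hash();
}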

View File

@ -143,22 +143,6 @@ const MAX_ITEMS_PER_CHUNK: Slot = 2_500;
// This allows us to split up accounts index accesses across multiple threads. // This allows us to split up accounts index accesses across multiple threads.
const SHRINK_COLLECT_CHUNK_SIZE: usize = 50; const SHRINK_COLLECT_CHUNK_SIZE: usize = 50;
/// temporary enum during feature activation of
/// ignore slot when calculating an account hash #28420
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum IncludeSlotInHash {
/// this is the status quo, prior to feature activation
/// INCLUDE the slot in the account hash calculation
IncludeSlot,
/// this is the value once feature activation occurs
/// do NOT include the slot in the account hash calculation
RemoveSlot,
/// this option should not be used.
/// If it is, this is a panic worthy event.
/// There are code paths where the feature activation status isn't known, but this value should not possibly be used.
IrrelevantAssertOnUse,
}
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Default, Clone, Copy, PartialEq, Eq)]
pub enum CreateAncientStorage { pub enum CreateAncientStorage {
/// ancient storages are created by appending /// ancient storages are created by appending
@ -227,7 +211,6 @@ pub struct VerifyAccountsHashAndLamportsConfig<'a> {
pub store_detailed_debug_info: bool, pub store_detailed_debug_info: bool,
/// true to use dedicated background thread pool for verification /// true to use dedicated background thread pool for verification
pub use_bg_thread_pool: bool, pub use_bg_thread_pool: bool,
pub include_slot_in_hash: IncludeSlotInHash,
} }
pub(crate) trait ShrinkCollectRefs<'a>: Sync + Send { pub(crate) trait ShrinkCollectRefs<'a>: Sync + Send {
@ -329,20 +312,6 @@ impl<'a> ShrinkCollectRefs<'a> for ShrinkCollectAliveSeparatedByRefs<'a> {
} }
} }
/// used by tests for 'include_slot_in_hash' parameter
/// Tests just need to be self-consistent, so any value should work here.
pub const INCLUDE_SLOT_IN_HASH_TESTS: IncludeSlotInHash = IncludeSlotInHash::IncludeSlot;
// This value is irrelevant because we are reading from append vecs and the hash is already computed and saved.
// The hash will just be loaded from the append vec as opposed to being calculated initially.
// A shrink-type operation involves reading from an append vec and writing a subset of the read accounts to a new append vec.
// So, by definition, we will just read hashes and write hashes. The hash will not be calculated.
// The 'store' apis are shared, such that the initial store from a bank (where we need to know whether to include the slot)
// must include a feature-based value for 'include_slot_in_hash'. Other uses, specifically shrink, do NOT need to pass this
// parameter, but the shared api requires a value.
pub const INCLUDE_SLOT_IN_HASH_IRRELEVANT_APPEND_VEC_OPERATION: IncludeSlotInHash =
IncludeSlotInHash::IrrelevantAssertOnUse;
pub enum StoreReclaims { pub enum StoreReclaims {
/// normal reclaim mode /// normal reclaim mode
Default, Default,
@ -414,12 +383,7 @@ impl CurrentAncientAppendVec {
let accounts = accounts_to_store.get(storage_selector); let accounts = accounts_to_store.get(storage_selector);
db.store_accounts_frozen( db.store_accounts_frozen(
( (self.slot(), accounts, accounts_to_store.slot),
self.slot(),
accounts,
INCLUDE_SLOT_IN_HASH_IRRELEVANT_APPEND_VEC_OPERATION,
accounts_to_store.slot,
),
None::<Vec<AccountHash>>, None::<Vec<AccountHash>>,
self.append_vec(), self.append_vec(),
None, None,
@ -944,21 +908,13 @@ impl<'a> LoadedAccount<'a> {
} }
} }
pub fn compute_hash( pub fn compute_hash(&self, pubkey: &Pubkey) -> AccountHash {
&self,
slot: Slot,
pubkey: &Pubkey,
include_slot: IncludeSlotInHash,
) -> AccountHash {
match self { match self {
LoadedAccount::Stored(stored_account_meta) => AccountsDb::hash_account( LoadedAccount::Stored(stored_account_meta) => {
slot, AccountsDb::hash_account(stored_account_meta, stored_account_meta.pubkey())
stored_account_meta, }
stored_account_meta.pubkey(),
include_slot,
),
LoadedAccount::Cached(cached_account) => { LoadedAccount::Cached(cached_account) => {
AccountsDb::hash_account(slot, &cached_account.account, pubkey, include_slot) AccountsDb::hash_account(&cached_account.account, pubkey)
} }
} }
} }
@ -2451,11 +2407,7 @@ impl<'a> AppendVecScan for ScanState<'a> {
if (self.config.check_hash || hash_is_missing) if (self.config.check_hash || hash_is_missing)
&& !AccountsDb::is_filler_account_helper(pubkey, self.filler_account_suffix) && !AccountsDb::is_filler_account_helper(pubkey, self.filler_account_suffix)
{ {
let computed_hash = loaded_account.compute_hash( let computed_hash = loaded_account.compute_hash(pubkey);
self.current_slot,
pubkey,
self.config.include_slot_in_hash,
);
if hash_is_missing { if hash_is_missing {
loaded_hash = computed_hash; loaded_hash = computed_hash;
} else if self.config.check_hash && computed_hash != loaded_hash { } else if self.config.check_hash && computed_hash != loaded_hash {
@ -4176,11 +4128,7 @@ impl AccountsDb {
// without use of rather wide locks in this whole function, because we're // without use of rather wide locks in this whole function, because we're
// mutating rooted slots; There should be no writers to them. // mutating rooted slots; There should be no writers to them.
stats_sub.store_accounts_timing = self.store_accounts_frozen( stats_sub.store_accounts_timing = self.store_accounts_frozen(
( (slot, &shrink_collect.alive_accounts.alive_accounts()[..]),
slot,
&shrink_collect.alive_accounts.alive_accounts()[..],
INCLUDE_SLOT_IN_HASH_IRRELEVANT_APPEND_VEC_OPERATION,
),
None::<Vec<AccountHash>>, None::<Vec<AccountHash>>,
shrink_in_progress.new_storage(), shrink_in_progress.new_storage(),
None, None,
@ -6242,33 +6190,24 @@ impl AccountsDb {
} }
} }
pub fn hash_account<T: ReadableAccount>( pub fn hash_account<T: ReadableAccount>(account: &T, pubkey: &Pubkey) -> AccountHash {
slot: Slot,
account: &T,
pubkey: &Pubkey,
include_slot: IncludeSlotInHash,
) -> AccountHash {
Self::hash_account_data( Self::hash_account_data(
slot,
account.lamports(), account.lamports(),
account.owner(), account.owner(),
account.executable(), account.executable(),
account.rent_epoch(), account.rent_epoch(),
account.data(), account.data(),
pubkey, pubkey,
include_slot,
) )
} }
fn hash_account_data( fn hash_account_data(
slot: Slot,
lamports: u64, lamports: u64,
owner: &Pubkey, owner: &Pubkey,
executable: bool, executable: bool,
rent_epoch: Epoch, rent_epoch: Epoch,
data: &[u8], data: &[u8],
pubkey: &Pubkey, pubkey: &Pubkey,
include_slot: IncludeSlotInHash,
) -> AccountHash { ) -> AccountHash {
if lamports == 0 { if lamports == 0 {
return AccountHash(Hash::default()); return AccountHash(Hash::default());
@ -6285,16 +6224,6 @@ impl AccountsDb {
// collect lamports, slot, rent_epoch into buffer to hash // collect lamports, slot, rent_epoch into buffer to hash
buffer.extend_from_slice(&lamports.to_le_bytes()); buffer.extend_from_slice(&lamports.to_le_bytes());
match include_slot {
IncludeSlotInHash::IncludeSlot => {
// upon feature activation, stop including slot# in the account hash
buffer.extend_from_slice(&slot.to_le_bytes());
}
IncludeSlotInHash::RemoveSlot => {}
IncludeSlotInHash::IrrelevantAssertOnUse => {
panic!("IncludeSlotInHash is irrelevant, but we are calculating hash");
}
}
buffer.extend_from_slice(&rent_epoch.to_le_bytes()); buffer.extend_from_slice(&rent_epoch.to_le_bytes());
if data.len() > DATA_SIZE_CAN_FIT { if data.len() > DATA_SIZE_CAN_FIT {
@ -6681,10 +6610,8 @@ impl AccountsDb {
// updates to the index happen, so anybody that sees a real entry in the index, // updates to the index happen, so anybody that sees a real entry in the index,
// will be able to find the account in storage // will be able to find the account in storage
let flushed_store = self.create_and_insert_store(slot, total_size, "flush_slot_cache"); let flushed_store = self.create_and_insert_store(slot, total_size, "flush_slot_cache");
// irrelevant - account will already be hashed since it was used in bank hash previously
let include_slot_in_hash = IncludeSlotInHash::IrrelevantAssertOnUse;
self.store_accounts_frozen( self.store_accounts_frozen(
(slot, &accounts[..], include_slot_in_hash), (slot, &accounts[..]),
Some(hashes), Some(hashes),
&flushed_store, &flushed_store,
None, None,
@ -6702,7 +6629,7 @@ impl AccountsDb {
hashes.push(hash); hashes.push(hash);
}); });
self.store_accounts_frozen( self.store_accounts_frozen(
(slot, &accounts[..], include_slot_in_hash), (slot, &accounts[..]),
Some(hashes), Some(hashes),
&flushed_store, &flushed_store,
None, None,
@ -6765,11 +6692,7 @@ impl AccountsDb {
// store all unique accounts into new storage // store all unique accounts into new storage
let accounts = accum.values().collect::<Vec<_>>(); let accounts = accum.values().collect::<Vec<_>>();
let to_store = ( let to_store = (slot, &accounts[..]);
slot,
&accounts[..],
INCLUDE_SLOT_IN_HASH_IRRELEVANT_APPEND_VEC_OPERATION,
);
let storable = let storable =
StorableAccountsWithHashesAndWriteVersions::<'_, '_, _, _, &AccountHash>::new( StorableAccountsWithHashesAndWriteVersions::<'_, '_, _, _, &AccountHash>::new(
&to_store, &to_store,
@ -6835,7 +6758,6 @@ impl AccountsDb {
slot: Slot, slot: Slot,
accounts_and_meta_to_store: &impl StorableAccounts<'b, T>, accounts_and_meta_to_store: &impl StorableAccounts<'b, T>,
txn_iter: Box<dyn std::iter::Iterator<Item = &Option<&SanitizedTransaction>> + 'a>, txn_iter: Box<dyn std::iter::Iterator<Item = &Option<&SanitizedTransaction>> + 'a>,
include_slot_in_hash: IncludeSlotInHash,
mut write_version_producer: P, mut write_version_producer: P,
) -> Vec<AccountInfo> ) -> Vec<AccountInfo>
where where
@ -6858,12 +6780,9 @@ impl AccountsDb {
&mut write_version_producer, &mut write_version_producer,
); );
let cached_account = self.accounts_cache.store( let cached_account =
slot, self.accounts_cache
accounts_and_meta_to_store.pubkey(i), .store(slot, accounts_and_meta_to_store.pubkey(i), account);
account,
include_slot_in_hash,
);
// hash this account in the bg // hash this account in the bg
match &self.sender_bg_hasher { match &self.sender_bg_hasher {
Some(ref sender) => { Some(ref sender) => {
@ -6912,13 +6831,7 @@ impl AccountsDb {
None => Box::new(std::iter::repeat(&None).take(accounts.len())), None => Box::new(std::iter::repeat(&None).take(accounts.len())),
}; };
self.write_accounts_to_cache( self.write_accounts_to_cache(slot, accounts, txn_iter, write_version_producer)
slot,
accounts,
txn_iter,
accounts.include_slot_in_hash(),
write_version_producer,
)
} }
StoreTo::Storage(storage) => { StoreTo::Storage(storage) => {
if accounts.has_hash_and_write_version() { if accounts.has_hash_and_write_version() {
@ -6951,10 +6864,8 @@ impl AccountsDb {
for index in 0..accounts.len() { for index in 0..accounts.len() {
let (pubkey, account) = (accounts.pubkey(index), accounts.account(index)); let (pubkey, account) = (accounts.pubkey(index), accounts.account(index));
let hash = Self::hash_account( let hash = Self::hash_account(
slot,
account, account,
pubkey, pubkey,
accounts.include_slot_in_hash(),
); );
hashes.push(hash); hashes.push(hash);
} }
@ -7108,7 +7019,7 @@ impl AccountsDb {
let hash_is_missing = loaded_hash == AccountHash(Hash::default()); let hash_is_missing = loaded_hash == AccountHash(Hash::default());
if (config.check_hash || hash_is_missing) && !self.is_filler_account(pubkey) { if (config.check_hash || hash_is_missing) && !self.is_filler_account(pubkey) {
let computed_hash = let computed_hash =
loaded_account.compute_hash(*slot, pubkey, config.include_slot_in_hash); loaded_account.compute_hash(pubkey);
if hash_is_missing { if hash_is_missing {
loaded_hash = computed_hash; loaded_hash = computed_hash;
} }
@ -7188,7 +7099,6 @@ impl AccountsDb {
&EpochSchedule::default(), &EpochSchedule::default(),
&RentCollector::default(), &RentCollector::default(),
is_startup, is_startup,
INCLUDE_SLOT_IN_HASH_TESTS,
) )
} }
@ -7536,7 +7446,6 @@ impl AccountsDb {
epoch_schedule: &EpochSchedule, epoch_schedule: &EpochSchedule,
rent_collector: &RentCollector, rent_collector: &RentCollector,
is_startup: bool, is_startup: bool,
include_slot_in_hash: IncludeSlotInHash,
) -> (AccountsHash, u64) { ) -> (AccountsHash, u64) {
let check_hash = false; let check_hash = false;
let (accounts_hash, total_lamports) = self let (accounts_hash, total_lamports) = self
@ -7551,7 +7460,6 @@ impl AccountsDb {
epoch_schedule, epoch_schedule,
rent_collector, rent_collector,
store_detailed_debug_info_on_failure: false, store_detailed_debug_info_on_failure: false,
include_slot_in_hash,
}, },
expected_capitalization, expected_capitalization,
) )
@ -7931,7 +7839,6 @@ impl AccountsDb {
epoch_schedule: config.epoch_schedule, epoch_schedule: config.epoch_schedule,
rent_collector: config.rent_collector, rent_collector: config.rent_collector,
store_detailed_debug_info_on_failure: config.store_detailed_debug_info, store_detailed_debug_info_on_failure: config.store_detailed_debug_info,
include_slot_in_hash: config.include_slot_in_hash,
}; };
let hash_mismatch_is_error = !config.ignore_mismatch; let hash_mismatch_is_error = !config.ignore_mismatch;
@ -8586,7 +8493,7 @@ impl AccountsDb {
pub fn store_uncached(&self, slot: Slot, accounts: &[(&Pubkey, &AccountSharedData)]) { pub fn store_uncached(&self, slot: Slot, accounts: &[(&Pubkey, &AccountSharedData)]) {
let storage = self.find_storage_candidate(slot, 1); let storage = self.find_storage_candidate(slot, 1);
self.store( self.store(
(slot, accounts, INCLUDE_SLOT_IN_HASH_TESTS), (slot, accounts),
&StoreTo::Storage(&storage), &StoreTo::Storage(&storage),
None, None,
StoreReclaims::Default, StoreReclaims::Default,
@ -9826,7 +9733,7 @@ impl AccountsDb {
/// callers used to call store_uncached. But, this is not allowed anymore. /// callers used to call store_uncached. But, this is not allowed anymore.
pub fn store_for_tests(&self, slot: Slot, accounts: &[(&Pubkey, &AccountSharedData)]) { pub fn store_for_tests(&self, slot: Slot, accounts: &[(&Pubkey, &AccountSharedData)]) {
self.store( self.store(
(slot, accounts, INCLUDE_SLOT_IN_HASH_TESTS), (slot, accounts),
&StoreTo::Cache, &StoreTo::Cache,
None, None,
StoreReclaims::Default, StoreReclaims::Default,
@ -9966,7 +9873,6 @@ impl<'a> VerifyAccountsHashAndLamportsConfig<'a> {
ignore_mismatch: false, ignore_mismatch: false,
store_detailed_debug_info: false, store_detailed_debug_info: false,
use_bg_thread_pool: false, use_bg_thread_pool: false,
include_slot_in_hash: INCLUDE_SLOT_IN_HASH_TESTS,
} }
} }
} }
@ -10076,7 +9982,6 @@ pub mod tests {
bins: usize, bins: usize,
bin_range: &Range<usize>, bin_range: &Range<usize>,
check_hash: bool, check_hash: bool,
include_slot_in_hash: IncludeSlotInHash,
) -> Result<Vec<CacheHashDataFile>, AccountsHashVerificationError> { ) -> Result<Vec<CacheHashDataFile>, AccountsHashVerificationError> {
let temp_dir = TempDir::new().unwrap(); let temp_dir = TempDir::new().unwrap();
let accounts_hash_cache_path = temp_dir.path().to_path_buf(); let accounts_hash_cache_path = temp_dir.path().to_path_buf();
@ -10088,7 +9993,6 @@ pub mod tests {
bin_range, bin_range,
&CalcAccountsHashConfig { &CalcAccountsHashConfig {
check_hash, check_hash,
include_slot_in_hash,
..CalcAccountsHashConfig::default() ..CalcAccountsHashConfig::default()
}, },
None, None,
@ -10106,32 +10010,6 @@ pub mod tests {
} }
} }
/// This impl exists until this feature is activated:
/// ignore slot when calculating an account hash #28420
/// For now, all test code will continue to work thanks to this impl
/// Tests will use INCLUDE_SLOT_IN_HASH_TESTS for 'include_slot_in_hash' calls.
impl<'a, T: ReadableAccount + Sync> StorableAccounts<'a, T> for (Slot, &'a [(&'a Pubkey, &'a T)]) {
fn pubkey(&self, index: usize) -> &Pubkey {
self.1[index].0
}
fn account(&self, index: usize) -> &T {
self.1[index].1
}
fn slot(&self, _index: usize) -> Slot {
// per-index slot is not unique per slot when per-account slot is not included in the source data
self.target_slot()
}
fn target_slot(&self) -> Slot {
self.0
}
fn len(&self) -> usize {
self.1.len()
}
fn include_slot_in_hash(&self) -> IncludeSlotInHash {
INCLUDE_SLOT_IN_HASH_TESTS
}
}
/// this tuple contains slot info PER account /// this tuple contains slot info PER account
impl<'a, T: ReadableAccount + Sync> StorableAccounts<'a, T> impl<'a, T: ReadableAccount + Sync> StorableAccounts<'a, T>
for (Slot, &'a [(&'a Pubkey, &'a T, Slot)]) for (Slot, &'a [(&'a Pubkey, &'a T, Slot)])
@ -10162,9 +10040,6 @@ pub mod tests {
false false
} }
} }
fn include_slot_in_hash(&self) -> IncludeSlotInHash {
INCLUDE_SLOT_IN_HASH_TESTS
}
} }
impl CurrentAncientAppendVec { impl CurrentAncientAppendVec {
@ -10202,7 +10077,7 @@ pub mod tests {
} }
let expected_accounts_data_len = data.last().unwrap().1.data().len(); let expected_accounts_data_len = data.last().unwrap().1.data().len();
let expected_alive_bytes = aligned_stored_size(expected_accounts_data_len); let expected_alive_bytes = aligned_stored_size(expected_accounts_data_len);
let storable = (slot0, &data[..], INCLUDE_SLOT_IN_HASH_TESTS); let storable = (slot0, &data[..]);
let hashes = data let hashes = data
.iter() .iter()
.map(|_| AccountHash(Hash::default())) .map(|_| AccountHash(Hash::default()))
@ -10480,14 +10355,7 @@ pub mod tests {
let accounts_db = AccountsDb::new_single_for_tests(); let accounts_db = AccountsDb::new_single_for_tests();
accounts_db accounts_db
.scan_snapshot_stores( .scan_snapshot_stores(&empty_storages(), &mut stats, 2, &bounds, false)
&empty_storages(),
&mut stats,
2,
&bounds,
false,
INCLUDE_SLOT_IN_HASH_TESTS,
)
.unwrap(); .unwrap();
} }
#[test] #[test]
@ -10500,14 +10368,7 @@ pub mod tests {
let accounts_db = AccountsDb::new_single_for_tests(); let accounts_db = AccountsDb::new_single_for_tests();
accounts_db accounts_db
.scan_snapshot_stores( .scan_snapshot_stores(&empty_storages(), &mut stats, 2, &bounds, false)
&empty_storages(),
&mut stats,
2,
&bounds,
false,
INCLUDE_SLOT_IN_HASH_TESTS,
)
.unwrap(); .unwrap();
} }
@ -10521,21 +10382,13 @@ pub mod tests {
let accounts_db = AccountsDb::new_single_for_tests(); let accounts_db = AccountsDb::new_single_for_tests();
accounts_db accounts_db
.scan_snapshot_stores( .scan_snapshot_stores(&empty_storages(), &mut stats, 2, &bounds, false)
&empty_storages(),
&mut stats,
2,
&bounds,
false,
INCLUDE_SLOT_IN_HASH_TESTS,
)
.unwrap(); .unwrap();
} }
fn sample_storages_and_account_in_slot( fn sample_storages_and_account_in_slot(
slot: Slot, slot: Slot,
accounts: &AccountsDb, accounts: &AccountsDb,
include_slot_in_hash: IncludeSlotInHash,
) -> ( ) -> (
Vec<Arc<AccountStorageEntry>>, Vec<Arc<AccountStorageEntry>>,
Vec<CalculateHashIntermediate>, Vec<CalculateHashIntermediate>,
@ -10569,10 +10422,10 @@ pub mod tests {
]; ];
let expected_hashes = [ let expected_hashes = [
AccountHash(Hash::from_str("5K3NW73xFHwgTWVe4LyCg4QfQda8f88uZj2ypDx2kmmH").unwrap()), AccountHash(Hash::from_str("EkyjPt4oL7KpRMEoAdygngnkhtVwCxqJ2MkwaGV4kUU4").unwrap()),
AccountHash(Hash::from_str("84ozw83MZ8oeSF4hRAg7SeW1Tqs9LMXagX1BrDRjtZEx").unwrap()), AccountHash(Hash::from_str("4N7T4C2MK3GbHudqhfGsCyi2GpUU3roN6nhwViA41LYL").unwrap()),
AccountHash(Hash::from_str("5XqtnEJ41CG2JWNp7MAg9nxkRUAnyjLxfsKsdrLxQUbC").unwrap()), AccountHash(Hash::from_str("HzWMbUEnSfkrPiMdZeM6zSTdU5czEvGkvDcWBApToGC9").unwrap()),
AccountHash(Hash::from_str("DpvwJcznzwULYh19Zu5CuAA4AT6WTBe4H6n15prATmqj").unwrap()), AccountHash(Hash::from_str("AsWzo1HphgrrgQ6V2zFUVDssmfaBipx2XfwGZRqcJjir").unwrap()),
]; ];
let mut raw_accounts = Vec::default(); let mut raw_accounts = Vec::default();
@ -10583,15 +10436,8 @@ pub mod tests {
1, 1,
AccountSharedData::default().owner(), AccountSharedData::default().owner(),
)); ));
let hash = AccountsDb::hash_account( let hash = AccountsDb::hash_account(&raw_accounts[i], &raw_expected[i].pubkey);
slot, assert_eq!(hash, expected_hashes[i]);
&raw_accounts[i],
&raw_expected[i].pubkey,
include_slot_in_hash,
);
if slot == 1 && matches!(include_slot_in_hash, IncludeSlotInHash::IncludeSlot) {
assert_eq!(hash, expected_hashes[i]);
}
raw_expected[i].hash = hash.0; raw_expected[i].hash = hash.0;
} }
@ -10617,12 +10463,11 @@ pub mod tests {
fn sample_storages_and_accounts( fn sample_storages_and_accounts(
accounts: &AccountsDb, accounts: &AccountsDb,
include_slot_in_hash: IncludeSlotInHash,
) -> ( ) -> (
Vec<Arc<AccountStorageEntry>>, Vec<Arc<AccountStorageEntry>>,
Vec<CalculateHashIntermediate>, Vec<CalculateHashIntermediate>,
) { ) {
sample_storages_and_account_in_slot(1, accounts, include_slot_in_hash) sample_storages_and_account_in_slot(1, accounts)
} }
fn get_storage_refs(input: &[Arc<AccountStorageEntry>]) -> SortedStorages { fn get_storage_refs(input: &[Arc<AccountStorageEntry>]) -> SortedStorages {
@ -10688,54 +10533,47 @@ pub mod tests {
#[test] #[test]
fn test_accountsdb_scan_snapshot_stores_hash_not_stored() { fn test_accountsdb_scan_snapshot_stores_hash_not_stored() {
solana_logger::setup(); let accounts_db = AccountsDb::new_single_for_tests();
for include_slot_in_hash in [ let (storages, raw_expected) = sample_storages_and_accounts(&accounts_db);
IncludeSlotInHash::IncludeSlot, storages.iter().for_each(|storage| {
IncludeSlotInHash::RemoveSlot, accounts_db.storage.remove(&storage.slot(), false);
] { });
let accounts_db = AccountsDb::new_single_for_tests();
let (storages, raw_expected) =
sample_storages_and_accounts(&accounts_db, include_slot_in_hash);
storages.iter().for_each(|storage| {
accounts_db.storage.remove(&storage.slot(), false);
});
let hash = AccountHash(Hash::default()); let hash = AccountHash(Hash::default());
// replace the sample storages, storing default hash values so that we rehash during scan // replace the sample storages, storing default hash values so that we rehash during scan
let storages = storages let storages = storages
.iter() .iter()
.map(|storage| { .map(|storage| {
let slot = storage.slot(); let slot = storage.slot();
let copied_storage = accounts_db.create_and_insert_store(slot, 10000, "test"); let copied_storage = accounts_db.create_and_insert_store(slot, 10000, "test");
let all_accounts = storage let all_accounts = storage
.all_accounts() .all_accounts()
.iter() .iter()
.map(|acct| (*acct.pubkey(), acct.to_account_shared_data())) .map(|acct| (*acct.pubkey(), acct.to_account_shared_data()))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let accounts = all_accounts let accounts = all_accounts
.iter() .iter()
.map(|stored| (&stored.0, &stored.1)) .map(|stored| (&stored.0, &stored.1))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let slice = &accounts[..]; let slice = &accounts[..];
let account_data = (slot, slice, include_slot_in_hash); let account_data = (slot, slice);
let hashes = (0..account_data.len()).map(|_| &hash).collect(); let hashes = (0..account_data.len()).map(|_| &hash).collect();
let write_versions = (0..account_data.len()).map(|_| 0).collect(); let write_versions = (0..account_data.len()).map(|_| 0).collect();
let storable_accounts = let storable_accounts =
StorableAccountsWithHashesAndWriteVersions::new_with_hashes_and_write_versions( StorableAccountsWithHashesAndWriteVersions::new_with_hashes_and_write_versions(
&account_data, &account_data,
hashes, hashes,
write_versions, write_versions,
); );
copied_storage copied_storage
.accounts .accounts
.append_accounts(&storable_accounts, 0); .append_accounts(&storable_accounts, 0);
copied_storage copied_storage
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
assert_test_scan(accounts_db, storages, raw_expected, include_slot_in_hash); assert_test_scan(accounts_db, storages, raw_expected);
}
} }
#[test] #[test]
@ -10743,8 +10581,7 @@ pub mod tests {
fn test_accountsdb_scan_snapshot_stores_check_hash() { fn test_accountsdb_scan_snapshot_stores_check_hash() {
solana_logger::setup(); solana_logger::setup();
let accounts_db = AccountsDb::new_single_for_tests(); let accounts_db = AccountsDb::new_single_for_tests();
let (storages, _raw_expected) = let (storages, _raw_expected) = sample_storages_and_accounts(&accounts_db);
sample_storages_and_accounts(&accounts_db, INCLUDE_SLOT_IN_HASH_TESTS);
let max_slot = storages.iter().map(|storage| storage.slot()).max().unwrap(); let max_slot = storages.iter().map(|storage| storage.slot()).max().unwrap();
let hash = let hash =
@ -10766,7 +10603,7 @@ pub mod tests {
.map(|stored| (&stored.0, &stored.1)) .map(|stored| (&stored.0, &stored.1))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let slice = &accounts[..]; let slice = &accounts[..];
let account_data = (slot, slice, INCLUDE_SLOT_IN_HASH_TESTS); let account_data = (slot, slice);
let hashes = (0..account_data.len()).map(|_| &hash).collect(); let hashes = (0..account_data.len()).map(|_| &hash).collect();
let write_versions = (0..account_data.len()).map(|_| 0).collect(); let write_versions = (0..account_data.len()).map(|_| 0).collect();
let storable_accounts = let storable_accounts =
@ -10795,7 +10632,6 @@ pub mod tests {
end: bins, end: bins,
}, },
true, // checking hash here true, // checking hash here
INCLUDE_SLOT_IN_HASH_TESTS,
) )
.unwrap(); .unwrap();
} }
@ -10804,22 +10640,15 @@ pub mod tests {
fn test_accountsdb_scan_snapshot_stores() { fn test_accountsdb_scan_snapshot_stores() {
solana_logger::setup(); solana_logger::setup();
let accounts_db = AccountsDb::new_single_for_tests(); let accounts_db = AccountsDb::new_single_for_tests();
let (storages, raw_expected) = let (storages, raw_expected) = sample_storages_and_accounts(&accounts_db);
sample_storages_and_accounts(&accounts_db, INCLUDE_SLOT_IN_HASH_TESTS);
assert_test_scan( assert_test_scan(accounts_db, storages, raw_expected);
accounts_db,
storages,
raw_expected,
INCLUDE_SLOT_IN_HASH_TESTS,
);
} }
fn assert_test_scan( fn assert_test_scan(
accounts_db: AccountsDb, accounts_db: AccountsDb,
storages: Vec<Arc<AccountStorageEntry>>, storages: Vec<Arc<AccountStorageEntry>>,
raw_expected: Vec<CalculateHashIntermediate>, raw_expected: Vec<CalculateHashIntermediate>,
include_slot_in_hash: IncludeSlotInHash,
) { ) {
let bins = 1; let bins = 1;
let mut stats = HashStats::default(); let mut stats = HashStats::default();
@ -10834,7 +10663,6 @@ pub mod tests {
end: bins, end: bins,
}, },
true, // checking hash here true, // checking hash here
include_slot_in_hash,
) )
.unwrap(); .unwrap();
assert_scan(result, vec![vec![raw_expected.clone()]], bins, 0, bins); assert_scan(result, vec![vec![raw_expected.clone()]], bins, 0, bins);
@ -10851,7 +10679,6 @@ pub mod tests {
end: bins, end: bins,
}, },
false, false,
include_slot_in_hash,
) )
.unwrap(); .unwrap();
let mut expected = vec![Vec::new(); bins]; let mut expected = vec![Vec::new(); bins];
@ -10873,7 +10700,6 @@ pub mod tests {
end: bins, end: bins,
}, },
false, false,
include_slot_in_hash,
) )
.unwrap(); .unwrap();
let mut expected = vec![Vec::new(); bins]; let mut expected = vec![Vec::new(); bins];
@ -10895,7 +10721,6 @@ pub mod tests {
end: bins, end: bins,
}, },
false, false,
include_slot_in_hash,
) )
.unwrap(); .unwrap();
let mut expected = vec![Vec::new(); bins]; let mut expected = vec![Vec::new(); bins];
@ -10912,8 +10737,7 @@ pub mod tests {
// enough stores to get to 2nd chunk // enough stores to get to 2nd chunk
let bins = 1; let bins = 1;
let slot = MAX_ITEMS_PER_CHUNK as Slot; let slot = MAX_ITEMS_PER_CHUNK as Slot;
let (storages, raw_expected) = let (storages, raw_expected) = sample_storages_and_account_in_slot(slot, &accounts_db);
sample_storages_and_account_in_slot(slot, &accounts_db, INCLUDE_SLOT_IN_HASH_TESTS);
let storage_data = [(&storages[0], slot)]; let storage_data = [(&storages[0], slot)];
let sorted_storages = let sorted_storages =
@ -10930,7 +10754,6 @@ pub mod tests {
end: bins, end: bins,
}, },
false, false,
INCLUDE_SLOT_IN_HASH_TESTS,
) )
.unwrap(); .unwrap();
@ -10941,8 +10764,7 @@ pub mod tests {
fn test_accountsdb_scan_snapshot_stores_binning() { fn test_accountsdb_scan_snapshot_stores_binning() {
let mut stats = HashStats::default(); let mut stats = HashStats::default();
let accounts_db = AccountsDb::new_single_for_tests(); let accounts_db = AccountsDb::new_single_for_tests();
let (storages, raw_expected) = let (storages, raw_expected) = sample_storages_and_accounts(&accounts_db);
sample_storages_and_accounts(&accounts_db, INCLUDE_SLOT_IN_HASH_TESTS);
// just the first bin of 2 // just the first bin of 2
let bins = 2; let bins = 2;
@ -10957,7 +10779,6 @@ pub mod tests {
end: half_bins, end: half_bins,
}, },
false, false,
INCLUDE_SLOT_IN_HASH_TESTS,
) )
.unwrap(); .unwrap();
let mut expected = vec![Vec::new(); half_bins]; let mut expected = vec![Vec::new(); half_bins];
@ -10977,7 +10798,6 @@ pub mod tests {
end: bins, end: bins,
}, },
false, false,
INCLUDE_SLOT_IN_HASH_TESTS,
) )
.unwrap(); .unwrap();
@ -11002,7 +10822,6 @@ pub mod tests {
end: bin + 1, end: bin + 1,
}, },
false, false,
INCLUDE_SLOT_IN_HASH_TESTS,
) )
.unwrap(); .unwrap();
let mut expected = vec![Vec::new(); 1]; let mut expected = vec![Vec::new(); 1];
@ -11025,7 +10844,6 @@ pub mod tests {
end: bin + range, end: bin + range,
}, },
false, false,
INCLUDE_SLOT_IN_HASH_TESTS,
) )
.unwrap(); .unwrap();
let mut expected = vec![]; let mut expected = vec![];
@ -11051,8 +10869,7 @@ pub mod tests {
// range is for only 1 bin out of 256. // range is for only 1 bin out of 256.
let bins = 256; let bins = 256;
let slot = MAX_ITEMS_PER_CHUNK as Slot; let slot = MAX_ITEMS_PER_CHUNK as Slot;
let (storages, raw_expected) = let (storages, raw_expected) = sample_storages_and_account_in_slot(slot, &accounts_db);
sample_storages_and_account_in_slot(slot, &accounts_db, INCLUDE_SLOT_IN_HASH_TESTS);
let storage_data = [(&storages[0], slot)]; let storage_data = [(&storages[0], slot)];
let sorted_storages = let sorted_storages =
@ -11071,7 +10888,6 @@ pub mod tests {
end: start + range, end: start + range,
}, },
false, false,
INCLUDE_SLOT_IN_HASH_TESTS,
) )
.unwrap(); .unwrap();
assert_eq!(result.len(), 1); // 2 chunks, but 1 is empty so not included assert_eq!(result.len(), 1); // 2 chunks, but 1 is empty so not included
@ -11106,8 +10922,7 @@ pub mod tests {
solana_logger::setup(); solana_logger::setup();
let db = AccountsDb::new(Vec::new(), &ClusterType::Development); let db = AccountsDb::new(Vec::new(), &ClusterType::Development);
let (storages, raw_expected) = let (storages, raw_expected) = sample_storages_and_accounts(&db);
sample_storages_and_accounts(&db, INCLUDE_SLOT_IN_HASH_TESTS);
let expected_hash = let expected_hash =
AccountsHasher::compute_merkle_root_loop(raw_expected.clone(), MERKLE_FANOUT, |item| { AccountsHasher::compute_merkle_root_loop(raw_expected.clone(), MERKLE_FANOUT, |item| {
&item.hash &item.hash
@ -12744,7 +12559,6 @@ pub mod tests {
#[test] #[test]
fn test_hash_stored_account() { fn test_hash_stored_account() {
// Number are just sequential. // Number are just sequential.
let slot: Slot = 0x01_02_03_04_05_06_07_08;
let meta = StoredMeta { let meta = StoredMeta {
write_version_obsolete: 0x09_0a_0b_0c_0d_0e_0f_10, write_version_obsolete: 0x09_0a_0b_0c_0d_0e_0f_10,
data_len: 0x11_12_13_14_15_16_17_18, data_len: 0x11_12_13_14_15_16_17_18,
@ -12784,25 +12598,15 @@ pub mod tests {
let account = stored_account.to_account_shared_data(); let account = stored_account.to_account_shared_data();
let expected_account_hash = let expected_account_hash =
AccountHash(Hash::from_str("6VeAL4x4PVkECKL1hD1avwPE1uMCRoWiZJzVMvVNYhTq").unwrap()); AccountHash(Hash::from_str("4xuaE8UfH8EYsPyDZvJXUScoZSyxUJf2BpzVMLTFh497").unwrap());
assert_eq!( assert_eq!(
AccountsDb::hash_account( AccountsDb::hash_account(&stored_account, stored_account.pubkey(),),
slot,
&stored_account,
stored_account.pubkey(),
INCLUDE_SLOT_IN_HASH_TESTS
),
expected_account_hash, expected_account_hash,
"StoredAccountMeta's data layout might be changed; update hashing if needed." "StoredAccountMeta's data layout might be changed; update hashing if needed."
); );
assert_eq!( assert_eq!(
AccountsDb::hash_account( AccountsDb::hash_account(&account, stored_account.pubkey(),),
slot,
&account,
stored_account.pubkey(),
INCLUDE_SLOT_IN_HASH_TESTS
),
expected_account_hash, expected_account_hash,
"Account-based hashing must be consistent with StoredAccountMeta-based one." "Account-based hashing must be consistent with StoredAccountMeta-based one."
); );
@ -12893,7 +12697,6 @@ pub mod tests {
epoch_schedule: &EPOCH_SCHEDULE, epoch_schedule: &EPOCH_SCHEDULE,
rent_collector: &RENT_COLLECTOR, rent_collector: &RENT_COLLECTOR,
store_detailed_debug_info_on_failure: false, store_detailed_debug_info_on_failure: false,
include_slot_in_hash: INCLUDE_SLOT_IN_HASH_TESTS,
} }
} }
} }
@ -18241,9 +18044,9 @@ pub mod tests {
// Calculate the expected full accounts hash here and ensure it matches. // Calculate the expected full accounts hash here and ensure it matches.
// Ensure the zero-lamport accounts are NOT included in the full accounts hash. // Ensure the zero-lamport accounts are NOT included in the full accounts hash.
let full_account_hashes = [(2, 0), (3, 0), (4, 1)].into_iter().map(|(index, slot)| { let full_account_hashes = [(2, 0), (3, 0), (4, 1)].into_iter().map(|(index, _slot)| {
let (pubkey, account) = &accounts[index]; let (pubkey, account) = &accounts[index];
AccountsDb::hash_account(slot, account, pubkey, INCLUDE_SLOT_IN_HASH_TESTS).0 AccountsDb::hash_account(account, pubkey).0
}); });
let expected_accounts_hash = AccountsHash(compute_merkle_root(full_account_hashes)); let expected_accounts_hash = AccountsHash(compute_merkle_root(full_account_hashes));
assert_eq!(full_accounts_hash.0, expected_accounts_hash); assert_eq!(full_accounts_hash.0, expected_accounts_hash);
@ -18316,7 +18119,7 @@ pub mod tests {
let incremental_account_hashes = let incremental_account_hashes =
[(2, 2), (3, 3), (5, 3), (6, 2), (7, 3)] [(2, 2), (3, 3), (5, 3), (6, 2), (7, 3)]
.into_iter() .into_iter()
.map(|(index, slot)| { .map(|(index, _slot)| {
let (pubkey, account) = &accounts[index]; let (pubkey, account) = &accounts[index];
if account.is_zero_lamport() { if account.is_zero_lamport() {
// For incremental accounts hash, the hash of a zero lamport account is the hash of its pubkey. // For incremental accounts hash, the hash of a zero lamport account is the hash of its pubkey.
@ -18324,8 +18127,7 @@ pub mod tests {
let hash = blake3::hash(bytemuck::bytes_of(pubkey)); let hash = blake3::hash(bytemuck::bytes_of(pubkey));
Hash::new_from_array(hash.into()) Hash::new_from_array(hash.into())
} else { } else {
AccountsDb::hash_account(slot, account, pubkey, INCLUDE_SLOT_IN_HASH_TESTS) AccountsDb::hash_account(account, pubkey).0
.0
} }
}); });
let expected_accounts_hash = let expected_accounts_hash =
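
The updated tests above show the new entry point most directly: hash_account takes only the account and the pubkey, and the expected hash constants change because the slot no longer perturbs them. A small hedged sketch of a call site, assuming the solana-accounts-db crate paths of this period:

use solana_accounts_db::accounts_db::AccountsDb;
use solana_sdk::{account::AccountSharedData, pubkey::Pubkey};

fn hash_demo() {
    let pubkey = Pubkey::new_unique();
    let account = AccountSharedData::new(1, 0, &Pubkey::default());
    // No slot and no IncludeSlotInHash argument: the hash is now a pure
    // function of the account contents and its pubkey.
    let _hash = AccountsDb::hash_account(&account, &pubkey);
}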

View File

@ -1,6 +1,6 @@
use { use {
crate::{ crate::{
accounts_db::{AccountStorageEntry, IncludeSlotInHash, PUBKEY_BINS_FOR_CALCULATING_HASHES}, accounts_db::{AccountStorageEntry, PUBKEY_BINS_FOR_CALCULATING_HASHES},
active_stats::{ActiveStatItem, ActiveStats}, active_stats::{ActiveStatItem, ActiveStats},
ancestors::Ancestors, ancestors::Ancestors,
pubkey_bins::PubkeyBinCalculator24, pubkey_bins::PubkeyBinCalculator24,
@ -177,7 +177,6 @@ pub struct CalcAccountsHashConfig<'a> {
pub rent_collector: &'a RentCollector, pub rent_collector: &'a RentCollector,
/// used for tracking down hash mismatches after the fact /// used for tracking down hash mismatches after the fact
pub store_detailed_debug_info_on_failure: bool, pub store_detailed_debug_info_on_failure: bool,
pub include_slot_in_hash: IncludeSlotInHash,
} }
// smallest, 3 quartiles, largest, average // smallest, 3 quartiles, largest, average

View File

@ -9,7 +9,6 @@ use {
accounts_db::{ accounts_db::{
AccountStorageEntry, AccountsDb, AliveAccounts, GetUniqueAccountsResult, ShrinkCollect, AccountStorageEntry, AccountsDb, AliveAccounts, GetUniqueAccountsResult, ShrinkCollect,
ShrinkCollectAliveSeparatedByRefs, ShrinkStatsSub, StoreReclaims, ShrinkCollectAliveSeparatedByRefs, ShrinkStatsSub, StoreReclaims,
INCLUDE_SLOT_IN_HASH_IRRELEVANT_APPEND_VEC_OPERATION,
}, },
accounts_file::AccountsFile, accounts_file::AccountsFile,
accounts_hash::AccountHash, accounts_hash::AccountHash,
@ -693,11 +692,7 @@ impl AccountsDb {
bytes: bytes_total, bytes: bytes_total,
accounts: accounts_to_write, accounts: accounts_to_write,
} = packed; } = packed;
let accounts_to_write = StorableAccountsBySlot::new( let accounts_to_write = StorableAccountsBySlot::new(target_slot, accounts_to_write);
target_slot,
accounts_to_write,
INCLUDE_SLOT_IN_HASH_IRRELEVANT_APPEND_VEC_OPERATION,
);
self.shrink_ancient_stats self.shrink_ancient_stats
.bytes_ancient_created .bytes_ancient_created
@ -953,7 +948,7 @@ pub mod tests {
create_db_with_storages_and_index, create_storages_and_update_index, create_db_with_storages_and_index, create_storages_and_update_index,
get_all_accounts, remove_account_for_tests, CAN_RANDOMLY_SHRINK_FALSE, get_all_accounts, remove_account_for_tests, CAN_RANDOMLY_SHRINK_FALSE,
}, },
ShrinkCollectRefs, INCLUDE_SLOT_IN_HASH_TESTS, MAX_RECYCLE_STORES, ShrinkCollectRefs, MAX_RECYCLE_STORES,
}, },
accounts_index::UpsertReclaim, accounts_index::UpsertReclaim,
append_vec::{aligned_stored_size, AppendVec, AppendVecStoredAccountMeta}, append_vec::{aligned_stored_size, AppendVec, AppendVecStoredAccountMeta},
@ -2699,11 +2694,7 @@ pub mod tests {
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let target_slot = slots.clone().nth(combine_into).unwrap_or(slots.start); let target_slot = slots.clone().nth(combine_into).unwrap_or(slots.start);
let accounts_to_write = StorableAccountsBySlot::new( let accounts_to_write = StorableAccountsBySlot::new(target_slot, &accounts);
target_slot,
&accounts,
INCLUDE_SLOT_IN_HASH_TESTS,
);
let bytes = storages let bytes = storages
.iter() .iter()

View File

@ -648,7 +648,6 @@ impl AppendVec {
pub mod tests { pub mod tests {
use { use {
super::{test_utils::*, *}, super::{test_utils::*, *},
crate::accounts_db::INCLUDE_SLOT_IN_HASH_TESTS,
assert_matches::assert_matches, assert_matches::assert_matches,
memoffset::offset_of, memoffset::offset_of,
rand::{thread_rng, Rng}, rand::{thread_rng, Rng},
@ -738,19 +737,18 @@ pub mod tests {
#[should_panic(expected = "accounts.has_hash_and_write_version()")] #[should_panic(expected = "accounts.has_hash_and_write_version()")]
fn test_storable_accounts_with_hashes_and_write_versions_new() { fn test_storable_accounts_with_hashes_and_write_versions_new() {
let account = AccountSharedData::default(); let account = AccountSharedData::default();
// for (Slot, &'a [(&'a Pubkey, &'a T)], IncludeSlotInHash) // for (Slot, &'a [(&'a Pubkey, &'a T)])
let slot = 0 as Slot; let slot = 0 as Slot;
let pubkey = Pubkey::default(); let pubkey = Pubkey::default();
StorableAccountsWithHashesAndWriteVersions::<'_, '_, _, _, &AccountHash>::new(&( StorableAccountsWithHashesAndWriteVersions::<'_, '_, _, _, &AccountHash>::new(&(
slot, slot,
&[(&pubkey, &account)][..], &[(&pubkey, &account)][..],
INCLUDE_SLOT_IN_HASH_TESTS,
)); ));
} }
fn test_mismatch(correct_hashes: bool, correct_write_versions: bool) { fn test_mismatch(correct_hashes: bool, correct_write_versions: bool) {
let account = AccountSharedData::default(); let account = AccountSharedData::default();
// for (Slot, &'a [(&'a Pubkey, &'a T)], IncludeSlotInHash) // for (Slot, &'a [(&'a Pubkey, &'a T)])
let slot = 0 as Slot; let slot = 0 as Slot;
let pubkey = Pubkey::default(); let pubkey = Pubkey::default();
// mismatch between lens of accounts, hashes, write_versions // mismatch between lens of accounts, hashes, write_versions
@ -763,7 +761,7 @@ pub mod tests {
write_versions.push(0); write_versions.push(0);
} }
StorableAccountsWithHashesAndWriteVersions::new_with_hashes_and_write_versions( StorableAccountsWithHashesAndWriteVersions::new_with_hashes_and_write_versions(
&(slot, &[(&pubkey, &account)][..], INCLUDE_SLOT_IN_HASH_TESTS), &(slot, &[(&pubkey, &account)][..]),
hashes, hashes,
write_versions, write_versions,
); );
@ -795,7 +793,7 @@ pub mod tests {
#[test] #[test]
fn test_storable_accounts_with_hashes_and_write_versions_empty() { fn test_storable_accounts_with_hashes_and_write_versions_empty() {
// for (Slot, &'a [(&'a Pubkey, &'a T)], IncludeSlotInHash) // for (Slot, &'a [(&'a Pubkey, &'a T)])
let account = AccountSharedData::default(); let account = AccountSharedData::default();
let slot = 0 as Slot; let slot = 0 as Slot;
let pubkeys = [Pubkey::default()]; let pubkeys = [Pubkey::default()];
@ -803,7 +801,7 @@ pub mod tests {
let write_versions = Vec::default(); let write_versions = Vec::default();
let mut accounts = vec![(&pubkeys[0], &account)]; let mut accounts = vec![(&pubkeys[0], &account)];
accounts.clear(); accounts.clear();
let accounts2 = (slot, &accounts[..], INCLUDE_SLOT_IN_HASH_TESTS); let accounts2 = (slot, &accounts[..]);
let storable = let storable =
StorableAccountsWithHashesAndWriteVersions::new_with_hashes_and_write_versions( StorableAccountsWithHashesAndWriteVersions::new_with_hashes_and_write_versions(
&accounts2, &accounts2,
@ -816,7 +814,7 @@ pub mod tests {
#[test] #[test]
fn test_storable_accounts_with_hashes_and_write_versions_hash_and_write_version() { fn test_storable_accounts_with_hashes_and_write_versions_hash_and_write_version() {
// for (Slot, &'a [(&'a Pubkey, &'a T)], IncludeSlotInHash) // for (Slot, &'a [(&'a Pubkey, &'a T)])
let account = AccountSharedData::default(); let account = AccountSharedData::default();
let slot = 0 as Slot; let slot = 0 as Slot;
let pubkeys = [Pubkey::from([5; 32]), Pubkey::from([6; 32])]; let pubkeys = [Pubkey::from([5; 32]), Pubkey::from([6; 32])];
@ -826,7 +824,7 @@ pub mod tests {
]; ];
let write_versions = vec![42, 43]; let write_versions = vec![42, 43];
let accounts = [(&pubkeys[0], &account), (&pubkeys[1], &account)]; let accounts = [(&pubkeys[0], &account), (&pubkeys[1], &account)];
let accounts2 = (slot, &accounts[..], INCLUDE_SLOT_IN_HASH_TESTS); let accounts2 = (slot, &accounts[..]);
let storable = let storable =
StorableAccountsWithHashesAndWriteVersions::new_with_hashes_and_write_versions( StorableAccountsWithHashesAndWriteVersions::new_with_hashes_and_write_versions(
&accounts2, &accounts2,
@ -851,13 +849,13 @@ pub mod tests {
..Account::default() ..Account::default()
} }
.to_account_shared_data(); .to_account_shared_data();
// for (Slot, &'a [(&'a Pubkey, &'a T)], IncludeSlotInHash) // for (Slot, &'a [(&'a Pubkey, &'a T)])
let slot = 0 as Slot; let slot = 0 as Slot;
let pubkey = Pubkey::default(); let pubkey = Pubkey::default();
let hashes = vec![AccountHash(Hash::default())]; let hashes = vec![AccountHash(Hash::default())];
let write_versions = vec![0]; let write_versions = vec![0];
let accounts = [(&pubkey, &account)]; let accounts = [(&pubkey, &account)];
let accounts2 = (slot, &accounts[..], INCLUDE_SLOT_IN_HASH_TESTS); let accounts2 = (slot, &accounts[..]);
let storable = let storable =
StorableAccountsWithHashesAndWriteVersions::new_with_hashes_and_write_versions( StorableAccountsWithHashesAndWriteVersions::new_with_hashes_and_write_versions(
&accounts2, &accounts2,
@ -874,9 +872,9 @@ pub mod tests {
..Account::default() ..Account::default()
} }
.to_account_shared_data(); .to_account_shared_data();
// for (Slot, &'a [(&'a Pubkey, &'a T)], IncludeSlotInHash) // for (Slot, &'a [(&'a Pubkey, &'a T)])
let accounts = [(&pubkey, &account)]; let accounts = [(&pubkey, &account)];
let accounts2 = (slot, &accounts[..], INCLUDE_SLOT_IN_HASH_TESTS); let accounts2 = (slot, &accounts[..]);
let storable = let storable =
StorableAccountsWithHashesAndWriteVersions::new_with_hashes_and_write_versions( StorableAccountsWithHashesAndWriteVersions::new_with_hashes_and_write_versions(
&accounts2, &accounts2,

View File

@ -1,7 +1,7 @@
//! Code for stake and vote rewards //! Code for stake and vote rewards
use { use {
crate::{accounts_db::IncludeSlotInHash, storable_accounts::StorableAccounts}, crate::storable_accounts::StorableAccounts,
solana_sdk::{ solana_sdk::{
account::AccountSharedData, clock::Slot, pubkey::Pubkey, reward_type::RewardType, account::AccountSharedData, clock::Slot, pubkey::Pubkey, reward_type::RewardType,
}, },
@ -32,7 +32,7 @@ impl StakeReward {
} }
/// allow [StakeReward] to be passed to `StoreAccounts` directly without copies or vec construction /// allow [StakeReward] to be passed to `StoreAccounts` directly without copies or vec construction
impl<'a> StorableAccounts<'a, AccountSharedData> for (Slot, &'a [StakeReward], IncludeSlotInHash) { impl<'a> StorableAccounts<'a, AccountSharedData> for (Slot, &'a [StakeReward]) {
fn pubkey(&self, index: usize) -> &Pubkey { fn pubkey(&self, index: usize) -> &Pubkey {
&self.1[index].stake_pubkey &self.1[index].stake_pubkey
} }
@ -49,9 +49,6 @@ impl<'a> StorableAccounts<'a, AccountSharedData> for (Slot, &'a [StakeReward], I
fn len(&self) -> usize { fn len(&self) -> usize {
self.1.len() self.1.len()
} }
fn include_slot_in_hash(&self) -> IncludeSlotInHash {
self.2
}
} }
#[cfg(feature = "dev-context-only-utils")] #[cfg(feature = "dev-context-only-utils")]
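
With include_slot_in_hash gone from the StorableAccounts trait, the tuple impls shrink from three elements to two; for stake rewards, (Slot, &[StakeReward]) is now the storable form handed to the store_* APIs. An illustrative helper under the same crate-path assumptions (this function is not from the codebase):

use solana_accounts_db::{stake_rewards::StakeReward, storable_accounts::StorableAccounts};
use solana_sdk::clock::Slot;

fn storable_len(slot: Slot, rewards: &[StakeReward]) -> usize {
    // Callers that used to build (slot, rewards, include_slot_in_hash) now
    // build the two-element tuple, which implements StorableAccounts directly.
    let storable = (slot, rewards);
    storable.len()
}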

View File

@ -1,9 +1,6 @@
//! trait for abstracting underlying storage of pubkey and account pairs to be written //! trait for abstracting underlying storage of pubkey and account pairs to be written
use { use {
crate::{ crate::{account_storage::meta::StoredAccountMeta, accounts_hash::AccountHash},
account_storage::meta::StoredAccountMeta, accounts_db::IncludeSlotInHash,
accounts_hash::AccountHash,
},
solana_sdk::{account::ReadableAccount, clock::Slot, pubkey::Pubkey}, solana_sdk::{account::ReadableAccount, clock::Slot, pubkey::Pubkey},
}; };
@ -37,8 +34,6 @@ pub trait StorableAccounts<'a, T: ReadableAccount + Sync>: Sync {
fn contains_multiple_slots(&self) -> bool { fn contains_multiple_slots(&self) -> bool {
false false
} }
/// true iff hashing these accounts should include the slot
fn include_slot_in_hash(&self) -> IncludeSlotInHash;
/// true iff the impl can provide hash and write_version /// true iff the impl can provide hash and write_version
/// Otherwise, hash and write_version have to be provided separately to store functions. /// Otherwise, hash and write_version have to be provided separately to store functions.
@ -71,8 +66,6 @@ pub struct StorableAccountsMovingSlots<'a, T: ReadableAccount + Sync> {
pub target_slot: Slot, pub target_slot: Slot,
/// slot where accounts are currently stored /// slot where accounts are currently stored
pub old_slot: Slot, pub old_slot: Slot,
/// This is temporarily here until feature activation.
pub include_slot_in_hash: IncludeSlotInHash,
} }
impl<'a, T: ReadableAccount + Sync> StorableAccounts<'a, T> for StorableAccountsMovingSlots<'a, T> { impl<'a, T: ReadableAccount + Sync> StorableAccounts<'a, T> for StorableAccountsMovingSlots<'a, T> {
@ -92,16 +85,9 @@ impl<'a, T: ReadableAccount + Sync> StorableAccounts<'a, T> for StorableAccounts
fn len(&self) -> usize { fn len(&self) -> usize {
self.accounts.len() self.accounts.len()
} }
fn include_slot_in_hash(&self) -> IncludeSlotInHash {
self.include_slot_in_hash
}
} }
/// The last parameter exists until this feature is activated: impl<'a, T: ReadableAccount + Sync> StorableAccounts<'a, T> for (Slot, &'a [(&'a Pubkey, &'a T)]) {
/// ignore slot when calculating an account hash #28420
impl<'a, T: ReadableAccount + Sync> StorableAccounts<'a, T>
for (Slot, &'a [(&'a Pubkey, &'a T)], IncludeSlotInHash)
{
fn pubkey(&self, index: usize) -> &Pubkey { fn pubkey(&self, index: usize) -> &Pubkey {
self.1[index].0 self.1[index].0
} }
@ -118,17 +104,10 @@ impl<'a, T: ReadableAccount + Sync> StorableAccounts<'a, T>
fn len(&self) -> usize { fn len(&self) -> usize {
self.1.len() self.1.len()
} }
fn include_slot_in_hash(&self) -> IncludeSlotInHash {
self.2
}
} }
#[allow(dead_code)] #[allow(dead_code)]
/// The last parameter exists until this feature is activated: impl<'a, T: ReadableAccount + Sync> StorableAccounts<'a, T> for (Slot, &'a [&'a (Pubkey, T)]) {
/// ignore slot when calculating an account hash #28420
impl<'a, T: ReadableAccount + Sync> StorableAccounts<'a, T>
for (Slot, &'a [&'a (Pubkey, T)], IncludeSlotInHash)
{
fn pubkey(&self, index: usize) -> &Pubkey { fn pubkey(&self, index: usize) -> &Pubkey {
&self.1[index].0 &self.1[index].0
} }
@ -145,16 +124,9 @@ impl<'a, T: ReadableAccount + Sync> StorableAccounts<'a, T>
fn len(&self) -> usize { fn len(&self) -> usize {
self.1.len() self.1.len()
} }
fn include_slot_in_hash(&self) -> IncludeSlotInHash {
self.2
}
} }
/// The last parameter exists until this feature is activated: impl<'a> StorableAccounts<'a, StoredAccountMeta<'a>> for (Slot, &'a [&'a StoredAccountMeta<'a>]) {
/// ignore slot when calculating an account hash #28420
impl<'a> StorableAccounts<'a, StoredAccountMeta<'a>>
for (Slot, &'a [&'a StoredAccountMeta<'a>], IncludeSlotInHash)
{
fn pubkey(&self, index: usize) -> &Pubkey { fn pubkey(&self, index: usize) -> &Pubkey {
self.account(index).pubkey() self.account(index).pubkey()
} }
@ -171,9 +143,6 @@ impl<'a> StorableAccounts<'a, StoredAccountMeta<'a>>
fn len(&self) -> usize { fn len(&self) -> usize {
self.1.len() self.1.len()
} }
fn include_slot_in_hash(&self) -> IncludeSlotInHash {
self.2
}
fn has_hash_and_write_version(&self) -> bool { fn has_hash_and_write_version(&self) -> bool {
true true
} }
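To make the new call shape concrete, here is a minimal test-style sketch of the simplified two-element tuple now that the IncludeSlotInHash element is gone. The pubkey/account values and the storable_accounts import path are illustrative assumptions, not taken from this commit:

use solana_accounts_db::storable_accounts::StorableAccounts;
use solana_sdk::{account::AccountSharedData, clock::Slot, pubkey::Pubkey};

fn two_element_tuple_sketch() {
    let slot: Slot = 42;
    let pubkey = Pubkey::new_unique();
    let account = AccountSharedData::new(1_000, 0, &Pubkey::default());
    let accounts = [(&pubkey, &account)];
    // (Slot, &[(&Pubkey, &T)]) now implements StorableAccounts directly.
    let storable = (slot, &accounts[..]);
    assert_eq!(storable.target_slot(), 42);
    assert_eq!(storable.len(), 1);
    assert_eq!(storable.pubkey(0), &pubkey);
}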
@ -190,7 +159,6 @@ pub struct StorableAccountsBySlot<'a> {
target_slot: Slot, target_slot: Slot,
/// each element is (source slot, accounts moving FROM source slot) /// each element is (source slot, accounts moving FROM source slot)
slots_and_accounts: &'a [(Slot, &'a [&'a StoredAccountMeta<'a>])], slots_and_accounts: &'a [(Slot, &'a [&'a StoredAccountMeta<'a>])],
include_slot_in_hash: IncludeSlotInHash,
/// This is calculated based off slots_and_accounts. /// This is calculated based off slots_and_accounts.
/// cumulative offset of all account slices prior to this one /// cumulative offset of all account slices prior to this one
@ -209,7 +177,6 @@ impl<'a> StorableAccountsBySlot<'a> {
pub fn new( pub fn new(
target_slot: Slot, target_slot: Slot,
slots_and_accounts: &'a [(Slot, &'a [&'a StoredAccountMeta<'a>])], slots_and_accounts: &'a [(Slot, &'a [&'a StoredAccountMeta<'a>])],
include_slot_in_hash: IncludeSlotInHash,
) -> Self { ) -> Self {
let mut cumulative_len = 0usize; let mut cumulative_len = 0usize;
let mut starting_offsets = Vec::with_capacity(slots_and_accounts.len()); let mut starting_offsets = Vec::with_capacity(slots_and_accounts.len());
@ -227,7 +194,6 @@ impl<'a> StorableAccountsBySlot<'a> {
target_slot, target_slot,
slots_and_accounts, slots_and_accounts,
starting_offsets, starting_offsets,
include_slot_in_hash,
contains_multiple_slots, contains_multiple_slots,
len: cumulative_len, len: cumulative_len,
} }
@ -252,8 +218,6 @@ impl<'a> StorableAccountsBySlot<'a> {
} }
} }
/// The last parameter exists until this feature is activated:
/// ignore slot when calculating an account hash #28420
impl<'a> StorableAccounts<'a, StoredAccountMeta<'a>> for StorableAccountsBySlot<'a> { impl<'a> StorableAccounts<'a, StoredAccountMeta<'a>> for StorableAccountsBySlot<'a> {
fn pubkey(&self, index: usize) -> &Pubkey { fn pubkey(&self, index: usize) -> &Pubkey {
self.account(index).pubkey() self.account(index).pubkey()
@ -275,9 +239,6 @@ impl<'a> StorableAccounts<'a, StoredAccountMeta<'a>> for StorableAccountsBySlot<
fn contains_multiple_slots(&self) -> bool { fn contains_multiple_slots(&self) -> bool {
self.contains_multiple_slots self.contains_multiple_slots
} }
fn include_slot_in_hash(&self) -> IncludeSlotInHash {
self.include_slot_in_hash
}
fn has_hash_and_write_version(&self) -> bool { fn has_hash_and_write_version(&self) -> bool {
true true
} }
@ -292,12 +253,7 @@ impl<'a> StorableAccounts<'a, StoredAccountMeta<'a>> for StorableAccountsBySlot<
/// this tuple contains a single different source slot that applies to all accounts /// this tuple contains a single different source slot that applies to all accounts
/// accounts are StoredAccountMeta /// accounts are StoredAccountMeta
impl<'a> StorableAccounts<'a, StoredAccountMeta<'a>> impl<'a> StorableAccounts<'a, StoredAccountMeta<'a>>
for ( for (Slot, &'a [&'a StoredAccountMeta<'a>], Slot)
Slot,
&'a [&'a StoredAccountMeta<'a>],
IncludeSlotInHash,
Slot,
)
{ {
fn pubkey(&self, index: usize) -> &Pubkey { fn pubkey(&self, index: usize) -> &Pubkey {
self.account(index).pubkey() self.account(index).pubkey()
@ -307,7 +263,7 @@ impl<'a> StorableAccounts<'a, StoredAccountMeta<'a>>
} }
fn slot(&self, _index: usize) -> Slot { fn slot(&self, _index: usize) -> Slot {
// same other slot for all accounts // same other slot for all accounts
self.3 self.2
} }
fn target_slot(&self) -> Slot { fn target_slot(&self) -> Slot {
self.0 self.0
@ -315,9 +271,6 @@ impl<'a> StorableAccounts<'a, StoredAccountMeta<'a>>
fn len(&self) -> usize { fn len(&self) -> usize {
self.1.len() self.1.len()
} }
fn include_slot_in_hash(&self) -> IncludeSlotInHash {
self.2
}
fn has_hash_and_write_version(&self) -> bool { fn has_hash_and_write_version(&self) -> bool {
true true
} }
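One subtlety in the impl above: the tuple is still three elements long, but its last element is now the shared source slot rather than an IncludeSlotInHash value (the slot used to sit at position 3, so slot(index) now reads self.2). A rough sketch of the new shape, assuming a non-empty metas: Vec<&StoredAccountMeta> and both slots are already in scope:

// (target_slot, accounts, source_slot): destination first, shared origin last.
let test3 = (target_slot, &metas[..], source_slot);
assert_eq!(test3.target_slot(), target_slot); // where the accounts are being written
assert_eq!(test3.slot(0), source_slot); // where they currently live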
@ -335,7 +288,6 @@ pub mod tests {
super::*, super::*,
crate::{ crate::{
account_storage::meta::{AccountMeta, StoredAccountMeta, StoredMeta}, account_storage::meta::{AccountMeta, StoredAccountMeta, StoredMeta},
accounts_db::INCLUDE_SLOT_IN_HASH_TESTS,
append_vec::AppendVecStoredAccountMeta, append_vec::AppendVecStoredAccountMeta,
}, },
solana_sdk::{ solana_sdk::{
@ -355,7 +307,6 @@ pub mod tests {
assert_eq!(a.target_slot(), b.target_slot()); assert_eq!(a.target_slot(), b.target_slot());
assert_eq!(a.len(), b.len()); assert_eq!(a.len(), b.len());
assert_eq!(a.is_empty(), b.is_empty()); assert_eq!(a.is_empty(), b.is_empty());
assert_eq!(a.include_slot_in_hash(), b.include_slot_in_hash());
(0..a.len()).for_each(|i| { (0..a.len()).for_each(|i| {
assert_eq!(a.pubkey(i), b.pubkey(i)); assert_eq!(a.pubkey(i), b.pubkey(i));
assert!(accounts_equal(a.account(i), b.account(i))); assert!(accounts_equal(a.account(i), b.account(i)));
@ -394,12 +345,7 @@ pub mod tests {
hash: &hash, hash: &hash,
}); });
let test3 = ( let test3 = (slot, &vec![&stored_account, &stored_account][..], slot);
slot,
&vec![&stored_account, &stored_account][..],
INCLUDE_SLOT_IN_HASH_TESTS,
slot,
);
assert!(!test3.contains_multiple_slots()); assert!(!test3.contains_multiple_slots());
} }
@ -466,33 +412,19 @@ pub mod tests {
three.push(raw2); three.push(raw2);
four_pubkey_and_account_value.push(raw4); four_pubkey_and_account_value.push(raw4);
}); });
let test2 = (target_slot, &two[..], INCLUDE_SLOT_IN_HASH_TESTS); let test2 = (target_slot, &two[..]);
let test4 = ( let test4 = (target_slot, &four_pubkey_and_account_value[..]);
target_slot,
&four_pubkey_and_account_value[..],
INCLUDE_SLOT_IN_HASH_TESTS,
);
let source_slot = starting_slot % max_slots; let source_slot = starting_slot % max_slots;
let test3 = ( let test3 = (target_slot, &three[..], source_slot);
target_slot,
&three[..],
INCLUDE_SLOT_IN_HASH_TESTS,
source_slot,
);
let old_slot = starting_slot; let old_slot = starting_slot;
let test_moving_slots = StorableAccountsMovingSlots { let test_moving_slots = StorableAccountsMovingSlots {
accounts: &two[..], accounts: &two[..],
target_slot, target_slot,
old_slot, old_slot,
include_slot_in_hash: INCLUDE_SLOT_IN_HASH_TESTS,
}; };
let for_slice = [(old_slot, &three[..])]; let for_slice = [(old_slot, &three[..])];
let test_moving_slots2 = StorableAccountsBySlot::new( let test_moving_slots2 = StorableAccountsBySlot::new(target_slot, &for_slice);
target_slot,
&for_slice,
INCLUDE_SLOT_IN_HASH_TESTS,
);
compare(&test2, &test3); compare(&test2, &test3);
compare(&test2, &test4); compare(&test2, &test4);
compare(&test2, &test_moving_slots); compare(&test2, &test_moving_slots);
@ -594,11 +526,7 @@ pub mod tests {
}) })
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let storable = StorableAccountsBySlot::new( let storable = StorableAccountsBySlot::new(99, &slots_and_accounts[..]);
99,
&slots_and_accounts[..],
INCLUDE_SLOT_IN_HASH_TESTS,
);
assert!(storable.has_hash_and_write_version()); assert!(storable.has_hash_and_write_version());
assert_eq!(99, storable.target_slot()); assert_eq!(99, storable.target_slot());
assert_eq!(entries0 != entries, storable.contains_multiple_slots()); assert_eq!(entries0 != entries, storable.contains_multiple_slots());
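The multi-slot helper follows the same pattern: StorableAccountsBySlot::new is down to two arguments. A short sketch, again assuming slices of &StoredAccountMeta built as in the tests above:

// Each element pairs a source slot with the accounts moving FROM that slot.
let slots_and_accounts = [(source_slot, &metas[..])];
let storable = StorableAccountsBySlot::new(target_slot, &slots_and_accounts[..]);
assert_eq!(storable.target_slot(), target_slot);
assert!(!storable.contains_multiple_slots()); // only one source slot here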

View File

@ -400,7 +400,6 @@ impl AccountsHashVerifier {
epoch_schedule: &accounts_package.epoch_schedule, epoch_schedule: &accounts_package.epoch_schedule,
rent_collector: &accounts_package.rent_collector, rent_collector: &accounts_package.rent_collector,
store_detailed_debug_info_on_failure: false, store_detailed_debug_info_on_failure: false,
include_slot_in_hash: accounts_package.include_slot_in_hash,
}; };
let slot = accounts_package.slot; let slot = accounts_package.slot;
@ -480,7 +479,6 @@ impl AccountsHashVerifier {
epoch_schedule: &accounts_package.epoch_schedule, epoch_schedule: &accounts_package.epoch_schedule,
rent_collector: &accounts_package.rent_collector, rent_collector: &accounts_package.rent_collector,
store_detailed_debug_info_on_failure: false, store_detailed_debug_info_on_failure: false,
include_slot_in_hash: accounts_package.include_slot_in_hash,
}; };
let (incremental_accounts_hash, measure_hash_us) = measure_us!( let (incremental_accounts_hash, measure_hash_us) = measure_us!(

View File

@ -5,9 +5,7 @@ use {
crate::snapshot_utils::create_tmp_accounts_dir_for_tests, crate::snapshot_utils::create_tmp_accounts_dir_for_tests,
log::*, log::*,
solana_accounts_db::{ solana_accounts_db::{
accounts_db::{ accounts_db::{AccountShrinkThreshold, CalcAccountsHashDataSource},
AccountShrinkThreshold, CalcAccountsHashDataSource, INCLUDE_SLOT_IN_HASH_TESTS,
},
accounts_hash::CalcAccountsHashConfig, accounts_hash::CalcAccountsHashConfig,
accounts_index::AccountSecondaryIndexes, accounts_index::AccountSecondaryIndexes,
epoch_accounts_hash::EpochAccountsHash, epoch_accounts_hash::EpochAccountsHash,
@ -331,7 +329,6 @@ fn test_epoch_accounts_hash_basic(test_environment: TestEnvironment) {
epoch_schedule: bank.epoch_schedule(), epoch_schedule: bank.epoch_schedule(),
rent_collector: bank.rent_collector(), rent_collector: bank.rent_collector(),
store_detailed_debug_info_on_failure: false, store_detailed_debug_info_on_failure: false,
include_slot_in_hash: INCLUDE_SLOT_IN_HASH_TESTS,
}, },
) )
.unwrap(); .unwrap();

View File

@ -11,7 +11,7 @@ use {
accounts::{AccountAddressFilter, Accounts}, accounts::{AccountAddressFilter, Accounts},
accounts_db::{ accounts_db::{
test_utils::create_test_accounts, AccountShrinkThreshold, test_utils::create_test_accounts, AccountShrinkThreshold,
VerifyAccountsHashAndLamportsConfig, INCLUDE_SLOT_IN_HASH_TESTS, VerifyAccountsHashAndLamportsConfig,
}, },
accounts_index::{AccountSecondaryIndexes, ScanConfig}, accounts_index::{AccountSecondaryIndexes, ScanConfig},
ancestors::Ancestors, ancestors::Ancestors,
@ -118,7 +118,6 @@ fn test_accounts_hash_bank_hash(bencher: &mut Bencher) {
ignore_mismatch: false, ignore_mismatch: false,
store_detailed_debug_info: false, store_detailed_debug_info: false,
use_bg_thread_pool: false, use_bg_thread_pool: false,
include_slot_in_hash: INCLUDE_SLOT_IN_HASH_TESTS,
} }
)) ))
}); });

View File

@ -370,7 +370,6 @@ impl SnapshotRequestHandler {
epoch_schedule: snapshot_root_bank.epoch_schedule(), epoch_schedule: snapshot_root_bank.epoch_schedule(),
rent_collector: snapshot_root_bank.rent_collector(), rent_collector: snapshot_root_bank.rent_collector(),
store_detailed_debug_info_on_failure: false, store_detailed_debug_info_on_failure: false,
include_slot_in_hash: snapshot_root_bank.include_slot_in_hash(),
}, },
) )
.unwrap(); .unwrap();

View File

@ -76,7 +76,7 @@ use {
}, },
accounts_db::{ accounts_db::{
AccountShrinkThreshold, AccountStorageEntry, AccountsDbConfig, AccountShrinkThreshold, AccountStorageEntry, AccountsDbConfig,
CalcAccountsHashDataSource, IncludeSlotInHash, VerifyAccountsHashAndLamportsConfig, CalcAccountsHashDataSource, VerifyAccountsHashAndLamportsConfig,
ACCOUNTS_DB_CONFIG_FOR_BENCHMARKS, ACCOUNTS_DB_CONFIG_FOR_TESTING, ACCOUNTS_DB_CONFIG_FOR_BENCHMARKS, ACCOUNTS_DB_CONFIG_FOR_TESTING,
}, },
accounts_hash::{AccountsHash, CalcAccountsHashConfig, HashStats, IncrementalAccountsHash}, accounts_hash::{AccountsHash, CalcAccountsHashConfig, HashStats, IncrementalAccountsHash},
@ -3305,7 +3305,6 @@ impl Bank {
// because credits observed has changed // because credits observed has changed
let now = Instant::now(); let now = Instant::now();
let slot = self.slot(); let slot = self.slot();
let include_slot_in_hash = self.include_slot_in_hash();
self.stakes_cache.update_stake_accounts( self.stakes_cache.update_stake_accounts(
thread_pool, thread_pool,
stake_rewards, stake_rewards,
@ -3313,11 +3312,9 @@ impl Bank {
); );
assert!(!self.freeze_started()); assert!(!self.freeze_started());
thread_pool.install(|| { thread_pool.install(|| {
stake_rewards.par_chunks(512).for_each(|chunk| { stake_rewards
self.rc .par_chunks(512)
.accounts .for_each(|chunk| self.rc.accounts.store_accounts_cached((slot, chunk)))
.store_accounts_cached((slot, chunk, include_slot_in_hash))
})
}); });
metrics metrics
.store_stake_accounts_us .store_stake_accounts_us
@ -3350,7 +3347,7 @@ impl Bank {
} }
} }
self.store_accounts((self.slot(), stake_rewards, self.include_slot_in_hash())); self.store_accounts((self.slot(), stake_rewards));
stake_rewards stake_rewards
.iter() .iter()
.map(|stake_reward| stake_reward.stake_reward_info.lamports) .map(|stake_reward| stake_reward.stake_reward_info.lamports)
@ -3373,7 +3370,7 @@ impl Bank {
.enumerate() .enumerate()
.map(|(i, account)| (&vote_account_rewards.rewards[i].0, account)) .map(|(i, account)| (&vote_account_rewards.rewards[i].0, account))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
self.store_accounts((self.slot(), &to_store[..], self.include_slot_in_hash())); self.store_accounts((self.slot(), &to_store[..]));
}); });
metrics metrics
@ -3838,16 +3835,6 @@ impl Bank {
// Bootstrap validator collects fees until `new_from_parent` is called. // Bootstrap validator collects fees until `new_from_parent` is called.
self.fee_rate_governor = genesis_config.fee_rate_governor.clone(); self.fee_rate_governor = genesis_config.fee_rate_governor.clone();
// Make sure to activate the account_hash_ignore_slot feature
// before calculating any account hashes.
if genesis_config
.accounts
.iter()
.any(|(pubkey, _)| pubkey == &feature_set::account_hash_ignore_slot::id())
{
self.activate_feature(&feature_set::account_hash_ignore_slot::id());
}
for (pubkey, account) in genesis_config.accounts.iter() { for (pubkey, account) in genesis_config.accounts.iter() {
assert!( assert!(
self.get_account(pubkey).is_none(), self.get_account(pubkey).is_none(),
@ -5594,7 +5581,6 @@ impl Bank {
&self.rent_collector, &self.rent_collector,
&durable_nonce, &durable_nonce,
lamports_per_signature, lamports_per_signature,
self.include_slot_in_hash(),
); );
let rent_debits = self.collect_rent(&execution_results, loaded_txs); let rent_debits = self.collect_rent(&execution_results, loaded_txs);
@ -6081,11 +6067,8 @@ impl Bank {
if !accounts_to_store.is_empty() { if !accounts_to_store.is_empty() {
// TODO: Maybe do not call `store_accounts()` here. Instead return `accounts_to_store` // TODO: Maybe do not call `store_accounts()` here. Instead return `accounts_to_store`
// and have `collect_rent_in_partition()` perform all the stores. // and have `collect_rent_in_partition()` perform all the stores.
let (_, measure) = measure!(self.store_accounts(( let (_, measure) =
self.slot(), measure!(self.store_accounts((self.slot(), &accounts_to_store[..],)));
&accounts_to_store[..],
self.include_slot_in_hash()
)));
time_storing_accounts_us += measure.as_us(); time_storing_accounts_us += measure.as_us();
} }
@ -6098,19 +6081,6 @@ impl Bank {
} }
} }
/// true if we should include the slot in account hash
/// This is governed by a feature.
pub(crate) fn include_slot_in_hash(&self) -> IncludeSlotInHash {
if self
.feature_set
.is_active(&feature_set::account_hash_ignore_slot::id())
{
IncludeSlotInHash::RemoveSlot
} else {
IncludeSlotInHash::IncludeSlot
}
}
/// convert 'partition' to a pubkey range and 'collect_rent_in_range' /// convert 'partition' to a pubkey range and 'collect_rent_in_range'
fn collect_rent_in_partition(&self, partition: Partition, metrics: &RentMetrics) { fn collect_rent_in_partition(&self, partition: Partition, metrics: &RentMetrics) {
let subrange_full = accounts_partition::pubkey_range_from_partition(partition); let subrange_full = accounts_partition::pubkey_range_from_partition(partition);
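The helper deleted above was the last consumer of the feature gate: with account_hash_ignore_slot active on mainnet-beta, the answer is always RemoveSlot, so the enum and all of its plumbing can go. A paraphrased before/after for callers; the before is shown as comments because IncludeSlotInHash no longer exists after this change, and a bank plus an accounts slice are assumed to be in scope:

// Before: every store consulted the gate and threaded the result through.
//     let include = if bank.feature_set.is_active(&feature_set::account_hash_ignore_slot::id()) {
//         IncludeSlotInHash::RemoveSlot
//     } else {
//         IncludeSlotInHash::IncludeSlot
//     };
//     bank.store_accounts((bank.slot(), accounts, include));
// After: the slot is never part of an account hash, so callers simply write:
bank.store_accounts((bank.slot(), accounts));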
@ -6620,11 +6590,7 @@ impl Bank {
pubkey: &Pubkey, pubkey: &Pubkey,
account: &T, account: &T,
) { ) {
self.store_accounts(( self.store_accounts((self.slot(), &[(pubkey, account)][..]))
self.slot(),
&[(pubkey, account)][..],
self.include_slot_in_hash(),
))
} }
pub fn store_accounts<'a, T: ReadableAccount + Sync + ZeroLamport + 'a>( pub fn store_accounts<'a, T: ReadableAccount + Sync + ZeroLamport + 'a>(
@ -7243,7 +7209,6 @@ impl Bank {
let cap = self.capitalization(); let cap = self.capitalization();
let epoch_schedule = self.epoch_schedule(); let epoch_schedule = self.epoch_schedule();
let rent_collector = self.rent_collector(); let rent_collector = self.rent_collector();
let include_slot_in_hash = self.include_slot_in_hash();
if config.run_in_background { if config.run_in_background {
let ancestors = ancestors.clone(); let ancestors = ancestors.clone();
let accounts = Arc::clone(accounts); let accounts = Arc::clone(accounts);
@ -7267,7 +7232,6 @@ impl Bank {
ignore_mismatch: config.ignore_mismatch, ignore_mismatch: config.ignore_mismatch,
store_detailed_debug_info: config.store_hash_raw_data_for_debug, store_detailed_debug_info: config.store_hash_raw_data_for_debug,
use_bg_thread_pool: true, use_bg_thread_pool: true,
include_slot_in_hash,
}, },
); );
accounts_ accounts_
@ -7293,7 +7257,6 @@ impl Bank {
ignore_mismatch: config.ignore_mismatch, ignore_mismatch: config.ignore_mismatch,
store_detailed_debug_info: config.store_hash_raw_data_for_debug, store_detailed_debug_info: config.store_hash_raw_data_for_debug,
use_bg_thread_pool: false, // fg is waiting for this to run, so we can use the fg thread pool use_bg_thread_pool: false, // fg is waiting for this to run, so we can use the fg thread pool
include_slot_in_hash,
}, },
); );
self.set_initial_accounts_hash_verification_completed(); self.set_initial_accounts_hash_verification_completed();
@ -7426,7 +7389,6 @@ impl Bank {
self.epoch_schedule(), self.epoch_schedule(),
&self.rent_collector, &self.rent_collector,
is_startup, is_startup,
self.include_slot_in_hash(),
) )
.1 .1
} }
@ -7535,7 +7497,6 @@ impl Bank {
self.epoch_schedule(), self.epoch_schedule(),
&self.rent_collector, &self.rent_collector,
is_startup, is_startup,
self.include_slot_in_hash(),
); );
if total_lamports != self.capitalization() { if total_lamports != self.capitalization() {
datapoint_info!( datapoint_info!(
@ -7561,7 +7522,6 @@ impl Bank {
self.epoch_schedule(), self.epoch_schedule(),
&self.rent_collector, &self.rent_collector,
is_startup, is_startup,
self.include_slot_in_hash(),
); );
} }
@ -7588,7 +7548,6 @@ impl Bank {
epoch_schedule: &self.epoch_schedule, epoch_schedule: &self.epoch_schedule,
rent_collector: &self.rent_collector, rent_collector: &self.rent_collector,
store_detailed_debug_info_on_failure: false, store_detailed_debug_info_on_failure: false,
include_slot_in_hash: self.include_slot_in_hash(),
}; };
let storages = self.get_snapshot_storages(Some(base_slot)); let storages = self.get_snapshot_storages(Some(base_slot));
let sorted_storages = SortedStorages::new(&storages); let sorted_storages = SortedStorages::new(&storages);

View File

@ -6555,25 +6555,25 @@ fn test_bank_hash_consistency() {
if bank.slot == 0 { if bank.slot == 0 {
assert_eq!( assert_eq!(
bank.hash().to_string(), bank.hash().to_string(),
"3kzRo3M5q9j47Dxfdp9ZeEXfUTA5rxVud7jRKuttHxFz" "3KE2bigpBiiMLGYNqmWkgbrQGSqMt5ccG6ED87CFCVpt"
); );
} }
if bank.slot == 32 { if bank.slot == 32 {
assert_eq!( assert_eq!(
bank.hash().to_string(), bank.hash().to_string(),
"bWPR5AQjsfhMypn1nLUjugmitbjHwV4rmnyTDFqCdv1" "FpNDsd21HXznXf6tRpMNiWhFyhZ4aCCECQm3gL4jGV22"
); );
} }
if bank.slot == 64 { if bank.slot == 64 {
assert_eq!( assert_eq!(
bank.hash().to_string(), bank.hash().to_string(),
"74hNYEVcvKU5JZwSNBYUcUWgf9Jw2Mag4b55967VPVjG" "7gDCoXPfFtKPALi212akhhQHEuLdAqyf7DE3yUN4bR2p"
); );
} }
if bank.slot == 128 { if bank.slot == 128 {
assert_eq!( assert_eq!(
bank.hash().to_string(), bank.hash().to_string(),
"BvYViztQiksU8vDvMqZYBo9Lc4cgjJEmijPpqktBRMkS" "6FREbeHdTNYnEXg4zobL2mqGfevukg75frkQJqKpYnk4"
); );
break; break;
} }
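The expected strings above change wholesale because the slot used to be folded into every account hash; once it is dropped from the preimage, every account hash, and therefore every bank hash built from them, differs. A hedged sketch of the old versus new call, matching the signatures visible in the test diff further down and assuming an account and pubkey are in scope:

// Old (pre-commit): the slot and an IncludeSlotInHash flag were inputs.
//     let old = AccountsDb::hash_account(slot, &account, &pubkey, INCLUDE_SLOT_IN_HASH_TESTS);
// New: the slot is simply not an input to the account hash.
let new_hash = AccountsDb::hash_account(&account, &pubkey);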
@ -12922,7 +12922,7 @@ fn test_epoch_credit_rewards_and_history_update() {
.map(|_| StakeReward::new_random()) .map(|_| StakeReward::new_random())
.collect::<Vec<_>>(); .collect::<Vec<_>>();
bank.store_accounts((bank.slot(), &stake_rewards[..], bank.include_slot_in_hash())); bank.store_accounts((bank.slot(), &stake_rewards[..]));
// Simulate rewards // Simulate rewards
let mut expected_rewards = 0; let mut expected_rewards = 0;

View File

@ -2310,7 +2310,6 @@ mod tests {
epoch_schedule: deserialized_bank.epoch_schedule(), epoch_schedule: deserialized_bank.epoch_schedule(),
rent_collector: deserialized_bank.rent_collector(), rent_collector: deserialized_bank.rent_collector(),
store_detailed_debug_info_on_failure: false, store_detailed_debug_info_on_failure: false,
include_slot_in_hash: bank.include_slot_in_hash(),
}, },
&SortedStorages::new(&other_incremental_snapshot_storages), &SortedStorages::new(&other_incremental_snapshot_storages),
HashStats::default(), HashStats::default(),

View File

@ -368,11 +368,7 @@ impl<'a> SnapshotMinimizer<'a> {
shrink_in_progress = Some(self.accounts_db().get_store_for_shrink(slot, aligned_total)); shrink_in_progress = Some(self.accounts_db().get_store_for_shrink(slot, aligned_total));
let new_storage = shrink_in_progress.as_ref().unwrap().new_storage(); let new_storage = shrink_in_progress.as_ref().unwrap().new_storage();
self.accounts_db().store_accounts_frozen( self.accounts_db().store_accounts_frozen(
( (slot, &accounts[..]),
slot,
&accounts[..],
solana_accounts_db::accounts_db::INCLUDE_SLOT_IN_HASH_IRRELEVANT_APPEND_VEC_OPERATION,
),
Some(hashes), Some(hashes),
new_storage, new_storage,
Some(Box::new(write_versions.into_iter())), Some(Box::new(write_versions.into_iter())),

View File

@ -8,7 +8,7 @@ use {
log::*, log::*,
solana_accounts_db::{ solana_accounts_db::{
accounts::Accounts, accounts::Accounts,
accounts_db::{AccountStorageEntry, IncludeSlotInHash, INCLUDE_SLOT_IN_HASH_TESTS}, accounts_db::AccountStorageEntry,
accounts_hash::{AccountsHash, AccountsHashKind}, accounts_hash::{AccountsHash, AccountsHashKind},
epoch_accounts_hash::EpochAccountsHash, epoch_accounts_hash::EpochAccountsHash,
rent_collector::RentCollector, rent_collector::RentCollector,
@ -36,7 +36,6 @@ pub struct AccountsPackage {
pub epoch_schedule: EpochSchedule, pub epoch_schedule: EpochSchedule,
pub rent_collector: RentCollector, pub rent_collector: RentCollector,
pub is_incremental_accounts_hash_feature_enabled: bool, pub is_incremental_accounts_hash_feature_enabled: bool,
pub include_slot_in_hash: IncludeSlotInHash,
/// Supplemental information needed for snapshots /// Supplemental information needed for snapshots
pub snapshot_info: Option<SupplementalSnapshotInfo>, pub snapshot_info: Option<SupplementalSnapshotInfo>,
@ -152,7 +151,6 @@ impl AccountsPackage {
epoch_schedule: *bank.epoch_schedule(), epoch_schedule: *bank.epoch_schedule(),
rent_collector: bank.rent_collector().clone(), rent_collector: bank.rent_collector().clone(),
is_incremental_accounts_hash_feature_enabled, is_incremental_accounts_hash_feature_enabled,
include_slot_in_hash: bank.include_slot_in_hash(),
snapshot_info, snapshot_info,
enqueued: Instant::now(), enqueued: Instant::now(),
} }
@ -172,7 +170,6 @@ impl AccountsPackage {
epoch_schedule: EpochSchedule::default(), epoch_schedule: EpochSchedule::default(),
rent_collector: RentCollector::default(), rent_collector: RentCollector::default(),
is_incremental_accounts_hash_feature_enabled: bool::default(), is_incremental_accounts_hash_feature_enabled: bool::default(),
include_slot_in_hash: INCLUDE_SLOT_IN_HASH_TESTS,
snapshot_info: Some(SupplementalSnapshotInfo { snapshot_info: Some(SupplementalSnapshotInfo {
bank_snapshot_dir: PathBuf::default(), bank_snapshot_dir: PathBuf::default(),
archive_format: ArchiveFormat::Tar, archive_format: ArchiveFormat::Tar,

View File

@ -3,7 +3,7 @@ use {
rand::{thread_rng, Rng}, rand::{thread_rng, Rng},
rayon::prelude::*, rayon::prelude::*,
solana_accounts_db::{ solana_accounts_db::{
accounts_db::{AccountsDb, LoadHint, INCLUDE_SLOT_IN_HASH_TESTS}, accounts_db::{AccountsDb, LoadHint},
ancestors::Ancestors, ancestors::Ancestors,
}, },
solana_sdk::{ solana_sdk::{
@ -60,14 +60,7 @@ fn test_shrink_and_clean() {
for (pubkey, account) in alive_accounts.iter_mut() { for (pubkey, account) in alive_accounts.iter_mut() {
account.checked_sub_lamports(1).unwrap(); account.checked_sub_lamports(1).unwrap();
accounts.store_cached( accounts.store_cached((current_slot, &[(&*pubkey, &*account)][..]), None);
(
current_slot,
&[(&*pubkey, &*account)][..],
INCLUDE_SLOT_IN_HASH_TESTS,
),
None,
);
} }
accounts.add_root(current_slot); accounts.add_root(current_slot);
accounts.flush_accounts_cache(true, Some(current_slot)); accounts.flush_accounts_cache(true, Some(current_slot));
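The updated test above also shows the whole cached-store round trip with the shortened tuple. A self-contained sketch of that pattern, using the test-only constructor and a hypothetical single account:

use solana_accounts_db::accounts_db::AccountsDb;
use solana_sdk::{account::AccountSharedData, pubkey::Pubkey};

fn cached_store_sketch() {
    let db = AccountsDb::new_single_for_tests();
    let slot = 0;
    let pubkey = Pubkey::new_unique();
    let account = AccountSharedData::new(1, 0, &Pubkey::default());
    // Two-element tuple: (slot, accounts); no hash flag anymore.
    db.store_cached((slot, &[(&pubkey, &account)][..]), None);
    db.add_root(slot);
    db.flush_accounts_cache(true, Some(slot));
}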
@ -133,16 +126,13 @@ fn test_bad_bank_hash() {
.iter() .iter()
.map(|idx| (&accounts_keys[*idx].0, &accounts_keys[*idx].1)) .map(|idx| (&accounts_keys[*idx].0, &accounts_keys[*idx].1))
.collect(); .collect();
db.store_cached( db.store_cached((some_slot, &account_refs[..]), None);
(some_slot, &account_refs[..], INCLUDE_SLOT_IN_HASH_TESTS),
None,
);
for pass in 0..2 { for pass in 0..2 {
for (key, account) in &account_refs { for (key, account) in &account_refs {
assert_eq!( assert_eq!(
db.load_account_hash(&ancestors, key, Some(some_slot), LoadHint::Unspecified) db.load_account_hash(&ancestors, key, Some(some_slot), LoadHint::Unspecified)
.unwrap(), .unwrap(),
AccountsDb::hash_account(some_slot, *account, key, INCLUDE_SLOT_IN_HASH_TESTS) AccountsDb::hash_account(*account, key)
); );
} }
if pass == 0 { if pass == 0 {