//! The `entry` module is a fundamental building block of Proof of History. It contains a
//! unique ID that is the hash of the Entry before it, plus the hash of the
//! transactions within it. Entries cannot be reordered, and each Entry's `num_hashes` field
//! represents an approximate amount of time since the last Entry was created.
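//!
//! A minimal sketch of how entries chain (illustrative only; it uses items defined later in
//! this module and is not compiled as a doctest):
//!
//! ```ignore
//! let start = Hash::default();
//! let tick = next_entry(&start, 1, vec![]);       // one hash, no transactions
//! assert!(tick.verify(&start));                   // reproduces the hash chain from `start`
//! let next = next_entry(&tick.hash, 1, vec![]);   // chained off the previous entry's hash
//! assert!(vec![tick, next].verify(&start));       // slice verification walks the whole chain
//! ```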
use crate::perf_libs;
use crate::poh::Poh;
use log::*;
use rayon::prelude::*;
use rayon::ThreadPool;
use serde::{Deserialize, Serialize};
use solana_measure::measure::Measure;
use solana_merkle_tree::MerkleTree;
use solana_metrics::*;
use solana_rayon_threadlimit::get_thread_count;
use solana_sdk::hash::Hash;
use solana_sdk::timing;
use solana_sdk::transaction::Transaction;
use std::cell::RefCell;
use std::sync::mpsc::{Receiver, Sender};
use std::sync::{Arc, Mutex};
use std::thread;
use std::thread::JoinHandle;
use std::time::Instant;

thread_local!(static PAR_THREAD_POOL: RefCell<ThreadPool> = RefCell::new(rayon::ThreadPoolBuilder::new()
                    .num_threads(get_thread_count())
                    .build()
                    .unwrap()));

pub type EntrySender = Sender<Vec<Entry>>;
pub type EntryReceiver = Receiver<Vec<Entry>>;

/// Each Entry contains three pieces of data. The `num_hashes` field is the number
/// of hashes performed since the previous entry. The `hash` field is the result
/// of hashing `hash` from the previous entry `num_hashes` times. The `transactions`
/// field points to Transactions that took place shortly before `hash` was generated.
///
/// If you divide `num_hashes` by the amount of time it takes to generate a new hash, you
/// get a duration estimate since the last Entry. Since processing power increases
/// over time, one should expect the duration `num_hashes` represents to decrease proportionally.
/// An upper bound on Duration can be estimated by assuming each hash was generated by the
/// world's fastest processor at the time the entry was recorded. Or said another way, it
/// is physically not possible for a shorter duration to have occurred if one assumes the
/// hash was computed by the world's fastest processor at that time. The hash chain is both
/// a Verifiable Delay Function (VDF) and a Proof of Work (not to be confused with Proof of
/// Work consensus!)
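///
/// As an illustrative (hypothetical) calculation: if a reference machine performs roughly
/// 2,000,000 hashes per second, an Entry with `num_hashes = 10_000` represents about
/// 10_000 / 2_000_000 = 5 milliseconds, and no machine slower than the fastest hasher of
/// the day could have produced it in less time.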
#[derive(Serialize, Deserialize, Debug, Default, PartialEq, Eq, Clone)]
pub struct Entry {
    /// The number of hashes since the previous Entry ID.
    pub num_hashes: u64,

    /// The SHA-256 hash `num_hashes` after the previous Entry ID.
    pub hash: Hash,

    /// An unordered list of transactions that were observed before the Entry ID was
    /// generated. They may have been observed before a previous Entry ID but were
    /// pushed back into this list to ensure deterministic interpretation of the ledger.
    pub transactions: Vec<Transaction>,
}

impl Entry {
    /// Creates the next Entry `num_hashes` after `start_hash`.
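    ///
    /// A small usage sketch (illustrative, not compiled as a doctest; `transactions` stands
    /// for any `Vec<Transaction>`):
    ///
    /// ```ignore
    /// let prev = Hash::default();
    /// let noop = Entry::new(&prev, 0, vec![]);         // no work: hash == prev, num_hashes == 0
    /// let tick = Entry::new(&prev, 1, vec![]);         // a tick: one hash, no transactions
    /// let entry = Entry::new(&prev, 0, transactions);  // transactions force at least one hash
    /// ```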
    pub fn new(prev_hash: &Hash, num_hashes: u64, transactions: Vec<Transaction>) -> Self {
        if num_hashes == 0 && transactions.is_empty() {
            Entry {
                num_hashes: 0,
                hash: *prev_hash,
                transactions,
            }
        } else if num_hashes == 0 {
            // If you passed in transactions, but passed in num_hashes == 0, then
            // next_hash will generate the next hash and set num_hashes == 1
            let hash = next_hash(prev_hash, 1, &transactions);
            Entry {
                num_hashes: 1,
                hash,
                transactions,
            }
        } else {
            // Otherwise, the next Entry `num_hashes` after `start_hash`.
            // If you wanted a tick for instance, then pass in num_hashes = 1
            // and transactions = empty
            let hash = next_hash(prev_hash, num_hashes, &transactions);
            Entry {
                num_hashes,
                hash,
                transactions,
            }
        }
    }
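
    /// Creates an Entry from `start_hash` and `num_hashes`, then advances `start_hash` to the
    /// new entry's hash and resets `num_hashes` to 0 so the caller can keep building a chain.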
    pub fn new_mut(
        start_hash: &mut Hash,
        num_hashes: &mut u64,
        transactions: Vec<Transaction>,
    ) -> Self {
        let entry = Self::new(start_hash, *num_hashes, transactions);
        *start_hash = entry.hash;
        *num_hashes = 0;

        entry
    }

    #[cfg(test)]
    pub fn new_tick(num_hashes: u64, hash: &Hash) -> Self {
        Entry {
            num_hashes,
            hash: *hash,
            transactions: vec![],
        }
    }

    /// Verifies that `self.hash` is the result of hashing `start_hash` `self.num_hashes` times.
    /// If the entry carries transactions, their hash is mixed into the final hash as well.
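    ///
    /// Illustrative sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// let start = Hash::default();
    /// let entry = Entry::new(&start, 1, vec![]);
    /// assert!(entry.verify(&start));        // re-deriving the chain from `start` matches
    /// assert!(!entry.verify(&entry.hash));  // any other starting hash fails
    /// ```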
    pub fn verify(&self, start_hash: &Hash) -> bool {
        let ref_hash = next_hash(start_hash, self.num_hashes, &self.transactions);
        if self.hash != ref_hash {
            warn!(
                "next_hash is invalid expected: {:?} actual: {:?}",
                self.hash, ref_hash
            );
            return false;
        }
        true
    }

    pub fn is_tick(&self) -> bool {
        self.transactions.is_empty()
    }
}
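
/// Computes a single hash committing to a slice of transactions: the Merkle root of the
/// transactions' signatures (each signature already signs its transaction's message, so
/// hashing the signatures is sufficient). An empty slice yields `Hash::default()`.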
pub fn hash_transactions(transactions: &[Transaction]) -> Hash {
    // a hash of a slice of transactions only needs to hash the signatures
    let signatures: Vec<_> = transactions
        .iter()
        .flat_map(|tx| tx.signatures.iter())
        .collect();
    let merkle_tree = MerkleTree::new(&signatures);
    if let Some(root_hash) = merkle_tree.get_root() {
        *root_hash
    } else {
        Hash::default()
    }
}

/// Creates the hash `num_hashes` after `start_hash`. If the slice of transactions is not
/// empty, the final hash also mixes in `hash_transactions(transactions)`. If `num_hashes`
/// is zero and there is no transaction data, `start_hash` is returned unchanged.
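///
/// Illustrative sketch (not compiled as a doctest; `txs` stands for any non-empty `&[Transaction]`):
///
/// ```ignore
/// let h0 = Hash::default();
/// assert_eq!(next_hash(&h0, 0, &[]), h0);  // nothing to do
/// let h1 = next_hash(&h0, 1, &[]);         // one tick hash after h0
/// let h2 = next_hash(&h0, 4, txs);         // three plain hashes, then one that mixes in the txs' hash
/// ```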
pub fn next_hash(start_hash: &Hash, num_hashes: u64, transactions: &[Transaction]) -> Hash {
    if num_hashes == 0 && transactions.is_empty() {
        return *start_hash;
    }

    let mut poh = Poh::new(*start_hash, None);
    poh.hash(num_hashes.saturating_sub(1));
    if transactions.is_empty() {
        poh.tick().unwrap().hash
    } else {
        poh.record(hash_transactions(transactions)).unwrap().hash
    }
}
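
/// Intermediate state for slice verification. On the GPU path, `start_verify` launches a
/// background thread that recomputes the bulk of each entry's hash chain; `finish_verify`
/// joins that thread and applies the final tick/record step per entry on the CPU before
/// comparing against the recorded hashes. On the CPU-only path the result is computed
/// eagerly and stored in `verified`.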
pub struct EntryVerifyState {
    thread_h: Option<JoinHandle<u64>>,
    hashes: Option<Arc<Mutex<Vec<Hash>>>>,
    verified: bool,
    tx_hashes: Vec<Option<Hash>>,
    start_time_ms: u64,
}

impl EntryVerifyState {
    pub fn finish_verify(&mut self, entries: &[Entry]) -> bool {
        if self.hashes.is_some() {
            let gpu_time_ms = self.thread_h.take().unwrap().join().unwrap();

            let mut verify_check_time = Measure::start("verify_check");
            let hashes = self.hashes.take().expect("hashes.as_ref");
            let hashes = Arc::try_unwrap(hashes)
                .expect("unwrap Arc")
                .into_inner()
                .expect("into_inner");
            let res = PAR_THREAD_POOL.with(|thread_pool| {
                thread_pool.borrow().install(|| {
                    hashes
                        .into_par_iter()
                        .zip(&self.tx_hashes)
                        .zip(entries)
                        .all(|((hash, tx_hash), answer)| {
                            if answer.num_hashes == 0 {
                                hash == answer.hash
                            } else {
                                let mut poh = Poh::new(hash, None);
                                if let Some(mixin) = tx_hash {
                                    poh.record(*mixin).unwrap().hash == answer.hash
                                } else {
                                    poh.tick().unwrap().hash == answer.hash
                                }
                            }
                        })
                })
            });
            verify_check_time.stop();
            inc_new_counter_warn!(
                "entry_verify-duration",
                (gpu_time_ms + verify_check_time.as_ms() + self.start_time_ms) as usize
            );
            res
        } else {
            self.verified
        }
    }
}

// an EntrySlice is a slice of Entries
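/// Extension trait letting an `[Entry]` slice verify itself as one PoH chain.
///
/// Illustrative sketch (not compiled as a doctest):
///
/// ```ignore
/// let start = Hash::default();
/// let ticks = create_ticks(4, start);
/// assert!(ticks.verify(&start)); // checks every entry against its predecessor's hash
/// ```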
pub trait EntrySlice {
    /// Verifies the hashes and counts of a slice of entries are all consistent.
    fn verify_cpu(&self, start_hash: &Hash) -> EntryVerifyState;
    fn start_verify(&self, start_hash: &Hash) -> EntryVerifyState;
    fn verify(&self, start_hash: &Hash) -> bool;
}

impl EntrySlice for [Entry] {
    fn verify(&self, start_hash: &Hash) -> bool {
        self.start_verify(start_hash).finish_verify(self)
    }

    fn verify_cpu(&self, start_hash: &Hash) -> EntryVerifyState {
        let now = Instant::now();
        let genesis = [Entry {
            num_hashes: 0,
            hash: *start_hash,
            transactions: vec![],
        }];
        let entry_pairs = genesis.par_iter().chain(self).zip(self);
        let res = PAR_THREAD_POOL.with(|thread_pool| {
            thread_pool.borrow().install(|| {
                entry_pairs.all(|(x0, x1)| {
                    let r = x1.verify(&x0.hash);
                    if !r {
                        warn!(
                            "entry invalid!: x0: {:?}, x1: {:?} num txs: {}",
                            x0.hash,
                            x1.hash,
                            x1.transactions.len()
                        );
                    }
                    r
                })
            })
        });
        inc_new_counter_warn!(
            "entry_verify-duration",
            timing::duration_as_ms(&now.elapsed()) as usize
        );
        EntryVerifyState {
            thread_h: None,
            verified: res,
            hashes: None,
            tx_hashes: vec![],
            start_time_ms: 0,
        }
    }

    fn start_verify(&self, start_hash: &Hash) -> EntryVerifyState {
        let api = perf_libs::api();
        if api.is_none() {
            return self.verify_cpu(start_hash);
        }
        let api = api.unwrap();
        inc_new_counter_warn!("entry_verify-num_entries", self.len() as usize);

        let start = Instant::now();

        let genesis = [Entry {
            num_hashes: 0,
            hash: *start_hash,
            transactions: vec![],
        }];

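        // Seed hashes for the GPU: each entry's chain starts from the *previous* entry's
        // hash, so take the genesis hash followed by every entry's hash except the last.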
        let hashes: Vec<Hash> = genesis
            .iter()
            .chain(self)
            .map(|entry| entry.hash)
            .take(self.len())
            .collect();

        let num_hashes_vec: Vec<u64> = self
            .iter()
            .map(|entry| entry.num_hashes.saturating_sub(1))
            .collect();

        let length = self.len();
        let hashes = Arc::new(Mutex::new(hashes));
        let hashes_clone = hashes.clone();

        let gpu_verify_thread = thread::spawn(move || {
            let mut hashes = hashes_clone.lock().unwrap();
            let gpu_wait = Instant::now();
            let res;
            unsafe {
                res = (api.poh_verify_many)(
                    hashes.as_mut_ptr() as *mut u8,
                    num_hashes_vec.as_ptr(),
                    length,
                    1,
                );
            }
            if res != 0 {
                panic!("GPU PoH verify many failed");
            }
            inc_new_counter_warn!(
                "entry_verify-gpu_thread",
                timing::duration_as_ms(&gpu_wait.elapsed()) as usize
            );
            timing::duration_as_ms(&gpu_wait.elapsed())
        });

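        // While the GPU thread grinds through the hash chains, compute each entry's
        // transaction mixin (the Merkle root of its signatures) on the CPU thread pool.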
        let tx_hashes = PAR_THREAD_POOL.with(|thread_pool| {
            thread_pool.borrow().install(|| {
                self.into_par_iter()
                    .map(|entry| {
                        if entry.transactions.is_empty() {
                            None
                        } else {
                            Some(hash_transactions(&entry.transactions))
                        }
                    })
                    .collect()
            })
        });

        EntryVerifyState {
            thread_h: Some(gpu_verify_thread),
            verified: false,
            tx_hashes,
            start_time_ms: timing::duration_as_ms(&start.elapsed()),
            hashes: Some(hashes),
        }
    }
}
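
/// Creates the next Entry after `start`, then advances `start` to the new entry's hash so
/// repeated calls build a chain.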
pub fn next_entry_mut(start: &mut Hash, num_hashes: u64, transactions: Vec<Transaction>) -> Entry {
    let entry = Entry::new(&start, num_hashes, transactions);
    *start = entry.hash;
    entry
}
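
/// Creates `num_ticks` tick entries, each one hash apart, chained from `hash`.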
pub fn create_ticks(num_ticks: u64, mut hash: Hash) -> Vec<Entry> {
    let mut ticks = Vec::with_capacity(num_ticks as usize);
    for _ in 0..num_ticks {
        let new_tick = next_entry_mut(&mut hash, 1, vec![]);
        ticks.push(new_tick);
    }

    ticks
}

/// Creates the next Tick or Transaction Entry `num_hashes` after `start_hash`.
pub fn next_entry(prev_hash: &Hash, num_hashes: u64, transactions: Vec<Transaction>) -> Entry {
    assert!(num_hashes > 0 || transactions.is_empty());
    Entry {
        num_hashes,
        hash: next_hash(prev_hash, num_hashes, &transactions),
        transactions,
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::entry::Entry;
    use chrono::prelude::Utc;
    use solana_budget_api::budget_instruction;
    use solana_sdk::{
        hash::hash,
        signature::{Keypair, KeypairUtil},
        system_transaction,
    };

    fn create_sample_payment(keypair: &Keypair, hash: Hash) -> Transaction {
        let pubkey = keypair.pubkey();
        let ixs = budget_instruction::payment(&pubkey, &pubkey, 1);
        Transaction::new_signed_instructions(&[keypair], ixs, hash)
    }

    fn create_sample_timestamp(keypair: &Keypair, hash: Hash) -> Transaction {
        let pubkey = keypair.pubkey();
        let ix = budget_instruction::apply_timestamp(&pubkey, &pubkey, &pubkey, Utc::now());
        Transaction::new_signed_instructions(&[keypair], vec![ix], hash)
    }

    fn create_sample_apply_signature(keypair: &Keypair, hash: Hash) -> Transaction {
        let pubkey = keypair.pubkey();
        let ix = budget_instruction::apply_signature(&pubkey, &pubkey, &pubkey);
        Transaction::new_signed_instructions(&[keypair], vec![ix], hash)
    }

    #[test]
    fn test_entry_verify() {
        let zero = Hash::default();
        let one = hash(&zero.as_ref());
        assert!(Entry::new_tick(0, &zero).verify(&zero)); // base case, never used
        assert!(!Entry::new_tick(0, &zero).verify(&one)); // base case, bad
        assert!(next_entry(&zero, 1, vec![]).verify(&zero)); // inductive step
        assert!(!next_entry(&zero, 1, vec![]).verify(&one)); // inductive step, bad
    }

    #[test]
    fn test_transaction_reorder_attack() {
        let zero = Hash::default();

        // First, verify entries
        let keypair = Keypair::new();
        let tx0 = system_transaction::transfer_now(&keypair, &keypair.pubkey(), 0, zero);
        let tx1 = system_transaction::transfer_now(&keypair, &keypair.pubkey(), 1, zero);
        let mut e0 = Entry::new(&zero, 0, vec![tx0.clone(), tx1.clone()]);
        assert!(e0.verify(&zero));

        // Next, swap two transactions and ensure verification fails.
        e0.transactions[0] = tx1; // <-- attack
        e0.transactions[1] = tx0;
        assert!(!e0.verify(&zero));
    }

    #[test]
    fn test_witness_reorder_attack() {
        let zero = Hash::default();

        // First, verify entries
        let keypair = Keypair::new();
        let tx0 = create_sample_timestamp(&keypair, zero);
        let tx1 = create_sample_apply_signature(&keypair, zero);
        let mut e0 = Entry::new(&zero, 0, vec![tx0.clone(), tx1.clone()]);
        assert!(e0.verify(&zero));

        // Next, swap two witness transactions and ensure verification fails.
        e0.transactions[0] = tx1; // <-- attack
        e0.transactions[1] = tx0;
        assert!(!e0.verify(&zero));
    }

    #[test]
    fn test_next_entry() {
        let zero = Hash::default();
        let tick = next_entry(&zero, 1, vec![]);
        assert_eq!(tick.num_hashes, 1);
        assert_ne!(tick.hash, zero);

        let tick = next_entry(&zero, 0, vec![]);
        assert_eq!(tick.num_hashes, 0);
        assert_eq!(tick.hash, zero);

        let keypair = Keypair::new();
        let tx0 = create_sample_timestamp(&keypair, zero);
        let entry0 = next_entry(&zero, 1, vec![tx0.clone()]);
        assert_eq!(entry0.num_hashes, 1);
        assert_eq!(entry0.hash, next_hash(&zero, 1, &vec![tx0]));
    }

    #[test]
    #[should_panic]
    fn test_next_entry_panic() {
        let zero = Hash::default();
        let keypair = Keypair::new();
        let tx = system_transaction::transfer_now(&keypair, &keypair.pubkey(), 0, zero);
        next_entry(&zero, 0, vec![tx]);
    }

    #[test]
    fn test_verify_slice() {
        solana_logger::setup();
        let zero = Hash::default();
        let one = hash(&zero.as_ref());
        assert!(vec![][..].verify(&zero)); // base case
        assert!(vec![Entry::new_tick(0, &zero)][..].verify(&zero)); // singleton case 1
        assert!(!vec![Entry::new_tick(0, &zero)][..].verify(&one)); // singleton case 2, bad
        assert!(vec![next_entry(&zero, 0, vec![]); 2][..].verify(&zero)); // inductive step

        let mut bad_ticks = vec![next_entry(&zero, 0, vec![]); 2];
        bad_ticks[1].hash = one;
        assert!(!bad_ticks.verify(&zero)); // inductive step, bad
    }

    #[test]
    fn test_verify_slice_with_hashes() {
        solana_logger::setup();
        let zero = Hash::default();
        let one = hash(&zero.as_ref());
        let two = hash(&one.as_ref());
        assert!(vec![][..].verify(&one)); // base case
        assert!(vec![Entry::new_tick(1, &two)][..].verify(&one)); // singleton case 1
        assert!(!vec![Entry::new_tick(1, &two)][..].verify(&two)); // singleton case 2, bad

        let mut ticks = vec![next_entry(&one, 1, vec![])];
        ticks.push(next_entry(&ticks.last().unwrap().hash, 1, vec![]));
        assert!(ticks.verify(&one)); // inductive step

        let mut bad_ticks = vec![next_entry(&one, 1, vec![])];
        bad_ticks.push(next_entry(&bad_ticks.last().unwrap().hash, 1, vec![]));
        bad_ticks[1].hash = one;
        assert!(!bad_ticks.verify(&one)); // inductive step, bad
    }

    #[test]
    fn test_verify_slice_with_hashes_and_transactions() {
        solana_logger::setup();
        let zero = Hash::default();
        let one = hash(&zero.as_ref());
        let two = hash(&one.as_ref());
        let alice_keypair = Keypair::default();
        let tx0 = create_sample_payment(&alice_keypair, one);
        let tx1 = create_sample_timestamp(&alice_keypair, one);
        assert!(vec![][..].verify(&one)); // base case
        assert!(vec![next_entry(&one, 1, vec![tx0.clone()])][..].verify(&one)); // singleton case 1
        assert!(!vec![next_entry(&one, 1, vec![tx0.clone()])][..].verify(&two)); // singleton case 2, bad

        let mut ticks = vec![next_entry(&one, 1, vec![tx0.clone()])];
        ticks.push(next_entry(
            &ticks.last().unwrap().hash,
            1,
            vec![tx1.clone()],
        ));
        assert!(ticks.verify(&one)); // inductive step

        let mut bad_ticks = vec![next_entry(&one, 1, vec![tx0])];
        bad_ticks.push(next_entry(&bad_ticks.last().unwrap().hash, 1, vec![tx1]));
        bad_ticks[1].hash = one;
        assert!(!bad_ticks.verify(&one)); // inductive step, bad
    }
}