2018-03-30 10:43:38 -07:00
|
|
|
//! The `entry` module is a fundamental building block of Proof of History. It contains a
//! unique ID that is the hash of the Entry before it, plus the hash of the
//! transactions within it. Entries cannot be reordered, and each Entry's field `num_hashes`
//! represents an approximate amount of time since the last Entry was created.
|
2018-03-06 16:31:17 -08:00
|
|
|
use event::Event;
|
2018-03-26 21:03:26 -07:00
|
|
|
use hash::{extend_and_hash, hash, Hash};
|
2018-03-28 21:02:47 -07:00
|
|
|
use rayon::prelude::*;
|
2018-03-06 16:31:17 -08:00
|
|
|
|
2018-03-29 11:20:54 -07:00
|
|
|
/// Each Entry contains three pieces of data. The `num_hashes` field is the number
|
|
|
|
/// of hashes performed since the previous entry. The `id` field is the result
|
|
|
|
/// of hashing `id` from the previous entry `num_hashes` times. The `events`
|
|
|
|
/// field points to Events that took place shortly after `id` was generated.
|
|
|
|
///
|
|
|
|
/// If you divide `num_hashes` by the amount of time it takes to generate a new hash, you
|
|
|
|
/// get a duration estimate since the last Entry. Since processing power increases
|
|
|
|
/// over time, one should expect the duration `num_hashes` represents to decrease proportionally.
|
|
|
|
/// Though processing power varies across nodes, the network gives priority to the
|
|
|
|
/// fastest processor. Duration should therefore be estimated by assuming that the hash
|
|
|
|
/// was generated by the fastest processor at the time the entry was recorded.
|
2018-03-06 16:31:17 -08:00
|
|
|
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone)]
pub struct Entry {
    /// Number of hashes performed since the previous Entry.
    pub num_hashes: u64,
    /// The hash chained from the previous Entry's `id`; when `events` is
    /// non-empty, the final hash also folds in the serialized event data
    /// (see `next_hash`).
    pub id: Hash,
    /// Events recorded shortly after `id` was generated.
    pub events: Vec<Event>,
}
|
|
|
|
|
2018-03-06 19:22:30 -08:00
|
|
|
impl Entry {
|
2018-05-16 16:49:58 -07:00
|
|
|
/// Creates the next Entry `num_hashes` after `start_hash`.
|
|
|
|
pub fn new(start_hash: &Hash, cur_hashes: u64, events: Vec<Event>) -> Self {
|
|
|
|
let num_hashes = cur_hashes + if events.is_empty() { 0 } else { 1 };
|
|
|
|
let id = next_hash(start_hash, 0, &events);
|
|
|
|
Entry {
|
|
|
|
num_hashes,
|
|
|
|
id,
|
|
|
|
events,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Creates the next Tick Entry `num_hashes` after `start_hash`.
|
|
|
|
pub fn new_mut(start_hash: &mut Hash, cur_hashes: &mut u64, events: Vec<Event>) -> Self {
|
|
|
|
let entry = Self::new(start_hash, *cur_hashes, events);
|
|
|
|
*start_hash = entry.id;
|
|
|
|
*cur_hashes = 0;
|
|
|
|
entry
|
|
|
|
}
|
|
|
|
|
2018-03-22 13:40:28 -07:00
|
|
|
/// Creates a Entry from the number of hashes `num_hashes` since the previous event
|
|
|
|
/// and that resulting `id`.
|
2018-03-06 16:36:45 -08:00
|
|
|
pub fn new_tick(num_hashes: u64, id: &Hash) -> Self {
|
2018-03-06 16:31:17 -08:00
|
|
|
Entry {
|
|
|
|
num_hashes,
|
|
|
|
id: *id,
|
2018-03-09 15:16:29 -08:00
|
|
|
events: vec![],
|
2018-03-06 16:31:17 -08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-03-22 13:40:28 -07:00
|
|
|
/// Verifies self.id is the result of hashing a `start_hash` `self.num_hashes` times.
|
2018-03-06 16:31:17 -08:00
|
|
|
/// If the event is not a Tick, then hash that as well.
|
2018-03-06 16:36:45 -08:00
|
|
|
pub fn verify(&self, start_hash: &Hash) -> bool {
|
2018-03-28 21:02:47 -07:00
|
|
|
self.events.par_iter().all(|event| event.verify())
|
|
|
|
&& self.id == next_hash(start_hash, self.num_hashes, &self.events)
|
2018-03-06 16:31:17 -08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-04-02 19:47:48 -07:00
|
|
|
fn add_event_data(hash_data: &mut Vec<u8>, event: &Event) {
|
|
|
|
match *event {
|
|
|
|
Event::Transaction(ref tr) => {
|
|
|
|
hash_data.push(0u8);
|
|
|
|
hash_data.extend_from_slice(&tr.sig);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-03-22 13:40:28 -07:00
|
|
|
/// Creates the hash `num_hashes` after `start_hash`. If the event contains
|
2018-04-16 13:38:31 -07:00
|
|
|
/// a signature, the final hash will be a hash of both the previous ID and
|
2018-03-06 16:31:17 -08:00
|
|
|
/// the signature.
|
2018-03-09 15:16:29 -08:00
|
|
|
pub fn next_hash(start_hash: &Hash, num_hashes: u64, events: &[Event]) -> Hash {
|
2018-03-06 16:31:17 -08:00
|
|
|
let mut id = *start_hash;
|
2018-03-09 15:16:29 -08:00
|
|
|
for _ in 1..num_hashes {
|
2018-03-06 16:31:17 -08:00
|
|
|
id = hash(&id);
|
|
|
|
}
|
2018-03-09 15:16:29 -08:00
|
|
|
|
|
|
|
// Hash all the event data
|
|
|
|
let mut hash_data = vec![];
|
|
|
|
for event in events {
|
2018-04-02 19:47:48 -07:00
|
|
|
add_event_data(&mut hash_data, event);
|
2018-03-06 16:31:17 -08:00
|
|
|
}
|
2018-03-09 15:16:29 -08:00
|
|
|
|
|
|
|
if !hash_data.is_empty() {
|
2018-04-16 13:38:31 -07:00
|
|
|
extend_and_hash(&id, &hash_data)
|
|
|
|
} else if num_hashes != 0 {
|
|
|
|
hash(&id)
|
|
|
|
} else {
|
|
|
|
id
|
2018-03-09 15:16:29 -08:00
|
|
|
}
|
2018-03-06 16:31:17 -08:00
|
|
|
}
|
|
|
|
|
2018-05-11 08:45:42 -07:00
|
|
|
/// Creates the next Tick or Event Entry `num_hashes` after `start_hash`.
|
|
|
|
pub fn next_entry(start_hash: &Hash, num_hashes: u64, events: Vec<Event>) -> Entry {
|
2018-03-06 16:31:17 -08:00
|
|
|
Entry {
|
|
|
|
num_hashes,
|
2018-05-11 08:45:42 -07:00
|
|
|
id: next_hash(start_hash, num_hashes, &events),
|
|
|
|
events: events,
|
2018-03-06 16:31:17 -08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
#[cfg(test)]
mod tests {
    use super::*;
    use chrono::prelude::*;
    use entry::Entry;
    use event::Event;
    use hash::hash;
    use signature::{KeyPair, KeyPairUtil};
    use transaction::Transaction;

    #[test]
    fn test_entry_verify() {
        let zero = Hash::default();
        let one = hash(&zero);
        // Base case: a zero-hash tick verifies only against its start hash.
        assert!(Entry::new_tick(0, &zero).verify(&zero));
        assert!(!Entry::new_tick(0, &zero).verify(&one)); // base case, bad
        // Inductive step: one hash past `zero` verifies only against `zero`.
        assert!(next_entry(&zero, 1, vec![]).verify(&zero));
        assert!(!next_entry(&zero, 1, vec![]).verify(&one)); // inductive step, bad
    }

    #[test]
    fn test_event_reorder_attack() {
        let zero = Hash::default();

        // First, build an entry with two events and verify it.
        let keypair = KeyPair::new();
        let tr0 = Event::new_transaction(&keypair, keypair.pubkey(), 0, zero);
        let tr1 = Event::new_transaction(&keypair, keypair.pubkey(), 1, zero);
        let mut e0 = Entry::new(&zero, 0, vec![tr0.clone(), tr1.clone()]);
        assert!(e0.verify(&zero));

        // Next, swap the two events and ensure verification fails.
        e0.events.swap(0, 1); // <-- attack
        assert!(!e0.verify(&zero));
    }

    #[test]
    fn test_witness_reorder_attack() {
        let zero = Hash::default();

        // First, build an entry with two witness events and verify it.
        let keypair = KeyPair::new();
        let tr0 = Event::Transaction(Transaction::new_timestamp(&keypair, Utc::now(), zero));
        let tr1 = Event::Transaction(Transaction::new_signature(
            &keypair,
            Default::default(),
            zero,
        ));
        let mut e0 = Entry::new(&zero, 0, vec![tr0.clone(), tr1.clone()]);
        assert!(e0.verify(&zero));

        // Next, swap the two witness events and ensure verification fails.
        e0.events.swap(0, 1); // <-- attack
        assert!(!e0.verify(&zero));
    }

    #[test]
    fn test_next_entry() {
        let zero = Hash::default();
        let tick = next_entry(&zero, 1, vec![]);
        // One hash was performed, so the id must have moved off `zero`.
        assert_eq!(tick.num_hashes, 1);
        assert_ne!(tick.id, zero);
    }
}
|