2018-02-18 08:59:15 -08:00
|
|
|
//! The `log` crate provides the foundational data structures for Proof-of-History,
|
|
|
|
//! an ordered log of events in time.
|
2018-02-15 09:13:56 -08:00
|
|
|
|
2018-02-18 08:59:15 -08:00
|
|
|
/// Each log entry contains three pieces of data. The 'num_hashes' field is the number
|
|
|
|
/// of hashes performed since the previous entry. The 'end_hash' field is the result
|
|
|
|
/// of hashing 'end_hash' from the previous entry 'num_hashes' times. The 'event'
|
|
|
|
/// field points to an Event that took place shortly after 'end_hash' was generated.
|
2018-02-15 09:13:56 -08:00
|
|
|
///
|
2018-02-15 09:48:30 -08:00
|
|
|
/// If you divide 'num_hashes' by the amount of time it takes to generate a new hash, you
|
2018-02-15 09:13:56 -08:00
|
|
|
/// get a duration estimate since the last event. Since processing power increases
|
2018-02-15 09:48:30 -08:00
|
|
|
/// over time, one should expect the duration 'num_hashes' represents to decrease proportionally.
|
2018-02-15 09:13:56 -08:00
|
|
|
/// Though processing power varies across nodes, the network gives priority to the
|
|
|
|
/// fastest processor. Duration should therefore be estimated by assuming that the hash
|
|
|
|
/// was generated by the fastest processor at the time the entry was logged.
|
2018-02-19 15:17:13 -08:00
|
|
|
|
|
|
|
use digest::generic_array::GenericArray;

use digest::generic_array::typenum::U32;

/// A 32-byte SHA-256 digest, stored inline as a fixed-size array.
pub type Sha256Hash = GenericArray<u8, U32>;
/// A single Proof-of-History log entry. See the module-level docs for the
/// full description of the field semantics.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Entry {
    // Number of hashes performed since the previous entry.
    pub num_hashes: u64,
    // Result of hashing the previous entry's 'end_hash' 'num_hashes' times
    // (with the event's data folded in, if any — see next_hash).
    pub end_hash: Sha256Hash,
    // The event that took place shortly after 'end_hash' was generated.
    pub event: Event,
}
|
|
|
|
|
/// When 'event' is Tick, the event represents a simple clock tick, and exists for the
/// sole purpose of improving the performance of event log verification. A tick can
/// be generated in 'num_hashes' hashes and verified in 'num_hashes' hashes. By logging
/// a hash alongside the tick, each tick can be verified in parallel using the 'end_hash'
/// of the preceding tick to seed its hashing.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum Event {
    // A simple clock tick; carries no payload.
    Tick,
    // A 32-byte key of opaque user data; folded into the entry's hash
    // (see next_hash / extend_and_hash).
    UserDataKey(Sha256Hash),
}
|
|
|
|
|
2018-02-18 08:53:38 -08:00
|
|
|
impl Entry {
|
|
|
|
/// Creates a Entry from the number of hashes 'num_hashes' since the previous event
|
2018-02-15 09:48:30 -08:00
|
|
|
/// and that resulting 'end_hash'.
|
2018-02-19 15:17:13 -08:00
|
|
|
pub fn new_tick(num_hashes: u64, end_hash: &Sha256Hash) -> Self {
|
2018-02-18 08:53:38 -08:00
|
|
|
Entry {
|
2018-02-15 09:48:30 -08:00
|
|
|
num_hashes,
|
2018-02-19 15:17:13 -08:00
|
|
|
end_hash: *end_hash,
|
2018-02-20 12:07:54 -08:00
|
|
|
event: Event::Tick,
|
2018-02-15 09:48:30 -08:00
|
|
|
}
|
2018-02-15 09:13:56 -08:00
|
|
|
}
|
|
|
|
|
2018-02-15 09:48:30 -08:00
|
|
|
/// Verifies self.end_hash is the result of hashing a 'start_hash' 'self.num_hashes' times.
|
2018-02-20 12:07:54 -08:00
|
|
|
/// If the event is a UserDataKey, then hash that as well.
|
2018-02-19 15:17:13 -08:00
|
|
|
pub fn verify(self: &Self, start_hash: &Sha256Hash) -> bool {
|
2018-02-20 12:07:54 -08:00
|
|
|
self.end_hash == next_hash(start_hash, self.num_hashes, &self.event)
|
2018-02-15 10:45:04 -08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-02-19 15:17:13 -08:00
|
|
|
pub fn hash(val: &[u8]) -> Sha256Hash {
|
|
|
|
use sha2::{Digest, Sha256};
|
|
|
|
let mut hasher = Sha256::default();
|
|
|
|
hasher.input(val);
|
|
|
|
hasher.result()
|
2018-02-19 11:09:56 -08:00
|
|
|
}
|
|
|
|
|
2018-02-20 12:07:54 -08:00
|
|
|
/// Return the hash of the given hash extended with the given value.
|
|
|
|
pub fn extend_and_hash(end_hash: &Sha256Hash, val: &[u8]) -> Sha256Hash {
|
|
|
|
let mut hash_data = end_hash.to_vec();
|
|
|
|
hash_data.extend_from_slice(val);
|
|
|
|
hash(&hash_data)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn next_hash(start_hash: &Sha256Hash, num_hashes: u64, event: &Event) -> Sha256Hash {
|
2018-02-19 15:17:13 -08:00
|
|
|
let mut end_hash = *start_hash;
|
2018-02-15 10:45:04 -08:00
|
|
|
for _ in 0..num_hashes {
|
2018-02-19 15:17:13 -08:00
|
|
|
end_hash = hash(&end_hash);
|
2018-02-15 09:13:56 -08:00
|
|
|
}
|
2018-02-20 12:07:54 -08:00
|
|
|
if let Event::UserDataKey(key) = *event {
|
|
|
|
return extend_and_hash(&end_hash, &key);
|
|
|
|
}
|
|
|
|
end_hash
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Creates the next Tick Entry 'num_hashes' after 'start_hash'.
|
|
|
|
pub fn next_entry(start_hash: &Sha256Hash, num_hashes: u64, event: Event) -> Entry {
|
|
|
|
Entry {
|
|
|
|
num_hashes,
|
|
|
|
end_hash: next_hash(start_hash, num_hashes, &event),
|
|
|
|
event,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Creates the next Tick Entry 'num_hashes' after 'start_hash'.
|
|
|
|
pub fn next_tick(start_hash: &Sha256Hash, num_hashes: u64) -> Entry {
|
|
|
|
next_entry(start_hash, num_hashes, Event::Tick)
|
2018-02-15 09:13:56 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Verifies the hashes and counts of a slice of events are all consistent.
|
2018-02-19 15:17:13 -08:00
|
|
|
pub fn verify_slice(events: &[Entry], start_hash: &Sha256Hash) -> bool {
|
2018-02-15 09:13:56 -08:00
|
|
|
use rayon::prelude::*;
|
2018-02-19 15:17:13 -08:00
|
|
|
let genesis = [Entry::new_tick(Default::default(), start_hash)];
|
2018-02-15 09:13:56 -08:00
|
|
|
let event_pairs = genesis.par_iter().chain(events).zip(events);
|
2018-02-19 15:17:13 -08:00
|
|
|
event_pairs.all(|(x0, x1)| x1.verify(&x0.end_hash))
|
2018-02-15 09:13:56 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Verifies the hashes and events serially. Exists only for reference.
|
2018-02-19 15:17:13 -08:00
|
|
|
pub fn verify_slice_seq(events: &[Entry], start_hash: &Sha256Hash) -> bool {
|
2018-02-18 08:53:38 -08:00
|
|
|
let genesis = [Entry::new_tick(0, start_hash)];
|
2018-02-15 15:00:05 -08:00
|
|
|
let mut event_pairs = genesis.iter().chain(events).zip(events);
|
2018-02-19 15:17:13 -08:00
|
|
|
event_pairs.all(|(x0, x1)| x1.verify(&x0.end_hash))
|
2018-02-15 09:13:56 -08:00
|
|
|
}
|
|
|
|
|
2018-02-15 10:50:48 -08:00
|
|
|
/// Create a vector of Ticks of length 'len' from 'start_hash' hash and 'num_hashes'.
|
2018-02-19 15:17:13 -08:00
|
|
|
pub fn create_ticks(start_hash: &Sha256Hash, num_hashes: u64, len: usize) -> Vec<Entry> {
|
2018-02-20 12:07:54 -08:00
|
|
|
use std::iter;
|
|
|
|
let mut end_hash = *start_hash;
|
|
|
|
iter::repeat(Event::Tick)
|
|
|
|
.take(len)
|
|
|
|
.map(|event| {
|
|
|
|
let entry = next_entry(&end_hash, num_hashes, event);
|
|
|
|
end_hash = entry.end_hash;
|
|
|
|
entry
|
|
|
|
})
|
|
|
|
.collect()
|
2018-02-15 09:13:56 -08:00
|
|
|
}
|
|
|
|
|
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_event_verify() {
        let zero = Sha256Hash::default();
        let one = hash(&zero);
        assert!(Entry::new_tick(0, &zero).verify(&zero)); // base case
        assert!(!Entry::new_tick(0, &zero).verify(&one)); // base case, bad
        assert!(next_tick(&zero, 1).verify(&zero)); // inductive step
        assert!(!next_tick(&zero, 1).verify(&one)); // inductive step, bad
    }

    #[test]
    fn test_next_tick() {
        let zero = Sha256Hash::default();
        assert_eq!(next_tick(&zero, 1).num_hashes, 1)
    }

    // Shared body for exercising both verifiers; parameterized over the
    // verify function so parallel and sequential paths get identical coverage.
    fn verify_slice_generic(verify_slice: fn(&[Entry], &Sha256Hash) -> bool) {
        let zero = Sha256Hash::default();
        let one = hash(&zero);
        assert!(verify_slice(&vec![], &zero)); // base case
        assert!(verify_slice(&vec![Entry::new_tick(0, &zero)], &zero)); // singleton case 1
        assert!(!verify_slice(&vec![Entry::new_tick(0, &zero)], &one)); // singleton case 2, bad
        assert!(verify_slice(&create_ticks(&zero, 0, 2), &zero)); // inductive step

        // Corrupt the second tick's hash; verification must then fail.
        let mut bad_ticks = create_ticks(&zero, 0, 2);
        bad_ticks[1].end_hash = one;
        assert!(!verify_slice(&bad_ticks, &zero)); // inductive step, bad
    }

    #[test]
    fn test_verify_slice() {
        verify_slice_generic(verify_slice);
    }

    #[test]
    fn test_verify_slice_seq() {
        verify_slice_generic(verify_slice_seq);
    }
}
|
|
|
|
|
// Benchmarks comparing the parallel and sequential slice verifiers.
// Requires the nightly-only "unstable" feature for the test crate's Bencher.
#[cfg(all(feature = "unstable", test))]
mod bench {
    extern crate test;
    use self::test::Bencher;
    use log::*;

    #[bench]
    fn event_bench(bencher: &mut Bencher) {
        let start_hash = Default::default();
        // 8 entries of 10_000 hashes each; built once outside the timed loop.
        let events = create_ticks(&start_hash, 10_000, 8);
        bencher.iter(|| {
            assert!(verify_slice(&events, &start_hash));
        });
    }

    #[bench]
    fn event_bench_seq(bencher: &mut Bencher) {
        let start_hash = Default::default();
        // Same workload as event_bench, verified serially for comparison.
        let events = create_ticks(&start_hash, 10_000, 8);
        bencher.iter(|| {
            assert!(verify_slice_seq(&events, &start_hash));
        });
    }
}
|