Merge pull request #53 from garious/monorphic-entry

Monomorphize Entry and Event
This commit is contained in:
Greg Fitzgerald 2018-03-06 20:39:11 -07:00 committed by GitHub
commit a4c081d3a1
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
10 changed files with 52 additions and 96 deletions

View File

@ -25,7 +25,7 @@ pub enum AccountingError {
pub type Result<T> = result::Result<T, AccountingError>;
pub struct Accountant {
pub historian: Historian<i64>,
pub historian: Historian,
pub balances: HashMap<PublicKey, i64>,
pub first_id: Hash,
pub last_id: Hash,
@ -34,7 +34,7 @@ pub struct Accountant {
impl Accountant {
pub fn new_from_entries<I>(entries: I, ms_per_tick: Option<u64>) -> Self
where
I: IntoIterator<Item = Entry<i64>>,
I: IntoIterator<Item = Entry>,
{
let mut entries = entries.into_iter();
@ -43,7 +43,7 @@ impl Accountant {
let entry0 = entries.next().unwrap();
let start_hash = entry0.id;
let hist = Historian::<i64>::new(&start_hash, ms_per_tick);
let hist = Historian::new(&start_hash, ms_per_tick);
let mut acc = Accountant {
historian: hist,
balances: HashMap::new(),
@ -121,11 +121,7 @@ impl Accountant {
Ok(())
}
fn process_verified_event(
self: &mut Self,
event: &Event<i64>,
allow_deposits: bool,
) -> Result<()> {
fn process_verified_event(self: &mut Self, event: &Event, allow_deposits: bool) -> Result<()> {
match *event {
Event::Tick => Ok(()),
Event::Transaction(ref tr) => self.process_verified_transaction(tr, allow_deposits),

View File

@ -22,7 +22,7 @@ pub enum Request {
#[derive(Serialize, Deserialize, Debug)]
pub enum Response {
Balance { key: PublicKey, val: Option<i64> },
Entries { entries: Vec<Entry<i64>> },
Entries { entries: Vec<Entry> },
Id { id: Hash, is_last: bool },
}

View File

@ -11,11 +11,10 @@ use std::thread::sleep;
use std::time::Duration;
use std::sync::mpsc::SendError;
fn create_log(hist: &Historian<Hash>, seed: &Hash) -> Result<(), SendError<Event<Hash>>> {
fn create_log(hist: &Historian, seed: &Hash) -> Result<(), SendError<Event>> {
sleep(Duration::from_millis(15));
let asset = Hash::default();
let keypair = generate_keypair();
let tr = Transaction::new(&keypair, get_pubkey(&keypair), asset, *seed);
let tr = Transaction::new(&keypair, get_pubkey(&keypair), 42, *seed);
let event0 = Event::Transaction(tr);
hist.sender.send(event0)?;
sleep(Duration::from_millis(10));
@ -27,7 +26,7 @@ fn main() {
let hist = Historian::new(&seed, Some(10));
create_log(&hist, &seed).expect("send error");
drop(hist.sender);
let entries: Vec<Entry<Hash>> = hist.receiver.iter().collect();
let entries: Vec<Entry> = hist.receiver.iter().collect();
for entry in &entries {
println!("{:?}", entry);
}

View File

@ -5,13 +5,13 @@ extern crate serde_json;
extern crate silk;
use silk::genesis::Genesis;
use silk::log::verify_slice_i64;
use silk::log::verify_slice;
use std::io::stdin;
fn main() {
let gen: Genesis = serde_json::from_reader(stdin()).unwrap();
let entries = gen.create_entries();
verify_slice_i64(&entries, &entries[0].id);
verify_slice(&entries, &entries[0].id);
for x in entries {
println!("{}", serde_json::to_string(&x).unwrap());
}

View File

@ -1,15 +1,14 @@
use hash::{extend_and_hash, hash, Hash};
use serde::Serialize;
use event::Event;
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone)]
pub struct Entry<T> {
pub struct Entry {
pub num_hashes: u64,
pub id: Hash,
pub event: Event<T>,
pub event: Event,
}
impl<T: Serialize> Entry<T> {
impl Entry {
/// Creates a Entry from the number of hashes 'num_hashes' since the previous event
/// and that resulting 'id'.
pub fn new_tick(num_hashes: u64, id: &Hash) -> Self {
@ -33,7 +32,7 @@ impl<T: Serialize> Entry<T> {
/// Creates the hash 'num_hashes' after start_hash. If the event contains
/// signature, the final hash will be a hash of both the previous ID and
/// the signature.
pub fn next_hash<T: Serialize>(start_hash: &Hash, num_hashes: u64, event: &Event<T>) -> Hash {
pub fn next_hash(start_hash: &Hash, num_hashes: u64, event: &Event) -> Hash {
let mut id = *start_hash;
let sig = event.get_signature();
let start_index = if sig.is_some() { 1 } else { 0 };
@ -47,7 +46,7 @@ pub fn next_hash<T: Serialize>(start_hash: &Hash, num_hashes: u64, event: &Event
}
/// Creates the next Entry 'num_hashes' after 'start_hash'.
pub fn create_entry<T: Serialize>(start_hash: &Hash, cur_hashes: u64, event: Event<T>) -> Entry<T> {
pub fn create_entry(start_hash: &Hash, cur_hashes: u64, event: Event) -> Entry {
let sig = event.get_signature();
let num_hashes = cur_hashes + if sig.is_some() { 1 } else { 0 };
let id = next_hash(start_hash, 0, &event);
@ -59,11 +58,7 @@ pub fn create_entry<T: Serialize>(start_hash: &Hash, cur_hashes: u64, event: Eve
}
/// Creates the next Tick Entry 'num_hashes' after 'start_hash'.
pub fn create_entry_mut<T: Serialize>(
start_hash: &mut Hash,
cur_hashes: &mut u64,
event: Event<T>,
) -> Entry<T> {
pub fn create_entry_mut(start_hash: &mut Hash, cur_hashes: &mut u64, event: Event) -> Entry {
let entry = create_entry(start_hash, *cur_hashes, event);
*start_hash = entry.id;
*cur_hashes = 0;
@ -71,7 +66,7 @@ pub fn create_entry_mut<T: Serialize>(
}
/// Creates the next Tick Entry 'num_hashes' after 'start_hash'.
pub fn next_tick<T: Serialize>(start_hash: &Hash, num_hashes: u64) -> Entry<T> {
pub fn next_tick(start_hash: &Hash, num_hashes: u64) -> Entry {
let event = Event::Tick;
Entry {
num_hashes,
@ -88,15 +83,15 @@ mod tests {
fn test_event_verify() {
let zero = Hash::default();
let one = hash(&zero);
assert!(Entry::<u8>::new_tick(0, &zero).verify(&zero)); // base case
assert!(!Entry::<u8>::new_tick(0, &zero).verify(&one)); // base case, bad
assert!(next_tick::<u8>(&zero, 1).verify(&zero)); // inductive step
assert!(!next_tick::<u8>(&zero, 1).verify(&one)); // inductive step, bad
assert!(Entry::new_tick(0, &zero).verify(&zero)); // base case
assert!(!Entry::new_tick(0, &zero).verify(&one)); // base case, bad
assert!(next_tick(&zero, 1).verify(&zero)); // inductive step
assert!(!next_tick(&zero, 1).verify(&one)); // inductive step, bad
}
#[test]
fn test_next_tick() {
let zero = Hash::default();
assert_eq!(next_tick::<Hash>(&zero, 1).num_hashes, 1)
assert_eq!(next_tick(&zero, 1).num_hashes, 1)
}
}

View File

@ -2,7 +2,6 @@
use signature::Signature;
use transaction::Transaction;
use serde::Serialize;
/// When 'event' is Tick, the event represents a simple clock tick, and exists for the
/// sole purpose of improving the performance of event log verification. A tick can
@ -10,12 +9,12 @@ use serde::Serialize;
/// a hash alongside the tick, each tick and be verified in parallel using the 'id'
/// of the preceding tick to seed its hashing.
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone)]
pub enum Event<T> {
pub enum Event {
Tick,
Transaction(Transaction<T>),
Transaction(Transaction<i64>),
}
impl<T: Serialize> Event<T> {
impl Event {
pub fn get_signature(&self) -> Option<Signature> {
match *self {
Event::Tick => None,

View File

@ -56,12 +56,12 @@ impl Genesis {
get_pubkey(&self.get_keypair())
}
pub fn create_transaction(&self, asset: i64, to: &PublicKey) -> Event<i64> {
pub fn create_transaction(&self, asset: i64, to: &PublicKey) -> Event {
let tr = Transaction::new(&self.get_keypair(), *to, asset, self.get_seed());
Event::Transaction(tr)
}
pub fn create_events(&self) -> Vec<Event<i64>> {
pub fn create_events(&self) -> Vec<Event> {
let pubkey = self.get_pubkey();
let event0 = Event::Tick;
let event1 = self.create_transaction(self.tokens, &pubkey);
@ -75,7 +75,7 @@ impl Genesis {
events
}
pub fn create_entries(&self) -> Vec<Entry<i64>> {
pub fn create_entries(&self) -> Vec<Entry> {
create_entries(&self.get_seed(), self.create_events())
}
}
@ -83,7 +83,7 @@ impl Genesis {
#[cfg(test)]
mod tests {
use super::*;
use log::verify_slice_i64;
use log::verify_slice;
#[test]
fn test_create_events() {
@ -110,12 +110,12 @@ mod tests {
#[test]
fn test_verify_entries() {
let entries = Genesis::new(100, vec![]).create_entries();
assert!(verify_slice_i64(&entries, &entries[0].id));
assert!(verify_slice(&entries, &entries[0].id));
}
#[test]
fn test_verify_entries_with_transactions() {
let entries = Genesis::new(100, vec![Creator::new(42)]).create_entries();
assert!(verify_slice_i64(&entries, &entries[0].id));
assert!(verify_slice(&entries, &entries[0].id));
}
}

View File

@ -10,17 +10,15 @@ use entry::Entry;
use logger::{ExitReason, Logger};
use signature::Signature;
use event::Event;
use serde::Serialize;
use std::fmt::Debug;
pub struct Historian<T> {
pub sender: SyncSender<Event<T>>,
pub receiver: Receiver<Entry<T>>,
pub struct Historian {
pub sender: SyncSender<Event>,
pub receiver: Receiver<Entry>,
pub thread_hdl: JoinHandle<ExitReason>,
pub signatures: HashSet<Signature>,
}
impl<T: 'static + Serialize + Clone + Debug + Send> Historian<T> {
impl Historian {
pub fn new(start_hash: &Hash, ms_per_tick: Option<u64>) -> Self {
let (sender, event_receiver) = sync_channel(1000);
let (entry_sender, receiver) = sync_channel(1000);
@ -40,8 +38,8 @@ impl<T: 'static + Serialize + Clone + Debug + Send> Historian<T> {
fn create_logger(
start_hash: Hash,
ms_per_tick: Option<u64>,
receiver: Receiver<Event<T>>,
sender: SyncSender<Entry<T>>,
receiver: Receiver<Event>,
sender: SyncSender<Entry>,
) -> JoinHandle<ExitReason> {
spawn(move || {
let mut logger = Logger::new(receiver, sender, start_hash);
@ -100,7 +98,7 @@ mod tests {
#[test]
fn test_historian_closed_sender() {
let zero = Hash::default();
let hist = Historian::<u8>::new(&zero, None);
let hist = Historian::new(&zero, None);
drop(hist.receiver);
hist.sender.send(Event::Tick).unwrap();
assert_eq!(
@ -124,7 +122,7 @@ mod tests {
sleep(Duration::from_millis(30));
hist.sender.send(Event::Tick).unwrap();
drop(hist.sender);
let entries: Vec<Entry<Hash>> = hist.receiver.iter().collect();
let entries: Vec<Entry> = hist.receiver.iter().collect();
// Ensure one entry is sent back for each tick sent in.
assert_eq!(entries.len(), 1);

View File

@ -14,33 +14,18 @@
/// was generated by the fastest processor at the time the entry was logged.
use hash::Hash;
use serde::Serialize;
use entry::{create_entry_mut, next_tick, Entry};
use event::Event;
use rayon::prelude::*;
/// Verifies the hashes and counts of a slice of events are all consistent.
pub fn verify_slice(events: &[Entry<Hash>], start_hash: &Hash) -> bool {
pub fn verify_slice(events: &[Entry], start_hash: &Hash) -> bool {
let genesis = [Entry::new_tick(Default::default(), start_hash)];
let event_pairs = genesis.par_iter().chain(events).zip(events);
event_pairs.all(|(x0, x1)| x1.verify(&x0.id))
}
/// Verifies the hashes and counts of a slice of events are all consistent.
pub fn verify_slice_i64(events: &[Entry<i64>], start_hash: &Hash) -> bool {
let genesis = [Entry::new_tick(Default::default(), start_hash)];
let event_pairs = genesis.par_iter().chain(events).zip(events);
event_pairs.all(|(x0, x1)| x1.verify(&x0.id))
}
/// Verifies the hashes and events serially. Exists only for reference.
pub fn verify_slice_seq<T: Serialize>(events: &[Entry<T>], start_hash: &Hash) -> bool {
let genesis = [Entry::new_tick(0, start_hash)];
let mut event_pairs = genesis.iter().chain(events).zip(events);
event_pairs.all(|(x0, x1)| x1.verify(&x0.id))
}
pub fn create_entries<T: Serialize>(start_hash: &Hash, events: Vec<Event<T>>) -> Vec<Entry<T>> {
pub fn create_entries(start_hash: &Hash, events: Vec<Event>) -> Vec<Entry> {
let mut id = *start_hash;
events
.into_iter()
@ -49,7 +34,7 @@ pub fn create_entries<T: Serialize>(start_hash: &Hash, events: Vec<Event<T>>) ->
}
/// Create a vector of Ticks of length 'len' from 'start_hash' hash and 'num_hashes'.
pub fn next_ticks(start_hash: &Hash, num_hashes: u64, len: usize) -> Vec<Entry<Hash>> {
pub fn next_ticks(start_hash: &Hash, num_hashes: u64, len: usize) -> Vec<Entry> {
let mut id = *start_hash;
let mut ticks = vec![];
for _ in 0..len {
@ -67,7 +52,8 @@ mod tests {
use transaction::Transaction;
use hash::hash;
fn verify_slice_generic(verify_slice: fn(&[Entry<Hash>], &Hash) -> bool) {
#[test]
fn test_verify_slice() {
let zero = Hash::default();
let one = hash(&zero);
assert!(verify_slice(&vec![], &zero)); // base case
@ -80,25 +66,14 @@ mod tests {
assert!(!verify_slice(&bad_ticks, &zero)); // inductive step, bad
}
#[test]
fn test_verify_slice() {
verify_slice_generic(verify_slice);
}
#[test]
fn test_verify_slice_seq() {
verify_slice_generic(verify_slice_seq::<Hash>);
}
#[test]
fn test_reorder_attack() {
let zero = Hash::default();
let one = hash(&zero);
// First, verify entries
let keypair = generate_keypair();
let tr0 = Transaction::new(&keypair, get_pubkey(&keypair), zero, zero);
let tr1 = Transaction::new(&keypair, get_pubkey(&keypair), one, zero);
let tr0 = Transaction::new(&keypair, get_pubkey(&keypair), 0, zero);
let tr1 = Transaction::new(&keypair, get_pubkey(&keypair), 1, zero);
let events = vec![Event::Transaction(tr0), Event::Transaction(tr1)];
let mut entries = create_entries(&zero, events);
assert!(verify_slice(&entries, &zero));

View File

@ -10,8 +10,6 @@ use std::time::{Duration, Instant};
use hash::Hash;
use entry::{create_entry_mut, Entry};
use event::Event;
use serde::Serialize;
use std::fmt::Debug;
use serde_json;
#[derive(Debug, PartialEq, Eq)]
@ -20,20 +18,16 @@ pub enum ExitReason {
SendDisconnected,
}
pub struct Logger<T> {
pub sender: SyncSender<Entry<T>>,
pub receiver: Receiver<Event<T>>,
pub struct Logger {
pub sender: SyncSender<Entry>,
pub receiver: Receiver<Event>,
pub last_id: Hash,
pub num_hashes: u64,
pub num_ticks: u64,
}
impl<T: Serialize + Clone + Debug> Logger<T> {
pub fn new(
receiver: Receiver<Event<T>>,
sender: SyncSender<Entry<T>>,
start_hash: Hash,
) -> Self {
impl Logger {
pub fn new(receiver: Receiver<Event>, sender: SyncSender<Entry>, start_hash: Hash) -> Self {
Logger {
receiver,
sender,
@ -43,7 +37,7 @@ impl<T: Serialize + Clone + Debug> Logger<T> {
}
}
pub fn log_event(&mut self, event: Event<T>) -> Result<Entry<T>, ExitReason> {
pub fn log_event(&mut self, event: Event) -> Result<Entry, ExitReason> {
let entry = create_entry_mut(&mut self.last_id, &mut self.num_hashes, event);
println!("{}", serde_json::to_string(&entry).unwrap());
Ok(entry)