2018-02-18 08:59:15 -08:00
|
|
|
//! The `log` module provides the foundational data structures for Proof-of-History,
//! an ordered log of events in time.
|
2018-02-15 09:13:56 -08:00
|
|
|
|
2018-02-18 08:59:15 -08:00
|
|
|
/// Each log entry contains three pieces of data. The 'num_hashes' field is the number
|
2018-03-04 06:34:38 -08:00
|
|
|
/// of hashes performed since the previous entry. The 'id' field is the result
|
|
|
|
/// of hashing 'id' from the previous entry 'num_hashes' times. The 'event'
|
|
|
|
/// field points to an Event that took place shortly after 'id' was generated.
|
2018-02-15 09:13:56 -08:00
|
|
|
///
|
2018-02-15 09:48:30 -08:00
|
|
|
/// If you divide 'num_hashes' by the amount of time it takes to generate a new hash, you
|
2018-02-15 09:13:56 -08:00
|
|
|
/// get a duration estimate since the last event. Since processing power increases
|
2018-02-15 09:48:30 -08:00
|
|
|
/// over time, one should expect the duration 'num_hashes' represents to decrease proportionally.
|
2018-02-15 09:13:56 -08:00
|
|
|
/// Though processing power varies across nodes, the network gives priority to the
|
|
|
|
/// fastest processor. Duration should therefore be estimated by assuming that the hash
|
|
|
|
/// was generated by the fastest processor at the time the entry was logged.
|
2018-02-19 15:17:13 -08:00
|
|
|
|
2018-02-20 15:26:11 -08:00
|
|
|
use generic_array::GenericArray;
|
2018-03-02 07:43:54 -08:00
|
|
|
use generic_array::typenum::U32;
|
2018-02-26 15:42:31 -08:00
|
|
|
use serde::Serialize;
|
2018-03-06 11:26:39 -08:00
|
|
|
use event::Event;
|
2018-03-04 06:28:51 -08:00
|
|
|
use sha2::{Digest, Sha256};
|
|
|
|
use rayon::prelude::*;
|
2018-02-26 15:42:31 -08:00
|
|
|
|
2018-02-19 15:17:13 -08:00
|
|
|
/// A 32-byte digest, the output size of Sha256.
pub type Sha256Hash = GenericArray<u8, U32>;
|
|
|
|
|
2018-02-20 15:26:11 -08:00
|
|
|
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone)]
pub struct Entry<T> {
    /// Number of hashes performed since the previous entry.
    pub num_hashes: u64,
    /// Result of hashing the previous entry's 'id' 'num_hashes' times
    /// (mixing in the event signature, when the event has one).
    pub id: Sha256Hash,
    /// The event that took place shortly after 'id' was generated.
    pub event: Event<T>,
}
|
|
|
|
|
2018-03-06 11:35:12 -08:00
|
|
|
impl<T: Serialize> Entry<T> {
|
2018-02-18 08:53:38 -08:00
|
|
|
/// Creates a Entry from the number of hashes 'num_hashes' since the previous event
|
2018-03-04 06:34:38 -08:00
|
|
|
/// and that resulting 'id'.
|
|
|
|
pub fn new_tick(num_hashes: u64, id: &Sha256Hash) -> Self {
|
2018-02-18 08:53:38 -08:00
|
|
|
Entry {
|
2018-02-15 09:48:30 -08:00
|
|
|
num_hashes,
|
2018-03-04 06:34:38 -08:00
|
|
|
id: *id,
|
2018-02-20 12:07:54 -08:00
|
|
|
event: Event::Tick,
|
2018-02-15 09:48:30 -08:00
|
|
|
}
|
2018-02-15 09:13:56 -08:00
|
|
|
}
|
2018-03-06 11:35:12 -08:00
|
|
|
|
|
|
|
/// Verifies self.id is the result of hashing a 'start_hash' 'self.num_hashes' times.
|
|
|
|
/// If the event is not a Tick, then hash that as well.
|
|
|
|
pub fn verify(&self, start_hash: &Sha256Hash) -> bool {
|
|
|
|
if !self.event.verify() {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
self.id == next_hash(start_hash, self.num_hashes, &self.event)
|
|
|
|
}
|
2018-02-15 10:45:04 -08:00
|
|
|
}
|
|
|
|
|
2018-02-24 05:53:36 -08:00
|
|
|
/// Return a Sha256 hash for the given data.
pub fn hash(val: &[u8]) -> Sha256Hash {
    let mut hasher = Sha256::default();
    hasher.input(val);
    hasher.result()
}
|
|
|
|
|
2018-02-20 12:07:54 -08:00
|
|
|
/// Return the hash of the given hash extended with the given value.
|
2018-03-04 06:34:38 -08:00
|
|
|
pub fn extend_and_hash(id: &Sha256Hash, val: &[u8]) -> Sha256Hash {
|
|
|
|
let mut hash_data = id.to_vec();
|
2018-02-20 12:07:54 -08:00
|
|
|
hash_data.extend_from_slice(val);
|
|
|
|
hash(&hash_data)
|
|
|
|
}
|
|
|
|
|
2018-03-04 08:52:33 -08:00
|
|
|
/// Creates the hash 'num_hashes' after start_hash. If the event contains
|
|
|
|
/// signature, the final hash will be a hash of both the previous ID and
|
|
|
|
/// the signature.
|
2018-02-26 15:42:31 -08:00
|
|
|
pub fn next_hash<T: Serialize>(
|
2018-02-26 14:31:01 -08:00
|
|
|
start_hash: &Sha256Hash,
|
|
|
|
num_hashes: u64,
|
2018-02-26 15:42:31 -08:00
|
|
|
event: &Event<T>,
|
2018-02-26 14:31:01 -08:00
|
|
|
) -> Sha256Hash {
|
2018-03-04 06:34:38 -08:00
|
|
|
let mut id = *start_hash;
|
2018-03-06 11:26:39 -08:00
|
|
|
let sig = event.get_signature();
|
2018-03-04 08:52:33 -08:00
|
|
|
let start_index = if sig.is_some() { 1 } else { 0 };
|
|
|
|
for _ in start_index..num_hashes {
|
2018-03-04 06:34:38 -08:00
|
|
|
id = hash(&id);
|
2018-02-15 09:13:56 -08:00
|
|
|
}
|
2018-03-04 08:52:33 -08:00
|
|
|
if let Some(sig) = sig {
|
|
|
|
id = extend_and_hash(&id, &sig);
|
2018-03-04 08:21:45 -08:00
|
|
|
}
|
2018-03-04 08:52:33 -08:00
|
|
|
id
|
2018-02-20 12:07:54 -08:00
|
|
|
}
|
|
|
|
|
2018-03-04 08:52:33 -08:00
|
|
|
/// Creates the next Entry 'num_hashes' after 'start_hash'.
|
2018-03-04 13:30:39 -08:00
|
|
|
pub fn create_entry<T: Serialize>(
|
2018-02-26 14:37:33 -08:00
|
|
|
start_hash: &Sha256Hash,
|
2018-03-04 13:30:39 -08:00
|
|
|
cur_hashes: u64,
|
2018-02-26 15:42:31 -08:00
|
|
|
event: Event<T>,
|
|
|
|
) -> Entry<T> {
|
2018-03-06 11:26:39 -08:00
|
|
|
let sig = event.get_signature();
|
2018-03-04 13:30:39 -08:00
|
|
|
let num_hashes = cur_hashes + if sig.is_some() { 1 } else { 0 };
|
|
|
|
let id = next_hash(start_hash, 0, &event);
|
2018-02-20 12:07:54 -08:00
|
|
|
Entry {
|
|
|
|
num_hashes,
|
2018-03-04 13:30:39 -08:00
|
|
|
id,
|
2018-02-20 12:07:54 -08:00
|
|
|
event,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-02-26 15:42:31 -08:00
|
|
|
/// Creates the next Tick Entry 'num_hashes' after 'start_hash'.
|
2018-03-04 13:30:39 -08:00
|
|
|
pub fn create_entry_mut<T: Serialize>(
|
2018-02-26 14:31:01 -08:00
|
|
|
start_hash: &mut Sha256Hash,
|
2018-03-04 13:30:39 -08:00
|
|
|
cur_hashes: &mut u64,
|
2018-02-26 15:42:31 -08:00
|
|
|
event: Event<T>,
|
|
|
|
) -> Entry<T> {
|
2018-03-04 13:30:39 -08:00
|
|
|
let entry = create_entry(start_hash, *cur_hashes, event);
|
2018-03-04 06:34:38 -08:00
|
|
|
*start_hash = entry.id;
|
2018-03-04 13:30:39 -08:00
|
|
|
*cur_hashes = 0;
|
2018-02-26 10:01:19 -08:00
|
|
|
entry
|
|
|
|
}
|
|
|
|
|
2018-02-20 12:07:54 -08:00
|
|
|
/// Creates the next Tick Entry 'num_hashes' after 'start_hash'.
|
2018-02-26 15:42:31 -08:00
|
|
|
pub fn next_tick<T: Serialize>(start_hash: &Sha256Hash, num_hashes: u64) -> Entry<T> {
|
2018-03-04 13:30:39 -08:00
|
|
|
let event = Event::Tick;
|
|
|
|
Entry {
|
|
|
|
num_hashes,
|
|
|
|
id: next_hash(start_hash, num_hashes, &event),
|
|
|
|
event,
|
|
|
|
}
|
2018-02-15 09:13:56 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Verifies the hashes and counts of a slice of events are all consistent.
|
2018-02-26 14:37:33 -08:00
|
|
|
pub fn verify_slice(events: &[Entry<Sha256Hash>], start_hash: &Sha256Hash) -> bool {
|
2018-02-19 15:17:13 -08:00
|
|
|
let genesis = [Entry::new_tick(Default::default(), start_hash)];
|
2018-02-15 09:13:56 -08:00
|
|
|
let event_pairs = genesis.par_iter().chain(events).zip(events);
|
2018-03-06 11:35:12 -08:00
|
|
|
event_pairs.all(|(x0, x1)| x1.verify(&x0.id))
|
2018-02-15 09:13:56 -08:00
|
|
|
}
|
|
|
|
|
2018-02-28 17:04:35 -08:00
|
|
|
/// Verifies the hashes and counts of a slice of events are all consistent.
|
2018-03-05 16:29:32 -08:00
|
|
|
pub fn verify_slice_i64(events: &[Entry<i64>], start_hash: &Sha256Hash) -> bool {
|
2018-02-28 17:04:35 -08:00
|
|
|
let genesis = [Entry::new_tick(Default::default(), start_hash)];
|
|
|
|
let event_pairs = genesis.par_iter().chain(events).zip(events);
|
2018-03-06 11:35:12 -08:00
|
|
|
event_pairs.all(|(x0, x1)| x1.verify(&x0.id))
|
2018-02-28 17:04:35 -08:00
|
|
|
}
|
|
|
|
|
2018-02-15 09:13:56 -08:00
|
|
|
/// Verifies the hashes and events serially. Exists only for reference.
|
2018-02-26 15:42:31 -08:00
|
|
|
pub fn verify_slice_seq<T: Serialize>(events: &[Entry<T>], start_hash: &Sha256Hash) -> bool {
|
2018-02-18 08:53:38 -08:00
|
|
|
let genesis = [Entry::new_tick(0, start_hash)];
|
2018-02-15 15:00:05 -08:00
|
|
|
let mut event_pairs = genesis.iter().chain(events).zip(events);
|
2018-03-06 11:35:12 -08:00
|
|
|
event_pairs.all(|(x0, x1)| x1.verify(&x0.id))
|
2018-02-15 09:13:56 -08:00
|
|
|
}
|
|
|
|
|
2018-02-26 15:42:31 -08:00
|
|
|
pub fn create_entries<T: Serialize>(
|
2018-02-26 14:31:01 -08:00
|
|
|
start_hash: &Sha256Hash,
|
2018-02-26 15:42:31 -08:00
|
|
|
events: Vec<Event<T>>,
|
|
|
|
) -> Vec<Entry<T>> {
|
2018-03-04 06:34:38 -08:00
|
|
|
let mut id = *start_hash;
|
2018-02-26 10:01:19 -08:00
|
|
|
events
|
2018-02-26 15:42:31 -08:00
|
|
|
.into_iter()
|
2018-03-04 13:30:39 -08:00
|
|
|
.map(|event| create_entry_mut(&mut id, &mut 0, event))
|
2018-02-26 10:01:19 -08:00
|
|
|
.collect()
|
|
|
|
}
|
|
|
|
|
2018-02-15 10:50:48 -08:00
|
|
|
/// Create a vector of Ticks of length 'len' from 'start_hash' hash and 'num_hashes'.
|
2018-03-04 13:30:39 -08:00
|
|
|
pub fn next_ticks(start_hash: &Sha256Hash, num_hashes: u64, len: usize) -> Vec<Entry<Sha256Hash>> {
|
2018-03-04 06:34:38 -08:00
|
|
|
let mut id = *start_hash;
|
2018-03-04 13:30:39 -08:00
|
|
|
let mut ticks = vec![];
|
|
|
|
for _ in 0..len {
|
|
|
|
let entry = next_tick(&id, num_hashes);
|
|
|
|
id = entry.id;
|
|
|
|
ticks.push(entry);
|
|
|
|
}
|
|
|
|
ticks
|
2018-02-15 09:13:56 -08:00
|
|
|
}
|
|
|
|
|
2018-02-15 16:47:05 -08:00
|
|
|
#[cfg(test)]
mod tests {
    use super::*;
    use signature::{generate_keypair, get_pubkey};
    use transaction::{sign_claim_data, sign_transaction_data, Transaction};

    // Entry::verify must accept an entry built from a hash and reject any other.
    #[test]
    fn test_event_verify() {
        let zero = Sha256Hash::default();
        let one = hash(&zero);
        assert!(Entry::<u8>::new_tick(0, &zero).verify(&zero)); // base case
        assert!(!Entry::<u8>::new_tick(0, &zero).verify(&one)); // base case, bad
        assert!(next_tick::<u8>(&zero, 1).verify(&zero)); // inductive step
        assert!(!next_tick::<u8>(&zero, 1).verify(&one)); // inductive step, bad
    }

    // next_tick must record the requested hash count in the entry.
    #[test]
    fn test_next_tick() {
        let zero = Sha256Hash::default();
        assert_eq!(next_tick::<Sha256Hash>(&zero, 1).num_hashes, 1)
    }

    // Shared body for the parallel and sequential slice verifiers,
    // exercising empty, singleton, chained, and tampered slices.
    fn verify_slice_generic(verify_slice: fn(&[Entry<Sha256Hash>], &Sha256Hash) -> bool) {
        let zero = Sha256Hash::default();
        let one = hash(&zero);
        assert!(verify_slice(&vec![], &zero)); // base case
        assert!(verify_slice(&vec![Entry::new_tick(0, &zero)], &zero)); // singleton case 1
        assert!(!verify_slice(&vec![Entry::new_tick(0, &zero)], &one)); // singleton case 2, bad
        assert!(verify_slice(&next_ticks(&zero, 0, 2), &zero)); // inductive step

        // Corrupting an id mid-chain must fail verification.
        let mut bad_ticks = next_ticks(&zero, 0, 2);
        bad_ticks[1].id = one;
        assert!(!verify_slice(&bad_ticks, &zero)); // inductive step, bad
    }

    #[test]
    fn test_verify_slice() {
        verify_slice_generic(verify_slice);
    }

    #[test]
    fn test_verify_slice_seq() {
        verify_slice_generic(verify_slice_seq::<Sha256Hash>);
    }

    // Swapping two signed events between entries must break verification,
    // since each id commits to its entry's event signature.
    #[test]
    fn test_reorder_attack() {
        let zero = Sha256Hash::default();
        let one = hash(&zero);

        // First, verify entries
        let keypair = generate_keypair();
        let event0 = Event::new_claim(
            get_pubkey(&keypair),
            zero,
            zero,
            sign_claim_data(&zero, &keypair, &zero),
        );
        let event1 = Event::new_claim(
            get_pubkey(&keypair),
            one,
            zero,
            sign_claim_data(&one, &keypair, &zero),
        );
        let events = vec![event0, event1];
        let mut entries = create_entries(&zero, events);
        assert!(verify_slice(&entries, &zero));

        // Next, swap two events and ensure verification fails.
        let event0 = entries[0].event.clone();
        let event1 = entries[1].event.clone();
        entries[0].event = event1;
        entries[1].event = event0;
        assert!(!verify_slice(&entries, &zero));
    }

    // A properly signed claim event must verify.
    #[test]
    fn test_claim() {
        let keypair = generate_keypair();
        let asset = hash(b"hello, world");
        let zero = Sha256Hash::default();
        let event0 = Event::new_claim(
            get_pubkey(&keypair),
            asset,
            zero,
            sign_claim_data(&asset, &keypair, &zero),
        );
        let entries = create_entries(&zero, vec![event0]);
        assert!(verify_slice(&entries, &zero));
    }

    // A claim whose signature covers different data must be rejected.
    #[test]
    fn test_wrong_data_claim_attack() {
        let keypair = generate_keypair();
        let zero = Sha256Hash::default();
        let event0 = Event::new_claim(
            get_pubkey(&keypair),
            hash(b"goodbye cruel world"),
            zero,
            sign_claim_data(&hash(b"hello, world"), &keypair, &zero),
        );
        let entries = create_entries(&zero, vec![event0]);
        assert!(!verify_slice(&entries, &zero));
    }

    // A properly signed transfer transaction must verify.
    #[test]
    fn test_transfer() {
        let zero = Sha256Hash::default();
        let keypair0 = generate_keypair();
        let keypair1 = generate_keypair();
        let pubkey1 = get_pubkey(&keypair1);
        let asset = hash(b"hello, world");
        let event0 = Event::Transaction(Transaction {
            from: get_pubkey(&keypair0),
            to: pubkey1,
            asset,
            last_id: zero,
            sig: sign_transaction_data(&asset, &keypair0, &pubkey1, &zero),
        });
        let entries = create_entries(&zero, vec![event0]);
        assert!(verify_slice(&entries, &zero));
    }

    // A transfer whose asset was altered after signing must be rejected.
    #[test]
    fn test_wrong_data_transfer_attack() {
        let keypair0 = generate_keypair();
        let keypair1 = generate_keypair();
        let pubkey1 = get_pubkey(&keypair1);
        let asset = hash(b"hello, world");
        let zero = Sha256Hash::default();
        let event0 = Event::Transaction(Transaction {
            from: get_pubkey(&keypair0),
            to: pubkey1,
            asset: hash(b"goodbye cruel world"), // <-- attack!
            last_id: zero,
            sig: sign_transaction_data(&asset, &keypair0, &pubkey1, &zero),
        });
        let entries = create_entries(&zero, vec![event0]);
        assert!(!verify_slice(&entries, &zero));
    }

    // A transfer whose recipient was swapped after signing must be rejected.
    #[test]
    fn test_transfer_hijack_attack() {
        let keypair0 = generate_keypair();
        let keypair1 = generate_keypair();
        let thief_keypair = generate_keypair();
        let pubkey1 = get_pubkey(&keypair1);
        let asset = hash(b"hello, world");
        let zero = Sha256Hash::default();
        let event0 = Event::Transaction(Transaction {
            from: get_pubkey(&keypair0),
            to: get_pubkey(&thief_keypair), // <-- attack!
            asset: hash(b"goodbye cruel world"),
            last_id: zero,
            sig: sign_transaction_data(&asset, &keypair0, &pubkey1, &zero),
        });
        let entries = create_entries(&zero, vec![event0]);
        assert!(!verify_slice(&entries, &zero));
    }
}
|
|
|
|
|
2018-02-15 09:13:56 -08:00
|
|
|
#[cfg(all(feature = "unstable", test))]
mod bench {
    extern crate test;
    use self::test::Bencher;
    use log::*;

    // Benchmark the parallel verifier over 8 ticks of 10,000 hashes each.
    #[bench]
    fn event_bench(bencher: &mut Bencher) {
        let start_hash = Default::default();
        let events = next_ticks(&start_hash, 10_000, 8);
        bencher.iter(|| {
            assert!(verify_slice(&events, &start_hash));
        });
    }

    // Benchmark the sequential verifier on the same workload, for comparison.
    #[bench]
    fn event_bench_seq(bencher: &mut Bencher) {
        let start_hash = Default::default();
        let events = next_ticks(&start_hash, 10_000, 8);
        bencher.iter(|| {
            assert!(verify_slice_seq(&events, &start_hash));
        });
    }
}
|