Merge pull request #42 from garious/genesis

Make num_hashes more intuitive
This commit is contained in:
Greg Fitzgerald 2018-03-04 13:05:38 -07:00 committed by GitHub
commit 7da8a5e2d1
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
11 changed files with 91 additions and 111 deletions

View File

@ -71,9 +71,9 @@ fn main() {
Running the program should produce a log similar to:
```rust
Entry { num_hashes: 0, end_hash: [0, ...], event: Tick }
Entry { num_hashes: 2, end_hash: [67, ...], event: Transaction { data: [37, ...] } }
Entry { num_hashes: 3, end_hash: [123, ...], event: Tick }
Entry { num_hashes: 0, id: [0, ...], event: Tick }
Entry { num_hashes: 3, id: [67, ...], event: Transaction { data: [37, ...] } }
Entry { num_hashes: 3, id: [123, ...], event: Tick }
```
Proof-of-History
@ -86,7 +86,7 @@ assert!(verify_slice(&entries, &seed));
```
[It's a proof!](https://en.wikipedia.org/wiki/Curry%E2%80%93Howard_correspondence) For each entry returned by the
historian, we can verify that `end_hash` is the result of applying a sha256 hash to the previous `end_hash`
historian, we can verify that `id` is the result of applying a sha256 hash to the previous `id`
exactly `num_hashes` times, and then hashing the event data on top of that. Because the event data is
included in the hash, the events cannot be reordered without regenerating all the hashes.

View File

@ -1,17 +1,17 @@
msc {
client,historian,logger;
logger=>historian [ label = "e0 = Entry{hash: h0, n: 0, event: Tick}" ] ;
logger=>historian [ label = "e0 = Entry{id: h0, n: 0, event: Tick}" ] ;
logger=>logger [ label = "h1 = hash(h0)" ] ;
logger=>logger [ label = "h2 = hash(h1)" ] ;
client=>historian [ label = "Claim(d0)" ] ;
historian=>logger [ label = "Claim(d0)" ] ;
client=>historian [ label = "Transaction(d0)" ] ;
historian=>logger [ label = "Transaction(d0)" ] ;
logger=>logger [ label = "h3 = hash(h2 + d0)" ] ;
logger=>historian [ label = "e1 = Entry{hash: hash(h3), n: 2, event: Claim(d0)}" ] ;
logger=>historian [ label = "e1 = Entry{id: hash(h3), n: 3, event: Transaction(d0)}" ] ;
logger=>logger [ label = "h4 = hash(h3)" ] ;
logger=>logger [ label = "h5 = hash(h4)" ] ;
logger=>logger [ label = "h6 = hash(h5)" ] ;
logger=>historian [ label = "e2 = Entry{hash: h6, n: 3, event: Tick}" ] ;
logger=>historian [ label = "e2 = Entry{id: h6, n: 3, event: Tick}" ] ;
client=>historian [ label = "collect()" ] ;
historian=>client [ label = "entries = [e0, e1, e2]" ] ;
client=>client [ label = "verify_slice(entries, h0)" ] ;

View File

@ -3,13 +3,15 @@
//! transfer funds to other users.
use log::{hash, Entry, Sha256Hash};
use event::{Event, PublicKey, Signature};
use event::{get_pubkey, sign_transaction_data, Event, PublicKey, Signature};
use genesis::Genesis;
use historian::Historian;
use ring::signature::Ed25519KeyPair;
use std::sync::mpsc::SendError;
use std::collections::HashMap;
use std::result;
use std::thread::sleep;
use std::time::Duration;
#[derive(Debug, PartialEq, Eq)]
pub enum AccountingError {
@ -23,7 +25,7 @@ pub type Result<T> = result::Result<T, AccountingError>;
pub struct Accountant {
pub historian: Historian<u64>,
pub balances: HashMap<PublicKey, u64>,
pub end_hash: Sha256Hash,
pub last_id: Sha256Hash,
}
impl Accountant {
@ -33,7 +35,7 @@ impl Accountant {
let mut acc = Accountant {
historian: hist,
balances: HashMap::new(),
end_hash: start_hash,
last_id: start_hash,
};
for (i, event) in gen.create_events().into_iter().enumerate() {
acc.process_verified_event(event, i < 2).unwrap();
@ -48,23 +50,12 @@ impl Accountant {
}
if let Some(last_entry) = entries.last() {
self.end_hash = last_entry.end_hash;
self.last_id = last_entry.id;
}
entries
}
pub fn deposit(self: &mut Self, n: u64, keypair: &Ed25519KeyPair) -> Result<Signature> {
use event::{get_pubkey, sign_claim_data};
let to = get_pubkey(keypair);
let sig = sign_claim_data(&n, keypair);
let event = Event::new_claim(to, n, sig);
if !self.historian.verify_event(&event) {
return Err(AccountingError::InvalidEvent);
}
self.process_verified_event(event, true).map(|_| sig)
}
fn is_deposit(allow_deposits: bool, from: &PublicKey, to: &PublicKey) -> bool {
allow_deposits && from == to
}
@ -118,7 +109,6 @@ impl Accountant {
keypair: &Ed25519KeyPair,
to: PublicKey,
) -> Result<Signature> {
use event::{get_pubkey, sign_transaction_data};
let from = get_pubkey(keypair);
let sig = sign_transaction_data(&n, keypair, &to);
let event = Event::Transaction {
@ -135,8 +125,6 @@ impl Accountant {
}
pub fn wait_on_signature(self: &mut Self, wait_sig: &Signature) {
use std::thread::sleep;
use std::time::Duration;
let mut entries = self.sync();
let mut found = false;
while !found {
@ -158,6 +146,8 @@ mod tests {
use event::{generate_keypair, get_pubkey};
use logger::ExitReason;
use genesis::Creator;
use std::thread::sleep;
use std::time::Duration;
#[test]
fn test_accountant() {
@ -180,8 +170,6 @@ mod tests {
#[test]
fn test_invalid_transfer() {
use std::thread::sleep;
use std::time::Duration;
let bob = Creator::new(1_000);
let bob_pubkey = bob.pubkey;
let alice = Genesis::new(11_000, vec![bob]);
@ -210,9 +198,6 @@ mod tests {
let mut acc = Accountant::new(&alice, Some(2));
let alice_keypair = alice.get_keypair();
let bob_keypair = generate_keypair();
let sig = acc.deposit(10_000, &alice_keypair).unwrap();
acc.wait_on_signature(&sig);
let bob_pubkey = get_pubkey(&bob_keypair);
let sig = acc.transfer(500, &alice_keypair, bob_pubkey).unwrap();
acc.wait_on_signature(&sig);

View File

@ -1,6 +1,8 @@
use std::io;
use accountant::Accountant;
use event::{Event, PublicKey, Signature};
use std::net::UdpSocket;
use bincode::{deserialize, serialize};
pub struct AccountantSkel {
pub obj: Accountant,
@ -60,8 +62,6 @@ impl AccountantSkel {
/// UDP Server that forwards messages to Accountant methods.
pub fn serve(self: &mut Self, addr: &str) -> io::Result<()> {
use std::net::UdpSocket;
use bincode::{deserialize, serialize};
let socket = UdpSocket::bind(addr)?;
let mut buf = vec![0u8; 1024];
loop {

View File

@ -5,7 +5,7 @@
use std::net::UdpSocket;
use std::io;
use bincode::{deserialize, serialize};
use event::{PublicKey, Signature};
use event::{get_pubkey, sign_transaction_data, PublicKey, Signature};
use ring::signature::Ed25519KeyPair;
use accountant_skel::{Request, Response};
@ -40,7 +40,6 @@ impl AccountantStub {
keypair: &Ed25519KeyPair,
to: PublicKey,
) -> io::Result<Signature> {
use event::{get_pubkey, sign_transaction_data};
let from = get_pubkey(keypair);
let sig = sign_transaction_data(&n, keypair, &to);
self.transfer_signed(from, to, n, sig).map(|_| sig)

View File

@ -1,19 +1,18 @@
//extern crate serde_json;
extern crate silk;
use silk::accountant_stub::AccountantStub;
use silk::accountant_skel::AccountantSkel;
use silk::accountant::Accountant;
use silk::event::{generate_keypair, get_pubkey, sign_transaction_data, verify_event, Event};
use silk::genesis::Genesis;
use std::time::Instant;
use std::net::UdpSocket;
use std::thread::{sleep, spawn};
use std::time::Duration;
//use std::io::stdin;
fn main() {
use silk::accountant_stub::AccountantStub;
use silk::accountant_skel::AccountantSkel;
use silk::accountant::Accountant;
use silk::event::{generate_keypair, get_pubkey, sign_transaction_data};
use silk::genesis::Genesis;
use std::time::Instant;
use std::net::UdpSocket;
use std::thread::{sleep, spawn};
use std::time::Duration;
let addr = "127.0.0.1:8000";
let send_addr = "127.0.0.1:8001";
@ -53,7 +52,6 @@ fn main() {
);
println!("Verify signatures...");
use silk::event::{verify_event, Event};
let now = Instant::now();
for &(k, s) in &sigs {
let e = Event::Transaction {

View File

@ -22,7 +22,7 @@ fn main() {
drop(logger.sender);
let entries = receiver.iter().collect::<Vec<_>>();
verify_slice_u64(&entries, &entries[0].end_hash);
verify_slice_u64(&entries, &entries[0].id);
println!("[");
let len = entries.len();
for (i, x) in entries.iter().enumerate() {

View File

@ -2,9 +2,9 @@
//! an ordered log of events in time.
/// Each log entry contains three pieces of data. The 'num_hashes' field is the number
/// of hashes performed since the previous entry. The 'end_hash' field is the result
/// of hashing 'end_hash' from the previous entry 'num_hashes' times. The 'event'
/// field points to an Event that took place shortly after 'end_hash' was generated.
/// of hashes performed since the previous entry. The 'id' field is the result
/// of hashing 'id' from the previous entry 'num_hashes' times. The 'event'
/// field points to an Event that took place shortly after 'id' was generated.
///
/// If you divide 'num_hashes' by the amount of time it takes to generate a new hash, you
/// get a duration estimate since the last event. Since processing power increases
@ -16,7 +16,10 @@
use generic_array::GenericArray;
use generic_array::typenum::{U32, U64};
use ring::signature::Ed25519KeyPair;
use ring::{rand, signature};
use untrusted;
use serde::Serialize;
use bincode::serialize;
pub type PublicKey = GenericArray<u8, U32>;
pub type Signature = GenericArray<u8, U64>;
@ -24,7 +27,7 @@ pub type Signature = GenericArray<u8, U64>;
/// When 'event' is Tick, the event represents a simple clock tick, and exists for the
/// sole purpose of improving the performance of event log verification. A tick can
/// be generated in 'num_hashes' hashes and verified in 'num_hashes' hashes. By logging
/// a hash alongside the tick, each tick can be verified in parallel using the 'end_hash'
/// a hash alongside the tick, each tick can be verified in parallel using the 'id'
/// of the preceding tick to seed its hashing.
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone)]
pub enum Event<T> {
@ -50,8 +53,6 @@ impl<T> Event<T> {
/// Return a new ED25519 keypair
pub fn generate_keypair() -> Ed25519KeyPair {
use ring::{rand, signature};
use untrusted;
let rng = rand::SystemRandom::new();
let pkcs8_bytes = signature::Ed25519KeyPair::generate_pkcs8(&rng).unwrap();
signature::Ed25519KeyPair::from_pkcs8(untrusted::Input::from(&pkcs8_bytes)).unwrap()
@ -64,7 +65,6 @@ pub fn get_pubkey(keypair: &Ed25519KeyPair) -> PublicKey {
/// Return a signature for the given data using the private key from the given keypair.
fn sign_serialized<T: Serialize>(data: &T, keypair: &Ed25519KeyPair) -> Signature {
use bincode::serialize;
let serialized = serialize(data).unwrap();
GenericArray::clone_from_slice(keypair.sign(&serialized).as_ref())
}
@ -86,8 +86,6 @@ pub fn sign_claim_data<T: Serialize>(data: &T, keypair: &Ed25519KeyPair) -> Sign
/// Verify a signed message with the given public key.
pub fn verify_signature(peer_public_key_bytes: &[u8], msg_bytes: &[u8], sig_bytes: &[u8]) -> bool {
use untrusted;
use ring::signature;
let peer_public_key = untrusted::Input::from(peer_public_key_bytes);
let msg = untrusted::Input::from(msg_bytes);
let sig = untrusted::Input::from(sig_bytes);
@ -102,7 +100,6 @@ pub fn get_signature<T>(event: &Event<T>) -> Option<Signature> {
}
pub fn verify_event<T: Serialize>(event: &Event<T>) -> bool {
use bincode::serialize;
if let Event::Transaction {
from,
to,

View File

@ -3,13 +3,14 @@
use std::thread::JoinHandle;
use std::collections::HashSet;
use std::sync::mpsc::{Receiver, SyncSender};
use std::sync::mpsc::{sync_channel, Receiver, SyncSender};
use std::time::Instant;
use log::{hash, Entry, Sha256Hash};
use logger::{verify_event_and_reserve_signature, ExitReason, Logger};
use event::{Event, Signature};
use serde::Serialize;
use std::fmt::Debug;
use std::thread;
pub struct Historian<T> {
pub sender: SyncSender<Event<T>>,
@ -20,7 +21,6 @@ pub struct Historian<T> {
impl<T: 'static + Serialize + Clone + Debug + Send> Historian<T> {
pub fn new(start_hash: &Sha256Hash, ms_per_tick: Option<u64>) -> Self {
use std::sync::mpsc::sync_channel;
let (sender, event_receiver) = sync_channel(1000);
let (entry_sender, receiver) = sync_channel(1000);
let thread_hdl =
@ -46,7 +46,6 @@ impl<T: 'static + Serialize + Clone + Debug + Send> Historian<T> {
receiver: Receiver<Event<T>>,
sender: SyncSender<Entry<T>>,
) -> JoinHandle<(Entry<T>, ExitReason)> {
use std::thread;
thread::spawn(move || {
let mut logger = Logger::new(receiver, sender, start_hash);
let now = Instant::now();
@ -54,7 +53,7 @@ impl<T: 'static + Serialize + Clone + Debug + Send> Historian<T> {
if let Err(err) = logger.log_events(now, ms_per_tick) {
return err;
}
logger.end_hash = hash(&logger.end_hash);
logger.last_id = hash(&logger.last_id);
logger.num_hashes += 1;
}
})

View File

@ -2,9 +2,9 @@
//! an ordered log of events in time.
/// Each log entry contains three pieces of data. The 'num_hashes' field is the number
/// of hashes performed since the previous entry. The 'end_hash' field is the result
/// of hashing 'end_hash' from the previous entry 'num_hashes' times. The 'event'
/// field points to an Event that took place shortly after 'end_hash' was generated.
/// of hashes performed since the previous entry. The 'id' field is the result
/// of hashing 'id' from the previous entry 'num_hashes' times. The 'event'
/// field points to an Event that took place shortly after 'id' was generated.
///
/// If you divide 'num_hashes' by the amount of time it takes to generate a new hash, you
/// get a duration estimate since the last event. Since processing power increases
@ -16,24 +16,27 @@
use generic_array::GenericArray;
use generic_array::typenum::U32;
use serde::Serialize;
use event::*;
use event::{get_signature, verify_event, Event};
use sha2::{Digest, Sha256};
use rayon::prelude::*;
use std::iter;
pub type Sha256Hash = GenericArray<u8, U32>;
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone)]
pub struct Entry<T> {
pub num_hashes: u64,
pub end_hash: Sha256Hash,
pub id: Sha256Hash,
pub event: Event<T>,
}
impl<T> Entry<T> {
/// Creates a Entry from the number of hashes 'num_hashes' since the previous event
/// and that resulting 'end_hash'.
pub fn new_tick(num_hashes: u64, end_hash: &Sha256Hash) -> Self {
/// and that resulting 'id'.
pub fn new_tick(num_hashes: u64, id: &Sha256Hash) -> Self {
Entry {
num_hashes,
end_hash: *end_hash,
id: *id,
event: Event::Tick,
}
}
@ -41,40 +44,39 @@ impl<T> Entry<T> {
/// Return a Sha256 hash for the given data.
pub fn hash(val: &[u8]) -> Sha256Hash {
use sha2::{Digest, Sha256};
let mut hasher = Sha256::default();
hasher.input(val);
hasher.result()
}
/// Return the hash of the given hash extended with the given value.
pub fn extend_and_hash(end_hash: &Sha256Hash, val: &[u8]) -> Sha256Hash {
let mut hash_data = end_hash.to_vec();
pub fn extend_and_hash(id: &Sha256Hash, val: &[u8]) -> Sha256Hash {
let mut hash_data = id.to_vec();
hash_data.extend_from_slice(val);
hash(&hash_data)
}
pub fn hash_event<T>(end_hash: &Sha256Hash, event: &Event<T>) -> Sha256Hash {
match get_signature(event) {
None => *end_hash,
Some(sig) => extend_and_hash(end_hash, &sig),
}
}
/// Creates the hash 'num_hashes' after start_hash, plus an additional hash for any event data.
/// Creates the hash 'num_hashes' after start_hash. If the event contains a
/// signature, the final hash will be a hash of both the previous ID and
/// the signature.
pub fn next_hash<T: Serialize>(
start_hash: &Sha256Hash,
num_hashes: u64,
event: &Event<T>,
) -> Sha256Hash {
let mut end_hash = *start_hash;
for _ in 0..num_hashes {
end_hash = hash(&end_hash);
let mut id = *start_hash;
let sig = get_signature(event);
let start_index = if sig.is_some() { 1 } else { 0 };
for _ in start_index..num_hashes {
id = hash(&id);
}
hash_event(&end_hash, event)
if let Some(sig) = sig {
id = extend_and_hash(&id, &sig);
}
id
}
/// Creates the next Tick Entry 'num_hashes' after 'start_hash'.
/// Creates the next Entry 'num_hashes' after 'start_hash'.
pub fn next_entry<T: Serialize>(
start_hash: &Sha256Hash,
num_hashes: u64,
@ -82,7 +84,7 @@ pub fn next_entry<T: Serialize>(
) -> Entry<T> {
Entry {
num_hashes,
end_hash: next_hash(start_hash, num_hashes, &event),
id: next_hash(start_hash, num_hashes, &event),
event,
}
}
@ -94,7 +96,7 @@ pub fn next_entry_mut<T: Serialize>(
event: Event<T>,
) -> Entry<T> {
let entry = next_entry(start_hash, num_hashes, event);
*start_hash = entry.end_hash;
*start_hash = entry.id;
entry
}
@ -103,36 +105,34 @@ pub fn next_tick<T: Serialize>(start_hash: &Sha256Hash, num_hashes: u64) -> Entr
next_entry(start_hash, num_hashes, Event::Tick)
}
/// Verifies self.end_hash is the result of hashing a 'start_hash' 'self.num_hashes' times.
/// Verifies self.id is the result of hashing a 'start_hash' 'self.num_hashes' times.
/// If the event is not a Tick, then hash that as well.
pub fn verify_entry<T: Serialize>(entry: &Entry<T>, start_hash: &Sha256Hash) -> bool {
if !verify_event(&entry.event) {
return false;
}
entry.end_hash == next_hash(start_hash, entry.num_hashes, &entry.event)
entry.id == next_hash(start_hash, entry.num_hashes, &entry.event)
}
/// Verifies the hashes and counts of a slice of events are all consistent.
pub fn verify_slice(events: &[Entry<Sha256Hash>], start_hash: &Sha256Hash) -> bool {
use rayon::prelude::*;
let genesis = [Entry::new_tick(Default::default(), start_hash)];
let event_pairs = genesis.par_iter().chain(events).zip(events);
event_pairs.all(|(x0, x1)| verify_entry(&x1, &x0.end_hash))
event_pairs.all(|(x0, x1)| verify_entry(&x1, &x0.id))
}
/// Verifies the hashes and counts of a slice of events are all consistent.
pub fn verify_slice_u64(events: &[Entry<u64>], start_hash: &Sha256Hash) -> bool {
use rayon::prelude::*;
let genesis = [Entry::new_tick(Default::default(), start_hash)];
let event_pairs = genesis.par_iter().chain(events).zip(events);
event_pairs.all(|(x0, x1)| verify_entry(&x1, &x0.end_hash))
event_pairs.all(|(x0, x1)| verify_entry(&x1, &x0.id))
}
/// Verifies the hashes and events serially. Exists only for reference.
pub fn verify_slice_seq<T: Serialize>(events: &[Entry<T>], start_hash: &Sha256Hash) -> bool {
let genesis = [Entry::new_tick(0, start_hash)];
let mut event_pairs = genesis.iter().chain(events).zip(events);
event_pairs.all(|(x0, x1)| verify_entry(&x1, &x0.end_hash))
event_pairs.all(|(x0, x1)| verify_entry(&x1, &x0.id))
}
pub fn create_entries<T: Serialize>(
@ -140,10 +140,10 @@ pub fn create_entries<T: Serialize>(
num_hashes: u64,
events: Vec<Event<T>>,
) -> Vec<Entry<T>> {
let mut end_hash = *start_hash;
let mut id = *start_hash;
events
.into_iter()
.map(|event| next_entry_mut(&mut end_hash, num_hashes, event))
.map(|event| next_entry_mut(&mut id, num_hashes, event))
.collect()
}
@ -153,17 +153,17 @@ pub fn create_ticks(
num_hashes: u64,
len: usize,
) -> Vec<Entry<Sha256Hash>> {
use std::iter;
let mut end_hash = *start_hash;
let mut id = *start_hash;
iter::repeat(Event::Tick)
.take(len)
.map(|event| next_entry_mut(&mut end_hash, num_hashes, event))
.map(|event| next_entry_mut(&mut id, num_hashes, event))
.collect()
}
#[cfg(test)]
mod tests {
use super::*;
use event::{generate_keypair, get_pubkey, sign_claim_data, sign_transaction_data};
#[test]
fn test_event_verify() {
@ -190,7 +190,7 @@ mod tests {
assert!(verify_slice(&create_ticks(&zero, 0, 2), &zero)); // inductive step
let mut bad_ticks = create_ticks(&zero, 0, 2);
bad_ticks[1].end_hash = one;
bad_ticks[1].id = one;
assert!(!verify_slice(&bad_ticks, &zero)); // inductive step, bad
}

View File

@ -6,9 +6,9 @@
//! The resulting stream of entries represents ordered events in time.
use std::collections::HashSet;
use std::sync::mpsc::{Receiver, SyncSender};
use std::sync::mpsc::{Receiver, SyncSender, TryRecvError};
use std::time::{Duration, Instant};
use log::{hash_event, Entry, Sha256Hash};
use log::{extend_and_hash, Entry, Sha256Hash};
use event::{get_signature, verify_event, Event, Signature};
use serde::Serialize;
use std::fmt::Debug;
@ -22,7 +22,7 @@ pub enum ExitReason {
pub struct Logger<T> {
pub sender: SyncSender<Entry<T>>,
pub receiver: Receiver<Event<T>>,
pub end_hash: Sha256Hash,
pub last_id: Sha256Hash,
pub num_hashes: u64,
pub num_ticks: u64,
}
@ -52,16 +52,19 @@ impl<T: Serialize + Clone + Debug> Logger<T> {
Logger {
receiver,
sender,
end_hash: start_hash,
last_id: start_hash,
num_hashes: 0,
num_ticks: 0,
}
}
pub fn log_event(&mut self, event: Event<T>) -> Result<(), (Entry<T>, ExitReason)> {
self.end_hash = hash_event(&self.end_hash, &event);
if let Some(sig) = get_signature(&event) {
self.last_id = extend_and_hash(&self.last_id, &sig);
self.num_hashes += 1;
}
let entry = Entry {
end_hash: self.end_hash,
id: self.last_id,
num_hashes: self.num_hashes,
event,
};
@ -77,7 +80,6 @@ impl<T: Serialize + Clone + Debug> Logger<T> {
epoch: Instant,
ms_per_tick: Option<u64>,
) -> Result<(), (Entry<T>, ExitReason)> {
use std::sync::mpsc::TryRecvError;
loop {
if let Some(ms) = ms_per_tick {
if epoch.elapsed() > Duration::from_millis((self.num_ticks + 1) * ms) {
@ -94,7 +96,7 @@ impl<T: Serialize + Clone + Debug> Logger<T> {
}
Err(TryRecvError::Disconnected) => {
let entry = Entry {
end_hash: self.end_hash,
id: self.last_id,
num_hashes: self.num_hashes,
event: Event::Tick,
};
@ -149,12 +151,12 @@ mod tests {
#[test]
fn test_genesis_no_creators() {
let entries = run_genesis(Genesis::new(100, vec![]));
assert!(verify_slice_u64(&entries, &entries[0].end_hash));
assert!(verify_slice_u64(&entries, &entries[0].id));
}
#[test]
fn test_genesis() {
let entries = run_genesis(Genesis::new(100, vec![Creator::new(42)]));
assert!(verify_slice_u64(&entries, &entries[0].end_hash));
assert!(verify_slice_u64(&entries, &entries[0].id));
}
}