//! # Queueing Honey Badger
//!
//! This works exactly like Dynamic Honey Badger, but it has a transaction queue built in. Whenever
//! an epoch is output, it will automatically select a list of pending transactions and propose it
//! for the next one. The user can continuously add more pending transactions to the queue.
//!
//! **Note**: `QueueingHoneyBadger` currently requires at least two validators.
//!
//! ## How it works
//!
//! Queueing Honey Badger runs a Dynamic Honey Badger internally, and automatically inputs a list
//! of pending transactions as its contribution at the beginning of each epoch. These are selected
//! by making a random choice of _B / N_ out of the first _B_ entries in the queue, where _B_ is the
//! configurable `batch_size` parameter, and _N_ is the current number of validators.
//!
//! After each output, the transactions that made it into the new batch are removed from the queue.
//!
//! The random choice of transactions is made to reduce redundancy even if all validators have
//! roughly the same entries in their queues. By selecting a random fraction of the first _B_
//! entries, any two nodes will likely make almost disjoint contributions instead of proposing
//! the same transaction multiple times.
use std::cmp;
|
2018-07-09 05:29:01 -07:00
|
|
|
use std::fmt::Debug;
|
2018-07-25 14:38:33 -07:00
|
|
|
use std::fmt::{self, Display};
|
2018-07-09 05:29:01 -07:00
|
|
|
use std::hash::Hash;
|
|
|
|
use std::marker::PhantomData;
|
|
|
|
|
2018-07-25 14:38:33 -07:00
|
|
|
use failure::{Backtrace, Context, Fail};
|
2018-07-14 00:20:02 -07:00
|
|
|
use rand::Rand;
|
2018-07-09 05:29:01 -07:00
|
|
|
use serde::{Deserialize, Serialize};
|
|
|
|
|
|
|
|
use dynamic_honey_badger::{self, Batch as DhbBatch, DynamicHoneyBadger, Message};
|
2018-07-21 01:18:08 -07:00
|
|
|
use messaging::{self, DistAlgorithm};
|
2018-07-10 08:29:31 -07:00
|
|
|
use transaction_queue::TransactionQueue;
|
2018-07-09 05:29:01 -07:00
|
|
|
|
|
|
|
pub use dynamic_honey_badger::{Change, ChangeState, Input};
|
|
|
|
|
2018-07-25 14:38:33 -07:00
|
|
|
/// Queueing honey badger error variants.
#[derive(Debug, Fail)]
pub enum ErrorKind {
    /// Forwarding a `Change` input to the internal `DynamicHoneyBadger` failed.
    #[fail(display = "Input error: {}", _0)]
    Input(dynamic_honey_badger::Error),
    /// The internal `DynamicHoneyBadger` failed while handling an incoming message.
    #[fail(display = "Handle message error: {}", _0)]
    HandleMessage(dynamic_honey_badger::Error),
    /// Inputting a proposed batch of queued transactions into `DynamicHoneyBadger` failed.
    #[fail(display = "Propose error: {}", _0)]
    Propose(dynamic_honey_badger::Error),
}
|
|
|
|
|
|
|
|
/// A queueing honey badger error.
///
/// Wraps a `failure::Context` carrying an `ErrorKind`, so callers can inspect the
/// error kind via `Error::kind` while the cause chain and backtrace are preserved.
#[derive(Debug)]
pub struct Error {
    // The error kind plus its failure context (cause and backtrace).
    inner: Context<ErrorKind>,
}
|
|
|
|
|
|
|
|
impl Fail for Error {
    /// Returns the underlying cause of this error, delegating to the inner context.
    fn cause(&self) -> Option<&Fail> {
        self.inner.cause()
    }

    /// Returns the backtrace captured by the inner context, if any.
    fn backtrace(&self) -> Option<&Backtrace> {
        self.inner.backtrace()
    }
}
|
|
|
|
|
2018-07-25 14:38:33 -07:00
|
|
|
impl Error {
    /// Returns the kind of this error, i.e. which operation failed.
    pub fn kind(&self) -> &ErrorKind {
        self.inner.get_context()
    }
}
|
|
|
|
|
|
|
|
impl From<ErrorKind> for Error {
|
|
|
|
fn from(kind: ErrorKind) -> Error {
|
|
|
|
Error {
|
|
|
|
inner: Context::new(kind),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl From<Context<ErrorKind>> for Error {
|
|
|
|
fn from(inner: Context<ErrorKind>) -> Error {
|
|
|
|
Error { inner }
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl Display for Error {
|
|
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
|
|
Display::fmt(&self.inner, f)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// The result type for `QueueingHoneyBadger` operations, using this module's `Error`.
pub type Result<T> = ::std::result::Result<T, Error>;
|
|
|
|
|
2018-07-09 05:29:01 -07:00
|
|
|
/// A Queueing Honey Badger builder, to configure the parameters and create new instances of
/// `QueueingHoneyBadger`.
pub struct QueueingHoneyBadgerBuilder<Tx, NodeUid: Rand> {
    /// Shared network data.
    dyn_hb: DynamicHoneyBadger<Vec<Tx>, NodeUid>,
    /// The target number of transactions to be included in each batch.
    batch_size: usize,
    // NOTE(review): `Tx` already appears in `dyn_hb`'s type above, so this marker looks
    // redundant — confirm before removing (removal would also require changing `new`).
    _phantom: PhantomData<Tx>,
}
|
|
|
|
|
|
|
|
impl<Tx, NodeUid> QueueingHoneyBadgerBuilder<Tx, NodeUid>
|
|
|
|
where
|
|
|
|
Tx: Eq + Serialize + for<'r> Deserialize<'r> + Debug + Hash + Clone,
|
2018-07-05 09:20:53 -07:00
|
|
|
NodeUid: Eq + Ord + Clone + Debug + Serialize + for<'r> Deserialize<'r> + Hash + Rand,
|
2018-07-09 05:29:01 -07:00
|
|
|
{
|
|
|
|
/// Returns a new `QueueingHoneyBadgerBuilder` configured to use the node IDs and cryptographic
|
|
|
|
/// keys specified by `netinfo`.
|
2018-07-21 01:18:08 -07:00
|
|
|
// TODO: Make it easier to build a `QueueingHoneyBadger` with a `JoinPlan`. Handle `Step`
|
|
|
|
// conversion internally.
|
2018-07-17 06:54:12 -07:00
|
|
|
pub fn new(dyn_hb: DynamicHoneyBadger<Vec<Tx>, NodeUid>) -> Self {
|
2018-07-09 05:29:01 -07:00
|
|
|
// TODO: Use the defaults from `HoneyBadgerBuilder`.
|
|
|
|
QueueingHoneyBadgerBuilder {
|
2018-07-17 06:54:12 -07:00
|
|
|
dyn_hb,
|
2018-07-09 05:29:01 -07:00
|
|
|
batch_size: 100,
|
|
|
|
_phantom: PhantomData,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Sets the target number of transactions per batch.
|
2018-07-17 06:54:12 -07:00
|
|
|
pub fn batch_size(mut self, batch_size: usize) -> Self {
|
2018-07-09 05:29:01 -07:00
|
|
|
self.batch_size = batch_size;
|
|
|
|
self
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Creates a new Queueing Honey Badger instance with an empty buffer.
|
2018-07-21 01:18:08 -07:00
|
|
|
pub fn build(self) -> (QueueingHoneyBadger<Tx, NodeUid>, Step<Tx, NodeUid>)
|
2018-07-09 05:29:01 -07:00
|
|
|
where
|
|
|
|
Tx: Serialize + for<'r> Deserialize<'r> + Debug + Hash + Eq,
|
|
|
|
{
|
|
|
|
self.build_with_transactions(None)
|
|
|
|
.expect("building without transactions cannot fail")
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Returns a new Queueing Honey Badger instance that starts with the given transactions in its
|
|
|
|
/// buffer.
|
2018-07-21 01:18:08 -07:00
|
|
|
pub fn build_with_transactions<TI>(
|
|
|
|
self,
|
|
|
|
txs: TI,
|
|
|
|
) -> Result<(QueueingHoneyBadger<Tx, NodeUid>, Step<Tx, NodeUid>)>
|
2018-07-09 05:29:01 -07:00
|
|
|
where
|
|
|
|
TI: IntoIterator<Item = Tx>,
|
|
|
|
Tx: Serialize + for<'r> Deserialize<'r> + Debug + Hash + Eq,
|
|
|
|
{
|
|
|
|
let queue = TransactionQueue(txs.into_iter().collect());
|
|
|
|
let mut qhb = QueueingHoneyBadger {
|
2018-07-17 06:54:12 -07:00
|
|
|
dyn_hb: self.dyn_hb,
|
2018-07-09 05:29:01 -07:00
|
|
|
queue,
|
|
|
|
batch_size: self.batch_size,
|
|
|
|
};
|
2018-07-21 01:18:08 -07:00
|
|
|
let step = qhb.propose()?;
|
|
|
|
Ok((qhb, step))
|
2018-07-09 05:29:01 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// A Honey Badger instance that can handle adding and removing nodes and manages a transaction
/// queue.
///
/// Each epoch proposes a randomly chosen subset of the queued transactions; transactions that
/// appear in an output batch are removed from the queue.
#[derive(Debug)]
pub struct QueueingHoneyBadger<Tx, NodeUid>
where
    Tx: Eq + Serialize + for<'r> Deserialize<'r> + Debug + Hash,
    NodeUid: Ord + Clone + Serialize + for<'r> Deserialize<'r> + Debug + Rand,
{
    /// The target number of transactions to be included in each batch.
    batch_size: usize,
    /// The internal `DynamicHoneyBadger` instance.
    dyn_hb: DynamicHoneyBadger<Vec<Tx>, NodeUid>,
    /// The queue of pending transactions that haven't been output in a batch yet.
    queue: TransactionQueue<Tx>,
}
|
|
|
|
|
2018-07-19 06:09:50 -07:00
|
|
|
/// The `messaging::Step` specialization for `QueueingHoneyBadger`.
pub type Step<Tx, NodeUid> = messaging::Step<QueueingHoneyBadger<Tx, NodeUid>>;
|
2018-07-09 04:35:26 -07:00
|
|
|
|
2018-07-09 05:29:01 -07:00
|
|
|
impl<Tx, NodeUid> DistAlgorithm for QueueingHoneyBadger<Tx, NodeUid>
|
|
|
|
where
|
|
|
|
Tx: Eq + Serialize + for<'r> Deserialize<'r> + Debug + Hash + Clone,
|
2018-07-05 09:20:53 -07:00
|
|
|
NodeUid: Eq + Ord + Clone + Serialize + for<'r> Deserialize<'r> + Debug + Hash + Rand,
|
2018-07-09 05:29:01 -07:00
|
|
|
{
|
|
|
|
type NodeUid = NodeUid;
|
|
|
|
type Input = Input<Tx, NodeUid>;
|
|
|
|
type Output = Batch<Tx, NodeUid>;
|
|
|
|
type Message = Message<NodeUid>;
|
|
|
|
type Error = Error;
|
|
|
|
|
2018-07-19 06:09:50 -07:00
|
|
|
fn input(&mut self, input: Self::Input) -> Result<Step<Tx, NodeUid>> {
|
2018-07-09 05:29:01 -07:00
|
|
|
// User transactions are forwarded to `HoneyBadger` right away. Internal messages are
|
|
|
|
// in addition signed and broadcast.
|
2018-07-21 01:18:08 -07:00
|
|
|
match input {
|
2018-07-09 05:29:01 -07:00
|
|
|
Input::User(tx) => {
|
|
|
|
self.queue.0.push_back(tx);
|
2018-07-21 01:18:08 -07:00
|
|
|
Ok(Step::default())
|
2018-07-09 05:29:01 -07:00
|
|
|
}
|
2018-07-25 14:38:33 -07:00
|
|
|
Input::Change(change) => Ok(self
|
|
|
|
.dyn_hb
|
|
|
|
.input(Input::Change(change))
|
|
|
|
.map_err(ErrorKind::Input)?
|
|
|
|
.convert()),
|
2018-07-21 01:18:08 -07:00
|
|
|
}
|
2018-07-09 05:29:01 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
fn handle_message(
|
|
|
|
&mut self,
|
|
|
|
sender_id: &NodeUid,
|
|
|
|
message: Self::Message,
|
2018-07-19 06:09:50 -07:00
|
|
|
) -> Result<Step<Tx, NodeUid>> {
|
2018-07-21 01:18:08 -07:00
|
|
|
let mut step = self
|
|
|
|
.dyn_hb
|
2018-07-25 14:38:33 -07:00
|
|
|
.handle_message(sender_id, message)
|
|
|
|
.map_err(ErrorKind::HandleMessage)?
|
2018-07-21 01:18:08 -07:00
|
|
|
.convert::<Self>();
|
|
|
|
for batch in &step.output {
|
2018-07-09 05:29:01 -07:00
|
|
|
self.queue.remove_all(batch.iter());
|
|
|
|
}
|
2018-07-21 01:18:08 -07:00
|
|
|
step.extend(self.propose()?);
|
|
|
|
Ok(step)
|
2018-07-09 05:29:01 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
fn terminated(&self) -> bool {
|
|
|
|
false
|
|
|
|
}
|
|
|
|
|
|
|
|
fn our_id(&self) -> &NodeUid {
|
|
|
|
self.dyn_hb.our_id()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl<Tx, NodeUid> QueueingHoneyBadger<Tx, NodeUid>
|
|
|
|
where
|
|
|
|
Tx: Eq + Serialize + for<'r> Deserialize<'r> + Debug + Hash + Clone,
|
2018-07-05 09:20:53 -07:00
|
|
|
NodeUid: Eq + Ord + Clone + Debug + Serialize + for<'r> Deserialize<'r> + Hash + Rand,
|
2018-07-09 05:29:01 -07:00
|
|
|
{
|
|
|
|
/// Returns a new `QueueingHoneyBadgerBuilder` configured to use the node IDs and cryptographic
|
|
|
|
/// keys specified by `netinfo`.
|
2018-07-17 06:54:12 -07:00
|
|
|
pub fn builder(
|
|
|
|
dyn_hb: DynamicHoneyBadger<Vec<Tx>, NodeUid>,
|
|
|
|
) -> QueueingHoneyBadgerBuilder<Tx, NodeUid> {
|
|
|
|
QueueingHoneyBadgerBuilder::new(dyn_hb)
|
2018-07-09 05:29:01 -07:00
|
|
|
}
|
|
|
|
|
2018-07-13 08:30:02 -07:00
|
|
|
/// Returns a reference to the internal `DynamicHoneyBadger` instance.
|
|
|
|
pub fn dyn_hb(&self) -> &DynamicHoneyBadger<Vec<Tx>, NodeUid> {
|
|
|
|
&self.dyn_hb
|
|
|
|
}
|
|
|
|
|
2018-07-09 05:29:01 -07:00
|
|
|
/// Initiates the next epoch by proposing a batch from the queue.
|
2018-07-21 01:18:08 -07:00
|
|
|
fn propose(&mut self) -> Result<Step<Tx, NodeUid>> {
|
2018-07-12 08:53:12 -07:00
|
|
|
let amount = cmp::max(1, self.batch_size / self.dyn_hb.netinfo().num_nodes());
|
2018-07-24 05:46:48 -07:00
|
|
|
// TODO: This will loop indefinitely if we are the only validator.
|
|
|
|
let mut step = Step::default();
|
|
|
|
while !self.dyn_hb.has_input() {
|
2018-07-12 08:53:12 -07:00
|
|
|
let proposal = self.queue.choose(amount, self.batch_size);
|
2018-07-25 14:38:33 -07:00
|
|
|
step.extend(
|
|
|
|
self.dyn_hb
|
|
|
|
.input(Input::User(proposal))
|
|
|
|
.map_err(ErrorKind::Propose)?
|
|
|
|
.convert(),
|
|
|
|
);
|
2018-07-12 08:53:12 -07:00
|
|
|
}
|
2018-07-24 05:46:48 -07:00
|
|
|
Ok(step)
|
2018-07-09 05:29:01 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// The batch type output in each epoch: a `DynamicHoneyBadger` batch over lists of transactions.
pub type Batch<Tx, NodeUid> = DhbBatch<Vec<Tx>, NodeUid>;
|