proper calculation of miner fee in memory pool

This commit is contained in:
Svyatoslav Nikolsky 2019-01-16 12:20:28 +03:00
parent 5c760edf05
commit 34d921fbb0
22 changed files with 239 additions and 158 deletions

1
Cargo.lock generated
View File

@ -1487,6 +1487,7 @@ dependencies = [
"elastic-array 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lru-cache 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"network 0.1.0",
"parking_lot 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
"primitives 0.1.0",
"serialization 0.1.0",

View File

@ -19,3 +19,6 @@ script = { path = "../script" }
[dev-dependencies]
test-data = { path = "../test-data" }
[features]
test-helpers = []

View File

@ -345,6 +345,7 @@ mod tests {
use storage::SharedStore;
use network::{ConsensusParams, Network};
use memory_pool::MemoryPool;
use fee::NonZeroFeeCalculator;
use self::test_data::{ChainBuilder, TransactionBuilder};
use super::{BlockAssembler, SizePolicy, NextStep, BlockTemplate};
@ -404,8 +405,8 @@ mod tests {
let mut pool = MemoryPool::new();
let storage: SharedStore = Arc::new(BlockChainDatabase::init_test_chain(vec![test_data::genesis().into()]));
pool.insert_verified(chain.at(0).into());
pool.insert_verified(chain.at(1).into());
pool.insert_verified(chain.at(0).into(), &NonZeroFeeCalculator);
pool.insert_verified(chain.at(1).into(), &NonZeroFeeCalculator);
(BlockAssembler {
miner_address: &"t1h8SqgtM3QM5e2M8EzhhT1yL2PXXtA6oqe".into(),

View File

@ -1,13 +1,42 @@
use chain::Transaction;
use ser::Serializable;
use storage::TransactionProvider;
use storage::{TransactionOutputProvider, DuplexTransactionOutputProvider};
use MemoryPool;
pub fn transaction_fee(store: &TransactionProvider, transaction: &Transaction) -> u64 {
let mut inputs_sum = transaction.inputs.iter().map(|input| {
let input_transaction = store.transaction(&input.previous_output.hash)
.expect("transaction must be verified by caller");
input_transaction.outputs[input.previous_output.index as usize].value
}).sum::<u64>();
/// Transaction fee calculator for memory pool.
///
/// Implementations decide how a transaction's miner fee is computed at the
/// moment it is inserted into the pool (see `MemoryPool::insert_verified`,
/// which forwards the calculator to `make_entry`).
pub trait MemoryPoolFeeCalculator {
/// Compute the fee of `tx`, given the current contents of `memory_pool`
/// (the pool may be needed to resolve in-pool previous outputs).
fn calculate(&self, memory_pool: &MemoryPool, tx: &Transaction) -> u64;
}
/// Fee calculator that computes the sum of the real transparent fee and the
/// real shielded fee (the underlying `transaction_fee` also accounts for
/// join-split `value_pub_new` amounts).
pub struct FeeCalculator<'a>(pub &'a TransactionOutputProvider);
impl<'a> MemoryPoolFeeCalculator for FeeCalculator<'a> {
fn calculate(&self, memory_pool: &MemoryPool, tx: &Transaction) -> u64 {
// Previous outputs may live either in storage (self.0) or in the memory
// pool itself (unconfirmed parents), so look them up through both.
let tx_out_provider = DuplexTransactionOutputProvider::new(self.0, memory_pool);
transaction_fee(&tx_out_provider, tx)
}
}
/// Fee calculator used in tests of this and external crates.
///
/// Always returns a non-zero fee so that test transactions are never rejected
/// by the pool's zero-fee check, while preserving relative ordering by the
/// sum of transaction outputs.
#[cfg(any(test, feature = "test-helpers"))]
pub struct NonZeroFeeCalculator;
#[cfg(any(test, feature = "test-helpers"))]
impl MemoryPoolFeeCalculator for NonZeroFeeCalculator {
fn calculate(&self, _: &MemoryPool, tx: &Transaction) -> u64 {
// add 100_000_000 to make sure tx won't be rejected by the pool because of zero fee,
// but keep relative ordering by outputs sum
100_000_000 + tx.outputs.iter().fold(0, |acc, output| acc + output.value)
}
}
pub fn transaction_fee(store: &TransactionOutputProvider, transaction: &Transaction) -> u64 {
let mut inputs_sum = transaction.inputs.iter().map(|input|
store.transaction_output(&input.previous_output, ::std::usize::MAX)
.expect("transaction must be verified by caller")
.value).sum::<u64>();
inputs_sum += transaction.join_split.as_ref().map(|js| js.descriptions.iter()
.map(|jsd| jsd.value_pub_new)
.sum::<u64>()).unwrap_or_default();
@ -20,8 +49,8 @@ pub fn transaction_fee(store: &TransactionProvider, transaction: &Transaction) -
inputs_sum.saturating_sub(outputs_sum)
}
pub fn transaction_fee_rate(store: &TransactionProvider, transaction: &Transaction) -> u64 {
transaction_fee(store, transaction) / transaction.serialized_size() as u64
/// Fee per serialized byte of `tx` (integer division).
///
/// The fee itself is computed by `transaction_fee`, which expects every
/// previous output of `tx` to be resolvable through `store`.
pub fn transaction_fee_rate(store: &TransactionOutputProvider, tx: &Transaction) -> u64 {
transaction_fee(store, tx) / tx.serialized_size() as u64
}
#[cfg(test)]
@ -54,10 +83,10 @@ mod tests {
let db = Arc::new(BlockChainDatabase::init_test_chain(vec![b0.into(), b1.into()]));
assert_eq!(transaction_fee(db.as_transaction_provider(), &tx0), 0);
assert_eq!(transaction_fee(db.as_transaction_provider(), &tx2), 500_000);
assert_eq!(transaction_fee(db.as_transaction_output_provider(), &tx0), 0);
assert_eq!(transaction_fee(db.as_transaction_output_provider(), &tx2), 500_000);
assert_eq!(transaction_fee_rate(db.as_transaction_provider(), &tx0), 0);
assert_eq!(transaction_fee_rate(db.as_transaction_provider(), &tx2), 4_901);
assert_eq!(transaction_fee_rate(db.as_transaction_output_provider(), &tx0), 0);
assert_eq!(transaction_fee_rate(db.as_transaction_output_provider(), &tx2), 4_901);
}
}

View File

@ -19,4 +19,7 @@ mod memory_pool;
pub use block_assembler::{BlockAssembler, BlockTemplate};
pub use memory_pool::{MemoryPool, HashedOutPoint, Information as MemoryPoolInformation,
OrderingStrategy as MemoryPoolOrderingStrategy, DoubleSpendCheckResult, NonFinalDoubleSpendSet};
pub use fee::{transaction_fee, transaction_fee_rate};
pub use fee::{FeeCalculator, transaction_fee, transaction_fee_rate};
#[cfg(feature = "test-helpers")]
pub use fee::NonZeroFeeCalculator;

View File

@ -17,6 +17,7 @@ use std::collections::VecDeque;
use std::hash::{Hash, Hasher};
use ser::{Serializable, serialize};
use heapsize::HeapSizeOf;
use fee::MemoryPoolFeeCalculator;
/// Transactions ordering strategy
#[cfg_attr(feature="cargo-clippy", allow(enum_variant_names))]
@ -60,13 +61,13 @@ pub struct Entry {
/// Throughout index of this transaction in memory pool (non persistent)
pub storage_index: u64,
/// Transaction fee (stored for efficiency)
pub miner_fee: i64,
pub miner_fee: u64,
/// Virtual transaction fee (a way to prioritize/penalize transaction)
pub miner_virtual_fee: i64,
/// size + Sum(size) for all in-pool descendants
pub package_size: usize,
/// miner_fee + Sum(miner_fee) for all in-pool descendants
pub package_miner_fee: i64,
pub package_miner_fee: u64,
/// miner_virtual_fee + Sum(miner_virtual_fee) for all in-pool descendants
pub package_miner_virtual_fee: i64,
}
@ -123,7 +124,7 @@ struct ByTransactionScoreOrderedEntry {
/// Transaction size
size: usize,
/// Transaction fee
miner_fee: i64,
miner_fee: u64,
/// Virtual transaction fee
miner_virtual_fee: i64,
}
@ -135,7 +136,7 @@ struct ByPackageScoreOrderedEntry {
/// size + Sum(size) for all in-pool descendants
package_size: usize,
/// miner_fee + Sum(miner_fee) for all in-pool descendants
package_miner_fee: i64,
package_miner_fee: u64,
/// miner_virtual_fee + Sum(miner_virtual_fee) for all in-pool descendants
package_miner_virtual_fee: i64,
}
@ -240,8 +241,8 @@ impl PartialOrd for ByTransactionScoreOrderedEntry {
impl Ord for ByTransactionScoreOrderedEntry {
fn cmp(&self, other: &Self) -> Ordering {
// lesser miner score means later removal
let left = (self.miner_fee + self.miner_virtual_fee) * (other.size as i64);
let right = (other.miner_fee + other.miner_virtual_fee) * (self.size as i64);
let left = (self.miner_fee as i64 + self.miner_virtual_fee) * (other.size as i64);
let right = (other.miner_fee as i64 + other.miner_virtual_fee) * (self.size as i64);
let order = right.cmp(&left);
if order != Ordering::Equal {
return order
@ -260,8 +261,8 @@ impl PartialOrd for ByPackageScoreOrderedEntry {
impl Ord for ByPackageScoreOrderedEntry {
fn cmp(&self, other: &Self) -> Ordering {
// lesser miner score means later removal
let left = (self.package_miner_fee + self.package_miner_virtual_fee) * (other.package_size as i64);
let right = (other.package_miner_fee + other.package_miner_virtual_fee) * (self.package_size as i64);
let left = (self.package_miner_fee as i64 + self.package_miner_virtual_fee) * (other.package_size as i64);
let right = (other.package_miner_fee as i64 + other.package_miner_virtual_fee) * (self.package_size as i64);
let order = right.cmp(&left);
if order != Ordering::Equal {
return order
@ -649,14 +650,16 @@ impl MemoryPool {
}
/// Insert verified transaction to the `MemoryPool`
pub fn insert_verified(&mut self, t: IndexedTransaction) {
let entry = self.make_entry(t);
let descendants = self.storage.remove_by_parent_hash(&entry.hash);
self.storage.insert(entry);
if let Some(descendants_iter) = descendants.map(|d| d.into_iter()) {
for descendant in descendants_iter {
let descendant_entry = self.make_entry(descendant);
self.storage.insert(descendant_entry);
/// Insert a verified transaction into the `MemoryPool`.
///
/// The miner fee of the transaction (and of any previously-orphaned
/// descendants that become connectable) is computed with `fc`.
/// `make_entry` returns `None` for zero-fee transactions, so such
/// transactions are silently dropped rather than inserted.
pub fn insert_verified<FC: MemoryPoolFeeCalculator>(&mut self, t: IndexedTransaction, fc: &FC) {
if let Some(entry) = self.make_entry(t, fc) {
// descendants previously parked as orphans of this tx can now be re-inserted
let descendants = self.storage.remove_by_parent_hash(&entry.hash);
self.storage.insert(entry);
if let Some(descendants_iter) = descendants.map(|d| d.into_iter()) {
for descendant in descendants_iter {
// each descendant goes through the same fee check as the parent
if let Some(descendant_entry) = self.make_entry(descendant, fc) {
self.storage.insert(descendant_entry);
}
}
}
}
}
@ -747,12 +750,18 @@ impl MemoryPool {
self.storage.is_output_spent(prevout)
}
fn make_entry(&mut self, t: IndexedTransaction) -> Entry {
fn make_entry<FC: MemoryPoolFeeCalculator>(&mut self, t: IndexedTransaction, fc: &FC) -> Option<Entry> {
let ancestors = self.get_ancestors(&t.raw);
let size = self.get_transaction_size(&t.raw);
let storage_index = self.get_storage_index();
let miner_fee = self.get_transaction_miner_fee(&t.raw);
Entry {
let miner_fee = fc.calculate(self, &t.raw);
// do not accept any transactions that have zero fee (the fee is unsigned, so it cannot be negative)
if miner_fee == 0 {
return None;
}
Some(Entry {
transaction: t.raw,
hash: t.hash,
ancestors: ancestors,
@ -764,7 +773,7 @@ impl MemoryPool {
package_size: size,
package_miner_fee: miner_fee,
package_miner_virtual_fee: 0,
}
})
}
fn get_ancestors(&self, t: &Transaction) -> HashSet<H256> {
@ -784,12 +793,6 @@ impl MemoryPool {
t.serialized_size()
}
fn get_transaction_miner_fee(&self, t: &Transaction) -> i64 {
let input_value = 0; // TODO: sum all inputs of transaction
let output_value = t.outputs.iter().fold(0, |acc, output| acc + output.value);
(output_value - input_value) as i64
}
#[cfg(not(test))]
fn get_storage_index(&mut self) -> u64 {
self.storage.counter += 1;
@ -868,33 +871,38 @@ impl<'a> Iterator for MemoryPoolIterator<'a> {
}
#[cfg(test)]
mod tests {
pub mod tests {
extern crate test_data;
use chain::{Transaction, OutPoint};
use heapsize::HeapSizeOf;
use fee::NonZeroFeeCalculator;
use super::{MemoryPool, OrderingStrategy, DoubleSpendCheckResult};
use self::test_data::{ChainBuilder, TransactionBuilder};
fn to_memory_pool(chain: &mut ChainBuilder) -> MemoryPool {
let mut pool = MemoryPool::new();
for transaction in chain.transactions.iter().cloned() {
pool.insert_verified(transaction.into());
pool.insert_verified(transaction.into(), &NonZeroFeeCalculator);
}
pool
}
fn default_tx() -> Transaction {
TransactionBuilder::with_output(1).into()
}
#[test]
fn test_memory_pool_heap_size() {
let mut pool = MemoryPool::new();
let size1 = pool.heap_size_of_children();
pool.insert_verified(Transaction::default().into());
pool.insert_verified(default_tx().into(), &NonZeroFeeCalculator);
let size2 = pool.heap_size_of_children();
assert!(size2 > size1);
pool.insert_verified(Transaction::default().into());
pool.insert_verified(default_tx().into(), &NonZeroFeeCalculator);
let size3 = pool.heap_size_of_children();
assert!(size3 > size2);
}
@ -902,11 +910,11 @@ mod tests {
#[test]
fn test_memory_pool_insert_same_transaction() {
let mut pool = MemoryPool::new();
pool.insert_verified(Transaction::default().into());
pool.insert_verified(default_tx().into(), &NonZeroFeeCalculator);
assert_eq!(pool.get_transactions_ids().len(), 1);
// insert the same transaction again
pool.insert_verified(Transaction::default().into());
pool.insert_verified(default_tx().into(), &NonZeroFeeCalculator);
assert_eq!(pool.get_transactions_ids().len(), 1);
}
@ -916,11 +924,11 @@ mod tests {
assert_eq!(pool.read_with_strategy(OrderingStrategy::ByTimestamp), None);
assert_eq!(pool.read_n_with_strategy(100, OrderingStrategy::ByTimestamp), vec![]);
pool.insert_verified(Transaction::default().into());
assert_eq!(pool.read_with_strategy(OrderingStrategy::ByTimestamp), Some(Transaction::default().hash()));
assert_eq!(pool.read_n_with_strategy(100, OrderingStrategy::ByTimestamp), vec![Transaction::default().hash()]);
assert_eq!(pool.read_with_strategy(OrderingStrategy::ByTimestamp), Some(Transaction::default().hash()));
assert_eq!(pool.read_n_with_strategy(100, OrderingStrategy::ByTimestamp), vec![Transaction::default().hash()]);
pool.insert_verified(default_tx().into(), &NonZeroFeeCalculator);
assert_eq!(pool.read_with_strategy(OrderingStrategy::ByTimestamp), Some(default_tx().hash()));
assert_eq!(pool.read_n_with_strategy(100, OrderingStrategy::ByTimestamp), vec![default_tx().hash()]);
assert_eq!(pool.read_with_strategy(OrderingStrategy::ByTimestamp), Some(default_tx().hash()));
assert_eq!(pool.read_n_with_strategy(100, OrderingStrategy::ByTimestamp), vec![default_tx().hash()]);
}
#[test]
@ -929,15 +937,15 @@ mod tests {
assert_eq!(pool.remove_with_strategy(OrderingStrategy::ByTimestamp), None);
assert_eq!(pool.remove_n_with_strategy(100, OrderingStrategy::ByTimestamp), vec![]);
pool.insert_verified(Transaction::default().into());
pool.insert_verified(default_tx().into(), &NonZeroFeeCalculator);
let removed = pool.remove_with_strategy(OrderingStrategy::ByTimestamp);
assert!(removed.is_some());
assert_eq!(removed.unwrap(), Transaction::default().into());
assert_eq!(removed.unwrap(), default_tx().into());
pool.insert_verified(Transaction::default().into());
pool.insert_verified(default_tx().into(), &NonZeroFeeCalculator);
let removed = pool.remove_n_with_strategy(100, OrderingStrategy::ByTimestamp);
assert_eq!(removed.len(), 1);
assert_eq!(removed[0], Transaction::default().into());
assert_eq!(removed[0], default_tx().into());
assert_eq!(pool.remove_with_strategy(OrderingStrategy::ByTimestamp), None);
assert_eq!(pool.remove_n_with_strategy(100, OrderingStrategy::ByTimestamp), vec![]);
@ -947,13 +955,13 @@ mod tests {
fn test_memory_pool_remove_by_hash() {
let mut pool = MemoryPool::new();
pool.insert_verified(Transaction::default().into());
pool.insert_verified(default_tx().into(), &NonZeroFeeCalculator);
assert_eq!(pool.get_transactions_ids().len(), 1);
// remove and check remaining transactions
let removed = pool.remove_by_hash(&Transaction::default().hash());
let removed = pool.remove_by_hash(&default_tx().hash());
assert!(removed.is_some());
assert_eq!(removed.unwrap(), Transaction::default());
assert_eq!(removed.unwrap(), default_tx());
assert_eq!(pool.get_transactions_ids().len(), 0);
// remove non-existent transaction
@ -970,9 +978,9 @@ mod tests {
// insert child, then parent
let mut pool = MemoryPool::new();
pool.insert_verified(chain.at(2).into()); // timestamp 0
pool.insert_verified(chain.at(1).into()); // timestamp 1
pool.insert_verified(chain.at(0).into()); // timestamp 2
pool.insert_verified(chain.at(2).into(), &NonZeroFeeCalculator); // timestamp 0
pool.insert_verified(chain.at(1).into(), &NonZeroFeeCalculator); // timestamp 1
pool.insert_verified(chain.at(0).into(), &NonZeroFeeCalculator); // timestamp 2
// check that parent transaction was removed before child transaction
let transactions = pool.remove_n_with_strategy(3, OrderingStrategy::ByTimestamp);
@ -1015,7 +1023,7 @@ mod tests {
assert_eq!(pool.get_transactions_ids().len(), 2);
// insert child transaction back to the pool & assert transactions are removed in correct order
pool.insert_verified(chain.at(1).into());
pool.insert_verified(chain.at(1).into(), &NonZeroFeeCalculator);
let transactions = pool.remove_n_with_strategy(3, OrderingStrategy::ByTransactionScore);
assert_eq!(transactions.len(), 3);
assert_eq!(transactions[0], chain.at(0).into());
@ -1034,7 +1042,7 @@ mod tests {
let mut transactions_size = 0;
for transaction_index in 0..4 {
pool.insert_verified(chain.at(transaction_index).into());
pool.insert_verified(chain.at(transaction_index).into(), &NonZeroFeeCalculator);
transactions_size += chain.size(transaction_index);
let info = pool.information();
@ -1113,7 +1121,7 @@ mod tests {
.into_input(0).set_output(50).store(chain) // transaction0 -> transaction1
.set_default_input(1).set_output(35).store(chain) // transaction2
.into_input(0).set_output(10).store(chain) // transaction2 -> transaction3
.into_input(0).set_output(100).store(chain); // transaction2 -> transaction3 -> transaction4
.into_input(0).set_output(100).store(chain); // transaction2 -> transaction3 -> transaction4
let mut pool = MemoryPool::new();
@ -1122,8 +1130,8 @@ mod tests {
// <
// score({ transaction2 }) = 35/60
let expected = vec![chain.hash(2), chain.hash(0)];
pool.insert_verified(chain.at(0).into());
pool.insert_verified(chain.at(2).into());
pool.insert_verified(chain.at(0).into(), &NonZeroFeeCalculator);
pool.insert_verified(chain.at(2).into(), &NonZeroFeeCalculator);
assert_eq!(pool.read_n_with_strategy(2, OrderingStrategy::ByPackageScore), expected);
// { transaction0, transaction1 } now have bigger score than { transaction2 }:
@ -1132,7 +1140,7 @@ mod tests {
// score({ transaction2 }) = 35/60 ~ 0.583
// => chain1 is boosted
// => so transaction with lesser individual score (but with bigger package score) is mined first
pool.insert_verified(chain.at(1).into());
pool.insert_verified(chain.at(1).into(), &NonZeroFeeCalculator);
let expected = vec![chain.hash(0), chain.hash(1), chain.hash(2)];
assert_eq!(pool.read_n_with_strategy(3, OrderingStrategy::ByPackageScore), expected);
@ -1141,7 +1149,7 @@ mod tests {
// >
// score({ transaction2, transaction3 }) = (35 + 10) / 120 ~ 0.375
// => chain2 is not boosted
pool.insert_verified(chain.at(3).into());
pool.insert_verified(chain.at(3).into(), &NonZeroFeeCalculator);
let expected = vec![chain.hash(0), chain.hash(1), chain.hash(2), chain.hash(3)];
assert_eq!(pool.read_n_with_strategy(4, OrderingStrategy::ByPackageScore), expected);
@ -1150,7 +1158,7 @@ mod tests {
// <
// score({ transaction2, transaction3, transaction4 }) = (35 + 10 + 100) / 180 ~ 0.806
// => chain2 is boosted
pool.insert_verified(chain.at(4).into());
pool.insert_verified(chain.at(4).into(), &NonZeroFeeCalculator);
let expected = vec![chain.hash(2), chain.hash(3), chain.hash(4), chain.hash(0), chain.hash(1)];
assert_eq!(pool.read_n_with_strategy(5, OrderingStrategy::ByPackageScore), expected);
@ -1171,18 +1179,18 @@ mod tests {
let mut pool = MemoryPool::new();
// chain1_parent is not linked to the chain1_grandchild
// transaction0 is not linked to the transaction2
// => they are in separate chains now
// => chain2 has greater score than both of these chains
pool.insert_verified(chain.at(3).into());
pool.insert_verified(chain.at(0).into());
pool.insert_verified(chain.at(2).into());
// => transaction3 has greater score than both of these chains
pool.insert_verified(chain.at(3).into(), &NonZeroFeeCalculator);
pool.insert_verified(chain.at(0).into(), &NonZeroFeeCalculator);
pool.insert_verified(chain.at(2).into(), &NonZeroFeeCalculator);
let expected = vec![chain.hash(3), chain.hash(0), chain.hash(2)];
assert_eq!(pool.read_n_with_strategy(3, OrderingStrategy::ByPackageScore), expected);
// insert the missing transaction to link together chain1
// => it now will have better score than chain2
pool.insert_verified(chain.at(1).into());
pool.insert_verified(chain.at(1).into(), &NonZeroFeeCalculator);
let expected = vec![chain.hash(0), chain.hash(1), chain.hash(3), chain.hash(2)];
assert_eq!(pool.read_n_with_strategy(4, OrderingStrategy::ByPackageScore), expected);
}
@ -1206,9 +1214,9 @@ mod tests {
// insert level1 + level2. There are two chains:
// score({ transaction3, transaction5 }) = 40 + 60
// score({ transaction4, transaction5 }) = 50 + 60
pool.insert_verified(chain.at(5).into());
pool.insert_verified(chain.at(3).into());
pool.insert_verified(chain.at(4).into());
pool.insert_verified(chain.at(5).into(), &NonZeroFeeCalculator);
pool.insert_verified(chain.at(3).into(), &NonZeroFeeCalculator);
pool.insert_verified(chain.at(4).into(), &NonZeroFeeCalculator);
let expected = vec![chain.hash(4), chain.hash(3), chain.hash(5)];
assert_eq!(pool.read_n_with_strategy(3, OrderingStrategy::ByTransactionScore), expected);
assert_eq!(pool.read_n_with_strategy(3, OrderingStrategy::ByPackageScore), expected);
@ -1217,7 +1225,7 @@ mod tests {
// score({ transaction3, transaction5 }) = 40 + 60
// score({ transaction4, transaction5 }) = 50 + 60
// score({ transaction2, transaction5 }) = 30 + 60
pool.insert_verified(chain.at(2).into());
pool.insert_verified(chain.at(2).into(), &NonZeroFeeCalculator);
let expected = vec![chain.hash(4), chain.hash(3), chain.hash(2), chain.hash(5)];
assert_eq!(pool.read_n_with_strategy(4, OrderingStrategy::ByTransactionScore), expected);
assert_eq!(pool.read_n_with_strategy(4, OrderingStrategy::ByPackageScore), expected);
@ -1227,7 +1235,7 @@ mod tests {
// score({ transaction1, transaction4, transaction5 }) = 20 + 50 + 60 / 3 ~ 0.333
// score({ transaction2, transaction5 }) = 30 + 60 / 2 = 0.45
// but second chain will be removed first anyway because previous #1 ({ transaction4, transaction5}) now depends on level 01
pool.insert_verified(chain.at(1).into());
pool.insert_verified(chain.at(1).into(), &NonZeroFeeCalculator);
let expected = vec![chain.hash(3), chain.hash(2), chain.hash(1), chain.hash(4), chain.hash(5)];
assert_eq!(pool.read_n_with_strategy(5, OrderingStrategy::ByTransactionScore), expected);
assert_eq!(pool.read_n_with_strategy(5, OrderingStrategy::ByPackageScore), expected);
@ -1237,7 +1245,7 @@ mod tests {
// score({ transaction0, transaction4, transaction5 }) = (10 + 50 + 60) / (60 + 60 + 142) ~ 0.458
// score({ transaction1, transaction3, transaction5 }) = (20 + 50 + 60) / (60 + 60 + 142) ~ 0.496
// score({ transaction2, transaction5 }) = (30 + 60) / (60 + 142) ~ 0.445
pool.insert_verified(chain.at(0).into());
pool.insert_verified(chain.at(0).into(), &NonZeroFeeCalculator);
let expected = vec![chain.hash(2), chain.hash(1), chain.hash(0), chain.hash(4), chain.hash(3), chain.hash(5)];
assert_eq!(pool.read_n_with_strategy(6, OrderingStrategy::ByTransactionScore), expected);
assert_eq!(pool.read_n_with_strategy(6, OrderingStrategy::ByPackageScore), expected);
@ -1258,17 +1266,17 @@ mod tests {
assert!(!pool.is_spent(&OutPoint { hash: chain.hash(1), index: 0, }));
assert!(!pool.is_spent(&OutPoint { hash: chain.hash(2), index: 0, }));
pool.insert_verified(chain.at(0).into());
pool.insert_verified(chain.at(0).into(), &NonZeroFeeCalculator);
assert!(!pool.is_spent(&OutPoint { hash: chain.hash(0), index: 0, }));
assert!(!pool.is_spent(&OutPoint { hash: chain.hash(1), index: 0, }));
assert!(!pool.is_spent(&OutPoint { hash: chain.hash(2), index: 0, }));
pool.insert_verified(chain.at(1).into());
pool.insert_verified(chain.at(1).into(), &NonZeroFeeCalculator);
assert!(!pool.is_spent(&OutPoint { hash: chain.hash(0), index: 0, }));
assert!(!pool.is_spent(&OutPoint { hash: chain.hash(1), index: 0, }));
assert!(!pool.is_spent(&OutPoint { hash: chain.hash(2), index: 0, }));
pool.insert_verified(chain.at(2).into());
pool.insert_verified(chain.at(2).into(), &NonZeroFeeCalculator);
assert!(pool.is_spent(&OutPoint { hash: chain.hash(0), index: 0, }));
assert!(!pool.is_spent(&OutPoint { hash: chain.hash(1), index: 0, }));
assert!(!pool.is_spent(&OutPoint { hash: chain.hash(2), index: 0, }));
@ -1291,10 +1299,10 @@ mod tests {
.reset().add_output(40).store(chain); // transaction3
let mut pool = MemoryPool::new();
pool.insert_verified(chain.at(0).into());
pool.insert_verified(chain.at(1).into());
pool.insert_verified(chain.at(2).into());
pool.insert_verified(chain.at(3).into());
pool.insert_verified(chain.at(0).into(), &NonZeroFeeCalculator);
pool.insert_verified(chain.at(1).into(), &NonZeroFeeCalculator);
pool.insert_verified(chain.at(2).into(), &NonZeroFeeCalculator);
pool.insert_verified(chain.at(3).into(), &NonZeroFeeCalculator);
assert_eq!(pool.information().transactions_count, 4);
assert_eq!(pool.remove_by_prevout(&OutPoint { hash: chain.hash(0), index: 0 }), Some(vec![chain.at(1).into(), chain.at(2).into()]));
@ -1314,9 +1322,9 @@ mod tests {
.reset().set_input(&chain.at(0), 2).add_output(70).store(chain); // no double spend: t0[2] -> t6
let mut pool = MemoryPool::new();
pool.insert_verified(chain.at(1).into());
pool.insert_verified(chain.at(2).into());
pool.insert_verified(chain.at(4).into());
pool.insert_verified(chain.at(1).into(), &NonZeroFeeCalculator);
pool.insert_verified(chain.at(2).into(), &NonZeroFeeCalculator);
pool.insert_verified(chain.at(4).into(), &NonZeroFeeCalculator);
// when output is spent by nonfinal transaction
match pool.check_double_spend(&chain.at(3)) {
DoubleSpendCheckResult::NonFinalDoubleSpend(set) => {
@ -1359,7 +1367,7 @@ mod tests {
.reset().set_input(&chain.at(0), 0).add_output(40).store(chain); // good replacement: t0[0] -> t2
let mut pool = MemoryPool::new();
pool.insert_verified(chain.at(1).into());
pool.insert_verified(chain.at(1).into(), &NonZeroFeeCalculator);
// when output is spent by nonfinal transaction
match pool.check_double_spend(&chain.at(2)) {
@ -1386,13 +1394,13 @@ mod tests {
}
#[test]
fn test_memory_poolis_spent() {
let tx1: Transaction = TransactionBuilder::with_default_input(0).into();
let tx2: Transaction = TransactionBuilder::with_default_input(1).into();
fn test_memory_pool_is_spent() {
let tx1: Transaction = TransactionBuilder::with_default_input(0).set_output(1).into();
let tx2: Transaction = TransactionBuilder::with_default_input(1).set_output(1).into();
let out1 = tx1.inputs[0].previous_output.clone();
let out2 = tx2.inputs[0].previous_output.clone();
let mut memory_pool = MemoryPool::new();
memory_pool.insert_verified(tx1.into());
memory_pool.insert_verified(tx1.into(), &NonZeroFeeCalculator);
assert!(memory_pool.is_spent(&out1));
assert!(!memory_pool.is_spent(&out2));
}

View File

@ -13,4 +13,5 @@ serialization = { path = "../serialization" }
chain = { path = "../chain" }
display_derive = "0.0.0"
bitcrypto = { path = "../crypto" }
lazy_static = "*"
lazy_static = "*"
network = { path = "../network" }

View File

@ -2,7 +2,7 @@
//! require sophisticated (in more than one source) previous transaction lookups
use chain::{OutPoint, TransactionOutput};
use storage::TransactionOutputProvider;
use TransactionOutputProvider;
#[derive(Clone, Copy)]
pub struct DuplexTransactionOutputProvider<'a> {

View File

@ -10,6 +10,7 @@ extern crate serialization as ser;
extern crate chain;
extern crate bitcrypto as crypto;
#[macro_use] extern crate lazy_static;
extern crate network;
mod best_block;
mod block_ancestors;
@ -19,6 +20,7 @@ mod block_iterator;
mod block_origin;
mod block_provider;
mod block_ref;
mod duplex_store;
mod error;
mod store;
mod transaction_meta;
@ -35,6 +37,7 @@ pub use block_iterator::BlockIterator;
pub use block_origin::{BlockOrigin, SideChainOrigin};
pub use block_provider::{BlockHeaderProvider, BlockProvider, IndexedBlockProvider};
pub use block_ref::BlockRef;
pub use duplex_store::{DuplexTransactionOutputProvider, NoopStore};
pub use error::Error;
pub use store::{AsSubstore, Store, SharedStore, CanonStore, ConfigStore};
pub use transaction_meta::TransactionMeta;

View File

@ -30,6 +30,4 @@ network = { path = "../network" }
[dev-dependencies]
test-data = { path = "../test-data" }
[features]
dev = []
miner = { path = "../miner", features = ["test-helpers"] }

View File

@ -1,9 +1,9 @@
use std::collections::{VecDeque, HashSet};
use std::fmt;
use linked_hash_map::LinkedHashMap;
use chain::{BlockHeader, Transaction, IndexedBlockHeader, IndexedBlock, IndexedTransaction};
use chain::{BlockHeader, Transaction, IndexedBlockHeader, IndexedBlock, IndexedTransaction, OutPoint, TransactionOutput};
use storage;
use miner::{MemoryPoolOrderingStrategy, MemoryPoolInformation};
use miner::{MemoryPoolOrderingStrategy, MemoryPoolInformation, FeeCalculator};
use primitives::bytes::Bytes;
use primitives::hash::H256;
use utils::{BestHeadersChain, BestHeadersChainInformation, HashQueueChain, HashPosition};
@ -604,7 +604,7 @@ impl Chain {
memory_pool.remove_by_prevout(&input.previous_output);
}
// now insert transaction itself
memory_pool.insert_verified(transaction);
memory_pool.insert_verified(transaction, &FeeCalculator(self.storage.as_transaction_output_provider()));
}
/// Calculate block locator hashes for hash queue
@ -665,6 +665,18 @@ impl storage::TransactionProvider for Chain {
}
}
impl storage::TransactionOutputProvider for Chain {
fn transaction_output(&self, outpoint: &OutPoint, transaction_index: usize) -> Option<TransactionOutput> {
self.memory_pool.read().transaction_output(outpoint, transaction_index)
.or_else(|| self.storage.transaction_output(outpoint, transaction_index))
}
fn is_spent(&self, outpoint: &OutPoint) -> bool {
self.memory_pool.read().is_spent(outpoint)
|| self.storage.is_spent(outpoint)
}
}
impl storage::BlockHeaderProvider for Chain {
fn block_header_bytes(&self, block_ref: storage::BlockRef) -> Option<Bytes> {
use ser::serialize;
@ -884,19 +896,19 @@ mod tests {
#[test]
fn chain_transaction_state() {
let db = Arc::new(BlockChainDatabase::init_test_chain(vec![test_data::genesis().into()]));
let db = Arc::new(BlockChainDatabase::init_test_chain(vec![test_data::genesis().into(), test_data::block_h1().into()]));
let mut chain = Chain::new(db, Arc::new(RwLock::new(MemoryPool::new())));
let genesis_block = test_data::genesis();
let block1 = test_data::block_h1();
let block2 = test_data::block_h2();
let tx1: Transaction = test_data::TransactionBuilder::with_version(1).into();
let tx2: Transaction = test_data::TransactionBuilder::with_version(2).into();
let tx2: Transaction = test_data::TransactionBuilder::with_input(&test_data::block_h1().transactions[0], 0).into();
let tx1_hash = tx1.hash();
let tx2_hash = tx2.hash();
chain.verify_transaction(tx1.into());
chain.insert_verified_transaction(tx2.into());
assert_eq!(chain.transaction_state(&genesis_block.transactions[0].hash()), TransactionState::Stored);
assert_eq!(chain.transaction_state(&block1.transactions[0].hash()), TransactionState::Unknown);
assert_eq!(chain.transaction_state(&block2.transactions[0].hash()), TransactionState::Unknown);
assert_eq!(chain.transaction_state(&tx1_hash), TransactionState::Verifying);
assert_eq!(chain.transaction_state(&tx2_hash), TransactionState::InMemory);
}
@ -961,13 +973,18 @@ mod tests {
#[test]
fn chain_transactions_hashes_with_state() {
let input_tx1 = test_data::block_h1().transactions[0].clone();
let input_tx2 = test_data::block_h2().transactions[0].clone();
let test_chain = &mut test_data::ChainBuilder::new();
test_data::TransactionBuilder::with_output(100).store(test_chain) // t1
.into_input(0).add_output(200).store(test_chain) // t1 -> t2
test_data::TransactionBuilder::with_input(&input_tx1, 0)
.add_output(1_000).store(test_chain) // t1
.into_input(0).add_output(400).store(test_chain) // t1 -> t2
.into_input(0).add_output(300).store(test_chain) // t1 -> t2 -> t3
.set_default_input(0).set_output(400).store(test_chain); // t4
.set_input(&input_tx2, 0).set_output(400).store(test_chain); // t4
let db = Arc::new(BlockChainDatabase::init_test_chain(vec![test_data::genesis().into()]));
let db = Arc::new(BlockChainDatabase::init_test_chain(vec![
test_data::genesis().into(), test_data::block_h1().into(), test_data::block_h2().into(),
]));
let mut chain = Chain::new(db, Arc::new(RwLock::new(MemoryPool::new())));
chain.insert_verified_transaction(test_chain.at(0).into());
chain.insert_verified_transaction(test_chain.at(1).into());
@ -983,14 +1000,18 @@ mod tests {
#[test]
fn memory_pool_transactions_are_reverified_after_reorganization() {
let b0 = test_data::block_builder().header().build().build();
let b0 = test_data::block_builder()
.header().build()
.transaction().coinbase().output().value(100_000).build().build()
.build();
let b1 = test_data::block_builder().header().nonce(1.into()).parent(b0.hash()).build().build();
let b2 = test_data::block_builder().header().nonce(2.into()).parent(b0.hash()).build().build();
let b3 = test_data::block_builder().header().parent(b2.hash()).build().build();
let tx1: Transaction = test_data::TransactionBuilder::with_version(1).into();
let input_tx = b0.transactions[0].clone();
let tx1: Transaction = test_data::TransactionBuilder::with_version(1).set_input(&input_tx, 0).into();
let tx1_hash = tx1.hash();
let tx2: Transaction = test_data::TransactionBuilder::with_version(2).into();
let tx2: Transaction = test_data::TransactionBuilder::with_input(&input_tx, 0).into();
let tx2_hash = tx2.hash();
let db = Arc::new(BlockChainDatabase::init_test_chain(vec![b0.into()]));
@ -1015,7 +1036,8 @@ mod tests {
#[test]
fn fork_chain_block_transaction_is_removed_from_on_block_insert() {
let genesis = test_data::genesis();
let genesis = test_data::block_h1();
let input_tx = genesis.transactions[0].clone();
let b0 = test_data::block_builder().header().parent(genesis.hash()).build().build(); // genesis -> b0
let b1 = test_data::block_builder().header().nonce(1.into()).parent(b0.hash()).build()
.transaction().output().value(10).build().build()
@ -1024,13 +1046,16 @@ mod tests {
.transaction().output().value(20).build().build()
.build(); // genesis -> b0 -> b1[tx1] -> b2[tx2]
let b3 = test_data::block_builder().header().nonce(2.into()).parent(b0.hash()).build()
.transaction().output().value(30).build().build()
.transaction().input().hash(input_tx.hash()).index(0).build()
.output().value(50).build().build()
.build(); // genesis -> b0 -> b3[tx3]
let b4 = test_data::block_builder().header().parent(b3.hash()).build()
.transaction().output().value(40).build().build()
.transaction().input().hash(b3.transactions[0].hash()).index(0).build()
.output().value(40).build().build()
.build(); // genesis -> b0 -> b3[tx3] -> b4[tx4]
let b5 = test_data::block_builder().header().parent(b4.hash()).build()
.transaction().output().value(50).build().build()
.transaction().input().hash(b4.transactions[0].hash()).index(0).build()
.output().value(30).build().build()
.build(); // genesis -> b0 -> b3[tx3] -> b4[tx4] -> b5[tx5]
let tx1 = b1.transactions[0].clone();
@ -1041,7 +1066,7 @@ mod tests {
let tx4 = b4.transactions[0].clone();
let tx5 = b5.transactions[0].clone();
let db = Arc::new(BlockChainDatabase::init_test_chain(vec![genesis.into()]));
let db = Arc::new(BlockChainDatabase::init_test_chain(vec![test_data::genesis().into(), genesis.into()]));
let mut chain = Chain::new(db, Arc::new(RwLock::new(MemoryPool::new())));
chain.insert_verified_transaction(tx3.into());
@ -1080,35 +1105,35 @@ mod tests {
.input().hash(tx0.hash()).index(0).build()
.build()
.build(); // genesis -> b0[tx1]
// tx1 && tx2 are spending same output
// tx from b0 && tx2 are spending same output
let tx2: Transaction = test_data::TransactionBuilder::with_output(20).add_input(&tx0, 0).into();
let tx3: Transaction = test_data::TransactionBuilder::with_output(20).add_input(&tx0, 1).into();
// insert tx2 to memory pool
let db = Arc::new(BlockChainDatabase::init_test_chain(vec![test_data::genesis().into()]));
let mut chain = Chain::new(db, Arc::new(RwLock::new(MemoryPool::new())));
chain.insert_verified_transaction(tx2.clone().into());
chain.insert_verified_transaction(tx3.clone().into());
// insert verified block with tx1
chain.insert_best_block(b0.into()).expect("no error");
// => tx2 is removed from memory pool, but tx3 remains
assert_eq!(chain.information().transactions.transactions_count, 1);
assert_eq!(chain.information().transactions.transactions_count, 0);
}
#[test]
fn update_memory_pool_transaction() {
use self::test_data::{ChainBuilder, TransactionBuilder};
let input_tx = test_data::block_h1().transactions[0].clone();
let data_chain = &mut ChainBuilder::new();
TransactionBuilder::with_output(10).add_output(10).add_output(10).store(data_chain) // transaction0
TransactionBuilder::with_input(&input_tx, 0).set_output(100).store(data_chain) // transaction0
.reset().set_input(&data_chain.at(0), 0).add_output(20).lock().store(data_chain) // transaction0 -> transaction1
.reset().set_input(&data_chain.at(0), 0).add_output(30).store(data_chain); // transaction0 -> transaction2
let db = Arc::new(BlockChainDatabase::init_test_chain(vec![test_data::genesis().into()]));
let db = Arc::new(BlockChainDatabase::init_test_chain(vec![test_data::genesis().into(), test_data::block_h1().into()]));
let mut chain = Chain::new(db, Arc::new(RwLock::new(MemoryPool::new())));
chain.insert_verified_transaction(data_chain.at(0).into());
chain.insert_verified_transaction(data_chain.at(1).into());
assert_eq!(chain.information().transactions.transactions_count, 1);
assert_eq!(chain.information().transactions.transactions_count, 2);
chain.insert_verified_transaction(data_chain.at(2).into());
assert_eq!(chain.information().transactions.transactions_count, 1); // tx was replaces
assert_eq!(chain.information().transactions.transactions_count, 2); // tx was replaced
}
}

View File

@ -1278,6 +1278,10 @@ pub mod tests {
}
}
fn storage_with_block1() -> StorageRef {
Arc::new(BlockChainDatabase::init_test_chain(vec![test_data::genesis().into(), test_data::block_h1().into()]))
}
fn create_sync(storage: Option<StorageRef>, verifier: Option<DummyVerifier>) -> (Arc<DummyTaskExecutor>, ClientCoreRef<SynchronizationClientCore<DummyTaskExecutor>>, Arc<SynchronizationClient<DummyTaskExecutor, DummyVerifier>>) {
let sync_peers = Arc::new(PeersImpl::default());
let storage = match storage {
@ -1918,17 +1922,19 @@ pub mod tests {
#[test]
fn transaction_is_accepted_when_not_synchronizing() {
let (_, core, sync) = create_sync(None, None);
let (_, core, sync) = create_sync(Some(storage_with_block1()), None);
let input_tx = test_data::block_h1().transactions[0].clone();
sync.on_transaction(1, test_data::TransactionBuilder::with_version(1).into());
let tx1: Transaction = test_data::TransactionBuilder::with_input(&input_tx, 0).set_output(100).into();
sync.on_transaction(1, tx1.clone().into());
assert_eq!(core.lock().information().chain.transactions.transactions_count, 1);
let b1 = test_data::block_h1();
sync.on_headers(1, types::Headers::with_headers(vec![b1.block_header.clone()]));
let b2 = test_data::block_h2();
sync.on_headers(1, types::Headers::with_headers(vec![b2.block_header.clone()]));
assert!(core.lock().information().state.is_nearly_saturated());
sync.on_transaction(1, test_data::TransactionBuilder::with_version(2).into());
sync.on_transaction(1, test_data::TransactionBuilder::with_input(&tx1, 0).into());
assert_eq!(core.lock().information().chain.transactions.transactions_count, 2);
}
@ -1943,11 +1949,12 @@ pub mod tests {
#[test]
fn orphaned_transaction_is_verified_when_input_is_received() {
let input_tx = test_data::block_h1().transactions[0].clone();
let chain = &mut test_data::ChainBuilder::new();
test_data::TransactionBuilder::with_output(10).store(chain) // t0
.set_input(&chain.at(0), 0).set_output(20).store(chain); // t0 -> t1
test_data::TransactionBuilder::with_input(&input_tx, 0).set_output(100).store(chain) // t0
.set_input(&chain.at(0), 0).set_output(20).store(chain); // t0 -> t1
let (_, core, sync) = create_sync(None, None);
let (_, core, sync) = create_sync(Some(storage_with_block1()), None);
sync.on_transaction(1, chain.at(1).into());
assert_eq!(core.lock().information().chain.transactions.transactions_count, 0);

View File

@ -400,7 +400,7 @@ pub mod tests {
use primitives::hash::H256;
use chain::Transaction;
use inbound_connection::tests::DummyOutboundSyncConnection;
use miner::MemoryPool;
use miner::{NonZeroFeeCalculator, MemoryPool};
use local_node::tests::{default_filterload, make_filteradd};
use synchronization_executor::Task;
use synchronization_executor::tests::DummyTaskExecutor;
@ -561,7 +561,7 @@ pub mod tests {
// when memory pool is non-empty
let transaction = Transaction::default();
let transaction_hash = transaction.hash();
memory_pool.write().insert_verified(transaction.into());
memory_pool.write().insert_verified(transaction.into(), &NonZeroFeeCalculator);
// when asking for memory pool transactions ids
server.execute(ServerTask::Mempool(0));
// => respond with inventory
@ -600,7 +600,7 @@ pub mod tests {
let tx_verified_hash = tx_verified.hash();
// given in-memory transaction
{
memory_pool.write().insert_verified(tx_verified.clone().into());
memory_pool.write().insert_verified(tx_verified.clone().into(), &NonZeroFeeCalculator);
}
// when asking for known in-memory transaction
let inventory = vec![

View File

@ -96,9 +96,9 @@ mod tests {
use std::sync::Arc;
use parking_lot::RwLock;
use chain::OutPoint;
use storage::{TransactionOutputProvider};
use storage::TransactionOutputProvider;
use db::BlockChainDatabase;
use miner::MemoryPool;
use miner::{MemoryPool, NonZeroFeeCalculator};
use super::MemoryPoolTransactionOutputProvider;
#[test]
@ -113,9 +113,9 @@ mod tests {
let storage = Arc::new(BlockChainDatabase::init_test_chain(vec![test_data::genesis().into()]));
let memory_pool = Arc::new(RwLock::new(MemoryPool::new()));
{
memory_pool.write().insert_verified(dchain.at(0).into());
memory_pool.write().insert_verified(dchain.at(1).into());
memory_pool.write().insert_verified(dchain.at(2).into());
memory_pool.write().insert_verified(dchain.at(0).into(), &NonZeroFeeCalculator);
memory_pool.write().insert_verified(dchain.at(1).into(), &NonZeroFeeCalculator);
memory_pool.write().insert_verified(dchain.at(2).into(), &NonZeroFeeCalculator);
}
// when inserting t3:

View File

@ -87,6 +87,11 @@ impl TransactionBuilder {
builder.set_join_split(join_split)
}
pub fn with_input(transaction: &Transaction, output_index: u32) -> TransactionBuilder {
let builder = TransactionBuilder::default();
builder.add_input(transaction, output_index)
}
pub fn reset(self) -> TransactionBuilder {
TransactionBuilder::default()
}

View File

@ -1,9 +1,8 @@
use keys::Address;
use network::{ConsensusParams};
use storage::{TransactionOutputProvider, BlockHeaderProvider};
use storage::{DuplexTransactionOutputProvider, TransactionOutputProvider, BlockHeaderProvider};
use script::{self, Builder};
use sigops::{transaction_sigops};
use duplex_store::DuplexTransactionOutputProvider;
use sigops::transaction_sigops;
use deployments::BlockDeployments;
use canon::CanonBlock;
use error::{Error, TransactionError};

View File

@ -1,5 +1,5 @@
use rayon::prelude::{IntoParallelRefIterator, IndexedParallelIterator, ParallelIterator};
use storage::Store;
use storage::{DuplexTransactionOutputProvider, Store};
use network::ConsensusParams;
use error::Error;
use canon::CanonBlock;
@ -7,7 +7,6 @@ use accept_block::BlockAcceptor;
use accept_header::HeaderAcceptor;
use accept_transaction::TransactionAcceptor;
use deployments::BlockDeployments;
use duplex_store::DuplexTransactionOutputProvider;
use VerificationLevel;
pub struct ChainAcceptor<'a> {

View File

@ -1,9 +1,9 @@
use ser::Serializable;
use crypto::Groth16VerifyingKey;
use storage::{TransactionMetaProvider, TransactionOutputProvider, Nullifier, EpochTag, NullifierTracker};
use storage::{Nullifier, NullifierTracker, EpochTag, TransactionMetaProvider, TransactionOutputProvider,
DuplexTransactionOutputProvider};
use network::{ConsensusParams};
use script::{Script, verify_script, VerificationFlags, TransactionSignatureChecker, TransactionInputSigner, SighashBase};
use duplex_store::DuplexTransactionOutputProvider;
use deployments::BlockDeployments;
use sapling::accept_sapling;
use sigops::transaction_sigops;

View File

@ -2,11 +2,11 @@
use hash::H256;
use chain::{IndexedBlock, IndexedBlockHeader, BlockHeader, Transaction};
use storage::{SharedStore, TransactionOutputProvider, BlockHeaderProvider, BlockOrigin};
use storage::{SharedStore, TransactionOutputProvider, BlockHeaderProvider, BlockOrigin,
DuplexTransactionOutputProvider, NoopStore};
use network::ConsensusParams;
use error::{Error, TransactionError};
use canon::{CanonBlock, CanonTransaction};
use duplex_store::{DuplexTransactionOutputProvider, NoopStore};
use verify_chain::ChainVerifier;
use verify_header::HeaderVerifier;
use verify_transaction::MemoryPoolTransactionVerifier;

View File

@ -81,7 +81,6 @@ extern crate assert_matches;
pub mod constants;
mod canon;
mod deployments;
mod duplex_store;
mod equihash;
mod error;
mod sapling;

View File

@ -2,7 +2,7 @@ use std::collections::HashSet;
use chain::IndexedBlock;
use network::ConsensusParams;
use sigops::transaction_sigops;
use duplex_store::NoopStore;
use storage::NoopStore;
use error::{Error, TransactionError};
pub struct BlockVerifier<'a> {

View File

@ -3,7 +3,7 @@ use ser::Serializable;
use chain::{IndexedTransaction, BTC_TX_VERSION, OVERWINTER_TX_VERSION,
OVERWINTER_TX_VERSION_GROUP_ID, SAPLING_TX_VERSION_GROUP_ID};
use network::{ConsensusParams};
use duplex_store::NoopStore;
use storage::NoopStore;
use sigops::transaction_sigops;
use error::TransactionError;
use constants::{MIN_COINBASE_SIZE, MAX_COINBASE_SIZE};