ChainVerifier refactor in progress

This commit is contained in:
debris 2016-12-11 04:01:48 +01:00
parent 06971497ee
commit f1475696c8
8 changed files with 440 additions and 20 deletions

View File

@ -261,9 +261,7 @@ impl Transaction {
}
pub fn total_spends(&self) -> u64 {
self.outputs
.iter()
.fold(0u64, |acc, out| acc + out.value)
self.outputs.iter().map(|output| output.value).sum()
}
}

View File

@ -77,7 +77,7 @@ impl ChainVerifier {
fn block_sigops(&self, block: &db::IndexedBlock) -> usize {
// strict pay-to-script-hash signature operations count toward block
// signature operations limit is enforced with BIP16
let store = StoreWithUnretainedOutputs::new(&self.store, block);
let store = StoreWithUnretainedOutputs::new(self.store.as_previous_transaction_output_provider(), block);
let bip16_active = self.verify_p2sh(block.header.raw.time);
block.transactions.iter().map(|tx| {
transaction_sigops(&tx.raw, &store, bip16_active)
@ -125,7 +125,7 @@ impl ChainVerifier {
}
}
let unretained_store = StoreWithUnretainedOutputs::new(&self.store, block);
let unretained_store = StoreWithUnretainedOutputs::new(self.store.as_previous_transaction_output_provider(), block);
let mut total_unspent = 0u64;
for (tx_index, tx) in block.transactions.iter().enumerate().skip(1) {
let mut total_claimed: u64 = 0;
@ -157,7 +157,7 @@ impl ChainVerifier {
}
let expected_max = utils::block_reward_satoshi(at_height) + total_unspent;
if coinbase_spends > expected_max{
if coinbase_spends > expected_max {
return Err(Error::CoinbaseOverspend { expected_max: expected_max, actual: coinbase_spends });
}
@ -186,9 +186,9 @@ impl ChainVerifier {
}
// must not be coinbase (sequence = 0 is returned above)
if transaction.is_coinbase() { return Err(TransactionError::MisplacedCoinbase(sequence)); }
if transaction.is_coinbase() { return Err(TransactionError::MisplacedCoinbase); }
let unretained_store = StoreWithUnretainedOutputs::new(&self.store, prevout_provider);
let unretained_store = StoreWithUnretainedOutputs::new(self.store.as_previous_transaction_output_provider(), prevout_provider);
for (input_index, input) in transaction.inputs().iter().enumerate() {
// signature verification
let signer: TransactionInputSigner = transaction.clone().into();

View File

@ -5,6 +5,8 @@ use primitives::hash::H256;
pub enum Error {
/// has an equal duplicate in the chain
Duplicate,
/// Contains duplicated transactions
DuplicatedTransactions,
/// No transactions in block
Empty,
/// Invalid proof-of-work (Block hash does not satisfy nBits)
@ -54,7 +56,7 @@ pub enum TransactionError {
/// Too many signature operations once p2sh operations included
SigopsP2SH(usize),
/// Coinbase transaction is found at position that is not 0
MisplacedCoinbase(usize),
MisplacedCoinbase,
/// Not fully spent transaction with the same hash already exists, bip30.
UnspentTransactionWithTheSameHash,
/// Using output that is surely spent

View File

@ -25,8 +25,14 @@ mod sigops;
mod task;
mod utils;
mod verify_block;
mod verify_ordered_block;
pub use primitives::{uint, hash, compact};
pub use verify_block::BlockVerifier;
pub use verify_ordered_block::{OrderedBlockVerifier, OrderedBlock};
pub use chain_verifier::{Chain, ChainVerifier, VerificationResult, MAX_BLOCK_SIZE, MAX_BLOCK_SIGOPS};
pub use error::{Error, TransactionError};
pub use sigops::{transaction_sigops, StoreWithUnretainedOutputs};

View File

@ -1,14 +1,14 @@
use chain::{Transaction, TransactionOutput, OutPoint};
use db::{PreviousTransactionOutputProvider, SharedStore};
use db::{PreviousTransactionOutputProvider};
use script::Script;
pub struct StoreWithUnretainedOutputs<'a, T> where T: 'a {
store: &'a SharedStore,
outputs: &'a T,
pub struct StoreWithUnretainedOutputs<'a> {
store: &'a PreviousTransactionOutputProvider,
outputs: &'a PreviousTransactionOutputProvider,
}
impl<'a, T> StoreWithUnretainedOutputs<'a, T> where T: PreviousTransactionOutputProvider {
pub fn new(store: &'a SharedStore, outputs: &'a T) -> Self {
impl<'a> StoreWithUnretainedOutputs<'a> {
pub fn new(store: &'a PreviousTransactionOutputProvider, outputs: &'a PreviousTransactionOutputProvider) -> Self {
StoreWithUnretainedOutputs {
store: store,
outputs: outputs,
@ -16,10 +16,9 @@ impl<'a, T> StoreWithUnretainedOutputs<'a, T> where T: PreviousTransactionOutput
}
}
impl<'a, T> PreviousTransactionOutputProvider for StoreWithUnretainedOutputs<'a, T> where T: PreviousTransactionOutputProvider {
impl<'a> PreviousTransactionOutputProvider for StoreWithUnretainedOutputs<'a> {
fn previous_transaction_output(&self, prevout: &OutPoint) -> Option<TransactionOutput> {
self.store.transaction(&prevout.hash)
.and_then(|tx| tx.outputs.into_iter().nth(prevout.index as usize))
self.store.previous_transaction_output(prevout)
.or_else(|| self.outputs.previous_transaction_output(prevout))
}
}
@ -29,6 +28,14 @@ pub fn transaction_sigops(
store: &PreviousTransactionOutputProvider,
bip16_active: bool
) -> Option<usize> {
if bip16_active {
transaction_sigops_raw(transaction, Some(store))
} else {
transaction_sigops_raw(transaction, None)
}
}
pub fn transaction_sigops_raw(transaction: &Transaction, store: Option<&PreviousTransactionOutputProvider>) -> Option<usize> {
let output_sigops: usize = transaction.outputs.iter().map(|output| {
let output_script: Script = output.script_pubkey.clone().into();
output_script.sigops_count(false)
@ -44,7 +51,7 @@ pub fn transaction_sigops(
for input in &transaction.inputs {
let input_script: Script = input.script_sig.clone().into();
input_sigops += input_script.sigops_count(false);
if bip16_active {
if let Some(store) = store {
let previous_output = match store.previous_transaction_output(&input.previous_output) {
Some(output) => output,
None => return None,

View File

@ -63,7 +63,9 @@ pub fn retarget_timespan(retarget_timestamp: u32, last_timestamp: u32) -> u32 {
/// Returns work required for given header
pub fn work_required(parent_hash: H256, time: u32, height: u32, store: &BlockHeaderProvider, network: Magic) -> Compact {
assert!(height != 0, "cannot calculate required work for genesis block");
if height == 0 {
return network.max_bits();
}
let parent_header = store.block_header(parent_hash.clone().into()).expect("self.height != 0; qed");

View File

@ -0,0 +1,210 @@
use std::collections::HashSet;
use db::IndexedBlock;
use sigops::transaction_sigops_raw;
use error::{Error, TransactionError};
// imports to rethink
use chain_verifier::{MAX_BLOCK_SIZE, MAX_BLOCK_SIGOPS};
/// Aggregates every context-free verification rule for a block.
pub struct BlockVerifier<'a> {
    pub empty: BlockEmpty<'a>,
    pub coinbase: BlockCoinbase<'a>,
    pub serialized_size: BlockSerializedSize<'a>,
    pub extra_coinbases: BlockExtraCoinbases<'a>,
    pub transactions_uniqueness: BlockTransactionsUniqueness<'a>,
    pub sigops: BlockSigops<'a>,
    pub merkle_root: BlockMerkleRoot<'a>,
}

impl<'a> BlockVerifier<'a> {
    /// Builds all rules for the given block.
    pub fn new(block: &'a IndexedBlock) -> Self {
        BlockVerifier {
            merkle_root: BlockMerkleRoot::new(block),
            sigops: BlockSigops::new(block),
            transactions_uniqueness: BlockTransactionsUniqueness::new(block),
            extra_coinbases: BlockExtraCoinbases::new(block),
            serialized_size: BlockSerializedSize::new(block),
            coinbase: BlockCoinbase::new(block),
            empty: BlockEmpty::new(block),
        }
    }

    /// Runs the rules in order, stopping at the first violation.
    pub fn check(&self) -> Result<(), Error> {
        try!(self.empty.check());
        try!(self.coinbase.check());
        try!(self.serialized_size.check());
        try!(self.extra_coinbases.check());
        try!(self.transactions_uniqueness.check());
        try!(self.sigops.check());
        try!(self.merkle_root.check());
        Ok(())
    }
}
/// A single context-free block verification rule.
trait BlockRule {
/// Returns `Ok(())` when the block satisfies the rule, an error otherwise.
fn check(&self) -> Result<(), Error>;
}
pub struct BlockEmpty<'a> {
block: &'a IndexedBlock,
}
impl<'a> BlockEmpty<'a> {
fn new(block: &'a IndexedBlock) -> Self {
BlockEmpty {
block: block,
}
}
}
impl<'a> BlockRule for BlockEmpty<'a> {
fn check(&self) -> Result<(), Error> {
if self.block.transactions.is_empty() {
Err(Error::Empty)
} else {
Ok(())
}
}
}
pub struct BlockSerializedSize<'a> {
block: &'a IndexedBlock,
}
impl<'a> BlockSerializedSize<'a> {
fn new(block: &'a IndexedBlock) -> Self {
BlockSerializedSize {
block: block,
}
}
}
impl<'a> BlockRule for BlockSerializedSize<'a> {
fn check(&self) -> Result<(), Error> {
let size = self.block.size();
if size > MAX_BLOCK_SIZE {
Err(Error::Size(size))
} else {
Ok(())
}
}
}
pub struct BlockCoinbase<'a> {
block: &'a IndexedBlock,
}
impl<'a> BlockCoinbase<'a> {
fn new(block: &'a IndexedBlock) -> Self {
BlockCoinbase {
block: block,
}
}
}
impl<'a> BlockRule for BlockCoinbase<'a> {
fn check(&self) -> Result<(), Error> {
if self.block.transactions.first().map(|tx| tx.raw.is_coinbase()).unwrap_or(false) {
Ok(())
} else {
Err(Error::Coinbase)
}
}
}
pub struct BlockExtraCoinbases<'a> {
block: &'a IndexedBlock,
}
impl<'a> BlockExtraCoinbases<'a> {
fn new(block: &'a IndexedBlock) -> Self {
BlockExtraCoinbases {
block: block,
}
}
}
impl<'a> BlockRule for BlockExtraCoinbases<'a> {
fn check(&self) -> Result<(), Error> {
let misplaced = self.block.transactions.iter()
.skip(1)
.position(|tx| tx.raw.is_coinbase());
match misplaced {
Some(index) => Err(Error::Transaction(index + 1, TransactionError::MisplacedCoinbase)),
None => Ok(()),
}
}
}
/// Rejects blocks that contain two transactions with the same hash.
pub struct BlockTransactionsUniqueness<'a> {
    block: &'a IndexedBlock,
}

impl<'a> BlockTransactionsUniqueness<'a> {
    fn new(block: &'a IndexedBlock) -> Self {
        BlockTransactionsUniqueness {
            block: block,
        }
    }
}

impl<'a> BlockRule for BlockTransactionsUniqueness<'a> {
    fn check(&self) -> Result<(), Error> {
        // Collect hash *references* instead of cloning every hash —
        // a duplicate shrinks the set below the transaction count.
        let hashes: HashSet<_> = self.block.transactions.iter().map(|tx| &tx.hash).collect();
        if hashes.len() == self.block.transactions.len() {
            Ok(())
        } else {
            Err(Error::DuplicatedTransactions)
        }
    }
}
/// Rejects blocks whose total signature-operation count exceeds the limit.
pub struct BlockSigops<'a> {
    block: &'a IndexedBlock,
}

impl<'a> BlockSigops<'a> {
    fn new(block: &'a IndexedBlock) -> Self {
        BlockSigops { block: block }
    }
}

impl<'a> BlockRule for BlockSigops<'a> {
    fn check(&self) -> Result<(), Error> {
        // BIP16 activation cannot be decided without chain context here,
        // so p2sh sigops are not counted (no output provider is passed).
        let mut sigops = 0usize;
        for tx in self.block.transactions.iter() {
            sigops += transaction_sigops_raw(&tx.raw, None).expect("bip16 is disabled");
        }
        if sigops > MAX_BLOCK_SIGOPS {
            Err(Error::MaximumSigops)
        } else {
            Ok(())
        }
    }
}
pub struct BlockMerkleRoot<'a> {
block: &'a IndexedBlock,
}
impl<'a> BlockMerkleRoot<'a> {
fn new(block: &'a IndexedBlock) -> Self {
BlockMerkleRoot {
block: block,
}
}
}
impl<'a> BlockRule for BlockMerkleRoot<'a> {
fn check(&self) -> Result<(), Error> {
if self.block.merkle_root() == self.block.header.raw.merkle_root_hash {
Ok(())
} else {
Err(Error::MerkleRoot)
}
}
}

View File

@ -0,0 +1,195 @@
use std::ops;
use network::{Magic, ConsensusParams};
use db::{SharedStore, IndexedBlock, PreviousTransactionOutputProvider, BlockHeaderProvider};
use sigops::{StoreWithUnretainedOutputs, transaction_sigops};
use utils::{work_required, block_reward_satoshi};
use error::Error;
// imports to rethink
use chain_verifier::MAX_BLOCK_SIGOPS;
const EXPECT_ORDERED: &'static str = "Block ancestors expected to be found in database";
/// Flexible verification of ordered block
/// Flexible verification of ordered block
pub struct OrderedBlockVerifier<'a> {
    pub finality: BlockFinality<'a>,
    pub sigops: BlockSigops<'a>,
    pub work: BlockWork<'a>,
    pub coinbase_claim: BlockCoinbaseClaim<'a>,
}

impl<'a> OrderedBlockVerifier<'a> {
    /// Builds all chain-context-dependent rules for the given block.
    pub fn new(store: &'a SharedStore, network: Magic, block: OrderedBlock<'a>, height: u32) -> Self {
        let params = network.consensus_params();
        let prevouts = store.as_previous_transaction_output_provider();
        OrderedBlockVerifier {
            coinbase_claim: BlockCoinbaseClaim::new(block, prevouts, height),
            work: BlockWork::new(block, store.as_block_header_provider(), height, network),
            sigops: BlockSigops::new(block, prevouts, params),
            finality: BlockFinality::new(block, height),
        }
    }

    /// Runs the rules in order, stopping at the first violation.
    pub fn check(&self) -> Result<(), Error> {
        try!(self.finality.check());
        try!(self.sigops.check());
        try!(self.work.check());
        try!(self.coinbase_claim.check());
        Ok(())
    }
}
/// Blocks whose parents are known to be in the chain
#[derive(Clone, Copy)]
pub struct OrderedBlock<'a> {
    block: &'a IndexedBlock,
}

impl<'a> OrderedBlock<'a> {
    /// Wraps a block whose ancestry has already been established.
    pub fn new(block: &'a IndexedBlock) -> Self {
        OrderedBlock { block: block }
    }
}

impl<'a> ops::Deref for OrderedBlock<'a> {
    type Target = IndexedBlock;

    // Transparent access to the wrapped block.
    fn deref(&self) -> &Self::Target {
        self.block
    }
}
/// A verification rule that requires the block's ancestors to be in the database.
trait OrderedBlockRule {
/// If verification fails returns an error
fn check(&self) -> Result<(), Error>;
}
pub struct BlockFinality<'a> {
block: OrderedBlock<'a>,
height: u32,
}
impl<'a> BlockFinality<'a> {
fn new(block: OrderedBlock<'a>, height: u32) -> Self {
BlockFinality {
block: block,
height: height,
}
}
}
impl<'a> OrderedBlockRule for BlockFinality<'a> {
fn check(&self) -> Result<(), Error> {
if self.block.is_final(self.height) {
Ok(())
} else {
Err(Error::NonFinalBlock)
}
}
}
/// Rejects blocks whose total sigop count (including p2sh sigops once
/// BIP16 is active) exceeds the limit.
pub struct BlockSigops<'a> {
    block: OrderedBlock<'a>,
    store: &'a PreviousTransactionOutputProvider,
    consensus_params: ConsensusParams,
}

impl<'a> BlockSigops<'a> {
    fn new(block: OrderedBlock<'a>, store: &'a PreviousTransactionOutputProvider, consensus_params: ConsensusParams) -> Self {
        BlockSigops {
            block: block,
            store: store,
            consensus_params: consensus_params,
        }
    }
}

impl<'a> OrderedBlockRule for BlockSigops<'a> {
    fn check(&self) -> Result<(), Error> {
        // Previous outputs may live in the store or in this very block.
        let store = StoreWithUnretainedOutputs::new(self.store, &*self.block);
        // BIP16 activates by block timestamp.
        let bip16_active = self.block.header.raw.time >= self.consensus_params.bip16_time;
        let mut sigops = 0usize;
        for tx in self.block.transactions.iter() {
            sigops += transaction_sigops(&tx.raw, &store, bip16_active).expect(EXPECT_ORDERED);
        }
        if sigops > MAX_BLOCK_SIGOPS {
            Err(Error::MaximumSigops)
        } else {
            Ok(())
        }
    }
}
pub struct BlockWork<'a> {
block: OrderedBlock<'a>,
store: &'a BlockHeaderProvider,
height: u32,
network: Magic,
}
impl<'a> BlockWork<'a> {
fn new(block: OrderedBlock<'a>, store: &'a BlockHeaderProvider, height: u32, network: Magic) -> Self {
BlockWork {
block: block,
store: store,
height: height,
network: network,
}
}
}
impl<'a> OrderedBlockRule for BlockWork<'a> {
fn check(&self) -> Result<(), Error> {
let previous_header_hash = self.block.header.raw.previous_header_hash.clone();
let time = self.block.header.raw.time;
let work = work_required(previous_header_hash, time, self.height, self.store, self.network);
if work == self.block.header.raw.bits {
Ok(())
} else {
Err(Error::Difficulty)
}
}
}
/// Requires the coinbase transaction to claim no more than
/// block reward plus the fees of all other transactions.
pub struct BlockCoinbaseClaim<'a> {
    block: OrderedBlock<'a>,
    store: &'a PreviousTransactionOutputProvider,
    height: u32,
}

impl<'a> BlockCoinbaseClaim<'a> {
    fn new(block: OrderedBlock<'a>, store: &'a PreviousTransactionOutputProvider, height: u32) -> Self {
        BlockCoinbaseClaim {
            block: block,
            store: store,
            height: height,
        }
    }
}

impl<'a> OrderedBlockRule for BlockCoinbaseClaim<'a> {
    fn check(&self) -> Result<(), Error> {
        // Previous outputs may live in the store or in this very block.
        let store = StoreWithUnretainedOutputs::new(self.store, &*self.block);

        // Total value flowing *into* non-coinbase transactions: the values
        // of every previous output they spend. (Was misleadingly named
        // `total_outputs` before.)
        let total_input_value = self.block.transactions.iter()
            .skip(1)
            .flat_map(|tx| tx.raw.inputs.iter())
            .map(|input| store.previous_transaction_output(&input.previous_output).expect(EXPECT_ORDERED))
            .map(|output| output.value)
            .sum::<u64>();

        // Total value flowing *out of* non-coinbase transactions.
        // (Was misleadingly named `total_inputs` before.)
        let total_output_value = self.block.transactions.iter()
            .skip(1)
            .map(|tx| tx.raw.total_spends())
            .sum::<u64>();

        // NOTE(review): assumes transactions[0] exists — ordered blocks are
        // expected to have passed the context-free coinbase checks first.
        let claim = self.block.transactions[0].raw.total_spends();

        // Fees = inputs - outputs; overflow means some tx spends more than
        // it consumes, which is itself an overspend.
        let (fees, overflow) = total_input_value.overflowing_sub(total_output_value);
        let reward = fees + block_reward_satoshi(self.height);
        if overflow || claim > reward {
            Err(Error::CoinbaseOverspend { expected_max: reward, actual: claim })
        } else {
            Ok(())
        }
    }
}