verification refactor almost done

debris 2016-12-12 12:04:19 +01:00
parent 552b7b2468
commit 64a3d4d080
20 changed files with 552 additions and 486 deletions

Cargo.lock (generated)
View File

@ -7,10 +7,8 @@ dependencies = [
"ethcore-devtools 1.3.0",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"network 0.1.0",
"parking_lot 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"primitives 0.1.0",
"rayon 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
"scoped-pool 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"script 0.1.0",
"serialization 0.1.0",
"test-data 0.1.0",
@ -855,26 +853,11 @@ dependencies = [
"semver 0.1.20 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "scoped-pool"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
"scopeguard 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"variance 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "scoped-tls"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "scopeguard"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "script"
version = "0.1.0"
@ -1174,11 +1157,6 @@ name = "utf8-ranges"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "variance"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "vec_map"
version = "0.6.0"
@ -1303,9 +1281,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)" = "f76d05d3993fd5f4af9434e8e436db163a12a9d40e1a58a726f27a01dfd12a2a"
"checksum rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)" = "237546c689f20bb44980270c73c3b9edd0891c1be49cc1274406134a66d3957b"
"checksum rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "c5f5376ea5e30ce23c03eb77cbe4962b988deead10910c372b226388b594c084"
"checksum scoped-pool 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "817a3a15e704545ce59ed2b5c60a5d32bda4d7869befb8b36667b658a6c00b43"
"checksum scoped-tls 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f417c22df063e9450888a7561788e9bd46d3bb3c1466435b4eccb903807f147d"
"checksum scopeguard 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "59a076157c1e2dc561d8de585151ee6965d910dd4dcb5dabb7ae3e83981a6c57"
"checksum semver 0.1.20 (registry+https://github.com/rust-lang/crates.io-index)" = "d4f410fedcf71af0345d7607d246e7ad15faaadd49d240ee3b24e5dc21a820ac"
"checksum serde 0.8.19 (registry+https://github.com/rust-lang/crates.io-index)" = "58a19c0871c298847e6b68318484685cd51fa5478c0c905095647540031356e5"
"checksum serde_codegen 0.8.9 (registry+https://github.com/rust-lang/crates.io-index)" = "da68810d845f8e33a80243c28794650397056cbe7aea4c9c7516f55d1061c94e"
@ -1336,7 +1312,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "36dff09cafb4ec7c8cf0023eb0b686cb6ce65499116a12201c9e11840ca01beb"
"checksum url 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "48ccf7bd87a81b769cf84ad556e034541fb90e1cd6d4bc375c822ed9500cd9d7"
"checksum utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a1ca13c08c41c9c3e04224ed9ff80461d97e121589ff27c753a16cb10830ae0f"
"checksum variance 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3abfc2be1fb59663871379ea884fd81de80c496f2274e021c01d6fe56cd77b05"
"checksum vec_map 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cac5efe5cb0fa14ec2f84f83c701c562ee63f6dcc680861b21d65c682adfb05f"
"checksum vecio 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0795a11576d29ae80525a3fda315bf7b534f8feb9d34101e5fe63fb95bb2fd24"
"checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"

View File

@ -3,10 +3,8 @@ use chain::{OutPoint, TransactionOutput};
use db::{SharedStore, IndexedTransaction, PreviousTransactionOutputProvider};
use network::Magic;
use memory_pool::{MemoryPool, OrderingStrategy, Entry};
use verification::{
work_required, block_reward_satoshi, transaction_sigops,
MAX_BLOCK_SIZE, MAX_BLOCK_SIGOPS
};
use verification::{work_required, block_reward_satoshi, transaction_sigops};
pub use verification::constants::{MAX_BLOCK_SIZE, MAX_BLOCK_SIGOPS};
const BLOCK_VERSION: u32 = 0x20000000;
const BLOCK_HEADER_SIZE: u32 = 4 + 32 + 32 + 4 + 4 + 4;
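
For reference, the summands in BLOCK_HEADER_SIZE are the serialized header fields: version (4), previous header hash (32), merkle root hash (32), time (4), bits (4), nonce (4), totalling the canonical 80-byte Bitcoin block header. A one-line check of the arithmetic:

fn main() {
    // 4 + 32 + 32 + 4 + 4 + 4 = 80 bytes
    assert_eq!(4 + 32 + 32 + 4 + 4 + 4, 80);
}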
@ -162,7 +160,7 @@ impl<'a, T> FittingTransactionsIterator<'a, T> where T: Iterator<Item = &'a Entr
}
}
impl<'a, T> PreviousTransactionOutputProvider for FittingTransactionsIterator<'a, T> {
impl<'a, T> PreviousTransactionOutputProvider for FittingTransactionsIterator<'a, T> where T: Send + Sync {
fn previous_transaction_output(&self, prevout: &OutPoint) -> Option<TransactionOutput> {
self.store.previous_transaction_output(prevout)
.or_else(|| {
@ -174,7 +172,7 @@ impl<'a, T> PreviousTransactionOutputProvider for FittingTransactionsIterator<'a
}
}
impl<'a, T> Iterator for FittingTransactionsIterator<'a, T> where T: Iterator<Item = &'a Entry> {
impl<'a, T> Iterator for FittingTransactionsIterator<'a, T> where T: Iterator<Item = &'a Entry> + Send + Sync {
type Item = &'a Entry;
fn next(&mut self) -> Option<Self::Item> {
@ -263,7 +261,7 @@ impl BlockAssembler {
#[cfg(test)]
mod tests {
use db::IndexedTransaction;
use verification::{MAX_BLOCK_SIZE, MAX_BLOCK_SIGOPS};
use verification::constants::{MAX_BLOCK_SIZE, MAX_BLOCK_SIGOPS};
use memory_pool::Entry;
use super::{SizePolicy, NextStep, FittingTransactionsIterator};

View File

@ -49,7 +49,7 @@ use std::sync::Arc;
use parking_lot::RwLock;
use tokio_core::reactor::Handle;
use network::Magic;
use verification::ChainVerifier;
use verification::BackwardsCompatibleChainVerifier as ChainVerifier;
/// Sync errors.
#[derive(Debug)]

View File

@ -341,7 +341,7 @@ mod tests {
use synchronization_verifier::tests::DummyVerifier;
use tokio_core::reactor::{Core, Handle};
use primitives::bytes::Bytes;
use verification::ChainVerifier;
use verification::BackwardsCompatibleChainVerifier as ChainVerifier;
struct DummyOutboundSyncConnection;

View File

@ -29,7 +29,7 @@ use synchronization_verifier::{Verifier, VerificationSink, BlockVerificationSink
use compact_block_builder::build_compact_block;
use hash_queue::HashPosition;
use miner::transaction_fee_rate;
use verification::ChainVerifier;
use verification::BackwardsCompatibleChainVerifier as ChainVerifier;
use time;
use std::time::Duration;
use miner::{BlockAssembler, BlockTemplate};
@ -1827,7 +1827,7 @@ pub mod tests {
use synchronization_verifier::tests::DummyVerifier;
use synchronization_server::ServerTaskIndex;
use primitives::hash::H256;
use verification::ChainVerifier;
use verification::BackwardsCompatibleChainVerifier as ChainVerifier;
use p2p::event_loop;
use test_data;
use db::{self, BlockHeaderProvider};

View File

@ -6,7 +6,7 @@ use chain::{Transaction, OutPoint, TransactionOutput};
use network::Magic;
use primitives::hash::H256;
use synchronization_chain::ChainRef;
use verification::{ChainVerifier, Verify as VerificationVerify, Chain};
use verification::{BackwardsCompatibleChainVerifier as ChainVerifier, Verify as VerificationVerify, Chain};
use db::{SharedStore, IndexedBlock, PreviousTransactionOutputProvider, TransactionOutputObserver};
use time::get_time;

View File

@ -4,10 +4,8 @@ version = "0.1.0"
authors = ["Nikolay Volf <nikvolf@gmail.com>"]
[dependencies]
parking_lot = "0.3"
time = "0.1"
log = "0.3"
scoped-pool = "1.0"
rayon = "0.5"
ethcore-devtools = { path = "../devtools" }

View File

@ -1,7 +1,7 @@
use network::{Magic, ConsensusParams};
use db::PreviousTransactionOutputProvider;
use sigops::transaction_sigops;
use utils::block_reward_satoshi;
use work::block_reward_satoshi;
use duplex_store::DuplexTransactionOutputProvider;
use canon::CanonBlock;
use constants::MAX_BLOCK_SIGOPS;

View File

@ -30,10 +30,23 @@ impl<'a> ChainAcceptor<'a> {
pub fn check(&self) -> Result<(), Error> {
try!(self.block.check());
try!(self.header.check());
self.transactions.par_iter()
.enumerate()
.fold(|| Ok(()), |result, (index, tx)| result.and_then(|_| tx.check().map_err(|err| Error::Transaction(index, err))))
.reduce(|| Ok(()), |acc, check| acc.and(check))?;
try!(self.check_transactions_with_eval(true));
Ok(())
}
/// backwards test compatibility
/// TODO: get rid of this
pub fn check_with_eval(&self, eval: bool) -> Result<(), Error> {
try!(self.block.check());
try!(self.header.check());
try!(self.check_transactions_with_eval(eval));
Ok(())
}
fn check_transactions_with_eval(&self, eval: bool) -> Result<(), Error> {
self.transactions.par_iter()
.enumerate()
.fold(|| Ok(()), |result, (index, tx)| result.and_then(|_| tx.check_with_eval(eval).map_err(|err| Error::Transaction(index, err))))
.reduce(|| Ok(()), |acc, check| acc.and(check))
}
}
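
The par_iter/enumerate/fold/reduce shape above is rayon's idiom for running fallible checks in parallel and collapsing the per-worker results into a single Result. A minimal, self-contained sketch of the same pattern; the check function and error type are placeholders, not the crate's own:

use rayon::prelude::*;

fn check(value: &u32) -> Result<(), String> {
    if *value % 2 == 0 { Ok(()) } else { Err(format!("odd value: {}", value)) }
}

fn main() {
    let items: Vec<u32> = (0..100).map(|i| i * 2).collect();
    let result: Result<(), (usize, String)> = items.par_iter()
        .enumerate()
        // each worker folds its chunk into one Result, keeping the first
        // error together with the index of the failing item
        .fold(|| Ok(()), |acc: Result<(), (usize, String)>, (index, item)| {
            acc.and_then(|_| check(item).map_err(|err| (index, err)))
        })
        // the per-chunk Results are then combined into a single one
        .reduce(|| Ok(()), |acc, next| acc.and(next));
    assert_eq!(result, Ok(()));
}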

View File

@ -5,7 +5,7 @@ use db::BlockHeaderProvider;
use canon::{CanonHeader, EXPECT_CANON};
use constants::MIN_BLOCK_VERSION;
use error::Error;
use utils::work_required;
use work::work_required;
pub struct HeaderAcceptor<'a> {
pub version: HeaderVersion<'a>,

View File

@ -48,6 +48,20 @@ impl<'a> TransactionAcceptor<'a> {
try!(self.eval.check());
Ok(())
}
/// backwards test compatibility
/// TODO: get rid of this
pub fn check_with_eval(&self, eval: bool) -> Result<(), TransactionError> {
try!(self.bip30.check());
try!(self.missing_inputs.check());
// TODO: double spends
try!(self.maturity.check());
try!(self.overspent.check());
if eval {
try!(self.eval.check());
}
Ok(())
}
}
pub struct MemoryPoolTransactionAcceptor<'a> {
@ -56,6 +70,7 @@ pub struct MemoryPoolTransactionAcceptor<'a> {
pub maturity: TransactionMaturity<'a>,
pub overspent: TransactionOverspent<'a>,
pub sigops: TransactionSigops<'a>,
pub eval: TransactionEval<'a>,
}
impl<'a> MemoryPoolTransactionAcceptor<'a> {
@ -75,7 +90,8 @@ impl<'a> MemoryPoolTransactionAcceptor<'a> {
missing_inputs: TransactionMissingInputs::new(transaction, prevout_store),
maturity: TransactionMaturity::new(transaction, meta_store, height),
overspent: TransactionOverspent::new(transaction, prevout_store),
sigops: TransactionSigops::new(transaction, prevout_store, params, MAX_BLOCK_SIGOPS, time),
sigops: TransactionSigops::new(transaction, prevout_store, params.clone(), MAX_BLOCK_SIGOPS, time),
eval: TransactionEval::new(transaction, prevout_store, params, height, time),
}
}
@ -86,6 +102,7 @@ impl<'a> MemoryPoolTransactionAcceptor<'a> {
try!(self.maturity.check());
try!(self.overspent.check());
try!(self.sigops.check());
try!(self.eval.check());
Ok(())
}
}
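
Each field of the acceptor above is a small rule object whose check() returns a Result, and the acceptor simply runs the rules in order, short-circuiting on the first failure. A minimal sketch of that shape with hypothetical rules (RuleA and RuleB are illustrative, not crate types; ? is the modern equivalent of the try! macro used here):

struct RuleA;
struct RuleB;

impl RuleA { fn check(&self) -> Result<(), String> { Ok(()) } }
impl RuleB { fn check(&self) -> Result<(), String> { Err("overspent".into()) } }

struct Acceptor { a: RuleA, b: RuleB }

impl Acceptor {
    // run every rule in order; the first failing check aborts the rest
    fn check(&self) -> Result<(), String> {
        self.a.check()?;
        self.b.check()?;
        Ok(())
    }
}

fn main() {
    let acceptor = Acceptor { a: RuleA, b: RuleB };
    assert_eq!(acceptor.check(), Err("overspent".to_string()));
}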

View File

@ -1,17 +1,17 @@
//! Bitcoin chain verifier
use std::collections::BTreeSet;
use scoped_pool::Pool;
use hash::H256;
use db::{self, BlockLocation, PreviousTransactionOutputProvider, BlockHeaderProvider, TransactionOutputObserver};
use network::{Magic, ConsensusParams};
use db::{self, IndexedBlockHeader, BlockLocation, PreviousTransactionOutputProvider, BlockHeaderProvider, TransactionOutputObserver};
use network::Magic;
use error::{Error, TransactionError};
use sigops::{StoreWithUnretainedOutputs, transaction_sigops};
use {Verify, chain, utils};
use constants::{BLOCK_MAX_FUTURE, COINBASE_MATURITY, MAX_BLOCK_SIZE, MAX_BLOCK_SIGOPS};
const TRANSACTIONS_VERIFY_THREADS: usize = 8;
const TRANSACTIONS_VERIFY_PARALLEL_THRESHOLD: usize = 32;
use {Verify, chain};
use canon::{CanonBlock, CanonTransaction};
use duplex_store::DuplexTransactionOutputProvider;
use verify_chain::ChainVerifier;
use verify_header::HeaderVerifier;
use verify_transaction::MemoryPoolTransactionVerifier;
use accept_chain::ChainAcceptor;
use accept_transaction::MemoryPoolTransactionAcceptor;
#[derive(PartialEq, Debug)]
/// Block verification chain
@ -27,24 +27,20 @@ pub enum Chain {
/// Verification result
pub type VerificationResult = Result<Chain, Error>;
pub struct ChainVerifier {
pub struct BackwardsCompatibleChainVerifier {
store: db::SharedStore,
skip_pow: bool,
skip_sig: bool,
network: Magic,
consensus_params: ConsensusParams,
pool: Pool,
}
impl ChainVerifier {
impl BackwardsCompatibleChainVerifier {
pub fn new(store: db::SharedStore, network: Magic) -> Self {
ChainVerifier {
BackwardsCompatibleChainVerifier {
store: store,
skip_pow: false,
skip_sig: false,
network: network,
consensus_params: network.consensus_params(),
pool: Pool::new(TRANSACTIONS_VERIFY_THREADS),
}
}
@ -60,104 +56,43 @@ impl ChainVerifier {
self
}
pub fn verify_p2sh(&self, time: u32) -> bool {
time >= self.consensus_params.bip16_time
fn verify_block(&self, block: &db::IndexedBlock) -> VerificationResult {
let current_time = ::time::get_time().sec as u32;
// first run pre-verification
let chain_verifier = ChainVerifier::new(block, self.network, current_time);
try!(chain_verifier.check_with_pow(!self.skip_pow));
// check pre-verified header location
// TODO: now this function allows full verification for sidechain block
// it should allow full verification only for canon blocks
let location = match self.store.accepted_location(&block.header.raw) {
Some(location) => location,
None => return Ok(Chain::Orphan),
};
// now do full verification
let canon_block = CanonBlock::new(block);
let chain_acceptor = ChainAcceptor::new(&self.store, self.network, canon_block, location.height());
try!(chain_acceptor.check_with_eval(!self.skip_sig));
match location {
BlockLocation::Main(_) => Ok(Chain::Main),
BlockLocation::Side(_) => Ok(Chain::Side),
}
}
pub fn verify_clocktimeverify(&self, height: u32) -> bool {
height >= self.consensus_params.bip65_height
}
/// Returns number of block signature operations.
/// NOTE: This function expects all previous blocks to be already in database.
fn block_sigops(&self, block: &db::IndexedBlock) -> usize {
// strict pay-to-script-hash signature operations count toward block
// signature operations limit is enforced with BIP16
let store = StoreWithUnretainedOutputs::new(self.store.as_previous_transaction_output_provider(), block);
let bip16_active = self.verify_p2sh(block.header.raw.time);
block.transactions.iter().map(|tx| {
transaction_sigops(&tx.raw, &store, bip16_active)
.expect("missing tx, out of order verification or malformed db")
}).sum()
}
fn ordered_verify(&self, block: &db::IndexedBlock, at_height: u32) -> Result<(), Error> {
if !block.is_final(at_height) {
return Err(Error::NonFinalBlock);
}
// transaction verification including number of signature operations checking
if self.block_sigops(block) > MAX_BLOCK_SIGOPS {
return Err(Error::MaximumSigops);
}
let block_hash = block.hash();
// check that difficulty matches the adjusted level
//if let Some(work) = self.work_required(block, at_height) {
if at_height != 0 && !self.skip_pow {
let work = utils::work_required(
block.header.raw.previous_header_hash.clone(),
block.header.raw.time,
at_height,
self.store.as_block_header_provider(),
self.network
);
if !self.skip_pow && work != block.header.raw.bits {
trace!(target: "verification", "pow verification error at height: {}", at_height);
trace!(target: "verification", "expected work: {:?}, got {:?}", work, block.header.raw.bits);
return Err(Error::Difficulty);
}
}
let coinbase_spends = block.transactions[0].raw.total_spends();
// bip30
for (tx_index, tx) in block.transactions.iter().enumerate() {
if let Some(meta) = self.store.transaction_meta(&tx.hash) {
if !meta.is_fully_spent() && !self.consensus_params.is_bip30_exception(&block_hash, at_height) {
return Err(Error::Transaction(tx_index, TransactionError::UnspentTransactionWithTheSameHash));
}
}
}
let unretained_store = StoreWithUnretainedOutputs::new(self.store.as_previous_transaction_output_provider(), block);
let mut total_unspent = 0u64;
for (tx_index, tx) in block.transactions.iter().enumerate().skip(1) {
let mut total_claimed: u64 = 0;
for input in &tx.raw.inputs {
// Coinbase maturity check
if let Some(previous_meta) = self.store.transaction_meta(&input.previous_output.hash) {
// check if it exists only
// it will fail a little later if there is no transaction at all
if previous_meta.is_coinbase() &&
(at_height < COINBASE_MATURITY || at_height - COINBASE_MATURITY < previous_meta.height()) {
return Err(Error::Transaction(tx_index, TransactionError::Maturity));
}
}
let previous_output = unretained_store.previous_transaction_output(&input.previous_output)
.expect("missing tx, out of order verification or malformed db");
total_claimed += previous_output.value;
}
let total_spends = tx.raw.total_spends();
if total_claimed < total_spends {
return Err(Error::Transaction(tx_index, TransactionError::Overspend));
}
// total_claimed is greater than total_spends, checked above and returned otherwise, cannot overflow; qed
total_unspent += total_claimed - total_spends;
}
let expected_max = utils::block_reward_satoshi(at_height) + total_unspent;
if coinbase_spends > expected_max {
return Err(Error::CoinbaseOverspend { expected_max: expected_max, actual: coinbase_spends });
}
Ok(())
pub fn verify_block_header(
&self,
_block_header_provider: &BlockHeaderProvider,
hash: &H256,
header: &chain::BlockHeader
) -> Result<(), Error> {
// let's do only pre-verification
// TODO: full verification
let current_time = ::time::get_time().sec as u32;
let header = IndexedBlockHeader::new(hash.clone(), header.clone());
let header_verifier = HeaderVerifier::new(&header, self.network, current_time);
header_verifier.check_with_pow(!self.skip_pow)
}
pub fn verify_transaction<T>(
@ -166,210 +101,29 @@ impl ChainVerifier {
height: u32,
time: u32,
transaction: &chain::Transaction,
sequence: usize
_sequence: usize
) -> Result<(), TransactionError> where T: PreviousTransactionOutputProvider + TransactionOutputObserver {
let indexed_tx = transaction.clone().into();
// let's do preverification first
let tx_verifier = MemoryPoolTransactionVerifier::new(&indexed_tx);
try!(tx_verifier.check());
use script::{
TransactionInputSigner,
TransactionSignatureChecker,
VerificationFlags,
Script,
verify_script,
};
if sequence == 0 {
return Ok(());
}
// must not be coinbase (sequence = 0 is returned above)
if transaction.is_coinbase() { return Err(TransactionError::MisplacedCoinbase); }
let unretained_store = StoreWithUnretainedOutputs::new(self.store.as_previous_transaction_output_provider(), prevout_provider);
for (input_index, input) in transaction.inputs().iter().enumerate() {
// signature verification
let signer: TransactionInputSigner = transaction.clone().into();
let paired_output = match unretained_store.previous_transaction_output(&input.previous_output) {
Some(output) => output,
_ => return Err(TransactionError::UnknownReference(input.previous_output.hash.clone()))
};
// unwrap_or(false) is actually not right!
// but can be here because of two reasons
// - this function is not responsible for checking if previous transactions
// in currently processed block / mempool already spent this output
// - if we process transactions from mempool we shouldn't care if transactions before it
// spent this output, cause they may not make their way into the block due to their size
// or sigops limit
if prevout_provider.is_spent(&input.previous_output).unwrap_or(false) {
return Err(TransactionError::UsingSpentOutput(input.previous_output.hash.clone(), input.previous_output.index))
}
let checker = TransactionSignatureChecker {
signer: signer,
input_index: input_index,
};
let input: Script = input.script_sig.clone().into();
let output: Script = paired_output.script_pubkey.into();
let flags = VerificationFlags::default()
.verify_p2sh(self.verify_p2sh(time))
.verify_clocktimeverify(self.verify_clocktimeverify(height));
// for tests only, skips as late as possible
if self.skip_sig { continue; }
if let Err(e) = verify_script(&input, &output, &flags, &checker) {
trace!(target: "verification", "transaction signature verification failure: {:?}", e);
trace!(target: "verification", "input:\n{}", input);
trace!(target: "verification", "output:\n{}", output);
// todo: log error here
return Err(TransactionError::Signature(input_index))
}
}
Ok(())
}
pub fn verify_block_header(
&self,
block_header_provider: &BlockHeaderProvider,
hash: &H256,
header: &chain::BlockHeader
) -> Result<(), Error> {
// target difficulty threshold
if !self.skip_pow && !utils::is_valid_proof_of_work(self.network.max_bits(), header.bits, hash) {
return Err(Error::Pow);
}
// check if block timestamp is not far in the future
if utils::age(header.time) < -BLOCK_MAX_FUTURE {
return Err(Error::FuturisticTimestamp);
}
if let Some(median_timestamp) = self.median_timestamp(block_header_provider, header) {
// TODO: make timestamp validation on testnet work...
if self.network != Magic::Testnet && median_timestamp >= header.time {
trace!(
target: "verification", "median timestamp verification failed, median: {}, current: {}",
median_timestamp,
header.time
);
return Err(Error::Timestamp);
}
}
Ok(())
}
fn verify_block(&self, block: &db::IndexedBlock) -> VerificationResult {
use task::Task;
let hash = block.hash();
// There should be at least 1 transaction
if block.transactions.is_empty() {
return Err(Error::Empty);
}
// block header checks
try!(self.verify_block_header(self.store.as_block_header_provider(), &hash, &block.header.raw));
// todo: serialized_size function is at least suboptimal
let size = block.size();
if size > MAX_BLOCK_SIZE {
return Err(Error::Size(size))
}
// verify merkle root
if block.merkle_root() != block.header.raw.merkle_root_hash {
return Err(Error::MerkleRoot);
}
let first_tx = &block.transactions[0].raw;
// check first transaction is a coinbase transaction
if !first_tx.is_coinbase() {
return Err(Error::Coinbase)
}
// check that coinbase has a valid signature
// is_coinbase() = true above guarantees that there is at least one input
let coinbase_script_len = first_tx.inputs[0].script_sig.len();
if coinbase_script_len < 2 || coinbase_script_len > 100 {
return Err(Error::CoinbaseSignatureLength(coinbase_script_len));
}
let location = match self.store.accepted_location(&block.header.raw) {
Some(location) => location,
None => return Ok(Chain::Orphan),
};
if block.transactions.len() > TRANSACTIONS_VERIFY_PARALLEL_THRESHOLD {
// todo: might use on-stack vector (smallvec/elastic array)
let mut transaction_tasks: Vec<Task> = Vec::with_capacity(TRANSACTIONS_VERIFY_THREADS);
let mut last = 0;
for num_task in 0..TRANSACTIONS_VERIFY_THREADS {
let from = last;
last = from + ::std::cmp::max(1, block.transactions.len() / TRANSACTIONS_VERIFY_THREADS);
if num_task == TRANSACTIONS_VERIFY_THREADS - 1 { last = block.transactions.len(); };
transaction_tasks.push(Task::new(block, location.height(), from, last));
}
self.pool.scoped(|scope| {
for task in transaction_tasks.iter_mut() {
scope.execute(move || task.progress(self))
}
self.store.flush();
});
for task in transaction_tasks.into_iter() {
if let Err((index, tx_err)) = task.result() {
return Err(Error::Transaction(index, tx_err));
}
}
}
else {
for (index, tx) in block.transactions.iter().enumerate() {
if let Err(tx_err) = self.verify_transaction(block, location.height(), block.header.raw.time, &tx.raw, index) {
return Err(Error::Transaction(index, tx_err));
}
}
}
// todo: pre-process projected block number once verification is parallel!
match location {
BlockLocation::Main(block_number) => {
try!(self.ordered_verify(block, block_number));
Ok(Chain::Main)
},
BlockLocation::Side(block_number) => {
try!(self.ordered_verify(block, block_number));
Ok(Chain::Side)
},
}
}
fn median_timestamp(&self, block_header_provider: &BlockHeaderProvider, header: &chain::BlockHeader) -> Option<u32> {
let mut timestamps = BTreeSet::new();
let mut block_ref = header.previous_header_hash.clone().into();
// TODO: optimize it, so it does not make 11 redundant queries each time
for _ in 0..11 {
let previous_header = match block_header_provider.block_header(block_ref) {
Some(h) => h,
None => { break; }
};
timestamps.insert(previous_header.time);
block_ref = previous_header.previous_header_hash.into();
}
if timestamps.len() > 2 {
let timestamps: Vec<_> = timestamps.into_iter().collect();
Some(timestamps[timestamps.len() / 2])
}
else { None }
let canon_tx = CanonTransaction::new(&indexed_tx);
// now let's do full verification
let prevouts = DuplexTransactionOutputProvider::new(self.store.as_previous_transaction_output_provider(), prevout_provider);
let tx_acceptor = MemoryPoolTransactionAcceptor::new(
self.store.as_transaction_meta_provider(),
prevouts,
self.network,
canon_tx,
height,
time
);
tx_acceptor.check()
}
}
impl Verify for ChainVerifier {
impl Verify for BackwardsCompatibleChainVerifier {
fn verify(&self, block: &db::IndexedBlock) -> VerificationResult {
let result = self.verify_block(block);
trace!(
@ -382,6 +136,361 @@ impl Verify for ChainVerifier {
}
}
//pub struct ChainVerifier {
//store: db::SharedStore,
//skip_pow: bool,
//skip_sig: bool,
//network: Magic,
//consensus_params: ConsensusParams,
//pool: Pool,
//}
//impl ChainVerifier {
//pub fn new(store: db::SharedStore, network: Magic) -> Self {
//ChainVerifier {
//store: store,
//skip_pow: false,
//skip_sig: false,
//network: network,
//consensus_params: network.consensus_params(),
//pool: Pool::new(TRANSACTIONS_VERIFY_THREADS),
//}
//}
//#[cfg(test)]
//pub fn pow_skip(mut self) -> Self {
//self.skip_pow = true;
//self
//}
//#[cfg(test)]
//pub fn signatures_skip(mut self) -> Self {
//self.skip_sig = true;
//self
//}
//pub fn verify_p2sh(&self, time: u32) -> bool {
//time >= self.consensus_params.bip16_time
//}
//pub fn verify_clocktimeverify(&self, height: u32) -> bool {
//height >= self.consensus_params.bip65_height
//}
///// Returns number of block signature operations.
///// NOTE: This function expects all previous blocks to be already in database.
//fn block_sigops(&self, block: &db::IndexedBlock) -> usize {
//// strict pay-to-script-hash signature operations count toward block
//// signature operations limit is enforced with BIP16
//let store = StoreWithUnretainedOutputs::new(self.store.as_previous_transaction_output_provider(), block);
//let bip16_active = self.verify_p2sh(block.header.raw.time);
//block.transactions.iter().map(|tx| {
//transaction_sigops(&tx.raw, &store, bip16_active)
//.expect("missing tx, out of order verification or malformed db")
//}).sum()
//}
//fn ordered_verify(&self, block: &db::IndexedBlock, at_height: u32) -> Result<(), Error> {
//if !block.is_final(at_height) {
//return Err(Error::NonFinalBlock);
//}
//// transaction verification including number of signature operations checking
//if self.block_sigops(block) > MAX_BLOCK_SIGOPS {
//return Err(Error::MaximumSigops);
//}
//let block_hash = block.hash();
//// check that difficulty matches the adjusted level
////if let Some(work) = self.work_required(block, at_height) {
//if at_height != 0 && !self.skip_pow {
//let work = utils::work_required(
//block.header.raw.previous_header_hash.clone(),
//block.header.raw.time,
//at_height,
//self.store.as_block_header_provider(),
//self.network
//);
//if !self.skip_pow && work != block.header.raw.bits {
//trace!(target: "verification", "pow verification error at height: {}", at_height);
//trace!(target: "verification", "expected work: {:?}, got {:?}", work, block.header.raw.bits);
//return Err(Error::Difficulty);
//}
//}
//let coinbase_spends = block.transactions[0].raw.total_spends();
//// bip30
//for (tx_index, tx) in block.transactions.iter().enumerate() {
//if let Some(meta) = self.store.transaction_meta(&tx.hash) {
//if !meta.is_fully_spent() && !self.consensus_params.is_bip30_exception(&block_hash, at_height) {
//return Err(Error::Transaction(tx_index, TransactionError::UnspentTransactionWithTheSameHash));
//}
//}
//}
//let unretained_store = StoreWithUnretainedOutputs::new(self.store.as_previous_transaction_output_provider(), block);
//let mut total_unspent = 0u64;
//for (tx_index, tx) in block.transactions.iter().enumerate().skip(1) {
//let mut total_claimed: u64 = 0;
//for input in &tx.raw.inputs {
//// Coinbase maturity check
//if let Some(previous_meta) = self.store.transaction_meta(&input.previous_output.hash) {
//// check if it exists only
//// it will fail a little later if there is no transaction at all
//if previous_meta.is_coinbase() &&
//(at_height < COINBASE_MATURITY || at_height - COINBASE_MATURITY < previous_meta.height()) {
//return Err(Error::Transaction(tx_index, TransactionError::Maturity));
//}
//}
//let previous_output = unretained_store.previous_transaction_output(&input.previous_output)
//.expect("missing tx, out of order verification or malformed db");
//total_claimed += previous_output.value;
//}
//let total_spends = tx.raw.total_spends();
//if total_claimed < total_spends {
//return Err(Error::Transaction(tx_index, TransactionError::Overspend));
//}
//// total_claimed is greater than total_spends, checked above and returned otherwise, cannot overflow; qed
//total_unspent += total_claimed - total_spends;
//}
//let expected_max = utils::block_reward_satoshi(at_height) + total_unspent;
//if coinbase_spends > expected_max {
//return Err(Error::CoinbaseOverspend { expected_max: expected_max, actual: coinbase_spends });
//}
//Ok(())
//}
//pub fn verify_transaction<T>(
//&self,
//prevout_provider: &T,
//height: u32,
//time: u32,
//transaction: &chain::Transaction,
//sequence: usize
//) -> Result<(), TransactionError> where T: PreviousTransactionOutputProvider + TransactionOutputObserver {
//use script::{
//TransactionInputSigner,
//TransactionSignatureChecker,
//VerificationFlags,
//Script,
//verify_script,
//};
//if sequence == 0 {
//return Ok(());
//}
//// must not be coinbase (sequence = 0 is returned above)
//if transaction.is_coinbase() { return Err(TransactionError::MisplacedCoinbase); }
//let unretained_store = StoreWithUnretainedOutputs::new(self.store.as_previous_transaction_output_provider(), prevout_provider);
//for (input_index, input) in transaction.inputs().iter().enumerate() {
//// signature verification
//let signer: TransactionInputSigner = transaction.clone().into();
//let paired_output = match unretained_store.previous_transaction_output(&input.previous_output) {
//Some(output) => output,
//_ => return Err(TransactionError::UnknownReference(input.previous_output.hash.clone()))
//};
//// unwrap_or(false) is actually not right!
//// but can be here because of two reasons
//// - this function is not responsible for checking if previous transactions
//// in currently processed block / mempool already spent this output
//// - if we process transactions from mempool we shouldn't care if transactions before it
//// spent this output, cause they may not make their way into the block due to their size
//// or sigops limit
//if prevout_provider.is_spent(&input.previous_output).unwrap_or(false) {
//return Err(TransactionError::UsingSpentOutput(input.previous_output.hash.clone(), input.previous_output.index))
//}
//let checker = TransactionSignatureChecker {
//signer: signer,
//input_index: input_index,
//};
//let input: Script = input.script_sig.clone().into();
//let output: Script = paired_output.script_pubkey.into();
//let flags = VerificationFlags::default()
//.verify_p2sh(self.verify_p2sh(time))
//.verify_clocktimeverify(self.verify_clocktimeverify(height));
//// for tests only, skips as late as possible
//if self.skip_sig { continue; }
//if let Err(e) = verify_script(&input, &output, &flags, &checker) {
//trace!(target: "verification", "transaction signature verification failure: {:?}", e);
//trace!(target: "verification", "input:\n{}", input);
//trace!(target: "verification", "output:\n{}", output);
//// todo: log error here
//return Err(TransactionError::Signature(input_index))
//}
//}
//Ok(())
//}
//pub fn verify_block_header(
//&self,
//block_header_provider: &BlockHeaderProvider,
//hash: &H256,
//header: &chain::BlockHeader
//) -> Result<(), Error> {
//// target difficulty threshold
//if !self.skip_pow && !utils::is_valid_proof_of_work(self.network.max_bits(), header.bits, hash) {
//return Err(Error::Pow);
//}
//// check if block timestamp is not far in the future
//if utils::age(header.time) < -BLOCK_MAX_FUTURE {
//return Err(Error::FuturisticTimestamp);
//}
//if let Some(median_timestamp) = self.median_timestamp(block_header_provider, header) {
//// TODO: make timestamp validation on testnet work...
//if self.network != Magic::Testnet && median_timestamp >= header.time {
//trace!(
//target: "verification", "median timestamp verification failed, median: {}, current: {}",
//median_timestamp,
//header.time
//);
//return Err(Error::Timestamp);
//}
//}
//Ok(())
//}
//fn verify_block(&self, block: &db::IndexedBlock) -> VerificationResult {
//use task::Task;
//let hash = block.hash();
//// There should be at least 1 transaction
//if block.transactions.is_empty() {
//return Err(Error::Empty);
//}
//// block header checks
//try!(self.verify_block_header(self.store.as_block_header_provider(), &hash, &block.header.raw));
//// todo: serialized_size function is at least suboptimal
//let size = block.size();
//if size > MAX_BLOCK_SIZE {
//return Err(Error::Size(size))
//}
//// verify merkle root
//if block.merkle_root() != block.header.raw.merkle_root_hash {
//return Err(Error::MerkleRoot);
//}
//let first_tx = &block.transactions[0].raw;
//// check first transaction is a coinbase transaction
//if !first_tx.is_coinbase() {
//return Err(Error::Coinbase)
//}
//// check that coinbase has a valid signature
//// is_coinbase() = true above guarantees that there is at least one input
//let coinbase_script_len = first_tx.inputs[0].script_sig.len();
//if coinbase_script_len < 2 || coinbase_script_len > 100 {
//return Err(Error::CoinbaseSignatureLength(coinbase_script_len));
//}
//let location = match self.store.accepted_location(&block.header.raw) {
//Some(location) => location,
//None => return Ok(Chain::Orphan),
//};
//if block.transactions.len() > TRANSACTIONS_VERIFY_PARALLEL_THRESHOLD {
//// todo: might use on-stack vector (smallvec/elastic array)
//let mut transaction_tasks: Vec<Task> = Vec::with_capacity(TRANSACTIONS_VERIFY_THREADS);
//let mut last = 0;
//for num_task in 0..TRANSACTIONS_VERIFY_THREADS {
//let from = last;
//last = from + ::std::cmp::max(1, block.transactions.len() / TRANSACTIONS_VERIFY_THREADS);
//if num_task == TRANSACTIONS_VERIFY_THREADS - 1 { last = block.transactions.len(); };
//transaction_tasks.push(Task::new(block, location.height(), from, last));
//}
//self.pool.scoped(|scope| {
//for task in transaction_tasks.iter_mut() {
//scope.execute(move || task.progress(self))
//}
//self.store.flush();
//});
//for task in transaction_tasks.into_iter() {
//if let Err((index, tx_err)) = task.result() {
//return Err(Error::Transaction(index, tx_err));
//}
//}
//}
//else {
//for (index, tx) in block.transactions.iter().enumerate() {
//if let Err(tx_err) = self.verify_transaction(block, location.height(), block.header.raw.time, &tx.raw, index) {
//return Err(Error::Transaction(index, tx_err));
//}
//}
//}
//// todo: pre-process projected block number once verification is parallel!
//match location {
//BlockLocation::Main(block_number) => {
//try!(self.ordered_verify(block, block_number));
//Ok(Chain::Main)
//},
//BlockLocation::Side(block_number) => {
//try!(self.ordered_verify(block, block_number));
//Ok(Chain::Side)
//},
//}
//}
//fn median_timestamp(&self, block_header_provider: &BlockHeaderProvider, header: &chain::BlockHeader) -> Option<u32> {
//let mut timestamps = BTreeSet::new();
//let mut block_ref = header.previous_header_hash.clone().into();
//// TODO: optimize it, so it does not make 11 redundant queries each time
//for _ in 0..11 {
//let previous_header = match block_header_provider.block_header(block_ref) {
//Some(h) => h,
//None => { break; }
//};
//timestamps.insert(previous_header.time);
//block_ref = previous_header.previous_header_hash.into();
//}
//if timestamps.len() > 2 {
//let timestamps: Vec<_> = timestamps.into_iter().collect();
//Some(timestamps[timestamps.len() / 2])
//}
//else { None }
//}
//}
//impl Verify for ChainVerifier {
//fn verify(&self, block: &db::IndexedBlock) -> VerificationResult {
//let result = self.verify_block(block);
//trace!(
//target: "verification", "Block {} (transactions: {}) verification finished. Result {:?}",
//block.hash().to_reversed_str(),
//block.transactions.len(),
//result,
//);
//result
//}
//}
#[cfg(test)]
mod tests {
use std::sync::Arc;
@ -389,7 +498,7 @@ mod tests {
use network::Magic;
use devtools::RandomTempPath;
use {script, test_data};
use super::ChainVerifier;
use super::BackwardsCompatibleChainVerifier as ChainVerifier;
use super::super::{Verify, Chain, Error, TransactionError};
#[test]
@ -440,9 +549,13 @@ mod tests {
let genesis_coinbase = genesis.transactions()[0].hash();
let block = test_data::block_builder()
.transaction().coinbase().build()
.transaction()
.coinbase()
.output().value(1).build()
.build()
.transaction()
.input().hash(genesis_coinbase).build()
.output().value(2).build()
.build()
.merkled_header().parent(genesis.hash()).build()
.build();
@ -477,9 +590,13 @@ mod tests {
let reference_tx = genesis.transactions()[1].hash();
let block = test_data::block_builder()
.transaction().coinbase().build()
.transaction()
.coinbase()
.output().value(2).build()
.build()
.transaction()
.input().hash(reference_tx).build()
.output().value(1).build()
.build()
.merkled_header().parent(genesis.hash()).build()
.build();
@ -511,7 +628,10 @@ mod tests {
let first_tx_hash = genesis.transactions()[1].hash();
let block = test_data::block_builder()
.transaction().coinbase().build()
.transaction()
.coinbase()
.output().value(2).build()
.build()
.transaction()
.input().hash(first_tx_hash).build()
.output().value(30).build()
@ -546,17 +666,23 @@ mod tests {
.build();
storage.insert_block(&genesis).expect("Genesis should be inserted with no errors");
let genesis_coinbase = genesis.transactions()[1].hash();
let first_tx_hash = genesis.transactions()[1].hash();
let block = test_data::block_builder()
.transaction().coinbase().build()
.transaction()
.input().hash(genesis_coinbase).build()
.output().value(30).build()
.output().value(20).build()
.coinbase()
.output().value(2).build()
.build()
.transaction()
.input().hash(first_tx_hash).build()
.output().value(19).build()
.output().value(31).build()
.build()
.derived_transaction(1, 0)
.output().value(35).build()
.output().value(20).build()
.build()
.derived_transaction(1, 1)
.output().value(20).build()
.build()
.merkled_header().parent(genesis.hash()).build()
.build();
@ -636,7 +762,7 @@ mod tests {
}
let mut builder_tx2 = script::Builder::default();
for _ in 0..11000 {
for _ in 0..11001 {
builder_tx2 = builder_tx2.push_opcode(script::Opcode::OP_CHECKSIG)
}

View File

@ -7,3 +7,15 @@ pub const MAX_BLOCK_SIGOPS: usize = 20_000;
pub const MIN_COINBASE_SIZE: usize = 2;
pub const MAX_COINBASE_SIZE: usize = 100;
pub const MIN_BLOCK_VERSION: u32 = 0;
pub const RETARGETING_FACTOR: u32 = 4;
pub const TARGET_SPACING_SECONDS: u32 = 10 * 60;
pub const DOUBLE_SPACING_SECONDS: u32 = 2 * TARGET_SPACING_SECONDS;
pub const TARGET_TIMESPAN_SECONDS: u32 = 2 * 7 * 24 * 60 * 60;
// The upper and lower bounds for retargeting timespan
pub const MIN_TIMESPAN: u32 = TARGET_TIMESPAN_SECONDS / RETARGETING_FACTOR;
pub const MAX_TIMESPAN: u32 = TARGET_TIMESPAN_SECONDS * RETARGETING_FACTOR;
// Target number of blocks, 2 weeks, 2016
pub const RETARGETING_INTERVAL: u32 = TARGET_TIMESPAN_SECONDS / TARGET_SPACING_SECONDS;
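
Worked values for the retargeting constants above, as plain arithmetic:

fn main() {
    let target_timespan: u32 = 2 * 7 * 24 * 60 * 60; // two weeks = 1_209_600 s
    let target_spacing: u32 = 10 * 60;               // ten minutes = 600 s
    assert_eq!(target_timespan / target_spacing, 2016);  // RETARGETING_INTERVAL
    assert_eq!(target_timespan / 4, 302_400);            // MIN_TIMESPAN, 3.5 days
    assert_eq!(target_timespan * 4, 4_838_400);          // MAX_TIMESPAN, 8 weeks
}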

View File

@ -1,9 +1,8 @@
//! Some transaction validation rules,
//! require sophisticated (in more than one source) previous transaction lookups
use primitives::hash::H256;
use chain::{OutPoint, TransactionOutput};
use db::{PreviousTransactionOutputProvider, TransactionMetaProvider, TransactionMeta};
use db::PreviousTransactionOutputProvider;
#[derive(Clone, Copy)]
pub struct DuplexTransactionOutputProvider<'a> {
@ -26,26 +25,3 @@ impl<'a> PreviousTransactionOutputProvider for DuplexTransactionOutputProvider<'
.or_else(|| self.second.previous_transaction_output(prevout))
}
}
#[derive(Clone, Copy)]
pub struct DuplexTransactionMetaProvider<'a> {
first: &'a TransactionMetaProvider,
second: &'a TransactionMetaProvider,
}
impl<'a> DuplexTransactionMetaProvider<'a> {
pub fn new(first: &'a TransactionMetaProvider, second: &'a TransactionMetaProvider) -> Self {
DuplexTransactionMetaProvider {
first: first,
second: second,
}
}
}
impl<'a> TransactionMetaProvider for DuplexTransactionMetaProvider<'a> {
fn transaction_meta(&self, hash: &H256) -> Option<TransactionMeta> {
self.first.transaction_meta(hash)
.or_else(|| self.second.transaction_meta(hash))
}
}
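
The surviving DuplexTransactionOutputProvider queries its first source and falls back to the second via Option::or_else; this is how outputs created earlier in the block being verified overlay the database store. A simplified generic sketch of that lookup (the crate itself holds two trait-object references rather than generic fields):

trait OutputProvider {
    fn output(&self, key: u32) -> Option<&'static str>;
}

struct Db;
struct InBlock;

impl OutputProvider for Db {
    fn output(&self, key: u32) -> Option<&'static str> {
        if key == 1 { Some("from db") } else { None }
    }
}

impl OutputProvider for InBlock {
    fn output(&self, key: u32) -> Option<&'static str> {
        if key == 2 { Some("from current block") } else { None }
    }
}

struct Duplex<A, B> { first: A, second: B }

impl<A: OutputProvider, B: OutputProvider> OutputProvider for Duplex<A, B> {
    fn output(&self, key: u32) -> Option<&'static str> {
        // query the first source, fall back to the second
        self.first.output(key).or_else(|| self.second.output(key))
    }
}

fn main() {
    let duplex = Duplex { first: Db, second: InBlock };
    assert_eq!(duplex.output(1), Some("from db"));
    assert_eq!(duplex.output(2), Some("from current block"));
    assert_eq!(duplex.output(3), None);
}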

View File

@ -42,11 +42,9 @@
//! so instead we might want to call AcceptMemoryPoolTransaction on each tx
//! that is inserted into assembled block
extern crate parking_lot;
extern crate time;
#[macro_use]
extern crate log;
extern crate scoped_pool;
extern crate rayon;
extern crate db;
@ -61,12 +59,6 @@ extern crate ethcore_devtools as devtools;
#[cfg(test)]
extern crate test_data;
mod chain_verifier;
mod error;
mod sigops;
mod task;
mod utils;
pub mod constants;
mod duplex_store;
mod canon;
@ -79,24 +71,29 @@ mod verify_chain;
mod verify_header;
mod verify_transaction;
mod chain_verifier;
mod error;
mod sigops;
mod work;
pub use primitives::{uint, hash, compact};
pub use canon::{CanonBlock, CanonHeader, CanonTransaction};
pub use accept_block::BlockAcceptor;
pub use accept_chain::ChainAcceptor;
pub use accept_header::HeaderAcceptor;
pub use accept_transaction::{TransactionAcceptor, MemoryPoolTransactionAcceptor};
pub use verify_block::BlockVerifier;
pub use verify_chain::ChainVerifier as XXXChainVerifier;
pub use verify_chain::ChainVerifier as ChainVerifier;
pub use verify_header::HeaderVerifier;
pub use verify_transaction::{TransactionVerifier, MemoryPoolTransactionVerifier};
pub use chain_verifier::{Chain, ChainVerifier, VerificationResult};
pub use chain_verifier::{Chain, BackwardsCompatibleChainVerifier, VerificationResult};
pub use error::{Error, TransactionError};
pub use sigops::{transaction_sigops, StoreWithUnretainedOutputs};
pub use utils::{work_required, is_valid_proof_of_work, is_valid_proof_of_work_hash, block_reward_satoshi};
pub use sigops::transaction_sigops;
pub use work::{work_required, is_valid_proof_of_work, is_valid_proof_of_work_hash, block_reward_satoshi};
/// Interface for block verification
pub trait Verify : Send + Sync {

View File

@ -1,28 +1,7 @@
use chain::{Transaction, TransactionOutput, OutPoint};
use db::{PreviousTransactionOutputProvider};
use chain::Transaction;
use db::PreviousTransactionOutputProvider;
use script::Script;
pub struct StoreWithUnretainedOutputs<'a> {
store: &'a PreviousTransactionOutputProvider,
outputs: &'a PreviousTransactionOutputProvider,
}
impl<'a> StoreWithUnretainedOutputs<'a> {
pub fn new(store: &'a PreviousTransactionOutputProvider, outputs: &'a PreviousTransactionOutputProvider) -> Self {
StoreWithUnretainedOutputs {
store: store,
outputs: outputs,
}
}
}
impl<'a> PreviousTransactionOutputProvider for StoreWithUnretainedOutputs<'a> {
fn previous_transaction_output(&self, prevout: &OutPoint) -> Option<TransactionOutput> {
self.store.previous_transaction_output(prevout)
.or_else(|| self.outputs.previous_transaction_output(prevout))
}
}
pub fn transaction_sigops(
transaction: &Transaction,
store: &PreviousTransactionOutputProvider,

View File

@ -1,38 +0,0 @@
use chain_verifier::ChainVerifier;
use super::TransactionError;
use db::IndexedBlock;
pub struct Task<'a> {
block: &'a IndexedBlock,
block_height: u32,
from: usize,
to: usize,
result: Result<(), TransactionCheckError>,
}
type TransactionCheckError = (usize, TransactionError);
impl<'a> Task<'a> {
pub fn new(block: &'a IndexedBlock, block_height: u32, from: usize, to: usize) -> Self {
Task {
block: block,
block_height: block_height,
from: from,
to: to,
result: Ok(()),
}
}
pub fn progress(&mut self, verifier: &ChainVerifier) {
for index in self.from..self.to {
if let Err(e) = verifier.verify_transaction(self.block, self.block_height, self.block.header.raw.time, &self.block.transactions[index].raw, index) {
self.result = Err((index, e))
}
}
self.result = Ok(());
}
pub fn result(self) -> Result<(), TransactionCheckError> {
self.result
}
}

View File

@ -24,10 +24,23 @@ impl<'a> ChainVerifier<'a> {
pub fn check(&self) -> Result<(), Error> {
try!(self.block.check());
try!(self.header.check());
try!(self.check_transactions());
Ok(())
}
/// backwards test compatibility
/// TODO: get rid of this
pub fn check_with_pow(&self, pow: bool) -> Result<(), Error> {
try!(self.block.check());
try!(self.header.check_with_pow(pow));
try!(self.check_transactions());
Ok(())
}
fn check_transactions(&self) -> Result<(), Error> {
self.transactions.par_iter()
.enumerate()
.fold(|| Ok(()), |result, (index, tx)| result.and_then(|_| tx.check().map_err(|err| Error::Transaction(index, err))))
.reduce(|| Ok(()), |acc, check| acc.and(check))?;
Ok(())
.reduce(|| Ok(()), |acc, check| acc.and(check))
}
}

View File

@ -1,7 +1,7 @@
use primitives::compact::Compact;
use db::IndexedBlockHeader;
use network::Magic;
use utils::is_valid_proof_of_work;
use work::is_valid_proof_of_work;
use error::Error;
use constants::BLOCK_MAX_FUTURE;
@ -20,6 +20,17 @@ impl<'a> HeaderVerifier<'a> {
pub fn check(&self) -> Result<(), Error> {
try!(self.proof_of_work.check());
try!(self.timestamp.check());
Ok(())
}
/// backwards test compatibility
/// TODO: get rid of this
pub fn check_with_pow(&self, pow: bool) -> Result<(), Error> {
if pow {
try!(self.proof_of_work.check());
}
try!(self.timestamp.check());
Ok(())
}
}

View File

@ -5,17 +5,10 @@ use primitives::uint::U256;
use network::Magic;
use db::{BlockHeaderProvider, BlockRef};
const RETARGETING_FACTOR: u32 = 4;
const TARGET_SPACING_SECONDS: u32 = 10 * 60;
const DOUBLE_SPACING_SECONDS: u32 = 2 * TARGET_SPACING_SECONDS;
const TARGET_TIMESPAN_SECONDS: u32 = 2 * 7 * 24 * 60 * 60;
// The upper and lower bounds for retargeting timespan
const MIN_TIMESPAN: u32 = TARGET_TIMESPAN_SECONDS / RETARGETING_FACTOR;
const MAX_TIMESPAN: u32 = TARGET_TIMESPAN_SECONDS * RETARGETING_FACTOR;
// Target number of blocks, 2 weeks, 2016
pub const RETARGETING_INTERVAL: u32 = TARGET_TIMESPAN_SECONDS / TARGET_SPACING_SECONDS;
use constants::{
DOUBLE_SPACING_SECONDS,
TARGET_TIMESPAN_SECONDS, MIN_TIMESPAN, MAX_TIMESPAN, RETARGETING_INTERVAL
};
pub fn is_retarget_height(height: u32) -> bool {
height % RETARGETING_INTERVAL == 0
@ -142,10 +135,6 @@ pub fn block_reward_satoshi(block_height: u32) -> u64 {
res
}
pub fn age(protocol_time: u32) -> i64 {
::time::get_time().sec - protocol_time as i64
}
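
For context, block_reward_satoshi presumably implements the standard Bitcoin subsidy schedule: 50 BTC, halved every 210,000 blocks. A sketch under that assumption, not necessarily the crate's exact body:

// assumed: standard subsidy schedule, 50 BTC halved every 210_000 blocks
fn block_reward_satoshi(block_height: u32) -> u64 {
    let mut res = 50 * 100_000_000u64; // 50 BTC expressed in satoshi
    for _ in 0..block_height / 210_000 {
        res /= 2; // one halving per completed interval
    }
    res
}

fn main() {
    assert_eq!(block_reward_satoshi(0), 5_000_000_000);
    assert_eq!(block_reward_satoshi(209_999), 5_000_000_000);
    assert_eq!(block_reward_satoshi(210_000), 2_500_000_000);
}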
#[cfg(test)]
mod tests {
use primitives::hash::H256;