--verification-level && --verification-edge

This commit is contained in:
Svyatoslav Nikolsky 2017-08-10 14:54:06 +03:00
parent 9b108c0789
commit 46a7c4f7f2
13 changed files with 290 additions and 65 deletions

View File

@ -4,6 +4,7 @@
use compact::Compact;
use ser::{Stream, Serializable};
use chain::Block;
use primitives::hash::H256;
use super::ConsensusParams;
const MAGIC_MAINNET: u32 = 0xD9B4BEF9;
@ -88,6 +89,14 @@ impl Magic {
}
}
pub fn default_verification_edge(&self) -> H256 {
match *self {
Magic::Mainnet => H256::from_reversed_str("0000000000000000030abc968e1bd635736e880b946085c93152969b9a81a6e2"),
Magic::Testnet => H256::from_reversed_str("000000000871ee6842d3648317ccc8a435eb8cc3c2429aee94faff9ba26b05a0"),
_ => self.genesis_block().hash(),
}
}
pub fn consensus_params(&self) -> ConsensusParams {
ConsensusParams::with_magic(*self)
}

View File

@ -78,6 +78,16 @@ args:
help: Execute command when the best block changes (%s in cmd is replaced by block hash)
takes_value: true
value_name: command
- verification-level:
long: verification-level
help: Blocks verification level - full (default), header (scripts are not verified), none (no verification at all)
takes_value: true
value_name: LEVEL
- verification-edge:
long: verification-edge
help: Non-default verification-level is applied until the block with the given hash is reached.
takes_value: true
value_name: BLOCK
subcommands:
- import:
about: Import blocks from bitcoin core database
@ -85,6 +95,3 @@ subcommands:
- PATH:
required: true
help: Path of the bitcoin core database
- skip-verification:
long: skip-verification
help: Skip blocks verification

View File

@ -9,9 +9,8 @@ pub fn import(cfg: Config, matches: &ArgMatches) -> Result<(), String> {
try!(init_db(&cfg, &db));
let blk_path = matches.value_of("PATH").expect("PATH is required in cli.yml; qed");
let skip_verification = matches.is_present("skip-verification");
let mut writer = create_sync_blocks_writer(db, cfg.magic, !skip_verification);
let mut writer = create_sync_blocks_writer(db, cfg.magic, cfg.verification_params);
let blk_dir = try!(::import::open_blk_dir(blk_path).map_err(|_| "Import directory does not exist".to_owned()));
let mut counter = 0;

View File

@ -111,7 +111,7 @@ pub fn start(cfg: config::Config) -> Result<(), String> {
};
let sync_peers = create_sync_peers();
let local_sync_node = create_local_sync_node(cfg.magic, db.clone(), sync_peers.clone());
let local_sync_node = create_local_sync_node(cfg.magic, db.clone(), sync_peers.clone(), cfg.verification_params);
let sync_connection_factory = create_sync_connection_factory(sync_peers.clone(), local_sync_node.clone());
if let Some(block_notify_command) = cfg.block_notify_command {

View File

@ -5,7 +5,10 @@ use p2p::InternetProtocol;
use seednodes::{mainnet_seednodes, testnet_seednodes};
use rpc_apis::ApiSet;
use {USER_AGENT, REGTEST_USER_AGENT};
use primitives::hash::H256;
use rpc::HttpConfiguration as RpcHttpConfig;
use verification::VerificationLevel;
use sync::VerificationParameters;
pub struct Config {
pub magic: Magic,
@ -22,6 +25,7 @@ pub struct Config {
pub internet_protocol: InternetProtocol,
pub rpc_config: RpcHttpConfig,
pub block_notify_command: Option<String>,
pub verification_params: VerificationParameters,
}
pub const DEFAULT_DB_CACHE: usize = 512;
@ -97,6 +101,22 @@ pub fn parse(matches: &clap::ArgMatches) -> Result<Config, String> {
None => None,
};
let verification_level = match matches.value_of("verification-level") {
Some(s) if s == "full" => VerificationLevel::Full,
Some(s) if s == "header" => VerificationLevel::Header,
Some(s) if s == "none" => VerificationLevel::NoVerification,
Some(s) => return Err(format!("Invalid verification level: {}", s)),
None => VerificationLevel::Full,
};
let verification_edge = match matches.value_of("verification-edge") {
Some(s) if verification_level != VerificationLevel::Full => {
let edge: H256 = s.parse().map_err(|_| "Invalid verification edge".to_owned())?;
edge.reversed()
},
_ => magic.default_verification_edge(),
};
let config = Config {
print_to_console: print_to_console,
magic: magic,
@ -112,6 +132,10 @@ pub fn parse(matches: &clap::ArgMatches) -> Result<Config, String> {
internet_protocol: only_net,
rpc_config: rpc_config,
block_notify_command: block_notify_command,
verification_params: VerificationParameters {
verification_level: verification_level,
verification_edge: verification_edge,
},
};
Ok(config)

View File

@ -20,6 +20,7 @@ extern crate sync;
extern crate import;
extern crate rpc as ethcore_rpc;
extern crate primitives;
extern crate verification;
mod commands;
mod config;

View File

@ -10,6 +10,7 @@ use synchronization_verifier::{Verifier, SyncVerifier, VerificationTask,
VerificationSink, BlockVerificationSink, TransactionVerificationSink};
use types::StorageRef;
use utils::OrphanBlocksPool;
use VerificationParameters;
/// Maximum number of orphaned in-memory blocks
pub const MAX_ORPHANED_BLOCKS: usize = 1024;
@ -24,8 +25,6 @@ pub struct BlocksWriter {
verifier: SyncVerifier<BlocksWriterSink>,
/// Verification events receiver
sink: Arc<BlocksWriterSinkData>,
/// True if verification is enabled
verification: bool,
}
/// Verification events receiver
@ -44,16 +43,15 @@ struct BlocksWriterSinkData {
impl BlocksWriter {
/// Create new synchronous blocks writer
pub fn new(storage: StorageRef, network: Magic, verification: bool) -> BlocksWriter {
pub fn new(storage: StorageRef, network: Magic, verification_params: VerificationParameters) -> BlocksWriter {
let sink_data = Arc::new(BlocksWriterSinkData::new(storage.clone()));
let sink = Arc::new(BlocksWriterSink::new(sink_data.clone()));
let verifier = SyncVerifier::new(network, storage.clone(), sink);
let verifier = SyncVerifier::new(network, storage.clone(), sink, verification_params);
BlocksWriter {
storage: storage,
orphaned_blocks_pool: OrphanBlocksPool::new(),
verifier: verifier,
sink: sink_data,
verification: verification,
}
}
@ -78,16 +76,9 @@ impl BlocksWriter {
let mut verification_queue: VecDeque<chain::IndexedBlock> = self.orphaned_blocks_pool.remove_blocks_for_parent(block.hash());
verification_queue.push_front(block);
while let Some(block) = verification_queue.pop_front() {
if self.verification {
self.verifier.verify_block(block);
if let Some(err) = self.sink.error() {
return Err(err);
}
} else {
let hash = block.hash().clone();
self.storage.insert(block).map_err(Error::Database)?;
self.storage.canonize(&hash).map_err(Error::Database)?;
self.verifier.verify_block(block);
if let Some(err) = self.sink.error() {
return Err(err);
}
}
@ -157,13 +148,22 @@ mod tests {
use std::sync::Arc;
use db::{BlockChainDatabase};
use network::Magic;
use verification::VerificationLevel;
use super::super::Error;
use super::{BlocksWriter, MAX_ORPHANED_BLOCKS};
use VerificationParameters;
fn default_verification_params() -> VerificationParameters {
VerificationParameters {
verification_level: VerificationLevel::Full,
verification_edge: 0u8.into(),
}
}
#[test]
fn blocks_writer_appends_blocks() {
let db = Arc::new(BlockChainDatabase::init_test_chain(vec![test_data::genesis().into()]));
let mut blocks_target = BlocksWriter::new(db.clone(), Magic::Testnet, true);
let mut blocks_target = BlocksWriter::new(db.clone(), Magic::Testnet, default_verification_params());
blocks_target.append_block(test_data::block_h1().into()).expect("Expecting no error");
assert_eq!(db.best_block().number, 1);
}
@ -172,7 +172,7 @@ mod tests {
fn blocks_writer_verification_error() {
let db = Arc::new(BlockChainDatabase::init_test_chain(vec![test_data::genesis().into()]));
let blocks = test_data::build_n_empty_blocks_from_genesis((MAX_ORPHANED_BLOCKS + 2) as u32, 1);
let mut blocks_target = BlocksWriter::new(db.clone(), Magic::Testnet, true);
let mut blocks_target = BlocksWriter::new(db.clone(), Magic::Testnet, default_verification_params());
for (index, block) in blocks.into_iter().skip(1).enumerate() {
match blocks_target.append_block(block.into()) {
Err(Error::TooManyOrphanBlocks) if index == MAX_ORPHANED_BLOCKS => (),
@ -186,7 +186,7 @@ mod tests {
#[test]
fn blocks_writer_out_of_order_block() {
let db = Arc::new(BlockChainDatabase::init_test_chain(vec![test_data::genesis().into()]));
let mut blocks_target = BlocksWriter::new(db.clone(), Magic::Testnet, true);
let mut blocks_target = BlocksWriter::new(db.clone(), Magic::Testnet, default_verification_params());
let wrong_block = test_data::block_builder()
.header().parent(test_data::genesis().hash()).build()
@ -201,7 +201,7 @@ mod tests {
#[test]
fn blocks_writer_append_to_existing_db() {
let db = Arc::new(BlockChainDatabase::init_test_chain(vec![test_data::genesis().into()]));
let mut blocks_target = BlocksWriter::new(db.clone(), Magic::Testnet, true);
let mut blocks_target = BlocksWriter::new(db.clone(), Magic::Testnet, default_verification_params());
assert!(blocks_target.append_block(test_data::genesis().into()).is_ok());
assert_eq!(db.best_block().number, 0);

View File

@ -56,6 +56,16 @@ pub enum Error {
Verification(String),
}
#[derive(Debug)]
/// Verification parameters.
pub struct VerificationParameters {
/// Blocks verification level.
pub verification_level: verification::VerificationLevel,
/// Blocks verification edge: all blocks before this one are validated using verification_level.
/// This block and all blocks after it are validated using VerificationLevel::Full.
pub verification_edge: H256,
}
/// Synchronization events listener
pub trait SyncListener: Send + 'static {
/// Called when node switches to synchronization state
@ -65,8 +75,8 @@ pub trait SyncListener: Send + 'static {
}
/// Create blocks writer.
pub fn create_sync_blocks_writer(db: db::SharedStore, network: Magic, verification: bool) -> blocks_writer::BlocksWriter {
blocks_writer::BlocksWriter::new(db, network, verification)
pub fn create_sync_blocks_writer(db: db::SharedStore, network: Magic, verification_params: VerificationParameters) -> blocks_writer::BlocksWriter {
blocks_writer::BlocksWriter::new(db, network, verification_params)
}
/// Create synchronization peers
@ -77,7 +87,7 @@ pub fn create_sync_peers() -> PeersRef {
}
/// Creates local sync node for given `db`
pub fn create_local_sync_node(network: Magic, db: db::SharedStore, peers: PeersRef) -> LocalNodeRef {
pub fn create_local_sync_node(network: Magic, db: db::SharedStore, peers: PeersRef, verification_params: VerificationParameters) -> LocalNodeRef {
use miner::MemoryPool;
use synchronization_chain::Chain as SyncChain;
use synchronization_executor::LocalSynchronizationTaskExecutor as SyncExecutor;
@ -103,7 +113,7 @@ pub fn create_local_sync_node(network: Magic, db: db::SharedStore, peers: PeersR
let sync_server = Arc::new(ServerImpl::new(peers.clone(), db.clone(), memory_pool.clone(), sync_executor.clone()));
let sync_client_core = SynchronizationClientCore::new(sync_client_config, sync_state.clone(), peers.clone(), sync_executor.clone(), sync_chain, chain_verifier.clone());
let verifier_sink = Arc::new(CoreVerificationSink::new(sync_client_core.clone()));
let verifier = AsyncVerifier::new(chain_verifier, db.clone(), memory_pool.clone(), verifier_sink);
let verifier = AsyncVerifier::new(chain_verifier, db.clone(), memory_pool.clone(), verifier_sink, verification_params);
let sync_client = SynchronizationClient::new(sync_state.clone(), sync_client_core, verifier);
Arc::new(SyncNode::new(network, db, memory_pool, peers, sync_state, sync_executor, sync_client, sync_server))
}

View File

@ -1,15 +1,18 @@
use std::collections::VecDeque;
use std::sync::Arc;
use std::sync::mpsc::{channel, Sender, Receiver};
use std::sync::atomic::{AtomicBool, Ordering};
use std::thread;
use parking_lot::Mutex;
use time::get_time;
use chain::{IndexedBlock, IndexedTransaction};
use network::Magic;
use primitives::hash::H256;
use verification::{BackwardsCompatibleChainVerifier as ChainVerifier, Verify as VerificationVerify};
use verification::{BackwardsCompatibleChainVerifier as ChainVerifier, Verify as VerificationVerify,
Error as VerificationError, VerificationLevel};
use types::{BlockHeight, StorageRef, MemoryPoolRef};
use utils::MemoryPoolTransactionOutputProvider;
use VerificationParameters;
/// Block verification events sink
pub trait BlockVerificationSink : Send + Sync + 'static {
@ -58,6 +61,45 @@ pub struct AsyncVerifier {
verification_worker_thread: Option<thread::JoinHandle<()>>,
}
/// Chain verifier wrapper to deal with verification parameters.
pub struct ChainVerifierWrapper {
/// Original verifier.
pub verifier: Arc<ChainVerifier>,
/// Verification parameters.
verification_params: VerificationParameters,
/// Is verification edge passed.
pub enforce_full_verification: AtomicBool,
}
impl ChainVerifierWrapper {
/// Create new chain verifier wrapper.
pub fn new(verifier: Arc<ChainVerifier>, storage: &StorageRef, verification_params: VerificationParameters) -> Self {
let enforce_full_verification = AtomicBool::new(storage.contains_block(verification_params.verification_edge.clone().into()));
ChainVerifierWrapper {
verifier: verifier,
verification_params: verification_params,
enforce_full_verification: enforce_full_verification,
}
}
/// Verify block.
pub fn verify_block(&self, block: &IndexedBlock) -> Result<(), VerificationError> {
let enforce_full_verification = if block.hash() == &self.verification_params.verification_edge {
self.enforce_full_verification.store(true, Ordering::Relaxed);
true
} else {
self.enforce_full_verification.load(Ordering::Relaxed)
};
let verification_level = if enforce_full_verification {
VerificationLevel::Full
} else {
self.verification_params.verification_level
};
self.verifier.verify(verification_level, block)
}
}
impl VerificationTask {
/// Returns transaction reference if it is transaction verification task
pub fn transaction(&self) -> Option<&IndexedTransaction> {
@ -70,13 +112,14 @@ impl VerificationTask {
impl AsyncVerifier {
/// Create new async verifier
pub fn new<T: VerificationSink>(verifier: Arc<ChainVerifier>, storage: StorageRef, memory_pool: MemoryPoolRef, sink: Arc<T>) -> Self {
pub fn new<T: VerificationSink>(verifier: Arc<ChainVerifier>, storage: StorageRef, memory_pool: MemoryPoolRef, sink: Arc<T>, verification_params: VerificationParameters) -> Self {
let (verification_work_sender, verification_work_receiver) = channel();
AsyncVerifier {
verification_work_sender: Mutex::new(verification_work_sender),
verification_worker_thread: Some(thread::Builder::new()
.name("Sync verification thread".to_string())
.spawn(move || {
let verifier = ChainVerifierWrapper::new(verifier, &storage, verification_params);
AsyncVerifier::verification_worker_proc(sink, storage, memory_pool, verifier, verification_work_receiver)
})
.expect("Error creating sync verification thread"))
@ -84,7 +127,7 @@ impl AsyncVerifier {
}
/// Thread procedure for handling verification tasks
fn verification_worker_proc<T: VerificationSink>(sink: Arc<T>, storage: StorageRef, memory_pool: MemoryPoolRef, verifier: Arc<ChainVerifier>, work_receiver: Receiver<VerificationTask>) {
fn verification_worker_proc<T: VerificationSink>(sink: Arc<T>, storage: StorageRef, memory_pool: MemoryPoolRef, verifier: ChainVerifierWrapper, work_receiver: Receiver<VerificationTask>) {
while let Ok(task) = work_receiver.recv() {
if !AsyncVerifier::execute_single_task(&sink, &storage, &memory_pool, &verifier, task) {
break;
@ -95,7 +138,7 @@ impl AsyncVerifier {
}
/// Execute single verification task
pub fn execute_single_task<T: VerificationSink>(sink: &Arc<T>, storage: &StorageRef, memory_pool: &MemoryPoolRef, verifier: &Arc<ChainVerifier>, task: VerificationTask) -> bool {
pub fn execute_single_task<T: VerificationSink>(sink: &Arc<T>, storage: &StorageRef, memory_pool: &MemoryPoolRef, verifier: &ChainVerifierWrapper, task: VerificationTask) -> bool {
// block verification && insertion can lead to reorganization
// => transactions from decanonized blocks should be put back to the MemoryPool
// => they must be verified again
@ -107,7 +150,7 @@ impl AsyncVerifier {
match task {
VerificationTask::VerifyBlock(block) => {
// verify block
match verifier.verify(&block) {
match verifier.verify_block(&block) {
Ok(_) => {
if let Some(tasks) = sink.on_block_verification_success(block) {
tasks_queue.extend(tasks);
@ -127,7 +170,7 @@ impl AsyncVerifier {
},
Ok(tx_output_provider) => {
let time: u32 = get_time().sec as u32;
match verifier.verify_mempool_transaction(&tx_output_provider, height, time, &transaction.raw) {
match verifier.verifier.verify_mempool_transaction(&tx_output_provider, height, time, &transaction.raw) {
Ok(_) => sink.on_transaction_verification_success(transaction.into()),
Err(e) => sink.on_transaction_verification_error(&format!("{:?}", e), &transaction.hash),
}
@ -175,26 +218,27 @@ impl Verifier for AsyncVerifier {
/// Synchronous synchronization verifier
pub struct SyncVerifier<T: VerificationSink> {
/// Verifier
verifier: ChainVerifier,
verifier: ChainVerifierWrapper,
/// Verification sink
sink: Arc<T>,
}
impl<T> SyncVerifier<T> where T: VerificationSink {
/// Create new sync verifier
pub fn new(network: Magic, storage: StorageRef, sink: Arc<T>) -> Self {
pub fn new(network: Magic, storage: StorageRef, sink: Arc<T>, verification_params: VerificationParameters) -> Self {
let verifier = ChainVerifier::new(storage.clone(), network);
let verifier = ChainVerifierWrapper::new(Arc::new(verifier), &storage, verification_params);
SyncVerifier {
verifier: verifier,
sink: sink,
}
}
}
}
impl<T> Verifier for SyncVerifier<T> where T: VerificationSink {
/// Verify block
fn verify_block(&self, block: IndexedBlock) {
match self.verifier.verify(&block) {
match self.verifier.verify_block(&block) {
Ok(_) => {
// SyncVerifier is used for bulk blocks import only
// => there are no memory pool
@ -213,15 +257,21 @@ impl<T> Verifier for SyncVerifier<T> where T: VerificationSink {
#[cfg(test)]
pub mod tests {
extern crate test_data;
use std::sync::Arc;
use std::sync::atomic::Ordering;
use std::collections::{HashSet, HashMap};
use verification::BackwardsCompatibleChainVerifier as ChainVerifier;
use db::BlockChainDatabase;
use network::Magic;
use verification::{VerificationLevel, BackwardsCompatibleChainVerifier as ChainVerifier, Error as VerificationError, TransactionError};
use synchronization_client_core::CoreVerificationSink;
use synchronization_executor::tests::DummyTaskExecutor;
use primitives::hash::H256;
use chain::{IndexedBlock, IndexedTransaction};
use super::{Verifier, BlockVerificationSink, TransactionVerificationSink, AsyncVerifier, VerificationTask};
use super::{Verifier, BlockVerificationSink, TransactionVerificationSink, AsyncVerifier, VerificationTask, ChainVerifierWrapper};
use types::{BlockHeight, StorageRef, MemoryPoolRef};
use VerificationParameters;
#[derive(Default)]
pub struct DummyVerifier {
@ -230,7 +280,7 @@ pub mod tests {
actual_checks: HashSet<H256>,
storage: Option<StorageRef>,
memory_pool: Option<MemoryPoolRef>,
verifier: Option<Arc<ChainVerifier>>,
verifier: Option<ChainVerifierWrapper>,
}
impl DummyVerifier {
@ -247,7 +297,10 @@ pub mod tests {
}
pub fn set_verifier(&mut self, verifier: Arc<ChainVerifier>) {
self.verifier = Some(verifier);
self.verifier = Some(ChainVerifierWrapper::new(verifier, self.storage.as_ref().unwrap(), VerificationParameters {
verification_level: VerificationLevel::Full,
verification_edge: 0u8.into(),
}));
}
pub fn error_when_verifying(&mut self, hash: H256, err: &str) {
@ -293,4 +346,98 @@ pub mod tests {
}
}
}
#[test]
fn verifier_wrapper_switches_to_full_mode() {
let storage: StorageRef = Arc::new(BlockChainDatabase::init_test_chain(vec![test_data::genesis().into()]));
let verifier = Arc::new(ChainVerifier::new(storage.clone(), Magic::Mainnet));
// switching to full verification when block is already in db
assert_eq!(ChainVerifierWrapper::new(verifier.clone(), &storage, VerificationParameters {
verification_level: VerificationLevel::NoVerification,
verification_edge: test_data::genesis().hash(),
}).enforce_full_verification.load(Ordering::Relaxed), true);
// switching to full verification when block with given hash is coming
let wrapper = ChainVerifierWrapper::new(verifier, &storage, VerificationParameters {
verification_level: VerificationLevel::NoVerification,
verification_edge: test_data::block_h1().hash(),
});
assert_eq!(wrapper.enforce_full_verification.load(Ordering::Relaxed), false);
let block: IndexedBlock = test_data::block_h1().into();
let _ = wrapper.verify_block(&block);
assert_eq!(wrapper.enforce_full_verification.load(Ordering::Relaxed), true);
}
#[test]
fn verification_level_header_accept_incorrect_transaction() {
let mut blocks: Vec<IndexedBlock> = vec![test_data::genesis().into()];
let mut rolling_hash = blocks[0].hash().clone();
for _ in 1..101 {
let next_block = test_data::block_builder()
.transaction()
.coinbase()
.output().value(5000000000).build()
.build()
.merkled_header()
.parent(rolling_hash.clone())
.bits(Magic::Unitest.max_bits())
.build()
.build();
rolling_hash = next_block.hash();
blocks.push(next_block.into());
}
let coinbase_transaction_hash = blocks[0].transactions[0].hash.clone();
let last_block_hash = blocks[blocks.len() - 1].hash().clone();
let storage: StorageRef = Arc::new(BlockChainDatabase::init_test_chain(blocks));
let verifier = Arc::new(ChainVerifier::new(storage.clone(), Magic::Unitest));
let bad_transaction_block: IndexedBlock = test_data::block_builder()
.transaction().coinbase().output().value(50).build().build()
.transaction()
.input().hash(coinbase_transaction_hash).build()
.output().value(1000).build()
.build()
.merkled_header()
.parent(last_block_hash)
.bits(Magic::Unitest.max_bits())
.build()
.build().into();
// Ok(()) when tx script is not checked
let wrapper = ChainVerifierWrapper::new(verifier.clone(), &storage, VerificationParameters {
verification_level: VerificationLevel::Header,
verification_edge: 1.into(),
});
assert_eq!(wrapper.verify_block(&bad_transaction_block), Ok(()));
// Error when tx script is checked
let wrapper = ChainVerifierWrapper::new(verifier, &storage, VerificationParameters {
verification_level: VerificationLevel::Full,
verification_edge: 1.into(),
});
assert_eq!(wrapper.verify_block(&bad_transaction_block), Err(VerificationError::Transaction(1, TransactionError::Signature(0))));
}
#[test]
fn verification_level_none_accept_incorrect_block() {
let storage: StorageRef = Arc::new(BlockChainDatabase::init_test_chain(vec![test_data::genesis().into()]));
let verifier = Arc::new(ChainVerifier::new(storage.clone(), Magic::Unitest));
let bad_block: IndexedBlock = test_data::block_builder().header().build().build().into();
// Ok(()) when nothing is verified
let wrapper = ChainVerifierWrapper::new(verifier.clone(), &storage, VerificationParameters {
verification_level: VerificationLevel::NoVerification,
verification_edge: 1.into(),
});
assert_eq!(wrapper.verify_block(&bad_block), Ok(()));
// Error when everything is verified
let wrapper = ChainVerifierWrapper::new(verifier, &storage, VerificationParameters {
verification_level: VerificationLevel::Full,
verification_edge: 1.into(),
});
assert_eq!(wrapper.verify_block(&bad_block), Err(VerificationError::Empty));
}
}

View File

@ -8,6 +8,7 @@ use accept_header::HeaderAcceptor;
use accept_transaction::TransactionAcceptor;
use deployments::Deployments;
use duplex_store::DuplexTransactionOutputProvider;
use VerificationLevel;
pub struct ChainAcceptor<'a> {
pub block: BlockAcceptor<'a>,
@ -16,10 +17,11 @@ pub struct ChainAcceptor<'a> {
}
impl<'a> ChainAcceptor<'a> {
pub fn new(store: &'a Store, network: Magic, block: CanonBlock<'a>, height: u32, deployments: &'a Deployments) -> Self {
pub fn new(store: &'a Store, network: Magic, verification_level: VerificationLevel, block: CanonBlock<'a>, height: u32, deployments: &'a Deployments) -> Self {
trace!(target: "verification", "Block verification {}", block.hash().to_reversed_str());
let output_store = DuplexTransactionOutputProvider::new(store.as_transaction_output_provider(), block.raw());
let headers = store.as_block_header_provider();
ChainAcceptor {
block: BlockAcceptor::new(store.as_transaction_output_provider(), network, block, height, deployments, headers),
header: HeaderAcceptor::new(headers, network, block.header(), height, deployments),
@ -31,6 +33,7 @@ impl<'a> ChainAcceptor<'a> {
output_store,
network,
tx,
verification_level,
block.hash(),
height,
block.header.raw.time,

View File

@ -8,6 +8,7 @@ use sigops::transaction_sigops;
use canon::CanonTransaction;
use constants::{COINBASE_MATURITY, MAX_BLOCK_SIGOPS};
use error::TransactionError;
use VerificationLevel;
pub struct TransactionAcceptor<'a> {
pub bip30: TransactionBip30<'a>,
@ -27,6 +28,7 @@ impl<'a> TransactionAcceptor<'a> {
output_store: DuplexTransactionOutputProvider<'a>,
network: Magic,
transaction: CanonTransaction<'a>,
verification_level: VerificationLevel,
block_hash: &'a H256,
height: u32,
time: u32,
@ -42,7 +44,7 @@ impl<'a> TransactionAcceptor<'a> {
maturity: TransactionMaturity::new(transaction, meta_store, height),
overspent: TransactionOverspent::new(transaction, output_store),
double_spent: TransactionDoubleSpend::new(transaction, output_store),
eval: TransactionEval::new(transaction, output_store, &params, height, time, deployments, headers),
eval: TransactionEval::new(transaction, output_store, verification_level, &params, height, time, deployments, headers),
}
}
@ -88,7 +90,7 @@ impl<'a> MemoryPoolTransactionAcceptor<'a> {
overspent: TransactionOverspent::new(transaction, output_store),
sigops: TransactionSigops::new(transaction, output_store, params.clone(), MAX_BLOCK_SIGOPS, time),
double_spent: TransactionDoubleSpend::new(transaction, output_store),
eval: TransactionEval::new(transaction, output_store, &params, height, time, deployments, headers),
eval: TransactionEval::new(transaction, output_store, VerificationLevel::Full, &params, height, time, deployments, headers),
}
}
@ -273,6 +275,7 @@ impl<'a> TransactionSigops<'a> {
pub struct TransactionEval<'a> {
transaction: CanonTransaction<'a>,
store: DuplexTransactionOutputProvider<'a>,
verification_level: VerificationLevel,
verify_p2sh: bool,
verify_locktime: bool,
verify_checksequence: bool,
@ -283,6 +286,7 @@ impl<'a> TransactionEval<'a> {
fn new(
transaction: CanonTransaction<'a>,
store: DuplexTransactionOutputProvider<'a>,
verification_level: VerificationLevel,
params: &ConsensusParams,
height: u32,
time: u32,
@ -298,6 +302,7 @@ impl<'a> TransactionEval<'a> {
TransactionEval {
transaction: transaction,
store: store,
verification_level: verification_level,
verify_p2sh: verify_p2sh,
verify_locktime: verify_locktime,
verify_checksequence: verify_checksequence,
@ -306,6 +311,11 @@ impl<'a> TransactionEval<'a> {
}
fn check(&self) -> Result<(), TransactionError> {
if self.verification_level == VerificationLevel::Header
|| self.verification_level == VerificationLevel::NoVerification {
return Ok(());
}
if self.transaction.raw.is_coinbase() {
return Ok(());
}

View File

@ -13,7 +13,7 @@ use verify_transaction::MemoryPoolTransactionVerifier;
use accept_chain::ChainAcceptor;
use accept_transaction::MemoryPoolTransactionAcceptor;
use deployments::Deployments;
use Verify;
use {Verify, VerificationLevel};
pub struct BackwardsCompatibleChainVerifier {
store: SharedStore,
@ -30,7 +30,11 @@ impl BackwardsCompatibleChainVerifier {
}
}
fn verify_block(&self, block: &IndexedBlock) -> Result<(), Error> {
fn verify_block(&self, verification_level: VerificationLevel, block: &IndexedBlock) -> Result<(), Error> {
if verification_level == VerificationLevel::NoVerification {
return Ok(());
}
let current_time = ::time::get_time().sec as u32;
// first run pre-verification
let chain_verifier = ChainVerifier::new(block, self.network, current_time);
@ -46,21 +50,21 @@ impl BackwardsCompatibleChainVerifier {
},
BlockOrigin::CanonChain { block_number } => {
let canon_block = CanonBlock::new(block);
let chain_acceptor = ChainAcceptor::new(self.store.as_store(), self.network, canon_block, block_number, &self.deployments);
let chain_acceptor = ChainAcceptor::new(self.store.as_store(), self.network, verification_level, canon_block, block_number, &self.deployments);
chain_acceptor.check()?;
},
BlockOrigin::SideChain(origin) => {
let block_number = origin.block_number;
let fork = self.store.fork(origin)?;
let canon_block = CanonBlock::new(block);
let chain_acceptor = ChainAcceptor::new(fork.store(), self.network, canon_block, block_number, &self.deployments);
let chain_acceptor = ChainAcceptor::new(fork.store(), self.network, verification_level, canon_block, block_number, &self.deployments);
chain_acceptor.check()?;
},
BlockOrigin::SideChainBecomingCanonChain(origin) => {
let block_number = origin.block_number;
let fork = self.store.fork(origin)?;
let canon_block = CanonBlock::new(block);
let chain_acceptor = ChainAcceptor::new(fork.store(), self.network, canon_block, block_number, &self.deployments);
let chain_acceptor = ChainAcceptor::new(fork.store(), self.network, verification_level, canon_block, block_number, &self.deployments);
chain_acceptor.check()?;
},
}
@ -114,8 +118,8 @@ impl BackwardsCompatibleChainVerifier {
}
impl Verify for BackwardsCompatibleChainVerifier {
fn verify(&self, block: &IndexedBlock) -> Result<(), Error> {
let result = self.verify_block(block);
fn verify(&self, level: VerificationLevel, block: &IndexedBlock) -> Result<(), Error> {
let result = self.verify_block(level, block);
trace!(
target: "verification", "Block {} (transactions: {}) verification finished. Result {:?}",
block.hash().to_reversed_str(),
@ -136,14 +140,14 @@ mod tests {
use network::Magic;
use script;
use super::BackwardsCompatibleChainVerifier as ChainVerifier;
use {Verify, Error, TransactionError};
use {Verify, Error, TransactionError, VerificationLevel};
#[test]
fn verify_orphan() {
let storage = Arc::new(BlockChainDatabase::init_test_chain(vec![test_data::genesis().into()]));
let b2 = test_data::block_h2().into();
let verifier = ChainVerifier::new(storage, Magic::Unitest);
assert_eq!(Err(Error::Database(DBError::UnknownParent)), verifier.verify(&b2));
assert_eq!(Err(Error::Database(DBError::UnknownParent)), verifier.verify(VerificationLevel::Full, &b2));
}
#[test]
@ -151,7 +155,7 @@ mod tests {
let storage = Arc::new(BlockChainDatabase::init_test_chain(vec![test_data::genesis().into()]));
let b1 = test_data::block_h1();
let verifier = ChainVerifier::new(storage, Magic::Unitest);
assert!(verifier.verify(&b1.into()).is_ok());
assert!(verifier.verify(VerificationLevel::Full, &b1.into()).is_ok());
}
@ -164,7 +168,7 @@ mod tests {
]);
let b1 = test_data::block_h2();
let verifier = ChainVerifier::new(Arc::new(storage), Magic::Unitest);
assert!(verifier.verify(&b1.into()).is_ok());
assert!(verifier.verify(VerificationLevel::Full, &b1.into()).is_ok());
}
#[test]
@ -199,7 +203,7 @@ mod tests {
TransactionError::Maturity,
));
assert_eq!(expected, verifier.verify(&block.into()));
assert_eq!(expected, verifier.verify(VerificationLevel::Full, &block.into()));
}
#[test]
@ -231,7 +235,7 @@ mod tests {
.build();
let verifier = ChainVerifier::new(Arc::new(storage), Magic::Unitest);
assert!(verifier.verify(&block.into()).is_ok());
assert!(verifier.verify(VerificationLevel::Full, &block.into()).is_ok());
}
#[test]
@ -267,7 +271,7 @@ mod tests {
.build();
let verifier = ChainVerifier::new(Arc::new(storage), Magic::Unitest);
assert!(verifier.verify(&block.into()).is_ok());
assert!(verifier.verify(VerificationLevel::Full, &block.into()).is_ok());
}
#[test]
@ -308,7 +312,7 @@ mod tests {
let verifier = ChainVerifier::new(Arc::new(storage), Magic::Unitest);
let expected = Err(Error::Transaction(2, TransactionError::Overspend));
assert_eq!(expected, verifier.verify(&block.into()));
assert_eq!(expected, verifier.verify(VerificationLevel::Full, &block.into()));
}
#[test]
@ -348,7 +352,7 @@ mod tests {
.build();
let verifier = ChainVerifier::new(Arc::new(storage), Magic::Unitest);
assert!(verifier.verify(&block.into()).is_ok());
assert!(verifier.verify(VerificationLevel::Full, &block.into()).is_ok());
}
#[test]
@ -396,7 +400,7 @@ mod tests {
let verifier = ChainVerifier::new(Arc::new(storage), Magic::Unitest);
let expected = Err(Error::MaximumSigops);
assert_eq!(expected, verifier.verify(&block.into()));
assert_eq!(expected, verifier.verify(VerificationLevel::Full, &block.into()));
}
#[test]
@ -423,6 +427,6 @@ mod tests {
actual: 5000000001
});
assert_eq!(expected, verifier.verify(&block.into()));
assert_eq!(expected, verifier.verify(VerificationLevel::Full, &block.into()));
}
}

View File

@ -107,7 +107,18 @@ pub use sigops::transaction_sigops;
pub use timestamp::median_timestamp;
pub use work::{work_required, is_valid_proof_of_work, is_valid_proof_of_work_hash, block_reward_satoshi};
#[derive(Debug, Clone, Copy, PartialEq)]
/// Blocks verification level.
pub enum VerificationLevel {
/// Full verification.
Full,
/// Transaction scripts are not checked.
Header,
/// No verification at all.
NoVerification,
}
/// Interface for block verification
pub trait Verify : Send + Sync {
fn verify(&self, block: &chain::IndexedBlock) -> Result<(), Error>;
fn verify(&self, level: VerificationLevel, block: &chain::IndexedBlock) -> Result<(), Error>;
}