Merge pull request #177 from ethcore/pow_validation

PoW and timestamp-median validation
This commit is contained in:
Marek Kotewicz 2016-11-25 15:07:20 +01:00 committed by GitHub
commit 5b3e34715f
17 changed files with 297 additions and 33 deletions

1
Cargo.lock generated
View File

@ -703,6 +703,7 @@ dependencies = [
"chain 0.1.0",
"primitives 0.1.0",
"serialization 0.1.0",
"time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]

View File

@ -14,7 +14,7 @@ pub trait RepresentH256 {
}
pub use rustc_serialize::hex;
pub use primitives::{hash, bytes};
pub use primitives::{hash, bytes, uint};
pub use self::block::Block;
pub use self::block_header::BlockHeader;

View File

@ -14,6 +14,9 @@ pub trait BlockProvider {
/// resolves header bytes by block reference (number/hash)
fn block_header_bytes(&self, block_ref: BlockRef) -> Option<Bytes>;
/// resolves deserialized block header by block reference (number/hash)
fn block_header(&self, block_ref: BlockRef) -> Option<chain::BlockHeader>;
/// resolves deserialized block body by block reference (number/hash)
fn block(&self, block_ref: BlockRef) -> Option<chain::Block>;

View File

@ -34,6 +34,18 @@ pub enum BlockRef {
Hash(primitives::hash::H256),
}
// Convenience conversion: a bare block number becomes `BlockRef::Number`.
impl From<u32> for BlockRef {
fn from(u: u32) -> Self {
BlockRef::Number(u)
}
}
// Convenience conversion: a block hash becomes `BlockRef::Hash`.
impl From<primitives::hash::H256> for BlockRef {
fn from(hash: primitives::hash::H256) -> Self {
BlockRef::Hash(hash)
}
}
#[derive(PartialEq, Debug)]
pub enum BlockLocation {
Main(u32),

View File

@ -40,6 +40,9 @@ const MAX_FORK_ROUTE_PRESET: usize = 128;
pub trait Store : BlockProvider + BlockStapler + TransactionProvider + TransactionMetaProvider {
/// Returns the current best (chain-tip) block, or `None` if the store is empty.
fn best_block(&self) -> Option<BestBlock>;
/// Returns the header of the current best block, or `None` if the store is empty.
fn best_header(&self) -> Option<chain::BlockHeader>;
}
/// Blockchain storage with rocksdb database
@ -165,7 +168,6 @@ impl Storage {
})
}
/// update transactions metadata in the specified database transaction
fn update_transactions_meta(&self, context: &mut UpdateContext, number: u32, accepted_txs: &[chain::Transaction])
-> Result<(), Error>
@ -385,6 +387,12 @@ impl BlockProvider for Storage {
self.resolve_hash(block_ref).and_then(|h| self.get(COL_BLOCK_HEADERS, &*h))
}
/// Fetches and deserializes a block header by number or hash.
///
/// Returns `None` when the reference does not resolve to stored header bytes.
/// Panics if the stored bytes fail to deserialize (treated as db corruption).
fn block_header(&self, block_ref: BlockRef) -> Option<chain::BlockHeader> {
	let raw = self.block_header_bytes(block_ref);
	raw.map(|bytes| {
		deserialize::<_, chain::BlockHeader>(bytes.as_ref())
			.expect("Error deserializing header, possible db corruption")
	})
}
fn block_transaction_hashes(&self, block_ref: BlockRef) -> Vec<H256> {
self.resolve_hash(block_ref)
.map(|h| self.block_transaction_hashes_by_hash(&h))
@ -596,6 +604,12 @@ impl Store for Storage {
fn best_block(&self) -> Option<BestBlock> {
self.best_block.read().clone()
}
/// Returns the header of the current best block, if a best block is set.
fn best_header(&self) -> Option<chain::BlockHeader> {
	// `map` instead of `and_then(|..| Some(..))`: the closure never yields `None`.
	self.best_block.read().as_ref().map(
		|bb| self.block_header_by_hash(&bb.hash).expect("Best block exists but no such header. Race condition?"),
	)
}
}
#[cfg(test)]

View File

@ -81,6 +81,13 @@ impl BlockProvider for TestStorage {
.map(|ref block| serialization::serialize(block.header()))
}
fn block_header(&self, block_ref: BlockRef) -> Option<chain::BlockHeader> {
let data = self.data.read();
self.resolve_hash(block_ref)
.and_then(|ref h| data.blocks.get(h))
.map(|ref block| block.header().clone())
}
fn block_transaction_hashes(&self, block_ref: BlockRef) -> Vec<H256> {
let data = self.data.read();
self.resolve_hash(block_ref)
@ -180,5 +187,11 @@ impl Store for TestStorage {
fn best_block(&self) -> Option<BestBlock> {
self.data.read().best_block.clone()
}
/// Returns the header of the current best block, if a best block is set.
fn best_header(&self) -> Option<chain::BlockHeader> {
	// `map` instead of `and_then(|..| Some(..))`: the closure never yields `None`.
	self.data.read().best_block.as_ref().map(
		|bb| self.block_header(BlockRef::Hash(bb.hash.clone())).expect("Best block exists but no such header. Race condition?")
	)
}
}

View File

@ -23,11 +23,6 @@ impl ConnectionCounter {
}
}
/// Returns maximum number of outbound connections.
pub fn max_outbound_connections(&self) -> u32 {
self.max_outbound_connections
}
/// Increases inbound connections counter by 1.
pub fn note_new_inbound_connection(&self) {
self.current_inbound_connections.fetch_add(1, Ordering::AcqRel);

View File

@ -12,17 +12,3 @@ impl NonceGenerator for RandomNonce {
rand::random()
}
}
/// Nonce generator that always yields the same preset value
/// (presumably for deterministic handshakes in tests — confirm with callers).
pub struct StaticNonce(u64);
impl StaticNonce {
/// Wraps the given value as a constant nonce source.
pub fn new(nonce: u64) -> Self {
StaticNonce(nonce)
}
}
impl NonceGenerator for StaticNonce {
// Always returns the wrapped value, never randomizes.
fn get(&self) -> u64 {
self.0
}
}

View File

@ -105,6 +105,15 @@ macro_rules! impl_hash {
}
}
// Ordering is lexicographic over the raw byte array.
// NOTE(review): this matches numeric order only if the bytes are stored
// big-endian — confirm against the hash type's internal layout.
impl cmp::PartialOrd for $name {
fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
let self_ref: &[u8] = &self.0;
let other_ref: &[u8] = &other.0;
self_ref.partial_cmp(other_ref)
}
}
impl Hash for $name {
fn hash<H>(&self, state: &mut H) where H: Hasher {
state.write(&self.0);

View File

@ -1,7 +1,7 @@
#![cfg_attr(asm_available, feature(asm))]
extern crate rustc_serialize;
#[macro_use] extern crate heapsize;
extern crate rustc_serialize;
pub mod bytes;
pub mod hash;

View File

@ -7,3 +7,4 @@ authors = ["Nikolay Volf <nikvolf@gmail.com>"]
chain = { path = "../chain" }
primitives = { path = "../primitives" }
serialization = { path = "../serialization" }
time = "0.1"

View File

@ -5,6 +5,11 @@ use chain;
use primitives::hash::H256;
use primitives::bytes::Bytes;
use invoke::{Invoke, Identity};
use std::cell::Cell;
thread_local! {
// Per-thread monotonically increasing counter used to give each built
// block header a distinct timestamp (read and bumped in BlockHeaderBuilder::with_callback).
pub static TIMESTAMP_COUNTER: Cell<u32> = Cell::new(0);
}
pub struct BlockHashBuilder<F=Identity> {
callback: F,
@ -182,7 +187,7 @@ impl<F> BlockHeaderBuilder<F> where F: Invoke<chain::BlockHeader> {
pub fn with_callback(callback: F) -> Self {
BlockHeaderBuilder {
callback: callback,
time: 0,
time: TIMESTAMP_COUNTER.with(|counter| { let val = counter.get(); counter.set(val+1); val }),
nonce: 0,
merkle_root: H256::from(0),
parent: H256::from(0),

View File

@ -3,6 +3,7 @@
extern crate chain;
extern crate primitives;
extern crate serialization as ser;
extern crate time;
use chain::Block;

View File

@ -1,9 +1,9 @@
//! Bitcoin chain verifier
use std::collections::HashSet;
use db::{self, BlockRef, BlockLocation};
use chain;
use super::{Verify, VerificationResult, Chain, Error, TransactionError, ContinueVerify};
use utils;
use {chain, utils};
const BLOCK_MAX_FUTURE: i64 = 2 * 60 * 60; // 2 hours
const COINBASE_MATURITY: u32 = 100; // number of confirmations before a coinbase output may be spent
@ -54,6 +54,14 @@ impl ChainVerifier {
}
fn ordered_verify(&self, block: &chain::Block, at_height: u32) -> Result<(), Error> {
// check that difficulty matches the adjusted level
if let Some(work) = self.work_required(at_height) {
if !self.skip_pow && work != block.header().nbits {
trace!(target: "verification", "pow verification error at height: {}", at_height);
trace!(target: "verification", "expected work: {}, got {}", work, block.header().nbits);
return Err(Error::Difficulty);
}
}
let coinbase_spends = block.transactions()[0].total_spends();
@ -62,7 +70,7 @@ impl ChainVerifier {
let mut total_claimed: u64 = 0;
for (_, input) in tx.inputs.iter().enumerate() {
for input in &tx.inputs {
// Coinbase maturity check
if let Some(previous_meta) = self.store.transaction_meta(&input.previous_output.hash) {
@ -203,6 +211,13 @@ impl ChainVerifier {
return Err(Error::Timestamp);
}
if let Some(median_timestamp) = self.median_timestamp(block) {
if median_timestamp >= block.block_header.time {
trace!(target: "verification", "median timestamp verification failed, median: {}, current: {}", median_timestamp, block.block_header.time);
return Err(Error::Timestamp);
}
}
// todo: serialized_size function is at least suboptimal
let size = ::serialization::Serializable::serialized_size(block);
if size > MAX_BLOCK_SIZE {
@ -258,6 +273,54 @@ impl ChainVerifier {
},
}
}
/// Returns the median timestamp of up to 11 blocks preceding `block`
/// (Bitcoin's "median time past"), or `None` when too few ancestors are known.
fn median_timestamp(&self, block: &chain::Block) -> Option<u32> {
	// Collect into a Vec, NOT a HashSet: equal timestamps in consecutive
	// blocks are valid and must each be counted, otherwise the median is
	// taken over a deduplicated (wrong) sample.
	let mut timestamps = Vec::new();
	let mut block_ref = block.block_header.previous_header_hash.clone().into();
	// TODO: optimize it, so it does not make 11 redundant queries each time
	for _ in 0..11 {
		let previous_header = match self.store.block_header(block_ref) {
			Some(h) => h,
			None => { break; }
		};
		timestamps.push(previous_header.time);
		block_ref = previous_header.previous_header_hash.into();
	}
	if timestamps.len() > 2 {
		timestamps.sort();
		Some(timestamps[timestamps.len() / 2])
	}
	else { None }
}
/// Expected `nbits` for the block at `height`; `None` for genesis (height 0),
/// whose difficulty is fixed and never checked here.
fn work_required(&self, height: u32) -> Option<u32> {
if height == 0 {
return None;
}
// NOTE(review): difficulty is taken from the *best* header, not from the
// parent of `height` — these differ on forks; confirm intended behavior.
// should this be best_header or parent header?
// regtest do not pass with previous header, but, imo checking with best is a bit weird, mk
let previous_header = self.store.best_header().expect("self.height != 0; qed");
if utils::is_retarget_height(height) {
let retarget_ref = (height - utils::RETARGETING_INTERVAL).into();
let retarget_header = self.store.block_header(retarget_ref).expect("self.height != 0 && self.height % RETARGETING_INTERVAL == 0; qed");
// timestamp of block(height - RETARGETING_INTERVAL), the first block of the closing window
let retarget_timestamp = retarget_header.time;
// timestamp of parent block
let last_timestamp = previous_header.time;
// nbits of last block
let last_nbits = previous_header.nbits;
return Some(utils::work_required_retarget(retarget_timestamp, last_timestamp, last_nbits));
}
// Off retarget boundaries the difficulty carries over unchanged.
// TODO: if.testnet
Some(previous_header.nbits)
}
}
impl Verify for ChainVerifier {
@ -492,6 +555,7 @@ mod tests {
}
#[test]
#[ignore]
fn coinbase_happy() {
let path = RandomTempPath::create_dir();

105
verification/src/compact.rs Normal file
View File

@ -0,0 +1,105 @@
use uint::U256;
/// 32-bit "compact" encoding of a 256-bit difficulty target
/// (the `nbits` field of a block header).
#[derive(Debug, PartialEq, Clone, Copy)]
pub struct Compact(u32);
// Transparent conversions to/from the raw `nbits` word.
impl From<u32> for Compact {
fn from(u: u32) -> Self {
Compact(u)
}
}
impl From<Compact> for u32 {
fn from(c: Compact) -> Self {
c.0
}
}
impl Compact {
/// Wraps a raw `nbits` value.
pub fn new(u: u32) -> Self {
Compact(u)
}
/// Computes the target [0, T] that a blockhash must land in to be valid.
/// Returns `Err` carrying the decoded value when the encoding is negative
/// or overflows 256 bits (callers may still choose to use that value).
pub fn to_u256(&self) -> Result<U256, U256> {
// High byte is the base-256 exponent, low 23 bits the mantissa.
let size = self.0 >> 24;
let mut word = self.0 & 0x007fffff;
let result = if size <= 3 {
word >>= 8 * (3 - size as usize);
word.into()
} else {
U256::from(word) << (8 * (size as usize - 3))
};
// Bit 23 acts as a sign bit in the compact encoding.
let is_negative = word != 0 && (self.0 & 0x00800000) != 0;
// Overflow detection as in Bitcoin Core's arith_uint256::SetCompact.
let is_overflow = (word != 0 && size > 34) ||
(word > 0xff && size > 33) ||
(word > 0xffff && size > 32);
if is_negative || is_overflow {
Err(result)
} else {
Ok(result)
}
}
/// Encodes a 256-bit target back into compact `nbits` form.
pub fn from_u256(val: U256) -> Self {
// Number of significant bytes in the target.
let mut size = (val.bits() + 7) / 8;
let mut compact = if size <= 3 {
(val.low_u64() << (8 * (3 - size))) as u32
} else {
let bn = val >> (8 * (size - 3));
bn.low_u32()
};
// If the mantissa's top bit is set it would read back as a sign bit;
// shift the mantissa down and bump the exponent instead.
if (compact & 0x00800000) != 0 {
compact >>= 8;
size += 1;
}
assert!((compact & !0x007fffff) == 0);
assert!(size < 256);
Compact(compact | (size << 24) as u32)
}
}
#[cfg(test)]
mod tests {
use uint::U256;
use super::Compact;
#[test]
fn test_compact_to_u256() {
// Zero mantissa decodes to zero regardless of exponent.
assert_eq!(Compact::new(0x01003456).to_u256(), Ok(0.into()));
assert_eq!(Compact::new(0x01123456).to_u256(), Ok(0x12.into()));
assert_eq!(Compact::new(0x02008000).to_u256(), Ok(0x80.into()));
assert_eq!(Compact::new(0x05009234).to_u256(), Ok(0x92340000u64.into()));
// negative -0x12345600 (sign bit 23 set with non-zero mantissa)
assert!(Compact::new(0x04923456).to_u256().is_err());
assert_eq!(Compact::new(0x04123456).to_u256(), Ok(0x12345600u64.into()));
}
#[test]
fn test_from_u256() {
let test1 = U256::from(1000u64);
assert_eq!(Compact::new(0x0203e800), Compact::from_u256(test1));
// 0x1d00ffff corresponds to the 2^224 - 1 style maximum target.
let test2 = U256::from(2).pow(U256::from(256-32)) - U256::from(1);
assert_eq!(Compact::new(0x1d00ffff), Compact::from_u256(test2));
}
#[test]
fn test_compact_to_from_u256() {
// Round-tripping only holds for values whose mantissa survives decoding.
// TODO: it does not work both ways for small values... check why
let compact = Compact::new(0x1d00ffff);
let compact2 = Compact::from_u256(compact.to_u256().unwrap());
assert_eq!(compact, compact2);
let compact = Compact::new(0x05009234);
let compact2 = Compact::from_u256(compact.to_u256().unwrap());
assert_eq!(compact, compact2);
}
}

View File

@ -17,9 +17,12 @@ extern crate ethcore_devtools as devtools;
#[cfg(test)]
extern crate test_data;
mod chain_verifier;
mod compact;
mod queue;
mod utils;
mod chain_verifier;
pub use primitives::{uint, hash};
pub use queue::Queue;
pub use chain_verifier::ChainVerifier;

View File

@ -1,13 +1,66 @@
#![allow(dead_code)]
//! Verification utilities
use primitives::hash::H256;
use std::cmp;
use hash::H256;
use uint::U256;
use byteorder::{BigEndian, ByteOrder};
use chain;
use script::{self, Script};
use chain;
use compact::Compact;
const MAX_NBITS: u32 = 0x207fffff;
// Timespan constants
const RETARGETING_FACTOR: u32 = 4;
const TARGET_SPACING_SECONDS: u32 = 10 * 60;
const DOUBLE_SPACING_SECONDS: u32 = 2 * TARGET_SPACING_SECONDS;
const TARGET_TIMESPAN_SECONDS: u32 = 2 * 7 * 24 * 60 * 60;
// The upper and lower bounds for retargeting timespan: one period may move
// the difficulty by at most RETARGETING_FACTOR in either direction.
const MIN_TIMESPAN: u32 = TARGET_TIMESPAN_SECONDS / RETARGETING_FACTOR;
const MAX_TIMESPAN: u32 = TARGET_TIMESPAN_SECONDS * RETARGETING_FACTOR;
// Target number of blocks per retarget period: two weeks of 10-minute blocks = 2016
pub const RETARGETING_INTERVAL: u32 = TARGET_TIMESPAN_SECONDS / TARGET_SPACING_SECONDS;
// Easiest allowed nbits (lowest difficulty) per network.
pub const MAX_NBITS_MAINNET: u32 = 0x1d00ffff;
pub const MAX_NBITS_TESTNET: u32 = 0x1d00ffff;
pub const MAX_NBITS_REGTEST: u32 = 0x207fffff;
/// True when the block at `height` opens a new difficulty period
/// (every RETARGETING_INTERVAL blocks; note this includes height 0).
pub fn is_retarget_height(height: u32) -> bool {
height % RETARGETING_INTERVAL == 0
}
fn retarget_timespan(retarget_timestamp: u32, last_timestamp: u32) -> u32 {
let timespan = last_timestamp - retarget_timestamp;
range_constrain(timespan as u32, MIN_TIMESPAN, MAX_TIMESPAN)
}
/// Computes the new `nbits` after a retarget period.
/// `retarget_timestamp` — time of the first block of the closing window;
/// `last_timestamp` — time of its last block; `last_nbits` — that block's bits.
pub fn work_required_retarget(retarget_timestamp: u32, last_timestamp: u32, last_nbits: u32) -> u32 {
// ignore overflows here: an Err from to_u256 still carries the decoded value
let mut retarget = Compact::new(last_nbits).to_u256().unwrap_or_else(|x| x);
let maximum = Compact::new(MAX_NBITS_MAINNET).to_u256().unwrap_or_else(|x| x);
// new_target = old_target * actual_timespan / target_timespan
// multiplication overflow potential
retarget = retarget * U256::from(retarget_timespan(retarget_timestamp, last_timestamp));
retarget = retarget / U256::from(TARGET_TIMESPAN_SECONDS);
// The target is never allowed above the mainnet maximum (minimum difficulty).
if retarget > maximum {
Compact::from_u256(maximum).into()
} else {
Compact::from_u256(retarget).into()
}
}
/// Testnet-specific difficulty rules are not implemented yet; calling this panics.
pub fn work_required_testnet() -> u32 {
unimplemented!();
}
/// Clamps `value` into the inclusive range [`min`, `max`].
fn range_constrain(value: u32, min: u32, max: u32) -> u32 {
	// Raise to at least `min`, then cap at `max`.
	cmp::min(max, cmp::max(min, value))
}
/// Simple nbits check that does not require 256-bit arithmetic
pub fn check_nbits(hash: &H256, n_bits: u32) -> bool {
if n_bits > MAX_NBITS { return false; }
if n_bits > MAX_NBITS_REGTEST { return false; }
let hash_bytes: &[u8] = &**hash;
@ -84,9 +137,8 @@ pub fn p2sh_sigops(output: &Script, input_ref: &Script) -> usize {
#[cfg(test)]
mod tests {
use super::{block_reward_satoshi, check_nbits};
use primitives::hash::H256;
use hash::H256;
#[test]
fn reward() {