Merge pull request #197 from ethcore/chain_verifier_network

chain verifier does not have hardcoded network constants
This commit is contained in:
Nikolay Volf 2016-11-25 22:27:21 +03:00 committed by GitHub
commit 2437f17d20
35 changed files with 202 additions and 132 deletions

13
Cargo.lock generated
View File

@ -8,6 +8,7 @@ dependencies = [
"ethcore-devtools 1.3.0",
"linked-hash-map 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"network 0.1.0",
"parking_lot 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"primitives 0.1.0",
"script 0.1.0",
@ -328,6 +329,7 @@ dependencies = [
"bitcrypto 0.1.0",
"byteorder 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
"chain 0.1.0",
"network 0.1.0",
"primitives 0.1.0",
"serialization 0.1.0",
]
@ -391,6 +393,14 @@ dependencies = [
"ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "network"
version = "0.1.0"
dependencies = [
"chain 0.1.0",
"serialization 0.1.0",
]
[[package]]
name = "nix"
version = "0.7.0"
@ -461,6 +471,7 @@ dependencies = [
"futures-cpupool 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"message 0.1.0",
"network 0.1.0",
"ns-dns-tokio 0.1.0 (git+https://github.com/debris/abstract-ns)",
"parking_lot 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"primitives 0.1.0",
@ -506,6 +517,7 @@ dependencies = [
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"message 0.1.0",
"miner 0.1.0",
"network 0.1.0",
"p2p 0.1.0",
"script 0.1.0",
"sync 0.1.0",
@ -674,6 +686,7 @@ dependencies = [
"message 0.1.0",
"miner 0.1.0",
"murmur3 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"network 0.1.0",
"p2p 0.1.0",
"parking_lot 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"primitives 0.1.0",

View File

@ -13,6 +13,7 @@ clap = { version = "2", features = ["yaml"] }
chain = { path = "chain" }
keys = { path = "keys" }
message = { path = "message" }
network = { path = "network" }
miner = { path = "miner" }
p2p = { path = "p2p" }
script = { path = "script" }

View File

@ -10,3 +10,4 @@ bitcrypto = { path = "../crypto" }
chain = { path = "../chain" }
primitives = { path = "../primitives" }
serialization = { path = "../serialization" }
network = { path = "../network" }

View File

@ -3,10 +3,8 @@ mod block_header_and_ids;
mod block_transactions;
mod block_transactions_request;
mod command;
mod consensus;
mod inventory;
mod ip;
mod magic;
mod port;
mod prefilled_transaction;
mod service;
@ -16,10 +14,8 @@ pub use self::block_header_and_ids::BlockHeaderAndIDs;
pub use self::block_transactions::BlockTransactions;
pub use self::block_transactions_request::BlockTransactionsRequest;
pub use self::command::Command;
pub use self::consensus::ConsensusParams;
pub use self::inventory::{InventoryVector, InventoryType};
pub use self::ip::IpAddress;
pub use self::magic::Magic;
pub use self::port::Port;
pub use self::prefilled_transaction::PrefilledTransaction;
pub use self::service::Services;

View File

@ -9,10 +9,8 @@ pub enum Error {
Deserialize,
/// Command has wrong format or is unsupported.
InvalidCommand,
/// Network magic is not supported.
InvalidMagic,
/// Network magic comes from different network.
WrongMagic,
InvalidMagic,
/// Invalid checksum.
InvalidChecksum,
/// Invalid version.
@ -37,7 +35,6 @@ impl error::Error for Error {
Error::Deserialize => "Message Deserialization Error",
Error::InvalidCommand => "Invalid Message Command",
Error::InvalidMagic => "Invalid Network Magic",
Error::WrongMagic => "Wrong Network Magic",
Error::InvalidChecksum => "Invalid message chacksum",
Error::InvalidVersion => "Unsupported protocol version",
}

View File

@ -3,6 +3,7 @@ extern crate bitcrypto as crypto;
extern crate chain;
extern crate primitives;
extern crate serialization as ser;
extern crate network;
pub mod common;
mod message;
@ -12,7 +13,7 @@ mod error;
pub use primitives::{hash, bytes};
pub use common::{Command, Magic, Services};
pub use common::{Command, Services};
pub use message::{Message, MessageHeader, Payload, to_raw_message};
pub use serialization::{serialize_payload, deserialize_payload};
pub use error::{Error, MessageResult};

View File

@ -1,6 +1,7 @@
use ser::Stream;
use bytes::{TaggedBytes, Bytes};
use common::{Magic, Command};
use network::Magic;
use common::Command;
use serialization::serialize_payload;
use {Payload, MessageResult, MessageHeader};

View File

@ -1,7 +1,8 @@
use hash::H32;
use ser::{Serializable, Stream, Reader};
use crypto::checksum;
use common::{Command, Magic};
use network::Magic;
use common::Command;
use Error;
#[derive(Debug, PartialEq)]
@ -31,9 +32,9 @@ impl MessageHeader {
let mut reader = Reader::new(data);
let magic: u32 = try!(reader.read());
let magic = try!(Magic::from_u32(magic));
let magic = Magic::from(magic);
if expected != magic {
return Err(Error::WrongMagic);
return Err(Error::InvalidMagic);
}
let header = MessageHeader {
@ -61,7 +62,7 @@ impl Serializable for MessageHeader {
mod tests {
use bytes::Bytes;
use ser::serialize;
use common::Magic;
use network::Magic;
use super::MessageHeader;
#[test]

8
network/Cargo.toml Normal file
View File

@ -0,0 +1,8 @@
# Manifest for the new `network` crate, which owns the network constants
# (magic numbers, consensus parameters) previously hardcoded elsewhere.
[package]
name = "network"
version = "0.1.0"
authors = ["debris <marek.kotewicz@gmail.com>"]
# Depends only on low-level crates so every other crate can depend on it
# without cycles (message, p2p, sync, verification all pull it in).
[dependencies]
serialization = { path = "../serialization" }
chain = { path = "../chain" }

View File

@ -14,7 +14,7 @@ pub struct ConsensusParams {
impl ConsensusParams {
pub fn with_magic(magic: Magic) -> Self {
match magic {
Magic::Mainnet => ConsensusParams {
Magic::Mainnet | Magic::Other(_) => ConsensusParams {
bip16_time: 1333238400, // Apr 1 2012
bip65_height: 388381, // 000000000000000004c2b624ed5d7756c508d90fd0da2c7c679febfa6c4735f0
},

8
network/src/lib.rs Normal file
View File

@ -0,0 +1,8 @@
//! Network parameters crate: exposes the `Magic` network identifier and the
//! per-network `ConsensusParams`, moved here out of the `message` crate so
//! that verification no longer needs hardcoded network constants.
extern crate chain;
extern crate serialization as ser;
mod consensus;
mod magic;
pub use consensus::ConsensusParams;
pub use magic::Magic;

View File

@ -3,21 +3,28 @@
use ser::{Stream, Serializable};
use chain::Block;
use Error;
use super::ConsensusParams;
const MAGIC_MAINNET: u32 = 0xD9B4BEF9;
const MAGIC_TESTNET: u32 = 0x0709110B;
const MAGIC_REGTEST: u32 = 0xDAB5BFFA;
const MAX_NBITS_MAINNET: u32 = 0x1d00ffff;
const MAX_NBITS_TESTNET: u32 = 0x1d00ffff;
const MAX_NBITS_REGTEST: u32 = 0x207fffff;
/// Bitcoin network
/// https://bitcoin.org/en/glossary/mainnet
// Identified by the 4-byte network magic word; converts to/from `u32`
// via the `From` impls in this module, and unknown values are preserved
// as `Other` rather than rejected.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum Magic {
/// The original and main network for Bitcoin transactions, where satoshis have real economic value.
Mainnet,
/// The main bitcoin testnet.
Testnet,
/// Bitcoin regtest network.
Regtest,
/// Any other network. By default behaves like bitcoin mainnet.
Other(u32),
}
impl From<Magic> for u32 {
@ -26,23 +33,34 @@ impl From<Magic> for u32 {
Magic::Mainnet => MAGIC_MAINNET,
Magic::Testnet => MAGIC_TESTNET,
Magic::Regtest => MAGIC_REGTEST,
Magic::Other(magic) => magic,
}
}
}
/// Total (infallible) conversion from a raw network magic value: the three
/// known Bitcoin networks map to their named variants, and any unrecognised
/// value is carried through as `Magic::Other` — replacing the old fallible
/// `Magic::from_u32` which returned `Err(Error::InvalidMagic)` for unknowns.
impl From<u32> for Magic {
fn from(u: u32) -> Self {
match u {
MAGIC_MAINNET => Magic::Mainnet,
MAGIC_TESTNET => Magic::Testnet,
MAGIC_REGTEST => Magic::Regtest,
// Unknown magic: keep the raw value so peers on custom networks
// can still be represented (behaves like mainnet elsewhere).
other => Magic::Other(other),
}
}
}
impl Magic {
pub fn from_u32(magic: u32) -> Result<Self, Error> {
match magic {
MAGIC_MAINNET => Ok(Magic::Mainnet),
MAGIC_TESTNET => Ok(Magic::Testnet),
MAGIC_REGTEST => Ok(Magic::Regtest),
_ => Err(Error::InvalidMagic),
pub fn max_nbits(&self) -> u32 {
match *self {
Magic::Mainnet | Magic::Other(_) => MAX_NBITS_MAINNET,
Magic::Testnet => MAX_NBITS_TESTNET,
Magic::Regtest => MAX_NBITS_REGTEST,
}
}
pub fn port(&self) -> u16 {
match *self {
Magic::Mainnet => 8333,
Magic::Mainnet | Magic::Other(_) => 8333,
Magic::Testnet => 18333,
Magic::Regtest => 18444,
}
@ -50,7 +68,7 @@ impl Magic {
pub fn rpc_port(&self) -> u16 {
match *self {
Magic::Mainnet => 8332,
Magic::Mainnet | Magic::Other(_) => 8332,
Magic::Testnet => 18332,
Magic::Regtest => 18443,
}
@ -58,7 +76,7 @@ impl Magic {
pub fn genesis_block(&self) -> Block {
match *self {
Magic::Mainnet => "0100000000000000000000000000000000000000000000000000000000000000000000003ba3edfd7a7b12b27ac72c3e67768f617fc81bc3888a51323a9fb8aa4b1e5e4a29ab5f49ffff001d1dac2b7c0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff4d04ffff001d0104455468652054696d65732030332f4a616e2f32303039204368616e63656c6c6f72206f6e206272696e6b206f66207365636f6e64206261696c6f757420666f722062616e6b73ffffffff0100f2052a01000000434104678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac00000000".into(),
Magic::Mainnet | Magic::Other(_) => "0100000000000000000000000000000000000000000000000000000000000000000000003ba3edfd7a7b12b27ac72c3e67768f617fc81bc3888a51323a9fb8aa4b1e5e4a29ab5f49ffff001d1dac2b7c0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff4d04ffff001d0104455468652054696d65732030332f4a616e2f32303039204368616e63656c6c6f72206f6e206272696e6b206f66207365636f6e64206261696c6f757420666f722062616e6b73ffffffff0100f2052a01000000434104678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac00000000".into(),
Magic::Testnet => "0100000000000000000000000000000000000000000000000000000000000000000000003ba3edfd7a7b12b27ac72c3e67768f617fc81bc3888a51323a9fb8aa4b1e5e4adae5494dffff001d1aa4ae180101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff4d04ffff001d0104455468652054696d65732030332f4a616e2f32303039204368616e63656c6c6f72206f6e206272696e6b206f66207365636f6e64206261696c6f757420666f722062616e6b73ffffffff0100f2052a01000000434104678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac00000000".into(),
Magic::Regtest => "0100000000000000000000000000000000000000000000000000000000000000000000003ba3edfd7a7b12b27ac72c3e67768f617fc81bc3888a51323a9fb8aa4b1e5e4adae5494dffff7f20020000000101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff4d04ffff001d0104455468652054696d65732030332f4a616e2f32303039204368616e63656c6c6f72206f6e206272696e6b206f66207365636f6e64206261696c6f757420666f722062616e6b73ffffffff0100f2052a01000000434104678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac00000000".into(),
}
@ -77,18 +95,27 @@ impl Serializable for Magic {
#[cfg(test)]
mod tests {
use Error;
use super::{Magic, MAGIC_MAINNET, MAGIC_TESTNET, MAGIC_REGTEST};
use super::{
Magic, MAGIC_MAINNET, MAGIC_TESTNET, MAGIC_REGTEST,
MAX_NBITS_MAINNET, MAX_NBITS_TESTNET, MAX_NBITS_REGTEST,
};
#[test]
fn test_network_magic_number() {
assert_eq!(MAGIC_MAINNET, Magic::Mainnet.into());
assert_eq!(MAGIC_TESTNET, Magic::Testnet.into());
assert_eq!(MAGIC_REGTEST, Magic::Regtest.into());
assert_eq!(Magic::from_u32(MAGIC_MAINNET).unwrap(), Magic::Mainnet);
assert_eq!(Magic::from_u32(MAGIC_TESTNET).unwrap(), Magic::Testnet);
assert_eq!(Magic::from_u32(MAGIC_REGTEST).unwrap(), Magic::Regtest);
assert_eq!(Magic::from_u32(0).unwrap_err(), Error::InvalidMagic);
assert_eq!(Magic::Mainnet, MAGIC_MAINNET.into());
assert_eq!(Magic::Testnet, MAGIC_TESTNET.into());
assert_eq!(Magic::Regtest, MAGIC_REGTEST.into());
assert_eq!(Magic::Other(0), 0.into());
}
#[test]
fn test_network_max_nbits() {
assert_eq!(Magic::Mainnet.max_nbits(), MAX_NBITS_MAINNET);
assert_eq!(Magic::Testnet.max_nbits(), MAX_NBITS_TESTNET);
assert_eq!(Magic::Regtest.max_nbits(), MAX_NBITS_REGTEST);
}
#[test]

View File

@ -19,3 +19,4 @@ primitives = { path = "../primitives"}
bitcrypto = { path = "../crypto" }
message = { path = "../message" }
serialization = { path = "../serialization" }
network = { path = "../network" }

View File

@ -2,7 +2,7 @@ use std::{io, cmp};
use futures::{Future, Poll, Async};
use message::{Message, MessageResult, Error};
use message::types::{Version, Verack};
use message::common::Magic;
use network::Magic;
use io::{write_message, WriteMessage, ReadMessage, read_message};
pub fn handshake<A>(a: A, magic: Magic, version: Version, min_version: u32) -> Handshake<A> where A: io::Write + io::Read {
@ -199,7 +199,8 @@ mod tests {
use futures::Future;
use bytes::Bytes;
use ser::Stream;
use message::{Magic, Message};
use network::Magic;
use message::Message;
use message::types::Verack;
use message::types::version::{Version, V0, V106, V70001};
use super::{handshake, accept_handshake, HandshakeResult};

View File

@ -2,7 +2,8 @@ use std::io;
use futures::{Future, Poll, Async};
use tokio_core::io::{read_exact, ReadExact};
use crypto::checksum;
use message::{Error, MessageHeader, MessageResult, Magic, Command};
use network::Magic;
use message::{Error, MessageHeader, MessageResult, Command};
use bytes::Bytes;
use io::{read_header, ReadHeader};
@ -68,7 +69,8 @@ impl<A> Future for ReadAnyMessage<A> where A: io::Read {
mod tests {
use futures::Future;
use bytes::Bytes;
use message::{Magic, Error};
use network::Magic;
use message::Error;
use super::read_any_message;
#[test]
@ -79,7 +81,7 @@ mod tests {
let expected = (name, nonce);
assert_eq!(read_any_message(raw.as_ref(), Magic::Mainnet).wait().unwrap(), Ok(expected));
assert_eq!(read_any_message(raw.as_ref(), Magic::Testnet).wait().unwrap(), Err(Error::WrongMagic));
assert_eq!(read_any_message(raw.as_ref(), Magic::Testnet).wait().unwrap(), Err(Error::InvalidMagic));
}
#[test]

View File

@ -1,7 +1,8 @@
use std::io;
use futures::{Future, Poll, Async};
use tokio_core::io::{ReadExact, read_exact};
use message::{MessageHeader, MessageResult, Magic};
use message::{MessageHeader, MessageResult};
use network::Magic;
pub fn read_header<A>(a: A, magic: Magic) -> ReadHeader<A> where A: io::Read {
ReadHeader {
@ -30,7 +31,8 @@ impl<A> Future for ReadHeader<A> where A: io::Read {
mod tests {
use futures::Future;
use bytes::Bytes;
use message::{Magic, MessageHeader, Error};
use network::Magic;
use message::{MessageHeader, Error};
use super::read_header;
#[test]
@ -44,7 +46,7 @@ mod tests {
};
assert_eq!(read_header(raw.as_ref(), Magic::Mainnet).wait().unwrap().1, Ok(expected));
assert_eq!(read_header(raw.as_ref(), Magic::Testnet).wait().unwrap().1, Err(Error::WrongMagic));
assert_eq!(read_header(raw.as_ref(), Magic::Testnet).wait().unwrap().1, Err(Error::InvalidMagic));
}
#[test]

View File

@ -1,7 +1,8 @@
use std::io;
use std::marker::PhantomData;
use futures::{Poll, Future, Async};
use message::{MessageResult, Error, Magic, Payload};
use network::Magic;
use message::{MessageResult, Error, Payload};
use io::{read_header, ReadHeader, read_payload, ReadPayload};
pub fn read_message<M, A>(a: A, magic: Magic, version: u32) -> ReadMessage<M, A>
@ -74,7 +75,8 @@ impl<M, A> Future for ReadMessage<M, A> where A: io::Read, M: Payload {
mod tests {
use futures::Future;
use bytes::Bytes;
use message::{Magic, Error};
use network::Magic;
use message::Error;
use message::types::{Ping, Pong};
use super::read_message;
@ -83,7 +85,7 @@ mod tests {
let raw: Bytes = "f9beb4d970696e6700000000000000000800000083c00c765845303b6da97786".into();
let ping = Ping::new(u64::from_str_radix("8677a96d3b304558", 16).unwrap());
assert_eq!(read_message(raw.as_ref(), Magic::Mainnet, 0).wait().unwrap().1, Ok(ping));
assert_eq!(read_message::<Ping, _>(raw.as_ref(), Magic::Testnet, 0).wait().unwrap().1, Err(Error::WrongMagic));
assert_eq!(read_message::<Ping, _>(raw.as_ref(), Magic::Testnet, 0).wait().unwrap().1, Err(Error::InvalidMagic));
assert_eq!(read_message::<Pong, _>(raw.as_ref(), Magic::Mainnet, 0).wait().unwrap().1, Err(Error::InvalidCommand));
}

View File

@ -15,6 +15,7 @@ extern crate bitcrypto as crypto;
extern crate message;
extern crate primitives;
extern crate serialization as ser;
extern crate network;
mod io;
mod net;

View File

@ -3,7 +3,8 @@ use std::time::Duration;
use futures::{Future, Poll};
use tokio_core::reactor::Handle;
use tokio_core::net::TcpStream;
use message::{MessageResult, Magic};
use network::Magic;
use message::{MessageResult};
use io::{accept_handshake, AcceptHandshake, Deadline, deadline};
use net::{Config, Connection};

View File

@ -1,5 +1,6 @@
use std::net::SocketAddr;
use message::common::{Magic, Services, NetAddress};
use network::Magic;
use message::common::{Services, NetAddress};
use message::types::version::{Version, V0, V106, V70001};
use util::time::{Time, RealTime};
use util::nonce::{NonceGenerator, RandomNonce};

View File

@ -4,8 +4,8 @@ use std::net::SocketAddr;
use futures::{Future, Poll, Async};
use tokio_core::reactor::Handle;
use tokio_core::net::{TcpStream, TcpStreamNew};
use network::Magic;
use message::Error;
use message::common::Magic;
use message::types::Version;
use io::{handshake, Handshake, Deadline, deadline};
use net::{Config, Connection};

View File

@ -1,5 +1,5 @@
use std::net;
use message::Magic;
use network::Magic;
use message::common::Services;
use io::SharedTcpStream;

View File

@ -1,5 +1,5 @@
use std::net::SocketAddr;
use message::Magic;
use network::Magic;
pub type PeerId = usize;

View File

@ -8,7 +8,7 @@ pub fn import(cfg: Config, matches: &ArgMatches) -> Result<(), String> {
// TODO: this might be unnecessary here!
try!(init_db(&cfg, &db));
let mut writer = create_sync_blocks_writer(db);
let mut writer = create_sync_blocks_writer(db, cfg.magic);
let blk_path = matches.value_of("PATH").expect("PATH is required in cli.yml; qed");
let blk_dir = try!(::import::open_blk_dir(blk_path).map_err(|_| "Import directory does not exist".to_owned()));

View File

@ -32,7 +32,7 @@ pub fn start(cfg: config::Config) -> Result<(), String> {
};
let sync_handle = el.handle();
let sync_connection_factory = create_sync_connection_factory(&sync_handle, cfg.magic.consensus_params(), db);
let sync_connection_factory = create_sync_connection_factory(&sync_handle, cfg.magic, db);
let p2p = try!(p2p::P2P::new(p2p_cfg, sync_connection_factory, el.handle()).map_err(|x| x.to_string()));
try!(p2p.run().map_err(|_| "Failed to start p2p module"));

View File

@ -1,6 +1,6 @@
use std::net;
use clap;
use message::Magic;
use network::Magic;
use {USER_AGENT, REGTEST_USER_AGENT};
pub struct Config {
@ -29,18 +29,18 @@ pub fn parse(matches: &clap::ArgMatches) -> Result<Config, String> {
};
let (in_connections, out_connections) = match magic {
Magic::Testnet | Magic::Mainnet => (10, 10),
Magic::Testnet | Magic::Mainnet | Magic::Other(_) => (10, 10),
Magic::Regtest => (1, 0),
};
let p2p_threads = match magic {
Magic::Testnet | Magic::Mainnet => 4,
Magic::Testnet | Magic::Mainnet | Magic::Other(_) => 4,
Magic::Regtest => 1,
};
// to skip idiotic 30 seconds delay in test-scripts
let user_agent = match magic {
Magic::Testnet | Magic::Mainnet => USER_AGENT,
Magic::Testnet | Magic::Mainnet | Magic::Other(_) => USER_AGENT,
Magic::Regtest => REGTEST_USER_AGENT,
};

View File

@ -12,6 +12,7 @@ extern crate chain;
extern crate keys;
extern crate script;
extern crate message;
extern crate network;
extern crate p2p;
extern crate sync;
extern crate import;

View File

@ -26,6 +26,7 @@ script = { path = "../script" }
serialization = { path = "../serialization" }
test-data = { path = "../test-data" }
verification = { path = "../verification" }
network = { path = "../network" }
[features]
dev = []

View File

@ -1,8 +1,9 @@
use std::sync::Arc;
use chain;
use db;
use super::Error;
use network::Magic;
use verification::{Verify, ChainVerifier};
use super::Error;
pub struct BlocksWriter {
storage: Arc<db::Store>,
@ -10,10 +11,10 @@ pub struct BlocksWriter {
}
impl BlocksWriter {
pub fn new(storage: db::SharedStore) -> BlocksWriter {
pub fn new(storage: db::SharedStore, network: Magic) -> BlocksWriter {
BlocksWriter {
storage: storage.clone(),
verifier: ChainVerifier::new(storage),
verifier: ChainVerifier::new(storage, network),
}
}
@ -32,18 +33,17 @@ impl BlocksWriter {
#[cfg(test)]
mod tests {
use db;
use db::Store;
use std::sync::Arc;
use db::{self, Store};
use network::Magic;
use {test_data, verification};
use super::super::Error;
use super::BlocksWriter;
use test_data;
use verification;
#[test]
fn blocks_writer_appends_blocks() {
let db = Arc::new(db::TestStorage::with_genesis_block());
let mut blocks_target = BlocksWriter::new(db.clone());
let mut blocks_target = BlocksWriter::new(db.clone(), Magic::Testnet);
blocks_target.append_block(test_data::block_h1()).expect("Expecting no error");
assert_eq!(db.best_block().expect("Block is inserted").number, 1);
}
@ -51,7 +51,7 @@ mod tests {
#[test]
fn blocks_writer_verification_error() {
let db = Arc::new(db::TestStorage::with_genesis_block());
let mut blocks_target = BlocksWriter::new(db.clone());
let mut blocks_target = BlocksWriter::new(db.clone(), Magic::Testnet);
match blocks_target.append_block(test_data::block_h2()).unwrap_err() {
Error::OutOfOrderBlock => (),
_ => panic!("Unexpected error"),
@ -62,7 +62,7 @@ mod tests {
#[test]
fn blocks_writer_out_of_order_block() {
let db = Arc::new(db::TestStorage::with_genesis_block());
let mut blocks_target = BlocksWriter::new(db.clone());
let mut blocks_target = BlocksWriter::new(db.clone(), Magic::Testnet);
let wrong_block = test_data::block_builder()
.header().parent(test_data::genesis().hash()).build()

View File

@ -21,6 +21,7 @@ extern crate serialization as ser;
#[cfg(test)]
extern crate ethcore_devtools as devtools;
extern crate rand;
extern crate network;
mod best_headers_chain;
mod blocks_writer;
@ -42,7 +43,7 @@ mod synchronization_verifier;
use std::sync::Arc;
use parking_lot::RwLock;
use tokio_core::reactor::Handle;
use message::common::ConsensusParams;
use network::Magic;
/// Sync errors.
#[derive(Debug)]
@ -56,12 +57,12 @@ pub enum Error {
}
/// Create blocks writer.
pub fn create_sync_blocks_writer(db: db::SharedStore) -> blocks_writer::BlocksWriter {
blocks_writer::BlocksWriter::new(db)
pub fn create_sync_blocks_writer(db: db::SharedStore, network: Magic) -> blocks_writer::BlocksWriter {
blocks_writer::BlocksWriter::new(db, network)
}
/// Create inbound synchronization connections factory for given `db`.
pub fn create_sync_connection_factory(handle: &Handle, consensus_params: ConsensusParams, db: db::SharedStore) -> p2p::LocalSyncNodeRef {
pub fn create_sync_connection_factory(handle: &Handle, network: Magic, db: db::SharedStore) -> p2p::LocalSyncNodeRef {
use synchronization_chain::Chain as SyncChain;
use synchronization_executor::LocalSynchronizationTaskExecutor as SyncExecutor;
use local_node::LocalNode as SyncNode;
@ -74,7 +75,7 @@ pub fn create_sync_connection_factory(handle: &Handle, consensus_params: Consens
let sync_executor = SyncExecutor::new(sync_chain.clone());
let sync_server = Arc::new(SynchronizationServer::new(sync_chain.clone(), sync_executor.clone()));
let sync_client_core = SynchronizationClientCore::new(SynchronizationConfig::new(), handle, sync_executor.clone(), sync_chain.clone());
let verifier = AsyncVerifier::new(consensus_params, sync_chain, sync_client_core.clone());
let verifier = AsyncVerifier::new(network, sync_chain, sync_client_core.clone());
let sync_client = SynchronizationClient::new(sync_client_core, verifier);
let sync_node = Arc::new(SyncNode::new(sync_server, sync_client, sync_executor));
SyncConnectionFactory::with_local_node(sync_node)

View File

@ -3,7 +3,7 @@ use std::sync::Arc;
use std::sync::mpsc::{channel, Sender, Receiver};
use parking_lot::Mutex;
use chain::{Block, Transaction};
use message::common::ConsensusParams;
use network::{Magic, ConsensusParams};
use primitives::hash::H256;
use verification::{ChainVerifier, Verify as VerificationVerify};
use synchronization_chain::ChainRef;
@ -48,16 +48,16 @@ pub struct AsyncVerifier {
impl AsyncVerifier {
/// Create new async verifier
pub fn new<T: VerificationSink>(consensus_params: ConsensusParams, chain: ChainRef, sink: Arc<Mutex<T>>) -> Self {
pub fn new<T: VerificationSink>(network: Magic, chain: ChainRef, sink: Arc<Mutex<T>>) -> Self {
let (verification_work_sender, verification_work_receiver) = channel();
let storage = chain.read().storage();
let verifier = ChainVerifier::new(storage);
let verifier = ChainVerifier::new(storage, network);
AsyncVerifier {
verification_work_sender: verification_work_sender,
verification_worker_thread: Some(thread::Builder::new()
.name("Sync verification thread".to_string())
.spawn(move || {
AsyncVerifier::verification_worker_proc(sink, chain, consensus_params, verifier, verification_work_receiver)
AsyncVerifier::verification_worker_proc(sink, chain, network.consensus_params(), verifier, verification_work_receiver)
})
.expect("Error creating verification thread"))
}

View File

@ -4,18 +4,17 @@ version = "0.1.0"
authors = ["Nikolay Volf <nikvolf@gmail.com>"]
[dependencies]
byteorder = "0.5"
parking_lot = "0.3"
linked-hash-map = "0.3"
time = "0.1"
log = "0.3"
ethcore-devtools = { path = "../devtools" }
primitives = { path = "../primitives" }
chain = { path = "../chain" }
serialization = { path = "../serialization" }
parking_lot = "0.3"
linked-hash-map = "0.3"
test-data = { path = "../test-data" }
byteorder = "0.5"
time = "0.1"
script = { path = "../script" }
log = "0.3"
[dependencies.db]
path = "../db"
features = ["dev"]
network = { path = "../network" }
db = { path = "../db", features = ["dev"] }

View File

@ -1,7 +1,8 @@
//! Bitcoin chain verifier
use std::collections::HashSet;
use std::collections::BTreeSet;
use db::{self, BlockRef, BlockLocation};
use network::Magic;
use super::{Verify, VerificationResult, Chain, Error, TransactionError, ContinueVerify};
use {chain, utils};
@ -18,16 +19,18 @@ pub struct ChainVerifier {
verify_clocktimeverify: bool,
skip_pow: bool,
skip_sig: bool,
network: Magic,
}
impl ChainVerifier {
pub fn new(store: db::SharedStore) -> Self {
pub fn new(store: db::SharedStore, network: Magic) -> Self {
ChainVerifier {
store: store,
verify_p2sh: false,
verify_clocktimeverify: false,
skip_pow: false,
skip_sig: false
skip_sig: false,
network: network,
}
}
@ -55,7 +58,7 @@ impl ChainVerifier {
fn ordered_verify(&self, block: &chain::Block, at_height: u32) -> Result<(), Error> {
// check that difficulty matches the adjusted level
if let Some(work) = self.work_required(at_height) {
if let Some(work) = self.work_required(block, at_height) {
if !self.skip_pow && work != block.header().nbits {
trace!(target: "verification", "pow verification error at height: {}", at_height);
trace!(target: "verification", "expected work: {}, got {}", work, block.header().nbits);
@ -202,7 +205,7 @@ impl ChainVerifier {
}
// target difficulty threshold
if !self.skip_pow && !utils::check_nbits(&hash, block.header().nbits) {
if !self.skip_pow && !utils::check_nbits(self.network.max_nbits(), &hash, block.header().nbits) {
return Err(Error::Pow);
}
@ -275,7 +278,7 @@ impl ChainVerifier {
}
fn median_timestamp(&self, block: &chain::Block) -> Option<u32> {
let mut timestamps = HashSet::new();
let mut timestamps = BTreeSet::new();
let mut block_ref = block.block_header.previous_header_hash.clone().into();
// TODO: optimize it, so it does not make 11 redundant queries each time
for _ in 0..11 {
@ -288,21 +291,19 @@ impl ChainVerifier {
}
if timestamps.len() > 2 {
let mut timestamps: Vec<_> = timestamps.into_iter().collect();
timestamps.sort();
let timestamps: Vec<_> = timestamps.into_iter().collect();
Some(timestamps[timestamps.len() / 2])
}
else { None }
}
fn work_required(&self, height: u32) -> Option<u32> {
fn work_required(&self, block: &chain::Block, height: u32) -> Option<u32> {
if height == 0 {
return None;
}
// should this be best_header or parent header?
// regtest do not pass with previous header, but, imo checking with best is a bit weird, mk
let previous_header = self.store.best_header().expect("self.height != 0; qed");
let previous_ref = block.block_header.previous_header_hash.clone().into();
let previous_header = self.store.block_header(previous_ref).expect("self.height != 0; qed");
if utils::is_retarget_height(height) {
let retarget_ref = (height - utils::RETARGETING_INTERVAL).into();
@ -314,7 +315,7 @@ impl ChainVerifier {
// nbits of last block
let last_nbits = previous_header.nbits;
return Some(utils::work_required_retarget(retarget_timestamp, last_timestamp, last_nbits));
return Some(utils::work_required_retarget(self.network.max_nbits(), retarget_timestamp, last_timestamp, last_nbits));
}
// TODO: if.testnet
@ -356,20 +357,19 @@ impl ContinueVerify for ChainVerifier {
#[cfg(test)]
mod tests {
use std::sync::Arc;
use db::{TestStorage, Storage, Store, BlockStapler};
use network::Magic;
use devtools::RandomTempPath;
use {script, test_data};
use super::ChainVerifier;
use super::super::{Verify, Chain, Error, TransactionError};
use db::{TestStorage, Storage, Store, BlockStapler};
use test_data;
use std::sync::Arc;
use devtools::RandomTempPath;
use script;
#[test]
fn verify_orphan() {
let storage = TestStorage::with_blocks(&vec![test_data::genesis()]);
let b2 = test_data::block_h2();
let verifier = ChainVerifier::new(Arc::new(storage));
let verifier = ChainVerifier::new(Arc::new(storage), Magic::Testnet);
assert_eq!(Chain::Orphan, verifier.verify(&b2).unwrap());
}
@ -378,7 +378,7 @@ mod tests {
fn verify_smoky() {
let storage = TestStorage::with_blocks(&vec![test_data::genesis()]);
let b1 = test_data::block_h1();
let verifier = ChainVerifier::new(Arc::new(storage));
let verifier = ChainVerifier::new(Arc::new(storage), Magic::Testnet);
assert_eq!(Chain::Main, verifier.verify(&b1).unwrap());
}
@ -391,7 +391,7 @@ mod tests {
]
);
let b1 = test_data::block_h170();
let verifier = ChainVerifier::new(Arc::new(storage));
let verifier = ChainVerifier::new(Arc::new(storage), Magic::Testnet);
assert_eq!(Chain::Main, verifier.verify(&b1).unwrap());
}
@ -403,7 +403,7 @@ mod tests {
]
);
let b170 = test_data::block_h170();
let verifier = ChainVerifier::new(Arc::new(storage));
let verifier = ChainVerifier::new(Arc::new(storage), Magic::Testnet);
let should_be = Err(Error::Transaction(
1,
@ -437,7 +437,7 @@ mod tests {
.merkled_header().parent(genesis.hash()).build()
.build();
let verifier = ChainVerifier::new(Arc::new(storage)).pow_skip().signatures_skip();
let verifier = ChainVerifier::new(Arc::new(storage), Magic::Testnet).pow_skip().signatures_skip();
let expected = Err(Error::Transaction(
1,
@ -473,7 +473,7 @@ mod tests {
.merkled_header().parent(genesis.hash()).build()
.build();
let verifier = ChainVerifier::new(Arc::new(storage)).pow_skip().signatures_skip();
let verifier = ChainVerifier::new(Arc::new(storage), Magic::Testnet).pow_skip().signatures_skip();
let expected = Ok(Chain::Main);
assert_eq!(expected, verifier.verify(&block));
@ -511,7 +511,7 @@ mod tests {
.merkled_header().parent(genesis.hash()).build()
.build();
let verifier = ChainVerifier::new(Arc::new(storage)).pow_skip().signatures_skip();
let verifier = ChainVerifier::new(Arc::new(storage), Magic::Testnet).pow_skip().signatures_skip();
let expected = Ok(Chain::Main);
assert_eq!(expected, verifier.verify(&block));
@ -548,7 +548,7 @@ mod tests {
.merkled_header().parent(genesis.hash()).build()
.build();
let verifier = ChainVerifier::new(Arc::new(storage)).pow_skip().signatures_skip();
let verifier = ChainVerifier::new(Arc::new(storage), Magic::Testnet).pow_skip().signatures_skip();
let expected = Err(Error::Transaction(2, TransactionError::Overspend));
assert_eq!(expected, verifier.verify(&block));
@ -592,7 +592,7 @@ mod tests {
.merkled_header().parent(best_hash).build()
.build();
let verifier = ChainVerifier::new(Arc::new(storage)).pow_skip().signatures_skip();
let verifier = ChainVerifier::new(Arc::new(storage), Magic::Testnet).pow_skip().signatures_skip();
let expected = Ok(Chain::Main);
@ -644,7 +644,7 @@ mod tests {
.merkled_header().parent(genesis.hash()).build()
.build();
let verifier = ChainVerifier::new(Arc::new(storage)).pow_skip().signatures_skip();
let verifier = ChainVerifier::new(Arc::new(storage), Magic::Testnet).pow_skip().signatures_skip();
let expected = Err(Error::MaximumSigops);
assert_eq!(expected, verifier.verify(&block));
@ -670,7 +670,7 @@ mod tests {
.merkled_header().parent(genesis.hash()).build()
.build();
let verifier = ChainVerifier::new(Arc::new(storage)).pow_skip().signatures_skip();
let verifier = ChainVerifier::new(Arc::new(storage), Magic::Testnet).pow_skip().signatures_skip();
let expected = Err(Error::CoinbaseOverspend {
expected_max: 5000000000,

View File

@ -1,17 +1,19 @@
//! Bitcoin blocks verification
extern crate db;
extern crate primitives;
extern crate chain;
extern crate serialization;
extern crate byteorder;
extern crate parking_lot;
extern crate linked_hash_map;
extern crate byteorder;
extern crate time;
extern crate script;
#[macro_use]
extern crate log;
extern crate db;
extern crate chain;
extern crate network;
extern crate primitives;
extern crate serialization;
extern crate script;
#[cfg(test)]
extern crate ethcore_devtools as devtools;
#[cfg(test)]

View File

@ -21,10 +21,6 @@ const MAX_TIMESPAN: u32 = TARGET_TIMESPAN_SECONDS * RETARGETING_FACTOR;
// Target number of blocks, 2 weeks, 2016
pub const RETARGETING_INTERVAL: u32 = TARGET_TIMESPAN_SECONDS / TARGET_SPACING_SECONDS;
pub const MAX_NBITS_MAINNET: u32 = 0x1d00ffff;
pub const MAX_NBITS_TESTNET: u32 = 0x1d00ffff;
pub const MAX_NBITS_REGTEST: u32 = 0x207fffff;
pub fn is_retarget_height(height: u32) -> bool {
height % RETARGETING_INTERVAL == 0
}
@ -34,10 +30,10 @@ fn retarget_timespan(retarget_timestamp: u32, last_timestamp: u32) -> u32 {
range_constrain(timespan as u32, MIN_TIMESPAN, MAX_TIMESPAN)
}
pub fn work_required_retarget(retarget_timestamp: u32, last_timestamp: u32, last_nbits: u32) -> u32 {
pub fn work_required_retarget(max_nbits: u32, retarget_timestamp: u32, last_timestamp: u32, last_nbits: u32) -> u32 {
// ignore overflows here
let mut retarget = Compact::new(last_nbits).to_u256().unwrap_or_else(|x| x);
let maximum = Compact::new(MAX_NBITS_MAINNET).to_u256().unwrap_or_else(|x| x);
let maximum = Compact::new(max_nbits).to_u256().unwrap_or_else(|x| x);
// multiplication overflow potential
retarget = retarget * U256::from(retarget_timespan(retarget_timestamp, last_timestamp));
@ -59,8 +55,10 @@ fn range_constrain(value: u32, min: u32, max: u32) -> u32 {
}
/// Simple nbits check that does not require 256-bit arithmetic
pub fn check_nbits(hash: &H256, n_bits: u32) -> bool {
if n_bits > MAX_NBITS_REGTEST { return false; }
pub fn check_nbits(max_nbits: u32, hash: &H256, n_bits: u32) -> bool {
if n_bits > max_nbits {
return false;
}
let hash_bytes: &[u8] = &**hash;
@ -137,6 +135,7 @@ pub fn p2sh_sigops(output: &Script, input_ref: &Script) -> usize {
#[cfg(test)]
mod tests {
use network::Magic;
use super::{block_reward_satoshi, check_nbits};
use hash::H256;
@ -154,29 +153,31 @@ mod tests {
#[test]
fn nbits() {
let max_nbits = Magic::Regtest.max_nbits();
// strictly equal
let hash = H256::from_reversed_str("00000000000000001bc330000000000000000000000000000000000000000000");
let nbits = 0x181bc330u32;
assert!(check_nbits(&hash, nbits));
assert!(check_nbits(max_nbits, &hash, nbits));
// nbits match but not equal (greater)
let hash = H256::from_reversed_str("00000000000000001bc330000000000000000000000000000000000000000001");
let nbits = 0x181bc330u32;
assert!(!check_nbits(&hash, nbits));
assert!(!check_nbits(max_nbits, &hash, nbits));
// greater
let hash = H256::from_reversed_str("ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff");
let nbits = 0x181bc330u32;
assert!(!check_nbits(&hash, nbits));
assert!(!check_nbits(max_nbits, &hash, nbits));
// some real examples
let hash = H256::from_reversed_str("000000000000000001f942eb4bfa0aeccb6a14c268f4c72d5fff17270da771b9");
let nbits = 404129525;
assert!(check_nbits(&hash, nbits));
assert!(check_nbits(max_nbits, &hash, nbits));
let hash = H256::from_reversed_str("00000000000000000e753ef636075711efd2cbf5a8473c7c5b67755a3701e0c2");
let nbits = 404129525;
assert!(check_nbits(&hash, nbits));
assert!(check_nbits(max_nbits, &hash, nbits));
}
}