diff --git a/.travis.yml b/.travis.yml
index 756d5b33..1d1e8535 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -26,6 +26,7 @@ addons:
       - g++-4.8
 script:
   - ./tools/test.sh
+  - ./tools/bench.sh
 after_success: |
   [ false ] &&
   [ $TRAVIS_BRANCH = master ] &&
diff --git a/Cargo.lock b/Cargo.lock
index 693c51c6..d5a030fa 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -64,6 +64,19 @@ name = "base58"
 version = "0.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "bencher"
+version = "0.1.0"
+dependencies = [
+ "chain 0.1.0",
+ "db 0.1.0",
+ "ethcore-devtools 1.3.0",
+ "primitives 0.1.0",
+ "test-data 0.1.0",
+ "time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
+ "verification 0.1.0",
+]
+
 [[package]]
 name = "bit-vec"
 version = "0.4.3"
@@ -482,6 +495,7 @@ name = "pbtc"
 version = "0.1.0"
 dependencies = [
  "app_dirs 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bencher 0.1.0",
  "chain 0.1.0",
  "clap 2.18.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "db 0.1.0",
diff --git a/Cargo.toml b/Cargo.toml
index 6a16dd68..07eb1989 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -20,6 +20,7 @@ db = { path = "db" }
 verification = { path = "verification" }
 sync = { path = "sync" }
 import = { path = "import" }
+bencher = { path = "bencher" }
 
 [[bin]]
 path = "pbtc/main.rs"
diff --git a/bencher/Cargo.toml b/bencher/Cargo.toml
new file mode 100644
index 00000000..0aec1566
--- /dev/null
+++ b/bencher/Cargo.toml
@@ -0,0 +1,19 @@
+[package]
+name = "bencher"
+version = "0.1.0"
+license = "GPL-3.0"
+authors = ["Ethcore "]
+description = "Parity bitcoin client."
+
+[dependencies]
+db = { path = "../db" }
+verification = { path = "../verification" }
+chain = { path = "../chain" }
+primitives = { path = "../primitives" }
+ethcore-devtools = { path = "../devtools" }
+test-data = { path = "../test-data" }
+time = "*"
+
+[[bin]]
+path = "src/main.rs"
+name = "bencher"
diff --git a/bencher/src/database.rs b/bencher/src/database.rs
new file mode 100644
index 00000000..48bc9c4c
--- /dev/null
+++ b/bencher/src/database.rs
@@ -0,0 +1,226 @@
+use devtools::RandomTempPath;
+use db::{Storage, BlockStapler, BlockProvider, BlockRef, BlockInsertedChain};
+use test_data;
+
+use super::Benchmark;
+
+pub fn fetch(benchmark: &mut Benchmark) {
+	// params
+	const BLOCKS: usize = 1000;
+
+	benchmark.samples(BLOCKS);
+
+	// test setup
+	let path = RandomTempPath::create_dir();
+	let store = Storage::new(path.as_path()).unwrap();
+
+	let genesis = test_data::genesis();
+	store.insert_block(&genesis).unwrap();
+
+	let mut rolling_hash = genesis.hash();
+	let mut blocks = Vec::new();
+	let mut hashes = Vec::new();
+
+	for x in 0..BLOCKS {
+		let next_block = test_data::block_builder()
+			.transaction()
+				.coinbase()
+				.output().value(5000000000).build()
+				.build()
+			.merkled_header().parent(rolling_hash.clone()).nonce(x as u32).build()
+			.build();
+		rolling_hash = next_block.hash();
+		blocks.push(next_block);
+		hashes.push(rolling_hash.clone());
+	}
+
+	for block in blocks.iter() { store.insert_block(block).unwrap(); }
+
+	// bench
+	benchmark.start();
+	for _ in 0..BLOCKS {
+		let block = store.block(BlockRef::Hash(hashes[0].clone())).unwrap();
+		assert_eq!(&block.hash(), &hashes[0]);
+	}
+	benchmark.stop();
+}
+
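+// The blocks are generated during setup, so the timed section below measures
+// only the `insert_block` calls.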
+pub fn write(benchmark: &mut Benchmark) {
+	// params
+	const BLOCKS: usize = 1000;
+	benchmark.samples(BLOCKS);
+
+	// setup
+	let path = RandomTempPath::create_dir();
+	let store = Storage::new(path.as_path()).unwrap();
+
+	let genesis = test_data::genesis();
+	store.insert_block(&genesis).unwrap();
+
+	let mut rolling_hash = genesis.hash();
+
+	let mut blocks = Vec::new();
+
+	for x in 0..BLOCKS {
+		let next_block = test_data::block_builder()
+			.transaction()
+				.coinbase()
+				.output().value(5000000000).build()
+				.build()
+			.merkled_header().parent(rolling_hash.clone()).nonce(x as u32).build()
+			.build();
+		rolling_hash = next_block.hash();
+		blocks.push(next_block);
+	}
+
+	// bench
+	benchmark.start();
+	for idx in 0..BLOCKS {
+		store.insert_block(&blocks[idx]).unwrap();
+	}
+	benchmark.stop();
+}
+
+pub fn reorg_short(benchmark: &mut Benchmark) {
+	// params
+	const BLOCKS: usize = 1000;
+	benchmark.samples(BLOCKS);
+
+	// setup
+	let path = RandomTempPath::create_dir();
+	let store = Storage::new(path.as_path()).unwrap();
+
+	let genesis = test_data::genesis();
+	store.insert_block(&genesis).unwrap();
+
+	let mut rolling_hash = genesis.hash();
+
+	let mut blocks = Vec::new();
+
+	for x in 0..BLOCKS {
+		let base = rolling_hash.clone();
+
+		let next_block = test_data::block_builder()
+			.transaction()
+				.coinbase()
+				.output().value(5000000000).build()
+				.build()
+			.merkled_header().parent(rolling_hash.clone()).nonce(x as u32 * 4).build()
+			.build();
+		rolling_hash = next_block.hash();
+		blocks.push(next_block);
+
+		let next_block_side = test_data::block_builder()
+			.transaction()
+				.coinbase()
+				.output().value(5000000000).build()
+				.build()
+			.merkled_header().parent(base).nonce(x as u32 * 4 + 2).build()
+			.build();
+		let next_base = next_block_side.hash();
+		blocks.push(next_block_side);
+
+		let next_block_side_continue = test_data::block_builder()
+			.transaction()
+				.coinbase()
+				.output().value(5000000000).build()
+				.build()
+			.merkled_header().parent(next_base).nonce(x as u32 * 4 + 3).build()
+			.build();
+		blocks.push(next_block_side_continue);
+
+		let next_block_continue = test_data::block_builder()
+			.transaction()
+				.coinbase()
+				.output().value(5000000000).build()
+				.build()
+			.merkled_header().parent(rolling_hash.clone()).nonce(x as u32 * 4 + 1).build()
+			.build();
+		rolling_hash = next_block_continue.hash();
+		blocks.push(next_block_continue);
+	}
+
+	let mut total: usize = 0;
+	let mut reorgs: usize = 0;
+
+	// bench
+	benchmark.start();
+	for idx in 0..BLOCKS {
+		total += 1;
+		if let BlockInsertedChain::Reorganized(_) = store.insert_block(&blocks[idx]).unwrap() {
+			reorgs += 1;
+		}
+	}
+	benchmark.stop();
+
+	// Blocks are generated at a rate of 4 per iteration (2 on the main branch, 2 on a
+	// competing side branch), and inserting them triggers two reorganizations per
+	// iteration, except for the first iteration which triggers only one.
+	// With 1000 blocks inserted (250 iterations) that gives reorgs = total/2 - 1 = 499.
+	assert_eq!(1000, total);
+	assert_eq!(499, reorgs);
+}
+
+// 1. write 12000 blocks
+// 2. write 100 blocks with 100 transactions each, spending outputs from the first 10,000 blocks
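+// Only the 100 transaction-heavy blocks from step 2 are inserted inside the
+// timed section; the 12,000 plain blocks from step 1 are written during setup.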
+pub fn write_heavy(benchmark: &mut Benchmark) {
+	// params
+	const BLOCKS_INITIAL: usize = 12000;
+	const BLOCKS: usize = 100;
+	const TRANSACTIONS: usize = 100;
+
+	benchmark.samples(BLOCKS);
+
+	// test setup
+	let path = RandomTempPath::create_dir();
+	let store = Storage::new(path.as_path()).unwrap();
+
+	let genesis = test_data::genesis();
+	store.insert_block(&genesis).unwrap();
+
+	let mut rolling_hash = genesis.hash();
+	let mut blocks = Vec::new();
+	let mut hashes = Vec::new();
+
+	for x in 0..BLOCKS_INITIAL {
+		let next_block = test_data::block_builder()
+			.transaction()
+				.coinbase()
+				.output().value(5000000000).build()
+				.build()
+			.merkled_header().parent(rolling_hash.clone()).nonce(x as u32).build()
+			.build();
+		rolling_hash = next_block.hash();
+		blocks.push(next_block);
+		hashes.push(rolling_hash.clone());
+	}
+
+	for b in 0..BLOCKS {
+		let mut builder = test_data::block_builder()
+			.transaction().coinbase().build();
+
+		for t in 0..TRANSACTIONS {
+			builder = builder.transaction()
+				.input().hash(blocks[b*TRANSACTIONS+t].transactions()[0].hash()).build() // default output index is 0, which is fine here
+				.output().value(1000).build()
+				.build();
+		}
+
+		let next_block = builder.merkled_header().parent(rolling_hash).build().build();
+
+		rolling_hash = next_block.hash();
+		blocks.push(next_block);
+		hashes.push(rolling_hash.clone());
+	}
+
+	for block in blocks[..BLOCKS_INITIAL].iter() { store.insert_block(block).unwrap(); }
+
+	// bench
+	benchmark.start();
+	for block in blocks[BLOCKS_INITIAL..].iter() { store.insert_block(block).unwrap(); }
+	benchmark.stop();
+}
diff --git a/bencher/src/main.rs b/bencher/src/main.rs
new file mode 100644
index 00000000..e37f119e
--- /dev/null
+++ b/bencher/src/main.rs
@@ -0,0 +1,73 @@
+extern crate db;
+extern crate chain;
+extern crate ethcore_devtools as devtools;
+extern crate test_data;
+extern crate time;
+
+mod database;
+
+use time::{PreciseTime, Duration};
+use std::io::Write;
+use std::str;
+
+#[derive(Default)]
+pub struct Benchmark {
+	start: Option<PreciseTime>,
+	end: Option<PreciseTime>,
+	samples: Option<usize>,
+}
+
+impl Benchmark {
+	pub fn start(&mut self) {
+		self.start = Some(PreciseTime::now());
+	}
+
+	pub fn stop(&mut self) {
+		self.end = Some(PreciseTime::now());
+	}
+
+	pub fn evaluate(&self) -> Duration {
+		self.start.expect("benchmark never started").to(self.end.expect("benchmark never ended"))
+	}
+
+	pub fn samples(&mut self, samples: usize) {
+		self.samples = Some(samples);
+	}
+}
+
+fn decimal_mark(s: String) -> String {
+	let bytes: Vec<_> = s.bytes().rev().collect();
+	let chunks: Vec<_> = bytes.chunks(3).map(|chunk| str::from_utf8(chunk).unwrap()).collect();
+	let result: Vec<_> = chunks.join(",").bytes().rev().collect();
+	String::from_utf8(result).unwrap()
+}
+
+fn run_benchmark<F>(name: &str, f: F) where F: FnOnce(&mut Benchmark) {
+	print!("{}: ", name);
+	::std::io::stdout().flush().unwrap();
+
+	let mut benchmark = Benchmark::default();
+	f(&mut benchmark);
+	if let Some(samples) = benchmark.samples {
+		println!("{} ns/sample",
+			decimal_mark(format!("{}", benchmark.evaluate().num_nanoseconds().unwrap() / samples as i64)),
+		);
+	}
+	else {
+		println!("{} ns", decimal_mark(format!("{}", benchmark.evaluate().num_nanoseconds().unwrap())));
+	}
+}
+
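+// The macro below expands `benchmark!(database::fetch)` into
+// `run_benchmark(stringify!(database::fetch), database::fetch)`; each run then
+// prints one line per benchmark, e.g. "<name>: 1,234,567 ns/sample", with the
+// digit grouping provided by `decimal_mark` ("1234567" -> "1,234,567").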
+macro_rules! benchmark {
+	($t:expr) => {
+		run_benchmark(stringify!($t), $t);
+	};
+}
+
+fn main() {
+	benchmark!(database::fetch);
+	benchmark!(database::write);
+	benchmark!(database::reorg_short);
+	benchmark!(database::write_heavy);
+}
diff --git a/tools/bench.sh b/tools/bench.sh
new file mode 100755
index 00000000..30a63ef8
--- /dev/null
+++ b/tools/bench.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+cargo run --manifest-path ./bencher/Cargo.toml --release