Merge pull request #51 from paritytech/pzec_cleanup

Cleanup some code + documentation
This commit is contained in:
Svyatoslav Nikolsky 2019-03-12 15:58:52 +03:00 committed by GitHub
commit 84b4d6687b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
16 changed files with 46 additions and 1174 deletions

View File

@ -38,7 +38,7 @@ Get the peer count.
### Blockchain
The Parity-bitcoin `blockchain` data interface.
The Parity Zcash `blockchain` data interface.
#### getbestblockhash
@ -84,7 +84,7 @@ Get statistics about the unspent transaction output set.
### Miner
The Parity-bitcoin `miner` data interface.
The Parity Zcash `miner` data interface.
#### getblocktemplate
@ -94,7 +94,7 @@ Get block template for mining.
### Raw
The Parity-bitcoin `raw` data interface.
The Parity Zcash `raw` data interface.
#### getrawtransaction

View File

@ -101,7 +101,7 @@ cargo test --all
By default parity connects to Zcash seednodes. Full list is [here](./pzec/seednodes.rs).
To start syncing the main network, just start the client, passing selected fork flag. For example:
To start syncing the main network, just start the client without any arguments:
```
./target/release/pzec
@ -121,17 +121,19 @@ To not print any syncing progress add `--quiet` flag:
## Importing zcashd database
It it is possible to import existing `bitcoind` database:
It is possible to import an existing `zcashd` database:
```
# where $BITCOIND_DB is path to your bitcoind database, e.g., "/Users/user/Library/Application Support"
./target/release/pzec import "$BITCOIND_DB/Bitcoin/blocks"
# where $ZCASH_DB is path to your zcashd database. By default:
# on macOS: "/Users/user/Library/Application Support/Zcash"
# on Linux: "~/.zcash"
./target/release/pzec import "$ZCASH_DB/blocks"
```
By default import verifies imported the blocks. You can disable this, by adding `--verification-level==none` flag.
By default, import verifies the imported blocks. You can disable this by adding the `--verification-level=none` flag.
```
./target/release/pzec import "#BITCOIND_DB/Bitcoin/blocks" --verification-level==none
./target/release/pzec import "$ZCASH_DB/blocks" --verification-level=none
```
## Command line interface

View File

@ -24,7 +24,7 @@ use kv::{
use storage::{
BlockRef, Error, BlockHeaderProvider, BlockProvider, BlockOrigin, TransactionMeta, IndexedBlockProvider,
TransactionMetaProvider, TransactionProvider, TransactionOutputProvider, BlockChain, Store,
SideChainOrigin, ForkChain, Forkable, CanonStore, ConfigStore, BestBlock, NullifierTracker,
SideChainOrigin, ForkChain, Forkable, CanonStore, BestBlock, NullifierTracker,
EpochTag, EpochRef, SproutTreeState, SaplingTreeState, TreeStateProvider,
};
@ -733,23 +733,3 @@ impl<T> Store for BlockChainDatabase<T> where T: KeyValueDatabase {
self.block_header(self.best_block().hash.into()).expect("best block header should be in db; qed")
}
}
impl<T> ConfigStore for BlockChainDatabase<T> where T: KeyValueDatabase {
fn consensus_fork(&self) -> Result<Option<String>, Error> {
match self.db.get(&Key::Configuration("consensus_fork"))
.map(KeyState::into_option)
.map(|x| x.and_then(Value::as_configuration)) {
Ok(Some(consensus_fork)) => String::from_utf8(consensus_fork.into())
.map_err(|e| Error::DatabaseError(format!("{}", e)))
.map(Some),
Ok(None) => Ok(None),
Err(e) => Err(Error::DatabaseError(e.into())),
}
}
fn set_consensus_fork(&self, consensus_fork: &str) -> Result<(), Error> {
let mut update = DBTransaction::new();
update.insert(KeyValue::Configuration("consensus_fork", consensus_fork.as_bytes().into()));
self.db.write(update).map_err(Error::DatabaseError)
}
}

View File

@ -139,8 +139,6 @@ struct FittingTransactionsIterator<'a, T> {
block_height: u32,
/// New block time
block_time: u32,
/// Are OP_CHECKDATASIG && OP_CHECKDATASIGVERIFY enabled for this block.
checkdatasig_active: bool,
/// Size policy decides if transactions size fits the block
block_size: SizePolicy,
/// Sigops policy decides if transactions sigops fits the block
@ -161,14 +159,12 @@ impl<'a, T> FittingTransactionsIterator<'a, T> where T: Iterator<Item = &'a Entr
max_block_sigops: u32,
block_height: u32,
block_time: u32,
checkdatasig_active: bool,
) -> Self {
FittingTransactionsIterator {
store: store,
iter: iter,
block_height: block_height,
block_time: block_time,
checkdatasig_active,
// reserve some space for header and transactions len field
block_size: SizePolicy::new(BLOCK_HEADER_SIZE + 4, max_block_size, 1_000, 50),
sigops: SizePolicy::new(0, max_block_sigops, 8, 50),
@ -210,7 +206,7 @@ impl<'a, T> Iterator for FittingTransactionsIterator<'a, T> where T: Iterator<It
let transaction_size = entry.size as u32;
let bip16_active = true;
let sigops_count = transaction_sigops(&entry.transaction, self, bip16_active, self.checkdatasig_active) as u32;
let sigops_count = transaction_sigops(&entry.transaction, self, bip16_active) as u32;
let size_step = self.block_size.decide(transaction_size);
let sigops_step = self.sigops.decide(sigops_count);
@ -284,8 +280,7 @@ impl<'a> BlockAssembler<'a> {
self.max_block_size,
self.max_block_sigops,
height,
time,
false);
time);
for entry in tx_iter {
// miner_fee is i64, but we can safely cast it to u64
// memory pool should restrict miner fee to be positive

View File

@ -81,7 +81,11 @@ impl Network {
}
pub fn default_verification_edge(&self) -> H256 {
self.genesis_block().hash()
match *self {
// block #410100, best checkpoint of zcashd as of 12.03.2019
Network::Mainnet => H256::from_reversed_str("0000000002c565958f783a24a4ac17cde898ff525e75ed9baf66861b0b9fcada"),
_ => self.genesis_block().hash(),
}
}
}

View File

@ -59,42 +59,6 @@ pub struct VerificationFlags {
///
/// See BIP112 for details
pub verify_checksequence: bool,
/// Support OP_CAT opcode
pub verify_concat: bool,
/// Support OP_SPLIT opcode
///
/// This opcode replaces OP_SUBSTR => enabling both OP_SPLIT && OP_SUBSTR would be an error
pub verify_split: bool,
/// Support OP_AND opcode
pub verify_and: bool,
/// Support OP_OR opcode
pub verify_or: bool,
/// Support OP_XOR opcode
pub verify_xor: bool,
/// Support OP_DIV opcode
pub verify_div: bool,
/// Support OP_MOD opcode
pub verify_mod: bool,
/// Support OP_BIN2NUM opcode
///
/// This opcode replaces OP_RIGHT => enabling both OP_BIN2NUM && OP_RIGHT would be an error
pub verify_bin2num: bool,
/// Support OP_NUM2BIN opcode
///
/// This opcode replaces OP_LEFT => enabling both OP_NUM2BIN && OP_LEFT would be an error
pub verify_num2bin: bool,
/// Support OP_CHECKDATASIG and OP_CHECKDATASIGVERIFY opcodes.
pub verify_checkdatasig: bool,
}
impl VerificationFlags {
@ -138,53 +102,4 @@ impl VerificationFlags {
self
}
pub fn verify_concat(mut self, value: bool) -> Self {
self.verify_concat = value;
self
}
pub fn verify_split(mut self, value: bool) -> Self {
self.verify_split = value;
self
}
pub fn verify_and(mut self, value: bool) -> Self {
self.verify_and = value;
self
}
pub fn verify_or(mut self, value: bool) -> Self {
self.verify_or = value;
self
}
pub fn verify_xor(mut self, value: bool) -> Self {
self.verify_xor = value;
self
}
pub fn verify_div(mut self, value: bool) -> Self {
self.verify_div = value;
self
}
pub fn verify_mod(mut self, value: bool) -> Self {
self.verify_mod = value;
self
}
pub fn verify_bin2num(mut self, value: bool) -> Self {
self.verify_bin2num = value;
self
}
pub fn verify_num2bin(mut self, value: bool) -> Self {
self.verify_num2bin = value;
self
}
pub fn verify_checkdatasig(mut self, value: bool) -> Self {
self.verify_checkdatasig = value;
self
}
}

File diff suppressed because it is too large Load Diff

View File

@ -1,6 +1,5 @@
//! Script opcodes.
use std::fmt;
use flags::VerificationFlags;
/// Script opcodes.
#[repr(u8)]
@ -213,10 +212,6 @@ pub enum Opcode {
OP_NOP8 = 0xb7,
OP_NOP9 = 0xb8,
OP_NOP10 = 0xb9,
// BCH crypto
OP_CHECKDATASIG = 0xba,
OP_CHECKDATASIGVERIFY = 0xbb,
}
impl fmt::Display for Opcode {
@ -434,27 +429,15 @@ impl Opcode {
0xb8 => Some(OP_NOP9),
0xb9 => Some(OP_NOP10),
// BCH crypto
0xba => Some(OP_CHECKDATASIG),
0xbb => Some(OP_CHECKDATASIGVERIFY),
_ => None,
}
}
pub fn is_disabled(&self, flags: &VerificationFlags) -> bool {
pub fn is_disabled(&self) -> bool {
use self::Opcode::*;
match *self {
OP_CAT if !flags.verify_concat => true,
OP_SUBSTR if !flags.verify_split => true,
OP_AND if !flags.verify_and => true,
OP_OR if !flags.verify_or => true,
OP_XOR if !flags.verify_xor => true,
OP_DIV if !flags.verify_div => true,
OP_MOD if !flags.verify_mod => true,
OP_RIGHT if !flags.verify_bin2num => true,
OP_LEFT if !flags.verify_num2bin => true,
OP_INVERT | OP_2MUL | OP_2DIV |
OP_CAT | OP_SUBSTR | OP_AND | OP_OR | OP_XOR | OP_DIV |
OP_MOD | OP_RIGHT | OP_LEFT | OP_INVERT | OP_2MUL | OP_2DIV |
OP_MUL | OP_LSHIFT | OP_RSHIFT => true,
_ => false,
}
@ -695,9 +678,5 @@ mod tests {
assert_eq!(Opcode::OP_NOP8, Opcode::from_u8(Opcode::OP_NOP8 as u8).unwrap());
assert_eq!(Opcode::OP_NOP9, Opcode::from_u8(Opcode::OP_NOP9 as u8).unwrap());
assert_eq!(Opcode::OP_NOP10, Opcode::from_u8(Opcode::OP_NOP10 as u8).unwrap());
// BCH crypto
assert_eq!(Opcode::OP_CHECKDATASIG, Opcode::from_u8(Opcode::OP_CHECKDATASIG as u8).unwrap());
assert_eq!(Opcode::OP_CHECKDATASIGVERIFY, Opcode::from_u8(Opcode::OP_CHECKDATASIGVERIFY as u8).unwrap());
}
}

View File

@ -286,7 +286,7 @@ impl Script {
Opcodes { position: 0, script: self }
}
pub fn sigops_count(&self, checkdatasig_active: bool, serialized_script: bool) -> usize {
pub fn sigops_count(&self, serialized_script: bool) -> usize {
let mut last_opcode = Opcode::OP_0;
let mut total = 0;
for opcode in self.opcodes() {
@ -300,9 +300,6 @@ impl Script {
Opcode::OP_CHECKSIG | Opcode::OP_CHECKSIGVERIFY => {
total += 1;
},
Opcode::OP_CHECKDATASIG | Opcode::OP_CHECKDATASIGVERIFY if checkdatasig_active => {
total += 1;
},
Opcode::OP_CHECKMULTISIG | Opcode::OP_CHECKMULTISIGVERIFY => {
if serialized_script && last_opcode.is_within_op_n() {
total += last_opcode.decode_op_n() as usize;
@ -370,7 +367,7 @@ impl Script {
}
}
pub fn pay_to_script_hash_sigops(&self, checkdatasig_active: bool, prev_out: &Script) -> usize {
pub fn pay_to_script_hash_sigops(&self, prev_out: &Script) -> usize {
if !prev_out.is_pay_to_script_hash() {
return 0;
}
@ -386,7 +383,7 @@ impl Script {
.to_vec()
.into();
script.sigops_count(checkdatasig_active, true)
script.sigops_count(true)
}
}
@ -550,11 +547,11 @@ OP_ADD
#[test]
fn test_sigops_count() {
assert_eq!(1usize, Script::from("76a914aab76ba4877d696590d94ea3e02948b55294815188ac").sigops_count(false, false));
assert_eq!(2usize, Script::from("522102004525da5546e7603eefad5ef971e82f7dad2272b34e6b3036ab1fe3d299c22f21037d7f2227e6c646707d1c61ecceb821794124363a2cf2c1d2a6f28cf01e5d6abe52ae").sigops_count(false, true));
assert_eq!(20usize, Script::from("522102004525da5546e7603eefad5ef971e82f7dad2272b34e6b3036ab1fe3d299c22f21037d7f2227e6c646707d1c61ecceb821794124363a2cf2c1d2a6f28cf01e5d6abe52ae").sigops_count(false, false));
assert_eq!(0usize, Script::from("a9146262b64aec1f4a4c1d21b32e9c2811dd2171fd7587").sigops_count(false, false));
assert_eq!(1usize, Script::from("4104ae1a62fe09c5f51b13905f07f06b99a2f7159b2225f374cd378d71302fa28414e7aab37397f554a7df5f142c21c1b7303b8a0626f1baded5c72a704f7e6cd84cac").sigops_count(false, false));
assert_eq!(1usize, Script::from("76a914aab76ba4877d696590d94ea3e02948b55294815188ac").sigops_count(false));
assert_eq!(2usize, Script::from("522102004525da5546e7603eefad5ef971e82f7dad2272b34e6b3036ab1fe3d299c22f21037d7f2227e6c646707d1c61ecceb821794124363a2cf2c1d2a6f28cf01e5d6abe52ae").sigops_count(true));
assert_eq!(20usize, Script::from("522102004525da5546e7603eefad5ef971e82f7dad2272b34e6b3036ab1fe3d299c22f21037d7f2227e6c646707d1c61ecceb821794124363a2cf2c1d2a6f28cf01e5d6abe52ae").sigops_count(false));
assert_eq!(0usize, Script::from("a9146262b64aec1f4a4c1d21b32e9c2811dd2171fd7587").sigops_count(false));
assert_eq!(1usize, Script::from("4104ae1a62fe09c5f51b13905f07f06b99a2f7159b2225f374cd378d71302fa28414e7aab37397f554a7df5f142c21c1b7303b8a0626f1baded5c72a704f7e6cd84cac").sigops_count(false));
}
#[test]
@ -569,7 +566,7 @@ OP_ADD
script[max_block_sigops - block_sigops + 3] = (overmax >> 16) as u8;
script[max_block_sigops - block_sigops + 4] = (overmax >> 24) as u8;
let script: Script = script.into();
assert_eq!(script.sigops_count(false, false), 20001);
assert_eq!(script.sigops_count(false), 20001);
}
#[test]
@ -583,7 +580,7 @@ OP_ADD
script[max_block_sigops - block_sigops + 4] = 0xff;
script[max_block_sigops - block_sigops + 5] = 0xff;
let script: Script = script.into();
assert_eq!(script.sigops_count(false, false), 20001);
assert_eq!(script.sigops_count(false), 20001);
}
@ -677,14 +674,4 @@ OP_ADD
assert_eq!(script.script_type(), ScriptType::ScriptHash);
assert_eq!(script.num_signatures_required(), 1);
}
#[test]
fn test_num_signatures_with_checkdatasig() {
let script = Builder::default().push_opcode(Opcode::OP_CHECKDATASIG).into_script();
assert_eq!(script.sigops_count(false, false), 0);
assert_eq!(script.sigops_count(true, false), 1);
let script = Builder::default().push_opcode(Opcode::OP_CHECKDATASIGVERIFY).into_script();
assert_eq!(script.sigops_count(false, false), 0);
assert_eq!(script.sigops_count(true, false), 1);
}
}

View File

@ -40,7 +40,7 @@ pub use block_provider::{BlockHeaderProvider, BlockProvider, IndexedBlockProvide
pub use block_ref::BlockRef;
pub use duplex_store::{DuplexTransactionOutputProvider, NoopStore};
pub use error::Error;
pub use store::{AsSubstore, Store, SharedStore, CanonStore, ConfigStore};
pub use store::{AsSubstore, Store, SharedStore, CanonStore};
pub use transaction_meta::TransactionMeta;
pub use transaction_provider::{TransactionProvider, TransactionOutputProvider, TransactionMetaProvider};
pub use nullifier_tracker::NullifierTracker;

View File

@ -2,23 +2,14 @@ use std::sync::Arc;
use chain::BlockHeader;
use {
BestBlock, BlockProvider, BlockHeaderProvider, TransactionProvider, TransactionMetaProvider,
TransactionOutputProvider, BlockChain, IndexedBlockProvider, Forkable, Error, NullifierTracker,
TransactionOutputProvider, BlockChain, IndexedBlockProvider, Forkable, NullifierTracker,
TreeStateProvider,
};
pub trait CanonStore: Store + Forkable + ConfigStore {
pub trait CanonStore: Store + Forkable {
fn as_store(&self) -> &Store;
}
/// Configuration storage interface
pub trait ConfigStore {
/// get consensus_fork this database is configured for
fn consensus_fork(&self) -> Result<Option<String>, Error>;
/// set consensus_fork this database is configured for
fn set_consensus_fork(&self, consensus_fork: &str) -> Result<(), Error>;
}
/// Blockchain storage interface
pub trait Store: AsSubstore {
/// get best block

View File

@ -115,7 +115,6 @@ pub struct BlockSigops<'a> {
block: CanonBlock<'a>,
store: &'a TransactionOutputProvider,
bip16_active: bool,
checkdatasig_active: bool,
max_block_sigops: usize,
}
@ -126,13 +125,11 @@ impl<'a> BlockSigops<'a> {
consensus: &'a ConsensusParams,
) -> Self {
let bip16_active = block.header.raw.time >= consensus.bip16_time;
let checkdatasig_active = false;
BlockSigops {
block: block,
store: store,
bip16_active,
checkdatasig_active,
max_block_sigops: consensus.max_block_sigops(),
}
}
@ -140,7 +137,7 @@ impl<'a> BlockSigops<'a> {
fn check(&self) -> Result<(), Error> {
let store = DuplexTransactionOutputProvider::new(self.store, &*self.block);
let sigops = self.block.transactions.iter()
.map(|tx| transaction_sigops(&tx.raw, &store, self.bip16_active, self.checkdatasig_active))
.map(|tx| transaction_sigops(&tx.raw, &store, self.bip16_active))
.fold(0, |acc, tx_sigops| (acc + tx_sigops));
if sigops > self.max_block_sigops {

View File

@ -313,8 +313,7 @@ impl<'a> TransactionSigops<'a> {
fn check(&self) -> Result<(), TransactionError> {
let bip16_active = self.time >= self.consensus_params.bip16_time;
let checkdatasig_active = false;
let sigops = transaction_sigops(&self.transaction.raw, &self.store, bip16_active, checkdatasig_active);
let sigops = transaction_sigops(&self.transaction.raw, &self.store, bip16_active);
if sigops > self.max_sigops {
Err(TransactionError::MaxSigops)
} else {
@ -333,8 +332,6 @@ pub struct TransactionEval<'a> {
verify_checksequence: bool,
verify_dersig: bool,
verify_nulldummy: bool,
verify_monolith_opcodes: bool,
verify_magnetic_anomaly_opcodes: bool,
verify_sigpushonly: bool,
verify_cleanstack: bool,
consensus_branch_id: u32,
@ -354,12 +351,10 @@ impl<'a> TransactionEval<'a> {
let verify_strictenc = false;
let verify_locktime = height >= params.bip65_height;
let verify_dersig = height >= params.bip66_height;
let verify_monolith_opcodes = false;
let verify_magnetic_anomaly_opcodes = false;
let verify_checksequence = deployments.csv();
let verify_sigpushonly = verify_magnetic_anomaly_opcodes;
let verify_cleanstack = verify_magnetic_anomaly_opcodes;
let verify_sigpushonly = false;
let verify_cleanstack = false;
let consensus_branch_id = params.consensus_branch_id(height);
@ -373,8 +368,6 @@ impl<'a> TransactionEval<'a> {
verify_checksequence: verify_checksequence,
verify_dersig: verify_dersig,
verify_nulldummy: false,
verify_monolith_opcodes: verify_monolith_opcodes,
verify_magnetic_anomaly_opcodes: verify_magnetic_anomaly_opcodes,
verify_sigpushonly: verify_sigpushonly,
verify_cleanstack: verify_cleanstack,
consensus_branch_id: consensus_branch_id,
@ -435,16 +428,6 @@ impl<'a> TransactionEval<'a> {
.verify_checksequence(self.verify_checksequence)
.verify_dersig(self.verify_dersig)
.verify_nulldummy(self.verify_nulldummy)
.verify_concat(self.verify_monolith_opcodes)
.verify_split(self.verify_monolith_opcodes)
.verify_and(self.verify_monolith_opcodes)
.verify_or(self.verify_monolith_opcodes)
.verify_xor(self.verify_monolith_opcodes)
.verify_div(self.verify_monolith_opcodes)
.verify_mod(self.verify_monolith_opcodes)
.verify_bin2num(self.verify_monolith_opcodes)
.verify_num2bin(self.verify_monolith_opcodes)
.verify_checkdatasig(self.verify_magnetic_anomaly_opcodes)
.verify_sigpushonly(self.verify_sigpushonly)
.verify_cleanstack(self.verify_cleanstack);

View File

@ -10,11 +10,10 @@ pub fn transaction_sigops(
transaction: &Transaction,
store: &TransactionOutputProvider,
bip16_active: bool,
checkdatasig_active: bool,
) -> usize {
let output_sigops: usize = transaction.outputs.iter().map(|output| {
let output_script: Script = output.script_pubkey.clone().into();
output_script.sigops_count(checkdatasig_active, false)
output_script.sigops_count(false)
}).sum();
// TODO: bitcoin/bitcoin also includes input_sigops here
@ -27,14 +26,14 @@ pub fn transaction_sigops(
for input in &transaction.inputs {
let input_script: Script = input.script_sig.clone().into();
input_sigops += input_script.sigops_count(checkdatasig_active, false);
input_sigops += input_script.sigops_count(false);
if bip16_active {
let previous_output = match store.transaction_output(&input.previous_output, usize::max_value()) {
Some(output) => output,
None => continue,
};
let prevout_script: Script = previous_output.script_pubkey.into();
bip16_sigops += input_script.pay_to_script_hash_sigops(checkdatasig_active, &prevout_script);
bip16_sigops += input_script.pay_to_script_hash_sigops(&prevout_script);
}
}

View File

@ -163,7 +163,7 @@ impl<'a> BlockSigops<'a> {
fn check(&self) -> Result<(), Error> {
// We cannot know if bip16 is enabled at this point so we disable it.
let sigops = self.block.transactions.iter()
.map(|tx| transaction_sigops(&tx.raw, &NoopStore, false, false))
.map(|tx| transaction_sigops(&tx.raw, &NoopStore, false))
.sum::<usize>();
if sigops > self.max_sigops {

View File

@ -261,7 +261,7 @@ impl<'a> TransactionSigops<'a> {
}
fn check(&self) -> Result<(), TransactionError> {
let sigops = transaction_sigops(&self.transaction.raw, &NoopStore, false, false);
let sigops = transaction_sigops(&self.transaction.raw, &NoopStore, false);
if sigops > self.max_sigops {
Err(TransactionError::MaxSigops)
} else {