Merge remote-tracking branch 'origin/master' into sprout-groth

This commit is contained in:
NikVolf 2019-04-29 17:41:19 +03:00
commit 8cc14160b1
17 changed files with 479 additions and 474 deletions

4
Cargo.lock generated
View File

@ -174,11 +174,13 @@ dependencies = [
[[package]]
name = "bn"
version = "0.4.4"
source = "git+https://github.com/paritytech/bn#7f6a93623fe1867a5de6e2b9f4196581a3594f84"
source = "git+https://github.com/paritytech/bn#162149011cb30ad4ad417be2cf1c3a4d15575274"
dependencies = [
"byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
"crunchy 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hex 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
]

View File

@ -1,6 +1,5 @@
pub use bn::{Fr, Fq, Fq2, G1, G2, Group, arith::{U256, U512}, AffineG1, AffineG2};
pub use bn::{Fr, Fq, Fq2, G1, G2, Group, arith::{U256, U512}, AffineG1, AffineG2, CurveError};
use bn::pairing;
use std::ops::Neg;
use json::pghr13 as json;
#[derive(Clone)]
@ -46,6 +45,13 @@ pub enum Error {
NotFqMember,
NotFq2Member,
InvalidSignPrefix,
Curve(CurveError),
}
impl From<CurveError> for Error {
fn from(e: CurveError) -> Self {
Error::Curve(e)
}
}
#[derive(Clone)]
@ -63,138 +69,18 @@ pub struct Proof {
impl Proof {
pub fn from_raw(data: &[u8; 296]) -> Result<Self, Error> {
Ok(Proof {
a: g1_from_compressed(&data[0..33])?,
a_prime: g1_from_compressed(&data[33..66])?,
b: g2_from_compressed(&data[66..131])?,
b_prime: g1_from_compressed(&data[131..164])?,
c: g1_from_compressed(&data[164..197])?,
c_prime: g1_from_compressed(&data[197..230])?,
k: g1_from_compressed(&data[230..263])?,
h: g1_from_compressed(&data[263..296])?,
a: G1::from_compressed(&data[0..33])?,
a_prime: G1::from_compressed(&data[33..66])?,
b: G2::from_compressed(&data[66..131])?,
b_prime: G1::from_compressed(&data[131..164])?,
c: G1::from_compressed(&data[164..197])?,
c_prime: G1::from_compressed(&data[197..230])?,
k: G1::from_compressed(&data[230..263])?,
h: G1::from_compressed(&data[263..296])?,
})
}
}
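For orientation, the 296-byte raw proof above is just eight compressed points laid end to end, which is where the slice offsets come from. A small sketch of that layout (the constants are illustrative, not part of the crate):
// PGHR13 proof layout assumed by Proof::from_raw:
// seven compressed G1 points (1 sign byte + 32-byte x coordinate) and one
// compressed G2 point (1 sign byte + 64-byte Fq2 x coordinate), packed back to back.
const G1_COMPRESSED_LEN: usize = 33;
const G2_COMPRESSED_LEN: usize = 65;
// a: 0..33, a': 33..66, b: 66..131 (G2), b': 131..164,
// c: 164..197, c': 197..230, k: 230..263, h: 263..296
const PROOF_LEN: usize = 7 * G1_COMPRESSED_LEN + G2_COMPRESSED_LEN; // = 296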
lazy_static! {
// integer modulus for Fq field
pub static ref FQ: U256 = U256::from([
0x3c208c16d87cfd47,
0x97816a916871ca8d,
0xb85045b68181585d,
0x30644e72e131a029
]);
pub static ref G1_B: Fq = Fq::from_u256(3.into()).expect("3 is a valid field element and static; qed");
pub static ref FQ_MINUS3_DIV4: Fq =
Fq::from_u256(3.into()).expect("3 is a valid field element and static; qed").neg() *
Fq::from_u256(4.into()).expect("4 is a valid field element and static; qed").inverse()
.expect("4 has inverse in Fq and is static; qed");
pub static ref FQ_MINUS1_DIV2: Fq =
Fq::from_u256(1.into()).expect("1 is a valid field element and static; qed").neg() *
Fq::from_u256(2.into()).expect("2 is a valid field element and static; qed").inverse()
.expect("2 has inverse in Fq and is static; qed");
}
// Shanks's algorithm for q ≡ 3 (mod 4)
// (FQ mod 4 = 3)
fn fq_sqrt(a: Fq) -> Option<Fq> {
let a1 = a.pow(*FQ_MINUS3_DIV4);
let a1a = a1 * a;
let a0 = a1 * (a1a);
let mut am1 = *FQ;
am1.sub(&1.into(), &*FQ);
if a0 == Fq::from_u256(am1).unwrap() {
None
} else {
Some(a1a)
}
}
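The comment above is the whole trick: for q ≡ 3 (mod 4), a square root of a quadratic residue costs a single exponentiation. What fq_sqrt computes, spelled out:
a_1 = a^{(q-3)/4}, \qquad a_1 \cdot a = a^{(q+1)/4}
\left(a^{(q+1)/4}\right)^2 = a^{(q+1)/2} = a \cdot a^{(q-1)/2} = a \quad \text{when } a^{(q-1)/2} = 1
a_0 = a_1 \cdot (a_1 \cdot a) = a^{(q-1)/2} = -1 \iff a \text{ is a non-residue, so no root exists}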
fn fq2_to_u512(e: Fq2) -> U512 {
let c0 = e.real().into_u256();
let c1 = e.imaginary().into_u256();
U512::new(&c1, &c0, &FQ)
}
// Algorithm 9 Square root computation over Fq2, with q ≡ 3 (mod 4)
// from https://eprint.iacr.org/2012/685.pdf (Square root computation over even extension fields)
fn fq2_sqrt(a: Fq2) -> Option<Fq2> {
let a1 = a.pow(FQ_MINUS3_DIV4.into_u256());
let a1a = a1 * a;
let alpha = a1 * a1a;
let a0 = alpha.pow(*FQ) * alpha;
if a0 == Fq2::one().neg() {
return None;
}
if alpha == Fq2::one().neg() {
Some(Fq2::i() * a1a)
} else {
let b = (alpha + Fq2::one()).pow(FQ_MINUS1_DIV2.into_u256());
Some(b * a1a)
}
}
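Condensed, the Fq2 case (Algorithm 9) implemented above reads:
a_1 = a^{(q-3)/4}, \qquad \alpha = a_1^2 \cdot a, \qquad a_0 = \alpha^{q+1}
a_0 = -1 \;\Rightarrow\; \text{no square root exists in } \mathbb{F}_{q^2}
\alpha = -1 \;\Rightarrow\; \sqrt{a} = i \cdot a_1 \cdot a
\text{otherwise } \sqrt{a} = (1+\alpha)^{(q-1)/2} \cdot a_1 \cdot a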
fn g1_from_compressed(data: &[u8]) -> Result<G1, Error> {
if data.len() != 33 { return Err(Error::InvalidRawInput); }
let sign = data[0];
let fq = deserialize_fq(&data[1..])?;
let x = fq;
let y_squared = (fq * fq * fq) + *G1_B;
let mut y = fq_sqrt(y_squared).ok_or(Error::InvalidFieldElement)?;
if sign == 2 && y.into_u256().get_bit(0).expect("bit 0 always exist; qed") { y = y.neg(); }
else if sign == 3 && !y.into_u256().get_bit(0).expect("bit 0 always exist; qed") { y = y.neg(); }
else if sign != 3 && sign != 2 {
return Err(Error::InvalidSignPrefix);
}
AffineG1::new(x, y).map_err(|_| Error::InvalidCurvePoint).map(Into::into)
}
fn g2_from_compressed(data: &[u8]) -> Result<G2, Error> {
if data.len() != 65 { return Err(Error::InvalidRawInput); }
let sign = data[0];
let x = deserialize_fq2(&data[1..])?;
let y_squared = (x * x * x) + G2::b();
let y = fq2_sqrt(y_squared).ok_or(Error::InvalidFieldElement)?;
let y_neg = -y;
let y_gt = fq2_to_u512(y) > fq2_to_u512(y_neg);
let e_y = if sign == 10 { if y_gt { y_neg } else { y } }
else if sign == 11 { if y_gt { y } else { y_neg } }
else {
return Err(Error::InvalidSignPrefix);
};
AffineG2::new(x, e_y).map_err(|_| Error::InvalidCurvePoint).map(Into::into)
}
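Both decompressors recover y from the curve equation and use the prefix byte only to pick between the two roots ±y; the conventions below are read off the code above:
G1: y^2 = x^3 + 3, prefix 0x02 selects the root whose low bit is even, 0x03 the odd one.
G2: y^2 = x^3 + b, prefix 0x0a selects the lexicographically smaller of {y, -y} (compared via fq2_to_u512), 0x0b the larger.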
fn deserialize_fq(data: &[u8]) -> Result<Fq, Error> {
let u256 = U256::from_slice(data).map_err(|_| Error::InvalidU256Encoding)?;
Ok(Fq::from_u256(u256).map_err(|_| Error::NotFqMember)?)
}
fn deserialize_fq2(data: &[u8]) -> Result<Fq2, Error> {
let u512 = U512::from_slice(data).map_err(|_| Error::InvalidU512Encoding)?;
let (res, c0) = u512.divrem(&Fq::modulus());
Ok(Fq2::new(
Fq::from_u256(c0).map_err(|_| Error::NotFqMember)?,
Fq::from_u256(res.ok_or(Error::NotFq2Member)?).map_err(|_| Error::NotFqMember)?,
))
}
pub fn verify(vk: &VerifyingKey, primary_input: &[Fr], proof: &Proof) -> bool {
let p2 = G2::one();
@ -223,55 +109,6 @@ mod tests {
use super::*;
use json;
fn hex(s: &'static str) -> Vec<u8> {
use hex::FromHex;
s.from_hex().unwrap()
}
#[test]
fn sqrt_fq() {
// from zcash test_proof.cpp
let fq1 = Fq::from_str("5204065062716160319596273903996315000119019512886596366359652578430118331601").unwrap();
let fq2 = Fq::from_str("348579348568").unwrap();
assert_eq!(fq1, fq_sqrt(fq2).expect("348579348568 is quadratic residue"));
}
#[test]
fn sqrt_fq2() {
// from zcash test_proof.cpp
let x1 = Fq2::new(
Fq::from_str("12844195307879678418043983815760255909500142247603239203345049921980497041944").unwrap(),
Fq::from_str("7476417578426924565731404322659619974551724117137577781074613937423560117731").unwrap(),
);
let x2 = Fq2::new(
Fq::from_str("3345897230485723946872934576923485762803457692345760237495682347502347589474").unwrap(),
Fq::from_str("1234912378405347958234756902345768290345762348957605678245967234857634857676").unwrap(),
);
assert_eq!(fq2_sqrt(x2).unwrap(), x1);
// i is sqrt(-1)
assert_eq!(
fq2_sqrt(Fq2::one().neg()).unwrap(),
Fq2::i(),
);
// no sqrt for (1 + 2i)
assert!(
fq2_sqrt(Fq2::new(Fq::from_str("1").unwrap(), Fq::from_str("2").unwrap())).is_none()
);
}
#[test]
fn g1_deserialize() {
let g1 = g1_from_compressed(&hex("0230644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd46")).expect("Invalid g1 decompress result");
assert_eq!(g1.x(), Fq::from_str("21888242871839275222246405745257275088696311157297823662689037894645226208582").unwrap());
assert_eq!(g1.y(), Fq::from_str("3969792565221544645472939191694882283483352126195956956354061729942568608776").unwrap());
assert_eq!(g1.z(), Fq::one());
}
fn vkey() -> VerifyingKey {
json::pghr13::decode(include_bytes!("../../res/sprout-verifying-key.json")).expect("known to be good").into()
}
@ -481,51 +318,4 @@ mod tests {
assert!(verify(&vk, &primary_input[..], &proof));
}
#[test]
fn g2_deserialize() {
let g2 = g2_from_compressed(
&hex("0a023aed31b5a9e486366ea9988b05dba469c6206e58361d9c065bbea7d928204a761efc6e4fa08ed227650134b52c7f7dd0463963e8a4bf21f4899fe5da7f984a")
).expect("Valid g2 point hex encoding");
assert_eq!(g2.x(),
Fq2::new(
Fq::from_str("5923585509243758863255447226263146374209884951848029582715967108651637186684").unwrap(),
Fq::from_str("5336385337059958111259504403491065820971993066694750945459110579338490853570").unwrap(),
)
);
assert_eq!(g2.y(),
Fq2::new(
Fq::from_str("10374495865873200088116930399159835104695426846400310764827677226300185211748").unwrap(),
Fq::from_str("5256529835065685814318509161957442385362539991735248614869838648137856366932").unwrap(),
)
);
// 0b prefix is point reflection on the curve
let g2 = -g2_from_compressed(
&hex("0b023aed31b5a9e486366ea9988b05dba469c6206e58361d9c065bbea7d928204a761efc6e4fa08ed227650134b52c7f7dd0463963e8a4bf21f4899fe5da7f984a")
).expect("Valid g2 point hex encoding");
assert_eq!(g2.x(),
Fq2::new(
Fq::from_str("5923585509243758863255447226263146374209884951848029582715967108651637186684").unwrap(),
Fq::from_str("5336385337059958111259504403491065820971993066694750945459110579338490853570").unwrap(),
)
);
assert_eq!(g2.y(),
Fq2::new(
Fq::from_str("10374495865873200088116930399159835104695426846400310764827677226300185211748").unwrap(),
Fq::from_str("5256529835065685814318509161957442385362539991735248614869838648137856366932").unwrap(),
)
);
// valid point but invalid sign prefix
assert!(
g2_from_compressed(
&hex("0c023aed31b5a9e486366ea9988b05dba469c6206e58361d9c065bbea7d928204a761efc6e4fa08ed227650134b52c7f7dd0463963e8a4bf21f4899fe5da7f984a")
).is_err()
);
}
}
}

View File

@ -1,6 +1,7 @@
use chain::Transaction;
use ser::Serializable;
use storage::{TransactionOutputProvider, DuplexTransactionOutputProvider};
use verification::checked_transaction_fee;
use MemoryPool;
/// Transaction fee calculator for memory pool
@ -32,43 +33,12 @@ impl MemoryPoolFeeCalculator for NonZeroFeeCalculator {
}
}
/// Compute miner fee for given transaction.
/// Compute miner fee for given (memory pool) transaction.
///
/// It could return a wrong value (one that has overflowed or underflowed) if either the outputs sum,
/// the inputs sum, or their difference overflows/underflows. But since it is used for prioritizing
/// verified transactions && verification checks that values are correct, the call is safe.
pub fn transaction_fee(store: &TransactionOutputProvider, transaction: &Transaction) -> u64 {
let mut inputs_sum = transaction.inputs.iter().map(|input|
store.transaction_output(&input.previous_output, ::std::usize::MAX)
.expect("transaction must be verified by caller")
.value)
.fold(0u64, |acc, value| acc.saturating_add(value));
if let Some(ref join_split) = transaction.join_split {
let js_value_pub_new = join_split.descriptions.iter()
.fold(0u64, |acc, jsd| acc.saturating_add(jsd.value_pub_new));
inputs_sum = inputs_sum.saturating_add(js_value_pub_new);
}
if let Some(ref sapling) = transaction.sapling {
if sapling.balancing_value > 0 {
inputs_sum = inputs_sum.saturating_add(sapling.balancing_value as u64);
}
}
let mut outputs_sum = transaction.outputs.iter().map(|output| output.value)
.fold(0u64, |acc, value| acc.saturating_add(value));
if let Some(ref join_split) = transaction.join_split {
let js_value_pub_old = join_split.descriptions.iter()
.fold(0u64, |acc, jsd| acc.saturating_add(jsd.value_pub_old));
outputs_sum = outputs_sum.saturating_add(js_value_pub_old);
}
if let Some(ref sapling) = transaction.sapling {
if sapling.balancing_value < 0 {
inputs_sum = inputs_sum.saturating_add(sapling.balancing_value
.checked_neg().unwrap_or(::std::i64::MAX) as u64);
}
}
inputs_sum.saturating_sub(outputs_sum)
/// If any error occurs during computation, zero fee is returned. Normally, zero-fee
/// transactions are not accepted into the memory pool.
pub fn transaction_fee(store: &TransactionOutputProvider, tx: &Transaction) -> u64 {
checked_transaction_fee(store, ::std::usize::MAX, tx).unwrap_or(0)
}
pub fn transaction_fee_rate(store: &TransactionOutputProvider, tx: &Transaction) -> u64 {
@ -77,15 +47,13 @@ pub fn transaction_fee_rate(store: &TransactionOutputProvider, tx: &Transaction)
#[cfg(test)]
mod tests {
extern crate test_data;
use std::sync::Arc;
use storage::{AsSubstore};
use storage::AsSubstore;
use db::BlockChainDatabase;
use super::*;
use super::transaction_fee_rate;
#[test]
fn test_transaction_fee() {
fn transaction_fee_rate_works() {
let b0 = test_data::block_builder().header().nonce(1.into()).build()
.transaction()
.output().value(1_000_000).build()
@ -104,11 +72,9 @@ mod tests {
let tx2 = b1.transactions[0].clone();
let db = Arc::new(BlockChainDatabase::init_test_chain(vec![b0.into(), b1.into()]));
let store = db.as_transaction_output_provider();
assert_eq!(transaction_fee(db.as_transaction_output_provider(), &tx0), 0);
assert_eq!(transaction_fee(db.as_transaction_output_provider(), &tx2), 500_000);
assert_eq!(transaction_fee_rate(db.as_transaction_output_provider(), &tx0), 0);
assert_eq!(transaction_fee_rate(db.as_transaction_output_provider(), &tx2), 4_901);
assert_eq!(transaction_fee_rate(store, &tx0), 0);
assert_eq!(transaction_fee_rate(store, &tx2), 4_901);
}
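For the numbers above: tx2 takes 1_000_000 + 2_000_000 in and pays 2_500_000 out, so its fee is 500_000; assuming transaction_fee_rate divides that fee by the serialized transaction size (integer division), the expected 4_901 corresponds to a 102-byte transaction (500_000 / 102 = 4_901). tx0 spends more than its (empty) input set provides, so checked_transaction_fee errors, the wrapper falls back to a zero fee, and its rate is 0.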
}

View File

@ -102,11 +102,11 @@ args:
value_name: ADDRESS
subcommands:
- import:
about: Import blocks from a Bitcoin Core database.
about: Import blocks from a zcashd database.
args:
- PATH:
required: true
help: Path of the Bitcoin Core database.
help: Path of the zcashd database.
- rollback:
about: Rollback the database to given canonical-chain block.
args:

View File

@ -621,7 +621,7 @@ pub fn eval_script(
if v1 == v2 {
stack.push(vec![1].into());
} else {
stack.push(vec![0].into());
stack.push(Bytes::new());
}
},
Opcode::OP_EQUALVERIFY => {
@ -738,7 +738,7 @@ pub fn eval_script(
if v2 <= v3 && v3 < v1 {
stack.push(vec![1].into());
} else {
stack.push(vec![0].into());
stack.push(Bytes::new());
}
},
Opcode::OP_RIPEMD160 => {
@ -774,7 +774,7 @@ pub fn eval_script(
if success {
stack.push(vec![1].into());
} else {
stack.push(vec![0].into());
stack.push(Bytes::new());
}
},
Opcode::OP_CHECKSIGVERIFY if !success => {
@ -829,7 +829,7 @@ pub fn eval_script(
if success {
stack.push(vec![1].into());
} else {
stack.push(vec![0].into());
stack.push(Bytes::new());
}
},
Opcode::OP_CHECKMULTISIGVERIFY if !success => {
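The vec![0] → Bytes::new() replacements above all encode the same script convention: the canonical false value on the stack is the empty byte string, while a lone 0x00 byte is a non-minimal (though still falsy) encoding. A tiny, hypothetical helper showing the truthiness rule these opcodes rely on (the crate has its own cast; this is only for illustration):
// A stack element is false iff every byte is zero, allowing 0x80
// ("negative zero") as the final byte; anything else is true.
fn is_truthy(data: &[u8]) -> bool {
    for (i, b) in data.iter().enumerate() {
        if *b != 0 {
            // the last byte may be 0x80 (sign bit of negative zero) and still count as false
            return !(i == data.len() - 1 && *b == 0x80);
        }
    }
    false
}
// is_truthy(&[]) == false, is_truthy(&[0x00]) == false, is_truthy(&[0x01]) == true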
@ -951,7 +951,7 @@ mod tests {
.push_opcode(Opcode::OP_EQUAL)
.into_script();
let result = Ok(false);
let stack = vec![vec![0].into()].into();
let stack = vec![Bytes::new()].into();
basic_test(&script, result, stack);
}
@ -1783,7 +1783,7 @@ mod tests {
.push_opcode(Opcode::OP_WITHIN)
.into_script();
let result = Ok(false);
let stack = vec![vec![0].into()].into();
let stack = vec![Bytes::new()].into();
basic_test(&script, result, stack);
}
@ -1822,7 +1822,7 @@ mod tests {
input_index: 0,
input_amount: 0,
consensus_branch_id: 0,
cache: None,
cache: Default::default(),
};
let input: Script = "47304402202cb265bf10707bf49346c3515dd3d16fc454618c58ec0a0ff448a676c54ff71302206c6624d762a1fcef4618284ead8f08678ac05b13c84235f1654e6ad168233e8201410414e301b2328f17442c0b8310d787bf3d8a404cfbd0704f135b6ad4b2d3ee751310f981926e53a6e8c39bd7d3fefd576c543cce493cbac06388f2651d1aacbfcd".into();
let output: Script = "76a914df3bd30160e6c6145baaf2c88a8844c13a00d1d588ac".into();
@ -1841,7 +1841,7 @@ mod tests {
input_index: 0,
input_amount: 0,
consensus_branch_id: 0,
cache: None,
cache: Default::default(),
};
let input: Script = "00483045022100deeb1f13b5927b5e32d877f3c42a4b028e2e0ce5010fdb4e7f7b5e2921c1dcd2022068631cb285e8c1be9f061d2968a18c3163b780656f30a049effee640e80d9bff01483045022100ee80e164622c64507d243bd949217d666d8b16486e153ac6a1f8e04c351b71a502203691bef46236ca2b4f5e60a82a853a33d6712d6a1e7bf9a65e575aeb7328db8c014cc9524104a882d414e478039cd5b52a92ffb13dd5e6bd4515497439dffd691a0f12af9575fa349b5694ed3155b136f09e63975a1700c9f4d4df849323dac06cf3bd6458cd41046ce31db9bdd543e72fe3039a1f1c047dab87037c36a669ff90e28da1848f640de68c2fe913d363a51154a0c62d7adea1b822d05035077418267b1a1379790187410411ffd36c70776538d079fbae117dc38effafb33304af83ce4894589747aee1ef992f63280567f52f5ba870678b4ab4ff6c8ea600bd217870a8b4f1f09f3a8e8353ae".into();
let output: Script = "a9141a8b0026343166625c7475f01e48b5ede8c0252e87".into();
@ -1860,7 +1860,7 @@ mod tests {
input_index: 0,
input_amount: 0,
consensus_branch_id: 0,
cache: None,
cache: Default::default(),
};
let input: Script = "483045022052ffc1929a2d8bd365c6a2a4e3421711b4b1e1b8781698ca9075807b4227abcb0221009984107ddb9e3813782b095d0d84361ed4c76e5edaf6561d252ae162c2341cfb01".into();
let output: Script = "410411db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8643f656b412a3ac".into();
@ -1879,7 +1879,7 @@ mod tests {
input_index: 0,
input_amount: 0,
consensus_branch_id: 0,
cache: None,
cache: Default::default(),
};
let input: Script = "4b3048022200002b83d59c1d23c08efd82ee0662fec23309c3adbcbd1f0b8695378db4b14e736602220000334a96676e58b1bb01784cb7c556dd8ce1c220171904da22e18fe1e7d1510db5014104d0fe07ff74c9ef5b00fed1104fad43ecf72dbab9e60733e4f56eacf24b20cf3b8cd945bcabcc73ba0158bf9ce769d43e94bd58c5c7e331a188922b3fe9ca1f5a".into();
let output: Script = "76a9147a2a3b481ca80c4ba7939c54d9278e50189d94f988ac".into();
@ -1898,7 +1898,7 @@ mod tests {
input_index: 0,
input_amount: 0,
consensus_branch_id: 0,
cache: None,
cache: Default::default(),
};
let input: Script = "483045022100d92e4b61452d91a473a43cde4b469a472467c0ba0cbd5ebba0834e4f4762810402204802b76b7783db57ac1f61d2992799810e173e91055938750815b6d8a675902e014f".into();
let output: Script = "76009f69905160a56b210378d430274f8c5ec1321338151e9f27f4c676a008bdf8638d07c0b6be9ab35c71ad6c".into();

View File

@ -26,12 +26,12 @@ impl From<SighashBase> for u32 {
/// Signature portions cache.
#[derive(Debug, Default, PartialEq)]
pub struct SighashCache {
pub hash_prevouts: H256,
pub hash_sequence: H256,
pub hash_outputs: H256,
pub hash_join_split: H256,
pub hash_sapling_spends: H256,
pub hash_sapling_outputs: H256,
pub hash_prevouts: Option<H256>,
pub hash_sequence: Option<H256>,
pub hash_outputs: Option<H256>,
pub hash_join_split: Option<H256>,
pub hash_sapling_spends: Option<H256>,
pub hash_sapling_outputs: Option<H256>,
}
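With every portion stored as an Option<H256>, one SighashCache can be threaded through all inputs of a transaction: portions that do not depend on the input index are hashed once and memoized, the rest are recomputed per call. A minimal usage sketch, assuming the surrounding module's types (TransactionInputSigner, Script, SighashBase) are in scope:
fn sighashes_for_all_inputs(
    signer: &TransactionInputSigner,
    script_pubkey: &Script,
    input_amount: u64,
    consensus_branch_id: u32,
) -> Vec<H256> {
    // one cache for the whole transaction: prevouts, sequences, join-split and
    // sapling portions are hashed on the first call and reused afterwards
    let mut cache = SighashCache::default();
    (0..signer.inputs.len())
        .map(|input_index| signer.signature_hash(
            &mut cache,
            Some(input_index),
            input_amount,
            script_pubkey,
            SighashBase::All.into(),
            consensus_branch_id,
        ))
        .collect()
}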
#[cfg_attr(feature="cargo-clippy", allow(doc_markdown))]
@ -151,7 +151,7 @@ impl TransactionInputSigner {
/// Pass None as input_index to compute transparent input signature
pub fn signature_hash(
&self,
cache: &mut Option<SighashCache>,
cache: &mut SighashCache,
input_index: Option<usize>,
input_amount: u64,
script_pubkey: &Script,
@ -248,7 +248,7 @@ impl TransactionInputSigner {
/// Overwinter/sapling version of the signature.
fn signature_hash_post_overwinter(
&self,
cache: &mut Option<SighashCache>,
cache: &mut SighashCache,
input_index: Option<usize>,
input_amount: u64,
script_pubkey: &Script,
@ -258,35 +258,35 @@ impl TransactionInputSigner {
sapling: bool,
) -> H256 {
// compute signature portions that can be reused for other inputs
let hash_prevouts = cache.as_ref().map(|c| c.hash_prevouts)
.unwrap_or_else(|| compute_hash_prevouts(sighash, &self.inputs));
let hash_sequence = cache.as_ref().map(|c| c.hash_sequence)
.unwrap_or_else(|| compute_hash_sequence(sighash, &self.inputs));
let hash_outputs = compute_hash_outputs(cache, sighash, input_index, &self.outputs);
let hash_join_split = cache.as_ref().map(|c| c.hash_join_split)
.unwrap_or_else(|| compute_hash_join_split(self.join_split.as_ref()));
let hash_sapling_spends = if sapling {
cache.as_ref().map(|c| c.hash_sapling_spends)
.unwrap_or_else(|| compute_hash_sapling_spends(self.sapling.as_ref()))
} else {
0u8.into()
};
let hash_sapling_outputs = if sapling {
cache.as_ref().map(|c| c.hash_sapling_outputs)
.unwrap_or_else(|| compute_hash_sapling_outputs(self.sapling.as_ref()))
} else {
0u8.into()
};
//
// compute_* decides if it wants to use cached value
// compute_* decides if it wants to cache computed value
let (hash_prevouts, cache_hash_prevouts) = compute_hash_prevouts(cache, sighash, &self.inputs);
let (hash_sequence, cache_hash_sequence) = compute_hash_sequence(cache, sighash, &self.inputs);
let (hash_outputs, cache_hash_outputs) = compute_hash_outputs(cache, sighash, input_index, &self.outputs);
let (hash_join_split, cache_hash_join_split) = compute_hash_join_split(cache, self.join_split.as_ref());
let (hash_sapling_spends, cache_hash_sapling_spends) = compute_hash_sapling_spends(cache, sapling, self.sapling.as_ref());
let (hash_sapling_outputs, cache_hash_sapling_outputs) = compute_hash_sapling_outputs(cache, sapling, self.sapling.as_ref());
// update cache
*cache = Some(SighashCache {
hash_prevouts,
hash_sequence,
hash_outputs,
hash_join_split,
hash_sapling_spends,
hash_sapling_outputs,
});
if cache_hash_prevouts {
cache.hash_prevouts = Some(hash_prevouts);
}
if cache_hash_sequence {
cache.hash_sequence = Some(hash_sequence);
}
if cache_hash_outputs {
cache.hash_outputs = Some(hash_outputs);
}
if cache_hash_join_split {
cache.hash_join_split = Some(hash_join_split);
}
if cache_hash_sapling_spends {
cache.hash_sapling_spends = Some(hash_sapling_spends);
}
if cache_hash_sapling_outputs {
cache.hash_sapling_outputs = Some(hash_sapling_outputs);
}
let mut personalization = [0u8; 16];
personalization[..12].copy_from_slice(b"ZcashSigHash");
@ -341,85 +341,101 @@ impl TransactionInputSigner {
}
}
fn compute_hash_prevouts(sighash: Sighash, inputs: &[UnsignedTransactionInput]) -> H256 {
fn compute_hash_prevouts(
cache: &SighashCache,
sighash: Sighash,
inputs: &[UnsignedTransactionInput],
) -> (H256, bool) {
const PERSONALIZATION: &'static [u8; 16] = b"ZcashPrevoutHash";
match sighash.anyone_can_pay {
false => {
false => (cache.hash_prevouts.unwrap_or_else(|| {
let mut stream = Stream::default();
for input in inputs {
stream.append(&input.previous_output);
}
blake2b_personal(PERSONALIZATION, &stream.out())
},
true => 0u8.into(),
}), true),
true => (0u8.into(), false),
}
}
fn compute_hash_sequence(sighash: Sighash, inputs: &[UnsignedTransactionInput]) -> H256 {
fn compute_hash_sequence(
cache: &SighashCache,
sighash: Sighash,
inputs: &[UnsignedTransactionInput],
) -> (H256, bool) {
const PERSONALIZATION: &'static [u8; 16] = b"ZcashSequencHash";
match sighash.base {
SighashBase::All if !sighash.anyone_can_pay => {
SighashBase::All if !sighash.anyone_can_pay => (cache.hash_sequence.unwrap_or_else(|| {
let mut stream = Stream::default();
for input in inputs {
stream.append(&input.sequence);
}
blake2b_personal(PERSONALIZATION, &stream.out())
},
_ => 0u8.into(),
}), true),
_ => (0u8.into(), false),
}
}
fn compute_hash_outputs(
cache: &mut Option<SighashCache>,
cache: &SighashCache,
sighash: Sighash,
input_index: Option<usize>,
outputs: &[TransactionOutput]
) -> H256 {
) -> (H256, bool) {
const PERSONALIZATION: &'static [u8; 16] = b"ZcashOutputsHash";
match (sighash.base, input_index) {
(SighashBase::All, _) => {
cache.as_ref().map(|c| c.hash_outputs)
.unwrap_or_else(|| {
let mut stream = Stream::default();
for output in outputs {
stream.append(output);
}
blake2b_personal(PERSONALIZATION, &stream.out())
})
},
(SighashBase::All, _) => (cache.hash_outputs.unwrap_or_else(|| {
let mut stream = Stream::default();
for output in outputs {
stream.append(output);
}
blake2b_personal(PERSONALIZATION, &stream.out())
}), true),
(SighashBase::Single, Some(input_index)) if input_index < outputs.len() => {
let mut stream = Stream::default();
stream.append(&outputs[input_index]);
blake2b_personal(PERSONALIZATION, &stream.out())
(blake2b_personal(PERSONALIZATION, &stream.out()), false)
},
_ => 0u8.into(),
_ => (0u8.into(), false),
}
}
fn compute_hash_join_split(join_split: Option<&JoinSplit>) -> H256 {
fn compute_hash_join_split(
cache: &SighashCache,
join_split: Option<&JoinSplit>,
) -> (H256, bool) {
const PERSONALIZATION: &'static [u8; 16] = b"ZcashJSplitsHash";
match join_split {
Some(join_split) if !join_split.descriptions.is_empty() => {
Some(join_split) if !join_split.descriptions.is_empty() => (cache.hash_join_split.unwrap_or_else(|| {
let mut stream = Stream::default();
for description in &join_split.descriptions {
stream.append(description);
}
stream.append(&join_split.pubkey);
blake2b_personal(PERSONALIZATION, &stream.out())
},
_ => 0u8.into(),
}), true),
_ => (0u8.into(), false),
}
}
fn compute_hash_sapling_spends(sapling: Option<&Sapling>) -> H256 {
fn compute_hash_sapling_spends(
cache: &SighashCache,
is_sapling: bool,
sapling: Option<&Sapling>,
) -> (H256, bool) {
const PERSONALIZATION: &'static [u8; 16] = b"ZcashSSpendsHash";
if !is_sapling {
return (0u8.into(), false);
}
match sapling {
Some(sapling) if !sapling.spends.is_empty() => {
Some(sapling) if !sapling.spends.is_empty() => (cache.hash_sapling_spends.unwrap_or_else(|| {
let mut stream = Stream::default();
for spend in &sapling.spends {
stream.append(&spend.value_commitment);
@ -429,23 +445,31 @@ fn compute_hash_sapling_spends(sapling: Option<&Sapling>) -> H256 {
stream.append(&spend.zkproof);
}
blake2b_personal(PERSONALIZATION, &stream.out())
},
_ => 0u8.into(),
}), true),
_ => (0u8.into(), false),
}
}
fn compute_hash_sapling_outputs(sapling: Option<&Sapling>) -> H256 {
fn compute_hash_sapling_outputs(
cache: &SighashCache,
is_sapling: bool,
sapling: Option<&Sapling>,
) -> (H256, bool) {
const PERSONALIZATION: &'static [u8; 16] = b"ZcashSOutputHash";
if !is_sapling {
return (0u8.into(), false);
}
match sapling {
Some(sapling) if !sapling.outputs.is_empty() => {
Some(sapling) if !sapling.outputs.is_empty() => (cache.hash_sapling_outputs.unwrap_or_else(|| {
let mut stream = Stream::default();
for output in &sapling.outputs {
stream.append(output);
}
blake2b_personal(PERSONALIZATION, &stream.out())
},
_ => 0u8.into(),
}), true),
_ => (0u8.into(), false),
}
}
@ -504,7 +528,7 @@ mod tests {
sapling: None,
};
let mut cache = None;
let mut cache = Default::default();
let hash = input_signer.signature_hash(&mut cache, Some(0), 0, &previous_output, SighashBase::All.into(), 0);
assert_eq!(hash, expected_signature_hash);
}
@ -534,7 +558,7 @@ mod tests {
let expected: H256 = result.parse().unwrap();
let expected = expected.reversed();
let mut cache = None;
let mut cache = Default::default();
let input_index = if input_index as u64 == ::std::u64::MAX { None } else { Some(input_index) };
let hash = signer.signature_hash(&mut cache, input_index, 0, &script, hash_type as u32, consensus_branch_id);
if expected != hash {
@ -566,50 +590,64 @@ mod tests {
}
}
// tx#1 from block#419201
#[test]
fn test_sighash_cache_works_correctly() {
// tx#1 from block#419201
// https://zcash.blockexplorer.com/api/rawblock/00000000014d117faa2ea701b24261d364a6c6a62e5bc4bc27335eb9b3c1e2a8
let spend_tx: Transaction = "0400008085202f8901ddd8ddd4a8713d9443e11f1a79adb737e974bece08608b6b04017d9b436b9399010000006a473044022004cd1a5a48b015213fa0810028d98271d219aa4a7293928dea01946f9e3db53102206837946d92757a460c8d7a2d64872890abbfa3f572cd1e8fabf5a7ca8997de26012102fa947bb7cfa50aa6e83f6296d95334d7f55cd43e9c873404f2550f6ba006d5aaffffffff00000000009465060021f65ff9ffffffff0001b40f6b0d76653bc236b045c7dd16e0e8e1a8fa6fa9f7d5120c1e7289aee78d67f9d8ae3d9707dcb30064b8f7afcc2fc1aca8918f263c58da6c7806cfad133d11fedf35174cf837149b3f2559a70055398c6ab3eba99a3335f4d360488d88266fb533abea66784d930ff8f1574eca66374d4fa559f462d65c0d8af6e7b2770dfb804f9d29388b651b2af0d9d21ad3cd6aebeeb8ecdc98b208aa027e6dcc8f27a13d643d650934faa98a809fa0c61ea3f796f96565cbee80a176a9258621ec3574ce5e7f6ceb70db4bd36f2feb983648f8405ebda405645f005f455f3dd96ea7d5081ba13a6a90cae0aebc7ec7a4589058bf67dc35c511de423d0c29d95febeaf08a32f4e123f39d4bb964d836eeaf2eb825c68c0f7ce62ab8f048f61a28998c1f0340d9660c849b86d8f639955d5d2e15458875dd547c86fd96a74c48b3eb6fc3d31ffdfa24d78afdbb0dbfa8200e87562668293bbab1ca9fc67af4f7369f0e12b6e6e07388189381b38059737ddc3322cbbfd6d1bce912d2bacdfa66f3f22835fc0f3f213e6abef8eacfaffd4c204296900e1651d1c9cbf981364629250b4bafaa5e4d1cbba21a03f6270ddbcaed684caceea5b2870a856a11835923277e5648db0d92ad60f280c7dbb1c2820000fa3117f3e1e0a08cec9f3dfeaf2b6d8d714e072b674d5dbb53420dc9cf67c8f0010665119a8200dd774f0107b17a7398706cf10eecd219f252b2d6813f4d8a672a1dca61f68c75cefb9f2ba7653bdae0b2faa6e76afe9ff62d2ecdfc72d497210517a66f2bf39f402991e5608e754c551e75bd26e7f33474b68d690d5285bd949182731fc49b43d4673aaca20d665c0d0b6ad7cced361c91b06e114e46495400738ef9d528744267fd47d3239e4548a6a3ff6e43b6ca821f32fc261f2c649674a4dc2bac93d177e0c44f4c78694f50ce374978599de6aefbd37e892de81d8d6012675f31daa75fb35bbb339754355e67ea4cf0b8c67d573af3f4f3382b3f408682a50d58767a796ca1ed3dca4227f9d107dc08c0e53134d4fa6e06792182873aa895f3373c388b0c9c7d4a2c065b6f1cd807ef3f4d7b2737eeb90ebb557c859ff17b898d350d8cffc7ca1e08dcb9baba5a336f17e6eba7a2425da8c43caefbd7fa58390e78ad083c36720336fe824983d1fa17402e89d3c224e994248c88ec2f547aeb48227705fd8a4ac3f9f30b139b4e30bb0b82af9bae87800c875c19d6fbd45a26763d056bef6899e031442185ae50a5ed24b006a852f8ce3d55b2d2d9f4179797f93bbffd8905cc9ce69cebc8a17e1e8b8eb5e1e675620c70b22de348969b993246520c00bfa467125a2528a829120b3f64d2c1f58f45cb31a1d15ba368d7df55aa65e65ad4a8f5bc63e06396d40964b3aff6084a83f567b186b1c70072dfebc873638409".into();
assert_eq!(spend_tx.hash().reversed(), "66e2e3dfb9c51eb961004e0eb8bfd3820239c4f11614b65a1fffb60e01858580".into());
assert_eq!(spend_tx.inputs[0].previous_output.hash.reversed(), "99936b439b7d01046b8b6008cebe74e937b7ad791a1fe143943d71a8d4ddd8dd".into());
assert_eq!(spend_tx.inputs[0].previous_output.index, 1);
let test_cases: Vec<(Transaction, Transaction, usize)> = vec![
(
// tx#1 from block#419201
// https://zcash.blockexplorer.com/api/rawblock/00000000014d117faa2ea701b24261d364a6c6a62e5bc4bc27335eb9b3c1e2a8
"0400008085202f8901ddd8ddd4a8713d9443e11f1a79adb737e974bece08608b6b04017d9b436b9399010000006a473044022004cd1a5a48b015213fa0810028d98271d219aa4a7293928dea01946f9e3db53102206837946d92757a460c8d7a2d64872890abbfa3f572cd1e8fabf5a7ca8997de26012102fa947bb7cfa50aa6e83f6296d95334d7f55cd43e9c873404f2550f6ba006d5aaffffffff00000000009465060021f65ff9ffffffff0001b40f6b0d76653bc236b045c7dd16e0e8e1a8fa6fa9f7d5120c1e7289aee78d67f9d8ae3d9707dcb30064b8f7afcc2fc1aca8918f263c58da6c7806cfad133d11fedf35174cf837149b3f2559a70055398c6ab3eba99a3335f4d360488d88266fb533abea66784d930ff8f1574eca66374d4fa559f462d65c0d8af6e7b2770dfb804f9d29388b651b2af0d9d21ad3cd6aebeeb8ecdc98b208aa027e6dcc8f27a13d643d650934faa98a809fa0c61ea3f796f96565cbee80a176a9258621ec3574ce5e7f6ceb70db4bd36f2feb983648f8405ebda405645f005f455f3dd96ea7d5081ba13a6a90cae0aebc7ec7a4589058bf67dc35c511de423d0c29d95febeaf08a32f4e123f39d4bb964d836eeaf2eb825c68c0f7ce62ab8f048f61a28998c1f0340d9660c849b86d8f639955d5d2e15458875dd547c86fd96a74c48b3eb6fc3d31ffdfa24d78afdbb0dbfa8200e87562668293bbab1ca9fc67af4f7369f0e12b6e6e07388189381b38059737ddc3322cbbfd6d1bce912d2bacdfa66f3f22835fc0f3f213e6abef8eacfaffd4c204296900e1651d1c9cbf981364629250b4bafaa5e4d1cbba21a03f6270ddbcaed684caceea5b2870a856a11835923277e5648db0d92ad60f280c7dbb1c2820000fa3117f3e1e0a08cec9f3dfeaf2b6d8d714e072b674d5dbb53420dc9cf67c8f0010665119a8200dd774f0107b17a7398706cf10eecd219f252b2d6813f4d8a672a1dca61f68c75cefb9f2ba7653bdae0b2faa6e76afe9ff62d2ecdfc72d497210517a66f2bf39f402991e5608e754c551e75bd26e7f33474b68d690d5285bd949182731fc49b43d4673aaca20d665c0d0b6ad7cced361c91b06e114e46495400738ef9d528744267fd47d3239e4548a6a3ff6e43b6ca821f32fc261f2c649674a4dc2bac93d177e0c44f4c78694f50ce374978599de6aefbd37e892de81d8d6012675f31daa75fb35bbb339754355e67ea4cf0b8c67d573af3f4f3382b3f408682a50d58767a796ca1ed3dca4227f9d107dc08c0e53134d4fa6e06792182873aa895f3373c388b0c9c7d4a2c065b6f1cd807ef3f4d7b2737eeb90ebb557c859ff17b898d350d8cffc7ca1e08dcb9baba5a336f17e6eba7a2425da8c43caefbd7fa58390e78ad083c36720336fe824983d1fa17402e89d3c224e994248c88ec2f547aeb48227705fd8a4ac3f9f30b139b4e30bb0b82af9bae87800c875c19d6fbd45a26763d056bef6899e031442185ae50a5ed24b006a852f8ce3d55b2d2d9f4179797f93bbffd8905cc9ce69cebc8a17e1e8b8eb5e1e675620c70b22de348969b993246520c00bfa467125a2528a829120b3f64d2c1f58f45cb31a1d15ba368d7df55aa65e65ad4a8f5bc63e06396d40964b3aff6084a83f567b186b1c70072dfebc873638409".into(),
// donor tx for input #0:
// tx#3 from block#409840
// https://zcash.blockexplorer.com/api/rawblock/0000000002d83a0d7d5011a19d2bd89125dc22d63b6484f2792fd1d636c4d940
"030000807082c4030b10d6644275fdb7553601349f524ce0a4fb6acc1d17551249b7cb87cb97e07f1f100000006b483045022100977b46b263f691777cb13b9b9c623ce15ccef2d5d5f1efcb7fd1f16aeac98fe20220090ecb6f82cccb37f295ec3c898c1c9b5bb3f46f7b524bc641137a9ce6277bbb012102be56007d075b0ae8e9de4027b61af2b0c8f458c5d1cc6c0a3e0a7f699cfb96c7feffffff8308f2430ed380564b53e7e4fdb16fbb30d1c482dc5fae68613e69d368608c44190000006a47304402201b5673ce6c541a42eac79742e7d1a1c9f51456d5012226985067eec93922f96f0220064c88fa17711860e5a06ebf8849cd4dcbb8f944c39ff227a99a91d8c82a4621012102be56007d075b0ae8e9de4027b61af2b0c8f458c5d1cc6c0a3e0a7f699cfb96c7feffffff5c89bda835f37289182b3123c49c7906629631e4c3c883de97fb637f92802c16580000006a47304402203533eca9827a92959ee7b8c0ea8154b62e9935bc8ca4c61020ff268bb336c59402200e14f6ea6f2e9e0bce19db50b2b10ed3d1e40db957aae3b491797540932dd8ea012102be56007d075b0ae8e9de4027b61af2b0c8f458c5d1cc6c0a3e0a7f699cfb96c7feffffff9b057ba8d3e81fa9c10a98c4ee258df57a9eb1a80f1fb22b08b1685d1ada17fc1f0000006b483045022100e845ab5355bd877641e8238d9f16ac1345af346e81fbeeedda128f23dec5f71002207367e2e38d32e6843aa8c52eedd6b1fbda08d8436e26e0e80e035cb2314710d7012102be56007d075b0ae8e9de4027b61af2b0c8f458c5d1cc6c0a3e0a7f699cfb96c7feffffff9e01f46736183d109a9739638cfff185f62aebf2e9a010f795d80a0b82daad50200000006b483045022100fdf3156db7f2cad51acfd4fcda6ec9f3608ba68f49d5d947e7504a00522f6a4102202f4c74cc34843efbecd53612fd8ea065d8f79b3419190e1792fe3fa03b8d5447012102be56007d075b0ae8e9de4027b61af2b0c8f458c5d1cc6c0a3e0a7f699cfb96c7fefffffffafa19844c386065fd650694ec15094a24e4a6499eb585a0c0544fbf5be9e002010000006a47304402205e2a73750d0ee3672184da65e356fbb95023586ef77c8f01d87eecab09aba5e7022015f8b13eece11b9c7a57bd869b5f44af3a4dfc07ebaf324f899b6541f7ffba050121025792386461f81e038989e4ac62a4142e1e987ae740906ab3032a04e4ac74967bfeffffff1ae4ec68f85c5d67797eccba7653c1a2bc5df34423366af7793bf1232c2bacb9000000006a47304402207501656ce6d97dbd5573953c117cb8fbdc61e09be58798cdab162304291311860220088945ac2142cf3689d7686b146720e4ac1c7f5292c13406fc2628a8e63ff221012103fbdca468248d731579fb6e756566d48903a84a16cd0f415cddfb5c41458bb262feffffff2d805a191e7699a2dcadf2444f682afe2be378b20e3b4b4431f57f32c7d554dd000000006b483045022100b39190d59a549f04f1e22d637f997b1b1ab7abda98d34a67fb616d380d247ea5022039feb0bf51e496dcfad037b0c675038d0c36793a26a66e58a9039524bd6e3e6c012103bb4932a7677891b8945557cb23530b6e9a688f0fa6deac31c9e323f0edf40439feffffff71934ffa32133dc62c7e4b2a8b10f51a8aa0a96099b4fc1e648ea9c676884c6f000000006b48304502210094af623575cec584e3d4406ed30a2f6364a0c6ba493cb77ea0f0d6e8d372cc5f02206ee94b089bfc4e347cf5de9de27785102e3bb146276868a4d9220d405d4e28e2012102cb666a57bb47dc4d447795e439f9d03d7b935f94fd92f80ecabc6a41061a50e8feffffff1bddd5649e33d1595c956b574d3dbb3883d25d1b9be93ef3ce7fad492534d8e4000000006b483045022100800c193e55b1234ed405248ebd69250fb0373bcfe6dda2da593c8f4213d10e5e02202c7b7387449b74125ee8d1f8c381d30f9474f0d5718e33ddd0a9179f5c6ca5ff0121020b2c90de955d7b4bf93415147b8bb43af3186b46e743316ee662ec9136899bf9fefffffffec3b71b6d478340583518ac04357d7a01e39ef311a9b7a9eed4bd3afe8a2a39010000006b483045022100b0e445d7bb23bf2400428d17d8b076d1ea6f415981ef9b806c714697538eaad402203b29f0108126b3345f48dfe22a8b743f3fd91b3c809272d8f81d566530540aa20121029506cd31b962743382a7c5b372d4a6ce66584f7aafefd358ad1b720902c3c907feffffff023d4a0f00000000001976a914e212f89515c07fc61c01fd9ccee566544956822088acef30a006000000001976a91414c42abe82c257103f4589e738f4f05b0f0c600e88ace54006000441060000".into(),
// we're testing input#0
0,
),
(
// tx#3 from block#420083
// https://zcash.blockexplorer.com/api/rawtx/56d3fd4241520573c4f90e8b89a634cb3b3c4f2cfca34e7b3208de4678bf67b2
"0400008085202f890666a4b9fe9308b92f1785f9e18a18f473d5a8c44ac3abf5cf52d43773a5bab580000000006b4830450221008336e1a227f894d41a4f9a13bb67ae3a0bc0be6558dd4c17ffa79732f3908f8802200978b7abfa0cd38cf73a161aedfcdf78600a3112ad8a5821d12f709bf9e640b4032102c13a62ce58bf064cd5e3bdb8ad61e07ff5fe854284728edd1117125c7bffbfeeffffffffb9fcb288f427e0556f629d248656d6bab6d2b6343aee8ffd8eddfb754b6f8fe0000000006b483045022100bf6744adf8ad5a3aea4e6170ff5e638702179832f8ab7c20721a5d7db169944a0220147ab55f2316097fe54a67c9fa08950ab3645cf475ba7a61596f39ea03893da40321032bf181cbe48f2ab8ae2b2c3840eff37615a0299bb90b49bfecbc36ebc83c7b6bffffffffb9ee9a5db14130b14cb28a8488ee25a451b31b7793925564c2cf158b5750d148010000006a473044022038f9774cd6654a6efec33150d27a2e2b8177e11c98cd3196b157c18ed2af20fa02203f3ea40ca22fb9fc1c28d06105c5a2289311ab03aa5fb03509230e1a87644cfa0321025371db306496fa8ec28ffc8ed28f1c8fd93c751d7b2448d7959656a6fbfd36faffffffff6f3447fe2d7f1a35ea6024083868067786fc2d2a8e65787dc5a5268efae2290a010000006a47304402207a975173da91e93280f5664f58e07959d18408554a28c428836b90b9e0e73578022070c495faa40b8c1fa784454e29f8a621f1824a182f68c6600cb0daffc7b4e45f0321025778f397884e483eb74ae861d52abc625b114052dfd40936b3d505767f38ed33ffffffff794ff48194c959056ff80ef547262259c9f1583d860301425c04a6824807b820010000006b483045022100bd707bc1a01edb5bfc5e6120490ec07cb330a78aa366d718f291980bdfd15758022035f8e860431c915591b3cde8fced541eca8a26c8966b92368f09a0356f6d92800321030ede2c25e6d034f4a855f9976cd67615e4279656f775fe13be300a4f6b688b8effffffff1fe56983eee881144093b6e4e2953779ea2cee051005bd1e78a06edceda90333010000006b483045022100a5a36678dd45ec9e4de1a275829da911dded2c59bb0e7467110bb9c6fc8cb099022033a2c22315ae87aedff8c1c9631c4c890cca9690544433dec52dc358332334400321023e4a72e165868a1257f53e093eda3c1e95c7e36b5a4a32bf78c4d0d8417addc7ffffffff01eaf82800000000001976a91417379a9efb16569951887cb5365791ad9895389788ac00000000000000000000000000000000000000".into(),
// donor tx for input #0:
// https://zcash.blockexplorer.com/api/rawtx/80b5baa57337d452cff5abc34ac4a8d573f4188ae1f985172fb90893feb9a466
"030000807082c4030363fa1654f87357fa2f9daf554424d97fe9b0275f820e10fa80c713329816b085000000006a47304402200fc27c0e1c63142f6903608967f66432ca88c95b54cc80794b099b82d0a2df780220440a9c21de092f4cc33f83405a27023362961e2423b5957b387442b30b9fea8f01210227855d8d44fa991a92a8608df49a3a76bb2a37a6ec4146df59bedabc6ba97572feffffff956dc5a8869f4ea22e806f2422248b840d2fae0abaaaaec798b97bc8358cdb5d000000006a47304402203c3dcf0206a5b48ef517d2d23f04a3d6a2383536c208daa7d6f5ce9a2cf61e5402206ac954167c7f6ec1e8be22c997c32104dc0368b3f2d97fea670a9d6490a169bc012102fbdf4afcad38290d7268db506165af2e4fbd99632c2f609b2512f951af17b116feffffffb126f38d5cb212b2ec56184dde40acb66583d26ae979dc54893f5321fe329df8000000006a473044022054842385d5596ae51626c507dacb575274df071ed76f064825206e90687503fa02201e7e3f9c34adf182776048b0b87a9ff7cb34819ca97866a92fb1c960257e56d2012103c6d5ce7fcb7483662e702ff6a40141ce6e03cdf10206513a6ee33543194bcea3feffffff020f9f1000000000001976a9146b5f30eb97d6908d7319b57b5c453612045bc98688ac085a0f00000000001976a9140a47e7e4b7767f6cd0eb630a2bc3a69c9f9175f788ac430f0600620f060000".into(),
// we're testing input#0
0,
)
];
// donor tx for input #1:
// tx#3 from block#409840
// https://zcash.blockexplorer.com/api/rawblock/0000000002d83a0d7d5011a19d2bd89125dc22d63b6484f2792fd1d636c4d940
let donor_tx: Transaction = "030000807082c4030b10d6644275fdb7553601349f524ce0a4fb6acc1d17551249b7cb87cb97e07f1f100000006b483045022100977b46b263f691777cb13b9b9c623ce15ccef2d5d5f1efcb7fd1f16aeac98fe20220090ecb6f82cccb37f295ec3c898c1c9b5bb3f46f7b524bc641137a9ce6277bbb012102be56007d075b0ae8e9de4027b61af2b0c8f458c5d1cc6c0a3e0a7f699cfb96c7feffffff8308f2430ed380564b53e7e4fdb16fbb30d1c482dc5fae68613e69d368608c44190000006a47304402201b5673ce6c541a42eac79742e7d1a1c9f51456d5012226985067eec93922f96f0220064c88fa17711860e5a06ebf8849cd4dcbb8f944c39ff227a99a91d8c82a4621012102be56007d075b0ae8e9de4027b61af2b0c8f458c5d1cc6c0a3e0a7f699cfb96c7feffffff5c89bda835f37289182b3123c49c7906629631e4c3c883de97fb637f92802c16580000006a47304402203533eca9827a92959ee7b8c0ea8154b62e9935bc8ca4c61020ff268bb336c59402200e14f6ea6f2e9e0bce19db50b2b10ed3d1e40db957aae3b491797540932dd8ea012102be56007d075b0ae8e9de4027b61af2b0c8f458c5d1cc6c0a3e0a7f699cfb96c7feffffff9b057ba8d3e81fa9c10a98c4ee258df57a9eb1a80f1fb22b08b1685d1ada17fc1f0000006b483045022100e845ab5355bd877641e8238d9f16ac1345af346e81fbeeedda128f23dec5f71002207367e2e38d32e6843aa8c52eedd6b1fbda08d8436e26e0e80e035cb2314710d7012102be56007d075b0ae8e9de4027b61af2b0c8f458c5d1cc6c0a3e0a7f699cfb96c7feffffff9e01f46736183d109a9739638cfff185f62aebf2e9a010f795d80a0b82daad50200000006b483045022100fdf3156db7f2cad51acfd4fcda6ec9f3608ba68f49d5d947e7504a00522f6a4102202f4c74cc34843efbecd53612fd8ea065d8f79b3419190e1792fe3fa03b8d5447012102be56007d075b0ae8e9de4027b61af2b0c8f458c5d1cc6c0a3e0a7f699cfb96c7fefffffffafa19844c386065fd650694ec15094a24e4a6499eb585a0c0544fbf5be9e002010000006a47304402205e2a73750d0ee3672184da65e356fbb95023586ef77c8f01d87eecab09aba5e7022015f8b13eece11b9c7a57bd869b5f44af3a4dfc07ebaf324f899b6541f7ffba050121025792386461f81e038989e4ac62a4142e1e987ae740906ab3032a04e4ac74967bfeffffff1ae4ec68f85c5d67797eccba7653c1a2bc5df34423366af7793bf1232c2bacb9000000006a47304402207501656ce6d97dbd5573953c117cb8fbdc61e09be58798cdab162304291311860220088945ac2142cf3689d7686b146720e4ac1c7f5292c13406fc2628a8e63ff221012103fbdca468248d731579fb6e756566d48903a84a16cd0f415cddfb5c41458bb262feffffff2d805a191e7699a2dcadf2444f682afe2be378b20e3b4b4431f57f32c7d554dd000000006b483045022100b39190d59a549f04f1e22d637f997b1b1ab7abda98d34a67fb616d380d247ea5022039feb0bf51e496dcfad037b0c675038d0c36793a26a66e58a9039524bd6e3e6c012103bb4932a7677891b8945557cb23530b6e9a688f0fa6deac31c9e323f0edf40439feffffff71934ffa32133dc62c7e4b2a8b10f51a8aa0a96099b4fc1e648ea9c676884c6f000000006b48304502210094af623575cec584e3d4406ed30a2f6364a0c6ba493cb77ea0f0d6e8d372cc5f02206ee94b089bfc4e347cf5de9de27785102e3bb146276868a4d9220d405d4e28e2012102cb666a57bb47dc4d447795e439f9d03d7b935f94fd92f80ecabc6a41061a50e8feffffff1bddd5649e33d1595c956b574d3dbb3883d25d1b9be93ef3ce7fad492534d8e4000000006b483045022100800c193e55b1234ed405248ebd69250fb0373bcfe6dda2da593c8f4213d10e5e02202c7b7387449b74125ee8d1f8c381d30f9474f0d5718e33ddd0a9179f5c6ca5ff0121020b2c90de955d7b4bf93415147b8bb43af3186b46e743316ee662ec9136899bf9fefffffffec3b71b6d478340583518ac04357d7a01e39ef311a9b7a9eed4bd3afe8a2a39010000006b483045022100b0e445d7bb23bf2400428d17d8b076d1ea6f415981ef9b806c714697538eaad402203b29f0108126b3345f48dfe22a8b743f3fd91b3c809272d8f81d566530540aa20121029506cd31b962743382a7c5b372d4a6ce66584f7aafefd358ad1b720902c3c907feffffff023d4a0f00000000001976a914e212f89515c07fc61c01fd9ccee566544956822088acef30a006000000001976a91414c42abe82c257103f4589e738f4f05b0f0c600e88ace54006000441060000".into();
assert_eq!(donor_tx.hash().reversed(), "99936b439b7d01046b8b6008cebe74e937b7ad791a1fe143943d71a8d4ddd8dd".into());
for (spend_tx, donor_tx, input_index) in test_cases {
let output_index = spend_tx.inputs[input_index].previous_output.index as usize;
// prepare tx signature checker
let consensus_branch_id = 0x76b809bb; // sapling starts from block#419200
let signer: TransactionInputSigner = spend_tx.clone().into();
let mut checker = TransactionSignatureChecker {
signer,
input_index: 0,
input_amount: donor_tx.outputs[1].value,
consensus_branch_id,
cache: None,
};
// prepare tx signature checker
let consensus_branch_id = 0x76b809bb; // all test cases are for sapling era
let signer: TransactionInputSigner = spend_tx.clone().into();
let mut checker = TransactionSignatureChecker {
signer,
input_index,
input_amount: donor_tx.outputs[output_index].value,
consensus_branch_id,
cache: Default::default(),
};
// calculate signature => fill cache
checker.signer.signature_hash(
&mut checker.cache,
None,
0,
&From::from(vec![]),
::sign::SighashBase::All.into(),
consensus_branch_id,
);
// calculate signature => fill cache
checker.signer.signature_hash(
&mut checker.cache,
None,
0,
&From::from(vec![]),
::sign::SighashBase::All.into(),
consensus_branch_id,
);
// and finally check input#0 (the cached signature portions are used here)
let input: Script = spend_tx.inputs[0].script_sig.clone().into();
let output: Script = donor_tx.outputs[1].script_pubkey.clone().into();
let flags = VerificationFlags::default()
.verify_p2sh(true)
.verify_locktime(true)
.verify_dersig(true);
assert_eq!(verify_script(&input, &output, &flags, &mut checker), Ok(()));
// and finally check input (the cached signature portions are used here)
let input: Script = spend_tx.inputs[input_index].script_sig.clone().into();
let output: Script = donor_tx.outputs[output_index].script_pubkey.clone().into();
let flags = VerificationFlags::default()
.verify_p2sh(true)
.verify_locktime(true)
.verify_dersig(true);
assert_eq!(verify_script(&input, &output, &flags, &mut checker), Ok(()));
}
}
}

View File

@ -53,7 +53,7 @@ pub struct TransactionSignatureChecker {
pub input_index: usize,
pub input_amount: u64,
pub consensus_branch_id: u32,
pub cache: Option<SighashCache>,
pub cache: SighashCache,
}
impl SignatureChecker for TransactionSignatureChecker {

View File

@ -310,11 +310,12 @@ impl Chain {
self.verifying_headers.extend(headers.iter().map(|h| h.hash))
}
/// Remove headers from verifying queue
pub fn headers_verified(&mut self, headers: &[IndexedBlockHeader]) {
for header in headers {
self.verifying_headers.remove(&header.hash);
}
/// Remove headers from verifying queue.
///
/// Returns all headers that still have VerifyingHeader state (i.e. they are not Verifying || Stored).
pub fn headers_verified(&mut self, mut headers: Vec<IndexedBlockHeader>) -> Vec<IndexedBlockHeader> {
headers.retain(|header| self.verifying_headers.remove(&header.hash));
headers
}
/// Schedule blocks hashes for requesting
@ -336,6 +337,9 @@ impl Chain {
/// chain, guarantees the header has already been pre-verified. The opposite isn't true -
/// if the header isn't in the chain, it could have been (in rare cases) pre-verified.
pub fn verify_block(&mut self, header: IndexedBlockHeader) -> bool {
// when we start verifying the block, forget that we are (possibly) verifying its header
self.verifying_headers.remove(&header.hash);
// insert the header into the in-memory chain in case it is not already there (non-headers-first sync)
self.hash_chain.push_back_at(VERIFYING_QUEUE, header.hash.clone());
self.headers_chain.insert(header)
@ -362,7 +366,7 @@ impl Chain {
match block_origin {
storage::BlockOrigin::KnownBlock => {
// there should be no known blocks at this point
unreachable!();
unreachable!("Trying to re-insert known block: {}", block.hash().to_reversed_str());
},
// case 1: block has been added to the main branch
storage::BlockOrigin::CanonChain { .. } => {

View File

@ -255,7 +255,7 @@ impl<T> ClientCore for SynchronizationClientCore<T> where T: TaskExecutor {
/// Try to queue synchronization of unknown blocks when blocks headers are received.
fn on_headers(&mut self, peer_index: PeerIndex, headers: Vec<IndexedBlockHeader>) -> Option<Vec<IndexedBlockHeader>> {
assert!(! headers.is_empty(), "This must be checked in incoming connection");
assert!(!headers.is_empty(), "This is checked in incoming connection");
// update peers to select next tasks
self.peers_tasks.on_headers_received(peer_index);
@ -421,6 +421,13 @@ impl<T> ClientCore for SynchronizationClientCore<T> where T: TaskExecutor {
self.chain.forget_block_leave_header(&block.header.hash);
// remember this block as unknown
if !self.orphaned_blocks_pool.contains_unknown_block(&block.header.hash) {
trace!(
target: "sync",
"Inserting unknown orphan block: {}. Block state: {:?}, parent state: {:?}",
block.header.hash.to_reversed_str(),
block_state,
parent_block_state,
);
self.orphaned_blocks_pool.insert_unknown_block(block);
}
}
@ -455,9 +462,27 @@ impl<T> ClientCore for SynchronizationClientCore<T> where T: TaskExecutor {
entry.insert((blocks_to_verify_hashes.into_iter().collect(), Vec::new()));
}
}
trace!(
target: "sync",
"Scheduling verification of blocks: {}..{} First block state: {:?}, parent state: {:?}",
blocks_to_verify[0].hash().to_reversed_str(),
blocks_to_verify[blocks_to_verify.len() - 1].hash().to_reversed_str(),
block_state,
parent_block_state,
);
result = Some(blocks_to_verify);
},
BlockState::Requested | BlockState::Scheduled => {
trace!(
target: "sync",
"Inserting known orphan block: {}. Block state: {:?}, parent state: {:?}",
block.header.hash.to_reversed_str(),
block_state,
parent_block_state,
);
// remember peer as useful
self.peers_tasks.useful_peer(peer_index);
// remember as orphan block
@ -1058,9 +1083,16 @@ impl<T> SynchronizationClientCore<T> where T: TaskExecutor {
}
fn on_headers_verification_success(&mut self, headers: Vec<IndexedBlockHeader>) {
self.chain.headers_verified(&headers);
self.chain.schedule_blocks_headers(headers);
let headers = self.chain.headers_verified(headers);
if !headers.is_empty() {
trace!(
target: "sync",
"Scheduling retrieval of headers: {}..{}",
headers[0].hash.to_reversed_str(),
headers[headers.len() - 1].hash.to_reversed_str(),
);
self.chain.schedule_blocks_headers(headers);
}
// switch to synchronization state
if !self.state.is_synchronizing() {
@ -1076,7 +1108,7 @@ impl<T> SynchronizationClientCore<T> where T: TaskExecutor {
}
fn on_headers_verification_error(&mut self, peer: PeerIndex, error: String, hash: H256, headers: Vec<IndexedBlockHeader>) {
self.chain.headers_verified(&headers);
self.chain.headers_verified(headers);
if self.config.close_connection_on_bad_block {
self.peers.misbehaving(
@ -1305,7 +1337,7 @@ pub mod tests {
use std::sync::Arc;
use parking_lot::{Mutex, RwLock};
use chain::{Block, Transaction};
use chain::{Block, Transaction, IndexedBlock};
use db::BlockChainDatabase;
use message::common::InventoryVector;
use message::{Services, types};
@ -1314,7 +1346,7 @@ pub mod tests {
use primitives::hash::H256;
use verification::BackwardsCompatibleChainVerifier as ChainVerifier;
use inbound_connection::tests::DummyOutboundSyncConnection;
use synchronization_chain::Chain;
use synchronization_chain::{Chain, BlockState};
use synchronization_client::{SynchronizationClient, Client};
use synchronization_peers::PeersImpl;
use synchronization_executor::Task;
@ -2459,4 +2491,59 @@ pub mod tests {
assert_eq!(data.lock().is_synchronizing, false);
assert_eq!(data.lock().best_blocks.len(), 3);
}
#[test]
fn known_blocks_are_ignored_in_headers_verification_success() {
let (_, sync, _) = create_sync(None, None);
let mut sync = sync.lock();
let block1: IndexedBlock = test_data::block_h1().into();
let block2: IndexedBlock = test_data::block_h2().into();
let header1 = block1.header.clone();
let header2 = block2.header.clone();
let hash1 = *block1.hash();
let hash2 = *block2.hash();
// WHEN
// we have orphaned [block2]
sync.orphaned_blocks_pool.insert_unknown_block(block2.clone());
// THEN:
// [header1] received => [header1] verification starts
sync.on_headers(0, vec![header1.clone()]);
assert_eq!(sync.chain().block_state(&hash1), BlockState::VerifyingHeader);
assert_eq!(sync.chain().block_state(&hash2), BlockState::Unknown);
// [header1] verification ends => [block1] is requested
sync.on_headers_verification_success(vec![header1.clone()]);
assert_eq!(sync.chain().block_state(&hash1), BlockState::Requested);
assert_eq!(sync.chain().block_state(&hash2), BlockState::Unknown);
// [header2] received => [header2] verification starts
sync.on_headers(0, vec![header2.clone()]);
assert_eq!(sync.chain().block_state(&hash1), BlockState::Requested);
assert_eq!(sync.chain().block_state(&hash2), BlockState::VerifyingHeader);
// [block1] received => [block1, block2] verification starts
sync.on_block(0, block1.clone());
assert_eq!(sync.chain().block_state(&hash1), BlockState::Verifying);
assert_eq!(sync.chain().block_state(&hash2), BlockState::Verifying); // pre-fix: VerifyingHeader
// [block1, block2] verification ends => [block1, block2] are inserted into DB
sync.on_block_verification_success(block1.clone());
sync.on_block_verification_success(block2.clone());
assert_eq!(sync.chain().block_state(&hash1), BlockState::Stored);
assert_eq!(sync.chain().block_state(&hash2), BlockState::Stored); // pre-fix: VerifyingHeader
// [header2] verification ends => [block2] is requested
sync.on_headers_verification_success(vec![header2.clone()]);
assert_eq!(sync.chain().block_state(&hash1), BlockState::Stored);
assert_eq!(sync.chain().block_state(&hash2), BlockState::Stored); // pre-fix: Requested
// [block2] received => [block2] verification starts
sync.on_block(0, block2.clone());
assert_eq!(sync.chain().block_state(&hash1), BlockState::Stored);
assert_eq!(sync.chain().block_state(&hash2), BlockState::Stored); // pre-fix: Verifying
}
}

File diff suppressed because one or more lines are too long

View File

@ -6,8 +6,9 @@ use script::{self, Builder};
use sigops::transaction_sigops;
use deployments::BlockDeployments;
use canon::CanonBlock;
use error::{Error, TransactionError};
use error::Error;
use timestamp::median_timestamp;
use fee::checked_transaction_fee;
/// Flexible verification of ordered block
pub struct BlockAcceptor<'a> {
@ -174,38 +175,11 @@ impl<'a> BlockCoinbaseMinerReward<'a> {
let mut fees: u64 = 0;
for (tx_idx, tx) in self.block.transactions.iter().enumerate().skip(1) {
// (1) Total sum of all referenced outputs
let mut incoming: u64 = 0;
for input in tx.raw.inputs.iter() {
let prevout = store.transaction_output(&input.previous_output, tx_idx);
let (sum, overflow) = incoming.overflowing_add(prevout.map(|o| o.value).unwrap_or(0));
if overflow {
return Err(Error::ReferencedInputsSumOverflow)
}
incoming = sum;
}
let join_split_public_new = tx.raw.join_split.iter()
.flat_map(|js| &js.descriptions)
.map(|d| d.value_pub_new)
.sum::<u64>();
incoming = match incoming.overflowing_add(join_split_public_new) {
(_, true) => return Err(Error::ReferencedInputsSumOverflow),
(incoming, _) => incoming,
};
// (2) Total sum of all outputs
let spends = tx.raw.total_spends();
// Difference between (1) and (2)
let (difference, overflow) = incoming.overflowing_sub(spends);
if overflow {
return Err(Error::Transaction(tx_idx, TransactionError::Overspend))
}
let tx_fee = checked_transaction_fee(&store, tx_idx, &tx.raw)
.map_err(|tx_err| Error::Transaction(tx_idx, tx_err))?;
// Adding to total fees (with possible overflow)
let (sum, overflow) = fees.overflowing_add(difference);
let (sum, overflow) = fees.overflowing_add(tx_fee);
if overflow {
return Err(Error::TransactionFeesOverflow)
}
@ -355,10 +329,13 @@ impl<'a> BlockSaplingRoot<'a> {
mod tests {
extern crate test_data;
use std::collections::HashMap;
use chain::{OutPoint, TransactionOutput};
use db::BlockChainDatabase;
use storage::SaplingTreeState;
use network::{ConsensusParams, Network};
use storage::{SaplingTreeState, TransactionOutputProvider};
use {Error, CanonBlock};
use super::{BlockCoinbaseScript, BlockSaplingRoot};
use super::{BlockCoinbaseScript, BlockSaplingRoot, BlockCoinbaseMinerReward};
#[test]
fn test_block_coinbase_script() {
@ -423,4 +400,30 @@ mod tests {
actual: "0000000000000000000000000000000000000000000000000000000000000000".into(),
}));
}
#[test]
fn test_coinbase_overspend_b419221() {
struct Store(HashMap<OutPoint, TransactionOutput>);
impl TransactionOutputProvider for Store {
fn transaction_output(&self, outpoint: &OutPoint, _transaction_index: usize) -> Option<TransactionOutput> {
self.0.get(outpoint).cloned()
}
fn is_spent(&self, _outpoint: &OutPoint) -> bool {
false
}
}
let (block, donors) = test_data::block_h419221_with_donors();
let store = Store(donors.into_iter().flat_map(|donor| {
let hash = donor.hash();
donor.outputs.into_iter().enumerate().map(move |(index, output)| (OutPoint {
hash: hash.clone(),
index: index as u32,
}, output))
}).collect());
let consensus = ConsensusParams::new(Network::Mainnet);
assert_eq!(BlockCoinbaseMinerReward::new(CanonBlock::new(&block.into()), &store, &consensus, 419221).check(), Ok(()));
}
}

View File

@ -12,7 +12,7 @@ use chain::{OVERWINTER_TX_VERSION, SAPLING_TX_VERSION, OVERWINTER_TX_VERSION_GRO
use constants::COINBASE_MATURITY;
use error::TransactionError;
use primitives::hash::H256;
use VerificationLevel;
use {checked_transaction_fee, VerificationLevel};
use tree_cache::TreeCache;
pub struct TransactionAcceptor<'a> {
@ -22,7 +22,6 @@ pub struct TransactionAcceptor<'a> {
pub bip30: TransactionBip30<'a>,
pub missing_inputs: TransactionMissingInputs<'a>,
pub maturity: TransactionMaturity<'a>,
pub overspent: TransactionOverspent<'a>,
pub double_spent: TransactionDoubleSpend<'a>,
pub eval: TransactionEval<'a>,
pub join_split: JoinSplitVerification<'a>,
@ -54,7 +53,6 @@ impl<'a> TransactionAcceptor<'a> {
bip30: TransactionBip30::new_for_sync(transaction, meta_store),
missing_inputs: TransactionMissingInputs::new(transaction, output_store, transaction_index),
maturity: TransactionMaturity::new(transaction, meta_store, height),
overspent: TransactionOverspent::new(transaction, output_store),
double_spent: TransactionDoubleSpend::new(transaction, output_store),
eval: TransactionEval::new(transaction, output_store, consensus, verification_level, height, time, deployments),
join_split: JoinSplitVerification::new(consensus, transaction, nullifier_tracker, tree_state_provider),
@ -74,7 +72,6 @@ impl<'a> TransactionAcceptor<'a> {
self.bip30.check()?;
self.missing_inputs.check()?;
self.maturity.check()?;
self.overspent.check()?;
self.double_spent.check()?;
// to make sure we're using the sighash-cache, let's make all sighash-related
@ -274,24 +271,8 @@ impl<'a> TransactionOverspent<'a> {
return Ok(());
}
let available_public = self.transaction.raw.inputs.iter()
.map(|input| self.store.transaction_output(&input.previous_output, usize::max_value()).map(|o| o.value).unwrap_or(0))
.sum::<u64>();
let available_join_split = self.transaction.raw.join_split.iter()
.flat_map(|js| &js.descriptions)
.map(|d| d.value_pub_new)
.sum::<u64>();
let total_available = available_public + available_join_split;
let spends = self.transaction.raw.total_spends();
if spends > total_available {
Err(TransactionError::Overspend)
} else {
Ok(())
}
checked_transaction_fee(&self.store, ::std::usize::MAX, &self.transaction.raw)
.map(|_| ())
}
}
@ -387,7 +368,7 @@ impl<'a> TransactionEval<'a> {
input_index: 0,
input_amount: 0,
consensus_branch_id: self.consensus_branch_id,
cache: None,
cache: Default::default(),
};
// generate sighash that is not associated with a transparent input
@ -785,7 +766,7 @@ mod tests {
input_index: 0,
input_amount: 0,
consensus_branch_id: 0,
cache: None,
cache: Default::default(),
};
let flags = VerificationFlags::default()

View File

@ -53,7 +53,7 @@ impl BackwardsCompatibleChainVerifier {
match block_origin {
BlockOrigin::KnownBlock => {
// there should be no known blocks at this point
unreachable!();
unreachable!("Trying to re-verify known block: {}", block.hash().reversed());
},
BlockOrigin::CanonChain { block_number } => {
let tx_out_provider = CachedTransactionOutputProvider::new(self.store.as_store().as_transaction_output_provider());

View File

@ -49,8 +49,6 @@ pub enum Error {
TransactionFeeAndRewardOverflow,
/// Sum of the transaction fees in block exceeds u64::max
TransactionFeesOverflow,
/// Sum of all referenced outputs in block transactions resulted in an overflow
ReferencedInputsSumOverflow,
/// Non-canonical transactions ordering within block
NonCanonicalTransactionOrdering,
/// Database error
@ -155,4 +153,3 @@ pub enum TransactionError {
/// Unknown anchor used in join split
UnknownAnchor(H256),
}

110
verification/src/fee.rs Normal file
View File

@ -0,0 +1,110 @@
use chain::Transaction;
use storage::TransactionOutputProvider;
use TransactionError;
/// Compute miner fee for given transaction.
///
/// Returns an error if a referenced transparent prevout is missing, if overflow/underflow
/// happens while summing the values, or if the transaction spends more than it takes in.
pub fn checked_transaction_fee(store: &TransactionOutputProvider, tx_idx: usize, tx: &Transaction) -> Result<u64, TransactionError> {
// (1) Total sum of all transparent + shielded inputs
let mut incoming: u64 = 0;
for (input_idx, input) in tx.inputs.iter().enumerate() {
let prevout = match store.transaction_output(&input.previous_output, tx_idx) {
Some(prevout) => prevout,
None => return Err(TransactionError::Input(input_idx)),
};
incoming = match incoming.checked_add(prevout.value) {
Some(incoming) => incoming,
None => return Err(TransactionError::InputValueOverflow),
};
}
if let Some(ref join_split) = tx.join_split {
for js_desc in &join_split.descriptions {
incoming = match incoming.checked_add(js_desc.value_pub_new) {
Some(incoming) => incoming,
None => return Err(TransactionError::InputValueOverflow),
};
}
}
if let Some(ref sapling) = tx.sapling {
if sapling.balancing_value > 0 {
let balancing_value = sapling.balancing_value as u64;
incoming = match incoming.checked_add(balancing_value) {
Some(incoming) => incoming,
None => return Err(TransactionError::InputValueOverflow),
};
}
}
// (2) Total sum of all outputs
let mut spends = tx.total_spends();
if let Some(ref join_split) = tx.join_split {
for js_desc in &join_split.descriptions {
spends = match spends.checked_add(js_desc.value_pub_old) {
Some(spends) => spends,
None => return Err(TransactionError::OutputValueOverflow),
};
}
}
if let Some(ref sapling) = tx.sapling {
if sapling.balancing_value < 0 {
let balancing_value = match sapling.balancing_value.checked_neg() {
Some(balancing_value) => balancing_value as u64,
None => return Err(TransactionError::OutputValueOverflow),
};
spends = match spends.checked_add(balancing_value) {
Some(spends) => spends,
None => return Err(TransactionError::OutputValueOverflow),
};
}
}
// (3) Fee is the difference between (1) and (2)
match incoming.checked_sub(spends) {
Some(fee) => Ok(fee),
None => Err(TransactionError::Overspend),
}
}
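In equation form, when every prevout resolves and nothing overflows, the value returned above is
\text{fee} = \Big(\sum_{\text{inputs}} \text{prevout.value} + \sum_{\text{js desc}} \text{value\_pub\_new} + \max(0, \text{balancing\_value})\Big) - \Big(\sum_{\text{outputs}} \text{value} + \sum_{\text{js desc}} \text{value\_pub\_old} + \max(0, -\text{balancing\_value})\Big)
and a negative difference is rejected as TransactionError::Overspend.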
#[cfg(test)]
mod tests {
extern crate test_data;
use std::sync::Arc;
use storage::AsSubstore;
use db::BlockChainDatabase;
use super::*;
#[test]
fn test_transaction_fee() {
let b0 = test_data::block_builder().header().nonce(1.into()).build()
.transaction()
.output().value(1_000_000).build()
.output().value(2_000_000).build()
.build()
.build();
let tx0 = b0.transactions[0].clone();
let tx0_hash = tx0.hash();
let b1 = test_data::block_builder().header().parent(b0.hash().clone()).nonce(2.into()).build()
.transaction()
.input().hash(tx0_hash.clone()).index(0).build()
.input().hash(tx0_hash).index(1).build()
.output().value(2_500_000).build()
.build()
.build();
let tx2 = b1.transactions[0].clone();
let db = Arc::new(BlockChainDatabase::init_test_chain(vec![b0.into(), b1.into()]));
let store = db.as_transaction_output_provider();
assert_eq!(checked_transaction_fee(store, ::std::usize::MAX, &tx0), Err(TransactionError::Overspend));
assert_eq!(checked_transaction_fee(store, ::std::usize::MAX, &tx2), Ok(500_000));
}
}

View File

@ -85,6 +85,7 @@ mod canon;
mod deployments;
mod equihash;
mod error;
mod fee;
mod sapling;
mod sigops;
mod sprout;
@ -123,6 +124,7 @@ pub use verify_transaction::{TransactionVerifier, MemoryPoolTransactionVerifier}
pub use chain_verifier::BackwardsCompatibleChainVerifier;
pub use error::{Error, TransactionError};
pub use fee::checked_transaction_fee;
pub use sigops::transaction_sigops;
pub use timestamp::{median_timestamp, median_timestamp_inclusive};
pub use work::{work_required, is_valid_proof_of_work, is_valid_proof_of_work_hash};

View File

@ -306,7 +306,7 @@ mod tests {
fn compute_sighash(tx: Transaction) -> [u8; 32] {
let signer: TransactionInputSigner = tx.into();
signer.signature_hash(&mut None, None, 0, &From::from(vec![]), SighashBase::All.into(), 0x76b809bb).into()
signer.signature_hash(&mut Default::default(), None, 0, &From::from(vec![]), SighashBase::All.into(), 0x76b809bb).into()
}
fn run_accept_sapling(tx: Transaction) -> Result<(), Error> {