Merge pull request #313 from str4d/delint

Remove a bunch of lint
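The changes are mechanical clippy fixes. As an illustration (hypothetical code, not taken from the diff; `Counter` and `to_err_string` are made-up names), the sketch below shows three of the recurring patterns addressed: `len_zero`, `new_without_default`, and `redundant_closure`.

```rust
// Hypothetical example of the clippy lints fixed in this PR, for context only.

fn to_err_string(e: std::num::ParseIntError) -> String {
    e.to_string()
}

struct Counter(Vec<u32>);

impl Counter {
    fn new() -> Self {
        Counter(Vec::new())
    }
}

// clippy::new_without_default: a type with `new()` should also implement `Default`.
impl Default for Counter {
    fn default() -> Self {
        Counter::new()
    }
}

fn main() {
    let c = Counter::new();

    // clippy::len_zero: write `v.is_empty()` / `!v.is_empty()` instead of comparing `len()` to 0.
    assert!(c.0.is_empty());

    // clippy::redundant_closure: pass the function directly instead of `|e| to_err_string(e)`.
    let n: Result<u32, String> = "17".parse().map_err(to_err_string);
    assert_eq!(n, Ok(17));
}
```

The remaining changes in the diff are of the same flavor: dropping `.clone()` on `Copy` types, replacing `x = x - 1` with `x -= 1`, and removing redundant `use` statements.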
Commit baab9d3ca3 by ebfull, 2020-11-02 13:48:08 -07:00 (committed via GitHub)
26 changed files with 113 additions and 117 deletions

View File

@@ -1,5 +1,3 @@
-use protobuf_codegen_pure;
-
 fn main() {
     protobuf_codegen_pure::Codegen::new()
         .out_dir("src/proto")

View File

@@ -19,7 +19,7 @@ impl Iterator for NodeDataIterator {
             Some(leaf(2))
         } else if self.cursor == 3 {
             Some(self.tree.root_node().expect("always exists").data().clone())
-        } else if self.return_stack.len() > 0 {
+        } else if !self.return_stack.is_empty() {
             self.return_stack.pop()
         } else {
             for n_append in self

View File

@@ -3,7 +3,7 @@ use zcash_history::{Entry, EntryLink, NodeData, Tree};
 #[path = "lib/shared.rs"]
 mod share;
 
-fn draft(into: &mut Vec<(u32, Entry)>, vec: &Vec<NodeData>, peak_pos: usize, h: u32) {
+fn draft(into: &mut Vec<(u32, Entry)>, vec: &[NodeData], peak_pos: usize, h: u32) {
     let node_data = vec[peak_pos - 1].clone();
     let peak: Entry = match h {
         0 => node_data.into(),
@@ -19,8 +19,8 @@ fn draft(into: &mut Vec<(u32, Entry)>, vec: &Vec<NodeData>, peak_pos: usize, h:
     into.push(((peak_pos - 1) as u32, peak));
 }
 
-fn prepare_tree(vec: &Vec<NodeData>) -> Tree {
-    assert!(vec.len() > 0);
+fn prepare_tree(vec: &[NodeData]) -> Tree {
+    assert!(!vec.is_empty());
 
     // integer log2 of (vec.len()+1), -1
     let mut h = (32 - ((vec.len() + 1) as u32).leading_zeros() - 1) - 1;
@@ -34,8 +34,8 @@ fn prepare_tree(vec: &Vec<NodeData>) -> Tree {
     loop {
         if peak_pos > vec.len() {
             // left child, -2^h
-            peak_pos = peak_pos - (1 << h);
-            h = h - 1;
+            peak_pos -= 1 << h;
+            h -= 1;
         }
 
         if peak_pos <= vec.len() {
@@ -62,7 +62,7 @@ fn prepare_tree(vec: &Vec<NodeData>) -> Tree {
     while h > 0 {
         let left_pos = peak_pos - (1 << h);
         let right_pos = peak_pos - 1;
-        h = h - 1;
+        h -= 1;
 
         // drafting left child
         draft(&mut extra, vec, left_pos, h);
@@ -80,7 +80,7 @@ fn prepare_tree(vec: &Vec<NodeData>) -> Tree {
 }
 
 fn main() {
-    let number = match std::env::args().skip(1).next() {
+    let number = match std::env::args().nth(1) {
         None => {
             eprintln!("writer <number of nodes> [<out_file>]");
             std::process::exit(1);

View File

@@ -84,15 +84,14 @@ impl Tree {
     ///
    /// Will panic if `peaks` is empty.
    pub fn new(length: u32, peaks: Vec<(u32, Entry)>, extra: Vec<(u32, Entry)>) -> Self {
-        assert!(peaks.len() > 0);
+        assert!(!peaks.is_empty());
 
        let mut result = Tree::invalid();
 
        result.stored_count = length;
 
-        let mut gen = 0;
        let mut root = EntryLink::Stored(peaks[0].0);
-        for (idx, node) in peaks.into_iter() {
+        for (gen, (idx, node)) in peaks.into_iter().enumerate() {
            result.stored.insert(idx, node);
            if gen != 0 {
                let next_generated = combine_nodes(
@@ -105,7 +104,6 @@ impl Tree {
                );
                root = result.push_generated(next_generated);
            }
-            gen += 1;
        }
 
        for (idx, node) in extra {
@@ -209,7 +207,7 @@ impl Tree {
    fn pop(&mut self) {
        self.stored.remove(&(self.stored_count - 1));
-        self.stored_count = self.stored_count - 1;
+        self.stored_count -= 1;
    }
 
    /// Truncate one leaf from the end of the tree.
@@ -368,8 +366,7 @@ mod tests {
        assert!(length >= 3);
        let mut tree = initial();
        for i in 2..length {
-            tree.append_leaf(leaf(i + 1).into())
-                .expect("Failed to append");
+            tree.append_leaf(leaf(i + 1)).expect("Failed to append");
        }
 
        tree
@@ -683,9 +680,10 @@ mod tests {
                if number & (number - 1) == 0 {
                    if let EntryLink::Stored(_) = tree.root() { true }
                    else { false }
+                } else if let EntryLink::Generated(_) = tree.root() {
+                    true
                } else {
-                    if let EntryLink::Generated(_) = tree.root() { true }
-                    else { false }
+                    false
                }
            )
        }
@@ -708,12 +706,13 @@ mod tests {
            let total = add - delete + 2;
 
            TestResult::from_bool(
-                if total & total - 1 == 0 {
+                if total & (total - 1) == 0 {
                    if let EntryLink::Stored(_) = tree.root() { true }
                    else { false }
+                } else if let EntryLink::Generated(_) = tree.root() {
+                    true
                } else {
-                    if let EntryLink::Generated(_) = tree.root() { true }
-                    else { false }
+                    false
                }
            )
        }

View File

@@ -1,7 +1,6 @@
 //! Structs and methods for handling Zcash block headers.
 
 use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
-use hex;
 use sha2::{Digest, Sha256};
 use std::fmt;
 use std::io::{self, Read, Write};

View File

@@ -258,7 +258,7 @@ fn generate_pedersen_hash_exp_table() -> Vec<Vec<Vec<SubgroupPoint>>> {
             let mut base = SubgroupPoint::identity();
             for _ in 0..(1 << window) {
-                table.push(base.clone());
+                table.push(base);
                 base += g;
             }

View File

@@ -110,8 +110,8 @@ impl Clone for FullViewingKey {
     fn clone(&self) -> Self {
         FullViewingKey {
             vk: ViewingKey {
-                ak: self.vk.ak.clone(),
-                nk: self.vk.nk.clone(),
+                ak: self.vk.ak,
+                nk: self.vk.nk,
             },
             ovk: self.ovk,
         }

View File

@@ -505,7 +505,6 @@ mod tests {
     use super::{CommitmentTree, Hashable, IncrementalWitness, MerklePath, PathFiller};
     use crate::sapling::Node;
-    use hex;
     use std::convert::TryInto;
     use std::io::{self, Read, Write};
@@ -608,11 +607,11 @@ mod tests {
     #[test]
     fn empty_root_test_vectors() {
         let mut tmp = [0u8; 32];
-        for i in 0..HEX_EMPTY_ROOTS.len() {
+        for (i, &expected) in HEX_EMPTY_ROOTS.iter().enumerate() {
             Node::empty_root(i)
                 .write(&mut tmp[..])
                 .expect("length is 32 bytes");
-            assert_eq!(hex::encode(tmp), HEX_EMPTY_ROOTS[i]);
+            assert_eq!(hex::encode(tmp), expected);
         }
     }
@@ -633,11 +632,11 @@ mod tests {
     fn empty_commitment_tree_roots() {
         let tree = CommitmentTree::<Node>::new();
         let mut tmp = [0u8; 32];
-        for i in 1..HEX_EMPTY_ROOTS.len() {
+        for (i, &expected) in HEX_EMPTY_ROOTS.iter().enumerate().skip(1) {
             tree.root_inner(i, PathFiller::empty())
                 .write(&mut tmp[..])
                 .expect("length is 32 bytes");
-            assert_eq!(hex::encode(tmp), HEX_EMPTY_ROOTS[i]);
+            assert_eq!(hex::encode(tmp), expected);
         }
     }
@@ -1035,7 +1034,7 @@ mod tests {
             if let Some(leaf) = leaf {
                 let path = witness.path().expect("should be able to create a path");
                 let expected = MerklePath::from_slice_with_depth(
-                    &mut hex::decode(paths[paths_i]).unwrap(),
+                    &hex::decode(paths[paths_i]).unwrap(),
                     TESTING_DEPTH,
                 )
                 .unwrap();

View File

@@ -408,6 +408,8 @@ fn parse_note_plaintext_without_memo<P: consensus::Parameters>(
     Some((note, to))
 }
 
+#[allow(clippy::if_same_then_else)]
+#[allow(clippy::needless_bool)]
 pub fn plaintext_version_is_valid<P: consensus::Parameters>(
     params: &P,
     height: BlockHeight,

View File

@@ -137,7 +137,7 @@ pub mod test {
     fn test_pedersen_hash_points() {
         let test_vectors = pedersen_hash_vectors::get_vectors();
 
-        assert!(test_vectors.len() > 0);
+        assert!(!test_vectors.is_empty());
 
         for v in test_vectors.iter() {
             let input_bools: Vec<bool> = v.input_bits.iter().map(|&i| i == 1).collect();

View File

@@ -40,7 +40,7 @@ pub struct ProofGenerationKey {
 impl ProofGenerationKey {
     pub fn to_viewing_key(&self) -> ViewingKey {
         ViewingKey {
-            ak: self.ak.clone(),
+            ak: self.ak,
             nk: constants::PROOF_GENERATION_KEY_GENERATOR * self.nsk,
         }
     }
@@ -143,9 +143,7 @@ impl PaymentAddress {
             Diversifier(tmp)
         };
         // Check that the diversifier is valid
-        if diversifier.g_d().is_none() {
-            return None;
-        }
+        diversifier.g_d()?;
 
         let pk_d = jubjub::SubgroupPoint::from_bytes(bytes[11..43].try_into().unwrap());
         if pk_d.is_some().into() {
@@ -182,7 +180,7 @@ impl PaymentAddress {
             value,
             rseed: randomness,
             g_d,
-            pk_d: self.pk_d.clone(),
+            pk_d: self.pk_d,
         })
     }
 }

View File

@@ -184,16 +184,14 @@ pub fn batch_verify<'a, R: RngCore>(
         s.mul_assign(&z);
         s = s.neg();
 
-        r = r * z;
+        r *= z;
 
         c.mul_assign(&z);
 
-        acc = acc + r + (&entry.vk.0 * c) + (p_g * s);
+        acc = acc + r + (entry.vk.0 * c) + (p_g * s);
     }
 
-    acc = acc.mul_by_cofactor().into();
-
-    acc.is_identity().into()
+    acc.mul_by_cofactor().is_identity().into()
 }
 
 #[cfg(test)]
#[cfg(test)] #[cfg(test)]

View File

@@ -155,6 +155,7 @@ mod tests {
         }
     }
 
+    #[allow(clippy::useless_vec)]
     #[test]
     fn vector() {
         macro_rules! eval {

View File

@@ -111,7 +111,7 @@ impl SaplingOutput {
         let note = Note {
             g_d,
-            pk_d: to.pk_d().clone(),
+            pk_d: *to.pk_d(),
             value: value.into(),
             rseed,
         };
@@ -140,7 +140,7 @@ impl SaplingOutput {
         let (zkproof, cv) = prover.output_proof(
             ctx,
-            encryptor.esk().clone(),
+            *encryptor.esk(),
             self.to,
             self.note.rcm(),
             self.note.value,
@@ -207,7 +207,7 @@ impl TransparentInputs {
             use ripemd160::Ripemd160;
             use sha2::{Digest, Sha256};
 
-            if &hash[..] != &Ripemd160::digest(&Sha256::digest(&pubkey))[..] {
+            if hash[..] != Ripemd160::digest(&Sha256::digest(&pubkey))[..] {
                 return Err(Error::InvalidAddress);
             }
         }
@@ -590,7 +590,7 @@ impl<'a, P: consensus::Parameters, R: RngCore + CryptoRng> Builder<'a, P, R> {
                     self.spends[0].extsk.expsk.ovk,
                     PaymentAddress::from_parts(
                         self.spends[0].diversifier,
-                        self.spends[0].note.pk_d.clone(),
+                        self.spends[0].note.pk_d,
                     )
                     .ok_or(Error::InvalidAddress)?,
                 )
@@ -703,7 +703,7 @@ impl<'a, P: consensus::Parameters, R: RngCore + CryptoRng> Builder<'a, P, R> {
         let (pk_d, payment_address) = loop {
             let dummy_ivk = jubjub::Fr::random(&mut self.rng);
             let pk_d = g_d * dummy_ivk;
-            if let Some(addr) = PaymentAddress::from_parts(diversifier, pk_d.clone()) {
+            if let Some(addr) = PaymentAddress::from_parts(diversifier, pk_d) {
                 break (pk_d, addr);
             }
         };
@@ -950,12 +950,7 @@ mod tests {
         // Create a tx with a sapling spend. binding_sig should be present
         builder
-            .add_sapling_spend(
-                extsk.clone(),
-                *to.diversifier(),
-                note1.clone(),
-                witness1.path().unwrap(),
-            )
+            .add_sapling_spend(extsk, *to.diversifier(), note1, witness1.path().unwrap())
             .unwrap();
         builder
@@ -1008,12 +1003,7 @@ mod tests {
         {
             let mut builder = Builder::new(TEST_NETWORK, H0);
             builder
-                .add_sapling_output(
-                    ovk.clone(),
-                    to.clone(),
-                    Amount::from_u64(50000).unwrap(),
-                    None,
-                )
+                .add_sapling_output(ovk, to.clone(), Amount::from_u64(50000).unwrap(), None)
                 .unwrap();
             assert_eq!(
                 builder.build(consensus::BranchId::Sapling, &MockTxProver),
@@ -1058,12 +1048,7 @@ mod tests {
            )
            .unwrap();
            builder
-                .add_sapling_output(
-                    ovk.clone(),
-                    to.clone(),
-                    Amount::from_u64(30000).unwrap(),
-                    None,
-                )
+                .add_sapling_output(ovk, to.clone(), Amount::from_u64(30000).unwrap(), None)
                .unwrap();
            builder
                .add_transparent_output(

View File

@@ -161,8 +161,8 @@ impl TzeIn {
         Ok(TzeIn {
             prevout,
             witness: tze::Witness {
-                extension_id: u32::try_from(extension_id).map_err(|e| to_io_error(e))?,
-                mode: u32::try_from(mode).map_err(|e| to_io_error(e))?,
+                extension_id: u32::try_from(extension_id).map_err(to_io_error)?,
+                mode: u32::try_from(mode).map_err(to_io_error)?,
                 payload,
             },
         })
@@ -177,12 +177,12 @@ impl TzeIn {
         CompactSize::write(
             &mut writer,
-            usize::try_from(self.witness.extension_id).map_err(|e| to_io_error(e))?,
+            usize::try_from(self.witness.extension_id).map_err(to_io_error)?,
         )?;
 
         CompactSize::write(
             &mut writer,
-            usize::try_from(self.witness.mode).map_err(|e| to_io_error(e))?,
+            usize::try_from(self.witness.mode).map_err(to_io_error)?,
         )
     }
@@ -219,8 +219,8 @@ impl TzeOut {
         Ok(TzeOut {
             value,
             precondition: tze::Precondition {
-                extension_id: u32::try_from(extension_id).map_err(|e| to_io_error(e))?,
-                mode: u32::try_from(mode).map_err(|e| to_io_error(e))?,
+                extension_id: u32::try_from(extension_id).map_err(to_io_error)?,
+                mode: u32::try_from(mode).map_err(to_io_error)?,
                 payload,
             },
         })
@@ -231,11 +231,11 @@ impl TzeOut {
         CompactSize::write(
             &mut writer,
-            usize::try_from(self.precondition.extension_id).map_err(|e| to_io_error(e))?,
+            usize::try_from(self.precondition.extension_id).map_err(to_io_error)?,
         )?;
 
         CompactSize::write(
             &mut writer,
-            usize::try_from(self.precondition.mode).map_err(|e| to_io_error(e))?,
+            usize::try_from(self.precondition.mode).map_err(to_io_error)?,
         )?;
 
         Vector::write(&mut writer, &self.precondition.payload, |w, b| {
             w.write_u8(*b)

View File

@@ -167,52 +167,49 @@ mod tests {
     #[test]
     fn amount_in_range() {
         let zero = b"\x00\x00\x00\x00\x00\x00\x00\x00";
-        assert_eq!(Amount::from_u64_le_bytes(zero.clone()).unwrap(), Amount(0));
+        assert_eq!(Amount::from_u64_le_bytes(*zero).unwrap(), Amount(0));
         assert_eq!(
-            Amount::from_nonnegative_i64_le_bytes(zero.clone()).unwrap(),
+            Amount::from_nonnegative_i64_le_bytes(*zero).unwrap(),
             Amount(0)
         );
-        assert_eq!(Amount::from_i64_le_bytes(zero.clone()).unwrap(), Amount(0));
+        assert_eq!(Amount::from_i64_le_bytes(*zero).unwrap(), Amount(0));
 
         let neg_one = b"\xff\xff\xff\xff\xff\xff\xff\xff";
-        assert!(Amount::from_u64_le_bytes(neg_one.clone()).is_err());
-        assert!(Amount::from_nonnegative_i64_le_bytes(neg_one.clone()).is_err());
-        assert_eq!(
-            Amount::from_i64_le_bytes(neg_one.clone()).unwrap(),
-            Amount(-1)
-        );
+        assert!(Amount::from_u64_le_bytes(*neg_one).is_err());
+        assert!(Amount::from_nonnegative_i64_le_bytes(*neg_one).is_err());
+        assert_eq!(Amount::from_i64_le_bytes(*neg_one).unwrap(), Amount(-1));
 
         let max_money = b"\x00\x40\x07\x5a\xf0\x75\x07\x00";
         assert_eq!(
-            Amount::from_u64_le_bytes(max_money.clone()).unwrap(),
+            Amount::from_u64_le_bytes(*max_money).unwrap(),
             Amount(MAX_MONEY)
         );
         assert_eq!(
-            Amount::from_nonnegative_i64_le_bytes(max_money.clone()).unwrap(),
+            Amount::from_nonnegative_i64_le_bytes(*max_money).unwrap(),
             Amount(MAX_MONEY)
         );
         assert_eq!(
-            Amount::from_i64_le_bytes(max_money.clone()).unwrap(),
+            Amount::from_i64_le_bytes(*max_money).unwrap(),
             Amount(MAX_MONEY)
         );
 
         let max_money_p1 = b"\x01\x40\x07\x5a\xf0\x75\x07\x00";
-        assert!(Amount::from_u64_le_bytes(max_money_p1.clone()).is_err());
-        assert!(Amount::from_nonnegative_i64_le_bytes(max_money_p1.clone()).is_err());
-        assert!(Amount::from_i64_le_bytes(max_money_p1.clone()).is_err());
+        assert!(Amount::from_u64_le_bytes(*max_money_p1).is_err());
+        assert!(Amount::from_nonnegative_i64_le_bytes(*max_money_p1).is_err());
+        assert!(Amount::from_i64_le_bytes(*max_money_p1).is_err());
 
         let neg_max_money = b"\x00\xc0\xf8\xa5\x0f\x8a\xf8\xff";
-        assert!(Amount::from_u64_le_bytes(neg_max_money.clone()).is_err());
-        assert!(Amount::from_nonnegative_i64_le_bytes(neg_max_money.clone()).is_err());
+        assert!(Amount::from_u64_le_bytes(*neg_max_money).is_err());
+        assert!(Amount::from_nonnegative_i64_le_bytes(*neg_max_money).is_err());
         assert_eq!(
-            Amount::from_i64_le_bytes(neg_max_money.clone()).unwrap(),
+            Amount::from_i64_le_bytes(*neg_max_money).unwrap(),
             Amount(-MAX_MONEY)
         );
 
         let neg_max_money_m1 = b"\xff\xbf\xf8\xa5\x0f\x8a\xf8\xff";
-        assert!(Amount::from_u64_le_bytes(neg_max_money_m1.clone()).is_err());
-        assert!(Amount::from_nonnegative_i64_le_bytes(neg_max_money_m1.clone()).is_err());
-        assert!(Amount::from_i64_le_bytes(neg_max_money_m1.clone()).is_err());
+        assert!(Amount::from_u64_le_bytes(*neg_max_money_m1).is_err());
+        assert!(Amount::from_nonnegative_i64_le_bytes(*neg_max_money_m1).is_err());
+        assert!(Amount::from_i64_le_bytes(*neg_max_money_m1).is_err());
     }
 
     #[test]

View File

@@ -1,7 +1,6 @@
 //! Structs and methods for handling Zcash transactions.
 
 use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
-use hex;
 use sha2::{Digest, Sha256};
 use std::fmt;
 use std::io::{self, Read, Write};
@@ -127,6 +126,12 @@ impl std::fmt::Debug for TransactionData {
     }
 }
 
+impl Default for TransactionData {
+    fn default() -> Self {
+        TransactionData::new()
+    }
+}
+
 impl TransactionData {
     pub fn new() -> Self {
         TransactionData {

View File

@@ -226,12 +226,9 @@ fn test_tze_tx_parse() {
     match tx {
         Ok(tx) => assert!(!tx.tze_inputs.is_empty()),
-        Err(e) => assert!(
-            false,
-            format!(
-                "An error occurred parsing a serialized TZE transaction: {}",
-                e
-            )
+        Err(e) => panic!(
+            "An error occurred parsing a serialized TZE transaction: {}",
+            e
         ),
     }
 }

View File

@@ -23,7 +23,7 @@ pub fn generate_random_rseed<P: consensus::Parameters, R: RngCore + CryptoRng>(
 ) -> Rseed {
     if params.is_nu_active(NetworkUpgrade::Canopy, height) {
         let mut buffer = [0u8; 32];
-        &rng.fill_bytes(&mut buffer);
+        rng.fill_bytes(&mut buffer);
         Rseed::AfterZip212(buffer)
     } else {
         Rseed::BeforeZip212(jubjub::Fr::random(rng))

View File

@@ -99,6 +99,12 @@ struct ChainCode([u8; 32]);
 #[derive(Clone, Copy, Debug, PartialEq)]
 pub struct DiversifierIndex(pub [u8; 11]);
 
+impl Default for DiversifierIndex {
+    fn default() -> Self {
+        DiversifierIndex::new()
+    }
+}
+
 impl DiversifierIndex {
     pub fn new() -> Self {
         DiversifierIndex([0; 11])

View File

@@ -326,7 +326,7 @@ impl EdwardsPoint {
             let mut t1 = bls12_381::Scalar::one();
             t1.add_assign(c.get_value().get()?);
 
-            let res = t1.invert().map(|t1| t0 * &t1);
+            let res = t1.invert().map(|t1| t0 * t1);
             if bool::from(res.is_some()) {
                 Ok(res.unwrap())
             } else {
@@ -352,7 +352,7 @@ impl EdwardsPoint {
             let mut t1 = bls12_381::Scalar::one();
             t1.sub_assign(c.get_value().get()?);
 
-            let res = t1.invert().map(|t1| t0 * &t1);
+            let res = t1.invert().map(|t1| t0 * t1);
             if bool::from(res.is_some()) {
                 Ok(res.unwrap())
             } else {
@@ -427,7 +427,7 @@ impl EdwardsPoint {
             let mut t1 = bls12_381::Scalar::one();
             t1.add_assign(c.get_value().get()?);
 
-            let ret = t1.invert().map(|t1| t0 * &t1);
+            let ret = t1.invert().map(|t1| t0 * t1);
             if bool::from(ret.is_some()) {
                 Ok(ret.unwrap())
             } else {
@@ -452,7 +452,7 @@ impl EdwardsPoint {
             let mut t1 = bls12_381::Scalar::one();
             t1.sub_assign(c.get_value().get()?);
 
-            let ret = t1.invert().map(|t1| t0 * &t1);
+            let ret = t1.invert().map(|t1| t0 * t1);
             if bool::from(ret.is_some()) {
                 Ok(ret.unwrap())
             } else {
@@ -489,7 +489,7 @@ impl MontgomeryPoint {
             let mut t0 = *self.x.get_value().get()?;
             t0.mul_assign(MONTGOMERY_SCALE);
 
-            let ret = self.y.get_value().get()?.invert().map(|invy| t0 * &invy);
+            let ret = self.y.get_value().get()?.invert().map(|invy| t0 * invy);
             if bool::from(ret.is_some()) {
                 Ok(ret.unwrap())
             } else {
@@ -511,7 +511,7 @@ impl MontgomeryPoint {
             t0.sub_assign(&bls12_381::Scalar::one());
             t1.add_assign(&bls12_381::Scalar::one());
 
-            let ret = t1.invert().map(|t1| t0 * &t1);
+            let ret = t1.invert().map(|t1| t0 * t1);
             if bool::from(ret.is_some()) {
                 Ok(ret.unwrap())
             } else {
@@ -552,7 +552,7 @@ impl MontgomeryPoint {
             let mut d = *other.x.get_value().get()?;
             d.sub_assign(self.x.get_value().get()?);
 
-            let ret = d.invert().map(|d| n * &d);
+            let ret = d.invert().map(|d| n * d);
             if bool::from(ret.is_some()) {
                 Ok(ret.unwrap())
             } else {

View File

@@ -52,7 +52,7 @@ impl InputNote {
         )?;
 
         // Witness into the merkle tree
-        let mut cur = cm.clone();
+        let mut cur = cm;
 
         for (i, layer) in auth_path.iter().enumerate() {
             let cs = &mut cs.namespace(|| format!("layer {}", i));

View File

@@ -73,7 +73,7 @@ pub fn generate_circuit_generator(mut gen: jubjub::SubgroupPoint) -> FixedGenera
     for _ in 0..FIXED_BASE_CHUNKS_PER_GENERATOR {
         let mut coeffs = vec![(Scalar::zero(), Scalar::one())];
-        let mut g = gen.clone();
+        let mut g = gen;
         for _ in 0..7 {
             let g_affine = jubjub::ExtendedPoint::from(g).to_affine();
             coeffs.push((g_affine.get_u(), g_affine.get_v()));
@@ -143,7 +143,7 @@ fn generate_pedersen_circuit_generators() -> Vec<Vec<Vec<(Scalar, Scalar)>>> {
     for _ in 0..PEDERSEN_HASH_CHUNKS_PER_GENERATOR {
         // Create (x, y) coeffs for this chunk
         let mut coeffs = vec![];
-        let mut g = gen.clone();
+        let mut g = gen;
 
         // coeffs = g, g*2, g*3, g*4
         for _ in 0..4 {

View File

@@ -151,7 +151,7 @@ pub fn parse_parameters<R: io::Read>(
 ) {
     let mut spend_fs = hashreader::HashReader::new(spend_fs);
     let mut output_fs = hashreader::HashReader::new(output_fs);
-    let mut sprout_fs = sprout_fs.map(|fs| hashreader::HashReader::new(fs));
+    let mut sprout_fs = sprout_fs.map(hashreader::HashReader::new);
 
     // Deserialize params
     let spend_params = Parameters::<Bls12>::read(&mut spend_fs, false)

View File

@@ -26,6 +26,12 @@ pub struct SaplingProvingContext {
     cv_sum: jubjub::ExtendedPoint,
 }
 
+impl Default for SaplingProvingContext {
+    fn default() -> Self {
+        SaplingProvingContext::new()
+    }
+}
+
 impl SaplingProvingContext {
     /// Construct a new context to be used with a single transaction.
     pub fn new() -> Self {
@@ -85,7 +91,7 @@ impl SaplingProvingContext {
         let note = Note {
             value,
             g_d: diversifier.g_d().expect("was a valid diversifier before"),
-            pk_d: payment_address.pk_d().clone(),
+            pk_d: *payment_address.pk_d(),
             rseed,
         };
@@ -187,7 +193,7 @@ impl SaplingProvingContext {
         // We now have a full witness for the output proof.
         let instance = Output {
             value_commitment: Some(value_commitment.clone()),
-            payment_address: Some(payment_address.clone()),
+            payment_address: Some(payment_address),
             commitment_randomness: Some(rcm),
             esk: Some(esk),
         };

View File

@@ -18,6 +18,12 @@ pub struct SaplingVerificationContext {
     cv_sum: jubjub::ExtendedPoint,
 }
 
+impl Default for SaplingVerificationContext {
+    fn default() -> Self {
+        SaplingVerificationContext::new()
+    }
+}
+
 impl SaplingVerificationContext {
     /// Construct a new context to be used with a single transaction.
     pub fn new() -> Self {
@@ -137,7 +143,7 @@ impl SaplingVerificationContext {
         binding_sig: Signature,
     ) -> bool {
         // Obtain current cv_sum from the context
-        let mut bvk = PublicKey(self.cv_sum.clone());
+        let mut bvk = PublicKey(self.cv_sum);
 
         // Compute value balance
         let value_balance = match compute_value_balance(value_balance) {