Merge pull request #313 from str4d/delint

Remove a bunch of lint
ebfull, 2020-11-02 13:48:08 -07:00 (committed by GitHub)
commit baab9d3ca3
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
26 changed files with 113 additions and 117 deletions

View File

@ -1,5 +1,3 @@
use protobuf_codegen_pure;
fn main() {
protobuf_codegen_pure::Codegen::new()
.out_dir("src/proto")

View File

@ -19,7 +19,7 @@ impl Iterator for NodeDataIterator {
Some(leaf(2))
} else if self.cursor == 3 {
Some(self.tree.root_node().expect("always exists").data().clone())
} else if self.return_stack.len() > 0 {
} else if !self.return_stack.is_empty() {
self.return_stack.pop()
} else {
for n_append in self
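This hunk is one of the clippy len_zero fixes that recur through the PR: return_stack.len() > 0 becomes !return_stack.is_empty(). A minimal standalone sketch of the pattern (hypothetical helper, not code from the crate):

    fn pop_if_any(return_stack: &mut Vec<u32>) -> Option<u32> {
        // clippy::len_zero: an emptiness check reads better (and is never
        // slower) than comparing len() against zero.
        if !return_stack.is_empty() {
            return_stack.pop()
        } else {
            None
        }
    }

    fn main() {
        let mut stack = vec![1, 2, 3];
        assert_eq!(pop_if_any(&mut stack), Some(3));
    }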

View File

@ -3,7 +3,7 @@ use zcash_history::{Entry, EntryLink, NodeData, Tree};
#[path = "lib/shared.rs"]
mod share;
fn draft(into: &mut Vec<(u32, Entry)>, vec: &Vec<NodeData>, peak_pos: usize, h: u32) {
fn draft(into: &mut Vec<(u32, Entry)>, vec: &[NodeData], peak_pos: usize, h: u32) {
let node_data = vec[peak_pos - 1].clone();
let peak: Entry = match h {
0 => node_data.into(),
@ -19,8 +19,8 @@ fn draft(into: &mut Vec<(u32, Entry)>, vec: &Vec<NodeData>, peak_pos: usize, h:
into.push(((peak_pos - 1) as u32, peak));
}
fn prepare_tree(vec: &Vec<NodeData>) -> Tree {
assert!(vec.len() > 0);
fn prepare_tree(vec: &[NodeData]) -> Tree {
assert!(!vec.is_empty());
// integer log2 of (vec.len()+1), -1
let mut h = (32 - ((vec.len() + 1) as u32).leading_zeros() - 1) - 1;
@ -34,8 +34,8 @@ fn prepare_tree(vec: &Vec<NodeData>) -> Tree {
loop {
if peak_pos > vec.len() {
// left child, -2^h
peak_pos = peak_pos - (1 << h);
h = h - 1;
peak_pos -= 1 << h;
h -= 1;
}
if peak_pos <= vec.len() {
@ -62,7 +62,7 @@ fn prepare_tree(vec: &Vec<NodeData>) -> Tree {
while h > 0 {
let left_pos = peak_pos - (1 << h);
let right_pos = peak_pos - 1;
h = h - 1;
h -= 1;
// drafting left child
draft(&mut extra, vec, left_pos, h);
@ -80,7 +80,7 @@ fn prepare_tree(vec: &Vec<NodeData>) -> Tree {
}
fn main() {
let number = match std::env::args().skip(1).next() {
let number = match std::env::args().nth(1) {
None => {
eprintln!("writer <number of nodes> [<out_file>]");
std::process::exit(1);
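Besides the compound-assignment cleanups, this file keeps the expression 32 - ((vec.len() + 1) as u32).leading_zeros() - 1, the usual integer floor(log2) trick, and swaps skip(1).next() for nth(1) per clippy's iter_skip_next. A small standalone sketch of the log2 part (helper name is made up):

    fn floor_log2(n: u32) -> u32 {
        // For n > 0 the highest set bit sits at position 31 - leading_zeros,
        // which is exactly floor(log2(n)).
        assert!(n > 0);
        32 - n.leading_zeros() - 1
    }

    fn main() {
        assert_eq!(floor_log2(1), 0);
        assert_eq!(floor_log2(8), 3);
        assert_eq!(floor_log2(9), 3);
        // prepare_tree then subtracts one more to get the starting peak height.
        assert_eq!(floor_log2(16) - 1, 3);
    }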

View File

@ -84,15 +84,14 @@ impl Tree {
///
/// Will panic if `peaks` is empty.
pub fn new(length: u32, peaks: Vec<(u32, Entry)>, extra: Vec<(u32, Entry)>) -> Self {
assert!(peaks.len() > 0);
assert!(!peaks.is_empty());
let mut result = Tree::invalid();
result.stored_count = length;
let mut gen = 0;
let mut root = EntryLink::Stored(peaks[0].0);
for (idx, node) in peaks.into_iter() {
for (gen, (idx, node)) in peaks.into_iter().enumerate() {
result.stored.insert(idx, node);
if gen != 0 {
let next_generated = combine_nodes(
@ -105,7 +104,6 @@ impl Tree {
);
root = result.push_generated(next_generated);
}
gen += 1;
}
for (idx, node) in extra {
@ -209,7 +207,7 @@ impl Tree {
fn pop(&mut self) {
self.stored.remove(&(self.stored_count - 1));
self.stored_count = self.stored_count - 1;
self.stored_count -= 1;
}
/// Truncate one leaf from the end of the tree.
@ -368,8 +366,7 @@ mod tests {
assert!(length >= 3);
let mut tree = initial();
for i in 2..length {
tree.append_leaf(leaf(i + 1).into())
.expect("Failed to append");
tree.append_leaf(leaf(i + 1)).expect("Failed to append");
}
tree
@ -683,9 +680,10 @@ mod tests {
if number & (number - 1) == 0 {
if let EntryLink::Stored(_) = tree.root() { true }
else { false }
} else if let EntryLink::Generated(_) = tree.root() {
true
} else {
if let EntryLink::Generated(_) = tree.root() { true }
else { false }
false
}
)
}
@ -708,12 +706,13 @@ mod tests {
let total = add - delete + 2;
TestResult::from_bool(
if total & total - 1 == 0 {
if total & (total - 1) == 0 {
if let EntryLink::Stored(_) = tree.root() { true }
else { false }
} else if let EntryLink::Generated(_) = tree.root() {
true
} else {
if let EntryLink::Generated(_) = tree.root() { true }
else { false }
false
}
)
}
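Two fixes meet in these test hunks: the manual gen counter in Tree::new is replaced by enumerate() (clippy's explicit_counter_loop), and total & (total - 1) == 0 gains parentheses that only make the grouping explicit, since - already binds tighter than & in Rust. The expression itself is the standard power-of-two test; a standalone illustration, not crate code:

    fn is_power_of_two(n: u32) -> bool {
        // Clearing the lowest set bit, n & (n - 1), leaves zero exactly when
        // a nonzero n has a single bit set.
        n != 0 && n & (n - 1) == 0
    }

    fn main() {
        assert!(is_power_of_two(1));
        assert!(is_power_of_two(64));
        assert!(!is_power_of_two(12));
        assert!(!is_power_of_two(0));
    }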

View File

@ -1,7 +1,6 @@
//! Structs and methods for handling Zcash block headers.
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use hex;
use sha2::{Digest, Sha256};
use std::fmt;
use std::io::{self, Read, Write};

View File

@ -258,7 +258,7 @@ fn generate_pedersen_hash_exp_table() -> Vec<Vec<Vec<SubgroupPoint>>> {
let mut base = SubgroupPoint::identity();
for _ in 0..(1 << window) {
table.push(base.clone());
table.push(base);
base += g;
}

View File

@ -110,8 +110,8 @@ impl Clone for FullViewingKey {
fn clone(&self) -> Self {
FullViewingKey {
vk: ViewingKey {
ak: self.vk.ak.clone(),
nk: self.vk.nk.clone(),
ak: self.vk.ak,
nk: self.vk.nk,
},
ovk: self.ovk,
}

View File

@ -505,7 +505,6 @@ mod tests {
use super::{CommitmentTree, Hashable, IncrementalWitness, MerklePath, PathFiller};
use crate::sapling::Node;
use hex;
use std::convert::TryInto;
use std::io::{self, Read, Write};
@ -608,11 +607,11 @@ mod tests {
#[test]
fn empty_root_test_vectors() {
let mut tmp = [0u8; 32];
for i in 0..HEX_EMPTY_ROOTS.len() {
for (i, &expected) in HEX_EMPTY_ROOTS.iter().enumerate() {
Node::empty_root(i)
.write(&mut tmp[..])
.expect("length is 32 bytes");
assert_eq!(hex::encode(tmp), HEX_EMPTY_ROOTS[i]);
assert_eq!(hex::encode(tmp), expected);
}
}
@ -633,11 +632,11 @@ mod tests {
fn empty_commitment_tree_roots() {
let tree = CommitmentTree::<Node>::new();
let mut tmp = [0u8; 32];
for i in 1..HEX_EMPTY_ROOTS.len() {
for (i, &expected) in HEX_EMPTY_ROOTS.iter().enumerate().skip(1) {
tree.root_inner(i, PathFiller::empty())
.write(&mut tmp[..])
.expect("length is 32 bytes");
assert_eq!(hex::encode(tmp), HEX_EMPTY_ROOTS[i]);
assert_eq!(hex::encode(tmp), expected);
}
}
@ -1035,7 +1034,7 @@ mod tests {
if let Some(leaf) = leaf {
let path = witness.path().expect("should be able to create a path");
let expected = MerklePath::from_slice_with_depth(
&mut hex::decode(paths[paths_i]).unwrap(),
&hex::decode(paths[paths_i]).unwrap(),
TESTING_DEPTH,
)
.unwrap();
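The test rewrites above follow clippy's needless_range_loop: rather than indexing HEX_EMPTY_ROOTS[i] inside "for i in 0..len()" or "for i in 1..len()", the slice is iterated directly with enumerate(), plus skip(1) where the first entry is not checked. The same shape with made-up data:

    fn main() {
        let expected = ["00", "01", "02", "03"];
        let produced = vec!["00", "01", "02", "03"];

        // Pair each expected value with its index instead of looping over a range.
        for (i, &want) in expected.iter().enumerate().skip(1) {
            assert_eq!(produced[i], want, "mismatch at height {}", i);
        }
    }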

View File

@ -408,6 +408,8 @@ fn parse_note_plaintext_without_memo<P: consensus::Parameters>(
Some((note, to))
}
#[allow(clippy::if_same_then_else)]
#[allow(clippy::needless_bool)]
pub fn plaintext_version_is_valid<P: consensus::Parameters>(
params: &P,
height: BlockHeight,

View File

@ -137,7 +137,7 @@ pub mod test {
fn test_pedersen_hash_points() {
let test_vectors = pedersen_hash_vectors::get_vectors();
assert!(test_vectors.len() > 0);
assert!(!test_vectors.is_empty());
for v in test_vectors.iter() {
let input_bools: Vec<bool> = v.input_bits.iter().map(|&i| i == 1).collect();

View File

@ -40,7 +40,7 @@ pub struct ProofGenerationKey {
impl ProofGenerationKey {
pub fn to_viewing_key(&self) -> ViewingKey {
ViewingKey {
ak: self.ak.clone(),
ak: self.ak,
nk: constants::PROOF_GENERATION_KEY_GENERATOR * self.nsk,
}
}
@ -143,9 +143,7 @@ impl PaymentAddress {
Diversifier(tmp)
};
// Check that the diversifier is valid
if diversifier.g_d().is_none() {
return None;
}
diversifier.g_d()?;
let pk_d = jubjub::SubgroupPoint::from_bytes(bytes[11..43].try_into().unwrap());
if pk_d.is_some().into() {
@ -182,7 +180,7 @@ impl PaymentAddress {
value,
rseed: randomness,
g_d,
pk_d: self.pk_d.clone(),
pk_d: self.pk_d,
})
}
}
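Replacing "if diversifier.g_d().is_none() { return None; }" with "diversifier.g_d()?;" follows clippy's question_mark suggestion: in a function returning Option, the ? operator already performs the early return. A toy sketch of the same shape (all names hypothetical):

    fn g_d(diversifier: u8) -> Option<u8> {
        // Pretend only even diversifiers map to a valid group element.
        if diversifier % 2 == 0 { Some(diversifier) } else { None }
    }

    fn parse(diversifier: u8, payload: u8) -> Option<(u8, u8)> {
        // ? returns None early, replacing the explicit is_none() check.
        g_d(diversifier)?;
        Some((diversifier, payload))
    }

    fn main() {
        assert_eq!(parse(2, 7), Some((2, 7)));
        assert_eq!(parse(3, 7), None);
    }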

View File

@ -184,16 +184,14 @@ pub fn batch_verify<'a, R: RngCore>(
s.mul_assign(&z);
s = s.neg();
r = r * z;
r *= z;
c.mul_assign(&z);
acc = acc + r + (&entry.vk.0 * c) + (p_g * s);
acc = acc + r + (entry.vk.0 * c) + (p_g * s);
}
acc = acc.mul_by_cofactor().into();
acc.is_identity().into()
acc.mul_by_cofactor().is_identity().into()
}
#[cfg(test)]

View File

@ -155,6 +155,7 @@ mod tests {
}
}
#[allow(clippy::useless_vec)]
#[test]
fn vector() {
macro_rules! eval {

View File

@ -111,7 +111,7 @@ impl SaplingOutput {
let note = Note {
g_d,
pk_d: to.pk_d().clone(),
pk_d: *to.pk_d(),
value: value.into(),
rseed,
};
@ -140,7 +140,7 @@ impl SaplingOutput {
let (zkproof, cv) = prover.output_proof(
ctx,
encryptor.esk().clone(),
*encryptor.esk(),
self.to,
self.note.rcm(),
self.note.value,
@ -207,7 +207,7 @@ impl TransparentInputs {
use ripemd160::Ripemd160;
use sha2::{Digest, Sha256};
if &hash[..] != &Ripemd160::digest(&Sha256::digest(&pubkey))[..] {
if hash[..] != Ripemd160::digest(&Sha256::digest(&pubkey))[..] {
return Err(Error::InvalidAddress);
}
}
@ -590,7 +590,7 @@ impl<'a, P: consensus::Parameters, R: RngCore + CryptoRng> Builder<'a, P, R> {
self.spends[0].extsk.expsk.ovk,
PaymentAddress::from_parts(
self.spends[0].diversifier,
self.spends[0].note.pk_d.clone(),
self.spends[0].note.pk_d,
)
.ok_or(Error::InvalidAddress)?,
)
@ -703,7 +703,7 @@ impl<'a, P: consensus::Parameters, R: RngCore + CryptoRng> Builder<'a, P, R> {
let (pk_d, payment_address) = loop {
let dummy_ivk = jubjub::Fr::random(&mut self.rng);
let pk_d = g_d * dummy_ivk;
if let Some(addr) = PaymentAddress::from_parts(diversifier, pk_d.clone()) {
if let Some(addr) = PaymentAddress::from_parts(diversifier, pk_d) {
break (pk_d, addr);
}
};
@ -950,12 +950,7 @@ mod tests {
// Create a tx with a sapling spend. binding_sig should be present
builder
.add_sapling_spend(
extsk.clone(),
*to.diversifier(),
note1.clone(),
witness1.path().unwrap(),
)
.add_sapling_spend(extsk, *to.diversifier(), note1, witness1.path().unwrap())
.unwrap();
builder
@ -1008,12 +1003,7 @@ mod tests {
{
let mut builder = Builder::new(TEST_NETWORK, H0);
builder
.add_sapling_output(
ovk.clone(),
to.clone(),
Amount::from_u64(50000).unwrap(),
None,
)
.add_sapling_output(ovk, to.clone(), Amount::from_u64(50000).unwrap(), None)
.unwrap();
assert_eq!(
builder.build(consensus::BranchId::Sapling, &MockTxProver),
@ -1058,12 +1048,7 @@ mod tests {
)
.unwrap();
builder
.add_sapling_output(
ovk.clone(),
to.clone(),
Amount::from_u64(30000).unwrap(),
None,
)
.add_sapling_output(ovk, to.clone(), Amount::from_u64(30000).unwrap(), None)
.unwrap();
builder
.add_transparent_output(

View File

@ -161,8 +161,8 @@ impl TzeIn {
Ok(TzeIn {
prevout,
witness: tze::Witness {
extension_id: u32::try_from(extension_id).map_err(|e| to_io_error(e))?,
mode: u32::try_from(mode).map_err(|e| to_io_error(e))?,
extension_id: u32::try_from(extension_id).map_err(to_io_error)?,
mode: u32::try_from(mode).map_err(to_io_error)?,
payload,
},
})
@ -177,12 +177,12 @@ impl TzeIn {
CompactSize::write(
&mut writer,
usize::try_from(self.witness.extension_id).map_err(|e| to_io_error(e))?,
usize::try_from(self.witness.extension_id).map_err(to_io_error)?,
)?;
CompactSize::write(
&mut writer,
usize::try_from(self.witness.mode).map_err(|e| to_io_error(e))?,
usize::try_from(self.witness.mode).map_err(to_io_error)?,
)
}
@ -219,8 +219,8 @@ impl TzeOut {
Ok(TzeOut {
value,
precondition: tze::Precondition {
extension_id: u32::try_from(extension_id).map_err(|e| to_io_error(e))?,
mode: u32::try_from(mode).map_err(|e| to_io_error(e))?,
extension_id: u32::try_from(extension_id).map_err(to_io_error)?,
mode: u32::try_from(mode).map_err(to_io_error)?,
payload,
},
})
@ -231,11 +231,11 @@ impl TzeOut {
CompactSize::write(
&mut writer,
usize::try_from(self.precondition.extension_id).map_err(|e| to_io_error(e))?,
usize::try_from(self.precondition.extension_id).map_err(to_io_error)?,
)?;
CompactSize::write(
&mut writer,
usize::try_from(self.precondition.mode).map_err(|e| to_io_error(e))?,
usize::try_from(self.precondition.mode).map_err(to_io_error)?,
)?;
Vector::write(&mut writer, &self.precondition.payload, |w, b| {
w.write_u8(*b)
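Every map_err(|e| to_io_error(e)) in this file becomes map_err(to_io_error), clippy's redundant_closure: a closure that only forwards its argument can be replaced by the function itself. A sketch with a stand-in for to_io_error (the real helper lives elsewhere in this module):

    use std::convert::TryFrom;
    use std::io;

    // Stand-in for the crate's to_io_error helper.
    fn to_io_error<E: std::fmt::Display>(e: E) -> io::Error {
        io::Error::new(io::ErrorKind::InvalidData, e.to_string())
    }

    fn read_extension_id(raw: i64) -> io::Result<u32> {
        // clippy::redundant_closure: pass to_io_error itself instead of
        // wrapping it in |e| to_io_error(e).
        u32::try_from(raw).map_err(to_io_error)
    }

    fn main() {
        assert!(read_extension_id(7).is_ok());
        assert!(read_extension_id(-1).is_err());
    }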

View File

@ -167,52 +167,49 @@ mod tests {
#[test]
fn amount_in_range() {
let zero = b"\x00\x00\x00\x00\x00\x00\x00\x00";
assert_eq!(Amount::from_u64_le_bytes(zero.clone()).unwrap(), Amount(0));
assert_eq!(Amount::from_u64_le_bytes(*zero).unwrap(), Amount(0));
assert_eq!(
Amount::from_nonnegative_i64_le_bytes(zero.clone()).unwrap(),
Amount::from_nonnegative_i64_le_bytes(*zero).unwrap(),
Amount(0)
);
assert_eq!(Amount::from_i64_le_bytes(zero.clone()).unwrap(), Amount(0));
assert_eq!(Amount::from_i64_le_bytes(*zero).unwrap(), Amount(0));
let neg_one = b"\xff\xff\xff\xff\xff\xff\xff\xff";
assert!(Amount::from_u64_le_bytes(neg_one.clone()).is_err());
assert!(Amount::from_nonnegative_i64_le_bytes(neg_one.clone()).is_err());
assert_eq!(
Amount::from_i64_le_bytes(neg_one.clone()).unwrap(),
Amount(-1)
);
assert!(Amount::from_u64_le_bytes(*neg_one).is_err());
assert!(Amount::from_nonnegative_i64_le_bytes(*neg_one).is_err());
assert_eq!(Amount::from_i64_le_bytes(*neg_one).unwrap(), Amount(-1));
let max_money = b"\x00\x40\x07\x5a\xf0\x75\x07\x00";
assert_eq!(
Amount::from_u64_le_bytes(max_money.clone()).unwrap(),
Amount::from_u64_le_bytes(*max_money).unwrap(),
Amount(MAX_MONEY)
);
assert_eq!(
Amount::from_nonnegative_i64_le_bytes(max_money.clone()).unwrap(),
Amount::from_nonnegative_i64_le_bytes(*max_money).unwrap(),
Amount(MAX_MONEY)
);
assert_eq!(
Amount::from_i64_le_bytes(max_money.clone()).unwrap(),
Amount::from_i64_le_bytes(*max_money).unwrap(),
Amount(MAX_MONEY)
);
let max_money_p1 = b"\x01\x40\x07\x5a\xf0\x75\x07\x00";
assert!(Amount::from_u64_le_bytes(max_money_p1.clone()).is_err());
assert!(Amount::from_nonnegative_i64_le_bytes(max_money_p1.clone()).is_err());
assert!(Amount::from_i64_le_bytes(max_money_p1.clone()).is_err());
assert!(Amount::from_u64_le_bytes(*max_money_p1).is_err());
assert!(Amount::from_nonnegative_i64_le_bytes(*max_money_p1).is_err());
assert!(Amount::from_i64_le_bytes(*max_money_p1).is_err());
let neg_max_money = b"\x00\xc0\xf8\xa5\x0f\x8a\xf8\xff";
assert!(Amount::from_u64_le_bytes(neg_max_money.clone()).is_err());
assert!(Amount::from_nonnegative_i64_le_bytes(neg_max_money.clone()).is_err());
assert!(Amount::from_u64_le_bytes(*neg_max_money).is_err());
assert!(Amount::from_nonnegative_i64_le_bytes(*neg_max_money).is_err());
assert_eq!(
Amount::from_i64_le_bytes(neg_max_money.clone()).unwrap(),
Amount::from_i64_le_bytes(*neg_max_money).unwrap(),
Amount(-MAX_MONEY)
);
let neg_max_money_m1 = b"\xff\xbf\xf8\xa5\x0f\x8a\xf8\xff";
assert!(Amount::from_u64_le_bytes(neg_max_money_m1.clone()).is_err());
assert!(Amount::from_nonnegative_i64_le_bytes(neg_max_money_m1.clone()).is_err());
assert!(Amount::from_i64_le_bytes(neg_max_money_m1.clone()).is_err());
assert!(Amount::from_u64_le_bytes(*neg_max_money_m1).is_err());
assert!(Amount::from_nonnegative_i64_le_bytes(*neg_max_money_m1).is_err());
assert!(Amount::from_i64_le_bytes(*neg_max_money_m1).is_err());
}
#[test]
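The zero.clone() to *zero edits here, like the many .clone() removals elsewhere in the commit, are clippy's clone_on_copy: a byte-string literal of length 8 is a &[u8; 8], and [u8; 8] is Copy, so a dereference is enough. A small sketch outside the crate:

    fn decode_le(bytes: [u8; 8]) -> u64 {
        u64::from_le_bytes(bytes)
    }

    fn main() {
        let zero: &[u8; 8] = b"\x00\x00\x00\x00\x00\x00\x00\x00";
        // clippy::clone_on_copy: *zero copies the array; zero.clone() is noise.
        assert_eq!(decode_le(*zero), 0);
    }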

View File

@ -1,7 +1,6 @@
//! Structs and methods for handling Zcash transactions.
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use hex;
use sha2::{Digest, Sha256};
use std::fmt;
use std::io::{self, Read, Write};
@ -127,6 +126,12 @@ impl std::fmt::Debug for TransactionData {
}
}
impl Default for TransactionData {
fn default() -> Self {
TransactionData::new()
}
}
impl TransactionData {
pub fn new() -> Self {
TransactionData {
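The new impl Default for TransactionData, and the matching impls added later for DiversifierIndex, SaplingProvingContext, and SaplingVerificationContext, answer clippy's new_without_default: a public no-argument new() should come with a Default impl that delegates to it. The general shape on a hypothetical type:

    pub struct Counter {
        count: u64,
    }

    impl Default for Counter {
        fn default() -> Self {
            Counter::new()
        }
    }

    impl Counter {
        pub fn new() -> Self {
            Counter { count: 0 }
        }
    }

    fn main() {
        assert_eq!(Counter::default().count, Counter::new().count);
    }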

View File

@ -226,12 +226,9 @@ fn test_tze_tx_parse() {
match tx {
Ok(tx) => assert!(!tx.tze_inputs.is_empty()),
Err(e) => assert!(
false,
format!(
"An error occurred parsing a serialized TZE transaction: {}",
e
)
Err(e) => panic!(
"An error occurred parsing a serialized TZE transaction: {}",
e
),
}
}
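assert!(false, format!(...)) becomes panic!(...): asserting a literal false is just a panic (clippy's assertions_on_constants), and panic! accepts the format string and arguments directly. The same shape with a stand-in parser result:

    fn check(parsed: Result<u32, String>) -> u32 {
        match parsed {
            Ok(v) => v,
            // Previously assert!(false, format!("error: {}", e)); now a direct panic.
            Err(e) => panic!("An error occurred parsing the input: {}", e),
        }
    }

    fn main() {
        assert_eq!(check(Ok(5)), 5);
    }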

View File

@ -23,7 +23,7 @@ pub fn generate_random_rseed<P: consensus::Parameters, R: RngCore + CryptoRng>(
) -> Rseed {
if params.is_nu_active(NetworkUpgrade::Canopy, height) {
let mut buffer = [0u8; 32];
&rng.fill_bytes(&mut buffer);
rng.fill_bytes(&mut buffer);
Rseed::AfterZip212(buffer)
} else {
Rseed::BeforeZip212(jubjub::Fr::random(rng))
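The fix above simply drops a stray &: fill_bytes returns (), so the old statement borrowed that unit value and immediately threw the reference away. A sketch of the corrected shape, assuming the rand_core crate as used by this workspace:

    use rand_core::RngCore;

    // Fill a fresh 32-byte seed from any RNG, same shape as generate_random_rseed.
    fn random_seed<R: RngCore>(rng: &mut R) -> [u8; 32] {
        let mut buffer = [0u8; 32];
        // fill_bytes returns (); the old leading & only borrowed that unit value.
        rng.fill_bytes(&mut buffer);
        buffer
    }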

View File

@ -99,6 +99,12 @@ struct ChainCode([u8; 32]);
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct DiversifierIndex(pub [u8; 11]);
impl Default for DiversifierIndex {
fn default() -> Self {
DiversifierIndex::new()
}
}
impl DiversifierIndex {
pub fn new() -> Self {
DiversifierIndex([0; 11])

View File

@ -326,7 +326,7 @@ impl EdwardsPoint {
let mut t1 = bls12_381::Scalar::one();
t1.add_assign(c.get_value().get()?);
let res = t1.invert().map(|t1| t0 * &t1);
let res = t1.invert().map(|t1| t0 * t1);
if bool::from(res.is_some()) {
Ok(res.unwrap())
} else {
@ -352,7 +352,7 @@ impl EdwardsPoint {
let mut t1 = bls12_381::Scalar::one();
t1.sub_assign(c.get_value().get()?);
let res = t1.invert().map(|t1| t0 * &t1);
let res = t1.invert().map(|t1| t0 * t1);
if bool::from(res.is_some()) {
Ok(res.unwrap())
} else {
@ -427,7 +427,7 @@ impl EdwardsPoint {
let mut t1 = bls12_381::Scalar::one();
t1.add_assign(c.get_value().get()?);
let ret = t1.invert().map(|t1| t0 * &t1);
let ret = t1.invert().map(|t1| t0 * t1);
if bool::from(ret.is_some()) {
Ok(ret.unwrap())
} else {
@ -452,7 +452,7 @@ impl EdwardsPoint {
let mut t1 = bls12_381::Scalar::one();
t1.sub_assign(c.get_value().get()?);
let ret = t1.invert().map(|t1| t0 * &t1);
let ret = t1.invert().map(|t1| t0 * t1);
if bool::from(ret.is_some()) {
Ok(ret.unwrap())
} else {
@ -489,7 +489,7 @@ impl MontgomeryPoint {
let mut t0 = *self.x.get_value().get()?;
t0.mul_assign(MONTGOMERY_SCALE);
let ret = self.y.get_value().get()?.invert().map(|invy| t0 * &invy);
let ret = self.y.get_value().get()?.invert().map(|invy| t0 * invy);
if bool::from(ret.is_some()) {
Ok(ret.unwrap())
} else {
@ -511,7 +511,7 @@ impl MontgomeryPoint {
t0.sub_assign(&bls12_381::Scalar::one());
t1.add_assign(&bls12_381::Scalar::one());
let ret = t1.invert().map(|t1| t0 * &t1);
let ret = t1.invert().map(|t1| t0 * t1);
if bool::from(ret.is_some()) {
Ok(ret.unwrap())
} else {
@ -552,7 +552,7 @@ impl MontgomeryPoint {
let mut d = *other.x.get_value().get()?;
d.sub_assign(self.x.get_value().get()?);
let ret = d.invert().map(|d| n * &d);
let ret = d.invert().map(|d| n * d);
if bool::from(ret.is_some()) {
Ok(ret.unwrap())
} else {
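The repeated change from t0 * &t1 to t0 * t1 is clippy's op_ref: bls12_381::Scalar is Copy and multiplication is implemented for owned values, so borrowing the right-hand side buys nothing. The same pattern on a small made-up Copy type:

    #[derive(Clone, Copy, PartialEq, Debug)]
    struct Scalar(u64);

    impl std::ops::Mul for Scalar {
        type Output = Scalar;
        fn mul(self, rhs: Scalar) -> Scalar {
            Scalar(self.0.wrapping_mul(rhs.0))
        }
    }

    fn main() {
        let (t0, t1) = (Scalar(6), Scalar(7));
        // clippy::op_ref: with a by-value Mul impl, t0 * t1 is the idiomatic form.
        assert_eq!(t0 * t1, Scalar(42));
    }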

View File

@ -52,7 +52,7 @@ impl InputNote {
)?;
// Witness into the merkle tree
let mut cur = cm.clone();
let mut cur = cm;
for (i, layer) in auth_path.iter().enumerate() {
let cs = &mut cs.namespace(|| format!("layer {}", i));

View File

@ -73,7 +73,7 @@ pub fn generate_circuit_generator(mut gen: jubjub::SubgroupPoint) -> FixedGenera
for _ in 0..FIXED_BASE_CHUNKS_PER_GENERATOR {
let mut coeffs = vec![(Scalar::zero(), Scalar::one())];
let mut g = gen.clone();
let mut g = gen;
for _ in 0..7 {
let g_affine = jubjub::ExtendedPoint::from(g).to_affine();
coeffs.push((g_affine.get_u(), g_affine.get_v()));
@ -143,7 +143,7 @@ fn generate_pedersen_circuit_generators() -> Vec<Vec<Vec<(Scalar, Scalar)>>> {
for _ in 0..PEDERSEN_HASH_CHUNKS_PER_GENERATOR {
// Create (x, y) coeffs for this chunk
let mut coeffs = vec![];
let mut g = gen.clone();
let mut g = gen;
// coeffs = g, g*2, g*3, g*4
for _ in 0..4 {

View File

@ -151,7 +151,7 @@ pub fn parse_parameters<R: io::Read>(
) {
let mut spend_fs = hashreader::HashReader::new(spend_fs);
let mut output_fs = hashreader::HashReader::new(output_fs);
let mut sprout_fs = sprout_fs.map(|fs| hashreader::HashReader::new(fs));
let mut sprout_fs = sprout_fs.map(hashreader::HashReader::new);
// Deserialize params
let spend_params = Parameters::<Bls12>::read(&mut spend_fs, false)

View File

@ -26,6 +26,12 @@ pub struct SaplingProvingContext {
cv_sum: jubjub::ExtendedPoint,
}
impl Default for SaplingProvingContext {
fn default() -> Self {
SaplingProvingContext::new()
}
}
impl SaplingProvingContext {
/// Construct a new context to be used with a single transaction.
pub fn new() -> Self {
@ -85,7 +91,7 @@ impl SaplingProvingContext {
let note = Note {
value,
g_d: diversifier.g_d().expect("was a valid diversifier before"),
pk_d: payment_address.pk_d().clone(),
pk_d: *payment_address.pk_d(),
rseed,
};
@ -187,7 +193,7 @@ impl SaplingProvingContext {
// We now have a full witness for the output proof.
let instance = Output {
value_commitment: Some(value_commitment.clone()),
payment_address: Some(payment_address.clone()),
payment_address: Some(payment_address),
commitment_randomness: Some(rcm),
esk: Some(esk),
};

View File

@ -18,6 +18,12 @@ pub struct SaplingVerificationContext {
cv_sum: jubjub::ExtendedPoint,
}
impl Default for SaplingVerificationContext {
fn default() -> Self {
SaplingVerificationContext::new()
}
}
impl SaplingVerificationContext {
/// Construct a new context to be used with a single transaction.
pub fn new() -> Self {
@ -137,7 +143,7 @@ impl SaplingVerificationContext {
binding_sig: Signature,
) -> bool {
// Obtain current cv_sum from the context
let mut bvk = PublicKey(self.cv_sum.clone());
let mut bvk = PublicKey(self.cv_sum);
// Compute value balance
let value_balance = match compute_value_balance(value_balance) {