fix(diagnostics): Hex-encode debug format of commitments, nonces, and nullifiers (#5960)

* Format block commitments and nonces as hex when debugging

* Format Sprout and Sapling nullifiers as hex when debugging

* Format Sprout commitments as hex when debugging

* Format redpallas keys as hex when debugging

* Update code that we're going to delete in the next PR anyway
teor 2023-01-17 23:57:22 +10:00 committed by GitHub
parent 1bb8a9c924
commit 8c451968ee
27 changed files with 200 additions and 78 deletions
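
The practical effect of these diffs shows up in Debug output: 32-byte fields that previously printed as long lists of decimal bytes now print as compact hex strings. A minimal before/after sketch, assuming the new wrapper is re-exported as zebra_chain::fmt::HexDebug (the hunks below only show the crate-internal crate::fmt::HexDebug path):

use zebra_chain::fmt::HexDebug;

fn main() {
    let mut nonce = [0u8; 32];
    nonce[..4].copy_from_slice(&[0xde, 0xad, 0xbe, 0xef]);

    // Before this commit, header nonces were plain [u8; 32] values, so Debug
    // printed all 32 entries in decimal: [222, 173, 190, 239, 0, 0, ..., 0]
    println!("{:?}", nonce);

    // After this commit, the field type is HexDebug<[u8; 32]>, whose Debug impl
    // hex-encodes the bytes instead: [u8; 32]("deadbeef0000...0000")
    println!("{:?}", HexDebug(nonce));
}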

View File

@ -98,7 +98,7 @@ impl Block {
None => Err(CommitmentError::MissingBlockHeight {
block_hash: self.hash(),
}),
Some(height) => Commitment::from_bytes(self.header.commitment_bytes, network, height),
Some(height) => Commitment::from_bytes(*self.header.commitment_bytes, network, height),
}
}

View File

@ -1,16 +1,16 @@
//! Randomised property testing for [`Block`]s.
use std::{collections::HashMap, sync::Arc};
use proptest::{
arbitrary::{any, Arbitrary},
prelude::*,
};
use std::{collections::HashMap, sync::Arc};
use crate::{
amount::NonNegative,
block,
fmt::SummaryDebug,
fmt::{HexDebug, SummaryDebug},
history_tree::HistoryTree,
parameters::{
Network,
@ -482,13 +482,13 @@ impl Block {
// needs to be well-formed, i.e. smaller than 𝑞_J, so we
// arbitrarily set it to 1.
let block_header = Arc::make_mut(&mut block.header);
block_header.commitment_bytes = [0u8; 32];
block_header.commitment_bytes = [0u8; 32].into();
block_header.commitment_bytes[0] = 1;
}
std::cmp::Ordering::Equal => {
// The Heartwood activation block has a hardcoded all-zeroes commitment.
let block_header = Arc::make_mut(&mut block.header);
block_header.commitment_bytes = [0u8; 32];
block_header.commitment_bytes = [0u8; 32].into();
}
std::cmp::Ordering::Greater => {
// Set the correct commitment bytes according to the network upgrade.
@ -505,10 +505,12 @@ impl Block {
&auth_data_root,
);
let block_header = Arc::make_mut(&mut block.header);
block_header.commitment_bytes = hash_block_commitments.into();
block_header.commitment_bytes =
hash_block_commitments.bytes_in_serialized_order().into();
} else {
let block_header = Arc::make_mut(&mut block.header);
block_header.commitment_bytes = history_tree_root.into();
block_header.commitment_bytes =
history_tree_root.bytes_in_serialized_order().into();
}
}
}
@ -723,10 +725,10 @@ impl Arbitrary for Header {
(4u32..(i32::MAX as u32)),
any::<Hash>(),
any::<merkle::Root>(),
any::<[u8; 32]>(),
any::<HexDebug<[u8; 32]>>(),
serialization::arbitrary::datetime_u32(),
any::<CompactDifficulty>(),
any::<[u8; 32]>(),
any::<HexDebug<[u8; 32]>>(),
any::<equihash::Solution>(),
)
.prop_map(

View File

@ -1,5 +1,7 @@
//! The Commitment enum, used for the corresponding block header field.
use std::fmt;
use hex::{FromHex, ToHex};
use thiserror::Error;
@ -97,6 +99,8 @@ pub(crate) const CHAIN_HISTORY_ACTIVATION_RESERVED: [u8; 32] = [0; 32];
impl Commitment {
/// Returns `bytes` as the Commitment variant for `network` and `height`.
//
// TODO: rename as from_bytes_in_serialized_order()
pub(super) fn from_bytes(
bytes: [u8; 32],
network: Network,
@ -126,6 +130,8 @@ impl Commitment {
}
/// Returns the serialized bytes for this Commitment.
//
// TODO: refactor as bytes_in_serialized_order(&self)
#[cfg(test)]
pub(super) fn to_bytes(self) -> [u8; 32] {
use Commitment::*;
@ -145,9 +151,23 @@ impl Commitment {
// - add methods for maintaining the MMR peaks, and calculating the root
// hash from the current set of peaks
// - move to a separate file
#[derive(Clone, Copy, Debug, Eq, PartialEq, Serialize, Deserialize)]
#[derive(Clone, Copy, Eq, PartialEq, Serialize, Deserialize)]
pub struct ChainHistoryMmrRootHash([u8; 32]);
impl fmt::Display for ChainHistoryMmrRootHash {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(&self.encode_hex::<String>())
}
}
impl fmt::Debug for ChainHistoryMmrRootHash {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("ChainHistoryMmrRootHash")
.field(&self.encode_hex::<String>())
.finish()
}
}
impl From<[u8; 32]> for ChainHistoryMmrRootHash {
fn from(hash: [u8; 32]) -> Self {
ChainHistoryMmrRootHash(hash)
@ -183,6 +203,11 @@ impl ChainHistoryMmrRootHash {
ChainHistoryMmrRootHash(internal_byte_order)
}
/// Returns the serialized bytes for this Commitment.
pub fn bytes_in_serialized_order(&self) -> [u8; 32] {
self.0
}
}
impl ToHex for &ChainHistoryMmrRootHash {
@ -222,9 +247,23 @@ impl FromHex for ChainHistoryMmrRootHash {
/// - the transaction authorising data in this block.
///
/// Introduced in NU5.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Serialize, Deserialize)]
#[derive(Clone, Copy, Eq, PartialEq, Serialize, Deserialize)]
pub struct ChainHistoryBlockTxAuthCommitmentHash([u8; 32]);
impl fmt::Display for ChainHistoryBlockTxAuthCommitmentHash {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(&self.encode_hex::<String>())
}
}
impl fmt::Debug for ChainHistoryBlockTxAuthCommitmentHash {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("ChainHistoryBlockTxAuthCommitmentHash")
.field(&self.encode_hex::<String>())
.finish()
}
}
impl From<[u8; 32]> for ChainHistoryBlockTxAuthCommitmentHash {
fn from(hash: [u8; 32]) -> Self {
ChainHistoryBlockTxAuthCommitmentHash(hash)
@ -292,6 +331,11 @@ impl ChainHistoryBlockTxAuthCommitmentHash {
ChainHistoryBlockTxAuthCommitmentHash(internal_byte_order)
}
/// Returns the serialized bytes for this Commitment.
pub fn bytes_in_serialized_order(&self) -> [u8; 32] {
self.0
}
}
impl ToHex for &ChainHistoryBlockTxAuthCommitmentHash {
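
With the hand-written Display and Debug impls above, both chain history commitment types now render as hex, and the new bytes_in_serialized_order accessor is what other hunks in this commit use to fill Header::commitment_bytes. A rough sketch of the intended call pattern, assuming these types are re-exported from zebra_chain::block as in the rest of the crate; the exact hex byte order depends on the ToHex impls, which are only partially shown here, so the example uses a value that reads the same in either order:

use zebra_chain::block::ChainHistoryBlockTxAuthCommitmentHash;

fn main() {
    let commitment_hash = ChainHistoryBlockTxAuthCommitmentHash::from([0x11u8; 32]);

    // Display writes the bare hex string, Debug wraps it in the type name:
    //   111111...11
    //   ChainHistoryBlockTxAuthCommitmentHash("111111...11")
    println!("{}", commitment_hash);
    println!("{:?}", commitment_hash);

    // The new accessor hands back the serialized bytes, which other hunks in
    // this commit pass into Header::commitment_bytes via `.into()`:
    let bytes: [u8; 32] = commitment_hash.bytes_in_serialized_order();
    assert_eq!(bytes, [0x11u8; 32]);
}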

View File

@ -6,6 +6,7 @@ use chrono::{DateTime, Duration, Utc};
use thiserror::Error;
use crate::{
fmt::HexDebug,
serialization::{TrustedPreallocate, MAX_PROTOCOL_MESSAGE_LEN},
work::{difficulty::CompactDifficulty, equihash::Solution},
};
@ -58,7 +59,7 @@ pub struct Header {
/// this field cannot be parsed without the network and height. Use
/// [`Block::commitment`](super::Block::commitment) to get the parsed
/// [`Commitment`](super::Commitment).
pub commitment_bytes: [u8; 32],
pub commitment_bytes: HexDebug<[u8; 32]>,
/// The block timestamp is a Unix epoch time (UTC) when the miner
/// started hashing the header (according to the miner).
@ -77,7 +78,7 @@ pub struct Header {
/// An arbitrary field that miners can change to modify the header
/// hash in order to produce a hash less than or equal to the
/// target threshold.
pub nonce: [u8; 32],
pub nonce: HexDebug<[u8; 32]>,
/// The Equihash solution.
pub solution: Solution,

View File

@ -6,6 +6,7 @@ use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use chrono::{TimeZone, Utc};
use crate::{
block::{header::ZCASH_BLOCK_VERSION, merkle, Block, CountedHeader, Hash, Header},
serialization::{
CompactSizeMessage, ReadZcashExt, SerializationError, ZcashDeserialize,
ZcashDeserializeInto, ZcashSerialize,
@ -13,8 +14,6 @@ use crate::{
work::{difficulty::CompactDifficulty, equihash},
};
use super::{header::ZCASH_BLOCK_VERSION, merkle, Block, CountedHeader, Hash, Header};
/// The maximum size of a Zcash block, in bytes.
///
/// Post-Sapling, this is also the maximum size of a transaction
@ -85,7 +84,7 @@ impl ZcashDeserialize for Header {
version,
previous_block_hash: Hash::zcash_deserialize(&mut reader)?,
merkle_root: merkle::Root(reader.read_32_bytes()?),
commitment_bytes: reader.read_32_bytes()?,
commitment_bytes: reader.read_32_bytes()?.into(),
// This can't panic, because all u32 values are valid `Utc.timestamp`s
time: Utc
.timestamp_opt(reader.read_u32::<LittleEndian>()?.into(), 0)
@ -94,7 +93,7 @@ impl ZcashDeserialize for Header {
"out-of-range number of seconds and/or invalid nanosecond",
))?,
difficulty_threshold: CompactDifficulty(reader.read_u32::<LittleEndian>()?),
nonce: reader.read_32_bytes()?,
nonce: reader.read_32_bytes()?.into(),
solution: equihash::Solution::zcash_deserialize(reader)?,
})
}

View File

@ -107,7 +107,7 @@ proptest! {
let commitment = block.commitment(network);
if let Ok(commitment) = commitment {
let commitment_bytes = commitment.to_bytes();
prop_assert_eq![block.header.commitment_bytes, commitment_bytes];
prop_assert_eq![block.header.commitment_bytes.0, commitment_bytes];
}
// Check the block size limit

View File

@ -160,3 +160,38 @@ where
type Strategy = BoxedStrategy<Self>;
}
/// Wrapper to override `Debug`, redirecting it to hex-encode the type.
/// The type must be hex-encodable.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
#[cfg_attr(any(test, feature = "proptest-impl"), derive(Arbitrary))]
#[serde(transparent)]
pub struct HexDebug<T: AsRef<[u8]>>(pub T);
impl<T: AsRef<[u8]>> fmt::Debug for HexDebug<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple(std::any::type_name::<T>())
.field(&hex::encode(self.as_ref()))
.finish()
}
}
impl<T: AsRef<[u8]>> ops::Deref for HexDebug<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<T: AsRef<[u8]>> ops::DerefMut for HexDebug<T> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl<T: AsRef<[u8]>> From<T> for HexDebug<T> {
fn from(t: T) -> Self {
Self(t)
}
}
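
Because of the Deref, DerefMut, and From impls above, and because #[serde(transparent)] keeps the serde form identical to the inner bytes, most call sites can keep treating HexDebug<[u8; 32]> as the array it wraps; the small changes elsewhere in this commit (`.into()` when storing, `*` or `.as_ref()` when reading) are essentially the whole migration. A minimal sketch, again assuming the zebra_chain::fmt::HexDebug re-export:

use zebra_chain::fmt::HexDebug;

fn wants_plain_bytes(bytes: &[u8]) -> usize {
    bytes.len()
}

fn main() {
    // Storing: any [u8; 32] converts with `.into()`, as in the header hunks.
    let mut commitment_bytes: HexDebug<[u8; 32]> = [0u8; 32].into();

    // DerefMut: indexing still works, as in the arbitrary.rs hunk that sets
    // the first commitment byte to 1.
    commitment_bytes[0] = 1;

    // Deref / AsRef: pass the inner bytes to APIs that expect &[u8] or
    // [u8; 32], as the equihash and Commitment::from_bytes call sites do.
    assert_eq!(wants_plain_bytes(commitment_bytes.as_ref()), 32);
    let raw: [u8; 32] = *commitment_bytes;
    assert_eq!(raw[0], 1);
}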

View File

@ -74,8 +74,8 @@ impl Arbitrary for Signature<SpendAuth> {
fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy {
(array::uniform32(any::<u8>()), array::uniform32(any::<u8>()))
.prop_map(|(r_bytes, s_bytes)| Self {
r_bytes,
s_bytes,
r_bytes: r_bytes.into(),
s_bytes: s_bytes.into(),
_marker: PhantomData,
})
.boxed()

View File

@ -1,3 +1,5 @@
//! BCTV14 proofs for Zebra.
use std::{fmt, io};
use serde::{Deserialize, Serialize};

View File

@ -1,3 +1,5 @@
//! Groth16 proofs for Zebra.
use std::{fmt, io};
use serde::{Deserialize, Serialize};

View File

@ -229,7 +229,7 @@ impl Verifier {
let s = {
// XXX-pallas: should not use CtOption here
let maybe_scalar = pallas::Scalar::from_repr(s_bytes);
let maybe_scalar = pallas::Scalar::from_repr(*s_bytes);
if maybe_scalar.is_some().into() {
maybe_scalar.unwrap()
} else {
@ -258,10 +258,10 @@ impl Verifier {
//
// This validates the `rk` element, whose type is
// SpendAuthSig^{Orchard}.Public, i.e. .
VerificationKey::<SpendAuth>::try_from(vk_bytes.bytes)?.point
VerificationKey::<SpendAuth>::try_from(*vk_bytes.bytes)?.point
}
Inner::Binding { vk_bytes, .. } => {
VerificationKey::<Binding>::try_from(vk_bytes.bytes)?.point
VerificationKey::<Binding>::try_from(*vk_bytes.bytes)?.point
}
};

View File

@ -12,13 +12,16 @@ use std::{io, marker::PhantomData};
use super::SigType;
use crate::serialization::{ReadZcashExt, SerializationError, ZcashDeserialize, ZcashSerialize};
use crate::{
fmt::HexDebug,
serialization::{ReadZcashExt, SerializationError, ZcashDeserialize, ZcashSerialize},
};
/// A RedPallas signature.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
pub struct Signature<T: SigType> {
pub(crate) r_bytes: [u8; 32],
pub(crate) s_bytes: [u8; 32],
pub(crate) r_bytes: HexDebug<[u8; 32]>,
pub(crate) s_bytes: HexDebug<[u8; 32]>,
pub(crate) _marker: PhantomData<T>,
}
@ -29,8 +32,8 @@ impl<T: SigType> From<[u8; 64]> for Signature<T> {
let mut s_bytes = [0; 32];
s_bytes.copy_from_slice(&bytes[32..64]);
Signature {
r_bytes,
s_bytes,
r_bytes: r_bytes.into(),
s_bytes: s_bytes.into(),
_marker: PhantomData,
}
}

View File

@ -1,4 +1,5 @@
use std::convert::{TryFrom, TryInto};
//! Redpallas signing keys for Zebra.
use std::marker::PhantomData;
use group::{ff::PrimeField, GroupEncoding};
@ -117,8 +118,8 @@ impl<T: SigType> SigningKey<T> {
let s_bytes = (nonce + (c * self.sk)).to_repr();
Signature {
r_bytes,
s_bytes,
r_bytes: r_bytes.into(),
s_bytes: s_bytes.into(),
_marker: PhantomData,
}
}

View File

@ -1,8 +1,12 @@
//! Redpallas verification keys for Zebra.
use std::marker::PhantomData;
use group::{cofactor::CofactorGroup, ff::PrimeField, GroupEncoding};
use halo2::pasta::pallas;
use crate::fmt::HexDebug;
use super::*;
/// A refinement type for `[u8; 32]` indicating that the bytes represent
@ -13,14 +17,14 @@ use super::*;
/// used in signature verification.
#[derive(Copy, Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct VerificationKeyBytes<T: SigType> {
pub(crate) bytes: [u8; 32],
pub(crate) bytes: HexDebug<[u8; 32]>,
pub(crate) _marker: PhantomData<T>,
}
impl<T: SigType> From<[u8; 32]> for VerificationKeyBytes<T> {
fn from(bytes: [u8; 32]) -> VerificationKeyBytes<T> {
VerificationKeyBytes {
bytes,
bytes: bytes.into(),
_marker: PhantomData,
}
}
@ -28,7 +32,7 @@ impl<T: SigType> From<[u8; 32]> for VerificationKeyBytes<T> {
impl<T: SigType> From<VerificationKeyBytes<T>> for [u8; 32] {
fn from(refined: VerificationKeyBytes<T>) -> [u8; 32] {
refined.bytes
*refined.bytes
}
}
@ -65,7 +69,7 @@ impl<T: SigType> From<VerificationKey<T>> for VerificationKeyBytes<T> {
impl<T: SigType> From<VerificationKey<T>> for [u8; 32] {
fn from(pk: VerificationKey<T>) -> [u8; 32] {
pk.bytes.bytes
*pk.bytes.bytes
}
}
@ -107,7 +111,7 @@ impl VerificationKey<SpendAuth> {
use super::private::Sealed;
let point = self.point + (SpendAuth::basepoint() * randomizer);
let bytes = VerificationKeyBytes {
bytes: point.to_bytes(),
bytes: point.to_bytes().into(),
_marker: PhantomData,
};
VerificationKey { point, bytes }
@ -118,7 +122,7 @@ impl<T: SigType> VerificationKey<T> {
pub(crate) fn from_scalar(s: &pallas::Scalar) -> VerificationKey<T> {
let point = T::basepoint() * s;
let bytes = VerificationKeyBytes {
bytes: point.to_bytes(),
bytes: point.to_bytes().into(),
_marker: PhantomData,
};
VerificationKey { point, bytes }
@ -154,7 +158,7 @@ impl<T: SigType> VerificationKey<T> {
let s = {
// XXX-pasta_curves: should not use CtOption here
let maybe_scalar = pallas::Scalar::from_repr(signature.s_bytes);
let maybe_scalar = pallas::Scalar::from_repr(*signature.s_bytes);
if maybe_scalar.is_some().into() {
maybe_scalar.unwrap()
} else {

View File

@ -1,22 +1,24 @@
//! Sapling nullifiers.
use crate::fmt::HexDebug;
/// A Nullifier for Sapling transactions
#[derive(Clone, Copy, Debug, Eq, PartialEq, Serialize, Deserialize, Hash)]
#[cfg_attr(
any(test, feature = "proptest-impl"),
derive(proptest_derive::Arbitrary)
)]
pub struct Nullifier(pub [u8; 32]);
pub struct Nullifier(pub HexDebug<[u8; 32]>);
impl From<[u8; 32]> for Nullifier {
fn from(buf: [u8; 32]) -> Self {
Self(buf)
Self(buf.into())
}
}
impl From<Nullifier> for [u8; 32] {
fn from(n: Nullifier) -> Self {
n.0
*n.0
}
}

View File

@ -2,6 +2,8 @@
use sha2::{Digest, Sha256};
use crate::fmt::HexDebug;
use super::note::Note;
/// The randomness used in the Pedersen Hash for note commitment.
@ -10,11 +12,11 @@ use super::note::Note;
any(test, feature = "proptest-impl"),
derive(proptest_derive::Arbitrary)
)]
pub struct CommitmentRandomness(pub [u8; 32]);
pub struct CommitmentRandomness(pub HexDebug<[u8; 32]>);
impl AsRef<[u8]> for CommitmentRandomness {
fn as_ref(&self) -> &[u8] {
&self.0
self.0.as_ref()
}
}
@ -24,11 +26,11 @@ impl AsRef<[u8]> for CommitmentRandomness {
any(test, feature = "proptest-impl"),
derive(proptest_derive::Arbitrary)
)]
pub struct NoteCommitment(pub(crate) [u8; 32]);
pub struct NoteCommitment(pub(crate) HexDebug<[u8; 32]>);
impl From<[u8; 32]> for NoteCommitment {
fn from(bytes: [u8; 32]) -> Self {
Self(bytes)
Self(bytes.into())
}
}
@ -44,18 +46,20 @@ impl From<Note> for NoteCommitment {
hasher.update(note.value.to_bytes());
hasher.update(note.rho);
hasher.update(note.rcm);
NoteCommitment(hasher.finalize().into())
let commitment: [u8; 32] = hasher.finalize().into();
NoteCommitment(commitment.into())
}
}
impl From<NoteCommitment> for [u8; 32] {
fn from(cm: NoteCommitment) -> [u8; 32] {
cm.0
*cm.0
}
}
impl From<&NoteCommitment> for [u8; 32] {
fn from(cm: &NoteCommitment) -> [u8; 32] {
cm.0
*cm.0
}
}

View File

@ -7,6 +7,7 @@ use serde::{Deserialize, Serialize};
use crate::{
amount::{Amount, NegativeAllowed, NonNegative},
block::MAX_BLOCK_BYTES,
fmt::HexDebug,
primitives::{x25519, Bctv14Proof, Groth16Proof, ZkSnarkProof},
serialization::{
ReadZcashExt, SerializationError, TrustedPreallocate, WriteZcashExt, ZcashDeserialize,
@ -25,17 +26,17 @@ use super::{commitment, note, tree};
any(test, feature = "proptest-impl"),
derive(proptest_derive::Arbitrary)
)]
pub struct RandomSeed([u8; 32]);
pub struct RandomSeed(HexDebug<[u8; 32]>);
impl From<[u8; 32]> for RandomSeed {
fn from(bytes: [u8; 32]) -> Self {
Self(bytes)
Self(bytes.into())
}
}
impl From<RandomSeed> for [u8; 32] {
fn from(rt: RandomSeed) -> [u8; 32] {
rt.0
*rt.0
}
}

View File

@ -1,6 +1,12 @@
use crate::serialization::{ReadZcashExt, SerializationError, ZcashDeserialize, ZcashSerialize};
//! Sprout message authentication codes.
use std::io::{self, Read};
use crate::{
fmt::HexDebug,
serialization::{ReadZcashExt, SerializationError, ZcashDeserialize, ZcashSerialize},
};
/// A sequence of message authentication tags ...
///
/// binding h_sig to each a_sk of the JoinSplit description, computed as
@ -10,17 +16,17 @@ use std::io::{self, Read};
any(test, feature = "proptest-impl"),
derive(proptest_derive::Arbitrary)
)]
pub struct Mac([u8; 32]);
pub struct Mac(HexDebug<[u8; 32]>);
impl From<[u8; 32]> for Mac {
fn from(bytes: [u8; 32]) -> Self {
Self(bytes)
Self(bytes.into())
}
}
impl From<Mac> for [u8; 32] {
fn from(rt: Mac) -> [u8; 32] {
rt.0
*rt.0
}
}
@ -34,7 +40,7 @@ impl ZcashDeserialize for Mac {
fn zcash_deserialize<R: Read>(mut reader: R) -> Result<Self, SerializationError> {
let bytes = reader.read_32_bytes()?;
Ok(Self(bytes))
Ok(Self(bytes.into()))
}
}

View File

@ -2,6 +2,8 @@
use serde::{Deserialize, Serialize};
use crate::fmt::HexDebug;
/// Nullifier seed, named rho in the [spec][ps].
///
/// [ps]: https://zips.z.cash/protocol/protocol.pdf#sproutkeycomponents
@ -11,23 +13,23 @@ use serde::{Deserialize, Serialize};
any(test, feature = "proptest-impl"),
derive(proptest_derive::Arbitrary)
)]
pub struct NullifierSeed(pub(crate) [u8; 32]);
pub struct NullifierSeed(pub(crate) HexDebug<[u8; 32]>);
impl AsRef<[u8]> for NullifierSeed {
fn as_ref(&self) -> &[u8] {
&self.0
self.0.as_ref()
}
}
impl From<[u8; 32]> for NullifierSeed {
fn from(bytes: [u8; 32]) -> Self {
Self(bytes)
Self(bytes.into())
}
}
impl From<NullifierSeed> for [u8; 32] {
fn from(rho: NullifierSeed) -> Self {
rho.0
*rho.0
}
}
@ -37,22 +39,22 @@ impl From<NullifierSeed> for [u8; 32] {
any(test, feature = "proptest-impl"),
derive(proptest_derive::Arbitrary)
)]
pub struct Nullifier(pub [u8; 32]);
pub struct Nullifier(pub HexDebug<[u8; 32]>);
impl From<[u8; 32]> for Nullifier {
fn from(bytes: [u8; 32]) -> Self {
Self(bytes)
Self(bytes.into())
}
}
impl From<Nullifier> for [u8; 32] {
fn from(n: Nullifier) -> Self {
n.0
*n.0
}
}
impl From<&Nullifier> for [u8; 32] {
fn from(n: &Nullifier) -> Self {
n.0
*n.0
}
}

View File

@ -992,11 +992,25 @@ impl TrustedPreallocate for transparent::Output {
/// Stores bytes that are guaranteed to be deserializable into a [`Transaction`].
///
/// Sorts in lexicographic order of the transaction's serialized data.
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct SerializedTransaction {
bytes: Vec<u8>,
}
impl fmt::Display for SerializedTransaction {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(&hex::encode(&self.bytes))
}
}
impl fmt::Debug for SerializedTransaction {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("SerializedTransaction")
.field(&hex::encode(&self.bytes))
.finish()
}
}
/// Build a [`SerializedTransaction`] by serializing a block.
impl<B: Borrow<Transaction>> From<B> for SerializedTransaction {
fn from(tx: B) -> Self {
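
With the two impls above, a SerializedTransaction displays as the plain raw-transaction hex, and Debug wraps the same string in the type name. A small sketch; the log_tx helper is hypothetical, and the zebra_chain::transaction::SerializedTransaction path is assumed from the file layout:

use zebra_chain::transaction::SerializedTransaction;

// Hypothetical helper showing how the new impls render a serialized transaction.
fn log_tx(tx: &SerializedTransaction) {
    // Display: the transaction's serialized bytes as hex, e.g. 0400008085202f89...
    println!("{}", tx);
    // Debug: the same hex wrapped in the type name,
    // e.g. SerializedTransaction("0400008085202f89...")
    println!("{:?}", tx);
}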

View File

@ -55,7 +55,7 @@ impl Solution {
let input = &input[0..Solution::INPUT_LENGTH];
equihash::is_valid_solution(n, k, input, nonce, solution)?;
equihash::is_valid_solution(n, k, input, nonce.as_ref(), solution)?;
Ok(())
}

View File

@ -73,12 +73,12 @@ prop_compose! {
fn randomized_nonce(real_header: block::Header)
(fake_nonce in proptest::array::uniform32(any::<u8>())
.prop_filter("nonce must not be the actual nonce", move |fake_nonce| {
fake_nonce != &real_header.nonce
fake_nonce != &real_header.nonce.0
})
) -> Arc<block::Header> {
let mut fake_header = real_header;
fake_header.nonce = fake_nonce;
fake_header.nonce = fake_nonce.into();
Arc::new(fake_header)
}

View File

@ -67,7 +67,7 @@ static INVALID_HEADER_SOLUTION_TRANSCRIPT: Lazy<
Block::zcash_deserialize(&zebra_test::vectors::BLOCK_MAINNET_GENESIS_BYTES[..]).unwrap();
// Change nonce to something invalid
Arc::make_mut(&mut block.header).nonce = [0; 32];
Arc::make_mut(&mut block.header).nonce = [0; 32].into();
vec![(
Request::Commit(Arc::new(block)),

View File

@ -963,7 +963,7 @@ fn v4_transaction_with_conflicting_sprout_nullifier_across_joinsplits_is_rejecte
// Add a new joinsplit with the duplicate nullifier
let mut new_joinsplit = joinsplit_data.first.clone();
new_joinsplit.nullifiers[0] = duplicate_nullifier;
new_joinsplit.nullifiers[1] = sprout::note::Nullifier([2u8; 32]);
new_joinsplit.nullifiers[1] = sprout::note::Nullifier([2u8; 32].into());
joinsplit_data.rest.push(new_joinsplit);
@ -1981,8 +1981,8 @@ fn mock_sprout_join_split_data() -> (JoinSplitData<Groth16Proof>, ed25519::Signi
.try_into()
.expect("Invalid JoinSplit transparent input");
let anchor = sprout::tree::Root::default();
let first_nullifier = sprout::note::Nullifier([0u8; 32]);
let second_nullifier = sprout::note::Nullifier([1u8; 32]);
let first_nullifier = sprout::note::Nullifier([0u8; 32].into());
let second_nullifier = sprout::note::Nullifier([1u8; 32].into());
let commitment = sprout::commitment::NoteCommitment::from([0u8; 32]);
let ephemeral_key = x25519::PublicKey::from(&x25519::EphemeralSecret::new(rand::thread_rng()));
let random_seed = sprout::RandomSeed::from([0u8; 32]);

View File

@ -15,7 +15,7 @@ impl IntoDisk for sprout::Nullifier {
type Bytes = [u8; 32];
fn as_bytes(&self) -> Self::Bytes {
self.0
*self.0
}
}
@ -23,7 +23,7 @@ impl IntoDisk for sapling::Nullifier {
type Bytes = [u8; 32];
fn as_bytes(&self) -> Self::Bytes {
self.0
*self.0
}
}

View File

@ -58,7 +58,7 @@ impl FakeChainHelper for Arc<Block> {
fn set_block_commitment(mut self, block_commitment: [u8; 32]) -> Arc<Block> {
let block = Arc::make_mut(&mut self);
Arc::make_mut(&mut block.header).commitment_bytes = block_commitment;
Arc::make_mut(&mut block.header).commitment_bytes = block_commitment.into();
self
}
}

View File

@ -222,10 +222,10 @@ fn proposal_block_from_template(
version,
previous_block_hash,
merkle_root,
commitment_bytes: block_commitments_hash.into(),
commitment_bytes: block_commitments_hash.bytes_in_serialized_order().into(),
time: time.into(),
difficulty_threshold,
nonce: [0; 32],
nonce: [0; 32].into(),
solution: Solution::for_proposal(),
}),
transactions: transactions.clone(),