Merge branch main into zsa1 (#59)

For zcash_note_encryption, we have to use version 0.2 with the QEDIT patch.
Commit c77d96c15c by Constance Beguier, 2023-05-16 12:01:11 +02:00, committed by GitHub (GPG Key ID: 4AEE18F83AFDEB23).
20 changed files with 285 additions and 141 deletions.

View File

@ -22,6 +22,22 @@ jobs:
command: test
args: --verbose
build:
name: Build target ${{ matrix.target }}
runs-on: ubuntu-latest
strategy:
matrix:
target:
- wasm32-wasi
steps:
- uses: actions/checkout@v3
- name: Add target
run: rustup target add ${{ matrix.target }}
- run: cargo fetch
- name: Build for ${{ matrix.target }} target
run: cargo build --verbose --no-default-features --target ${{ matrix.target }}
bitrot:
name: Bitrot check
runs-on: ubuntu-latest

View File

@ -7,6 +7,29 @@ and this project adheres to Rust's notion of
## [Unreleased]
## [0.4.0] - 2023-04-11
### Added
- `orchard::builder`:
- `{SpendInfo::new, InputView, OutputView}`
- `Builder::{spends, outputs}`
- `SpendError`
- `OutputError`
### Changed
- MSRV is now 1.60.0.
- Migrated to `ff 0.13`, `group 0.13`, `pasta_curves 0.5`, `halo2_proofs 0.3`,
`halo2_gadgets 0.3`, `reddsa 0.5`, `zcash_note_encryption 0.3`.
- `orchard::builder`:
- `Builder::{add_spend, add_output}` now use concrete error types instead of
`&'static str`s.
- `Error` has been renamed to `BuildError` to differentiate from new error
types.
- `BuildError` now implements `std::error::Error` and `std::fmt::Display`.
### Fixed
- Several bugs have been fixed that were preventing Orchard bundles from being
created or verified on 32-bit platforms, or with recent versions of Rust.
## [0.3.0] - 2022-10-19
### Added
- `orchard::Proof::add_to_batch`
@ -21,7 +44,6 @@ and this project adheres to Rust's notion of
- `impl memuse::DynamicUsage for Nullifier`
- `orchard::note_encryption`:
- `impl memuse::DynamicUsage for OrchardDomain`
- `orchard::builder::SpendInfo::new`
- `orchard::circuit::Circuit::from_action_context`
- impls of `Eq` for:
- `orchard::zip32::ChildIndex`
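The 0.4.0 entries above replace the builder's `&'static str` errors with concrete types (`BuildError`, `SpendError`, `OutputError`) that implement `Display` and `std::error::Error`. A minimal caller-side sketch of what that enables, using only the variants shown in this diff (the `describe_*` helpers themselves are hypothetical):

```rust
use orchard::builder::{BuildError, SpendError};

// Hypothetical helper: match on the concrete variants instead of comparing
// `&'static str` messages.
fn describe_spend_failure(err: &SpendError) -> String {
    match err {
        SpendError::SpendsDisabled => "spends are disabled for this builder".into(),
        SpendError::AnchorMismatch => "merkle path does not root to the builder's anchor".into(),
        SpendError::FvkMismatch => "note does not belong to the provided full viewing key".into(),
    }
}

// `BuildError` implements `Display`, so it can simply be stringified.
fn describe_build_failure(err: &BuildError) -> String {
    err.to_string()
}
```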

View File

@ -1,6 +1,6 @@
[package]
name = "orchard"
version = "0.3.0"
version = "0.4.0"
authors = [
"Sean Bowe <sean@electriccoin.co>",
"Jack Grigg <jack@electriccoin.co>",
@ -23,39 +23,43 @@ all-features = true
rustdoc-args = ["--cfg", "docsrs", "--html-in-header", "katex-header.html"]
[dependencies]
aes = "0.7"
aes = "0.8"
bitvec = "1"
blake2b_simd = "1"
ff = "0.12"
fpe = "0.5"
group = { version = "0.12.1", features = ["wnaf-memuse"] }
bridgetree = { version = "0.2", optional = true }
ff = "0.13"
fpe = "0.6"
group = { version = "0.13", features = ["wnaf-memuse"] }
halo2_gadgets = { git = "https://github.com/QED-it/halo2", branch = "zsa1" }
halo2_proofs = { git = "https://github.com/QED-it/halo2", branch = "zsa1"}
halo2_proofs = { git = "https://github.com/QED-it/halo2", branch = "zsa1", default-features = false, features = ["batch", "floor-planner-v1-legacy-pdqsort"] }
hex = "0.4"
lazy_static = "1"
memuse = { version = "0.2.1", features = ["nonempty"] }
pasta_curves = "0.4"
pasta_curves = "0.5"
proptest = { version = "1.0.0", optional = true }
rand = "0.8"
reddsa = "0.3"
reddsa = "0.5"
nonempty = "0.7"
serde = { version = "1.0", features = ["derive"] }
subtle = "2.3"
zcash_note_encryption = "0.2"
incrementalmerkletree = "0.3"
incrementalmerkletree = "0.3.1"
# Logging
tracing = "0.1"
# Developer tooling dependencies
image = { version = ">= 0.24, < 0.24.5", optional = true } # 0.24.5 has MSRV 1.61
plotters = { version = "0.3.0", optional = true }
[dev-dependencies]
bridgetree = "0.2"
criterion = "0.3"
halo2_gadgets = { git = "https://github.com/QED-it/halo2", branch = "zsa1", features = ["test-dependencies"] }
hex = "0.4"
proptest = "1.0.0"
zcash_note_encryption = { version = "0.2", features = ["pre-zip-212"] }
incrementalmerkletree = { version = "0.3", features = ["test-dependencies"] }
[target.'cfg(unix)'.dev-dependencies]
inferno = ">= 0.11, < 0.11.15"
@ -65,8 +69,10 @@ pprof = { version = "0.9", features = ["criterion", "flamegraph"] } # MSRV 1.56
bench = false
[features]
dev-graph = ["halo2_proofs/dev-graph", "plotters"]
test-dependencies = ["proptest"]
default = ["multicore"]
multicore = ["halo2_proofs/multicore"]
dev-graph = ["halo2_proofs/dev-graph", "image", "plotters"]
test-dependencies = ["bridgetree", "proptest"]
[[bench]]
name = "note_decryption"
@ -88,3 +94,5 @@ debug = true
[patch.crates-io]
zcash_note_encryption = { git = "https://github.com/QED-it/librustzcash.git", rev = "07c377ddedf71ab7c7a266d284b054a2dafc2ed4" }
bridgetree = { git = "https://github.com/zcash/incrementalmerkletree.git", rev = "ea1686e8f8f6c1e41aa97251a7eb4fadfd33df47" }
incrementalmerkletree = { git = "https://github.com/zcash/incrementalmerkletree.git", rev = "ea1686e8f8f6c1e41aa97251a7eb4fadfd33df47" }

View File

@ -1 +0,0 @@
1.61.0

rust-toolchain.toml (new file)
View File

@ -0,0 +1,3 @@
[toolchain]
channel = "1.61.0"
components = [ "clippy", "rustfmt" ]

View File

@ -3,6 +3,7 @@
use core::fmt;
use core::iter;
use std::collections::HashMap;
use std::fmt::Display;
use ff::Field;
use nonempty::NonEmpty;
@ -30,7 +31,7 @@ const MIN_ACTIONS: usize = 2;
/// An error type for the kinds of errors that can occur during bundle construction.
#[derive(Debug)]
pub enum Error {
pub enum BuildError {
/// A bundle could not be built because required signatures were missing.
MissingSignatures,
/// An error occurred in the process of producing a proof for a bundle.
@ -45,15 +46,66 @@ pub enum Error {
DuplicateSignature,
}
impl From<halo2_proofs::plonk::Error> for Error {
fn from(e: halo2_proofs::plonk::Error) -> Self {
Error::Proof(e)
impl Display for BuildError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
use BuildError::*;
match self {
MissingSignatures => f.write_str("Required signatures were missing during build"),
Proof(e) => f.write_str(&format!("Could not create proof: {}", e)),
ValueSum(_) => f.write_str("Overflow occurred during value construction"),
InvalidExternalSignature => f.write_str("External signature was invalid"),
DuplicateSignature => f.write_str("Signature valid for more than one input"),
}
}
}
impl From<value::OverflowError> for Error {
impl std::error::Error for BuildError {}
/// An error type for adding a spend to the builder.
#[derive(Debug, PartialEq, Eq)]
pub enum SpendError {
/// Spends aren't enabled for this builder.
SpendsDisabled,
/// The anchor provided to this builder doesn't match the merkle path used to add a spend.
AnchorMismatch,
/// The full viewing key provided didn't match the note provided
FvkMismatch,
}
impl Display for SpendError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
use SpendError::*;
f.write_str(match self {
SpendsDisabled => "Spends are not enabled for this builder",
AnchorMismatch => "All anchors must be equal.",
FvkMismatch => "FullViewingKey does not correspond to the given note",
})
}
}
impl std::error::Error for SpendError {}
/// The only error that can occur here is if outputs are disabled for this builder.
#[derive(Debug, PartialEq, Eq)]
pub struct OutputError;
impl Display for OutputError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("Outputs are not enabled for this builder")
}
}
impl std::error::Error for OutputError {}
impl From<halo2_proofs::plonk::Error> for BuildError {
fn from(e: halo2_proofs::plonk::Error) -> Self {
BuildError::Proof(e)
}
}
impl From<value::OverflowError> for BuildError {
fn from(e: value::OverflowError) -> Self {
Error::ValueSum(e)
BuildError::ValueSum(e)
}
}
@ -287,23 +339,22 @@ impl Builder {
fvk: FullViewingKey,
note: Note,
merkle_path: MerklePath,
) -> Result<(), &'static str> {
) -> Result<(), SpendError> {
if !self.flags.spends_enabled() {
return Err("Spends are not enabled for this builder");
return Err(SpendError::SpendsDisabled);
}
// Consistency check: all anchors must be equal.
let cm = note.commitment();
let path_root: Anchor =
<Option<_>>::from(merkle_path.root(cm.into())).ok_or("Derived the bottom anchor")?;
let path_root = merkle_path.root(cm.into());
if path_root != self.anchor {
return Err("All anchors must be equal.");
return Err(SpendError::AnchorMismatch);
}
// Check if note is internal or external.
let scope = fvk
.scope_for_address(&note.recipient())
.ok_or("FullViewingKey does not correspond to the given note")?;
.ok_or(SpendError::FvkMismatch)?;
self.spends.push(SpendInfo {
dummy_sk: None,
@ -325,9 +376,9 @@ impl Builder {
value: NoteValue,
asset: AssetBase,
memo: Option<[u8; 512]>,
) -> Result<(), &'static str> {
) -> Result<(), OutputError> {
if !self.flags.outputs_enabled() {
return Err("Outputs are not enabled for this builder");
return Err(OutputError);
}
self.recipients.push(RecipientInfo {
@ -352,6 +403,18 @@ impl Builder {
Ok(())
}
/// Returns the action spend components that will be produced by the
/// transaction being constructed
pub fn spends(&self) -> &Vec<impl InputView<()>> {
&self.spends
}
/// Returns the action output components that will be produced by the
/// transaction being constructed
pub fn outputs(&self) -> &Vec<impl OutputView> {
&self.recipients
}
/// The net value of the bundle to be built. The value of all spends,
/// minus the value of all outputs.
///
@ -384,7 +447,7 @@ impl Builder {
pub fn build<V: TryFrom<i64> + Copy + Into<i64>>(
self,
mut rng: impl RngCore,
) -> Result<Bundle<InProgress<Unproven, Unauthorized>, V>, Error> {
) -> Result<Bundle<InProgress<Unproven, Unauthorized>, V>, BuildError> {
let mut pre_actions: Vec<_> = Vec::new();
// Pair up the spends and recipients, extending with dummy values as necessary.
@ -459,7 +522,7 @@ impl Builder {
self.burn
.into_iter()
.map(|(asset, value)| Ok((asset, value.into()?)))
.collect::<Result<_, Error>>()?,
.collect::<Result<_, BuildError>>()?,
anchor,
InProgress {
proof: Unproven { circuits },
@ -549,7 +612,7 @@ impl<S: InProgressSignatures, V> Bundle<InProgress<Unproven, S>, V> {
self,
pk: &ProvingKey,
mut rng: impl RngCore,
) -> Result<Bundle<InProgress<Proof, S>, V>, Error> {
) -> Result<Bundle<InProgress<Proof, S>, V>, BuildError> {
let instances: Vec<_> = self
.actions()
.iter()
@ -624,10 +687,10 @@ pub enum MaybeSigned {
}
impl MaybeSigned {
fn finalize(self) -> Result<redpallas::Signature<SpendAuth>, Error> {
fn finalize(self) -> Result<redpallas::Signature<SpendAuth>, BuildError> {
match self {
Self::Signature(sig) => Ok(sig),
_ => Err(Error::MissingSignatures),
_ => Err(BuildError::MissingSignatures),
}
}
}
@ -671,7 +734,7 @@ impl<V> Bundle<InProgress<Proof, Unauthorized>, V> {
mut rng: R,
sighash: [u8; 32],
signing_keys: &[SpendAuthorizingKey],
) -> Result<Bundle<Authorized, V>, Error> {
) -> Result<Bundle<Authorized, V>, BuildError> {
signing_keys
.iter()
.fold(self.prepare(&mut rng, sighash), |partial, ask| {
@ -710,11 +773,14 @@ impl<P: fmt::Debug, V> Bundle<InProgress<P, PartiallyAuthorized>, V> {
pub fn append_signatures(
self,
signatures: &[redpallas::Signature<SpendAuth>],
) -> Result<Self, Error> {
) -> Result<Self, BuildError> {
signatures.iter().try_fold(self, Self::append_signature)
}
fn append_signature(self, signature: &redpallas::Signature<SpendAuth>) -> Result<Self, Error> {
fn append_signature(
self,
signature: &redpallas::Signature<SpendAuth>,
) -> Result<Self, BuildError> {
let mut signature_valid_for = 0usize;
let bundle = self.map_authorization(
&mut signature_valid_for,
@ -734,9 +800,9 @@ impl<P: fmt::Debug, V> Bundle<InProgress<P, PartiallyAuthorized>, V> {
|_, partial| partial,
);
match signature_valid_for {
0 => Err(Error::InvalidExternalSignature),
0 => Err(BuildError::InvalidExternalSignature),
1 => Ok(bundle),
_ => Err(Error::DuplicateSignature),
_ => Err(BuildError::DuplicateSignature),
}
}
}
@ -745,7 +811,7 @@ impl<V> Bundle<InProgress<Proof, PartiallyAuthorized>, V> {
/// Finalizes this bundle, enabling it to be included in a transaction.
///
/// Returns an error if any signatures are missing.
pub fn finalize(self) -> Result<Bundle<Authorized, V>, Error> {
pub fn finalize(self) -> Result<Bundle<Authorized, V>, BuildError> {
self.try_map_authorization(
&mut (),
|_, _, maybe| maybe.finalize(),
@ -759,12 +825,45 @@ impl<V> Bundle<InProgress<Proof, PartiallyAuthorized>, V> {
}
}
/// A trait that provides a minimized view of an Orchard input suitable for use in
/// fee and change calculation.
pub trait InputView<NoteRef> {
/// An identifier for the input being spent.
fn note_id(&self) -> &NoteRef;
/// The value of the input being spent.
fn value<V: From<u64>>(&self) -> V;
}
impl InputView<()> for SpendInfo {
fn note_id(&self) -> &() {
// The builder does not make use of note identifiers, so we can just return the unit value.
&()
}
fn value<V: From<u64>>(&self) -> V {
V::from(self.note.value().inner())
}
}
/// A trait that provides a minimized view of an Orchard output suitable for use in
/// fee and change calculation.
pub trait OutputView {
/// The value of the output being produced.
fn value<V: From<u64>>(&self) -> V;
}
impl OutputView for RecipientInfo {
fn value<V: From<u64>>(&self) -> V {
V::from(self.value.inner())
}
}
/// Generators for property testing.
#[cfg(any(test, feature = "test-dependencies"))]
#[cfg_attr(docsrs, doc(cfg(feature = "test-dependencies")))]
pub mod testing {
use bridgetree::BridgeTree;
use core::fmt::Debug;
use incrementalmerkletree::{bridgetree::BridgeTree, Tree};
use rand::{rngs::StdRng, CryptoRng, SeedableRng};
use proptest::collection::vec;
@ -860,16 +959,15 @@ pub mod testing {
rng_seed in prop::array::uniform32(prop::num::u8::ANY)
) -> ArbitraryBundleInputs<StdRng> {
const MERKLE_DEPTH_ORCHARD: u8 = crate::constants::MERKLE_DEPTH_ORCHARD as u8;
let mut tree = BridgeTree::<MerkleHashOrchard, MERKLE_DEPTH_ORCHARD>::new(100);
let mut tree = BridgeTree::<MerkleHashOrchard, u32, MERKLE_DEPTH_ORCHARD>::new(100, 0);
let mut notes_and_auth_paths: Vec<(Note, MerklePath)> = Vec::new();
for note in notes.iter() {
let leaf = MerkleHashOrchard::from_cmx(&note.commitment().into());
tree.append(&leaf);
let position = tree.witness().expect("tree is not empty");
tree.append(leaf);
let position = tree.mark().expect("tree is not empty");
let root = tree.root(0).unwrap();
let path = MerklePath::from((position, tree.authentication_path(position, &root).expect("we just witnessed the path")));
let path = MerklePath::from((position, tree.witness(position, 0).expect("we just witnessed the path")));
notes_and_auth_paths.push((*note, path));
}
@ -945,7 +1043,7 @@ mod tests {
.unwrap()
.create_proof(&pk, &mut rng)
.unwrap()
.prepare(&mut rng, [0; 32])
.prepare(rng, [0; 32])
.finalize()
.unwrap();
assert_eq!(bundle.value_balance(), &(-5000))
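The `Builder::{spends, outputs}` accessors and the `InputView`/`OutputView` traits added above expose just enough information for fee and change calculation. A sketch of how a caller might total a bundle's inputs and outputs; the `estimate_fee` helper and its flat base fee are illustrative assumptions, not part of orchard:

```rust
use orchard::builder::{Builder, InputView, OutputView};

// Hypothetical fee estimator built on the minimized views returned by
// `Builder::spends()` and `Builder::outputs()`.
fn estimate_fee(builder: &Builder) -> u64 {
    let total_spent: u64 = builder.spends().iter().map(|s| s.value::<u64>()).sum();
    let total_output: u64 = builder.outputs().iter().map(|o| o.value::<u64>()).sum();
    // Assumed flat 1000-zatoshi base fee plus any shortfall.
    1_000 + total_output.saturating_sub(total_spent)
}
```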

View File

@ -514,8 +514,9 @@ pub struct BundleAuthorizingCommitment(pub Blake2bHash);
#[cfg(any(test, feature = "test-dependencies"))]
#[cfg_attr(docsrs, doc(cfg(feature = "test-dependencies")))]
pub mod testing {
use group::ff::FromUniformBytes;
use nonempty::NonEmpty;
use pasta_curves::{arithmetic::FieldExt, pallas};
use pasta_curves::pallas;
use rand::{rngs::StdRng, SeedableRng};
use reddsa::orchard::SpendAuth;
@ -617,7 +618,7 @@ pub mod testing {
// Instead of rejecting out-of-range bytes, let's reduce them.
let mut buf = [0; 64];
buf[..32].copy_from_slice(&bytes);
pallas::Base::from_bytes_wide(&buf)
pallas::Base::from_uniform_bytes(&buf)
}
}

View File

@ -1108,7 +1108,7 @@ mod tests {
let expected_proof_size = {
let circuit_cost =
halo2_proofs::dev::CircuitCost::<pasta_curves::vesta::Point, _>::measure(
K as usize,
K,
&circuits[0],
);
assert_eq!(usize::from(circuit_cost.proof_size(1)), 5024);

View File

@ -1,11 +1,12 @@
use core::iter;
use group::ff::{Field, PrimeField};
use halo2_proofs::{
circuit::{AssignedCell, Layouter, Value},
plonk::{Advice, Column, ConstraintSystem, Constraints, Error, Expression, Selector},
poly::Rotation,
};
use pasta_curves::{arithmetic::FieldExt, pallas};
use pasta_curves::pallas;
use crate::constants::{OrchardCommitDomains, OrchardFixedBases, OrchardHashDomains, T_P};
use halo2_gadgets::{
@ -118,7 +119,7 @@ impl CommitIvkChip {
// Check that nk = b_2 (5 bits) || c (240 bits) || d_0 (9 bits) || d_1 (1 bit)
let nk_decomposition_check = {
let two_pow_245 = pallas::Base::from(1 << 49).pow(&[5, 0, 0, 0]);
let two_pow_245 = pallas::Base::from(1 << 49).pow([5, 0, 0, 0]);
b_2.clone()
+ c.clone() * two_pow_5
@ -667,7 +668,7 @@ mod tests {
fixed_bases::COMMIT_IVK_PERSONALIZATION, OrchardCommitDomains, OrchardFixedBases,
OrchardHashDomains, L_ORCHARD_BASE, T_Q,
};
use group::ff::{Field, PrimeFieldBits};
use group::ff::{Field, PrimeField, PrimeFieldBits};
use halo2_gadgets::{
ecc::{
chip::{EccChip, EccConfig},
@ -684,7 +685,7 @@ mod tests {
dev::MockProver,
plonk::{Circuit, ConstraintSystem, Error},
};
use pasta_curves::{arithmetic::FieldExt, pallas};
use pasta_curves::pallas;
use rand::rngs::OsRng;
#[test]

View File

@ -14,7 +14,6 @@ use halo2_gadgets::{
sinsemilla::{chip::SinsemillaChip, merkle::chip::MerkleChip},
};
use halo2_proofs::{
arithmetic::FieldExt,
circuit::{AssignedCell, Chip, Layouter, Value},
plonk::{self, Advice, Assigned, Column},
};
@ -72,7 +71,7 @@ impl super::Config {
}
/// An instruction set for adding two circuit words (field elements).
pub(in crate::circuit) trait AddInstruction<F: FieldExt>: Chip<F> {
pub(in crate::circuit) trait AddInstruction<F: Field>: Chip<F> {
/// Constraints `a + b` and returns the sum.
fn add(
&self,

View File

@ -1,11 +1,12 @@
use core::iter;
use group::ff::PrimeField;
use halo2_proofs::{
circuit::{AssignedCell, Layouter, Value},
plonk::{Advice, Column, ConstraintSystem, Constraints, Error, Expression, Selector},
poly::Rotation,
};
use pasta_curves::{arithmetic::FieldExt, pallas};
use pasta_curves::pallas;
use crate::{
constants::{OrchardCommitDomains, OrchardFixedBases, OrchardHashDomains, T_P},
@ -60,7 +61,7 @@ type CanonicityBounds = (
/// | b | b_0 | b_1 | 1 |
/// | | b_2 | b_3 | 0 |
///
/// https://p.z.cash/orchard-0.1:note-commit-decomposition-b?partial
/// <https://p.z.cash/orchard-0.1:note-commit-decomposition-b?partial>
#[derive(Clone, Debug)]
struct DecomposeB {
q_notecommit_b: Selector,
@ -205,7 +206,7 @@ impl DecomposeB {
/// | d | d_0 | d_1 | 1 |
/// | | d_2 | d_3 | 0 |
///
/// https://p.z.cash/orchard-0.1:note-commit-decomposition-d?partial
/// <https://p.z.cash/orchard-0.1:note-commit-decomposition-d?partial>
#[derive(Clone, Debug)]
struct DecomposeD {
q_notecommit_d: Selector,
@ -341,7 +342,7 @@ impl DecomposeD {
/// ------------------------------------
/// | e | e_0 | e_1 | 1 |
///
/// https://p.z.cash/orchard-0.1:note-commit-decomposition-e?partial
/// <https://p.z.cash/orchard-0.1:note-commit-decomposition-e?partial>
#[derive(Clone, Debug)]
struct DecomposeE {
q_notecommit_e: Selector,
@ -460,7 +461,7 @@ impl DecomposeE {
/// | g | g_0 | 1 |
/// | g_1 | g_2 | 0 |
///
/// https://p.z.cash/orchard-0.1:note-commit-decomposition-g?partial
/// <https://p.z.cash/orchard-0.1:note-commit-decomposition-g?partial>
#[derive(Clone, Debug)]
struct DecomposeG {
q_notecommit_g: Selector,
@ -582,7 +583,7 @@ impl DecomposeG {
/// ------------------------------------
/// | h | h_0 | h_1 | 1 |
///
/// https://p.z.cash/orchard-0.1:note-commit-decomposition-h?partial
/// <https://p.z.cash/orchard-0.1:note-commit-decomposition-h?partial>
#[derive(Clone, Debug)]
struct DecomposeH {
q_notecommit_h: Selector,
@ -699,7 +700,7 @@ impl DecomposeH {
/// | x(g_d) | b_0 | a | z13_a | 1 |
/// | | b_1 | a_prime | z13_a_prime | 0 |
///
/// https://p.z.cash/orchard-0.1:note-commit-canonicity-g_d?partial
/// <https://p.z.cash/orchard-0.1:note-commit-canonicity-g_d?partial>
#[derive(Clone, Debug)]
struct GdCanonicity {
q_notecommit_g_d: Selector,
@ -816,7 +817,7 @@ impl GdCanonicity {
/// | x(pk_d) | b_3 | c | z13_c | 1 |
/// | | d_0 | b3_c_prime | z14_b3_c_prime | 0 |
///
/// https://p.z.cash/orchard-0.1:note-commit-canonicity-pk_d?partial
/// <https://p.z.cash/orchard-0.1:note-commit-canonicity-pk_d?partial>
#[derive(Clone, Debug)]
struct PkdCanonicity {
q_notecommit_pk_d: Selector,
@ -932,7 +933,7 @@ impl PkdCanonicity {
/// ------------------------------------------------
/// | value | d_2 | d_3 | e_0 | 1 |
///
/// https://p.z.cash/orchard-0.1:note-commit-canonicity-v?partial
/// <https://p.z.cash/orchard-0.1:note-commit-canonicity-v?partial>
#[derive(Clone, Debug)]
struct ValueCanonicity {
q_notecommit_value: Selector,
@ -1010,7 +1011,7 @@ impl ValueCanonicity {
/// | rho | e_1 | f | z13_f | 1 |
/// | | g_0 | e1_f_prime | z14_e1_f_prime | 0 |
///
/// https://p.z.cash/orchard-0.1:note-commit-canonicity-rho?partial
/// <https://p.z.cash/orchard-0.1:note-commit-canonicity-rho?partial>
#[derive(Clone, Debug)]
struct RhoCanonicity {
q_notecommit_rho: Selector,
@ -1125,7 +1126,7 @@ impl RhoCanonicity {
/// | psi | g_1 | g_2 | z13_g | 1 |
/// | h_0 | h_1 | g1_g2_prime | z13_g1_g2_prime | 0 |
///
/// https://p.z.cash/orchard-0.1:note-commit-canonicity-psi?partial
/// <https://p.z.cash/orchard-0.1:note-commit-canonicity-psi?partial>
#[derive(Clone, Debug)]
struct PsiCanonicity {
q_notecommit_psi: Selector,
@ -2043,10 +2044,7 @@ mod tests {
dev::MockProver,
plonk::{Circuit, ConstraintSystem, Error},
};
use pasta_curves::{
arithmetic::{CurveAffine, FieldExt},
pallas,
};
use pasta_curves::{arithmetic::CurveAffine, pallas};
use rand::{rngs::OsRng, RngCore};

Binary file not shown.

View File

@ -32,7 +32,7 @@ pub const KEY_DIVERSIFICATION_PERSONALIZATION: &str = "z.cash:Orchard-gd";
#[cfg(test)]
mod tests {
use ff::PrimeField;
use pasta_curves::{arithmetic::FieldExt, pallas};
use pasta_curves::pallas;
#[test]
// Orchard uses the Pallas base field as its base field.

View File

@ -66,8 +66,8 @@ impl Nullifier {
#[cfg(any(test, feature = "test-dependencies"))]
#[cfg_attr(docsrs, doc(cfg(feature = "test-dependencies")))]
pub mod testing {
use group::Group;
use pasta_curves::{arithmetic::FieldExt, pallas};
use group::{ff::FromUniformBytes, Group};
use pasta_curves::pallas;
use proptest::collection::vec;
use proptest::prelude::*;
@ -79,7 +79,7 @@ pub mod testing {
pub fn arb_nullifier()(
bytes in vec(any::<u8>(), 64)
) -> Nullifier {
let point = pallas::Point::generator() * pallas::Scalar::from_bytes_wide(&<[u8; 64]>::try_from(bytes).unwrap());
let point = pallas::Point::generator() * pallas::Scalar::from_uniform_bytes(&<[u8; 64]>::try_from(bytes).unwrap());
Nullifier(extract_p(&point))
}
}

View File

@ -258,11 +258,7 @@ impl Domain for OrchardDomainV2 {
secret.kdf_orchard(ephemeral_key)
}
fn note_plaintext_bytes(
note: &Self::Note,
_: &Self::Recipient,
memo: &Self::Memo,
) -> NotePlaintextBytes {
fn note_plaintext_bytes(note: &Self::Note, memo: &Self::Memo) -> NotePlaintextBytes {
let mut np = [0; NOTE_PLAINTEXT_SIZE_V2];
np[0] = 0x02;
np[1..12].copy_from_slice(note.recipient().diversifier().as_array());
@ -621,7 +617,7 @@ mod tests {
// Test encryption
//
let ne = OrchardNoteEncryption::new_with_esk(esk, Some(ovk), note, recipient, tv.memo);
let ne = OrchardNoteEncryption::new_with_esk(esk, Some(ovk), note, tv.memo);
assert_eq!(ne.encrypt_note_plaintext().as_ref(), &tv.c_enc[..]);
assert_eq!(

View File

@ -3,10 +3,10 @@
use core::iter;
use core::ops::Deref;
use ff::{Field, PrimeField, PrimeFieldBits};
use ff::{Field, FromUniformBytes, PrimeField, PrimeFieldBits};
use group::{Curve, Group, GroupEncoding, WnafBase, WnafScalar};
use halo2_gadgets::{poseidon::primitives as poseidon, sinsemilla::primitives as sinsemilla};
use halo2_proofs::arithmetic::{CurveAffine, CurveExt, FieldExt};
use halo2_proofs::arithmetic::{CurveAffine, CurveExt};
use memuse::DynamicUsage;
use pasta_curves::pallas;
use subtle::{ConditionallySelectable, CtOption};
@ -176,7 +176,7 @@ impl PreparedNonZeroScalar {
///
/// [orchardkeycomponents]: https://zips.z.cash/protocol/nu5.pdf#orchardkeycomponents
pub(crate) fn to_base(x: [u8; 64]) -> pallas::Base {
pallas::Base::from_bytes_wide(&x)
pallas::Base::from_uniform_bytes(&x)
}
/// $\mathsf{ToScalar}^\mathsf{Orchard}(x) := LEOS2IP_{\ell_\mathsf{PRFexpand}}(x) (mod r_P)$
@ -185,7 +185,7 @@ pub(crate) fn to_base(x: [u8; 64]) -> pallas::Base {
///
/// [orchardkeycomponents]: https://zips.z.cash/protocol/nu5.pdf#orchardkeycomponents
pub(crate) fn to_scalar(x: [u8; 64]) -> pallas::Scalar {
pallas::Scalar::from_bytes_wide(&x)
pallas::Scalar::from_uniform_bytes(&x)
}
/// Converts from pallas::Base to pallas::Scalar (aka $x \pmod{r_\mathbb{P}}$).
@ -220,10 +220,10 @@ pub(crate) fn commit_ivk(
/// [concretediversifyhash]: https://zips.z.cash/protocol/nu5.pdf#concretediversifyhash
pub(crate) fn diversify_hash(d: &[u8; 11]) -> NonIdentityPallasPoint {
let hasher = pallas::Point::hash_to_curve(KEY_DIVERSIFICATION_PERSONALIZATION);
let pk_d = hasher(d);
let g_d = hasher(d);
// If the identity occurs, we replace it with a different fixed point.
// TODO: Replace the unwrap_or_else with a cached fixed point.
NonIdentityPallasPoint(CtOption::new(pk_d, !pk_d.is_identity()).unwrap_or_else(|| hasher(&[])))
NonIdentityPallasPoint(CtOption::new(g_d, !g_d.is_identity()).unwrap_or_else(|| hasher(&[])))
}
/// $PRF^\mathsf{nfOrchard}(nk, \rho) := Poseidon(nk, \rho)$
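The `to_base`/`to_scalar` changes above are part of the `ff 0.13`/`pasta_curves 0.5` migration: `FieldExt::from_bytes_wide` is gone, and wide reduction now goes through the `FromUniformBytes<64>` trait. A minimal sketch using only the calls shown in this diff:

```rust
use group::ff::FromUniformBytes;
use pasta_curves::pallas;

// Reduce 64 uniformly sampled bytes into a Pallas base and scalar element,
// as `to_base` and `to_scalar` above now do via `from_uniform_bytes`.
fn wide_reduce(bytes: &[u8; 64]) -> (pallas::Base, pallas::Scalar) {
    (
        pallas::Base::from_uniform_bytes(bytes),
        pallas::Scalar::from_uniform_bytes(bytes),
    )
}
```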

View File

@ -11,7 +11,7 @@ use crate::{
};
use halo2_gadgets::sinsemilla::primitives::HashDomain;
use incrementalmerkletree::{Altitude, Hashable};
use incrementalmerkletree::{Hashable, Level};
use pasta_curves::pallas;
use ff::{Field, PrimeField, PrimeFieldBits};
@ -205,7 +205,7 @@ impl Hashable for MerkleHashOrchard {
/// - when hashing two leaves, we produce a node on the layer above the leaves, i.e.
/// layer = 31, l = 0
/// - when hashing to the final root, we produce the anchor with layer = 0, l = 31.
fn combine(altitude: Altitude, left: &Self, right: &Self) -> Self {
fn combine(level: Level, left: &Self, right: &Self) -> Self {
// MerkleCRH Sinsemilla hash domain.
let domain = HashDomain::new(MERKLE_CRH_PERSONALIZATION);
@ -213,7 +213,7 @@ impl Hashable for MerkleHashOrchard {
domain
.hash(
iter::empty()
.chain(i2lebsp_k(altitude.into()).iter().copied())
.chain(i2lebsp_k(level.into()).iter().copied())
.chain(left.0.to_le_bits().iter().by_vals().take(L_ORCHARD_MERKLE))
.chain(right.0.to_le_bits().iter().by_vals().take(L_ORCHARD_MERKLE)),
)
@ -221,8 +221,8 @@ impl Hashable for MerkleHashOrchard {
)
}
fn empty_root(altitude: Altitude) -> Self {
EMPTY_ROOTS[<usize>::from(altitude)]
fn empty_root(level: Level) -> Self {
EMPTY_ROOTS[<usize>::from(level)]
}
}
@ -248,18 +248,14 @@ impl<'de> Deserialize<'de> for MerkleHashOrchard {
#[cfg_attr(docsrs, doc(cfg(feature = "test-dependencies")))]
pub mod testing {
#[cfg(test)]
use incrementalmerkletree::{
use {
crate::tree::{MerkleHashOrchard, EMPTY_ROOTS},
bridgetree::{BridgeTree, Frontier as BridgeFrontier},
Altitude, Frontier, Tree,
group::ff::PrimeField,
incrementalmerkletree::Level,
pasta_curves::pallas,
};
#[cfg(test)]
use crate::tree::{MerkleHashOrchard, EMPTY_ROOTS};
#[cfg(test)]
use group::ff::PrimeField;
#[cfg(test)]
use pasta_curves::pallas;
#[test]
fn test_vectors() {
let tv_empty_roots = crate::test_vectors::commitment_tree::test_vectors().empty_roots;
@ -268,14 +264,14 @@ pub mod testing {
assert_eq!(tv_empty_roots[height], root.to_bytes());
}
let mut tree = BridgeTree::<MerkleHashOrchard, 4>::new(100);
let mut tree = BridgeTree::<MerkleHashOrchard, u32, 4>::new(100, 0);
for (i, tv) in crate::test_vectors::merkle_path::test_vectors()
.into_iter()
.enumerate()
{
let cmx = MerkleHashOrchard::from_bytes(&tv.leaves[i]).unwrap();
tree.append(&cmx);
let position = tree.witness().expect("tree is not empty");
tree.append(cmx);
let position = tree.mark().expect("tree is not empty");
assert_eq!(position, i.into());
let root = tree.root(0).unwrap();
@ -286,7 +282,7 @@ pub mod testing {
// but BridgeTree doesn't encode these.
for j in 0..=i {
assert_eq!(
tree.authentication_path(j.try_into().unwrap(), &root),
tree.witness(j.try_into().unwrap(), 0).ok(),
Some(
tv.paths[j]
.iter()
@ -304,14 +300,14 @@ pub mod testing {
let tv_empty_roots = crate::test_vectors::commitment_tree::test_vectors().empty_roots;
for (altitude, tv_root) in tv_empty_roots.iter().enumerate() {
for (level, tv_root) in tv_empty_roots.iter().enumerate() {
assert_eq!(
MerkleHashOrchard::empty_root(Altitude::from(altitude as u8))
MerkleHashOrchard::empty_root(Level::from(level as u8))
.0
.to_repr(),
*tv_root,
"Empty root mismatch at altitude {}",
altitude
"Empty root mismatch at level {}",
level
);
}
}
@ -360,7 +356,7 @@ pub mod testing {
let mut frontier = BridgeFrontier::<MerkleHashOrchard, 32>::empty();
for commitment in commitments.iter() {
let cmx = MerkleHashOrchard(pallas::Base::from_repr(*commitment).unwrap());
frontier.append(&cmx);
frontier.append(cmx);
}
assert_eq!(frontier.root().0, pallas::Base::from_repr(anchor).unwrap());
}

View File

@ -58,7 +58,7 @@ use crate::{
primitives::redpallas::{self, Binding},
};
use crate::builder::Error;
use crate::builder::BuildError;
use crate::note::AssetBase;
/// Maximum note value.
@ -190,10 +190,10 @@ impl ValueSum {
)
}
pub(crate) fn into<V: TryFrom<i64>>(self) -> Result<V, Error> {
pub(crate) fn into<V: TryFrom<i64>>(self) -> Result<V, BuildError> {
i64::try_from(self)
.map_err(Error::ValueSum)
.and_then(|i| V::try_from(i).map_err(|_| Error::ValueSum(OverflowError)))
.map_err(BuildError::ValueSum)
.and_then(|i| V::try_from(i).map_err(|_| BuildError::ValueSum(OverflowError)))
}
}
@ -399,7 +399,8 @@ impl ValueCommitment {
#[cfg(any(test, feature = "test-dependencies"))]
#[cfg_attr(docsrs, doc(cfg(feature = "test-dependencies")))]
pub mod testing {
use pasta_curves::{arithmetic::FieldExt, pallas};
use group::ff::FromUniformBytes;
use pasta_curves::pallas;
use proptest::prelude::*;
use super::{NoteValue, ValueCommitTrapdoor, ValueSum, MAX_NOTE_VALUE, VALUE_SUM_RANGE};
@ -410,21 +411,21 @@ pub mod testing {
// Instead of rejecting out-of-range bytes, let's reduce them.
let mut buf = [0; 64];
buf[..32].copy_from_slice(&bytes);
pallas::Scalar::from_bytes_wide(&buf)
pallas::Scalar::from_uniform_bytes(&buf)
}
}
prop_compose! {
/// Generate an arbitrary [`ValueSum`] in the range of valid Zcash values.
pub fn arb_value_sum()(value in VALUE_SUM_RANGE) -> ValueSum {
ValueSum(value as i128)
ValueSum(value)
}
}
prop_compose! {
/// Generate an arbitrary [`ValueSum`] in the range of valid Zcash values.
pub fn arb_value_sum_bounded(bound: NoteValue)(value in -(bound.0 as i128)..=(bound.0 as i128)) -> ValueSum {
ValueSum(value as i128)
ValueSum(value)
}
}

View File

@ -1,11 +1,11 @@
use incrementalmerkletree::{bridgetree::BridgeTree, Hashable, Tree};
use orchard::note::AssetBase;
use bridgetree::BridgeTree;
use incrementalmerkletree::Hashable;
use orchard::{
builder::Builder,
bundle::{Authorized, Flags},
circuit::{ProvingKey, VerifyingKey},
keys::{FullViewingKey, PreparedIncomingViewingKey, Scope, SpendAuthorizingKey, SpendingKey},
note::ExtractedNoteCommitment,
note::{AssetBase, ExtractedNoteCommitment},
note_encryption_v3::OrchardDomainV3,
tree::{MerkleHashOrchard, MerklePath},
value::NoteValue,
@ -33,11 +33,11 @@ pub fn build_merkle_path(note: &Note) -> (MerklePath, Anchor) {
// Use the tree with a single leaf.
let cmx: ExtractedNoteCommitment = note.commitment().into();
let leaf = MerkleHashOrchard::from_cmx(&cmx);
let mut tree = BridgeTree::<MerkleHashOrchard, 32>::new(0);
tree.append(&leaf);
let position = tree.witness().unwrap();
let mut tree = BridgeTree::<MerkleHashOrchard, u32, 32>::new(100, 0);
tree.append(leaf);
let position = tree.mark().unwrap();
let root = tree.root(0).unwrap();
let auth_path = tree.authentication_path(position, &root).unwrap();
let auth_path = tree.witness(position, 0).unwrap();
let merkle_path = MerklePath::from_parts(
u64::from(position).try_into().unwrap(),
auth_path[..].try_into().unwrap(),
@ -76,7 +76,7 @@ fn bundle_chain() {
let unauthorized = builder.build(&mut rng).unwrap();
let sighash = unauthorized.commitment().into();
let proven = unauthorized.create_proof(&pk, &mut rng).unwrap();
proven.apply_signatures(&mut rng, sighash, &[]).unwrap()
proven.apply_signatures(rng, sighash, &[]).unwrap()
};
// Verify the shielding bundle.
@ -112,7 +112,7 @@ fn bundle_chain() {
let sighash = unauthorized.commitment().into();
let proven = unauthorized.create_proof(&pk, &mut rng).unwrap();
proven
.apply_signatures(&mut rng, sighash, &[SpendAuthorizingKey::from(&sk)])
.apply_signatures(rng, sighash, &[SpendAuthorizingKey::from(&sk)])
.unwrap()
};
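`build_merkle_path` above collects the whole bridgetree 0.2 migration in one place: the tree gains an extra `u32` type parameter and an extra constructor argument, `append` takes the leaf by value, `witness()` becomes `mark()`, and `authentication_path(position, &root)` becomes `witness(position, 0)`. A condensed old-versus-new sketch, assuming exactly the call signatures used in the changed tests:

```rust
use bridgetree::BridgeTree;
use orchard::tree::MerkleHashOrchard;

// Side-by-side mapping of the bridgetree calls changed in the tests above.
fn bridgetree_migration_sketch(leaf: MerkleHashOrchard) {
    // old: BridgeTree::<MerkleHashOrchard, 32>::new(0)
    let mut tree = BridgeTree::<MerkleHashOrchard, u32, 32>::new(100, 0);
    // old: tree.append(&leaf)
    tree.append(leaf);
    // old: tree.witness()
    let position = tree.mark().expect("tree is not empty");
    let root = tree.root(0).expect("tree has a root");
    // old: tree.authentication_path(position, &root)
    let auth_path = tree.witness(position, 0).expect("position was marked");
    let _ = (root, auth_path);
}
```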

View File

@ -1,8 +1,8 @@
mod builder;
use crate::builder::verify_bundle;
use incrementalmerkletree::bridgetree::BridgeTree;
use incrementalmerkletree::{Hashable, Tree};
use bridgetree::BridgeTree;
use incrementalmerkletree::Hashable;
use orchard::bundle::Authorized;
use orchard::issuance::{verify_issue_bundle, IssueBundle, Signed, Unauthorized};
use orchard::keys::{IssuanceAuthorizingKey, IssuanceValidatingKey};
@ -99,32 +99,32 @@ pub fn build_merkle_path_with_two_leaves(
note1: &Note,
note2: &Note,
) -> (MerklePath, MerklePath, Anchor) {
let mut tree = BridgeTree::<MerkleHashOrchard, 32>::new(0);
let mut tree = BridgeTree::<MerkleHashOrchard, u32, 32>::new(100, 0);
// Add first leaf
let cmx1: ExtractedNoteCommitment = note1.commitment().into();
let leaf1 = MerkleHashOrchard::from_cmx(&cmx1);
tree.append(&leaf1);
let position1 = tree.witness().unwrap();
tree.append(leaf1);
let position1 = tree.mark().unwrap();
// Add second leaf
let cmx2: ExtractedNoteCommitment = note2.commitment().into();
let leaf2 = MerkleHashOrchard::from_cmx(&cmx2);
tree.append(&leaf2);
let position2 = tree.witness().unwrap();
tree.append(leaf2);
let position2 = tree.mark().unwrap();
let root = tree.root(0).unwrap();
let anchor = root.into();
// Calculate first path
let auth_path1 = tree.authentication_path(position1, &root).unwrap();
let auth_path1 = tree.witness(position1, 0).unwrap();
let merkle_path1 = MerklePath::from_parts(
u64::from(position1).try_into().unwrap(),
auth_path1[..].try_into().unwrap(),
);
// Calculate second path
let auth_path2 = tree.authentication_path(position2, &root).unwrap();
let auth_path2 = tree.witness(position2, 0).unwrap();
let merkle_path2 = MerklePath::from_parts(
u64::from(position2).try_into().unwrap(),
auth_path2[..].try_into().unwrap(),
@ -235,17 +235,23 @@ fn build_and_verify_bundle(
anchor: Anchor,
expected_num_actions: usize,
keys: &Keychain,
) -> Result<(), &'static str> {
) -> Result<(), String> {
let rng = OsRng;
let shielded_bundle: Bundle<_, i64> = {
let mut builder = Builder::new(Flags::from_parts(true, true), anchor);
spends.iter().try_for_each(|spend| {
builder.add_spend(keys.fvk().clone(), spend.note, spend.merkle_path().clone())
})?;
outputs.iter().try_for_each(|output| {
builder.add_recipient(None, keys.recipient, output.value, output.asset, None)
})?;
spends
.iter()
.try_for_each(|spend| {
builder.add_spend(keys.fvk().clone(), spend.note, spend.merkle_path().clone())
})
.map_err(|err| err.to_string())?;
outputs
.iter()
.try_for_each(|output| {
builder.add_recipient(None, keys.recipient, output.value, output.asset, None)
})
.map_err(|err| err.to_string())?;
assets_to_burn
.into_iter()
.try_for_each(|(asset, value)| builder.add_burn(asset, value))?;